/* Blender v2.93 — tracking_stabilize.c (2D movie-clip stabilization).
 * NOTE: this listing was extracted from generated documentation; embedded
 * line numbers and some dropped lines stem from that extraction. */
1 /*
2  * This program is free software; you can redistribute it and/or
3  * modify it under the terms of the GNU General Public License
4  * as published by the Free Software Foundation; either version 2
5  * of the License, or (at your option) any later version.
6  *
7  * This program is distributed in the hope that it will be useful,
8  * but WITHOUT ANY WARRANTY; without even the implied warranty of
9  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10  * GNU General Public License for more details.
11  *
12  * You should have received a copy of the GNU General Public License
13  * along with this program; if not, write to the Free Software Foundation,
14  * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
15  *
16  * The Original Code is Copyright (C) 2011 Blender Foundation.
17  * All rights reserved.
18  */
19 
26 #include <limits.h>
27 
28 #include "DNA_anim_types.h"
29 #include "DNA_movieclip_types.h"
30 #include "DNA_scene_types.h"
31 #include "RNA_access.h"
32 
33 #include "BLI_ghash.h"
34 #include "BLI_listbase.h"
35 #include "BLI_math.h"
36 #include "BLI_math_vector.h"
37 #include "BLI_sort_utils.h"
38 #include "BLI_task.h"
39 #include "BLI_utildefines.h"
40 
41 #include "BKE_fcurve.h"
42 #include "BKE_movieclip.h"
43 #include "BKE_tracking.h"
44 
45 #include "IMB_colormanagement.h"
46 #include "IMB_imbuf.h"
47 #include "IMB_imbuf_types.h"
48 #include "MEM_guardedalloc.h"
49 
50 /* == Parameterization constants == */
51 
52 /* When measuring the scale changes relative to the rotation pivot point, it
53  * might happen accidentally that a probe point (tracking point), which doesn't
54  * actually move on a circular path, gets very close to the pivot point, causing
55  * the measured scale contribution to go toward infinity. We damp this undesired
56  * effect by adding a bias (floor) to the measured distances, which will
57  * dominate very small distances and thus cause the corresponding track's
58  * contribution to diminish.
59  * Measurements happen in normalized (0...1) coordinates within a frame.
60  */
static const float SCALE_ERROR_LIMIT_BIAS = 0.01f; /* tuning constant; never written — const-qualify */
62 
63 /* When to consider a track as completely faded out.
64  * This is used in conjunction with the "disabled" flag of the track
65  * to determine start positions, end positions and gaps
66  */
static const float EPSILON_WEIGHT = 0.005f; /* tuning constant; never written — const-qualify */
68 
69 /* == private working data == */
70 
71 /* Per track baseline for stabilization, defined at reference frame.
72  * A track's reference frame is chosen as close as possible to the (global)
73  * anchor_frame. Baseline holds the constant part of each track's contribution
74  * to the observed movement; it is calculated at initialization pass, using the
75  * measurement value at reference frame plus the average contribution to fill
76  * the gap between global anchor_frame and the reference frame for this track.
77  * This struct with private working data is associated to the local call context
78  * via `StabContext::private_track_data`
79  */
80 typedef struct TrackStabilizationBase {
82 
83  /* measured relative to translated pivot */
85 
86  /* measured relative to translated pivot */
88 
92 
93 /* Tracks are reordered for initialization, starting as close as possible to
94  * anchor_frame
95  */
96 typedef struct TrackInitOrder {
101 
102 /* Per frame private working data, for accessing possibly animated values. */
103 typedef struct StabContext {
116 
118  MovieTrackingTrack *track)
119 {
120  return BLI_ghash_lookup(ctx->private_track_data, track);
121 }
122 
124  MovieTrackingTrack *track,
125  TrackStabilizationBase *private_data)
126 {
127  BLI_ghash_insert(ctx->private_track_data, track, private_data);
128 }
129 
131 {
132  if (val != NULL) {
133  MEM_freeN(val);
134  }
135 }
136 
137 /* == access animated values for given frame == */
138 
139 static FCurve *retrieve_stab_animation(MovieClip *clip, const char *data_path, int idx)
140 {
141  return id_data_find_fcurve(&clip->id,
142  &clip->tracking.stabilization,
144  data_path,
145  idx,
146  NULL);
147 }
148 
150 {
151  return id_data_find_fcurve(&clip->id, track, &RNA_MovieTrackingTrack, "weight_stab", 0, NULL);
152 }
153 
154 static float fetch_from_fcurve(FCurve *animationCurve,
155  int framenr,
156  StabContext *ctx,
157  float default_value)
158 {
159  if (ctx && ctx->use_animation && animationCurve) {
160  int scene_framenr = BKE_movieclip_remap_clip_to_scene_frame(ctx->clip, framenr);
161  return evaluate_fcurve(animationCurve, scene_framenr);
162  }
163  return default_value;
164 }
165 
166 static float get_animated_locinf(StabContext *ctx, int framenr)
167 {
168  return fetch_from_fcurve(ctx->locinf, framenr, ctx, ctx->stab->locinf);
169 }
170 
171 static float get_animated_rotinf(StabContext *ctx, int framenr)
172 {
173  return fetch_from_fcurve(ctx->rotinf, framenr, ctx, ctx->stab->rotinf);
174 }
175 
176 static float get_animated_scaleinf(StabContext *ctx, int framenr)
177 {
178  return fetch_from_fcurve(ctx->scaleinf, framenr, ctx, ctx->stab->scaleinf);
179 }
180 
181 static void get_animated_target_pos(StabContext *ctx, int framenr, float target_pos[2])
182 {
183  target_pos[0] = fetch_from_fcurve(ctx->target_pos[0], framenr, ctx, ctx->stab->target_pos[0]);
184  target_pos[1] = fetch_from_fcurve(ctx->target_pos[1], framenr, ctx, ctx->stab->target_pos[1]);
185 }
186 
187 static float get_animated_target_rot(StabContext *ctx, int framenr)
188 {
189  return fetch_from_fcurve(ctx->target_rot, framenr, ctx, ctx->stab->target_rot);
190 }
191 
192 static float get_animated_target_scale(StabContext *ctx, int framenr)
193 {
194  return fetch_from_fcurve(ctx->target_scale, framenr, ctx, ctx->stab->scale);
195 }
196 
/* Per-track stabilization weight at the given frame.
 * When the track's private working data carries a `weight_stab` F-curve,
 * evaluate it at the scene-mapped frame; otherwise fall back to the track's
 * static `weight_stab` value.
 * NOTE(review): the declaration/lookup of `working_data` (original line 199)
 * is missing from this extraction — confirm against the original file.
 */
197 static float get_animated_weight(StabContext *ctx, MovieTrackingTrack *track, int framenr)
198 {
200  if (working_data && working_data->track_weight_curve) {
201  int scene_framenr = BKE_movieclip_remap_clip_to_scene_frame(ctx->clip, framenr);
202  return evaluate_fcurve(working_data->track_weight_curve, scene_framenr);
203  }
204  /* Use weight at global 'current frame' as fallback default. */
205  return track->weight_stab;
206 }
207 
208 static void use_values_from_fcurves(StabContext *ctx, bool toggle)
209 {
210  if (ctx != NULL) {
211  ctx->use_animation = toggle;
212  }
213 }
214 
215 /* Prepare per call private working area.
216  * Used for access to possibly animated values: retrieve available F-curves.
217  */
219 {
220  StabContext *ctx = MEM_callocN(sizeof(StabContext), "2D stabilization animation runtime data");
221  ctx->clip = clip;
222  ctx->tracking = &clip->tracking;
223  ctx->stab = &clip->tracking.stabilization;
224  ctx->private_track_data = BLI_ghash_ptr_new("2D stabilization per track private working data");
225  ctx->locinf = retrieve_stab_animation(clip, "influence_location", 0);
226  ctx->rotinf = retrieve_stab_animation(clip, "influence_rotation", 0);
227  ctx->scaleinf = retrieve_stab_animation(clip, "influence_scale", 0);
228  ctx->target_pos[0] = retrieve_stab_animation(clip, "target_pos", 0);
229  ctx->target_pos[1] = retrieve_stab_animation(clip, "target_pos", 1);
230  ctx->target_rot = retrieve_stab_animation(clip, "target_rot", 0);
231  ctx->target_scale = retrieve_stab_animation(clip, "target_zoom", 0);
232  ctx->use_animation = true;
233  return ctx;
234 }
235 
245 {
246  if (ctx != NULL) {
248  MEM_freeN(ctx);
249  }
250 }
251 
253 {
255  return (working_data != NULL && working_data->is_init_for_stabilization);
256 }
257 
259 {
260  return (track->flag & TRACK_USE_2D_STAB) && is_init_for_stabilization(ctx, track);
261 }
262 
/* A track counts as disabled at this marker when the marker itself is flagged
 * MARKER_DISABLED, or when the (possibly animated) stabilization weight has
 * faded below EPSILON_WEIGHT.
 * NOTE(review): the opening of the signature (original line 263, presumably
 * `static bool is_effectively_disabled(StabContext *ctx,`) is missing from
 * this extraction.
 */
264  MovieTrackingTrack *track,
265  MovieTrackingMarker *marker)
266 {
267  return (marker->flag & MARKER_DISABLED) ||
268  (EPSILON_WEIGHT > get_animated_weight(ctx, track, marker->framenr));
269 }
270 
271 static int search_closest_marker_index(MovieTrackingTrack *track, int ref_frame)
272 {
273  const MovieTrackingMarker *marker = BKE_tracking_marker_get(track, ref_frame);
274  return marker - track->markers;
275 }
276 
278  StabContext *ctx, MovieTrackingTrack *track, int i, int ref_frame, int *next_higher)
279 {
281  int end = track->markersnr;
282  BLI_assert(0 <= i && i < end);
283 
284  while (i < end &&
285  (markers[i].framenr < ref_frame || is_effectively_disabled(ctx, track, &markers[i]))) {
286  i++;
287  }
288  if (i < end && markers[i].framenr < *next_higher) {
289  BLI_assert(markers[i].framenr >= ref_frame);
290  *next_higher = markers[i].framenr;
291  }
292 }
293 
295  StabContext *ctx, MovieTrackingTrack *track, int i, int ref_frame, int *next_lower)
296 {
298  BLI_assert(0 <= i && i < track->markersnr);
299  while (i >= 0 &&
300  (markers[i].framenr > ref_frame || is_effectively_disabled(ctx, track, &markers[i]))) {
301  i--;
302  }
303  if (0 <= i && markers[i].framenr > *next_lower) {
304  BLI_assert(markers[i].framenr <= ref_frame);
305  *next_lower = markers[i].framenr;
306  }
307 }
308 
309 /* Find closest frames with usable stabilization data.
310  * A frame counts as _usable_ when there is at least one track marked for
311  * translation stabilization, which has an enabled tracking marker at this very
312  * frame. We search both for the next lower and next higher position, to allow
313  * the caller to interpolate gaps and to extrapolate at the ends of the
314  * definition range. */
316  int framenr,
317  int *next_lower,
318  int *next_higher)
319 {
321  if (is_usable_for_stabilization(ctx, track)) {
322  int startpoint = search_closest_marker_index(track, framenr);
323  retrieve_next_higher_usable_frame(ctx, track, startpoint, framenr, next_higher);
324  retrieve_next_lower_usable_frame(ctx, track, startpoint, framenr, next_lower);
325  }
326  }
327 }
328 
329 /* Find active (enabled) marker closest to the reference frame. */
331  MovieTrackingTrack *track,
332  int ref_frame)
333 {
334  int next_lower = MINAFRAME;
335  int next_higher = MAXFRAME;
336  int i = search_closest_marker_index(track, ref_frame);
337  retrieve_next_higher_usable_frame(ctx, track, i, ref_frame, &next_higher);
338  retrieve_next_lower_usable_frame(ctx, track, i, ref_frame, &next_lower);
339 
340  if ((next_higher - ref_frame) < (ref_frame - next_lower)) {
341  return BKE_tracking_marker_get_exact(track, next_higher);
342  }
343 
344  return BKE_tracking_marker_get_exact(track, next_lower);
345 }
346 
347 /* Retrieve tracking data, if available and applicable for this frame.
348  * The returned weight value signals the validity; data recorded for this
349  * tracking marker on the exact requested frame is output with the full weight
350  * of this track, while gaps in the data sequence cause the weight to go to zero.
351  */
353  MovieTrackingTrack *track,
354  int framenr,
355  float *r_weight)
356 {
357  MovieTrackingMarker *marker = BKE_tracking_marker_get_exact(track, framenr);
358  if (marker != NULL && !(marker->flag & MARKER_DISABLED)) {
359  *r_weight = get_animated_weight(ctx, track, framenr);
360  return marker;
361  }
362 
363  /* No marker at this frame (=gap) or marker disabled. */
364  *r_weight = 0.0f;
365  return NULL;
366 }
367 
368 /* Define the reference point for rotation/scale measurement and compensation.
369  * The stabilizer works by assuming the image was distorted by a affine linear
370  * transform, i.e. it was rotated and stretched around this reference point
371  * (pivot point) and then shifted laterally. Any scale and orientation changes
372  * will be picked up relative to this point. And later the image will be
373  * stabilized by rotating around this point. The result can only be as
374  * accurate as this pivot point actually matches the real rotation center
375  * of the actual movements. Thus any scheme to define a pivot point is
376  * always guesswork.
377  *
378  * As a simple default, we use the weighted average of the location markers
379  * of the current frame as pivot point. TODO It is planned to add further
380  * options, like e.g. anchoring the pivot point at the canvas. Moreover,
381  * it is planned to allow for a user controllable offset.
382  */
/* Initialize the pivot point for rotation/scale handling from the given
 * reference position. Currently a plain copy; an animated offset is planned
 * to be added here (TODO). */
static void setup_pivot(const float ref_pos[2], float r_pivot[2])
{
  r_pivot[0] = ref_pos[0];
  r_pivot[1] = ref_pos[1];
}
388 
389 /* Calculate the contribution of a single track at the time position (frame) of
390  * the given marker. Each track has a local reference frame, which is as close
391  * as possible to the global anchor_frame. Thus the translation contribution is
392  * comprised of the offset relative to the image position at that reference
393  * frame, plus a guess of the contribution for the time span between the
394  * anchor_frame and the local reference frame of this track. The constant part
395  * of this contribution is precomputed initially. At the anchor_frame, by
396  * definition the contribution of all tracks is zero, keeping the frame in place.
397  *
398  * track_ref is per track baseline contribution at reference frame; filled in at
399  * initialization
400  * marker is tracking data to use as contribution for current frame.
401  * result_offset is a total cumulated contribution of this track,
402  * relative to the stabilization anchor_frame,
403  * in normalized (0...1) coordinates.
404  */
/* Cumulated translation contribution of one track at the marker's frame:
 * the precomputed per-track baseline offset plus the marker's measured
 * position, in normalized (0...1) coordinates (see the comment block above).
 * NOTE(review): the first signature line (original line 405, presumably
 * `static void translation_contribution(TrackStabilizationBase *track_ref,`)
 * is missing from this extraction.
 */
406  MovieTrackingMarker *marker,
407  float result_offset[2])
408 {
409  add_v2_v2v2(result_offset, track_ref->stabilization_offset_base, marker->pos);
410 }
411 
412 /* Similar to the ::translation_contribution(), the rotation contribution is
413  * comprised of the contribution by this individual track, and the averaged
414  * contribution from anchor_frame to the ref point of this track.
415  * - Contribution is in terms of angles, -pi < angle < +pi, and all averaging
416  * happens in this domain.
417  * - Yet the actual measurement happens as vector between pivot and the current
418  * tracking point
419  * - Currently we use the center of frame as approximation for the rotation pivot
420  * point.
421  * - Moreover, the pivot point has to be compensated for the already determined
422  * shift offset, in order to get the pure rotation around the pivot.
423  * To turn this into a _contribution_, the likewise corrected angle at the
424  * reference frame has to be subtracted, to get only the pure angle difference
425  * this tracking point has captured.
426  * - To get from vectors to angles, we have to go through an arcus tangens,
427  * which involves the issue of the definition range: the resulting angles will
428  * flip by 360deg when the measured vector passes from the 2nd to the third
429  * quadrant, thus messing up the average calculation. Since _any_ tracking
430  * point might be used, these problems are quite common in practice.
431  * - Thus we perform the subtraction of the reference and the addition of the
432  * baseline contribution in polar coordinates as simple addition of angles;
433  * since these parts are fixed, we can bake them into a rotation matrix.
434  * With this approach, the border of the arcus tangens definition range will
435  * be reached only, when the _whole_ contribution approaches +- 180deg,
436  * meaning we've already tilted the frame upside down. This situation is way
437  * less common and can be tolerated.
438  * - As an additional feature, when activated, also changes in image scale
439  * relative to the rotation center can be picked up. To handle those values
440  * in the same framework, we average the scales as logarithms.
441  *
442  * aspect is a total aspect ratio of the undistorted image (includes fame and
443  * pixel aspect). The function returns a quality factor, which can be used
444  * to damp the contributions of points in close proximity to the pivot point,
445  * since such contributions might be dominated by rounding errors and thus
446  * poison the calculated average. When the quality factor goes towards zero,
447  * the weight of this contribution should be reduced accordingly.
448  */
450  MovieTrackingMarker *marker,
451  const float aspect,
452  const float pivot[2],
453  float *result_angle,
454  float *result_scale)
455 {
456  float len, quality;
457  float pos[2];
458  sub_v2_v2v2(pos, marker->pos, pivot);
459 
460  pos[0] *= aspect;
462 
463  *result_angle = atan2f(pos[1], pos[0]);
464 
465  len = len_v2(pos);
466 
467  /* prevent points very close to the pivot point from poisoning the result */
470 
471  *result_scale = len * track_ref->stabilization_scale_base;
472  BLI_assert(0.0 < *result_scale);
473 
474  return quality;
475 }
476 
477 /* Workaround to allow for rotation around an arbitrary pivot point.
478  * Currently, the public API functions do not support this flexibility.
479  * Rather, rotation will always be applied around a fixed origin.
480  * As a workaround, we shift the image after rotation to match the
481  * desired rotation center. And since this offset needs to be applied
482  * after the rotation and scaling, we can collapse it with the
483  * translation compensation, which is also a lateral shift (offset).
484  * The offset to apply is intended_pivot - rotated_pivot
485  */
/* Shift the image after rotation/scaling so the effective rotation center
 * matches the desired pivot, since the public transform API only rotates
 * around a fixed origin (frame center). The required lateral offset,
 * intended_pivot - rotated_pivot, is folded into the translation
 * compensation (see comment block above). */
static void compensate_rotation_center(const int size,
                                       float aspect,
                                       const float angle,
                                       const float scale,
                                       const float pivot[2],
                                       float result_translation[2])
{
  const float origin[2] = {0.5f * aspect * size, 0.5f * size};
  float rotation_mat[2][2];
  float displaced_pivot[2];

  /* Where the pivot ends up after the fixed-origin rotation and scaling. */
  angle_to_mat2(rotation_mat, +angle);
  sub_v2_v2v2(displaced_pivot, pivot, origin);
  mul_m2_v2(rotation_mat, displaced_pivot);
  mul_v2_fl(displaced_pivot, scale);
  add_v2_v2(displaced_pivot, origin);

  /* Offset = intended pivot minus rotated pivot. */
  add_v2_v2(result_translation, pivot);
  sub_v2_v2(result_translation, displaced_pivot);
}
507 
508 /* Weighted average of the per track cumulated contributions at given frame.
509  * Returns truth if all desired calculations could be done and all averages are
510  * available.
511  *
512  * NOTE: Even if the result is not `true`, the returned translation and angle
513  * are always sensible and as good as can be. Especially in the
514  * initialization phase we might not be able to get any average (yet) or
515  * get only a translation value. Since initialization visits tracks in a
516  * specific order, starting from anchor_frame, the result is logically
517  * correct non the less. But under normal operation conditions,
518  * a result of `false` should disable the stabilization function
519  */
521  int framenr,
522  float aspect,
523  float r_translation[2],
524  float r_pivot[2],
525  float *r_angle,
526  float *r_scale_step)
527 {
528  bool ok;
529  float weight_sum;
530  MovieTracking *tracking = ctx->tracking;
531  MovieTrackingStabilization *stab = &tracking->stabilization;
532  float ref_pos[2];
534 
535  zero_v2(r_translation);
536  *r_scale_step = 0.0f; /* logarithm */
537  *r_angle = 0.0f;
538 
539  zero_v2(ref_pos);
540 
541  ok = false;
542  weight_sum = 0.0f;
543  LISTBASE_FOREACH (MovieTrackingTrack *, track, &tracking->tracks) {
544  if (!is_init_for_stabilization(ctx, track)) {
545  continue;
546  }
547  if (track->flag & TRACK_USE_2D_STAB) {
548  float weight = 0.0f;
549  MovieTrackingMarker *marker = get_tracking_data_point(ctx, track, framenr, &weight);
550  if (marker) {
552  track);
553  BLI_assert(stabilization_base != NULL);
554  float offset[2];
555  weight_sum += weight;
556  translation_contribution(stabilization_base, marker, offset);
557  r_translation[0] += weight * offset[0];
558  r_translation[1] += weight * offset[1];
559  ref_pos[0] += weight * marker->pos[0];
560  ref_pos[1] += weight * marker->pos[1];
561  ok |= (weight_sum > EPSILON_WEIGHT);
562  }
563  }
564  }
565  if (!ok) {
566  return false;
567  }
568 
569  ref_pos[0] /= weight_sum;
570  ref_pos[1] /= weight_sum;
571  r_translation[0] /= weight_sum;
572  r_translation[1] /= weight_sum;
573  setup_pivot(ref_pos, r_pivot);
574 
575  if (!(stab->flag & TRACKING_STABILIZE_ROTATION)) {
576  return ok;
577  }
578 
579  ok = false;
580  weight_sum = 0.0f;
581  LISTBASE_FOREACH (MovieTrackingTrack *, track, &tracking->tracks) {
582  if (!is_init_for_stabilization(ctx, track)) {
583  continue;
584  }
585  if (track->flag & TRACK_USE_2D_STAB_ROT) {
586  float weight = 0.0f;
587  MovieTrackingMarker *marker = get_tracking_data_point(ctx, track, framenr, &weight);
588  if (marker) {
590  track);
591  BLI_assert(stabilization_base != NULL);
592  float rotation, scale, quality;
593  quality = rotation_contribution(
594  stabilization_base, marker, aspect, r_pivot, &rotation, &scale);
595  const float quality_weight = weight * quality;
596  weight_sum += quality_weight;
597  *r_angle += rotation * quality_weight;
598  if (stab->flag & TRACKING_STABILIZE_SCALE) {
599  *r_scale_step += logf(scale) * quality_weight;
600  }
601  else {
602  *r_scale_step = 0;
603  }
604  /* NOTE: Use original marker weight and not the scaled one with the proximity here to allow
605  * simple stabilization setups when there is a single track in a close proximity of the
606  * center. */
607  ok |= (weight > EPSILON_WEIGHT);
608  }
609  }
610  }
611  if (ok) {
612  *r_scale_step /= weight_sum;
613  *r_angle /= weight_sum;
614  }
615  else {
616  /* We reach this point because translation could be calculated,
617  * but rotation/scale found no data to work on.
618  */
619  *r_scale_step = 0.0f;
620  *r_angle = 0.0f;
621  }
622  return true;
623 }
624 
625 /* Calculate weight center of location tracks for given frame.
626  * This function performs similar calculations as average_track_contributions(),
627  * but does not require the tracks to be initialized for stabilization. Moreover,
628  * when there is no usable tracking data for the given frame number, data from
629  * a neighboring frame is used. Thus this function can be used to calculate
630  * a starting point on initialization.
631  */
632 static void average_marker_positions(StabContext *ctx, int framenr, float r_ref_pos[2])
633 {
634  bool ok = false;
635  float weight_sum;
636  MovieTracking *tracking = ctx->tracking;
637 
638  zero_v2(r_ref_pos);
639  weight_sum = 0.0f;
640  LISTBASE_FOREACH (MovieTrackingTrack *, track, &tracking->tracks) {
641  if (track->flag & TRACK_USE_2D_STAB) {
642  float weight = 0.0f;
643  MovieTrackingMarker *marker = get_tracking_data_point(ctx, track, framenr, &weight);
644  if (marker) {
645  weight_sum += weight;
646  r_ref_pos[0] += weight * marker->pos[0];
647  r_ref_pos[1] += weight * marker->pos[1];
648  ok |= (weight_sum > EPSILON_WEIGHT);
649  }
650  }
651  }
652  if (ok) {
653  r_ref_pos[0] /= weight_sum;
654  r_ref_pos[1] /= weight_sum;
655  }
656  else {
657  /* No usable tracking data on any track on this frame.
658  * Use data from neighboring frames to extrapolate...
659  */
660  int next_lower = MINAFRAME;
661  int next_higher = MAXFRAME;
662  use_values_from_fcurves(ctx, true);
663  LISTBASE_FOREACH (MovieTrackingTrack *, track, &tracking->tracks) {
664  /* Note: we deliberately do not care if this track
665  * is already initialized for stabilization. */
666  if (track->flag & TRACK_USE_2D_STAB) {
667  int startpoint = search_closest_marker_index(track, framenr);
668  retrieve_next_higher_usable_frame(ctx, track, startpoint, framenr, &next_higher);
669  retrieve_next_lower_usable_frame(ctx, track, startpoint, framenr, &next_lower);
670  }
671  }
672  if (next_lower >= MINFRAME) {
673  /* use next usable frame to the left.
674  * Also default to this frame when we're in a gap */
675  average_marker_positions(ctx, next_lower, r_ref_pos);
676  }
677  else if (next_higher < MAXFRAME) {
678  average_marker_positions(ctx, next_higher, r_ref_pos);
679  }
680  use_values_from_fcurves(ctx, false);
681  }
682 }
683 
684 /* Linear interpolation of data retrieved at two measurement points.
685  * This function is used to fill gaps in the middle of the covered area,
686  * at frames without any usable tracks for stabilization.
687  *
688  * framenr is a position to interpolate for.
689  * frame_a is a valid measurement point below framenr
690  * frame_b is a valid measurement point above framenr
691  * Returns truth if both measurements could actually be retrieved.
692  * Otherwise output parameters remain unaltered
693  */
695  int framenr,
696  int frame_a,
697  int frame_b,
698  const float aspect,
699  float r_translation[2],
700  float r_pivot[2],
701  float *r_angle,
702  float *r_scale_step)
703 {
704  float t, s;
705  float trans_a[2], trans_b[2];
706  float angle_a, angle_b;
707  float scale_a, scale_b;
708  float pivot_a[2], pivot_b[2];
709  bool success = false;
710 
711  BLI_assert(frame_a <= frame_b);
712  BLI_assert(frame_a <= framenr);
713  BLI_assert(framenr <= frame_b);
714 
715  t = ((float)framenr - frame_a) / (frame_b - frame_a);
716  s = 1.0f - t;
717 
718  success = average_track_contributions(
719  ctx, frame_a, aspect, trans_a, pivot_a, &angle_a, &scale_a);
720  if (!success) {
721  return false;
722  }
723  success = average_track_contributions(
724  ctx, frame_b, aspect, trans_b, pivot_b, &angle_b, &scale_b);
725  if (!success) {
726  return false;
727  }
728 
729  interp_v2_v2v2(r_translation, trans_a, trans_b, t);
730  interp_v2_v2v2(r_pivot, pivot_a, pivot_b, t);
731  *r_scale_step = s * scale_a + t * scale_b;
732  *r_angle = s * angle_a + t * angle_b;
733  return true;
734 }
735 
736 /* Reorder tracks starting with those providing a tracking data frame
737  * closest to the global anchor_frame. Tracks with a gap at anchor_frame or
738  * starting farer away from anchor_frame altogether will be visited later.
739  * This allows to build up baseline contributions incrementally.
740  *
741  * order is an array for sorting the tracks. Must be of suitable size to hold
742  * all tracks.
743  * Returns number of actually usable tracks, can be less than the overall number
744  * of tracks.
745  *
746  * NOTE: After returning, the order array holds entries up to the number of
747  * usable tracks, appropriately sorted starting with the closest tracks.
748  * Initialization includes disabled tracks, since they might be enabled
749  * through automation later.
750  */
752 {
753  size_t tracknr = 0;
754  MovieTracking *tracking = ctx->tracking;
755  int anchor_frame = tracking->stabilization.anchor_frame;
756 
757  LISTBASE_FOREACH (MovieTrackingTrack *, track, &tracking->tracks) {
758  MovieTrackingMarker *marker;
759  order[tracknr].data = track;
760  marker = get_closest_marker(ctx, track, anchor_frame);
761  if (marker != NULL && (track->flag & (TRACK_USE_2D_STAB | TRACK_USE_2D_STAB_ROT))) {
762  order[tracknr].sort_value = abs(marker->framenr - anchor_frame);
763  order[tracknr].reference_frame = marker->framenr;
764  tracknr++;
765  }
766  }
767  if (tracknr) {
768  qsort(order, tracknr, sizeof(TrackInitOrder), BLI_sortutil_cmp_int);
769  }
770  return tracknr;
771 }
772 
773 /* Setup the constant part of this track's contribution to the determined frame
774  * movement. Tracks usually don't provide tracking data for every frame. Thus,
775  * for determining data at a given frame, we split up the contribution into a
776  * part covered by actual measurements on this track, and the initial gap
777  * between this track's reference frame and the global anchor_frame.
778  * The (missing) data for the gap can be substituted by the average offset
779  * observed by the other tracks covering the gap. This approximation doesn't
780  * introduce wrong data, but it records data with incorrect weight. A totally
781  * correct solution would require us to average the contribution per frame, and
782  * then integrate stepwise over all frames -- which of course would be way more
783  * expensive, especially for longer clips. To the contrary, our solution
784  * cumulates the total contribution per track and averages afterwards over all
785  * tracks; it can thus be calculated just based on the data of a single frame,
786  * plus the "baseline" for the reference frame, which is what we are computing
787  * here.
788  *
789  * Since we're averaging _contributions_, we have to calculate the _difference_
790  * of the measured position at current frame and the position at the reference
791  * frame. But the "reference" part of this difference is constant and can thus
792  * be packed together with the baseline contribution into a single precomputed
793  * vector per track.
794  *
795  * In case of the rotation contribution, the principle is the same, but we have
796  * to compensate for the already determined translation and measure the pure
797  * rotation, simply because this is how we model the offset: shift plus rotation
798  * around the shifted rotation center. To circumvent problems with the
799  * definition range of the arcus tangens function, we perform this baseline
800  * addition and reference angle subtraction in polar coordinates and bake this
801  * operation into a precomputed rotation matrix.
802  *
803  * track is a track to be initialized to initialize
804  * reference_frame is a local frame for this track, the closest pick to the
805  * global anchor_frame.
806  * aspect is a total aspect ratio of the undistorted image (includes fame and
807  * pixel aspect).
808  * target_pos is a possibly animated target position as set by the user for
809  * the reference_frame
810  * average_translation is a value observed by the _other_ tracks for the gap
811  * between reference_frame and anchor_frame. This
812  * average must not contain contributions of frames
813  * not yet initialized
814  * average_angle in a similar way, the rotation value observed by the
815  * _other_ tracks.
816  * average_scale_step is an image scale factor observed on average by the other
817  * tracks for this frame. This value is recorded and
818  * averaged as logarithm. The recorded scale changes
819  * are damped for very small contributions, to limit
820  * the effect of probe points approaching the pivot
821  * too closely.
822  *
823  * NOTE: when done, this track is marked as initialized
824  */
826  MovieTrackingTrack *track,
827  int reference_frame,
828  float aspect,
829  const float average_translation[2],
830  const float pivot[2],
831  const float average_angle,
832  const float average_scale_step)
833 {
834  float pos[2], angle, len;
836  MovieTrackingMarker *marker = BKE_tracking_marker_get_exact(track, reference_frame);
837  /* Logic for initialization order ensures there *is* a marker on that
838  * very frame.
839  */
840  BLI_assert(marker != NULL);
841  BLI_assert(local_data != NULL);
842 
843  /* Per track baseline value for translation. */
844  sub_v2_v2v2(local_data->stabilization_offset_base, average_translation, marker->pos);
845 
846  /* Per track baseline value for rotation. */
847  sub_v2_v2v2(pos, marker->pos, pivot);
848 
849  pos[0] *= aspect;
850  angle = average_angle - atan2f(pos[1], pos[0]);
852 
853  /* Per track baseline value for zoom. */
855  local_data->stabilization_scale_base = expf(average_scale_step) / len;
856 
857  local_data->is_init_for_stabilization = true;
858 }
859 
/* Prepare per-track private working data for every track in the clip, then
 * initialize each track's stabilization baseline in a well-defined order,
 * re-measuring the averaged contributions whenever the reference frame of
 * the next track to initialize changes.
 *
 * NOTE(review): Doxygen dump gaps -- the following linked lines are missing
 * from this listing and must come from the original file: the declaration of
 * `order` (line 865), the lookup of `local_data` inside the loop (line 878),
 * the assignment `track_len = establish_track_initialization_order(...)`
 * (line 899), and the two elided call names at lines 912 and 920 (the
 * averaging call and the per-track initialization call, respectively).
 */
 860 static void init_all_tracks(StabContext *ctx, float aspect)
 861 {
 862  size_t track_len = 0;
 863  MovieClip *clip = ctx->clip;
 864  MovieTracking *tracking = ctx->tracking;
 866 
 867  /* Attempt to start initialization at anchor_frame.
 868  * By definition, offset contribution is zero there.
 869  */
 870  int reference_frame = tracking->stabilization.anchor_frame;
 871  float average_angle = 0, average_scale_step = 0;
 872  float average_translation[2], average_pos[2], pivot[2];
 873  zero_v2(average_translation);
 874  zero_v2(pivot);
 875 
 876  /* Initialize private working data. */
 877  LISTBASE_FOREACH (MovieTrackingTrack *, track, &tracking->tracks) {
 879  if (!local_data) {
 880  local_data = MEM_callocN(sizeof(TrackStabilizationBase),
 881  "2D stabilization per track baseline data");
 882  attach_stabilization_baseline_data(ctx, track, local_data);
 883  }
 884  BLI_assert(local_data != NULL);
 885  local_data->track_weight_curve = retrieve_track_weight_animation(clip, track);
 886  local_data->is_init_for_stabilization = false;
 887 
 888  track_len++;
 889  }
 890  if (!track_len) {
 891  return;
 892  }
 893 
 894  order = MEM_mallocN(track_len * sizeof(TrackInitOrder), "stabilization track order");
 895  if (!order) {
 896  return;
 897  }
 898 
 /* track_len is re-assigned on the elided line 899; zero means no track
  * is usable for stabilization, so only the order array needs freeing. */
 900  if (track_len == 0) {
 901  goto cleanup;
 902  }
 903 
 904  /* starting point for pivot, before having initialized any track */
 905  average_marker_positions(ctx, reference_frame, average_pos);
 906  setup_pivot(average_pos, pivot);
 907 
 908  for (int i = 0; i < track_len; i++) {
 909  MovieTrackingTrack *track = order[i].data;
 910  if (reference_frame != order[i].reference_frame) {
 911  reference_frame = order[i].reference_frame;
 913  reference_frame,
 914  aspect,
 915  average_translation,
 916  pivot,
 917  &average_angle,
 918  &average_scale_step);
 919  }
 921  track,
 922  reference_frame,
 923  aspect,
 924  average_translation,
 925  pivot,
 926  average_angle,
 927  average_scale_step);
 928  }
 929 
 930 cleanup:
 931  MEM_freeN(order);
 932 }
933 
934 /* Retrieve the measurement of frame movement by averaging contributions of
935  * active tracks.
936  *
937  * translation is a measurement in normalized 0..1 coordinates.
938  * angle is a measurement in radians -pi..+pi counter clockwise relative to
939  * translation compensated frame center
940  * scale_step is a measurement of image scale changes, in logarithmic scale
941  * (zero means scale == 1)
942  * Returns calculation enabled and all data retrieved as expected for this frame.
943  *
944  * NOTE: when returning `false`, output parameters are reset to neutral values.
945  */
/* Determine the raw movement measurement (translation, pivot, angle, scale
 * step) for `framenr` by averaging active track contributions; on failure,
 * interpolate within gaps or extrapolate beyond the range of usable tracking
 * data so the image does not jump back to the zero position.
 * Returns false (with output parameters reset to neutral values) when
 * stabilization is disabled or no usable data exists.
 *
 * NOTE(review): the function-name line (946) and the interpolation call name
 * (line 976) were dropped by the Doxygen extraction -- restore from the
 * original file.
 */
 947  int framenr,
 948  float aspect,
 949  float r_translation[2],
 950  float r_pivot[2],
 951  float *r_angle,
 952  float *r_scale_step)
 953 {
 954  bool success = false;
 955 
 956  /* Early output if stabilization is disabled. */
 957  if ((ctx->stab->flag & TRACKING_2D_STABILIZATION) == 0) {
 958  zero_v2(r_translation);
 959  *r_scale_step = 0.0f;
 960  *r_angle = 0.0f;
 961  return false;
 962  }
 963 
 964  success = average_track_contributions(
 965  ctx, framenr, aspect, r_translation, r_pivot, r_angle, r_scale_step);
 966  if (!success) {
 967  /* Try to hold extrapolated settings beyond the definition range
 968  * and to interpolate in gaps without any usable tracking data
 969  * to prevent sudden jump to image zero position.
 970  */
 971  int next_lower = MINAFRAME;
 972  int next_higher = MAXFRAME;
 973  use_values_from_fcurves(ctx, true);
 974  find_next_working_frames(ctx, framenr, &next_lower, &next_higher);
 /* Both neighbors usable: interpolate between them (elided call, line 976). */
 975  if (next_lower >= MINFRAME && next_higher < MAXFRAME) {
 977  framenr,
 978  next_lower,
 979  next_higher,
 980  aspect,
 981  r_translation,
 982  r_pivot,
 983  r_angle,
 984  r_scale_step);
 985  }
 986  else if (next_higher < MAXFRAME) {
 987  /* Before start of stabilized range: extrapolate start point
 988  * settings.
 989  */
 990  success = average_track_contributions(
 991  ctx, next_higher, aspect, r_translation, r_pivot, r_angle, r_scale_step);
 992  }
 993  else if (next_lower >= MINFRAME) {
 994  /* After end of stabilized range: extrapolate end point settings. */
 995  success = average_track_contributions(
 996  ctx, next_lower, aspect, r_translation, r_pivot, r_angle, r_scale_step);
 997  }
 998  use_values_from_fcurves(ctx, false);
 999  }
 1000  return success;
 1001 }
1002 
1003 /* Calculate stabilization data (translation, scale and rotation) from given raw
1004  * measurements. Result is in absolute image dimensions (expanded image, square
1005  * pixels), includes automatic or manual scaling and compensates for a target
1006  * frame position, if given.
1007  *
1008  * size is a size of the expanded image, the width in pixels is size * aspect.
1009  * aspect is a ratio (width / height) of the effective canvas (square pixels).
1010  * do_compensate denotes whether to actually output values necessary to
1011  * _compensate_ the determined frame movement.
1012  * Otherwise, the effective target movement is returned.
1013  */
/* Turn raw measurements into effective stabilization data: apply the
 * animated influence factors (location/rotation/scale), compensate for an
 * animated target frame position, convert from normalized 0..1 coordinates
 * to absolute pixel coordinates of the expanded image, and optionally invert
 * all values so they *compensate* the detected movement.
 *
 * NOTE(review): the function-name line (original line 1014) was dropped by
 * the Doxygen extraction; per the preceding comment this is the stabilization
 * data calculation entry taking (StabContext *ctx, ...) -- confirm against
 * the original file.
 */
 1015  int framenr,
 1016  int size,
 1017  float aspect,
 1018  bool do_compensate,
 1019  float scale_step,
 1020  float r_translation[2],
 1021  float r_pivot[2],
 1022  float *r_scale,
 1023  float *r_angle)
 1024 {
 1025  float target_pos[2], target_scale;
 1026  float scaleinf = get_animated_scaleinf(ctx, framenr);
 1027 
 1028  if (ctx->stab->flag & TRACKING_STABILIZE_SCALE) {
 1029  *r_scale = expf(scale_step * scaleinf); /* Averaged in log scale */
 1030  }
 1031  else {
 1032  *r_scale = 1.0f;
 1033  }
 1034 
 /* Influence factors scale the measured movement contribution. */
 1035  mul_v2_fl(r_translation, get_animated_locinf(ctx, framenr));
 1036  *r_angle *= get_animated_rotinf(ctx, framenr);
 1037 
 1038  /* Compensate for a target frame position.
 1039  * This allows to follow tracking / panning shots in a semi manual fashion,
 1040  * when animating the settings for the target frame position.
 1041  */
 1042  get_animated_target_pos(ctx, framenr, target_pos);
 1043  sub_v2_v2(r_translation, target_pos);
 1044  *r_angle -= get_animated_target_rot(ctx, framenr);
 1045  target_scale = get_animated_target_scale(ctx, framenr);
 /* Guard against division by zero for a (meaningless) zero target zoom. */
 1046  if (target_scale != 0.0f) {
 1047  *r_scale /= target_scale;
 1048  /* target_scale is an expected/intended reference zoom value */
 1049  }
 1050 
 1051  /* Convert from relative to absolute coordinates, square pixels. */
 1052  r_translation[0] *= (float)size * aspect;
 1053  r_translation[1] *= (float)size;
 1054  r_pivot[0] *= (float)size * aspect;
 1055  r_pivot[1] *= (float)size;
 1056 
 1057  /* Output measured data, or inverse of the measured values for
 1058  * compensation?
 1059  */
 1060  if (do_compensate) {
 1061  mul_v2_fl(r_translation, -1.0f);
 1062  *r_angle *= -1.0f;
 1063  if (*r_scale != 0.0f) {
 1064  *r_scale = 1.0f / *r_scale;
 1065  }
 1066  }
 1067 }
1068 
/* Build a 4x4 transformation matrix (homogeneous coordinates) from the given
 * 2D stabilization data. Scale and rotation are performed around `pivot`;
 * the pixel aspect ratio is divided out beforehand and re-applied afterwards,
 * so the rotation effectively happens on square pixels.
 */
static void stabilization_data_to_mat4(float pixel_aspect,
                                       const float pivot[2],
                                       const float translation[2],
                                       float scale,
                                       float angle,
                                       float r_mat[4][4])
{
  const float scale_vec[3] = {scale, scale, 1.0f};
  float mat_translation[4][4], mat_rotation[4][4], mat_scale[4][4];
  float mat_pivot[4][4], mat_pivot_inverse[4][4];
  float mat_aspect[4][4], mat_aspect_inverse[4][4];

  unit_m4(mat_translation);
  unit_m4(mat_rotation);
  unit_m4(mat_scale);
  unit_m4(mat_aspect);
  unit_m4(mat_pivot);
  unit_m4(mat_pivot_inverse);

  /* Aspect ratio correction matrix (and its inverse to undo it later). */
  mat_aspect[0][0] /= pixel_aspect;
  invert_m4_m4(mat_aspect_inverse, mat_aspect);

  /* Shift the pivot point into the origin and back again. */
  add_v2_v2(mat_pivot[3], pivot);
  sub_v2_v2(mat_pivot_inverse[3], pivot);

  size_to_mat4(mat_scale, scale_vec);               /* scale matrix */
  add_v2_v2(mat_translation[3], translation);       /* translation matrix */
  rotate_m4(mat_rotation, 'Z', angle);              /* rotation matrix */

  /* Compose the final transformation; factors apply right to left:
   * aspect^-1, move pivot to origin, rotate, scale, move pivot back,
   * translate, re-apply aspect. */
  mul_m4_series(r_mat,
                mat_aspect,
                mat_translation,
                mat_pivot,
                mat_scale,
                mat_rotation,
                mat_pivot_inverse,
                mat_aspect_inverse);
}
1108 
1109 /* Calculate scale factor necessary to eliminate black image areas
1110  * caused by the compensating movements of the stabilizer.
1111  * This function visits every frame where stabilization data is
1112  * available and determines the factor for this frame. The overall
1113  * largest factor found is returned as result.
1114  *
1115  * NOTE: all tracks need to be initialized before calling this function.
1116  */
/* NOTE(review): Doxygen dump gaps -- the two elided call names at original
 * lines 1144 and 1146 are the per-frame offset-determination call and the
 * stabilization-data calculation call (taking `ctx` as first argument);
 * restore from the original file before compiling.
 */
 1117 static float calculate_autoscale_factor(StabContext *ctx, int size, float aspect)
 1118 {
 1119  MovieTrackingStabilization *stab = ctx->stab;
 1120  float pixel_aspect = ctx->tracking->camera.pixel_aspect;
 1121  int height = size, width = aspect * size;
 1122 
 /* Frame range accumulators start inverted so any track narrows them. */
 1123  int sfra = INT_MAX, efra = INT_MIN;
 1124  float scale = 1.0f, scale_step = 0.0f;
 1125 
 1126  /* Calculate maximal frame range of tracks where stabilization is active. */
 1127  LISTBASE_FOREACH (MovieTrackingTrack *, track, &ctx->tracking->tracks) {
 1128  if ((track->flag & TRACK_USE_2D_STAB) ||
 1129  ((stab->flag & TRACKING_STABILIZE_ROTATION) && (track->flag & TRACK_USE_2D_STAB_ROT))) {
 1130  int first_frame = track->markers[0].framenr;
 1131  int last_frame = track->markers[track->markersnr - 1].framenr;
 1132  sfra = min_ii(sfra, first_frame);
 1133  efra = max_ii(efra, last_frame);
 1134  }
 1135  }
 1136 
 1137  use_values_from_fcurves(ctx, true);
 1138  for (int cfra = sfra; cfra <= efra; cfra++) {
 1139  float translation[2], pivot[2], angle, tmp_scale;
 1140  float mat[4][4];
 1141  const float points[4][2] = {{0.0f, 0.0f}, {0.0f, height}, {width, height}, {width, 0.0f}};
 1142  const bool do_compensate = true;
 1143  /* Calculate stabilization parameters for the current frame. */
 1145  ctx, cfra, aspect, translation, pivot, &angle, &scale_step);
 1147  cfra,
 1148  size,
 1149  aspect,
 1150  do_compensate,
 1151  scale_step,
 1152  translation,
 1153  pivot,
 1154  &tmp_scale,
 1155  &angle);
 1156  /* Compose transformation matrix. */
 1157  /* NOTE: Here we operate in NON-COMPENSATED coordinates, meaning we have
 1158  * to construct transformation matrix using proper pivot point.
 1159  * Compensation for that will happen later on.
 1160  */
 1161  stabilization_data_to_mat4(pixel_aspect, pivot, translation, tmp_scale, angle, mat);
 1162  /* Investigate the transformed border lines for this frame;
 1163  * find out, where it cuts the original frame.
 1164  */
 1165  for (int edge_index = 0; edge_index < 4; edge_index++) {
 1166  /* Calculate coordinates of stabilized frame edge points.
 1167  * Use matrix multiplication here so we operate in homogeneous
 1168  * coordinates.
 1169  */
 1170  float stable_edge_p1[3], stable_edge_p2[3];
 1171  copy_v2_v2(stable_edge_p1, points[edge_index]);
 1172  copy_v2_v2(stable_edge_p2, points[(edge_index + 1) % 4]);
 1173  stable_edge_p1[2] = stable_edge_p2[2] = 0.0f;
 1174  mul_m4_v3(mat, stable_edge_p1);
 1175  mul_m4_v3(mat, stable_edge_p2);
 1176  /* Now we iterate over all original frame corners (we call them
 1177  * 'point' here) to see if there's black area between stabilized
 1178  * frame edge and original point.
 1179  */
 1180  for (int point_index = 0; point_index < 4; point_index++) {
 1181  const float point[3] = {points[point_index][0], points[point_index][1], 0.0f};
 1182  /* Calculate vector which goes from first edge point to
 1183  * second one.
 1184  */
 1185  float stable_edge_vec[3];
 1186  sub_v3_v3v3(stable_edge_vec, stable_edge_p2, stable_edge_p1);
 1187  /* Calculate vector which connects current frame point to
 1188  * first edge point.
 1189  */
 1190  float point_to_edge_start_vec[3];
 1191  sub_v3_v3v3(point_to_edge_start_vec, point, stable_edge_p1);
 1192  /* Use this two vectors to check whether frame point is inside
 1193  * of the stabilized frame or not.
 1194  * If the point is inside, there is no black area happening
 1195  * and no scaling required for it.
 1196  */
 1197  if (cross_v2v2(stable_edge_vec, point_to_edge_start_vec) >= 0.0f) {
 1198  /* We are scaling around motion-compensated pivot point. */
 1199  float scale_pivot[2];
 1200  add_v2_v2v2(scale_pivot, pivot, translation);
 1201  /* Calculate line which goes via `point` and parallel to
 1202  * the stabilized frame edge. This line is coming via
 1203  * `point` and `point2` at the end.
 1204  */
 1205  float point2[2];
 1206  add_v2_v2v2(point2, point, stable_edge_vec);
 1207  /* Calculate actual distance between pivot point and
 1208  * the stabilized frame edge. Then calculate distance
 1209  * between pivot point and line which goes via actual
 1210  * corner and is parallel to the edge.
 1211  *
 1212  * Dividing one by another will give us required scale
 1213  * factor to get rid of black areas.
 1214  */
 1215  float real_dist = dist_to_line_v2(scale_pivot, stable_edge_p1, stable_edge_p2);
 1216  float required_dist = dist_to_line_v2(scale_pivot, point, point2);
 1217  const float S = required_dist / real_dist;
 1218  scale = max_ff(scale, S);
 1219  }
 1220  }
 1221  }
 1222  }
 /* Respect the user-configured upper limit for automatic scaling. */
 1223  if (stab->maxscale > 0.0f) {
 1224  scale = min_ff(scale, stab->maxscale);
 1225  }
 1226  use_values_from_fcurves(ctx, false);
 1227 
 1228  return scale;
 1229 }
1230 
1231 /* Prepare working data and determine reference point for each track.
1232  *
1233  * NOTE: These calculations _could_ be cached and reused for all frames of the
1234  * same clip. However, since proper initialization depends on (weight)
1235  * animation and setup of tracks, ensuring consistency of cached init data
1236  * turns out to be tricky, hard to maintain and generally not worth the
1237  * effort. Thus we'll re-initialize on every frame.
1238  */
/* Create and fully initialize a stabilization working context for the clip:
 * initialize all track baselines and, if auto-scale is enabled, determine
 * the scale factor that eliminates black borders.
 *
 * NOTE(review): the line creating `ctx` (original line 1241) was dropped by
 * the Doxygen extraction -- restore from the original file. Caller owns the
 * returned context.
 */
 1239 static StabContext *init_stabilizer(MovieClip *clip, int size, float aspect)
 1240 {
 1242  BLI_assert(ctx != NULL);
 1243  init_all_tracks(ctx, aspect);
 1244  if (ctx->stab->flag & TRACKING_AUTOSCALE) {
 /* Reset to neutral first: autoscale calculation must not see a stale
  * scale from a previous run. */
 1245  ctx->stab->scale = 1.0;
 1246  ctx->stab->scale = calculate_autoscale_factor(ctx, size, aspect);
 1247  }
 1248  /* By default, just use values for the global current frame. */
 1249  use_values_from_fcurves(ctx, false);
 1250  return ctx;
 1251 }
1252 
1253 /* === public interface functions === */
1254 
1255 /* Get stabilization data (translation, scaling and angle) for a given frame.
1256  * Returned data describes how to compensate the detected movement, but with any
1257  * chosen scale factor already applied and any target frame position already
1258  * compensated. In case stabilization fails or is disabled, neutral values are
1259  * returned.
1260  *
1261  * framenr is a frame number, relative to the clip (not relative to the scene
1262  * timeline)
1263  * width is an effective width of the canvas (square pixels), used to scale the
1264  * determined translation
1265  *
1266  * Outputs:
1267  * - translation of the lateral shift, absolute canvas coordinates
1268  * (square pixels).
1269  * - scale of the scaling to apply
1270  * - angle of the rotation angle, relative to the frame center
1271  */
1272 /* TODO(sergey): Use r_ prefix for output parameters here. */
/* NOTE(review): Doxygen dump gaps -- the function-name line (original line
 * 1273, `BKE_tracking_stabilization_data_get(MovieClip *clip, ...)` per the
 * call at original line 1397), the condition/call names at lines 1296 and
 * 1298, and the context cleanup at line 1307 were dropped by the extraction;
 * restore from the original file.
 */
 1274  int framenr,
 1275  int width,
 1276  int height,
 1277  float translation[2],
 1278  float *scale,
 1279  float *angle)
 1280 {
 1281  StabContext *ctx = NULL;
 1282  MovieTracking *tracking = &clip->tracking;
 1283  bool enabled = (tracking->stabilization.flag & TRACKING_2D_STABILIZATION);
 1284  /* Might become a parameter of a stabilization compositor node. */
 1285  bool do_compensate = true;
 1286  float scale_step = 0.0f;
 1287  float pixel_aspect = tracking->camera.pixel_aspect;
 /* Effective aspect of the canvas with square pixels. */
 1288  float aspect = (float)width * pixel_aspect / height;
 1289  int size = height;
 1290  float pivot[2];
 1291 
 1292  if (enabled) {
 1293  ctx = init_stabilizer(clip, size, aspect);
 1294  }
 1295 
 /* Elided condition (line 1296): stabilization succeeds only when enabled
  * and a frame offset could be determined. */
 1297  ctx, framenr, aspect, translation, pivot, angle, &scale_step)) {
 1299  ctx, framenr, size, aspect, do_compensate, scale_step, translation, pivot, scale, angle);
 1300  compensate_rotation_center(size, aspect, *angle, *scale, pivot, translation);
 1301  }
 1302  else {
 /* Neutral values: no shift, no zoom, no rotation. */
 1303  zero_v2(translation);
 1304  *scale = 1.0f;
 1305  *angle = 0.0f;
 1306  }
 1308 }
1309 
/* Pixel sampler signature shared by the nearest/bilinear/bicubic
 * interpolation back-ends: (source, destination, u, v, xout, yout). */
 1310 typedef void (*interpolation_func)(struct ImBuf *, struct ImBuf *, float, float, int, int);
 1311 
 /* NOTE(review): the struct definition around this field
  * (TrackingStabilizeFrameInterpolationData, original lines 1312-1314 and
  * 1317-1318, holding ibuf/tmpibuf/mat/interpolation) was dropped by the
  * Doxygen extraction -- restore from the original file. */
 1315  float (*mat)[4];
 1316 
 1319 
/* BLI_task_parallel_range callback: fill one scanline `j` of the stabilized
 * output by sampling the source image "backwards" through the (already
 * inverted) transformation matrix.
 *
 * NOTE(review): the function-name line (original line 1320) and the cast of
 * `userdata` to the interpolation data struct (line 1323) were dropped by
 * the Doxygen extraction -- restore from the original file.
 */
 1321  void *__restrict userdata, const int j, const TaskParallelTLS *__restrict UNUSED(tls))
 1322 {
 1324  ImBuf *ibuf = data->ibuf;
 1325  ImBuf *tmpibuf = data->tmpibuf;
 1326  float(*mat)[4] = data->mat;
 1327 
 1328  interpolation_func interpolation = data->interpolation;
 1329 
 1330  for (int i = 0; i < tmpibuf->x; i++) {
 /* Map the target pixel (i, j) back into source coordinates. */
 1331  float vec[3] = {i, j, 0.0f};
 1332 
 1333  mul_v3_m4v3(vec, mat, vec);
 1334 
 1335  interpolation(ibuf, tmpibuf, vec[0], vec[1], i, j);
 1336  }
 1337 }
1338 
1339 /* Stabilize given image buffer using stabilization data for a specified
1340  * frame number.
1341  *
1342  * NOTE: frame number should be in clip space, not scene space.
1343  */
1344 /* TODO(sergey): Use r_ prefix for output parameters here. */
/* NOTE(review): Doxygen dump gaps -- the function-name line (original line
 * 1345), the matrix-building call name (line 1398, presumably
 * BKE_tracking_stabilization_data_to_mat4 given the argument list), the
 * first filter comparison (line 1406, presumably
 * `if (filter == TRACKING_FILTER_NEAREST) {`), the interpolation-data struct
 * initializer head (line 1420), and the parallel-range setup/call names
 * (lines 1428 and 1430) were dropped by the extraction; restore from the
 * original file. Returns a newly allocated, stabilized image buffer (caller
 * frees), or the input `ibuf` unchanged when stabilization is disabled.
 */
 1346  MovieClip *clip, int framenr, ImBuf *ibuf, float translation[2], float *scale, float *angle)
 1347 {
 1348  float tloc[2], tscale, tangle;
 1349  MovieTracking *tracking = &clip->tracking;
 1350  MovieTrackingStabilization *stab = &tracking->stabilization;
 1351  ImBuf *tmpibuf;
 1352  int width = ibuf->x, height = ibuf->y;
 1353  float pixel_aspect = tracking->camera.pixel_aspect;
 1354  float mat[4][4];
 1355  int filter = tracking->stabilization.filter;
 1356  interpolation_func interpolation = NULL;
 1357  int ibuf_flags;
 1358 
 /* All three output parameters are optional (may be NULL). */
 1359  if (translation) {
 1360  copy_v2_v2(tloc, translation);
 1361  }
 1362 
 1363  if (scale) {
 1364  tscale = *scale;
 1365  }
 1366 
 1367  /* Perform early output if no stabilization is used. */
 1368  if ((stab->flag & TRACKING_2D_STABILIZATION) == 0) {
 1369  if (translation) {
 1370  zero_v2(translation);
 1371  }
 1372 
 1373  if (scale) {
 1374  *scale = 1.0f;
 1375  }
 1376 
 1377  if (angle) {
 1378  *angle = 0.0f;
 1379  }
 1380 
 1381  return ibuf;
 1382  }
 1383 
 1384  /* Allocate frame for stabilization result, copy alpha mode and colorspace. */
 1385  ibuf_flags = 0;
 1386  if (ibuf->rect) {
 1387  ibuf_flags |= IB_rect;
 1388  }
 1389  if (ibuf->rect_float) {
 1390  ibuf_flags |= IB_rectfloat;
 1391  }
 1392 
 1393  tmpibuf = IMB_allocImBuf(ibuf->x, ibuf->y, ibuf->planes, ibuf_flags);
 1394  IMB_colormanagegent_copy_settings(ibuf, tmpibuf);
 1395 
 1396  /* Calculate stabilization matrix. */
 1397  BKE_tracking_stabilization_data_get(clip, framenr, width, height, tloc, &tscale, &tangle);
 1399  ibuf->x, ibuf->y, pixel_aspect, tloc, tscale, tangle, mat);
 1400 
 1401  /* The following code visits each nominal target grid position
 1402  * and picks interpolated data "backwards" from source.
 1403  * thus we need the inverse of the transformation to apply. */
 1404  invert_m4(mat);
 1405 
 1407  interpolation = nearest_interpolation;
 1408  }
 1409  else if (filter == TRACKING_FILTER_BILINEAR) {
 1410  interpolation = bilinear_interpolation;
 1411  }
 1412  else if (filter == TRACKING_FILTER_BICUBIC) {
 1413  interpolation = bicubic_interpolation;
 1414  }
 1415  else {
 1416  /* fallback to default interpolation method */
 1417  interpolation = nearest_interpolation;
 1418  }
 1419 
 1421  .ibuf = ibuf,
 1422  .tmpibuf = tmpibuf,
 1423  .mat = mat,
 1424  .interpolation = interpolation,
 1425  };
 1426 
 1427  TaskParallelSettings settings;
 /* Only spawn threads for reasonably tall images. */
 1429  settings.use_threading = (tmpibuf->y > 128);
 1431  0, tmpibuf->y, &data, tracking_stabilize_frame_interpolation_cb, &settings);
 1432 
 /* Byte rect must be regenerated from the float rect on next access. */
 1433  if (tmpibuf->rect_float) {
 1434  tmpibuf->userflags |= IB_RECT_INVALID;
 1435  }
 1436 
 /* Report the actually used stabilization data back to the caller. */
 1437  if (translation) {
 1438  copy_v2_v2(translation, tloc);
 1439  }
 1440 
 1441  if (scale) {
 1442  *scale = tscale;
 1443  }
 1444 
 1445  if (angle) {
 1446  *angle = tangle;
 1447  }
 1448 
 1449  return tmpibuf;
 1450 }
1451 
1452 /* Build a 4x4 transformation matrix based on the given 2D stabilization data.
1453  * mat is a 4x4 matrix in homogeneous coordinates, adapted to the
1454  * final image buffer size and compensated for pixel aspect ratio,
1455  * ready for direct OpenGL drawing.
1456  *
1457  * TODO(sergey): The signature of this function should be changed. we actually
1458  * don't need the dimensions of the image buffer. Instead we
1459  * should consider to provide the pivot point of the rotation as a
1460  * further stabilization data parameter.
1461  */
/* NOTE(review): the function-name line (original line 1462,
 * `void BKE_tracking_stabilization_data_to_mat4(int buffer_width, ...)` per
 * the declaration listed further below in this dump) was dropped by the
 * Doxygen extraction -- restore from the original file.
 */
 1463  int buffer_height,
 1464  float pixel_aspect,
 1465  float translation[2],
 1466  float scale,
 1467  float angle,
 1468  float r_mat[4][4])
 1469 {
 1470  /* Since we cannot receive the real pivot point coordinates (API limitation),
 1471  * we perform the rotation/scale around the center of frame.
 1472  * Then we correct by an additional shift, which was calculated in
 1473  * compensate_rotation_center() and "sneaked in" as additional offset
 1474  * in the translation parameter. This works, since translation needs to be
 1475  * applied after rotation/scale anyway. Thus effectively the image gets
 1476  * rotated around the desired pivot point
 1477  */
 1478  /* TODO(sergey): pivot shouldn't be calculated here, rather received
 1479  * as a parameter.
 1480  */
 1481  float pivot[2];
 /* Frame center, in square-pixel coordinates (x corrected by aspect). */
 1482  pivot[0] = 0.5f * pixel_aspect * buffer_width;
 1483  pivot[1] = 0.5f * buffer_height;
 1484  /* Compose transformation matrix. */
 1485  stabilization_data_to_mat4(pixel_aspect, pivot, translation, scale, angle, r_mat);
 1486 }
typedef float(TangentPoint)[2]
float evaluate_fcurve(struct FCurve *fcu, float evaltime)
Definition: fcurve.c:2186
struct FCurve * id_data_find_fcurve(ID *id, void *data, struct StructRNA *type, const char *prop_name, int index, bool *r_driven)
Definition: fcurve.c:221
float BKE_movieclip_remap_clip_to_scene_frame(const struct MovieClip *clip, float framenr)
struct MovieTrackingMarker * BKE_tracking_marker_get_exact(struct MovieTrackingTrack *track, int framenr)
Definition: tracking.c:1556
struct MovieTrackingMarker * BKE_tracking_marker_get(struct MovieTrackingTrack *track, int framenr)
Definition: tracking.c:1523
#define BLI_assert(a)
Definition: BLI_assert.h:58
void BLI_ghash_insert(GHash *gh, void *key, void *val)
Definition: BLI_ghash.c:756
void BLI_ghash_free(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp)
Definition: BLI_ghash.c:1008
GHash * BLI_ghash_ptr_new(const char *info) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT
void * BLI_ghash_lookup(GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT
Definition: BLI_ghash.c:803
#define LISTBASE_FOREACH(type, var, list)
Definition: BLI_listbase.h:172
MINLINE float max_ff(float a, float b)
MINLINE int min_ii(int a, int b)
MINLINE float min_ff(float a, float b)
MINLINE int max_ii(int a, int b)
float dist_to_line_v2(const float p[2], const float l1[2], const float l2[2])
Definition: math_geom.c:332
bool invert_m4(float R[4][4])
Definition: math_matrix.c:1187
void unit_m4(float m[4][4])
Definition: rct.c:1140
bool invert_m4_m4(float R[4][4], const float A[4][4])
Definition: math_matrix.c:1278
void size_to_mat4(float R[4][4], const float size[3])
Definition: math_matrix.c:2118
void mul_m4_v3(const float M[4][4], float r[3])
Definition: math_matrix.c:732
#define mul_m4_series(...)
void mul_m2_v2(const float M[2][2], float v[2])
Definition: math_matrix.c:788
void mul_v3_m4v3(float r[3], const float M[4][4], const float v[3])
Definition: math_matrix.c:742
void rotate_m4(float mat[4][4], const char axis, const float angle)
Definition: math_matrix.c:2352
void angle_to_mat2(float R[2][2], const float angle)
void interp_v2_v2v2(float r[2], const float a[2], const float b[2], const float t)
Definition: math_vector.c:32
MINLINE void sub_v2_v2(float r[2], const float a[2])
MINLINE void sub_v3_v3v3(float r[3], const float a[3], const float b[3])
MINLINE void mul_v2_fl(float r[2], float f)
MINLINE void copy_v2_v2(float r[2], const float a[2])
MINLINE void add_v2_v2(float r[2], const float a[2])
MINLINE float cross_v2v2(const float a[2], const float b[2]) ATTR_WARN_UNUSED_RESULT
MINLINE void add_v2_v2v2(float r[2], const float a[2], const float b[2])
MINLINE void sub_v2_v2v2(float r[2], const float a[2], const float b[2])
MINLINE void zero_v2(float r[2])
MINLINE float len_v2(const float a[2]) ATTR_WARN_UNUSED_RESULT
int BLI_sortutil_cmp_int(const void *a_, const void *b_)
Definition: sort_utils.c:68
void BLI_task_parallel_range(const int start, const int stop, void *userdata, TaskParallelRangeFunc func, const TaskParallelSettings *settings)
Definition: task_range.cc:110
BLI_INLINE void BLI_parallel_range_settings_defaults(TaskParallelSettings *settings)
Definition: BLI_task.h:231
#define UNUSED(x)
#define MINFRAME
#define MINAFRAME
#define MAXFRAME
@ TRACK_USE_2D_STAB
@ TRACK_USE_2D_STAB_ROT
@ MARKER_DISABLED
@ TRACKING_FILTER_BICUBIC
@ TRACKING_FILTER_NEAREST
@ TRACKING_FILTER_BILINEAR
@ TRACKING_AUTOSCALE
@ TRACKING_STABILIZE_SCALE
@ TRACKING_STABILIZE_ROTATION
@ TRACKING_2D_STABILIZATION
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei GLfloat GLfloat GLfloat GLfloat const GLubyte *bitmap _GL_VOID_RET _GL_VOID GLenum const void *lists _GL_VOID_RET _GL_VOID const GLdouble *equation _GL_VOID_RET _GL_VOID GLdouble GLdouble blue _GL_VOID_RET _GL_VOID GLfloat GLfloat blue _GL_VOID_RET _GL_VOID GLint GLint blue _GL_VOID_RET _GL_VOID GLshort GLshort blue _GL_VOID_RET _GL_VOID GLubyte GLubyte blue _GL_VOID_RET _GL_VOID GLuint GLuint blue _GL_VOID_RET _GL_VOID GLushort GLushort blue _GL_VOID_RET _GL_VOID GLbyte GLbyte GLbyte alpha _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble alpha _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat alpha _GL_VOID_RET _GL_VOID GLint GLint GLint alpha _GL_VOID_RET _GL_VOID GLshort GLshort GLshort alpha _GL_VOID_RET _GL_VOID GLubyte GLubyte GLubyte alpha _GL_VOID_RET _GL_VOID GLuint GLuint GLuint alpha _GL_VOID_RET _GL_VOID GLushort GLushort GLushort alpha _GL_VOID_RET _GL_VOID GLenum mode _GL_VOID_RET _GL_VOID GLint GLsizei width
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei height
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei GLfloat GLfloat GLfloat GLfloat const GLubyte *bitmap _GL_VOID_RET _GL_VOID GLenum const void *lists _GL_VOID_RET _GL_VOID const GLdouble *equation _GL_VOID_RET _GL_VOID GLdouble GLdouble blue _GL_VOID_RET _GL_VOID GLfloat GLfloat blue _GL_VOID_RET _GL_VOID GLint GLint blue _GL_VOID_RET _GL_VOID GLshort GLshort blue _GL_VOID_RET _GL_VOID GLubyte GLubyte blue _GL_VOID_RET _GL_VOID GLuint GLuint blue _GL_VOID_RET _GL_VOID GLushort GLushort blue _GL_VOID_RET _GL_VOID GLbyte GLbyte GLbyte alpha _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble alpha _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat alpha _GL_VOID_RET _GL_VOID GLint GLint GLint alpha _GL_VOID_RET _GL_VOID GLshort GLshort GLshort alpha _GL_VOID_RET _GL_VOID GLubyte GLubyte GLubyte alpha _GL_VOID_RET _GL_VOID GLuint GLuint GLuint alpha _GL_VOID_RET _GL_VOID GLushort GLushort GLushort alpha _GL_VOID_RET _GL_VOID GLenum mode _GL_VOID_RET _GL_VOID GLint GLsizei GLsizei GLenum type _GL_VOID_RET _GL_VOID GLsizei GLenum GLenum const void *pixels _GL_VOID_RET _GL_VOID const void *pointer _GL_VOID_RET _GL_VOID GLdouble v _GL_VOID_RET _GL_VOID GLfloat v _GL_VOID_RET _GL_VOID GLint GLint i2 _GL_VOID_RET _GL_VOID GLint j _GL_VOID_RET _GL_VOID GLfloat param _GL_VOID_RET _GL_VOID GLint param _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble GLdouble GLdouble zFar _GL_VOID_RET _GL_UINT GLdouble *equation _GL_VOID_RET _GL_VOID GLenum GLint *params _GL_VOID_RET _GL_VOID GLenum GLfloat *v _GL_VOID_RET _GL_VOID GLenum GLfloat *params _GL_VOID_RET _GL_VOID GLfloat *values _GL_VOID_RET _GL_VOID GLushort *values _GL_VOID_RET _GL_VOID GLenum GLfloat *params _GL_VOID_RET _GL_VOID GLenum GLdouble *params _GL_VOID_RET _GL_VOID GLenum GLint *params _GL_VOID_RET _GL_VOID GLsizei const void *pointer _GL_VOID_RET _GL_VOID GLsizei const void *pointer _GL_VOID_RET _GL_BOOL GLfloat param _GL_VOID_RET _GL_VOID GLint param _GL_VOID_RET 
_GL_VOID GLenum GLfloat param _GL_VOID_RET _GL_VOID GLenum GLint param _GL_VOID_RET _GL_VOID GLushort pattern _GL_VOID_RET _GL_VOID GLdouble GLdouble GLint GLint const GLdouble *points _GL_VOID_RET _GL_VOID GLdouble GLdouble GLint GLint GLdouble GLdouble GLint GLint const GLdouble *points _GL_VOID_RET _GL_VOID GLdouble GLdouble u2 _GL_VOID_RET _GL_VOID GLdouble GLdouble GLint GLdouble GLdouble v2 _GL_VOID_RET _GL_VOID GLenum GLfloat param _GL_VOID_RET _GL_VOID GLenum GLint param _GL_VOID_RET _GL_VOID GLenum mode _GL_VOID_RET _GL_VOID GLdouble GLdouble nz _GL_VOID_RET _GL_VOID GLfloat GLfloat nz _GL_VOID_RET _GL_VOID GLint GLint nz _GL_VOID_RET _GL_VOID GLshort GLshort nz _GL_VOID_RET _GL_VOID GLsizei const void *pointer _GL_VOID_RET _GL_VOID GLsizei const GLfloat *values _GL_VOID_RET _GL_VOID GLsizei const GLushort *values _GL_VOID_RET _GL_VOID GLint param _GL_VOID_RET _GL_VOID const GLuint const GLclampf *priorities _GL_VOID_RET _GL_VOID GLdouble y _GL_VOID_RET _GL_VOID GLfloat y _GL_VOID_RET _GL_VOID GLint y _GL_VOID_RET _GL_VOID GLshort y _GL_VOID_RET _GL_VOID GLdouble GLdouble z _GL_VOID_RET _GL_VOID GLfloat GLfloat z _GL_VOID_RET _GL_VOID GLint GLint z _GL_VOID_RET _GL_VOID GLshort GLshort z _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble w _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat w _GL_VOID_RET _GL_VOID GLint GLint GLint w _GL_VOID_RET _GL_VOID GLshort GLshort GLshort w _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble y2 _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat y2 _GL_VOID_RET _GL_VOID GLint GLint GLint y2 _GL_VOID_RET _GL_VOID GLshort GLshort GLshort y2 _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble z _GL_VOID_RET _GL_VOID GLdouble GLdouble z _GL_VOID_RET _GL_VOID GLuint *buffer _GL_VOID_RET _GL_VOID GLdouble t _GL_VOID_RET _GL_VOID GLfloat t _GL_VOID_RET _GL_VOID GLint t _GL_VOID_RET _GL_VOID GLshort t _GL_VOID_RET _GL_VOID GLdouble t
_GL_VOID GLfloat value _GL_VOID_RET _GL_VOID const GLuint GLboolean *residences _GL_BOOL_RET _GL_VOID GLsizei GLfloat GLfloat GLfloat GLfloat const GLubyte *bitmap _GL_VOID_RET _GL_VOID GLenum const void *lists _GL_VOID_RET _GL_VOID const GLdouble *equation _GL_VOID_RET _GL_VOID GLdouble GLdouble blue _GL_VOID_RET _GL_VOID GLfloat GLfloat blue _GL_VOID_RET _GL_VOID GLint GLint blue _GL_VOID_RET _GL_VOID GLshort GLshort blue _GL_VOID_RET _GL_VOID GLubyte GLubyte blue _GL_VOID_RET _GL_VOID GLuint GLuint blue _GL_VOID_RET _GL_VOID GLushort GLushort blue _GL_VOID_RET _GL_VOID GLbyte GLbyte GLbyte alpha _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble alpha _GL_VOID_RET _GL_VOID GLfloat GLfloat GLfloat alpha _GL_VOID_RET _GL_VOID GLint GLint GLint alpha _GL_VOID_RET _GL_VOID GLshort GLshort GLshort alpha _GL_VOID_RET _GL_VOID GLubyte GLubyte GLubyte alpha _GL_VOID_RET _GL_VOID GLuint GLuint GLuint alpha _GL_VOID_RET _GL_VOID GLushort GLushort GLushort alpha _GL_VOID_RET _GL_VOID GLenum mode _GL_VOID_RET _GL_VOID GLint GLsizei GLsizei GLenum type _GL_VOID_RET _GL_VOID GLsizei GLenum GLenum const void *pixels _GL_VOID_RET _GL_VOID const void *pointer _GL_VOID_RET _GL_VOID GLdouble v _GL_VOID_RET _GL_VOID GLfloat v _GL_VOID_RET _GL_VOID GLint GLint i2 _GL_VOID_RET _GL_VOID GLint j _GL_VOID_RET _GL_VOID GLfloat param _GL_VOID_RET _GL_VOID GLint param _GL_VOID_RET _GL_VOID GLdouble GLdouble GLdouble GLdouble GLdouble zFar _GL_VOID_RET _GL_UINT GLdouble *equation _GL_VOID_RET _GL_VOID GLenum GLint *params _GL_VOID_RET _GL_VOID GLenum GLfloat *v _GL_VOID_RET _GL_VOID GLenum GLfloat *params _GL_VOID_RET _GL_VOID GLfloat *values _GL_VOID_RET _GL_VOID GLushort *values _GL_VOID_RET _GL_VOID GLenum GLfloat *params _GL_VOID_RET _GL_VOID GLenum GLdouble *params _GL_VOID_RET _GL_VOID GLenum GLint *params _GL_VOID_RET _GL_VOID GLsizei const void *pointer _GL_VOID_RET _GL_VOID GLsizei const void *pointer _GL_VOID_RET _GL_BOOL GLfloat param _GL_VOID_RET _GL_VOID GLint param _GL_VOID_RET 
_GL_VOID GLenum GLfloat param _GL_VOID_RET _GL_VOID GLenum GLint param _GL_VOID_RET _GL_VOID GLushort pattern _GL_VOID_RET _GL_VOID GLdouble GLdouble GLint GLint order
void IMB_colormanagegent_copy_settings(struct ImBuf *ibuf_src, struct ImBuf *ibuf_dst)
void nearest_interpolation(struct ImBuf *in, struct ImBuf *out, float u, float v, int xout, int yout)
Definition: imageprocess.c:337
struct ImBuf * IMB_allocImBuf(unsigned int x, unsigned int y, unsigned char planes, unsigned int flags)
Definition: allocimbuf.c:478
Contains defines and structs used throughout the imbuf module.
@ IB_RECT_INVALID
@ IB_rectfloat
@ IB_rect
Read Guarded memory(de)allocation.
StructRNA RNA_MovieTrackingStabilization
StructRNA RNA_MovieTrackingTrack
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition: btDbvt.cpp:52
SIMD_FORCE_INLINE btScalar angle(const btVector3 &v) const
Return the angle between this and another vector.
Definition: btVector3.h:356
uint pos
DO_INLINE void filter(lfVector *V, fmatrix3x3 *S)
const vector< Marker > & markers
bool enabled
#define logf(x)
#define expf(x)
#define atan2f(x, y)
void(* MEM_freeN)(void *vmemh)
Definition: mallocn.c:41
void *(* MEM_callocN)(size_t len, const char *str)
Definition: mallocn.c:45
void *(* MEM_mallocN)(size_t len, const char *str)
Definition: mallocn.c:47
BLI_INLINE void bilinear_interpolation(const unsigned char *byte_buffer, const float *float_buffer, unsigned char *byte_output, float *float_output, int width, int height, int components, float u, float v, bool wrap_x, bool wrap_y)
Definition: math_interp.c:264
BLI_INLINE void bicubic_interpolation(const unsigned char *byte_buffer, const float *float_buffer, unsigned char *byte_output, float *float_output, int width, int height, int components, float u, float v)
Definition: math_interp.c:94
int userflags
unsigned char planes
unsigned int * rect
float * rect_float
struct MovieTracking tracking
MovieTrackingMarker * markers
MovieTrackingStabilization stabilization
MovieTrackingCamera camera
MovieTrackingStabilization * stab
FCurve * target_pos[2]
FCurve * target_scale
GHash * private_track_data
MovieClip * clip
MovieTracking * tracking
MovieTrackingTrack * data
float stabilization_rotation_base[2][2]
void BKE_tracking_stabilization_data_to_mat4(int buffer_width, int buffer_height, float pixel_aspect, float translation[2], float scale, float angle, float r_mat[4][4])
static void retrieve_next_lower_usable_frame(StabContext *ctx, MovieTrackingTrack *track, int i, int ref_frame, int *next_lower)
static void average_marker_positions(StabContext *ctx, int framenr, float r_ref_pos[2])
struct TrackingStabilizeFrameInterpolationData TrackingStabilizeFrameInterpolationData
static void discard_stabilization_baseline_data(void *val)
static int establish_track_initialization_order(StabContext *ctx, TrackInitOrder *order)
static float get_animated_target_scale(StabContext *ctx, int framenr)
static FCurve * retrieve_track_weight_animation(MovieClip *clip, MovieTrackingTrack *track)
void BKE_tracking_stabilization_data_get(MovieClip *clip, int framenr, int width, int height, float translation[2], float *scale, float *angle)
static float get_animated_target_rot(StabContext *ctx, int framenr)
ImBuf * BKE_tracking_stabilize_frame(MovieClip *clip, int framenr, ImBuf *ibuf, float translation[2], float *scale, float *angle)
static FCurve * retrieve_stab_animation(MovieClip *clip, const char *data_path, int idx)
static float get_animated_rotinf(StabContext *ctx, int framenr)
static bool average_track_contributions(StabContext *ctx, int framenr, float aspect, float r_translation[2], float r_pivot[2], float *r_angle, float *r_scale_step)
static MovieTrackingMarker * get_closest_marker(StabContext *ctx, MovieTrackingTrack *track, int ref_frame)
struct TrackStabilizationBase TrackStabilizationBase
static StabContext * init_stabilizer(MovieClip *clip, int size, float aspect)
static void stabilization_calculate_data(StabContext *ctx, int framenr, int size, float aspect, bool do_compensate, float scale_step, float r_translation[2], float r_pivot[2], float *r_scale, float *r_angle)
struct TrackInitOrder TrackInitOrder
static void tracking_stabilize_frame_interpolation_cb(void *__restrict userdata, const int j, const TaskParallelTLS *__restrict UNUSED(tls))
static bool stabilization_determine_offset_for_frame(StabContext *ctx, int framenr, float aspect, float r_translation[2], float r_pivot[2], float *r_angle, float *r_scale_step)
static bool is_init_for_stabilization(StabContext *ctx, MovieTrackingTrack *track)
static bool is_usable_for_stabilization(StabContext *ctx, MovieTrackingTrack *track)
static bool is_effectively_disabled(StabContext *ctx, MovieTrackingTrack *track, MovieTrackingMarker *marker)
static void use_values_from_fcurves(StabContext *ctx, bool toggle)
static bool interpolate_averaged_track_contributions(StabContext *ctx, int framenr, int frame_a, int frame_b, const float aspect, float r_translation[2], float r_pivot[2], float *r_angle, float *r_scale_step)
static void get_animated_target_pos(StabContext *ctx, int framenr, float target_pos[2])
static float get_animated_locinf(StabContext *ctx, int framenr)
static void init_all_tracks(StabContext *ctx, float aspect)
static float get_animated_scaleinf(StabContext *ctx, int framenr)
static float get_animated_weight(StabContext *ctx, MovieTrackingTrack *track, int framenr)
static float SCALE_ERROR_LIMIT_BIAS
static int search_closest_marker_index(MovieTrackingTrack *track, int ref_frame)
static void find_next_working_frames(StabContext *ctx, int framenr, int *next_lower, int *next_higher)
static void attach_stabilization_baseline_data(StabContext *ctx, MovieTrackingTrack *track, TrackStabilizationBase *private_data)
static void setup_pivot(const float ref_pos[2], float r_pivot[2])
static float calculate_autoscale_factor(StabContext *ctx, int size, float aspect)
static float EPSILON_WEIGHT
static void stabilization_data_to_mat4(float pixel_aspect, const float pivot[2], const float translation[2], float scale, float angle, float r_mat[4][4])
static float fetch_from_fcurve(FCurve *animationCurve, int framenr, StabContext *ctx, float default_value)
static StabContext * init_stabilization_working_context(MovieClip *clip)
static void retrieve_next_higher_usable_frame(StabContext *ctx, MovieTrackingTrack *track, int i, int ref_frame, int *next_higher)
static float rotation_contribution(TrackStabilizationBase *track_ref, MovieTrackingMarker *marker, const float aspect, const float pivot[2], float *result_angle, float *result_scale)
static void compensate_rotation_center(const int size, float aspect, const float angle, const float scale, const float pivot[2], float result_translation[2])
static MovieTrackingMarker * get_tracking_data_point(StabContext *ctx, MovieTrackingTrack *track, int framenr, float *r_weight)
static void discard_stabilization_working_context(StabContext *ctx)
static void init_track_for_stabilization(StabContext *ctx, MovieTrackingTrack *track, int reference_frame, float aspect, const float average_translation[2], const float pivot[2], const float average_angle, const float average_scale_step)
static TrackStabilizationBase * access_stabilization_baseline_data(StabContext *ctx, MovieTrackingTrack *track)
static void translation_contribution(TrackStabilizationBase *track_ref, MovieTrackingMarker *marker, float result_offset[2])
void(* interpolation_func)(struct ImBuf *, struct ImBuf *, float, float, int, int)
struct StabContext StabContext
__forceinline const avxi abs(const avxi &a)
Definition: util_avxi.h:186
uint len