  /* In BKE_anim_path_calc_data(): bail out when there is no bevel list data to work with. */
  if (bl == NULL || !bl->nr) {
    /* ... */
  }
  /* ... */
  float *len_data = (float *)MEM_mallocN(sizeof(float) * seg_size, "calcpathdist");
  /* Accumulate the length along the curve: entry 'i' is the path length
   * up to and including segment 'i'. */
  float prev_len = 0.0f;
  for (int i = 0; i < bl->nr - 1; i++) {
    prev_len += len_v3v3(bp_arr[i].vec, bp_arr[i + 1].vec);
    len_data[i] = prev_len;
  }
  /* For cyclic curves, also add the closing segment from the last point back to the first. */
  len_data[seg_size - 1] = prev_len + len_v3v3(bp_arr[0].vec, bp_arr[bl->nr - 1].vec);
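/* For reference, the accumulated-length idea can be shown outside of Blender's data
 * structures. The sketch below is illustrative only (plain C, made-up 'points' data and
 * a hypothetical len_3d() helper, no Blender API): it sums Euclidean segment lengths so
 * that entry 'i' holds the total path length up to the end of segment 'i'. */

#include <math.h>
#include <stdio.h>

static float len_3d(const float a[3], const float b[3])
{
  const float dx = a[0] - b[0], dy = a[1] - b[1], dz = a[2] - b[2];
  return sqrtf(dx * dx + dy * dy + dz * dz);
}

int main(void)
{
  const float points[4][3] = {{0, 0, 0}, {1, 0, 0}, {1, 2, 0}, {1, 2, 2}};
  const int num_points = 4;
  float accum_len[3]; /* One entry per segment. */

  float prev_len = 0.0f;
  for (int i = 0; i < num_points - 1; i++) {
    prev_len += len_3d(points[i], points[i + 1]);
    accum_len[i] = prev_len;
  }

  for (int i = 0; i < num_points - 1; i++) {
    printf("accum_len[%d] = %.1f\n", i, accum_len[i]); /* Prints 1.0, 3.0, 5.0 */
  }
  return 0;
}

/* With such a table, mapping a normalized time to a position on the path reduces to finding
 * the segment whose accumulated length brackets 'ctime * total_length', which is what the
 * binary search further down does. */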
  /* In get_curve_points_from_idx(): for the first segment of a cyclic curve, the
   * preceding control point wraps around to the last bevel point. */
  *r_p0 = &bp_arr[bl->nr - 1];
  /* ... */
  if (idx == bl->nr - 2) {
    /* Next-to-last point: the segment ending at the last bevel point. */
    *r_p0 = &bp_arr[idx - 1];
    *r_p1 = &bp_arr[idx];
    *r_p2 = &bp_arr[idx + 1];
    /* ... */
  }
  if (idx == bl->nr - 1) {
    /* The very last point; only reachable when the curve is cyclic. */
    *r_p0 = &bp_arr[idx - 1];
    *r_p1 = &bp_arr[idx];
    /* ... */
  }
  /* The general case: there are bevel points on both sides of the segment. */
  *r_p0 = &bp_arr[idx - 1];
  *r_p1 = &bp_arr[idx];
  *r_p2 = &bp_arr[idx + 1];
  *r_p3 = &bp_arr[idx + 2];
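/* The special cases above exist because the interpolation always needs four consecutive
 * bevel points around the requested segment, and the first/last segments have to wrap
 * (cyclic curves) or clamp (open curves) their neighbors. A standalone sketch of the same
 * neighbor selection, using hypothetical 'num_points'/'cyclic' parameters and a made-up
 * segment_neighbors() helper rather than Blender's BevList, could look like this: */

#include <stdio.h>

static void segment_neighbors(int idx, int num_points, int cyclic, int out[4])
{
  for (int k = 0; k < 4; k++) {
    int j = idx - 1 + k;
    if (cyclic) {
      j = ((j % num_points) + num_points) % num_points; /* Wrap around the curve. */
    }
    else {
      j = j < 0 ? 0 : (j >= num_points - 1 ? num_points - 1 : j); /* Clamp to the ends. */
    }
    out[k] = j;
  }
}

int main(void)
{
  int idx_arr[4];

  segment_neighbors(0, 5, 1, idx_arr);
  printf("cyclic, first segment: %d %d %d %d\n", idx_arr[0], idx_arr[1], idx_arr[2], idx_arr[3]); /* 4 0 1 2 */

  segment_neighbors(3, 5, 0, idx_arr);
  printf("open, last segment:    %d %d %d %d\n", idx_arr[0], idx_arr[1], idx_arr[2], idx_arr[3]); /* 2 3 4 4 */
  return 0;
}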
static bool binary_search_anim_path(const float *accum_len_arr,
                                    const int seg_size,
                                    const float goal_len,
                                    int *r_idx,
                                    float *r_frac)
{
  float left_len, right_len;
  int cur_idx = 0, cur_base = 0;
  int cur_step = seg_size - 1;
  /* ... */
  cur_idx = cur_base + cur_step / 2;
  left_len = accum_len_arr[cur_idx];
  right_len = accum_len_arr[cur_idx + 1];

  if (left_len <= goal_len && right_len > goal_len) {
    /* Found the bracketing segment: report its index and the fraction inside it. */
    *r_idx = cur_idx + 1;
    *r_frac = (goal_len - left_len) / (right_len - left_len);
    /* ... */
  }
  /* When the search lands on the very first segment: */
  *r_frac = goal_len / accum_len_arr[0];
  /* ... */
  CLOG_ERROR(&LOG, "Couldn't find any valid point on the animation path!");
  BLI_assert_msg(0, "Couldn't find any valid point on the animation path!");
  /* ... */
  if (left_len < goal_len) {
    /* The goal lies to the right of the current index; continue searching there. */
    cur_base = cur_idx + 1;
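/* The search is a plain binary search over the monotonically increasing accumulated-length
 * array; once the bracketing segment is found, the position inside it is expressed as a
 * 0..1 fraction. Below is a self-contained sketch of the same lookup with a hypothetical
 * find_segment() helper and a simplified lower-bound loop, not Blender's exact stepping: */

#include <stdio.h>

static int find_segment(const float *accum_len, int seg_count, float goal_len, float *r_frac)
{
  int lo = 0, hi = seg_count - 1;
  while (lo < hi) {
    const int mid = (lo + hi) / 2;
    if (accum_len[mid] <= goal_len) {
      lo = mid + 1;
    }
    else {
      hi = mid;
    }
  }
  const float seg_start = (lo == 0) ? 0.0f : accum_len[lo - 1];
  *r_frac = (goal_len - seg_start) / (accum_len[lo] - seg_start);
  return lo;
}

int main(void)
{
  const float accum_len[3] = {1.0f, 3.0f, 5.0f}; /* From the earlier sketch. */
  float frac;
  const int idx = find_segment(accum_len, 3, 0.5f * 5.0f, &frac); /* ctime = 0.5 */
  printf("segment %d, fraction %.3f\n", idx, frac); /* segment 1, fraction 0.75 */
  return 0;
}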
  /* In BKE_where_on_path(): the same guard, there must be bevel list data to evaluate. */
  if (bl == NULL || !bl->nr) {
    /* ... */
  }
  /* ... */
  /* 'ctime' is expected to be a normalized 0..1 factor along the path. */
  if (ctime < 0.0f || ctime > 1.0f) {
    /* ... */
  }
  /* ... */
  /* Convert the normalized time into a length along the curve. */
  const float goal_len = ctime * accum_len_arr[seg_size - 1];
  if (ctime <= 0.0f || ctime >= 1.0f) {
    /* Start or end of the path: clamp the time and pick the segment directly
     * instead of doing the binary search. */
    const float clamp_time = clamp_f(ctime, 0.0f, 1.0f);
    const int idx = clamp_time * (seg_size - 1);
    /* ... */
    /* First segment. */
    frac = goal_len / accum_len_arr[0];
    /* ... */
    /* Later segments: fraction of the way through the bracketing segment. */
    frac = (goal_len - accum_len_arr[idx - 1]) / (accum_len_arr[idx] - accum_len_arr[idx - 1]);
  /* ... */
  /* The interpolated position is a weighted sum of the four control points. */
  r_vec[0] = w[0] * p0->vec[0] + w[1] * p1->vec[0] + w[2] * p2->vec[0] + w[3] * p3->vec[0];
  r_vec[1] = w[0] * p0->vec[1] + w[1] * p1->vec[1] + w[2] * p2->vec[1] + w[3] * p3->vec[1];
  r_vec[2] = w[0] * p0->vec[2] + w[1] * p1->vec[2] + w[2] * p2->vec[2] + w[3] * p3->vec[2];
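/* In Blender the four weights come from the curve's key basis (key_curve_position_weights()).
 * As a standalone illustration of the weighted-sum form, the sketch below uses uniform
 * Catmull-Rom basis weights and made-up points; that basis is an assumption for
 * demonstration, not necessarily the one Blender applies here. */

#include <stdio.h>

static void catmull_rom_weights(float t, float w[4])
{
  const float t2 = t * t, t3 = t2 * t;
  w[0] = 0.5f * (-t3 + 2.0f * t2 - t);
  w[1] = 0.5f * (3.0f * t3 - 5.0f * t2 + 2.0f);
  w[2] = 0.5f * (-3.0f * t3 + 4.0f * t2 + t);
  w[3] = 0.5f * (t3 - t2);
}

int main(void)
{
  const float p0[3] = {0, 0, 0}, p1[3] = {1, 0, 0}, p2[3] = {2, 1, 0}, p3[3] = {3, 3, 0};
  float w[4], r[3];

  catmull_rom_weights(0.5f, w);
  for (int axis = 0; axis < 3; axis++) {
    /* Same weighted-sum shape as the fragment above. */
    r[axis] = w[0] * p0[axis] + w[1] * p1[axis] + w[2] * p2[axis] + w[3] * p3[axis];
  }
  printf("interpolated point: %.3f %.3f %.3f\n", r[0], r[1], r[2]);
  return 0;
}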
  /* ... */
  /* Rotation is interpolated in three steps: blend the outer pair of points, blend the
   * inner pair, then blend the two intermediate results. */
  float totfac, q1[4], q2[4];

  totfac = w[0] + w[3];
  if (totfac > FLT_EPSILON) {
    interp_qt_qtqt(q1, p0->quat, p3->quat, w[3] / totfac);
  }
  /* ... */

  totfac = w[1] + w[2];
  if (totfac > FLT_EPSILON) {
    interp_qt_qtqt(q2, p1->quat, p2->quat, w[2] / totfac);
  }
  /* ... */

  totfac = w[0] + w[1] + w[2] + w[3];
  if (totfac > FLT_EPSILON) {
    interp_qt_qtqt(r_quat, q1, q2, (w[1] + w[2]) / totfac);
  }
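/* To see the blending scheme in isolation, here is a minimal standalone sketch. It
 * substitutes a simple normalized lerp (nlerp_qt, a hypothetical helper) for Blender's
 * interp_qt_qtqt() and uses made-up weights and rotations, so it only illustrates the
 * three-step structure, not the exact result Blender computes. */

#include <math.h>
#include <stdio.h>

static void nlerp_qt(float out[4], const float a[4], const float b[4], float t)
{
  float len = 0.0f;
  for (int i = 0; i < 4; i++) {
    out[i] = a[i] * (1.0f - t) + b[i] * t;
    len += out[i] * out[i];
  }
  len = sqrtf(len);
  for (int i = 0; i < 4; i++) {
    out[i] /= len;
  }
}

int main(void)
{
  /* Identity and a 90-degree rotation about Z as dummy key rotations. */
  const float qa[4] = {1.0f, 0.0f, 0.0f, 0.0f};
  const float qb[4] = {0.70710678f, 0.0f, 0.0f, 0.70710678f};
  const float w[4] = {0.1f, 0.4f, 0.4f, 0.1f}; /* Example basis weights. */

  float q1[4], q2[4], out[4];
  /* Outer pair, inner pair, then the final blend (same shape as the fragment above). */
  nlerp_qt(q1, qa, qb, w[3] / (w[0] + w[3]));
  nlerp_qt(q2, qa, qb, w[2] / (w[1] + w[2]));
  nlerp_qt(out, q1, q2, (w[1] + w[2]) / (w[0] + w[1] + w[2] + w[3]));

  printf("blended quat: %.4f %.4f %.4f %.4f\n", out[0], out[1], out[2], out[3]);
  return 0;
}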