# SPDX-License-Identifier: GPL-2.0-or-later

__all__ = (
    "bake_action",
    "bake_action_objects",
    "bake_action_iter",
    "bake_action_objects_iter",
)

import bpy

from bpy.types import Action

from typing import (
    List,
    Mapping,
    Sequence,
    Tuple,
)

FCurveKey = Tuple[
    # `fcurve.data_path`.
    str,
    # `fcurve.array_index`.
    int,
]

# List of `[frame0, value0, frame1, value1, ...]` pairs.
ListKeyframes = List[float]


def bake_action(
        obj,
        *,
        action, frames,
        **kwargs
):
    """
    :arg obj: Object to bake.
    :type obj: :class:`bpy.types.Object`
    :arg action: An action to bake the data into, or None for a new action
       to be created.
    :type action: :class:`bpy.types.Action` or None
    :arg frames: Frames to bake.
    :type frames: iterable of int

    :return: an action or None
    :rtype: :class:`bpy.types.Action`
    """
    if not (kwargs.get("do_pose") or kwargs.get("do_object")):
        return None

    action, = bake_action_objects(
        [(obj, action)],
        frames=frames,
        **kwargs,
    )
    return action
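

# A minimal usage sketch for `bake_action` (illustrative only; it assumes an
# object named "Armature" exists in the current blend-file). The extra keyword
# arguments are forwarded through to `bake_action_iter`.
def _example_bake_action():
    obj = bpy.data.objects["Armature"]  # assumed name, for illustration
    return bake_action(
        obj,
        action=None,  # bake into a freshly created action
        frames=range(1, 251),
        do_pose=True,
        do_object=True,
        do_visual_keying=True,
    )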


def bake_action_objects(
        object_action_pairs,
        *,
        frames,
        **kwargs
):
    """
    A version of :func:`bake_action_objects_iter` that takes frames and returns the output.

    :arg frames: Frames to bake.
    :type frames: iterable of int

    :return: A sequence of Action or None types (aligned with `object_action_pairs`)
    :rtype: sequence of :class:`bpy.types.Action`
    """
    iter = bake_action_objects_iter(object_action_pairs, **kwargs)
    iter.send(None)
    for frame in frames:
        iter.send(frame)
    return iter.send(None)
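

# A sketch of baking several objects in one pass with `bake_action_objects`.
# Object names are assumptions for illustration; passing None as the action
# creates a new action per object, and the returned sequence is aligned with
# the input pairs.
def _example_bake_action_objects():
    pairs = [
        (bpy.data.objects["Cube"], None),
        (bpy.data.objects["Suzanne"], None),
    ]
    return bake_action_objects(
        pairs,
        frames=range(1, 101),
        do_object=True,
        do_pose=False,
    )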


def bake_action_objects_iter(
        object_action_pairs,
        **kwargs
):
    """
    A coroutine that bakes actions for multiple objects.

    :arg object_action_pairs: Sequence of (object, action) tuples, where the
       action is the destination for the baked data. When None, a new action will be created.
    :type object_action_pairs: Sequence of (:class:`bpy.types.Object`, :class:`bpy.types.Action`)
    """
    scene = bpy.context.scene
    frame_back = scene.frame_current
    iter_all = tuple(
        bake_action_iter(obj, action=action, **kwargs)
        for (obj, action) in object_action_pairs
    )
    for iter in iter_all:
        iter.send(None)
    while True:
        frame = yield None
        if frame is None:
            break
        scene.frame_set(frame)
        bpy.context.view_layer.update()
        for iter in iter_all:
            iter.send(frame)
    scene.frame_set(frame_back)
    yield tuple(iter.send(None) for iter in iter_all)
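

# Driving the coroutine by hand, as `bake_action_objects` does internally:
# prime it with `send(None)`, send each frame (the scene frame is set and the
# view layer updated for you), then send `None` once more to receive the baked
# actions. A sketch only; the object name is an assumption.
def _example_drive_bake_action_objects_iter():
    obj = bpy.data.objects["Armature"]  # assumed name, for illustration
    it = bake_action_objects_iter([(obj, None)], do_pose=True, do_object=False)
    it.send(None)  # prime the coroutine
    for frame in range(1, 33):
        it.send(frame)  # sets the scene frame, updates and samples
    (baked_action,) = it.send(None)  # finish and collect the results
    return baked_action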


# XXX visual keying is actually always considered as True in this code...
def bake_action_iter(
        obj,
        *,
        action,
        only_selected=False,
        do_pose=True,
        do_object=True,
        do_visual_keying=True,
        do_constraint_clear=False,
        do_parents_clear=False,
        do_clean=False
):
    """
    A coroutine that bakes the action for a single object.

    :arg obj: Object to bake.
    :type obj: :class:`bpy.types.Object`
    :arg action: An action to bake the data into, or None for a new action
       to be created.
    :type action: :class:`bpy.types.Action` or None
    :arg only_selected: Only bake selected bones.
    :type only_selected: bool
    :arg do_pose: Bake pose channels.
    :type do_pose: bool
    :arg do_object: Bake objects.
    :type do_object: bool
    :arg do_visual_keying: Use the final transformations for baking ('visual keying').
    :type do_visual_keying: bool
    :arg do_constraint_clear: Remove constraints after baking.
    :type do_constraint_clear: bool
    :arg do_parents_clear: Unparent after baking objects.
    :type do_parents_clear: bool
    :arg do_clean: Remove redundant keyframes after baking.
    :type do_clean: bool

    :return: an action or None
    :rtype: :class:`bpy.types.Action`
    """
    # -------------------------------------------------------------------------
    # Helper Functions and vars

    # Note: BBONE_PROPS is a list so we can preserve the ordering
    BBONE_PROPS = [
        "bbone_curveinx", "bbone_curveoutx",
        "bbone_curveinz", "bbone_curveoutz",
        "bbone_rollin", "bbone_rollout",
        "bbone_scalein", "bbone_scaleout",
        "bbone_easein", "bbone_easeout",
    ]
    BBONE_PROPS_LENGTHS = {
        "bbone_curveinx": 1,
        "bbone_curveoutx": 1,
        "bbone_curveinz": 1,
        "bbone_curveoutz": 1,
        "bbone_rollin": 1,
        "bbone_rollout": 1,
        "bbone_scalein": 3,
        "bbone_scaleout": 3,
        "bbone_easein": 1,
        "bbone_easeout": 1,
    }

    def pose_frame_info(obj):
        matrix = {}
        bbones = {}
        for name, pbone in obj.pose.bones.items():
            if do_visual_keying:
                # Get the final transform of the bone in its own local space...
                matrix[name] = obj.convert_space(pose_bone=pbone, matrix=pbone.matrix,
                                                 from_space='POSE', to_space='LOCAL')
            else:
                matrix[name] = pbone.matrix_basis.copy()

            # Bendy Bones
            if pbone.bone.bbone_segments > 1:
                bbones[name] = {bb_prop: getattr(pbone, bb_prop) for bb_prop in BBONE_PROPS}
        return matrix, bbones

    # How the object's transform is sampled each frame depends on whether
    # parents are cleared afterwards and whether visual keying is used.
    if do_parents_clear:
        if do_visual_keying:
            def obj_frame_info(obj):
                return obj.matrix_world.copy()
        else:
            def obj_frame_info(obj):
                parent = obj.parent
                matrix = obj.matrix_basis
                if parent:
                    return parent.matrix_world @ matrix
                else:
                    return matrix.copy()
    else:
        if do_visual_keying:
            def obj_frame_info(obj):
                parent = obj.parent
                matrix = obj.matrix_world
                if parent:
                    return parent.matrix_world.inverted_safe() @ matrix
                else:
                    return matrix.copy()
        else:
            def obj_frame_info(obj):
                return obj.matrix_basis.copy()

    # -------------------------------------------------------------------------
    # Setup the Context

    if obj.pose is None:
        do_pose = False

    if not (do_pose or do_object):
        raise Exception("Pose and object baking is disabled, no action needed")

    pose_info = []
    obj_info = []

    # -------------------------------------------------------------------------
    # Collect transformations

    while True:
        # Caller is responsible for setting the frame and updating the scene.
        frame = yield None

        # Signal we're done!
        if frame is None:
            break

        if do_pose:
            pose_info.append((frame, *pose_frame_info(obj)))
        if do_object:
            obj_info.append((frame, obj_frame_info(obj)))

    # -------------------------------------------------------------------------
    # Clean (store initial data)
    if do_clean and action is not None:
        clean_orig_data = {fcu: {p.co[1] for p in fcu.keyframe_points} for fcu in action.fcurves}
    else:
        clean_orig_data = {}

    # -------------------------------------------------------------------------
    # Create action

    # in case animation data hasn't been created
    atd = obj.animation_data_create()
    is_new_action = action is None
    if is_new_action:
        action = bpy.data.actions.new("Action")

    # Only leave tweak mode if we actually need to modify the action (#57159)
    if action != atd.action:
        # Leave tweak mode before trying to modify the action (#48397)
        if atd.use_tweak_mode:
            atd.use_tweak_mode = False

        atd.action = action

    # Baking the action only makes sense in Replace mode, so force it (#69105)
    if not atd.use_tweak_mode:
        atd.action_blend_type = 'REPLACE'

    # -------------------------------------------------------------------------
    # Apply transformations to action

    # pose
    lookup_fcurves = {(fcurve.data_path, fcurve.array_index): fcurve for fcurve in action.fcurves}
    if do_pose:
        for name, pbone in obj.pose.bones.items():
            if only_selected and not pbone.bone.select:
                continue

            if do_constraint_clear:
                while pbone.constraints:
                    pbone.constraints.remove(pbone.constraints[0])

            # Create compatible eulers, quats.
            euler_prev = None
            quat_prev = None

            base_fcurve_path = pbone.path_from_id() + "."
            path_location = base_fcurve_path + "location"
            path_quaternion = base_fcurve_path + "rotation_quaternion"
            path_axis_angle = base_fcurve_path + "rotation_axis_angle"
            path_euler = base_fcurve_path + "rotation_euler"
            path_scale = base_fcurve_path + "scale"
            paths_bbprops = [(base_fcurve_path + bbprop) for bbprop in BBONE_PROPS]

            keyframes = KeyframesCo()
            keyframes.add_paths(path_location, 3)
            keyframes.add_paths(path_quaternion, 4)
            keyframes.add_paths(path_axis_angle, 4)
            keyframes.add_paths(path_euler, 3)
            keyframes.add_paths(path_scale, 3)

            if pbone.bone.bbone_segments > 1:
                for prop_name, path in zip(BBONE_PROPS, paths_bbprops):
                    keyframes.add_paths(path, BBONE_PROPS_LENGTHS[prop_name])

            rotation_mode = pbone.rotation_mode
            total_new_keys = len(pose_info)
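            # Buffer the key values frame by frame; the F-curves themselves are
            # written in a single batch after this loop.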
            for (f, matrix, bbones) in pose_info:
                pbone.matrix_basis = matrix[name].copy()

                keyframes.extend_co_values(path_location, 3, f, pbone.location)

                if rotation_mode == 'QUATERNION':
                    if quat_prev is not None:
                        quat = pbone.rotation_quaternion.copy()
                        quat.make_compatible(quat_prev)
                        pbone.rotation_quaternion = quat
                        quat_prev = quat
                        del quat
                    else:
                        quat_prev = pbone.rotation_quaternion.copy()
                    keyframes.extend_co_values(path_quaternion, 4, f, pbone.rotation_quaternion)
                elif rotation_mode == 'AXIS_ANGLE':
                    keyframes.extend_co_values(path_axis_angle, 4, f, pbone.rotation_axis_angle)
                else:  # euler, XYZ, ZXY etc
                    if euler_prev is not None:
                        euler = pbone.matrix_basis.to_euler(pbone.rotation_mode, euler_prev)
                        pbone.rotation_euler = euler
                        del euler
                    euler_prev = pbone.rotation_euler.copy()
                    keyframes.extend_co_values(path_euler, 3, f, pbone.rotation_euler)

                keyframes.extend_co_values(path_scale, 3, f, pbone.scale)

                # Bendy Bones
                if pbone.bone.bbone_segments > 1:
                    bbone_shape = bbones[name]
                    for prop_index, prop_name in enumerate(BBONE_PROPS):
                        prop_len = BBONE_PROPS_LENGTHS[prop_name]
                        if prop_len > 1:
                            keyframes.extend_co_values(
                                paths_bbprops[prop_index], prop_len, f, bbone_shape[prop_name]
                            )
                        else:
                            keyframes.extend_co_value(
                                paths_bbprops[prop_index], f, bbone_shape[prop_name]
                            )

            if is_new_action:
                keyframes.insert_keyframes_into_new_action(total_new_keys, action, name)
            else:
                keyframes.insert_keyframes_into_existing_action(lookup_fcurves, total_new_keys, action, name)

    # object. TODO. multiple objects
    if do_object:
        if do_constraint_clear:
            while obj.constraints:
                obj.constraints.remove(obj.constraints[0])

        # Create compatible eulers, quats.
        euler_prev = None
        quat_prev = None

        path_location = "location"
        path_quaternion = "rotation_quaternion"
        path_axis_angle = "rotation_axis_angle"
        path_euler = "rotation_euler"
        path_scale = "scale"

        keyframes = KeyframesCo()
        keyframes.add_paths(path_location, 3)
        keyframes.add_paths(path_quaternion, 4)
        keyframes.add_paths(path_axis_angle, 4)
        keyframes.add_paths(path_euler, 3)
        keyframes.add_paths(path_scale, 3)

        rotation_mode = obj.rotation_mode
        total_new_keys = len(obj_info)
        for (f, matrix) in obj_info:
            name = "Action Bake"  # XXX: placeholder
            obj.matrix_basis = matrix

            keyframes.extend_co_values(path_location, 3, f, obj.location)

            if rotation_mode == 'QUATERNION':
                if quat_prev is not None:
                    quat = obj.rotation_quaternion.copy()
                    quat.make_compatible(quat_prev)
                    obj.rotation_quaternion = quat
                    quat_prev = quat
                    del quat
                else:
                    quat_prev = obj.rotation_quaternion.copy()
                keyframes.extend_co_values(path_quaternion, 4, f, obj.rotation_quaternion)

            elif rotation_mode == 'AXIS_ANGLE':
                keyframes.extend_co_values(path_axis_angle, 4, f, obj.rotation_axis_angle)
            else:  # euler, XYZ, ZXY etc
                if euler_prev is not None:
                    obj.rotation_euler = matrix.to_euler(obj.rotation_mode, euler_prev)
                euler_prev = obj.rotation_euler.copy()
                keyframes.extend_co_values(path_euler, 3, f, obj.rotation_euler)

            keyframes.extend_co_values(path_scale, 3, f, obj.scale)

        if is_new_action:
            keyframes.insert_keyframes_into_new_action(total_new_keys, action, name)
        else:
            keyframes.insert_keyframes_into_existing_action(lookup_fcurves, total_new_keys, action, name)

        if do_parents_clear:
            obj.parent = None

    # -------------------------------------------------------------------------
    # Clean

    if do_clean:
        for fcu in action.fcurves:
            fcu_orig_data = clean_orig_data.get(fcu, set())

            keyframe_points = fcu.keyframe_points
            i = 1
            while i < len(keyframe_points) - 1:
                val = keyframe_points[i].co[1]

                if val in fcu_orig_data:
                    i += 1
                    continue

                val_prev = keyframe_points[i - 1].co[1]
                val_next = keyframe_points[i + 1].co[1]

                if abs(val - val_prev) + abs(val - val_next) < 0.0001:
                    keyframe_points.remove(keyframe_points[i])
                else:
                    i += 1

    yield action
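

# Driving `bake_action_iter` directly: unlike the wrappers above, the caller is
# responsible for setting the frame and updating the view layer between sends.
# A sketch only, assuming `obj` is an armature object.
def _example_drive_bake_action_iter(obj, frame_start=1, frame_end=50):
    scene = bpy.context.scene
    it = bake_action_iter(obj, action=None, do_pose=True, do_object=False, do_clean=True)
    it.send(None)  # prime the coroutine
    for frame in range(frame_start, frame_end + 1):
        scene.frame_set(frame)
        bpy.context.view_layer.update()
        it.send(frame)
    return it.send(None)  # the baked action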


class KeyframesCo:
    """
    A buffer of keyframe Co (coordinate) values, stored per ``FCurveKey``. ``FCurveKeys`` are added
    using ``add_paths()``, Co values are stored using ``extend_co_values()``, then finally
    ``insert_keyframes_into_*_action()`` is used to efficiently insert the keys into the F-curves.

    Users are limited to one Action Group per instance.
    """
    __slots__ = (
        "keyframes_from_fcurve",
    )

    # `keyframes[(rna_path, array_index)] = [time0, value0, time1, value1, ...]`.
    keyframes_from_fcurve: Mapping[FCurveKey, ListKeyframes]

    def __init__(self):
        self.keyframes_from_fcurve = {}

    def add_paths(
        self,
        rna_path: str,
        total_indices: int,
    ) -> None:
        keyframes_from_fcurve = self.keyframes_from_fcurve
        for array_index in range(0, total_indices):
            keyframes_from_fcurve[(rna_path, array_index)] = []

    def extend_co_values(
        self,
        rna_path: str,
        total_indices: int,
        frame: float,
        values: Sequence[float],
    ) -> None:
        keyframes_from_fcurve = self.keyframes_from_fcurve
        for array_index in range(0, total_indices):
            keyframes_from_fcurve[(rna_path, array_index)].extend((frame, values[array_index]))

    def extend_co_value(
        self,
        rna_path: str,
        frame: float,
        value: float,
    ) -> None:
        self.keyframes_from_fcurve[(rna_path, 0)].extend((frame, value))

    def insert_keyframes_into_new_action(
        self,
        total_new_keys: int,
        action: Action,
        action_group_name: str,
    ) -> None:
        """
        Assumes the action is new and has no F-curves. Otherwise, the only difference
        from ``insert_keyframes_into_existing_action()`` is performance and implementation simplicity.

        :arg action_group_name: Name of the Action Group that the F-curves are added to.
        :type action_group_name: str
        """
        linear_enum_values = [
            bpy.types.Keyframe.bl_rna.properties["interpolation"].enum_items["LINEAR"].value
        ] * total_new_keys

        for fc_key, key_values in self.keyframes_from_fcurve.items():
            if len(key_values) == 0:
                continue

            data_path, array_index = fc_key
            keyframe_points = action.fcurves.new(
                data_path, index=array_index, action_group=action_group_name
            ).keyframe_points

            keyframe_points.add(total_new_keys)
            keyframe_points.foreach_set("co", key_values)
            keyframe_points.foreach_set("interpolation", linear_enum_values)

            # There's no need to do fcurve.update() because the keys are already ordered, have
            # no duplicates and all handles are Linear.

    def insert_keyframes_into_existing_action(
        self,
        lookup_fcurves: Mapping[FCurveKey, bpy.types.FCurve],
        total_new_keys: int,
        action: Action,
        action_group_name: str,
    ) -> None:
        """
        Assumes the action already exists and may already have F-curves. Otherwise, the only
        difference from ``insert_keyframes_into_new_action()`` is performance and implementation simplicity.

        :arg lookup_fcurves: This is only used for efficiency.
           It's a substitute for ``action.fcurves.find()`` which is a potentially expensive linear search.
        :type lookup_fcurves: ``Mapping[FCurveKey, bpy.types.FCurve]``
        :arg action_group_name: Name of the Action Group that the F-curves are added to.
        :type action_group_name: str
        """
        linear_enum_values = [
            bpy.types.Keyframe.bl_rna.properties["interpolation"].enum_items["LINEAR"].value
        ] * total_new_keys

        for fc_key, key_values in self.keyframes_from_fcurve.items():
            if len(key_values) == 0:
                continue

            fcurve = lookup_fcurves.get(fc_key, None)
            if fcurve is None:
                data_path, array_index = fc_key
                fcurve = action.fcurves.new(
                    data_path, index=array_index, action_group=action_group_name
                )

            keyframe_points = fcurve.keyframe_points

            co_buffer = [0] * (2 * len(keyframe_points))
            keyframe_points.foreach_get("co", co_buffer)
            co_buffer.extend(key_values)

            ipo_buffer = [None] * len(keyframe_points)
            keyframe_points.foreach_get("interpolation", ipo_buffer)
            ipo_buffer.extend(linear_enum_values)

            # XXX: Currently baking inserts the same number of keys for all baked properties.
            # This block of code breaks if that's no longer true, since we then will not be
            # properly initializing all the data.
            keyframe_points.add(total_new_keys)
            keyframe_points.foreach_set("co", co_buffer)
            keyframe_points.foreach_set("interpolation", ipo_buffer)

            # TODO: in Blender 4.0 the next lines can be replaced with one call to `fcurve.update()`.
            # See https://projects.blender.org/blender/blender/issues/107126 for more info.
            keyframe_points.sort()
            keyframe_points.deduplicate()
            keyframe_points.handles_recalc()
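

# A minimal sketch of using `KeyframesCo` on its own: buffer a few object-level
# location keys and write them into a new action in one batch. The action and
# group names are arbitrary examples.
def _example_keyframes_co():
    action = bpy.data.actions.new("ExampleBake")
    keyframes = KeyframesCo()
    keyframes.add_paths("location", 3)
    for frame in (1.0, 2.0, 3.0):
        keyframes.extend_co_values("location", 3, frame, (frame, 0.0, 0.0))
    keyframes.insert_keyframes_into_new_action(3, action, "Object Transforms")
    return action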