[Bf-extensions-cvs] [6e0b57f] fbx_io_development: FBX export: Animation rework + add support for animated shape keys.
Bastien Montagne
noreply at git.blender.org
Sun Jun 15 15:24:19 CEST 2014
Commit: 6e0b57ff163d5b72b2bd13f0eda1a69dd98d61a8
Author: Bastien Montagne
Date: Sat Jun 14 22:26:58 2014 +0200
https://developer.blender.org/rBA6e0b57ff163d5b72b2bd13f0eda1a69dd98d61a8
FBX export: Animation rework + add support for animated shape keys.
Note: Unity does not seem to understand those animated shape keys, though
generated FBX *looks* OK. I suspect it rather relies on older animated shapes
(FBX seems to have two different ways to do it, either through the (assumed
newer and future-proof) deformers system, or through (assumed old, to-be-deprecated)
mesh's properties... I'd bet it's the issue here... :/
So we'll probably have to export all shape animations twice. :'(
===================================================================
M io_scene_fbx/__init__.py
M io_scene_fbx/export_fbx_bin.py
M io_scene_fbx/fbx_utils.py
===================================================================
diff --git a/io_scene_fbx/__init__.py b/io_scene_fbx/__init__.py
index 86d7ac5..d1baf57 100644
--- a/io_scene_fbx/__init__.py
+++ b/io_scene_fbx/__init__.py
@@ -34,13 +34,13 @@ bl_info = {
if "bpy" in locals():
- import imp
+ import importlib
if "import_fbx" in locals():
- imp.reload(import_fbx)
+ importlib.reload(import_fbx)
if "export_fbx_bin" in locals():
- imp.reload(export_fbx_bin)
+ importlib.reload(export_fbx_bin)
if "export_fbx" in locals():
- imp.reload(export_fbx)
+ importlib.reload(export_fbx)
import bpy
diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py
index c684805..9eaf029 100644
--- a/io_scene_fbx/export_fbx_bin.py
+++ b/io_scene_fbx/export_fbx_bin.py
@@ -30,11 +30,20 @@ import time
from collections import OrderedDict
from itertools import zip_longest, chain
+if "bpy" in locals():
+ import importlib
+ if "encode_bin" in locals():
+ importlib.reload(encode_bin)
+ if "data_types" in locals():
+ importlib.reload(data_types)
+ if "fbx_utils" in locals():
+ importlib.reload(fbx_utils)
+
import bpy
import bpy_extras
from mathutils import Vector, Matrix
-from . import encode_bin, data_types
+from . import encode_bin, data_types, fbx_utils
from .fbx_utils import (
# Constants.
FBX_VERSION, FBX_HEADER_VERSION, FBX_SCENEINFO_VERSION, FBX_TEMPLATES_VERSION,
@@ -46,6 +55,7 @@ from .fbx_utils import (
FBX_POSE_BIND_VERSION, FBX_DEFORMER_SKIN_VERSION, FBX_DEFORMER_CLUSTER_VERSION,
FBX_MATERIAL_VERSION, FBX_TEXTURE_VERSION,
FBX_ANIM_KEY_VERSION,
+ FBX_ANIM_PROPSGROUP_NAME,
FBX_KTIME,
BLENDER_OTHER_OBJECT_TYPES, BLENDER_OBJECT_TYPES_MESHLIKE,
FBX_LIGHT_TYPES, FBX_LIGHT_DECAY_TYPES,
@@ -74,6 +84,8 @@ from .fbx_utils import (
elem_props_template_init, elem_props_template_set, elem_props_template_finalize,
# Templates.
FBXTemplate, fbx_templates_generate,
+ # Animation.
+ AnimationCurveNodeWrapper,
# Objects.
ObjectWrapper, fbx_name_class,
# Top level.
@@ -474,7 +486,7 @@ def fbx_template_def_animlayer(scene, settings, override_defaults=None, nbr_user
def fbx_template_def_animcurvenode(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict((
- (b"d", (None, "p_compound", False)),
+ (FBX_ANIM_PROPSGROUP_NAME.encode(), (None, "p_compound", False)),
))
if override_defaults is not None:
props.update(override_defaults)
@@ -742,8 +754,10 @@ def fbx_data_mesh_shapes_elements(root, me_obj, me, scene_data, fbx_me_tmpl, fbx
elem_data_single_float64_array(fbx_channel, b"FullWeights", shape_verts_weights)
# *WHY* add this in linked mesh properties too? *cry*
- # No idea whether it’s percent here too, or more usual factor :/
- elem_props_template_set(fbx_me_tmpl, fbx_me_props, "p_number", shape.name.encode(), shape.value, animatable=True)
+ # No idea whether it’s percent here too, or more usual factor (assume percentage for now) :/
+ elem_props_template_set(fbx_me_tmpl, fbx_me_props, "p_number", shape.name.encode(), shape.value * 100.0,
+ animatable=True)
+
def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
"""
@@ -763,7 +777,6 @@ def fbx_data_mesh_elements(root, me_obj, scene_data, done_meshes):
# No gscale/gmat here, all data are supposed to be in object space.
smooth_type = scene_data.settings.mesh_smooth_type
- me_obj = ObjectWrapper(me_obj)
do_bake_space_transform = me_obj.use_bake_space_transform(scene_data)
# Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
@@ -1366,7 +1379,7 @@ def fbx_data_armature_elements(root, arm_obj, scene_data):
elem_data_single_float64_array(fbx_posenode, b"Matrix", matrix_to_array(bomat))
"""
- mat_worl_obj, mat_world_bones = fbx_data_bindpose_element(root, ob_obj, me, scene_data, arm_obj, bones)
+ mat_world_obj, mat_world_bones = fbx_data_bindpose_element(root, ob_obj, me, scene_data, arm_obj, bones)
# Deformer.
fbx_skin = elem_data_single_int64(root, b"Deformer", get_fbx_uuid_from_key(skin_key))
@@ -1647,9 +1660,8 @@ def fbx_skeleton_from_armature(scene, settings, arm_obj, objects, data_meshes,
continue
# Always handled by an Armature modifier...
- ob = ob_obj.bdata
found = False
- for mod in ob.modifiers:
+ for mod in ob_obj.bdata.modifiers:
if mod.type not in {'ARMATURE'}:
continue
# We only support vertex groups binding method, not bone envelopes one!
@@ -1663,7 +1675,7 @@ def fbx_skeleton_from_armature(scene, settings, arm_obj, objects, data_meshes,
# Now we have a mesh using this armature.
# Note: bindpose have no relations at all (no connections), so no need for any preprocess for them.
# Create skin & clusters relations (note skins are connected to geometry, *not* model!).
- _key, me, _free = data_meshes[ob]
+ _key, me, _free = data_meshes[ob_obj]
clusters = OrderedDict((bo, get_blender_bone_cluster_key(arm_obj.bdata, me, bo.bdata)) for bo in bones)
data_deformers_skin.setdefault(arm_obj, OrderedDict())[me] = (get_blender_armature_skin_key(arm_obj.bdata, me),
ob_obj, clusters)
@@ -1674,61 +1686,13 @@ def fbx_skeleton_from_armature(scene, settings, arm_obj, objects, data_meshes,
objects.update(bones)
-def fbx_animations_simplify(scene_data, animdata):
- """
- Simplifies FCurves!
- """
- fac = scene_data.settings.bake_anim_simplify_factor
- step = scene_data.settings.bake_anim_step
- # So that, with default factor and step values (1), we get:
- max_frame_diff = step * fac * 10 # max step of 10 frames.
- value_diff_fac = fac / 1000 # min value evolution: 0.1% of whole range.
- min_significant_diff = 1.0e-6
-
- for keys in animdata.values():
- if not keys:
- continue
- extremums = [(min(values), max(values)) for values in zip(*(k[1] for k in keys))]
- min_diffs = [max((mx - mn) * value_diff_fac, min_significant_diff) for mn, mx in extremums]
- p_currframe, p_key, p_key_write = keys[0]
- p_keyed = [(p_currframe - max_frame_diff, val) for val in p_key]
- are_keyed = [False] * len(p_key)
- for currframe, key, key_write in keys:
- for idx, (val, p_val) in enumerate(zip(key, p_key)):
- p_keyedframe, p_keyedval = p_keyed[idx]
- if val == p_val:
- # Never write keyframe when value is exactly the same as prev one!
- continue
- if abs(val - p_val) >= min_diffs[idx]:
- # If enough difference from previous sampled value, key this value *and* the previous one!
- key_write[idx] = True
- p_key_write[idx] = True
- p_keyed[idx] = (currframe, val)
- are_keyed[idx] = True
- else:
- frame_diff = currframe - p_keyedframe
- val_diff = abs(val - p_keyedval)
- if ((val_diff >= min_diffs[idx]) or
- ((val_diff >= min_significant_diff) and (frame_diff >= max_frame_diff))):
- # Else, if enough difference from previous keyed value
- # (or any significant difference and max gap between keys is reached),
- # key this value only!
- key_write[idx] = True
- p_keyed[idx] = (currframe, val)
- are_keyed[idx] = True
- p_currframe, p_key, p_key_write = currframe, key, key_write
- # If we did key something, ensure first and last sampled values are keyed as well.
- for idx, is_keyed in enumerate(are_keyed):
- if is_keyed:
- keys[0][2][idx] = keys[-1][2][idx] = True
-
-
-def fbx_animations_objects_do(scene_data, ref_id, f_start, f_end, start_zero, objects=None, force_keep=False):
+def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=None, force_keep=False):
"""
Generate animation data (a single AnimStack) from objects, for a given frame range.
"""
bake_step = scene_data.settings.bake_anim_step
scene = scene_data.scene
+ meshes = scene_data.data_meshes
if objects is not None:
# Add bones and duplis!
@@ -1745,15 +1709,19 @@ def fbx_animations_objects_do(scene_data, ref_id, f_start, f_end, start_zero, ob
else:
objects = scene_data.objects
- # FBX mapping info: Property affected, and name of the "sub" property (to distinguish e.g. vector's channels).
- fbx_names = (
- ("Lcl Translation", "T", "d|X"), ("Lcl Translation", "T", "d|Y"), ("Lcl Translation", "T", "d|Z"),
- ("Lcl Rotation", "R", "d|X"), ("Lcl Rotation", "R", "d|Y"), ("Lcl Rotation", "R", "d|Z"),
- ("Lcl Scaling", "S", "d|X"), ("Lcl Scaling", "S", "d|Y"), ("Lcl Scaling", "S", "d|Z"),
- )
-
back_currframe = scene.frame_current
- animdata = OrderedDict((obj, []) for obj in objects)
+ animdata_ob = OrderedDict((ob_obj, (AnimationCurveNodeWrapper(ob_obj.key, 'LCL_TRANSLATION', (0.0, 0.0, 0.0)),
+ AnimationCurveNodeWrapper(ob_obj.key, 'LCL_ROTATION', (0.0, 0.0, 0.0)),
+ AnimationCurveNodeWrapper(ob_obj.key, 'LCL_SCALING', (1.0, 1.0, 1.0))))
+ for ob_obj in objects)
+
+ animdata_shapes = OrderedDict()
+ for me, (me_key, _shapes_key, shapes) in scene_data.data_deformers_shape.items():
+ # Ignore absolute shape keys for now!
+ if not me.shape_keys.use_relative:
+ continue
+ for shape, (channel_key, geom_key, _shape_verts_co, _shape_verts_idx) in shapes.items():
+ animdata_shapes[channel_key] = (AnimationCurveNodeWrapp
@@ Diff output truncated at 10240 characters. @@
More information about the Bf-extensions-cvs
mailing list