# SPDX-FileCopyrightText: 2021-2023 Blender Authors
#
# SPDX-License-Identifier: Apache-2.0

# ./blender.bin --background --python tests/python/bl_blendfile_library_overrides.py -- --output-dir=/tmp/
import pathlib
import bpy
import sys
import os

sys.path.append(os.path.dirname(os.path.realpath(__file__)))
from bl_blendfile_utils import TestHelper
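
# NOTE: `TestHelper` comes from `bl_blendfile_utils` (expected to live next to this file); judging
# from its usage below it supplies the `args` storage, `ensure_path()`, the `assert*` helpers and
# `run_all_tests()`.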


class TestLibraryOverrides(TestHelper):
    MESH_LIBRARY_PARENT = "LibMeshParent"
    OBJECT_LIBRARY_PARENT = "LibMeshParent"
    MESH_LIBRARY_CHILD = "LibMeshChild"
    OBJECT_LIBRARY_CHILD = "LibMeshChild"
    MESH_LIBRARY_PERMISSIVE = "LibMeshPermissive"
    OBJECT_LIBRARY_PERMISSIVE = "LibMeshPermissive"

    def __init__(self, args):
        super().__init__(args)

        output_dir = pathlib.Path(self.args.output_dir)
        self.ensure_path(str(output_dir))
        self.output_path = output_dir / "blendlib_overrides.blend"
        self.test_output_path = output_dir / "blendlib_overrides_test.blend"

        bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)
        mesh = bpy.data.meshes.new(TestLibraryOverrides.MESH_LIBRARY_PARENT)
        obj = bpy.data.objects.new(TestLibraryOverrides.OBJECT_LIBRARY_PARENT, object_data=mesh)
        bpy.context.collection.objects.link(obj)
        mesh_child = bpy.data.meshes.new(TestLibraryOverrides.MESH_LIBRARY_CHILD)
        obj_child = bpy.data.objects.new(TestLibraryOverrides.OBJECT_LIBRARY_CHILD, object_data=mesh_child)
        obj_child.parent = obj
        bpy.context.collection.objects.link(obj_child)

        mesh = bpy.data.meshes.new(TestLibraryOverrides.MESH_LIBRARY_PERMISSIVE)
        obj = bpy.data.objects.new(TestLibraryOverrides.OBJECT_LIBRARY_PERMISSIVE, object_data=mesh)
        bpy.context.collection.objects.link(obj)

        bpy.ops.wm.save_as_mainfile(filepath=str(self.output_path), check_existing=False, compress=False)
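
    # The library file written above thus contains two mesh objects (a parent and a child parented
    # to it) plus the stand-alone 'permissive' mesh object, all linked into the scene collection.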

    def test_link_and_override_property(self):
        bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)
        bpy.data.orphans_purge()

        link_dir = self.output_path / "Object"
        bpy.ops.wm.link(directory=str(link_dir), filename=TestLibraryOverrides.OBJECT_LIBRARY_PARENT)

        obj = bpy.data.objects[TestLibraryOverrides.OBJECT_LIBRARY_PARENT]
        self.assertIsNone(obj.override_library)
        local_id = obj.override_create()
        self.assertIsNotNone(local_id.override_library)
        self.assertIsNone(local_id.data.override_library)
        self.assertEqual(len(local_id.override_library.properties), 0)

        # #### Generate an override property & operation automatically by editing the local override data.
        local_id.location.y = 1.0
        local_id.override_library.operations_update()
        self.assertEqual(len(local_id.override_library.properties), 1)
        override_prop = local_id.override_library.properties[0]
        self.assertEqual(override_prop.rna_path, "location")
        self.assertEqual(len(override_prop.operations), 1)
        override_operation = override_prop.operations[0]
        self.assertEqual(override_operation.operation, 'REPLACE')
        # Setting location.y overrode all elements in the location array. -1 is a wildcard.
        self.assertEqual(override_operation.subitem_local_index, -1)

        # #### Reset the override to its linked reference data.
        local_id.override_library.reset()
        self.assertEqual(len(local_id.override_library.properties), 0)
        self.assertEqual(local_id.location, local_id.override_library.reference.location)

        # #### Generate an override property & operation manually using the API.
        override_property = local_id.override_library.properties.add(rna_path="location")
        override_property.operations.add(operation='REPLACE')

        self.assertEqual(len(local_id.override_library.properties), 1)
        override_prop = local_id.override_library.properties[0]
        self.assertEqual(override_prop.rna_path, "location")
        self.assertEqual(len(override_prop.operations), 1)
        override_operation = override_prop.operations[0]
        self.assertEqual(override_operation.operation, 'REPLACE')
        # The manually added property covers the whole `location` array. -1 is a wildcard index.
        self.assertEqual(override_operation.subitem_local_index, -1)

        override_property = local_id.override_library.properties[0]
        override_property.operations.remove(override_property.operations[0])
        local_id.override_library.properties.remove(override_property)

        self.assertEqual(len(local_id.override_library.properties), 0)

        # #### Delete the override.
        local_id_name = local_id.name
        self.assertEqual(bpy.data.objects.get((local_id_name, None), None), local_id)
        local_id.override_library.destroy()
        self.assertIsNone(bpy.data.objects.get((local_id_name, None), None))
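
    # NOTE: `ID.override_create()` (used above) creates a liboverride for a single ID only, while
    # `ID.override_hierarchy_create(scene, view_layer)` (used by `TestLibraryOverridesComplex`
    # below) overrides a whole hierarchy of IDs at once. A minimal sketch, assuming a collection
    # named "LibCollection" has already been linked from a library:
    #
    #     linked_coll = bpy.data.collections["LibCollection"]
    #     override_root = linked_coll.override_hierarchy_create(bpy.context.scene, bpy.context.view_layer)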

    def test_link_permissive(self):
        bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)
        bpy.data.orphans_purge()

        link_dir = self.output_path / "Object"
        bpy.ops.wm.link(directory=str(link_dir), filename=TestLibraryOverrides.OBJECT_LIBRARY_PERMISSIVE)

        obj = bpy.data.objects[TestLibraryOverrides.OBJECT_LIBRARY_PERMISSIVE]
        self.assertIsNone(obj.override_library)
        local_id = obj.override_create()
        self.assertIsNotNone(local_id.override_library)
        self.assertIsNone(local_id.data.override_library)
        self.assertEqual(len(local_id.override_library.properties), 0)
        local_id.location.y = 1.0
        self.assertEqual(local_id.location.y, 1.0)

        local_id.override_library.operations_update()
        self.assertEqual(local_id.location.y, 1.0)

        self.assertEqual(len(local_id.override_library.properties), 1)
        override_prop = local_id.override_library.properties[0]
        self.assertEqual(override_prop.rna_path, "location")
        self.assertEqual(len(override_prop.operations), 1)
        override_operation = override_prop.operations[0]
        self.assertEqual(override_operation.operation, 'REPLACE')
        self.assertEqual(override_operation.subitem_local_index, -1)


class TestLibraryOverridesComplex(TestHelper):
    # Tests resync, recursive resync, overrides of overrides, ID name collision handling, and multiple overrides.

    DATA_NAME_CONTAINER = "LibCollection"
    DATA_NAME_RIGGED = "LibRigged"
    DATA_NAME_RIG = "LibRig"
    DATA_NAME_CONTROLLER_1 = "LibController1"
    DATA_NAME_CONTROLLER_2 = "LibController2"

    DATA_NAME_SAMENAME_CONTAINER = "LibCube"
    DATA_NAME_SAMENAME_0 = "LibCube"
    DATA_NAME_SAMENAME_1 = "LibCube.001"
    DATA_NAME_SAMENAME_2 = "LibCube.002"
    DATA_NAME_SAMENAME_3 = "LibCube.003"

    def __init__(self, args):
        super().__init__(args)

        output_dir = pathlib.Path(self.args.output_dir)
        self.ensure_path(str(output_dir))
        self.lib_output_path = output_dir / "blendlib_overrides_lib.blend"
        self.test_output_path = output_dir / "blendlib_overrides_test.blend"
        self.test_output_path_recursive = output_dir / "blendlib_overrides_test_recursive.blend"

    def reset(self):
        bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)

    def init_lib_data(self, custom_cb=None):
        self.reset()

        collection_container = bpy.data.collections.new(self.__class__.DATA_NAME_CONTAINER)
        bpy.context.collection.children.link(collection_container)

        mesh = bpy.data.meshes.new(self.__class__.DATA_NAME_RIGGED)
        obj_child = bpy.data.objects.new(self.__class__.DATA_NAME_RIGGED, object_data=mesh)
        collection_container.objects.link(obj_child)
        armature = bpy.data.armatures.new(self.__class__.DATA_NAME_RIG)
        obj_armature = bpy.data.objects.new(self.__class__.DATA_NAME_RIG, object_data=armature)
        obj_child.parent = obj_armature
        collection_container.objects.link(obj_armature)

        obj_child_modifier = obj_child.modifiers.new("", 'ARMATURE')
        obj_child_modifier.object = obj_armature

        obj_ctrl1 = bpy.data.objects.new(self.__class__.DATA_NAME_CONTROLLER_1, object_data=None)
        collection_container.objects.link(obj_ctrl1)

        obj_armature_constraint = obj_armature.constraints.new('COPY_LOCATION')
        obj_armature_constraint.target = obj_ctrl1

        collection_sub = bpy.data.collections.new(self.__class__.DATA_NAME_CONTROLLER_2)
        collection_container.children.link(collection_sub)
        obj_ctrl2 = bpy.data.objects.new(self.__class__.DATA_NAME_CONTROLLER_2, object_data=None)
        collection_sub.objects.link(obj_ctrl2)

        collection_sub = bpy.data.collections.new(self.__class__.DATA_NAME_SAMENAME_CONTAINER)
        collection_container.children.link(collection_sub)
        # 'Samename' objects are purposely not added to the collection here.

        if custom_cb is not None:
            custom_cb(self)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.lib_output_path),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )
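
    # The library file written by `init_lib_data()` therefore contains (summarizing the code above):
    #   LibCollection (collection)
    #     |- LibRigged (mesh object, parented to LibRig, with an Armature modifier using LibRig)
    #     |- LibRig (armature object, with a Copy Location constraint targeting LibController1)
    #     |- LibController1 (empty object)
    #     |- LibController2 (sub-collection containing the LibController2 empty object)
    #     |- LibCube (sub-collection, left empty unless a custom callback adds the 'samename' objects)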

    def edit_lib_data(self, custom_cb):
        bpy.ops.wm.open_mainfile(filepath=str(self.lib_output_path))
        custom_cb(self)
        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.lib_output_path),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )
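
    # NOTE: `edit_lib_data()` re-opens the library file, lets `custom_cb` modify it, and saves it
    # again. Re-opening the working .blend files afterwards is what triggers Blender's automatic
    # liboverride resync, which the tests below then validate.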

    def link_lib_data(self, num_collections, num_objects, num_meshes, num_armatures):
        link_dir = self.lib_output_path / "Collection"
        bpy.ops.wm.link(
            directory=str(link_dir),
            filename=self.__class__.DATA_NAME_CONTAINER,
            instance_collections=False,
            relative_path=False,
        )

        linked_collection_container = bpy.data.collections[self.__class__.DATA_NAME_CONTAINER]

        self.assertIsNotNone(linked_collection_container.library)
        self.assertIsNone(linked_collection_container.override_library)
        self.assertEqual(len(bpy.data.collections), num_collections)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.collections))
        self.assertEqual(len(bpy.data.objects), num_objects)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.objects))
        self.assertEqual(len(bpy.data.meshes), num_meshes)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.meshes))
        self.assertEqual(len(bpy.data.armatures), num_armatures)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.armatures))

        return linked_collection_container

    def link_liboverride_data(self, num_collections, num_objects, num_meshes, num_armatures):
        link_dir = self.test_output_path / "Collection"
        bpy.ops.wm.link(
            directory=str(link_dir),
            filename=self.__class__.DATA_NAME_CONTAINER,
            instance_collections=False,
            relative_path=False,
        )

        linked_collection_container = bpy.data.collections[
            self.__class__.DATA_NAME_CONTAINER, str(self.test_output_path)]
        self.assertIsNotNone(linked_collection_container.library)
        self.assertIsNotNone(linked_collection_container.override_library)
        self.assertEqual(len(bpy.data.collections), num_collections)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.collections))
        self.assertEqual(len(bpy.data.objects), num_objects)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.objects))
        self.assertEqual(len(bpy.data.meshes), num_meshes)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.meshes))
        self.assertEqual(len(bpy.data.armatures), num_armatures)
        self.assertTrue(all(id_.library is not None for id_ in bpy.data.armatures))

        self.liboverride_hierarchy_validate(linked_collection_container)

        return linked_collection_container
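
    # NOTE (summarized from the commit introducing the `subitem_*_id` data, see
    # https://projects.blender.org/blender/blender/pulls/110773): liboverride operations over RNA
    # collections of ID pointers also store a pointer to the item's ID itself, so that lookups match
    # on `name + ID` and stay unambiguous even when several libraries use the same ID names.
    # `IDPOINTER_ITEM_USE_ID` marks operations carrying that extra ID pointer; the validation below
    # checks these flags on the override operations of the hierarchy.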

    def liboverride_hierarchy_validate(self, root_collection):
        def liboverride_systemoverrideonly_hierarchy_validate(id_, id_root):
            if not id_.override_library:
                return
            self.assertEqual(id_.override_library.hierarchy_root, id_root)
            for op in id_.override_library.properties:
                for opop in op.operations:
                    self.assertIn('IDPOINTER_MATCH_REFERENCE', opop.flag)

        for coll_ in root_collection.children_recursive:
            liboverride_systemoverrideonly_hierarchy_validate(coll_, root_collection)
            if coll_.override_library:
                for op in coll_.override_library.properties:
                    for opop in op.operations:
                        self.assertIn('IDPOINTER_ITEM_USE_ID', opop.flag)
                        print(
                            coll_,
                            opop.flag,
                            opop.subitem_reference_name,
                            opop.subitem_reference_id,
                            opop.subitem_local_name,
                            opop.subitem_local_id)
                        self.assertIsNotNone(opop.subitem_reference_id.library)
                        self.assertTrue(opop.subitem_local_id.library is None if coll_.library is None
                                        else opop.subitem_local_id.library is not None)

        for ob_ in root_collection.all_objects:
            liboverride_systemoverrideonly_hierarchy_validate(ob_, root_collection)

    def test_link_and_override_resync(self):
        self.init_lib_data()
        self.reset()

        # NOTE: All counts below are in the form `local_ids + linked_ids`.
        linked_collection_container = self.link_lib_data(
            num_collections=0 + 3,
            num_objects=0 + 4,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        override_collection_container = linked_collection_container.override_hierarchy_create(
            bpy.context.scene,
            bpy.context.view_layer,
        )
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        # Objects and collections are duplicated as overrides (except for empty collection),
        # but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 2 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:2]))
        self.assertEqual(len(bpy.data.objects), 4 + 4)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        self.liboverride_hierarchy_validate(override_collection_container)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Create linked liboverrides file (for recursive resync).
        self.reset()

        self.link_liboverride_data(
            num_collections=0 + 5,
            num_objects=0 + 8,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path_recursive),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Re-open the lib file, and change its ID relationships.
        bpy.ops.wm.open_mainfile(filepath=str(self.lib_output_path))

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
        obj_armature_constraint = obj_armature.constraints[0]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
        obj_armature_constraint.target = obj_ctrl2

        bpy.ops.wm.save_as_mainfile(filepath=str(self.lib_output_path), check_existing=False, compress=False)

        # Re-open the main file, and check that automatic resync did its work correctly, remapping the target of the
        # armature constraint to controller 2, without creating unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path))

        override_collection_container = bpy.data.collections[self.__class__.DATA_NAME_CONTAINER]
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        self.assertEqual(len(bpy.data.collections), 2 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:2]))
        self.assertEqual(len(bpy.data.objects), 4 + 4)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
        self.assertIsNone(obj_armature.library)
        self.assertIsNotNone(obj_armature.override_library)
        self.assertIsNone(obj_ctrl2.library)
        self.assertIsNotNone(obj_ctrl2.override_library)
        self.assertEqual(obj_armature.constraints[0].target, obj_ctrl2)

        self.liboverride_hierarchy_validate(override_collection_container)

        # Re-open the 'recursive resync' file, and check that automatic recursive resync did its work correctly,
        # remapping the target of the linked liboverride armature constraint to controller 2, without creating
        # unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path_recursive))

        override_collection_container = bpy.data.collections[
            self.__class__.DATA_NAME_CONTAINER, str(self.test_output_path)]
        self.assertIsNotNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        test_output_path_lib = override_collection_container.library
        self.assertEqual(len(bpy.data.collections), 0 + 5)
        self.assertTrue(all((id_.override_library is not None)
                            for id_ in bpy.data.collections if id_.library == test_output_path_lib))
        self.assertEqual(len(bpy.data.objects), 0 + 8)
        self.assertTrue(all((id_.override_library is not None)
                            for id_ in bpy.data.objects if id_.library == test_output_path_lib))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG, str(self.test_output_path)]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2, str(self.test_output_path)]
        self.assertIsNotNone(obj_armature.override_library)
        self.assertIsNotNone(obj_ctrl2.override_library)
        self.assertEqual(obj_armature.constraints[0].target, obj_ctrl2)

        self.liboverride_hierarchy_validate(override_collection_container)

    def test_link_and_override_multiple(self):
        self.init_lib_data()
        self.reset()

        # NOTE: All counts below are in the form `local_ids + linked_ids`.
        linked_collection_container = self.link_lib_data(
            num_collections=0 + 3,
            num_objects=0 + 4,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        override_collection_containers = [linked_collection_container.override_hierarchy_create(
            bpy.context.scene,
            bpy.context.view_layer,
        ) for i in range(3)]
        for override_container in override_collection_containers:
            self.assertIsNone(override_container.library)
            self.assertIsNotNone(override_container.override_library)
            self.liboverride_hierarchy_validate(override_container)

        # Objects and collections are duplicated as overrides (except for empty collection),
        # but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 3 * 2 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:3 * 2]))
        self.assertEqual(len(bpy.data.objects), 3 * 4 + 4)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.objects[:3 * 4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Create linked liboverrides file (for recursive resync).
        self.reset()

        self.link_liboverride_data(
            num_collections=0 + 5,
            num_objects=0 + 8,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path_recursive),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Change the lib's ID relationships.
        def edit_lib_cb(self):
            obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
            obj_armature_constraint = obj_armature.constraints[0]
            obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
            obj_armature_constraint.target = obj_ctrl2
        self.edit_lib_data(edit_lib_cb)

        # Re-open the main file, and check that automatic resync did its work correctly, remapping the target of the
        # armature constraint to controller 2, without creating unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path))

        override_collection_container = bpy.data.collections[self.__class__.DATA_NAME_CONTAINER]
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        # Objects and collections are duplicated as overrides, but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 3 * 2 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:3 * 2]))
        self.assertEqual(len(bpy.data.objects), 3 * 4 + 4)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.objects[:3 * 4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
        self.assertIsNone(obj_armature.library)
        self.assertIsNotNone(obj_armature.override_library)
        self.assertIsNone(obj_ctrl2.library)
        self.assertIsNotNone(obj_ctrl2.override_library)
        self.assertEqual(obj_armature.constraints[0].target, obj_ctrl2)

        override_collection_containers = [
            bpy.data.collections[self.__class__.DATA_NAME_CONTAINER],
            bpy.data.collections[self.__class__.DATA_NAME_CONTAINER + ".001"],
            bpy.data.collections[self.__class__.DATA_NAME_CONTAINER + ".002"],
        ]
        for override_container in override_collection_containers:
            self.assertIsNone(override_container.library)
            self.assertIsNotNone(override_container.override_library)
            self.liboverride_hierarchy_validate(override_container)

        # Re-open the 'recursive resync' file, and check that automatic recursive resync did its work correctly,
        # remapping the target of the linked liboverride armature constraint to controller 2, without creating
        # unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path_recursive))

        linked_collection_container = bpy.data.collections[
            self.__class__.DATA_NAME_CONTAINER, str(self.test_output_path)]
        self.assertIsNotNone(linked_collection_container.library)
        self.assertIsNotNone(linked_collection_container.override_library)
        test_output_path_lib = linked_collection_container.library
        # Objects and collections are duplicated as overrides, but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 0 + 5)
        self.assertTrue(all((id_.override_library is not None)
                            for id_ in bpy.data.collections if id_.library == test_output_path_lib))
        self.assertEqual(len(bpy.data.objects), 0 + 8)
        self.assertTrue(all((id_.override_library is not None)
                            for id_ in bpy.data.objects if id_.library == test_output_path_lib))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG, str(self.test_output_path)]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2, str(self.test_output_path)]
        self.assertIsNotNone(obj_armature.override_library)
        self.assertIsNotNone(obj_ctrl2.override_library)
        self.assertEqual(obj_armature.constraints[0].target, obj_ctrl2)

        self.liboverride_hierarchy_validate(linked_collection_container)

    def test_link_and_override_of_override(self):
        self.init_lib_data()
        self.reset()

        # NOTE: All counts below are in the form `local_ids + linked_ids`.
        linked_collection_container = self.link_lib_data(
            num_collections=0 + 3,
            num_objects=0 + 4,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        override_collection_container = linked_collection_container.override_hierarchy_create(
            bpy.context.scene,
            bpy.context.view_layer,
        )
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)

        # Objects and collections are duplicated as overrides (except for empty collection),
        # but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 2 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:2]))
        self.assertEqual(len(bpy.data.objects), 4 + 4)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        self.liboverride_hierarchy_validate(override_collection_container)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Create liboverrides of liboverrides file.
        self.reset()

        linked_collection_container = self.link_liboverride_data(
            num_collections=0 + 5,
            num_objects=0 + 8,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        override_collection_container = linked_collection_container.override_hierarchy_create(
            bpy.context.scene,
            bpy.context.view_layer,
        )
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)

        # Objects and collections are duplicated as overrides (except for empty collection),
        # but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 2 + 5)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:2]))
        self.assertEqual(len(bpy.data.objects), 4 + 8)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        self.liboverride_hierarchy_validate(override_collection_container)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path_recursive),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Re-open the lib file, and change its ID relationships.
        bpy.ops.wm.open_mainfile(filepath=str(self.lib_output_path))

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
        obj_armature_constraint = obj_armature.constraints[0]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
        obj_armature_constraint.target = obj_ctrl2

        bpy.ops.wm.save_as_mainfile(filepath=str(self.lib_output_path), check_existing=False, compress=False)

        # Re-open the main file, and check that automatic resync did its work correctly, remapping the target of the
        # armature constraint to controller 2, without creating unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path))

        override_collection_container = bpy.data.collections[self.__class__.DATA_NAME_CONTAINER]
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        # Objects and collections are duplicated as overrides, but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 2 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:2]))
        self.assertEqual(len(bpy.data.objects), 4 + 4)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
        self.assertIsNone(obj_armature.library)
        self.assertIsNotNone(obj_armature.override_library)
        self.assertIsNone(obj_ctrl2.library)
        self.assertIsNotNone(obj_ctrl2.override_library)
        self.assertEqual(obj_armature.constraints[0].target, obj_ctrl2)

        self.liboverride_hierarchy_validate(override_collection_container)

        # Re-open the 'recursive resync' file, and check that automatic recursive resync did its work correctly,
        # remapping the target of the linked liboverride armature constraint to controller 2, without creating
        # unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path_recursive))

        override_collection_container = bpy.data.collections[self.__class__.DATA_NAME_CONTAINER]
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        # Objects and collections are duplicated as overrides, but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 2 + 5)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:2]))
        self.assertEqual(len(bpy.data.objects), 4 + 8)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:4]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        obj_armature = bpy.data.objects[self.__class__.DATA_NAME_RIG]
        obj_ctrl2 = bpy.data.objects[self.__class__.DATA_NAME_CONTROLLER_2]
        self.assertIsNotNone(obj_armature.override_library)
        self.assertIsNotNone(obj_ctrl2.override_library)
        self.assertEqual(obj_armature.constraints[0].target, obj_ctrl2)

        self.liboverride_hierarchy_validate(override_collection_container)

    def test_link_and_override_idnames_conflict(self):
        def init_lib_cb(self):
            # Add some 'samename' objects to the library.
            collection_sub = bpy.data.collections[self.__class__.DATA_NAME_SAMENAME_CONTAINER]
            obj_samename_0 = bpy.data.objects.new(self.__class__.DATA_NAME_SAMENAME_0, object_data=None)
            collection_sub.objects.link(obj_samename_0)
            obj_samename_3 = bpy.data.objects.new(self.__class__.DATA_NAME_SAMENAME_3, object_data=None)
            collection_sub.objects.link(obj_samename_3)
        self.init_lib_data(init_lib_cb)
        self.reset()

        # NOTE: All counts below are in the form `local_ids + linked_ids`.
        linked_collection_container = self.link_lib_data(
            num_collections=0 + 3,
            num_objects=0 + 6,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        override_collection_containers = [linked_collection_container.override_hierarchy_create(
            bpy.context.scene,
            bpy.context.view_layer,
        ) for i in range(3)]
        for override_container in override_collection_containers:
            self.assertIsNone(override_container.library)
            self.assertIsNotNone(override_container.override_library)
            self.liboverride_hierarchy_validate(override_container)

        # Objects and collections are duplicated as overrides (except for empty collection),
        # but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 3 * 3 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:3 * 3]))
        self.assertEqual(len(bpy.data.objects), 3 * 6 + 6)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.objects[:3 * 6]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        self.assertEqual(
            bpy.data.objects[self.__class__.DATA_NAME_SAMENAME_0].override_library.reference.name,
            self.__class__.DATA_NAME_SAMENAME_0)
        self.assertEqual(
            bpy.data.objects[self.__class__.DATA_NAME_SAMENAME_3].override_library.reference.name,
            self.__class__.DATA_NAME_SAMENAME_3)
        # These names are used by the second created liboverride, due to how
        # naming is currently handled when the original name is already used.
        self.assertEqual(
            bpy.data.objects[self.__class__.DATA_NAME_SAMENAME_1].override_library.reference.name,
            self.__class__.DATA_NAME_SAMENAME_0)
        self.assertEqual(
            bpy.data.objects[self.__class__.DATA_NAME_SAMENAME_2].override_library.reference.name,
            self.__class__.DATA_NAME_SAMENAME_3)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Create liboverrides of liboverrides file.
        self.reset()

        linked_collection_container = self.link_liboverride_data(
            num_collections=0 + 6,
            num_objects=0 + 12,
            num_meshes=0 + 1,
            num_armatures=0 + 1)

        override_collection_container = linked_collection_container.override_hierarchy_create(
            bpy.context.scene,
            bpy.context.view_layer,
        )
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)

        # Objects and collections are duplicated as overrides (except for empty collection),
        # but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 3 + 6)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:3]))
        self.assertEqual(len(bpy.data.objects), 6 + 12)
        self.assertTrue(all((id_.library is None and id_.override_library is not None) for id_ in bpy.data.objects[:6]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        self.liboverride_hierarchy_validate(override_collection_container)

        bpy.ops.wm.save_as_mainfile(
            filepath=str(self.test_output_path_recursive),
            check_existing=False,
            compress=False,
            relative_remap=False,
        )

        # Modify the names of 'samename' objects in the library to generate ID name collisions.
        def edit_lib_cb(self):
            obj_samename_0 = bpy.data.objects[self.__class__.DATA_NAME_SAMENAME_0]
            obj_samename_3 = bpy.data.objects[self.__class__.DATA_NAME_SAMENAME_3]
            obj_samename_0.name = self.__class__.DATA_NAME_SAMENAME_2
            obj_samename_3.name = self.__class__.DATA_NAME_SAMENAME_1
        self.edit_lib_data(edit_lib_cb)

        # Re-open the main file, and check that automatic resync did its work correctly, handling the renamed
        # 'samename' IDs without creating unexpected garbage IDs along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path))

        override_collection_container = bpy.data.collections[self.__class__.DATA_NAME_CONTAINER]
        self.assertIsNone(override_collection_container.library)
        self.assertIsNotNone(override_collection_container.override_library)
        # Objects and collections are duplicated as overrides, but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 3 * 3 + 3)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.collections[:3 * 3]))
        # Note that the 'missing' renamed objects from the library are now cleared as part of the resync process.
        self.assertEqual(len(bpy.data.objects), 3 * 6 + 6)
        self.assertTrue(all((id_.library is None and id_.override_library is not None)
                            for id_ in bpy.data.objects[:3 * 6]))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        override_collection_containers = [
            bpy.data.collections[self.__class__.DATA_NAME_CONTAINER],
            bpy.data.collections[self.__class__.DATA_NAME_CONTAINER + ".001"],
            bpy.data.collections[self.__class__.DATA_NAME_CONTAINER + ".002"],
        ]
        for override_container in override_collection_containers:
            self.assertIsNone(override_container.library)
            self.assertIsNotNone(override_container.override_library)
            self.liboverride_hierarchy_validate(override_container)

        # Re-open the 'recursive resync' file, and check that automatic recursive resync did its work correctly,
        # handling the renamed 'samename' IDs in the linked liboverrides without creating unexpected garbage IDs
        # along the line.
        bpy.ops.wm.open_mainfile(filepath=str(self.test_output_path_recursive))

        linked_collection_container = bpy.data.collections[
            self.__class__.DATA_NAME_CONTAINER, str(self.test_output_path)]
        self.assertIsNotNone(linked_collection_container.library)
        self.assertIsNotNone(linked_collection_container.override_library)

        test_output_path_lib = linked_collection_container.library
        # Objects and collections are duplicated as overrides, but meshes and armatures remain only linked data.
        self.assertEqual(len(bpy.data.collections), 3 + 6)
        self.assertTrue(all((id_.override_library is not None)
                            for id_ in bpy.data.collections if id_.library == test_output_path_lib))
        # Note that the 'missing' renamed objects from the library are now cleared as part of the resync process.
        self.assertEqual(len(bpy.data.objects), 6 + 12)
        self.assertTrue(all((id_.override_library is not None)
                            for id_ in bpy.data.objects if id_.library == test_output_path_lib))
        self.assertEqual(len(bpy.data.meshes), 0 + 1)
        self.assertEqual(len(bpy.data.armatures), 0 + 1)

        self.liboverride_hierarchy_validate(linked_collection_container)


class TestLibraryOverridesFromProxies(TestHelper):
    # Very basic test, could be improved/extended.
    # NOTE: This exercises much more than only liboverride proxy conversion, since it loads a fairly old .blend file.
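    # (Object proxies were removed in Blender 3.x; opening an older file that still uses them is
    # expected to convert them into library overrides, which is what this test relies on.)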

    MAIN_BLEND_FILE = "library_test_scene.blend"

    def __init__(self, args):
        super().__init__(args)

        self.test_dir = pathlib.Path(self.args.test_dir)
        self.assertTrue(self.test_dir.exists(),
                        msg='Test dir {0} should exist'.format(self.test_dir))

        bpy.ops.wm.read_homefile(use_empty=True, use_factory_startup=True)

    def test_open_linked_proxy_file(self):
        bpy.ops.wm.open_mainfile(filepath=str(self.test_dir / self.MAIN_BLEND_FILE))

        # Check stability of 'same name' fixing for IDs.
        direct_linked_A = bpy.data.libraries["lib.002"]
        self.assertEqual(direct_linked_A.filepath, os.path.join("//libraries", "direct_linked_A.blend"))

        self.assertEqual(bpy.data.objects['HairCubeArmatureGroup_proxy'].library, direct_linked_A)
        self.assertIsNotNone(bpy.data.objects['HairCubeArmatureGroup_proxy'].override_library)


TESTS = (
    TestLibraryOverrides,
    TestLibraryOverridesComplex,
    TestLibraryOverridesFromProxies,
)


def argparse_create():
    import argparse

    # When --help or no args are given, print this help.
    description = "Test library overrides of blend file."
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "--output-dir",
        dest="output_dir",
        default=".",
        help="Where to output temp saved blendfiles",
        required=False,
    )
    parser.add_argument(
        "--test-dir",
        dest="test_dir",
        default=".",
        help="Where are the test blendfiles",
        required=False,
    )

    return parser
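

# Example invocation (a sketch; exact paths are up to the caller):
#   ./blender.bin --background --python tests/python/bl_blendfile_library_overrides.py -- \
#       --output-dir=/tmp/ --test-dir=<directory containing library_test_scene.blend>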


def main():
    args = argparse_create().parse_args()

    # Don't write thumbnails into the home directory.
    bpy.context.preferences.filepaths.file_preview_type = 'NONE'

    for Test in TESTS:
        Test(args).run_all_tests()


if __name__ == '__main__':
    import sys
    sys.argv = [__file__] + \
        (sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else [])
    main()