From 1718635b265a2a18b54f811b8e29b70c3dcc822b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 11 Aug 2020 10:23:55 +0100 Subject: [PATCH 01/47] Unreal support for model and rig assets --- pype/plugins/unreal/load/load_rig.py | 122 ++++++++++++++++++ .../plugins/unreal/load/load_staticmeshfbx.py | 32 ++--- 2 files changed, 139 insertions(+), 15 deletions(-) create mode 100644 pype/plugins/unreal/load/load_rig.py diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py new file mode 100644 index 0000000000..dcbc1f4105 --- /dev/null +++ b/pype/plugins/unreal/load/load_rig.py @@ -0,0 +1,122 @@ +from avalon import api +from avalon import unreal as avalon_unreal +import unreal + + +class SkeletalMeshFBXLoader(api.Loader): + """Load Unreal SkeletalMesh from FBX""" + + families = ["rig"] + label = "Import FBX Skeletal Mesh" + representations = ["fbx"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, data): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + tools = unreal.AssetToolsHelpers().get_asset_tools() + temp_dir, temp_name = tools.create_unique_asset_name( + "/Game/{}".format(name), "_TMP" + ) + + unreal.EditorAssetLibrary.make_directory(temp_dir) + + asset = context.get('asset') + asset_name = asset.get('name') + + destination_name = "{}_{}".format(asset_name, name) + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', temp_dir) + task.set_editor_property('destination_name', destination_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + task.options = unreal.FbxImportUI() + task.options.set_editor_property('create_physics_asset', True) + task.options.set_editor_property('import_as_skeletal', True) + task.options.set_editor_property('import_animations', False) + + # set to import normals, otherwise Unreal will compute them + # and it will take a long time, depending on the size of the mesh + task.options.skeletal_mesh_import_data.set_editor_property( + 'normal_import_method', + unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS + ) + + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + + imported_assets = unreal.EditorAssetLibrary.list_assets( + temp_dir, recursive=True, include_folder=True + ) + new_dir = avalon_unreal.containerise( + name, namespace, imported_assets, context, self.__class__.__name__) + + asset_content = unreal.EditorAssetLibrary.list_assets( + new_dir, recursive=True, include_folder=True + ) + + unreal.EditorAssetLibrary.delete_directory(temp_dir) + + return asset_content + + def update(self, container, representation): + node = container["objectName"] + source_path = 
api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + # strip suffix + task.set_editor_property('destination_name', node[:-4]) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + task.options = unreal.FbxImportUI() + task.options.set_editor_property('create_physics_asset', False) + task.options.set_editor_property('import_as_skeletal', True) + task.options.set_editor_property('import_animations', False) + + task.options.skeletal_mesh_import_data.set_editor_property( + 'normal_import_method', + unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS + ) + + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + avalon_unreal.imprint( + container_path, {"_id": str(representation["_id"])}) + + def remove(self, container): + unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index 4c27f9aa92..704fdb875e 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -6,7 +6,7 @@ import unreal class StaticMeshFBXLoader(api.Loader): """Load Unreal StaticMesh from FBX""" - families = ["unrealStaticMesh"] + families = ["model", "unrealStaticMesh"] label = "Import FBX Static Mesh" representations = ["fbx"] icon = "cube" @@ -44,16 +44,18 @@ class StaticMeshFBXLoader(api.Loader): task = unreal.AssetImportTask() - task.filename = self.fname - task.destination_path = temp_dir - task.destination_name = name - task.replace_existing = False - task.automated = True - task.save = True + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', temp_dir) + task.set_editor_property('destination_name', name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) # set import options here task.options = unreal.FbxImportUI() - task.options.import_animations = False + task.options.set_editor_property( + 'automated_import_should_detect_type', False) + task.options.set_editor_property('import_animations', False) unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 @@ -78,16 +80,16 @@ class StaticMeshFBXLoader(api.Loader): task = unreal.AssetImportTask() - task.filename = source_path - task.destination_path = destination_path + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) # strip suffix - task.destination_name = node[:-4] - task.replace_existing = True - task.automated = True - task.save = True + task.set_editor_property('destination_name', node[:-4]) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) task.options = unreal.FbxImportUI() - task.options.import_animations = False + task.options.set_editor_property('import_animations', False) # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) From 9d737c2e6337abc0ac87dbea363938bb956c1084 Mon 
Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 11 Aug 2020 10:42:41 +0100 Subject: [PATCH 02/47] Implemented layout asset in Unreal This layout is made to work with Blender. The layout exports a json file with the reference to the blend files in ftrack that generated the fbx that has been imported in Unreal. In Blender, when the layout is loaded, pype will load the assets from the blend files and set the transform from the layout. --- pype/plugins/unreal/create/create_layout.py | 42 +++++++ .../unreal/publish/collect_current_file.py | 19 +++ .../unreal/publish/collect_instances.py | 9 +- pype/plugins/unreal/publish/extract_layout.py | 109 ++++++++++++++++++ 4 files changed, 174 insertions(+), 5 deletions(-) create mode 100644 pype/plugins/unreal/create/create_layout.py create mode 100644 pype/plugins/unreal/publish/collect_current_file.py create mode 100644 pype/plugins/unreal/publish/extract_layout.py diff --git a/pype/plugins/unreal/create/create_layout.py b/pype/plugins/unreal/create/create_layout.py new file mode 100644 index 0000000000..82cef43cee --- /dev/null +++ b/pype/plugins/unreal/create/create_layout.py @@ -0,0 +1,42 @@ +from unreal import EditorLevelLibrary as ell +from pype.hosts.unreal.plugin import Creator +from avalon.unreal import ( + instantiate, +) + + +class CreateLayout(Creator): + """Layout output for character rigs""" + + name = "layoutMain" + label = "Layout" + family = "layout" + icon = "cubes" + + root = "/Game" + suffix = "_INS" + + def __init__(self, *args, **kwargs): + super(CreateLayout, self).__init__(*args, **kwargs) + + def process(self): + data = self.data + + name = data["subset"] + + selection = [] + # if (self.options or {}).get("useSelection"): + # sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + # selection = [a.get_path_name() for a in sel_objects] + + data["level"] = ell.get_editor_world().get_path_name() + + data["members"] = [] + + if (self.options or {}).get("useSelection"): + # Set as members the selected actors + for actor in ell.get_selected_level_actors(): + data["members"].append("{}.{}".format( + actor.get_outer().get_name(), actor.get_name())) + + instantiate(self.root, name, data, selection, self.suffix) diff --git a/pype/plugins/unreal/publish/collect_current_file.py b/pype/plugins/unreal/publish/collect_current_file.py new file mode 100644 index 0000000000..4e828933bb --- /dev/null +++ b/pype/plugins/unreal/publish/collect_current_file.py @@ -0,0 +1,19 @@ +import unreal + +import pyblish.api + + +class CollectUnrealCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.5 + label = "Unreal Current File" + hosts = ['unreal'] + + def process(self, context): + """Inject the current working file""" + current_file = unreal.Paths.get_project_file_path() + context.data['currentFile'] = current_file + + assert current_file != '', "Current file is empty. " \ + "Save the file before continuing." 
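For reference, the extract_layout.py plugin added later in this patch writes one element per mesh actor into the layout JSON. A rough sketch of a single element follows; the keys match the extractor, but every value is a placeholder for illustration (the real "reference" is the ObjectId of the published blend representation, and "file_path" is taken from the mesh's FBX import data):

    # Illustrative layout element; all values below are placeholders.
    json_element = {
        "reference": "5f2d3c4b9a1e8f0012ab34cd",    # ObjectId of the "blend" representation
        "family": "rig",                            # or "model"
        "instance_name": "hero_rig_01",             # Unreal actor name
        "asset_name": "hero_rigMain_01",            # name of the imported mesh asset
        "file_path": "/path/to/published/hero_rigMain.fbx",  # first filename of the FBX import data
        "transform": {
            "translation": {"x": 120.0, "y": -40.0, "z": 0.0},
            "rotation": {"x": 0.0, "y": 0.0, "z": 1.5708},    # radians
            "scale": {"x": 1.0, "y": 1.0, "z": 1.0}
        }
    }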
diff --git a/pype/plugins/unreal/publish/collect_instances.py b/pype/plugins/unreal/publish/collect_instances.py index 766a73028c..62676f9938 100644 --- a/pype/plugins/unreal/publish/collect_instances.py +++ b/pype/plugins/unreal/publish/collect_instances.py @@ -1,5 +1,5 @@ +import ast import unreal - import pyblish.api @@ -35,13 +35,10 @@ class CollectInstances(pyblish.api.ContextPlugin): ) # content of container - members = unreal.EditorAssetLibrary.list_assets( - asset.get_path_name(), recursive=True, include_folder=True - ) + members = ast.literal_eval(data.get("members")) self.log.debug(members) self.log.debug(asset.get_path_name()) # remove instance container - members.remove(asset.get_path_name()) self.log.info("Creating instance for {}".format(asset.get_name())) instance = context.create_instance(asset.get_name()) @@ -50,6 +47,8 @@ class CollectInstances(pyblish.api.ContextPlugin): # Store the exact members of the object set instance.data["setMembers"] = members instance.data["families"] = [data.get("family")] + instance.data["level"] = data.get("level") + instance.data["parent"] = data.get("parent") label = "{0} ({1})".format(asset.get_name()[:-4], data["asset"]) diff --git a/pype/plugins/unreal/publish/extract_layout.py b/pype/plugins/unreal/publish/extract_layout.py new file mode 100644 index 0000000000..eadf186260 --- /dev/null +++ b/pype/plugins/unreal/publish/extract_layout.py @@ -0,0 +1,109 @@ +import os +import json +import math + +import unreal +from unreal import EditorLevelLibrary as ell +from unreal import EditorAssetLibrary as eal + +import pype.api +from avalon import io + + +class ExtractLayout(pype.api.Extractor): + """Extract a layout.""" + + label = "Extract Layout" + hosts = ["unreal"] + families = ["layout"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + + # Perform extraction + self.log.info("Performing extraction..") + + # Check if the loaded level is the same of the instance + current_level = ell.get_editor_world().get_path_name() + assert current_level == instance.data.get("level"), \ + "Wrong level loaded" + + json_data = [] + + for member in instance[:]: + actor = ell.get_actor_reference(member) + mesh = None + + # Check type the type of mesh + if actor.get_class().get_name() == 'SkeletalMeshActor': + mesh = actor.skeletal_mesh_component.skeletal_mesh + elif actor.get_class().get_name() == 'StaticMeshActor': + mesh = actor.static_mesh_component.static_mesh + + if mesh: + # Search the reference to the Asset Container for the object + path = unreal.Paths.get_path(mesh.get_path_name()) + filter = unreal.ARFilter( + class_names=["AssetContainer"], package_paths=[path]) + ar = unreal.AssetRegistryHelpers.get_asset_registry() + asset_container = ar.get_assets(filter)[0].get_asset() + + parent = eal.get_metadata_tag(asset_container, "parent") + family = eal.get_metadata_tag(asset_container, "family") + + self.log.info("Parent: {}".format(parent)) + blend = io.find_one( + { + "type": "representation", + "parent": io.ObjectId(parent), + "name": "blend" + }, + projection={"_id": True}) + blend_id = blend["_id"] + + json_element = {} + json_element["reference"] = str(blend_id) + json_element["family"] = family + json_element["instance_name"] = actor.get_name() + json_element["asset_name"] = mesh.get_name() + import_data = mesh.get_editor_property("asset_import_data") + json_element["file_path"] = import_data.get_first_filename() + transform = actor.get_actor_transform() + + 
json_element["transform"] = { + "translation": { + "x": transform.translation.x, + "y": transform.translation.y, + "z": transform.translation.z + }, + "rotation": { + "x": math.radians(transform.rotation.euler().x), + "y": math.radians(transform.rotation.euler().y), + "z": math.radians(transform.rotation.euler().z), + }, + "scale": { + "x": transform.scale3d.x, + "y": transform.scale3d.y, + "z": transform.scale3d.z + } + } + json_data.append(json_element) + + json_filename = "{}.json".format(instance.name) + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, fp=file, indent=2) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation) From 3c6525890196a0ba1665b3c0d343283181b7b89d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 12 Aug 2020 17:18:23 +0100 Subject: [PATCH 03/47] Blender can load layout exported from Unreal as json --- pype/hosts/blender/plugin.py | 15 +- pype/plugins/blender/load/load_layout.py | 168 ++++++++++++++++++++++- 2 files changed, 178 insertions(+), 5 deletions(-) diff --git a/pype/hosts/blender/plugin.py b/pype/hosts/blender/plugin.py index 07080a86c4..4df23cf7bb 100644 --- a/pype/hosts/blender/plugin.py +++ b/pype/hosts/blender/plugin.py @@ -175,7 +175,17 @@ class AssetLoader(api.Loader): # just re-using the collection assert Path(self.fname).exists(), f"{self.fname} doesn't exist." - self.process_asset( + asset = context["asset"]["name"] + subset = context["subset"]["name"] + unique_number = get_unique_number( + asset, subset + ) + namespace = namespace or f"{asset}_{unique_number}" + name = name or asset_name( + asset, subset, unique_number + ) + + nodes = self.process_asset( context=context, name=name, namespace=namespace, @@ -183,7 +193,6 @@ class AssetLoader(api.Loader): ) # Only containerise if anything was loaded by the Loader. 
- nodes = self[:] if not nodes: return None @@ -201,7 +210,7 @@ class AssetLoader(api.Loader): asset = context["asset"]["name"] subset = context["subset"]["name"] - instance_name = asset_name(asset, subset, namespace) + instance_name = asset_name(asset, subset, unique_number) + '_CON' return self._get_instance_collection(instance_name, nodes) diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index 2c8948dd48..166e862f8d 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -1,5 +1,8 @@ """Load a layout in Blender.""" +import json +import math + import logging from pathlib import Path from pprint import pformat @@ -150,8 +153,9 @@ class BlendLayoutLoader(plugin.AssetLoader): # Save the list of objects in the metadata container container_metadata["objects"] = obj_container.all_objects - nodes = list(container.objects) - nodes.append(container) + # nodes = list(container.objects) + # nodes.append(container) + nodes = [container] self[:] = nodes return nodes @@ -271,3 +275,163 @@ class BlendLayoutLoader(plugin.AssetLoader): bpy.data.collections.remove(collection) return True + + +class UnrealLayoutLoader(plugin.AssetLoader): + """Load layout published from Unreal.""" + + families = ["layout"] + representations = ["json"] + + label = "Link Layout" + icon = "code-fork" + color = "orange" + + def _get_loader(self, loaders, family): + name = "" + if family == 'rig': + name = "BlendRigLoader" + elif family == 'model': + name = "BlendModelLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + def set_transform(self, object, transform): + location = transform.get('translation') + rotation = transform.get('rotation') + scale = transform.get('scale') + + object.location = ( + location.get('x') / 10, + location.get('y') / 10, + location.get('z') / 10 + ) + object.rotation_euler = ( + rotation.get('x'), + rotation.get('y'), + rotation.get('z') + (math.pi / 2) + ) + object.scale = ( + scale.get('x') / 10, + scale.get('y') / 10, + scale.get('z') / 10 + ) + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + lib_container = plugin.asset_name( + asset, subset + ) + unique_number = plugin.get_unique_number( + asset, subset + ) + namespace = namespace or f"{asset}_{unique_number}" + container_name = plugin.asset_name( + asset, subset, unique_number + ) + + container = bpy.data.collections.new(lib_container) + container.name = container_name + blender.pipeline.containerise_existing( + container, + name, + namespace, + context, + self.__class__.__name__, + ) + + container_metadata = container.get( + blender.pipeline.AVALON_PROPERTY) + + container_metadata["libpath"] = libpath + container_metadata["lib_container"] = lib_container + + with open(libpath, "r") as fp: + data = json.load(fp) + + scene = bpy.context.scene + layout_collection = bpy.data.collections.new(container_name) + scene.collection.children.link(layout_collection) + + all_loaders = api.discover(api.Loader) + + for element in data: + reference = element.get('reference') + family = element.get('family') + + loaders = 
api.loaders_from_representation(all_loaders, reference) + loader = self._get_loader(loaders, family) + + if not loader: + continue + + instance_name = element.get('instance_name') + + element_container = api.load( + loader, + reference, + namespace=instance_name + ) + + if not element_container: + continue + + element_metadata = element_container.get( + blender.pipeline.AVALON_PROPERTY) + + # Unlink the object's collection from the scene collection and + # link it in the layout collection + element_collection = element_metadata.get('obj_container') + scene.collection.children.unlink(element_collection) + layout_collection.children.link(element_collection) + + objects = element_metadata.get('objects') + element_metadata['instance_name'] = instance_name + + objects_to_transform = [] + + if family == 'rig': + for o in objects: + if o.type == 'ARMATURE': + objects_to_transform.append(o) + break + elif family == 'model': + objects_to_transform = objects + + for o in objects_to_transform: + self.set_transform(o, element.get('transform')) + + container_metadata["obj_container"] = layout_collection + + # Save the list of objects in the metadata container + container_metadata["objects"] = layout_collection.all_objects + + nodes = [container] + self[:] = nodes + return nodes + + def update(self, container: Dict, representation: Dict): + pass + + def remove(self, container: Dict) -> bool: + pass From 1c27e661091845a74d7cc50b0963f599390eaae3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 13 Aug 2020 15:19:23 +0100 Subject: [PATCH 04/47] Updated the loading of the layout to support nested containers --- pype/hosts/blender/plugin.py | 34 +++++++++++++----------- pype/plugins/blender/load/load_layout.py | 18 ++++++++----- 2 files changed, 31 insertions(+), 21 deletions(-) diff --git a/pype/hosts/blender/plugin.py b/pype/hosts/blender/plugin.py index 4df23cf7bb..ddc453dd34 100644 --- a/pype/hosts/blender/plugin.py +++ b/pype/hosts/blender/plugin.py @@ -29,15 +29,19 @@ def get_unique_number( c for c in bpy.data.collections if c.name == 'AVALON_CONTAINERS' ] - loaded_assets = [] + containers = [] + # First, add the children of avalon containers for c in avalon_containers: - loaded_assets.extend(c.children) - collections_names = [ - c.name for c in loaded_assets + containers.extend(c.children) + # then keep looping to include all the children + for c in containers: + containers.extend(c.children) + container_names = [ + c.name for c in containers ] count = 1 name = f"{asset}_{count:0>2}_{subset}_CON" - while name in collections_names: + while name in container_names: count += 1 name = f"{asset}_{count:0>2}_{subset}_CON" return f"{count:0>2}" @@ -197,16 +201,16 @@ class AssetLoader(api.Loader): return None # Only containerise if it's not already a collection from a .blend file. 
- representation = context["representation"]["name"] - if representation != "blend": - from avalon.blender.pipeline import containerise - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__, - ) + # representation = context["representation"]["name"] + # if representation != "blend": + # from avalon.blender.pipeline import containerise + # return containerise( + # name=name, + # namespace=namespace, + # nodes=nodes, + # context=context, + # loader=self.__class__.__name__, + # ) asset = context["asset"]["name"] subset = context["subset"]["name"] diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index 166e862f8d..260112988c 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -196,7 +196,7 @@ class BlendLayoutLoader(plugin.AssetLoader): assert libpath.is_file(), ( f"The file doesn't exist: {libpath}" ) - assert extension in pype.hosts.blender.plugin.VALID_EXTENSIONS, ( + assert extension in plugin.VALID_EXTENSIONS, ( f"Unsupported file: {libpath}" ) @@ -350,17 +350,17 @@ class UnrealLayoutLoader(plugin.AssetLoader): asset, subset, unique_number ) - container = bpy.data.collections.new(lib_container) - container.name = container_name + layout_container = bpy.data.collections.new(lib_container) + layout_container.name = container_name blender.pipeline.containerise_existing( - container, + layout_container, name, namespace, context, self.__class__.__name__, ) - container_metadata = container.get( + container_metadata = layout_container.get( blender.pipeline.AVALON_PROPERTY) container_metadata["libpath"] = libpath @@ -375,6 +375,9 @@ class UnrealLayoutLoader(plugin.AssetLoader): all_loaders = api.discover(api.Loader) + avalon_container = bpy.data.collections.get( + blender.pipeline.AVALON_CONTAINERS) + for element in data: reference = element.get('reference') family = element.get('family') @@ -396,6 +399,9 @@ class UnrealLayoutLoader(plugin.AssetLoader): if not element_container: continue + avalon_container.children.unlink(element_container) + layout_container.children.link(element_container) + element_metadata = element_container.get( blender.pipeline.AVALON_PROPERTY) @@ -426,7 +432,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): # Save the list of objects in the metadata container container_metadata["objects"] = layout_collection.all_objects - nodes = [container] + nodes = [layout_container] self[:] = nodes return nodes From 5d1d88a21fd4455bf16cf79e75da30991f6bcf7c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 13 Aug 2020 15:25:16 +0100 Subject: [PATCH 05/47] Implemented remove method for layout asset --- pype/plugins/blender/load/load_layout.py | 53 +++++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index 260112988c..5e5725b111 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -287,6 +287,26 @@ class UnrealLayoutLoader(plugin.AssetLoader): icon = "code-fork" color = "orange" + def _remove_objects(self, objects): + for obj in list(objects): + if obj.type == 'ARMATURE': + bpy.data.armatures.remove(obj.data) + elif obj.type == 'MESH': + bpy.data.meshes.remove(obj.data) + elif obj.type == 'CAMERA': + bpy.data.cameras.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) + else: + self.log.error( + f"Object {obj.name} of 
type {obj.type} not recognized.") + + def _remove_collections(self, collection): + if collection.children: + for child in collection.children: + self._remove_collections(child) + bpy.data.collections.remove(child) + def _get_loader(self, loaders, family): name = "" if family == 'rig': @@ -440,4 +460,35 @@ class UnrealLayoutLoader(plugin.AssetLoader): pass def remove(self, container: Dict) -> bool: - pass + """Remove an existing container from a Blender scene. + + Arguments: + container (avalon-core:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + """ + layout_container = bpy.data.collections.get( + container["objectName"] + ) + if not layout_container: + return False + # assert not (collection.children), ( + # "Nested collections are not supported." + # ) + + layout_container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + obj_container = plugin.get_local_collection_with_name( + layout_container_metadata["obj_container"].name + ) + objects = obj_container.all_objects + + self._remove_objects(objects) + self._remove_collections(obj_container) + bpy.data.collections.remove(obj_container) + self._remove_collections(layout_container) + bpy.data.collections.remove(layout_container) + + return True From daba0c00d979f49d8e9df4b584a8fc890ebd529b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 17 Aug 2020 09:19:41 +0100 Subject: [PATCH 06/47] Implemented update for the layout --- pype/hosts/blender/plugin.py | 2 +- pype/plugins/blender/load/load_layout.py | 203 +++++++++++++++++------ 2 files changed, 155 insertions(+), 50 deletions(-) diff --git a/pype/hosts/blender/plugin.py b/pype/hosts/blender/plugin.py index ddc453dd34..413718d5e0 100644 --- a/pype/hosts/blender/plugin.py +++ b/pype/hosts/blender/plugin.py @@ -7,7 +7,7 @@ import bpy from avalon import api -VALID_EXTENSIONS = [".blend"] +VALID_EXTENSIONS = [".blend", ".json"] def asset_name( diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index 5e5725b111..d1d78baeae 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -1,6 +1,7 @@ """Load a layout in Blender.""" import json +from logging import log, warning import math import logging @@ -323,69 +324,30 @@ class UnrealLayoutLoader(plugin.AssetLoader): return None - def set_transform(self, object, transform): + def set_transform(self, obj, transform): location = transform.get('translation') rotation = transform.get('rotation') scale = transform.get('scale') - object.location = ( + # Y position is inverted in sign because Unreal and Blender have the + # Y axis mirrored + obj.location = ( location.get('x') / 10, - location.get('y') / 10, + -location.get('y') / 10, location.get('z') / 10 ) - object.rotation_euler = ( + obj.rotation_euler = ( rotation.get('x'), rotation.get('y'), - rotation.get('z') + (math.pi / 2) + -rotation.get('z') ) - object.scale = ( + obj.scale = ( scale.get('x') / 10, scale.get('y') / 10, scale.get('z') / 10 ) - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None): - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - 
unique_number = plugin.get_unique_number( - asset, subset - ) - namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - - layout_container = bpy.data.collections.new(lib_container) - layout_container.name = container_name - blender.pipeline.containerise_existing( - layout_container, - name, - namespace, - context, - self.__class__.__name__, - ) - - container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container - + def _process(self, libpath, layout_container, container_name, actions): with open(libpath, "r") as fp: data = json.load(fp) @@ -447,6 +409,61 @@ class UnrealLayoutLoader(plugin.AssetLoader): for o in objects_to_transform: self.set_transform(o, element.get('transform')) + if actions: + if o.type == 'ARMATURE': + action = actions.get(instance_name, None) + + if action: + if o.animation_data is None: + o.animation_data_create() + o.animation_data.action = action + + return layout_collection + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + lib_container = plugin.asset_name( + asset, subset + ) + unique_number = plugin.get_unique_number( + asset, subset + ) + namespace = namespace or f"{asset}_{unique_number}" + container_name = plugin.asset_name( + asset, subset, unique_number + ) + + layout_container = bpy.data.collections.new(container_name) + blender.pipeline.containerise_existing( + layout_container, + name, + namespace, + context, + self.__class__.__name__, + ) + + container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + + container_metadata["libpath"] = libpath + container_metadata["lib_container"] = lib_container + + layout_collection = self._process( + libpath, layout_container, container_name, None) + container_metadata["obj_container"] = layout_collection # Save the list of objects in the metadata container @@ -457,7 +474,95 @@ class UnrealLayoutLoader(plugin.AssetLoader): return nodes def update(self, container: Dict, representation: Dict): - pass + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. 
+ """ + print(container) + print(container["objectName"]) + layout_container = bpy.data.collections.get( + container["objectName"] + ) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert layout_container, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + layout_container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + collection_libpath = layout_container_metadata["libpath"] + lib_container = layout_container_metadata["lib_container"] + obj_container = plugin.get_local_collection_with_name( + layout_container_metadata["obj_container"].name + ) + objects = obj_container.all_objects + + container_name = obj_container.name + + normalized_collection_libpath = ( + str(Path(bpy.path.abspath(collection_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_collection_libpath, + normalized_libpath, + ) + if normalized_collection_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + actions = {} + + for obj in objects: + if obj.type == 'ARMATURE': + if obj.animation_data and obj.animation_data.action: + obj_cont_name = obj.get( + blender.pipeline.AVALON_PROPERTY).get('container_name') + obj_cont = plugin.get_local_collection_with_name( + obj_cont_name) + element_metadata = obj_cont.get( + blender.pipeline.AVALON_PROPERTY) + instance_name = element_metadata.get('instance_name') + actions[instance_name] = obj.animation_data.action + + self._remove_objects(objects) + self._remove_collections(obj_container) + bpy.data.collections.remove(obj_container) + self._remove_collections(layout_container) + # bpy.data.collections.remove(layout_container) + + layout_collection = self._process( + libpath, layout_container, container_name, actions) + + layout_container_metadata["obj_container"] = layout_collection + layout_container_metadata["objects"] = layout_collection.all_objects + layout_container_metadata["libpath"] = str(libpath) + layout_container_metadata["representation"] = str(representation["_id"]) + def remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
From c80dfdf9a2b00a1d0e00bc1a2a26cdced2cb5f6a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 24 Aug 2020 10:00:25 +0100 Subject: [PATCH 07/47] Fix rotation when loading from Unreal --- pype/plugins/blender/load/load_layout.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index d1d78baeae..96a6be952f 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -338,7 +338,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): ) obj.rotation_euler = ( rotation.get('x'), - rotation.get('y'), + -rotation.get('y'), -rotation.get('z') ) obj.scale = ( From f95f99740605ac772cc644eff24cec58380512bc Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 30 Sep 2020 10:48:28 +0100 Subject: [PATCH 08/47] Improved generation of custom context --- pype/hosts/blender/plugin.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pype/hosts/blender/plugin.py b/pype/hosts/blender/plugin.py index 413718d5e0..d0b81148c3 100644 --- a/pype/hosts/blender/plugin.py +++ b/pype/hosts/blender/plugin.py @@ -63,20 +63,20 @@ def create_blender_context(active: Optional[bpy.types.Object] = None, if not isinstance(selected, list): selected = [selected] + override_context = bpy.context.copy() + for win in bpy.context.window_manager.windows: for area in win.screen.areas: if area.type == 'VIEW_3D': for region in area.regions: if region.type == 'WINDOW': - override_context = { - 'window': win, - 'screen': win.screen, - 'area': area, - 'region': region, - 'scene': bpy.context.scene, - 'active_object': active, - 'selected_objects': selected - } + override_context['window'] = win + override_context['screen'] = win.screen + override_context['area'] = area + override_context['region'] = region + override_context['scene'] = bpy.context.scene + override_context['active_object'] = active + override_context['selected_objects'] = selected return override_context raise Exception("Could not create a custom Blender context.") From ea248a4386a6e57f367423396f5458962d613e82 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 30 Sep 2020 15:00:36 +0100 Subject: [PATCH 09/47] Implemented creator for setdress and updated creator for animation --- .../blender/create/create_animation.py | 32 +++---------------- .../plugins/blender/create/create_setdress.py | 24 ++++++++++++++ 2 files changed, 29 insertions(+), 27 deletions(-) create mode 100644 pype/plugins/blender/create/create_setdress.py diff --git a/pype/plugins/blender/create/create_animation.py b/pype/plugins/blender/create/create_animation.py index de74f9a358..acfd6ac1f3 100644 --- a/pype/plugins/blender/create/create_animation.py +++ b/pype/plugins/blender/create/create_animation.py @@ -2,12 +2,11 @@ import bpy -from avalon import api -from avalon.blender import Creator, lib +from avalon import api, blender import pype.hosts.blender.plugin -class CreateAnimation(Creator): +class CreateAnimation(blender.Creator): """Animation output for character rigs""" name = "animationMain" @@ -16,37 +15,16 @@ class CreateAnimation(Creator): icon = "male" def process(self): - asset = self.data["asset"] subset = self.data["subset"] name = pype.hosts.blender.plugin.asset_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) - - # Add the rig object and all 
the children meshes to - # a set and link them all at the end to avoid duplicates. - # Blender crashes if trying to link an object that is already linked. - # This links automatically the children meshes if they were not - # selected, and doesn't link them twice if they, insted, - # were manually selected by the user. - objects_to_link = set() + blender.lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): - - for obj in lib.get_selection(): - - objects_to_link.add(obj) - - if obj.type == 'ARMATURE': - - for subobj in obj.children: - - objects_to_link.add(subobj) - - for obj in objects_to_link: - - collection.objects.link(obj) + for obj in blender.lib.get_selection(): + collection.objects.link(obj) return collection diff --git a/pype/plugins/blender/create/create_setdress.py b/pype/plugins/blender/create/create_setdress.py new file mode 100644 index 0000000000..06acf716e5 --- /dev/null +++ b/pype/plugins/blender/create/create_setdress.py @@ -0,0 +1,24 @@ +import bpy + +from avalon import api, blender +import pype.hosts.blender.plugin + +class CreateSetDress(blender.Creator): + """A grouped package of loaded content""" + + name = "setdressMain" + label = "Set Dress" + family = "setdress" + icon = "cubes" + defaults = ["Main", "Anim"] + + def process(self): + asset = self.data["asset"] + subset = self.data["subset"] + name = pype.hosts.blender.plugin.asset_name(asset, subset) + collection = bpy.data.collections.new(name=name) + bpy.context.scene.collection.children.link(collection) + self.data['task'] = api.Session.get('AVALON_TASK') + blender.lib.imprint(collection, self.data) + + return collection From e4e43872015b0f7c77f751bec14a67e1d43608e8 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 30 Sep 2020 15:09:13 +0100 Subject: [PATCH 10/47] Updated naming for some variables --- pype/plugins/blender/load/load_rig.py | 50 +++++++++++++-------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/pype/plugins/blender/load/load_rig.py b/pype/plugins/blender/load/load_rig.py index 7b60b20064..518f389d0b 100644 --- a/pype/plugins/blender/load/load_rig.py +++ b/pype/plugins/blender/load/load_rig.py @@ -37,7 +37,7 @@ class BlendRigLoader(plugin.AssetLoader): bpy.data.collections.remove(obj_container) def _process( - self, libpath, lib_container, container_name, + self, libpath, lib_container, collection_name, action, parent_collection ): relative = bpy.context.preferences.filepaths.use_relative_paths @@ -54,7 +54,7 @@ class BlendRigLoader(plugin.AssetLoader): parent.children.link(bpy.data.collections[lib_container]) rig_container = parent.children[lib_container].make_local() - rig_container.name = container_name + rig_container.name = collection_name meshes = [] armatures = [ @@ -63,25 +63,28 @@ class BlendRigLoader(plugin.AssetLoader): ] for child in rig_container.children: - local_child = plugin.prepare_data(child, container_name) - meshes.extend(local_child.objects) + local_child = plugin.prepare_data(child, collection_name) + meshes.extend(local_child.objects) + + # for obj in bpy.data.objects: + # obj.select_set(False) # Link meshes first, then armatures. # The armature is unparented for all the non-local meshes, # when it is made local. 
for obj in meshes + armatures: - local_obj = plugin.prepare_data(obj, container_name) - plugin.prepare_data(local_obj.data, container_name) - - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - if local_obj.type == 'ARMATURE' and action is not None: - local_obj.animation_data.action = action - + local_obj = plugin.prepare_data(obj, collection_name) + plugin.prepare_data(local_obj.data, collection_name) + + if not local_obj.get(blender.pipeline.AVALON_PROPERTY): + local_obj[blender.pipeline.AVALON_PROPERTY] = dict() + + avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] + avalon_info.update({"container_name": collection_name + '_CON'}) + + if local_obj.type == 'ARMATURE' and action is not None: + local_obj.animation_data.action = action + rig_container.pop(blender.pipeline.AVALON_PROPERTY) bpy.ops.object.select_all(action='DESELECT') @@ -99,7 +102,6 @@ class BlendRigLoader(plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname asset = context["asset"]["name"] subset = context["subset"]["name"] @@ -110,12 +112,11 @@ class BlendRigLoader(plugin.AssetLoader): asset, subset ) namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( + collection_name = plugin.asset_name( asset, subset, unique_number ) - container = bpy.data.collections.new(lib_container) - container.name = container_name + container = bpy.data.collections.new(collection_name) blender.pipeline.containerise_existing( container, name, @@ -131,10 +132,9 @@ class BlendRigLoader(plugin.AssetLoader): container_metadata["lib_container"] = lib_container obj_container = self._process( - libpath, lib_container, container_name, None, None) + libpath, lib_container, collection_name, None, None) container_metadata["obj_container"] = obj_container - # Save the list of objects in the metadata container container_metadata["objects"] = obj_container.all_objects @@ -214,9 +214,9 @@ class BlendRigLoader(plugin.AssetLoader): armatures = [obj for obj in objects if obj.type == 'ARMATURE'] assert(len(armatures) == 1) - action = None - if armatures[0].animation_data and armatures[0].animation_data.action: - action = armatures[0].animation_data.action + action = None + if armatures[0].animation_data and armatures[0].animation_data.action: + action = armatures[0].animation_data.action parent = plugin.get_parent_collection(obj_container) From 525c4fe2f8e674ce700c26ca26ec5b83377d5510 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 30 Sep 2020 15:26:17 +0100 Subject: [PATCH 11/47] Create the subset for animation and setdress when loading a layout --- pype/plugins/blender/load/load_layout.py | 55 +++++++++++++++++------- 1 file changed, 40 insertions(+), 15 deletions(-) diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index 96a6be952f..dc623d101e 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -321,7 +321,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): for loader in loaders: if loader.__name__ == name: return loader - + return None def set_transform(self, obj, transform): @@ -332,9 +332,9 @@ class UnrealLayoutLoader(plugin.AssetLoader): # Y position is inverted in sign because Unreal and Blender have the # Y axis mirrored obj.location = ( - 
location.get('x') / 10, - -location.get('y') / 10, - location.get('z') / 10 + location.get('x'), + -location.get('y'), + location.get('z') ) obj.rotation_euler = ( rotation.get('x'), @@ -342,12 +342,15 @@ class UnrealLayoutLoader(plugin.AssetLoader): -rotation.get('z') ) obj.scale = ( - scale.get('x') / 10, - scale.get('y') / 10, - scale.get('z') / 10 + scale.get('x'), + scale.get('y'), + scale.get('z') ) - def _process(self, libpath, layout_container, container_name, actions): + def _process( + self, libpath, layout_container, container_name, context, actions, + parent + ): with open(libpath, "r") as fp: data = json.load(fp) @@ -366,7 +369,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): loaders = api.loaders_from_representation(all_loaders, reference) loader = self._get_loader(loaders, family) - + if not loader: continue @@ -374,7 +377,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): element_container = api.load( loader, - reference, + reference, namespace=instance_name ) @@ -387,7 +390,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): element_metadata = element_container.get( blender.pipeline.AVALON_PROPERTY) - # Unlink the object's collection from the scene collection and + # Unlink the object's collection from the scene collection and # link it in the layout collection element_collection = element_metadata.get('obj_container') scene.collection.children.unlink(element_collection) @@ -402,6 +405,19 @@ class UnrealLayoutLoader(plugin.AssetLoader): for o in objects: if o.type == 'ARMATURE': objects_to_transform.append(o) + # Create an animation subset for each rig + o.select_set(True) + asset = api.Session["AVALON_ASSET"] + dependency = str(context["representation"]["_id"]) + c = api.create( + name="animation_" + element_collection.name, + asset=asset, + family="animation", + options={"useSelection": True}, + data={"dependencies": dependency}) + scene.collection.children.unlink(c) + parent.children.link(c) + o.select_set(False) break elif family == 'model': objects_to_transform = objects @@ -460,9 +476,18 @@ class UnrealLayoutLoader(plugin.AssetLoader): container_metadata["libpath"] = libpath container_metadata["lib_container"] = lib_container - + + # Create a setdress subset to contain all the animation for all + # the rigs in the layout + parent = api.create( + name="animation", + asset=api.Session["AVALON_ASSET"], + family="setdress", + options={"useSelection": True}, + data={"dependencies": str(context["representation"]["_id"])}) + layout_collection = self._process( - libpath, layout_container, container_name, None) + libpath, layout_container, container_name, context, None, parent) container_metadata["obj_container"] = layout_collection @@ -561,8 +586,8 @@ class UnrealLayoutLoader(plugin.AssetLoader): layout_container_metadata["obj_container"] = layout_collection layout_container_metadata["objects"] = layout_collection.all_objects layout_container_metadata["libpath"] = str(libpath) - layout_container_metadata["representation"] = str(representation["_id"]) - + layout_container_metadata["representation"] = str( + representation["_id"]) def remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
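The next patch closes the loop on the animation and setdress subsets created above: an extractor writes one JSON entry per animated rig container, an integrator then appends the ObjectId of the published FBX animation to each entry, and the Unreal loader added in the patch after that uses instance_name to find the level actor and _id to import the matching animation onto it. A rough sketch of one entry, with placeholder values only:

    # Illustrative setdress entry; all values below are placeholders.
    entry = {
        "subset": "animation_hero_01_rigMain",   # animation subset created per rig
        "container": "hero_01_rigMain_CON",      # rig container in the Blender scene
        "instance_name": "hero_rig_01",          # actor name recorded when the layout was loaded
        "_id": "5f2d3c4b9a1e8f0012ab34ce"        # published FBX representation, added by integrate_animation.py
    }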
From bce22acf35ef1156a96b418450311e2354eabff7 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 30 Sep 2020 17:11:32 +0100 Subject: [PATCH 12/47] Implemented extraction of the animation and setdress --- .../publish/extract_animation_collection.py | 56 ++++++++++++ .../blender/publish/extract_fbx_animation.py | 89 +++++++++---------- .../blender/publish/integrate_animation.py | 49 ++++++++++ 3 files changed, 148 insertions(+), 46 deletions(-) create mode 100644 pype/plugins/blender/publish/extract_animation_collection.py create mode 100644 pype/plugins/blender/publish/integrate_animation.py diff --git a/pype/plugins/blender/publish/extract_animation_collection.py b/pype/plugins/blender/publish/extract_animation_collection.py new file mode 100644 index 0000000000..e5e0877280 --- /dev/null +++ b/pype/plugins/blender/publish/extract_animation_collection.py @@ -0,0 +1,56 @@ +import os +import json + +import pype.api +import pyblish.api + +import bpy + +class ExtractSetDress(pype.api.Extractor): + """Extract setdress.""" + + label = "Extract SetDress" + hosts = ["blender"] + families = ["setdress"] + optional = True + order = pyblish.api.ExtractorOrder + 0.1 + + def process(self, instance): + stagingdir = self.staging_dir(instance) + + json_data = [] + + for i in instance.context: + collection = i.data.get('name') + container = None + for obj in bpy.data.collections[collection].objects: + if obj.type == 'ARMATURE': + container_name = obj.get('avalon').get('container_name') + container = bpy.data.collections[container_name] + if container: + json_dict = {} + json_dict['subset'] = i.data.get('subset') + json_dict['container'] = container.name + json_dict['instance_name'] = container.get('avalon').get('instance_name') + json_data.append(json_dict) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_filename = f"{instance.name}.json" + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, fp=file, indent=2) + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation) + + self.log.info("Extracted instance '{}' to: {}".format( + instance.name, json_representation)) + diff --git a/pype/plugins/blender/publish/extract_fbx_animation.py b/pype/plugins/blender/publish/extract_fbx_animation.py index d51c641e9c..9c421560f0 100644 --- a/pype/plugins/blender/publish/extract_fbx_animation.py +++ b/pype/plugins/blender/publish/extract_fbx_animation.py @@ -17,14 +17,10 @@ class ExtractAnimationFBX(pype.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = f"{instance.name}.fbx" - filepath = os.path.join(stagingdir, filename) context = bpy.context scene = context.scene - view_layer = context.view_layer # Perform extraction self.log.info("Performing extraction..") @@ -35,22 +31,6 @@ class ExtractAnimationFBX(pype.api.Extractor): assert len(collections) == 1, "There should be one and only one " \ "collection collected for this asset" - old_active_layer_collection = view_layer.active_layer_collection - - layers = view_layer.layer_collection.children - - # Get the layer collection from the collection we need to export. 
- # This is needed because in Blender you can only set the active - # collection with the layer collection, and there is no way to get - # the layer collection from the collection - # (but there is the vice versa). - layer_collections = [ - layer for layer in layers if layer.collection == collections[0]] - - assert len(layer_collections) == 1 - - view_layer.active_layer_collection = layer_collections[0] - old_scale = scene.unit_settings.scale_length # We set the scale of the scene for the export @@ -59,6 +39,15 @@ class ExtractAnimationFBX(pype.api.Extractor): armatures = [ obj for obj in collections[0].objects if obj.type == 'ARMATURE'] + assert len(collections) == 1, "There should be one and only one " \ + "armature collected for this asset" + + armature = armatures[0] + + armature_name = armature.name + original_name = armature_name.split(':')[0] + armature.name = original_name + object_action_pairs = [] original_actions = [] @@ -66,23 +55,23 @@ class ExtractAnimationFBX(pype.api.Extractor): ending_frames = [] # For each armature, we make a copy of the current action - for obj in armatures: + curr_action = None + copy_action = None - curr_action = None - copy_action = None + if armature.animation_data and armature.animation_data.action: + curr_action = armature.animation_data.action + copy_action = curr_action.copy() - if obj.animation_data and obj.animation_data.action: + curr_frame_range = curr_action.frame_range - curr_action = obj.animation_data.action - copy_action = curr_action.copy() + starting_frames.append(curr_frame_range[0]) + ending_frames.append(curr_frame_range[1]) + else: + self.log.info("Object have no animation.") + return - curr_frame_range = curr_action.frame_range - - starting_frames.append(curr_frame_range[0]) - ending_frames.append(curr_frame_range[1]) - - object_action_pairs.append((obj, copy_action)) - original_actions.append(curr_action) + object_action_pairs.append((armature, copy_action)) + original_actions.append(curr_action) # We compute the starting and ending frames max_frame = min(starting_frames) @@ -96,44 +85,52 @@ class ExtractAnimationFBX(pype.api.Extractor): do_clean=False ) - # We export the fbx + for obj in bpy.data.objects: + obj.select_set(False) + + armature.select_set(True) + fbx_filename = f"{instance.name}_{armature.name}.fbx" + filepath = os.path.join(stagingdir, fbx_filename) + + override = bpy.context.copy() + override['selected_objects'] = [armature] bpy.ops.export_scene.fbx( + override, filepath=filepath, - use_active_collection=True, + use_selection=True, bake_anim_use_nla_strips=False, bake_anim_use_all_actions=False, - add_leaf_bones=False + add_leaf_bones=False, + armature_nodetype='ROOT', + object_types={'ARMATURE'} ) - - view_layer.active_layer_collection = old_active_layer_collection + armature.name = armature_name + armature.select_set(False) scene.unit_settings.scale_length = old_scale # We delete the baked action and set the original one back for i in range(0, len(object_action_pairs)): - pair = object_action_pairs[i] action = original_actions[i] if action: - pair[0].animation_data.action = action if pair[1]: - pair[1].user_clear() bpy.data.actions.remove(pair[1]) if "representations" not in instance.data: instance.data["representations"] = [] - representation = { + fbx_representation = { 'name': 'fbx', 'ext': 'fbx', - 'files': filename, + 'files': fbx_filename, "stagingDir": stagingdir, } - instance.data["representations"].append(representation) + instance.data["representations"].append(fbx_representation) - 
self.log.info("Extracted instance '%s' to: %s", - instance.name, representation) + self.log.info("Extracted instance '{}' to: {}".format( + instance.name, fbx_representation)) diff --git a/pype/plugins/blender/publish/integrate_animation.py b/pype/plugins/blender/publish/integrate_animation.py new file mode 100644 index 0000000000..90e94a4aac --- /dev/null +++ b/pype/plugins/blender/publish/integrate_animation.py @@ -0,0 +1,49 @@ +import json + +from avalon import io +import pyblish.api + + +class IntegrateAnimation(pyblish.api.InstancePlugin): + """Generate a JSON file for animation.""" + + label = "Integrate Animation" + order = pyblish.api.IntegratorOrder + 0.1 + optional = True + hosts = ["blender"] + families = ["setdress"] + + def process(self, instance): + self.log.info("Integrate Animation") + + representation = instance.data.get('representations')[0] + json_path = representation.get('publishedFiles')[0] + + with open(json_path, "r") as file: + data = json.load(file) + + # Update the json file for the setdress to add the published + # representations of the animations + for json_dict in data: + i = None + for elem in instance.context: + if elem.data.get('subset') == json_dict['subset']: + i = elem + break + if not i: + continue + rep = None + pub_repr = i.data.get('published_representations') + for elem in pub_repr: + if pub_repr.get(elem).get('representation').get('name') == "fbx": + rep = pub_repr.get(elem) + break + if not rep: + continue + obj_id = rep.get('representation').get('_id') + + if obj_id: + json_dict['_id'] = str(obj_id) + + with open(json_path, "w") as file: + json.dump(data, fp=file, indent=2) From 2ac826f350ac417bf0663d473a096247cf4a46fd Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 30 Sep 2020 17:12:29 +0100 Subject: [PATCH 13/47] Implemented loading of animation and setdress in Unreal --- pype/plugins/unreal/load/load_animation.py | 151 +++++++++++++++++++++ pype/plugins/unreal/load/load_setdress.py | 51 +++++++ 2 files changed, 202 insertions(+) create mode 100644 pype/plugins/unreal/load/load_animation.py create mode 100644 pype/plugins/unreal/load/load_setdress.py diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py new file mode 100644 index 0000000000..c948fd17d4 --- /dev/null +++ b/pype/plugins/unreal/load/load_animation.py @@ -0,0 +1,151 @@ +from avalon import api +from avalon import unreal as avalon_unreal +import unreal + + +class AnimationFBXLoader(api.Loader): + """Load Unreal SkeletalMesh from FBX""" + + families = ["animation"] + label = "Import FBX Animation" + representations = ["fbx"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, options = None): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. 
+ + Returns: + list(str): list of container content + """ + + print("Loading animation") + tools = unreal.AssetToolsHelpers().get_asset_tools() + temp_dir, temp_name = tools.create_unique_asset_name( + "/Game/{}".format(name), "_TMP" + ) + + unreal.EditorAssetLibrary.make_directory(temp_dir) + + asset = context.get('asset') + asset_name = asset.get('name') + + destination_name = "{}_{}".format(asset_name, name) + + automated = False + + task = unreal.AssetImportTask() + task.options = unreal.FbxImportUI() + + # If there are no options, the process cannot be automated + if options: + automated = True + actor = unreal.EditorLevelLibrary.get_actor_reference( + 'PersistentLevel:' + options.get('instance_name')) + skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton + task.options.set_editor_property('skeleton', skeleton) + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', temp_dir) + task.set_editor_property('destination_name', destination_name) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', automated) + task.set_editor_property('save', True) + + # set import options here + task.options.set_editor_property( + 'automated_import_should_detect_type', True) + task.options.set_editor_property( + 'original_import_type', unreal.FBXImportType.FBXIT_ANIMATION) + task.options.set_editor_property('import_mesh', False) + task.options.set_editor_property('import_animations', True) + + task.options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS + ) + + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + + imported_assets = unreal.EditorAssetLibrary.list_assets( + temp_dir, recursive=True, include_folder=True + ) + new_dir = avalon_unreal.containerise( + name, namespace, imported_assets, context, self.__class__.__name__) + + asset_content = unreal.EditorAssetLibrary.list_assets( + new_dir, recursive=True, include_folder=True + ) + + animation = None + + for a in asset_content: + imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a) + imported_asset = unreal.AssetRegistryHelpers.get_asset( + imported_asset_data) + if imported_asset.__class__ == unreal.AnimSequence: + animation = imported_asset + break + + if animation: + animation.set_editor_property('enable_root_motion', True) + actor.skeletal_mesh_component.set_editor_property( + 'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE) + actor.skeletal_mesh_component.animation_data.set_editor_property( + 'anim_to_play', animation) + + unreal.EditorAssetLibrary.delete_directory(temp_dir) + + return asset_content + + def update(self, container, representation): + node = container["objectName"] + source_path = api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + # strip suffix + task.set_editor_property('destination_name', node[:-4]) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + task.options = unreal.FbxImportUI() + task.options.set_editor_property('create_physics_asset', False) + task.options.set_editor_property('import_as_skeletal', True) + task.options.set_editor_property('import_animations', False) + + 
task.options.skeletal_mesh_import_data.set_editor_property( + 'normal_import_method', + unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS + ) + + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + avalon_unreal.imprint( + container_path, {"_id": str(representation["_id"])}) + + def remove(self, container): + unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_setdress.py b/pype/plugins/unreal/load/load_setdress.py new file mode 100644 index 0000000000..b8705b81ff --- /dev/null +++ b/pype/plugins/unreal/load/load_setdress.py @@ -0,0 +1,51 @@ +import json + +from avalon import unreal as avalon_unreal +from avalon import api, io +import unreal + + +class AnimationCollectionLoader(api.Loader): + """Load Unreal SkeletalMesh from FBX""" + + families = ["setdress"] + representations = ["json"] + + label = "Load Animation Collection" + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, options): + # Necessary because I think Python imports api from avalon_unreal + # as well. This forces it to use the right api. + from avalon import api + libpath = self.fname + + with open(libpath, "r") as fp: + data = json.load(fp) + + print(api) + + all_loaders = api.discover(api.Loader) + + for element in data: + reference = element.get('_id') + + loaders = api.loaders_from_representation(all_loaders, reference) + loader = None + for l in loaders: + if l.__name__ == "AnimationFBXLoader": + loader = l + break + + if not loader: + continue + + instance_name = element.get('instance_name') + + element_container = api.load( + loader, + reference, + namespace=instance_name, + options=element + ) From 8581339ad452217918d83b5cb343c8f345f8a6b3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 1 Oct 2020 17:10:06 +0100 Subject: [PATCH 14/47] Fix the drivers to link to the local objects --- pype/plugins/blender/load/load_rig.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/pype/plugins/blender/load/load_rig.py b/pype/plugins/blender/load/load_rig.py index 518f389d0b..d8c14c5837 100644 --- a/pype/plugins/blender/load/load_rig.py +++ b/pype/plugins/blender/load/load_rig.py @@ -82,8 +82,15 @@ class BlendRigLoader(plugin.AssetLoader): avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] avalon_info.update({"container_name": collection_name + '_CON'}) - if local_obj.type == 'ARMATURE' and action is not None: - local_obj.animation_data.action = action + if local_obj.type == 'ARMATURE': + if action is not None: + local_obj.animation_data.action = action + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj rig_container.pop(blender.pipeline.AVALON_PROPERTY) From 3fb4ee1d6f87eab6b0d90eaa378edfe37e402ce3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 7 Oct 2020 13:00:34 +0100 Subject: [PATCH 15/47] Fixed problem with rig loader When reopening the Unreal project, the skeleton was not connected to the skeletal mesh anymore. Some materials were not assigned to the mesh either. 
--- pype/plugins/unreal/load/load_rig.py | 84 +++++++++++++++++++--------- 1 file changed, 58 insertions(+), 26 deletions(-) diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py index dcbc1f4105..69d52086f7 100644 --- a/pype/plugins/unreal/load/load_rig.py +++ b/pype/plugins/unreal/load/load_rig.py @@ -1,5 +1,6 @@ -from avalon import api -from avalon import unreal as avalon_unreal +from avalon import api, pipeline +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline import unreal @@ -35,53 +36,84 @@ class SkeletalMeshFBXLoader(api.Loader): list(str): list of container content """ - tools = unreal.AssetToolsHelpers().get_asset_tools() - temp_dir, temp_name = tools.create_unique_asset_name( - "/Game/{}".format(name), "_TMP" - ) - - unreal.EditorAssetLibrary.make_directory(temp_dir) - + # Create directory for asset and avalon container + root = "/Game" asset = context.get('asset') asset_name = asset.get('name') + if asset_name: + container_name = "{}_{}".format(asset_name, name) + else: + container_name = "{}".format(name) - destination_name = "{}_{}".format(asset_name, name) + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, avalon_asset_name = tools.create_unique_asset_name( + "{}/{}".format(root, container_name), "_CON" + ) + + unreal.EditorAssetLibrary.make_directory(asset_dir) task = unreal.AssetImportTask() task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', temp_dir) - task.set_editor_property('destination_name', destination_name) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', container_name) task.set_editor_property('replace_existing', False) task.set_editor_property('automated', True) - task.set_editor_property('save', True) + task.set_editor_property('save', False) # set import options here - task.options = unreal.FbxImportUI() - task.options.set_editor_property('create_physics_asset', True) - task.options.set_editor_property('import_as_skeletal', True) - task.options.set_editor_property('import_animations', False) + options = unreal.FbxImportUI() + options.set_editor_property('import_as_skeletal', True) + options.set_editor_property('import_animations', False) + options.set_editor_property('import_mesh', True) + options.set_editor_property('import_materials', True) + options.set_editor_property('import_textures', True) + options.set_editor_property('skeleton', None) + options.set_editor_property('create_physics_asset', False) + options.set_editor_property('mesh_type_to_import', + unreal.FBXImportType.FBXIT_SKELETAL_MESH) + + options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_ALL + ) # set to import normals, otherwise Unreal will compute them # and it will take a long time, depending on the size of the mesh - task.options.skeletal_mesh_import_data.set_editor_property( + options.skeletal_mesh_import_data.set_editor_property( 'normal_import_method', unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS ) + task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - imported_assets = unreal.EditorAssetLibrary.list_assets( - temp_dir, recursive=True, include_folder=True - ) - new_dir = avalon_unreal.containerise( - name, namespace, imported_assets, context, self.__class__.__name__) + # Create Asset Container + lib.create_avalon_container( + container=avalon_asset_name, path=asset_dir) + + 
namespace = asset_dir + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": avalon_asset_name, + "namespace": namespace, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, avalon_asset_name), data) asset_content = unreal.EditorAssetLibrary.list_assets( - new_dir, recursive=True, include_folder=True + asset_dir, recursive=True, include_folder=True ) - unreal.EditorAssetLibrary.delete_directory(temp_dir) + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) return asset_content @@ -115,7 +147,7 @@ class SkeletalMeshFBXLoader(api.Loader): container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata - avalon_unreal.imprint( + unreal_pipeline.imprint( container_path, {"_id": str(representation["_id"])}) def remove(self, container): From 15b782f25e765d90bb0fb8e933e0ea4f2a885cb4 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Oct 2020 17:28:24 +0100 Subject: [PATCH 16/47] Improved remove and update for layout loader --- pype/plugins/blender/load/load_layout.py | 71 +++++++++++++++++------- 1 file changed, 50 insertions(+), 21 deletions(-) diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index dc623d101e..c8ef73aea3 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -9,7 +9,7 @@ from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender +from avalon import api, blender, pipeline import bpy import pype.hosts.blender.plugin as plugin @@ -308,6 +308,33 @@ class UnrealLayoutLoader(plugin.AssetLoader): self._remove_collections(child) bpy.data.collections.remove(child) + def _remove(self, layout_container): + layout_container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + + if layout_container.children: + for child in layout_container.children: + child_container = child.get(blender.pipeline.AVALON_PROPERTY) + child_container['objectName'] = child.name + api.remove(child_container) + + for c in bpy.data.collections: + metadata = c.get('avalon') + if metadata: + print("metadata.get('id')") + print(metadata.get('id')) + if metadata and metadata.get('id') == 'pyblish.avalon.instance': + print("metadata.get('dependencies')") + print(metadata.get('dependencies')) + print("layout_container_metadata.get('representation')") + print(layout_container_metadata.get('representation')) + if metadata.get('dependencies') == layout_container_metadata.get('representation'): + + for child in c.children: + bpy.data.collections.remove(child) + bpy.data.collections.remove(c) + break + def _get_loader(self, loaders, family): name = "" if family == 'rig': @@ -348,8 +375,8 @@ class UnrealLayoutLoader(plugin.AssetLoader): ) def _process( - self, libpath, layout_container, container_name, context, actions, - parent + self, libpath, layout_container, container_name, representation, + actions, parent ): with open(libpath, "r") as fp: data = json.load(fp) @@ -408,13 +435,12 @@ class UnrealLayoutLoader(plugin.AssetLoader): # Create an animation subset for each rig o.select_set(True) asset = api.Session["AVALON_ASSET"] - dependency = str(context["representation"]["_id"]) c = api.create( 
name="animation_" + element_collection.name, asset=asset, family="animation", options={"useSelection": True}, - data={"dependencies": dependency}) + data={"dependencies": representation}) scene.collection.children.unlink(c) parent.children.link(c) o.select_set(False) @@ -477,7 +503,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): container_metadata["libpath"] = libpath container_metadata["lib_container"] = lib_container - # Create a setdress subset to contain all the animation for all + # Create a setdress subset to contain all the animation for all # the rigs in the layout parent = api.create( name="animation", @@ -487,7 +513,8 @@ class UnrealLayoutLoader(plugin.AssetLoader): data={"dependencies": str(context["representation"]["_id"])}) layout_collection = self._process( - libpath, layout_container, container_name, context, None, parent) + libpath, layout_container, container_name, + str(context["representation"]["_id"]), None, parent) container_metadata["obj_container"] = layout_collection @@ -507,11 +534,12 @@ class UnrealLayoutLoader(plugin.AssetLoader): will not be removed, only unlinked. Normally this should not be the case though. """ - print(container) - print(container["objectName"]) layout_container = bpy.data.collections.get( container["objectName"] ) + if not layout_container: + return False + libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -574,14 +602,20 @@ class UnrealLayoutLoader(plugin.AssetLoader): instance_name = element_metadata.get('instance_name') actions[instance_name] = obj.animation_data.action - self._remove_objects(objects) - self._remove_collections(obj_container) + self._remove(layout_container) + bpy.data.collections.remove(obj_container) - self._remove_collections(layout_container) - # bpy.data.collections.remove(layout_container) + + parent = api.create( + name="animation", + asset=api.Session["AVALON_ASSET"], + family="setdress", + options={"useSelection": True}, + data={"dependencies": str(representation["_id"])}) layout_collection = self._process( - libpath, layout_container, container_name, actions) + libpath, layout_container, container_name, + str(representation["_id"]), actions, parent) layout_container_metadata["obj_container"] = layout_collection layout_container_metadata["objects"] = layout_collection.all_objects @@ -604,21 +638,16 @@ class UnrealLayoutLoader(plugin.AssetLoader): ) if not layout_container: return False - # assert not (collection.children), ( - # "Nested collections are not supported." 
- # ) layout_container_metadata = layout_container.get( blender.pipeline.AVALON_PROPERTY) obj_container = plugin.get_local_collection_with_name( layout_container_metadata["obj_container"].name ) - objects = obj_container.all_objects - self._remove_objects(objects) - self._remove_collections(obj_container) + self._remove(layout_container) + bpy.data.collections.remove(obj_container) - self._remove_collections(layout_container) bpy.data.collections.remove(layout_container) return True From 24703b0708533578cd2fa7a0583d86cbbc1fa73b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 20 Oct 2020 14:56:28 +0100 Subject: [PATCH 17/47] Improved loading and update for rigs and staticmeshes --- pype/plugins/unreal/load/load_rig.py | 46 +++++++---- .../plugins/unreal/load/load_staticmeshfbx.py | 81 +++++++++++++------ 2 files changed, 87 insertions(+), 40 deletions(-) diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py index 69d52086f7..b92c353b50 100644 --- a/pype/plugins/unreal/load/load_rig.py +++ b/pype/plugins/unreal/load/load_rig.py @@ -37,9 +37,10 @@ class SkeletalMeshFBXLoader(api.Loader): """ # Create directory for asset and avalon container - root = "/Game" + root = "/Game/Avalon/Assets" asset = context.get('asset') asset_name = asset.get('name') + suffix = "_CON" if asset_name: container_name = "{}_{}".format(asset_name, name) else: @@ -47,8 +48,9 @@ class SkeletalMeshFBXLoader(api.Loader): tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, avalon_asset_name = tools.create_unique_asset_name( - "{}/{}".format(root, container_name), "_CON" - ) + "{}/{}/{}".format(root, asset_name, name), suffix="") + + avalon_asset_name += suffix unreal.EditorAssetLibrary.make_directory(asset_dir) @@ -92,13 +94,11 @@ class SkeletalMeshFBXLoader(api.Loader): lib.create_avalon_container( container=avalon_asset_name, path=asset_dir) - namespace = asset_dir - data = { "schema": "avalon-core:container-2.0", "id": pipeline.AVALON_CONTAINER_ID, "name": avalon_asset_name, - "namespace": namespace, + "namespace": asset_dir, "asset_name": asset_name, "loader": str(self.__class__.__name__), "representation": context["representation"]["_id"], @@ -118,7 +118,7 @@ class SkeletalMeshFBXLoader(api.Loader): return asset_content def update(self, container, representation): - node = container["objectName"] + name = container["name"] source_path = api.get_representation_path(representation) destination_path = container["namespace"] @@ -126,29 +126,43 @@ class SkeletalMeshFBXLoader(api.Loader): task.set_editor_property('filename', source_path) task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', node[:-4]) + task.set_editor_property('destination_name', name) task.set_editor_property('replace_existing', True) task.set_editor_property('automated', True) task.set_editor_property('save', True) - task.options = unreal.FbxImportUI() - task.options.set_editor_property('create_physics_asset', False) - task.options.set_editor_property('import_as_skeletal', True) - task.options.set_editor_property('import_animations', False) + # set import options here + options = unreal.FbxImportUI() + options.set_editor_property('import_as_skeletal', True) + options.set_editor_property('import_animations', False) + options.set_editor_property('import_mesh', True) + options.set_editor_property('import_materials', True) + options.set_editor_property('import_textures', True) + options.set_editor_property('skeleton', None) + 
options.set_editor_property('create_physics_asset', False) - task.options.skeletal_mesh_import_data.set_editor_property( + options.set_editor_property('mesh_type_to_import', + unreal.FBXImportType.FBXIT_SKELETAL_MESH) + + options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_ALL + ) + # set to import normals, otherwise Unreal will compute them + # and it will take a long time, depending on the size of the mesh + options.skeletal_mesh_import_data.set_editor_property( 'normal_import_method', unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS ) + task.options = options # do import fbx and replace existing data - unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, {"_id": str(representation["_id"])}) + container_path, {"representation": str(representation["_id"])}) def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index 704fdb875e..8ea7338e2a 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -1,5 +1,7 @@ -from avalon import api +from avalon import api, pipeline from avalon import unreal as avalon_unreal +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline import unreal @@ -35,46 +37,73 @@ class StaticMeshFBXLoader(api.Loader): list(str): list of container content """ - tools = unreal.AssetToolsHelpers().get_asset_tools() - temp_dir, temp_name = tools.create_unique_asset_name( - "/Game/{}".format(name), "_TMP" - ) + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset') + asset_name = asset.get('name') + suffix = "_CON" + if asset_name: + container_name = "{}_{}".format(asset_name, name) + else: + container_name = "{}".format(name) - unreal.EditorAssetLibrary.make_directory(temp_dir) + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, subset_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset_name, name), suffix="") + + avalon_asset_name = subset_name + suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) task = unreal.AssetImportTask() task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', temp_dir) - task.set_editor_property('destination_name', name) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', container_name) task.set_editor_property('replace_existing', False) task.set_editor_property('automated', True) task.set_editor_property('save', True) # set import options here - task.options = unreal.FbxImportUI() - task.options.set_editor_property( + options = unreal.FbxImportUI() + options.set_editor_property( 'automated_import_should_detect_type', False) - task.options.set_editor_property('import_animations', False) + options.set_editor_property('import_animations', False) + task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - imported_assets = unreal.EditorAssetLibrary.list_assets( - temp_dir, recursive=True, include_folder=True - ) - new_dir = avalon_unreal.containerise( - name, namespace, 
imported_assets, context, self.__class__.__name__) + # Create Asset Container + lib.create_avalon_container( + container=avalon_asset_name, path=asset_dir) + + namespace = asset_dir + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": container_name, + "namespace": namespace, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, avalon_asset_name), data) asset_content = unreal.EditorAssetLibrary.list_assets( - new_dir, recursive=True, include_folder=True + asset_dir, recursive=True, include_folder=True ) - unreal.EditorAssetLibrary.delete_directory(temp_dir) + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) return asset_content def update(self, container, representation): - node = container["objectName"] + name = container["name"] source_path = api.get_representation_path(representation) destination_path = container["namespace"] @@ -83,21 +112,25 @@ class StaticMeshFBXLoader(api.Loader): task.set_editor_property('filename', source_path) task.set_editor_property('destination_path', destination_path) # strip suffix - task.set_editor_property('destination_name', node[:-4]) + task.set_editor_property('destination_name', name) task.set_editor_property('replace_existing', True) task.set_editor_property('automated', True) task.set_editor_property('save', True) - task.options = unreal.FbxImportUI() - task.options.set_editor_property('import_animations', False) + # set import options here + options = unreal.FbxImportUI() + options.set_editor_property( + 'automated_import_should_detect_type', False) + options.set_editor_property('import_animations', False) + task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata - avalon_unreal.imprint( - container_path, {"_id": str(representation["_id"])}) + unreal_pipeline.imprint( + container_path, {"representation": str(representation["_id"])}) def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) From 41bede06888286bce73ac9b25e1cde194ae1ba97 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 3 Nov 2020 15:24:11 +0000 Subject: [PATCH 18/47] Improved loading for setdress and animation and implemented update --- pype/plugins/unreal/load/load_animation.py | 102 +++++++++++++-------- pype/plugins/unreal/load/load_setdress.py | 89 ++++++++++++++++-- 2 files changed, 147 insertions(+), 44 deletions(-) diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py index c948fd17d4..c4777cdee0 100644 --- a/pype/plugins/unreal/load/load_animation.py +++ b/pype/plugins/unreal/load/load_animation.py @@ -1,5 +1,6 @@ -from avalon import api -from avalon import unreal as avalon_unreal +from avalon import api, pipeline +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline import unreal @@ -35,20 +36,25 @@ class AnimationFBXLoader(api.Loader): list(str): list of container content """ - print("Loading animation") + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = 
"{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + tools = unreal.AssetToolsHelpers().get_asset_tools() - temp_dir, temp_name = tools.create_unique_asset_name( - "/Game/{}".format(name), "_TMP" - ) + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") - unreal.EditorAssetLibrary.make_directory(temp_dir) + container_name += suffix - asset = context.get('asset') - asset_name = asset.get('name') - - destination_name = "{}_{}".format(asset_name, name) + unreal.EditorAssetLibrary.make_directory(asset_dir) automated = False + actor = None task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() @@ -56,17 +62,20 @@ class AnimationFBXLoader(api.Loader): # If there are no options, the process cannot be automated if options: automated = True - actor = unreal.EditorLevelLibrary.get_actor_reference( - 'PersistentLevel:' + options.get('instance_name')) + actor_name = 'PersistentLevel.' + options.get('instance_name') + actor = unreal.EditorLevelLibrary.get_actor_reference(actor_name) skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton task.options.set_editor_property('skeleton', skeleton) + if not actor: + return None + task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', temp_dir) - task.set_editor_property('destination_name', destination_name) - task.set_editor_property('replace_existing', True) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) task.set_editor_property('automated', automated) - task.set_editor_property('save', True) + task.set_editor_property('save', False) # set import options here task.options.set_editor_property( @@ -83,19 +92,33 @@ class AnimationFBXLoader(api.Loader): unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) - imported_assets = unreal.EditorAssetLibrary.list_assets( - temp_dir, recursive=True, include_folder=True - ) - new_dir = avalon_unreal.containerise( - name, namespace, imported_assets, context, self.__class__.__name__) + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) asset_content = unreal.EditorAssetLibrary.list_assets( - new_dir, recursive=True, include_folder=True + asset_dir, recursive=True, include_folder=True ) animation = None for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a) imported_asset = unreal.AssetRegistryHelpers.get_asset( imported_asset_data) @@ -110,42 +133,49 @@ class AnimationFBXLoader(api.Loader): actor.skeletal_mesh_component.animation_data.set_editor_property( 'anim_to_play', animation) - unreal.EditorAssetLibrary.delete_directory(temp_dir) - return asset_content def update(self, container, representation): - node = container["objectName"] + name = container["asset_name"] source_path = api.get_representation_path(representation) destination_path = 
container["namespace"] task = unreal.AssetImportTask() + task.options = unreal.FbxImportUI() task.set_editor_property('filename', source_path) task.set_editor_property('destination_path', destination_path) # strip suffix - task.set_editor_property('destination_name', node[:-4]) + task.set_editor_property('destination_name', name) task.set_editor_property('replace_existing', True) task.set_editor_property('automated', True) - task.set_editor_property('save', True) + task.set_editor_property('save', False) - task.options = unreal.FbxImportUI() - task.options.set_editor_property('create_physics_asset', False) - task.options.set_editor_property('import_as_skeletal', True) - task.options.set_editor_property('import_animations', False) + # set import options here + task.options.set_editor_property( + 'automated_import_should_detect_type', True) + task.options.set_editor_property( + 'original_import_type', unreal.FBXImportType.FBXIT_ANIMATION) + task.options.set_editor_property('import_mesh', False) + task.options.set_editor_property('import_animations', True) task.options.skeletal_mesh_import_data.set_editor_property( - 'normal_import_method', - unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS + 'import_content_type', + unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS ) + skeletal_mesh = unreal.EditorAssetLibrary.load_asset( + container.get('namespace') + "/" + container.get('asset_name')) + skeleton = skeletal_mesh.get_editor_property('skeleton') + task.options.set_editor_property('skeleton', skeleton) + # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata - avalon_unreal.imprint( - container_path, {"_id": str(representation["_id"])}) + unreal_pipeline.imprint( + container_path, {"representation": str(representation["_id"])}) def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_setdress.py b/pype/plugins/unreal/load/load_setdress.py index b8705b81ff..8a89d3e352 100644 --- a/pype/plugins/unreal/load/load_setdress.py +++ b/pype/plugins/unreal/load/load_setdress.py @@ -1,7 +1,7 @@ import json +from os import pipe -from avalon import unreal as avalon_unreal -from avalon import api, io +from avalon import api import unreal @@ -16,16 +16,29 @@ class AnimationCollectionLoader(api.Loader): color = "orange" def load(self, context, name, namespace, options): - # Necessary because I think Python imports api from avalon_unreal - # as well. This forces it to use the right api. 
- from avalon import api + from avalon import api, pipeline + from avalon.unreal import lib + from avalon.unreal import pipeline as unreal_pipeline + import unreal + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}".format(root, asset), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + libpath = self.fname with open(libpath, "r") as fp: data = json.load(fp) - print(api) - all_loaders = api.discover(api.Loader) for element in data: @@ -43,9 +56,69 @@ class AnimationCollectionLoader(api.Loader): instance_name = element.get('instance_name') - element_container = api.load( + api.load( loader, reference, namespace=instance_name, options=element ) + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + return asset_content + + def update(self, container, representation): + from avalon import api, io + from avalon.unreal import pipeline + + source_path = api.get_representation_path(representation) + + with open(source_path, "r") as fp: + data = json.load(fp) + + animation_containers = [ + i for i in pipeline.ls() if + i.get('asset') == container.get('asset') and + i.get('family') == 'animation'] + + for element in data: + new_version = io.find_one({"_id": io.ObjectId(element.get('_id'))}) + new_version_number = new_version.get('context').get('version') + anim_container = None + for i in animation_containers: + if i.get('container_name') == (element.get('subset') + "_CON"): + anim_container = i + break + if not anim_container: + continue + + api.update(anim_container, new_version_number) + + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + pipeline.imprint( + container_path, {"representation": str(representation["_id"])}) + + def remove(self, container): + unreal.EditorAssetLibrary.delete_directory(container["namespace"]) From 53d24236c676ad0256984e645237906907790d90 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 3 Nov 2020 16:09:53 +0000 Subject: [PATCH 19/47] Improvements on loading for unreal --- pype/plugins/unreal/load/load_animation.py | 7 ++++ pype/plugins/unreal/load/load_rig.py | 33 +++++++++++-------- .../plugins/unreal/load/load_staticmeshfbx.py | 33 +++++++++++-------- 3 files changed, 47 insertions(+), 26 deletions(-) diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py index c4777cdee0..a090ae9393 100644 --- a/pype/plugins/unreal/load/load_animation.py +++ b/pype/plugins/unreal/load/load_animation.py @@ -177,5 +177,12 @@ class AnimationFBXLoader(api.Loader): unreal_pipeline.imprint( container_path, {"representation": str(representation["_id"])}) + asset_content = 
unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py index b92c353b50..3bafac5fc7 100644 --- a/pype/plugins/unreal/load/load_rig.py +++ b/pype/plugins/unreal/load/load_rig.py @@ -38,19 +38,18 @@ class SkeletalMeshFBXLoader(api.Loader): # Create directory for asset and avalon container root = "/Game/Avalon/Assets" - asset = context.get('asset') - asset_name = asset.get('name') + asset = context.get('asset').get('name') suffix = "_CON" - if asset_name: - container_name = "{}_{}".format(asset_name, name) + if asset: + asset_name = "{}_{}".format(asset, name) else: - container_name = "{}".format(name) + asset_name = "{}".format(name) tools = unreal.AssetToolsHelpers().get_asset_tools() - asset_dir, avalon_asset_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset_name, name), suffix="") + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") - avalon_asset_name += suffix + container_name += suffix unreal.EditorAssetLibrary.make_directory(asset_dir) @@ -58,7 +57,7 @@ class SkeletalMeshFBXLoader(api.Loader): task.set_editor_property('filename', self.fname) task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', container_name) + task.set_editor_property('destination_name', asset_name) task.set_editor_property('replace_existing', False) task.set_editor_property('automated', True) task.set_editor_property('save', False) @@ -92,13 +91,14 @@ class SkeletalMeshFBXLoader(api.Loader): # Create Asset Container lib.create_avalon_container( - container=avalon_asset_name, path=asset_dir) + container=container_name, path=asset_dir) data = { "schema": "avalon-core:container-2.0", "id": pipeline.AVALON_CONTAINER_ID, - "name": avalon_asset_name, + "asset": asset, "namespace": asset_dir, + "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), "representation": context["representation"]["_id"], @@ -106,7 +106,7 @@ class SkeletalMeshFBXLoader(api.Loader): "family": context["representation"]["context"]["family"] } unreal_pipeline.imprint( - "{}/{}".format(asset_dir, avalon_asset_name), data) + "{}/{}".format(asset_dir, container_name), data) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True @@ -118,7 +118,7 @@ class SkeletalMeshFBXLoader(api.Loader): return asset_content def update(self, container, representation): - name = container["name"] + name = container["asset_name"] source_path = api.get_representation_path(representation) destination_path = container["namespace"] @@ -164,5 +164,12 @@ class SkeletalMeshFBXLoader(api.Loader): unreal_pipeline.imprint( container_path, {"representation": str(representation["_id"])}) + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index 8ea7338e2a..749ca896fc 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ 
b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -39,19 +39,18 @@ class StaticMeshFBXLoader(api.Loader): # Create directory for asset and avalon container root = "/Game/Avalon/Assets" - asset = context.get('asset') - asset_name = asset.get('name') + asset = context.get('asset').get('name') suffix = "_CON" - if asset_name: - container_name = "{}_{}".format(asset_name, name) + if asset: + asset_name = "{}_{}".format(asset, name) else: - container_name = "{}".format(name) + asset_name = "{}".format(name) tools = unreal.AssetToolsHelpers().get_asset_tools() - asset_dir, subset_name = tools.create_unique_asset_name( - "{}/{}/{}".format(root, asset_name, name), suffix="") + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") - avalon_asset_name = subset_name + suffix + container_name += suffix unreal.EditorAssetLibrary.make_directory(asset_dir) @@ -59,7 +58,7 @@ class StaticMeshFBXLoader(api.Loader): task.set_editor_property('filename', self.fname) task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', container_name) + task.set_editor_property('destination_name', asset_name) task.set_editor_property('replace_existing', False) task.set_editor_property('automated', True) task.set_editor_property('save', True) @@ -75,15 +74,16 @@ class StaticMeshFBXLoader(api.Loader): # Create Asset Container lib.create_avalon_container( - container=avalon_asset_name, path=asset_dir) + container=container_name, path=asset_dir) namespace = asset_dir data = { "schema": "avalon-core:container-2.0", "id": pipeline.AVALON_CONTAINER_ID, - "name": container_name, - "namespace": namespace, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), "representation": context["representation"]["_id"], @@ -91,7 +91,7 @@ class StaticMeshFBXLoader(api.Loader): "family": context["representation"]["context"]["family"] } unreal_pipeline.imprint( - "{}/{}".format(asset_dir, avalon_asset_name), data) + "{}/{}".format(asset_dir, container_name), data) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True @@ -132,5 +132,12 @@ class StaticMeshFBXLoader(api.Loader): unreal_pipeline.imprint( container_path, {"representation": str(representation["_id"])}) + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) From 6e0c621bfc384efd86a79b90d97367cda1524e9b Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 3 Nov 2020 16:31:22 +0000 Subject: [PATCH 20/47] Handle exception if asset container is not found when extracting layout --- pype/plugins/unreal/publish/extract_layout.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/plugins/unreal/publish/extract_layout.py b/pype/plugins/unreal/publish/extract_layout.py index eadf186260..6345b8da51 100644 --- a/pype/plugins/unreal/publish/extract_layout.py +++ b/pype/plugins/unreal/publish/extract_layout.py @@ -48,7 +48,11 @@ class ExtractLayout(pype.api.Extractor): filter = unreal.ARFilter( class_names=["AssetContainer"], package_paths=[path]) ar = unreal.AssetRegistryHelpers.get_asset_registry() - asset_container = ar.get_assets(filter)[0].get_asset() + try: + asset_container = 
ar.get_assets(filter)[0].get_asset() + except IndexError: + self.log.error("AssetContainer not found.") + return parent = eal.get_metadata_tag(asset_container, "parent") family = eal.get_metadata_tag(asset_container, "family") From 3dab7be53ac1ad30c1bfb6f671be741355cf04df Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 4 Nov 2020 10:25:43 +0000 Subject: [PATCH 21/47] Improved remove function to delete parent directory if empty --- pype/plugins/unreal/load/load_animation.py | 15 ++++++++++++++- pype/plugins/unreal/load/load_rig.py | 14 +++++++++++++- pype/plugins/unreal/load/load_staticmeshfbx.py | 15 ++++++++++++++- 3 files changed, 41 insertions(+), 3 deletions(-) diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py index a090ae9393..a86f72d08d 100644 --- a/pype/plugins/unreal/load/load_animation.py +++ b/pype/plugins/unreal/load/load_animation.py @@ -1,3 +1,5 @@ +import os + from avalon import api, pipeline from avalon.unreal import lib from avalon.unreal import pipeline as unreal_pipeline @@ -185,4 +187,15 @@ class AnimationFBXLoader(api.Loader): unreal.EditorAssetLibrary.save_asset(a) def remove(self, container): - unreal.EditorAssetLibrary.delete_directory(container["namespace"]) + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) + diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py index 3bafac5fc7..623b921daa 100644 --- a/pype/plugins/unreal/load/load_rig.py +++ b/pype/plugins/unreal/load/load_rig.py @@ -1,3 +1,5 @@ +import os + from avalon import api, pipeline from avalon.unreal import lib from avalon.unreal import pipeline as unreal_pipeline @@ -172,4 +174,14 @@ class SkeletalMeshFBXLoader(api.Loader): unreal.EditorAssetLibrary.save_asset(a) def remove(self, container): - unreal.EditorAssetLibrary.delete_directory(container["namespace"]) + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index 749ca896fc..acafd16b13 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -1,3 +1,5 @@ +import os + from avalon import api, pipeline from avalon import unreal as avalon_unreal from avalon.unreal import lib @@ -140,4 +142,15 @@ class StaticMeshFBXLoader(api.Loader): unreal.EditorAssetLibrary.save_asset(a) def remove(self, container): - unreal.EditorAssetLibrary.delete_directory(container["namespace"]) + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) + From a06bac68eda891d282a14f4179a9a4dc5191cdf9 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 6 Nov 2020 11:05:55 +0000 Subject: [PATCH 22/47] Fix problem with update that didn't update some metadata --- 
pype/plugins/unreal/load/load_animation.py | 6 +++++- pype/plugins/unreal/load/load_rig.py | 6 +++++- pype/plugins/unreal/load/load_setdress.py | 6 +++++- pype/plugins/unreal/load/load_staticmeshfbx.py | 6 +++++- 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py index a86f72d08d..33fd265d1a 100644 --- a/pype/plugins/unreal/load/load_animation.py +++ b/pype/plugins/unreal/load/load_animation.py @@ -177,7 +177,11 @@ class AnimationFBXLoader(api.Loader): container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, {"representation": str(representation["_id"])}) + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) asset_content = unreal.EditorAssetLibrary.list_assets( destination_path, recursive=True, include_folder=True diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py index 623b921daa..ab4cfe81d9 100644 --- a/pype/plugins/unreal/load/load_rig.py +++ b/pype/plugins/unreal/load/load_rig.py @@ -164,7 +164,11 @@ class SkeletalMeshFBXLoader(api.Loader): container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, {"representation": str(representation["_id"])}) + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) asset_content = unreal.EditorAssetLibrary.list_assets( destination_path, recursive=True, include_folder=True diff --git a/pype/plugins/unreal/load/load_setdress.py b/pype/plugins/unreal/load/load_setdress.py index 8a89d3e352..191ff395c3 100644 --- a/pype/plugins/unreal/load/load_setdress.py +++ b/pype/plugins/unreal/load/load_setdress.py @@ -118,7 +118,11 @@ class AnimationCollectionLoader(api.Loader): container["objectName"]) # update metadata pipeline.imprint( - container_path, {"representation": str(representation["_id"])}) + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) def remove(self, container): unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index acafd16b13..a28b6e1226 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -132,7 +132,11 @@ class StaticMeshFBXLoader(api.Loader): container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, {"representation": str(representation["_id"])}) + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) asset_content = unreal.EditorAssetLibrary.list_assets( destination_path, recursive=True, include_folder=True From b59887b5858a4cbb541263a0b3e4e7ad8d0cfd2a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 6 Nov 2020 14:52:55 +0000 Subject: [PATCH 23/47] Pep8 compliance --- pype/plugins/unreal/load/load_animation.py | 9 ++++----- pype/plugins/unreal/load/load_rig.py | 14 +++++++------- pype/plugins/unreal/load/load_setdress.py | 5 ++--- pype/plugins/unreal/load/load_staticmeshfbx.py | 5 +---- 4 files changed, 14 insertions(+), 19 deletions(-) diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py index 33fd265d1a..5e106788ce 100644 --- a/pype/plugins/unreal/load/load_animation.py +++ b/pype/plugins/unreal/load/load_animation.py @@ -15,7 +15,7 
@@ class AnimationFBXLoader(api.Loader): icon = "cube" color = "orange" - def load(self, context, name, namespace, options = None): + def load(self, context, name, namespace, options=None): """ Load and containerise representation into Content Browser. @@ -88,7 +88,7 @@ class AnimationFBXLoader(api.Loader): task.options.set_editor_property('import_animations', True) task.options.skeletal_mesh_import_data.set_editor_property( - 'import_content_type', + 'import_content_type', unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS ) @@ -162,7 +162,7 @@ class AnimationFBXLoader(api.Loader): task.options.set_editor_property('import_animations', True) task.options.skeletal_mesh_import_data.set_editor_property( - 'import_content_type', + 'import_content_type', unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS ) @@ -177,7 +177,7 @@ class AnimationFBXLoader(api.Loader): container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, + container_path, { "representation": str(representation["_id"]), "parent": str(representation["parent"]) @@ -202,4 +202,3 @@ class AnimationFBXLoader(api.Loader): if len(asset_content) == 0: unreal.EditorAssetLibrary.delete_directory(parent_path) - diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py index ab4cfe81d9..56351e388b 100644 --- a/pype/plugins/unreal/load/load_rig.py +++ b/pype/plugins/unreal/load/load_rig.py @@ -75,16 +75,16 @@ class SkeletalMeshFBXLoader(api.Loader): options.set_editor_property('create_physics_asset', False) options.set_editor_property('mesh_type_to_import', - unreal.FBXImportType.FBXIT_SKELETAL_MESH) + unreal.FBXImportType.FBXIT_SKELETAL_MESH) options.skeletal_mesh_import_data.set_editor_property( - 'import_content_type', + 'import_content_type', unreal.FBXImportContentType.FBXICT_ALL ) # set to import normals, otherwise Unreal will compute them # and it will take a long time, depending on the size of the mesh options.skeletal_mesh_import_data.set_editor_property( - 'normal_import_method', + 'normal_import_method', unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS ) @@ -144,16 +144,16 @@ class SkeletalMeshFBXLoader(api.Loader): options.set_editor_property('create_physics_asset', False) options.set_editor_property('mesh_type_to_import', - unreal.FBXImportType.FBXIT_SKELETAL_MESH) + unreal.FBXImportType.FBXIT_SKELETAL_MESH) options.skeletal_mesh_import_data.set_editor_property( - 'import_content_type', + 'import_content_type', unreal.FBXImportContentType.FBXICT_ALL ) # set to import normals, otherwise Unreal will compute them # and it will take a long time, depending on the size of the mesh options.skeletal_mesh_import_data.set_editor_property( - 'normal_import_method', + 'normal_import_method', unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS ) @@ -164,7 +164,7 @@ class SkeletalMeshFBXLoader(api.Loader): container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, + container_path, { "representation": str(representation["_id"]), "parent": str(representation["parent"]) diff --git a/pype/plugins/unreal/load/load_setdress.py b/pype/plugins/unreal/load/load_setdress.py index 191ff395c3..08330e349b 100644 --- a/pype/plugins/unreal/load/load_setdress.py +++ b/pype/plugins/unreal/load/load_setdress.py @@ -1,5 +1,4 @@ import json -from os import pipe from avalon import api import unreal @@ -97,7 +96,7 @@ class AnimationCollectionLoader(api.Loader): data = json.load(fp) animation_containers = [ - i for i in pipeline.ls() if + i for i in pipeline.ls() if 
i.get('asset') == container.get('asset') and i.get('family') == 'animation'] @@ -118,7 +117,7 @@ class AnimationCollectionLoader(api.Loader): container["objectName"]) # update metadata pipeline.imprint( - container_path, + container_path, { "representation": str(representation["_id"]), "parent": str(representation["parent"]) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index a28b6e1226..149bafcacc 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -78,8 +78,6 @@ class StaticMeshFBXLoader(api.Loader): lib.create_avalon_container( container=container_name, path=asset_dir) - namespace = asset_dir - data = { "schema": "avalon-core:container-2.0", "id": pipeline.AVALON_CONTAINER_ID, @@ -132,7 +130,7 @@ class StaticMeshFBXLoader(api.Loader): container["objectName"]) # update metadata unreal_pipeline.imprint( - container_path, + container_path, { "representation": str(representation["_id"]), "parent": str(representation["parent"]) @@ -157,4 +155,3 @@ class StaticMeshFBXLoader(api.Loader): if len(asset_content) == 0: unreal.EditorAssetLibrary.delete_directory(parent_path) - From be59efa891031ac9bb64c9c3f16accb1f3208dca Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 1 Dec 2020 12:28:43 +0000 Subject: [PATCH 24/47] Fixed constraints not referencing the local objects --- pype/plugins/blender/load/load_rig.py | 61 ++++++++++++++++++--------- 1 file changed, 42 insertions(+), 19 deletions(-) diff --git a/pype/plugins/blender/load/load_rig.py b/pype/plugins/blender/load/load_rig.py index d8c14c5837..12017fdbb2 100644 --- a/pype/plugins/blender/load/load_rig.py +++ b/pype/plugins/blender/load/load_rig.py @@ -30,12 +30,26 @@ class BlendRigLoader(plugin.AssetLoader): bpy.data.armatures.remove(obj.data) elif obj.type == 'MESH': bpy.data.meshes.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) for child in obj_container.children: bpy.data.collections.remove(child) bpy.data.collections.remove(obj_container) + def make_local_and_metadata(self, obj, collection_name): + local_obj = plugin.prepare_data(obj, collection_name) + plugin.prepare_data(local_obj.data, collection_name) + + if not local_obj.get(blender.pipeline.AVALON_PROPERTY): + local_obj[blender.pipeline.AVALON_PROPERTY] = dict() + + avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] + avalon_info.update({"container_name": collection_name + '_CON'}) + + return local_obj + def _process( self, libpath, lib_container, collection_name, action, parent_collection @@ -56,7 +70,7 @@ class BlendRigLoader(plugin.AssetLoader): rig_container = parent.children[lib_container].make_local() rig_container.name = collection_name - meshes = [] + objects = [] armatures = [ obj for obj in rig_container.objects if obj.type == 'ARMATURE' @@ -64,33 +78,42 @@ class BlendRigLoader(plugin.AssetLoader): for child in rig_container.children: local_child = plugin.prepare_data(child, collection_name) - meshes.extend(local_child.objects) + objects.extend(local_child.objects) # for obj in bpy.data.objects: # obj.select_set(False) - # Link meshes first, then armatures. + constraints = [] + + for armature in armatures: + for bone in armature.pose.bones: + for constraint in bone.constraints: + if hasattr(constraint, 'target'): + constraints.append(constraint) + + # Link armatures after other objects. # The armature is unparented for all the non-local meshes, # when it is made local. 
- for obj in meshes + armatures: - local_obj = plugin.prepare_data(obj, collection_name) - plugin.prepare_data(local_obj.data, collection_name) + for obj in objects: + local_obj = self.make_local_and_metadata(obj, collection_name) - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() + if obj != local_obj: + for constraint in constraints: + if constraint.target == obj: + constraint.target = local_obj - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": collection_name + '_CON'}) + for armature in armatures: + local_obj = self.make_local_and_metadata(armature, collection_name) - if local_obj.type == 'ARMATURE': - if action is not None: - local_obj.animation_data.action = action - # Set link the drivers to the local object - if local_obj.data.animation_data: - for d in local_obj.data.animation_data.drivers: - for v in d.driver.variables: - for t in v.targets: - t.id = local_obj + if action is not None: + local_obj.animation_data.action = action + + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj rig_container.pop(blender.pipeline.AVALON_PROPERTY) From 26aeb1884e8fbc771b1941a6b3edfb6f4825151a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:36:10 +0100 Subject: [PATCH 25/47] added `join_query_keys` function to ftrack base class --- pype/modules/ftrack/lib/ftrack_base_handler.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/modules/ftrack/lib/ftrack_base_handler.py b/pype/modules/ftrack/lib/ftrack_base_handler.py index e928f2fb88..30efe0c99b 100644 --- a/pype/modules/ftrack/lib/ftrack_base_handler.py +++ b/pype/modules/ftrack/lib/ftrack_base_handler.py @@ -37,6 +37,11 @@ class BaseHandler(object): preactions = [] role_list = [] + @staticmethod + def join_query_keys(keys): + """Helper to join keys to query.""" + return ",".join(["\"{}\"".format(key) for key in keys]) + def __init__(self, session, plugins_presets=None): '''Expects a ftrack_api.Session instance''' self.log = Logger().get_logger(self.__class__.__name__) From 81981e58460d203f6bd4ed1b1c6dbc58937e50b4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:36:37 +0100 Subject: [PATCH 26/47] modified variables to match event --- .../events/action_push_frame_values_to_task.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 3a538b57eb..9cd085c553 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -14,6 +14,9 @@ class PushFrameValuesToTaskAction(ServerAction): label = "Pype Admin" variant = "- Push Frame values to Task" + hierarchy_entities_query = ( + "select id, parent_id from TypedContext where project_id is \"{}\"" + ) entities_query = ( "select id, name, parent_id, link from TypedContext" " where project_id is \"{}\" and object_type_id in ({})" @@ -28,13 +31,10 @@ class PushFrameValuesToTaskAction(ServerAction): " where entity_id in ({}) and configuration_id in ({})" ) - pushing_entity_types = {"Shot"} - hierarchical_custom_attribute_keys = {"frameStart", "frameEnd"} - custom_attribute_mapping = { - "frameStart": "fstart", - "frameEnd": "fend" - } - role_list = {"Pypeclub", 
"Administrator", "Project Manager"} + # configurable + interest_entity_types = ["Shot"] + interest_attributes = ["frameStart", "frameEnd"] + role_list = ["Pypeclub", "Administrator", "Project Manager"] def discover(self, session, entities, event): """ Validation """ From c29d5a817e67e0b34b2ef2645772121a2b0992fd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:38:02 +0100 Subject: [PATCH 27/47] different approach to get attribute configurations --- .../action_push_frame_values_to_task.py | 42 +++++++------------ 1 file changed, 15 insertions(+), 27 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 9cd085c553..9ceeac911e 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -85,35 +85,23 @@ class PushFrameValuesToTaskAction(ServerAction): job["status"] = "failed" session.commit() - def task_attributes(self, session): - task_object_type = session.query( - "ObjectType where name is \"Task\"" - ).one() + def attrs_configurations(self, session, object_ids): + attrs = session.query(self.cust_attrs_query.format( + self.join_query_keys(self.interest_attributes), + self.join_query_keys(object_ids) + )).all() - hier_attr_names = list( - self.custom_attribute_mapping.keys() - ) - entity_type_specific_names = list( - self.custom_attribute_mapping.values() - ) - joined_keys = self.join_keys( - hier_attr_names + entity_type_specific_names - ) - attribute_entities = session.query( - self.cust_attrs_query.format(joined_keys) - ).all() - - hier_attrs = [] - task_attrs = {} - for attr in attribute_entities: - attr_key = attr["key"] + output = {} + hiearchical = [] + for attr in attrs: if attr["is_hierarchical"]: - if attr_key in hier_attr_names: - hier_attrs.append(attr) - elif attr["object_type_id"] == task_object_type["id"]: - if attr_key in entity_type_specific_names: - task_attrs[attr_key] = attr["id"] - return task_attrs, hier_attrs + hiearchical.append(attr) + continue + obj_id = attr["object_type_id"] + if obj_id not in output: + output[obj_id] = [] + output[obj_id].append(attr) + return output, hiearchical def join_keys(self, items): return ",".join(["\"{}\"".format(item) for item in items]) From 53fa6bf6bdbcc03e3a41198ea5349de2a5614470 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:39:14 +0100 Subject: [PATCH 28/47] try to find configurations fot specified object types and skip processing if they don't have define them --- .../action_push_frame_values_to_task.py | 43 ++++++++++++++----- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 9ceeac911e..c2841ea4f7 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -110,24 +110,45 @@ class PushFrameValuesToTaskAction(ServerAction): self.log.debug("Querying project's entities \"{}\".".format( project_entity["full_name"] )) - pushing_entity_types = tuple( + interest_entity_types = tuple( ent_type.lower() - for ent_type in self.pushing_entity_types + for ent_type in self.interest_entity_types ) - destination_object_types = [] all_object_types = session.query("ObjectType").all() - for object_type in all_object_types: - lowered_name = object_type["name"].lower() - if ( - lowered_name 
== "task" - or lowered_name in pushing_entity_types - ): - destination_object_types.append(object_type) + object_types_by_low_name = { + object_type["name"].lower(): object_type + for object_type in all_object_types + } - destination_object_type_ids = tuple( + task_object_type = object_types_by_low_name["task"] + destination_object_types = [task_object_type] + for ent_type in interest_entity_types: + obj_type = object_types_by_low_name.get(ent_type) + if obj_type and obj_type not in destination_object_types: + destination_object_types.append(obj_type) + + destination_object_type_ids = set( obj_type["id"] for obj_type in destination_object_types ) + + # Find custom attributes definitions + attrs_by_obj_id, hier_attrs = self.attrs_configurations( + session, destination_object_type_ids + ) + # Filter destination object types if they have any object specific + # custom attribute + for obj_id in tuple(destination_object_type_ids): + if obj_id not in attrs_by_obj_id: + destination_object_type_ids.remove(obj_id) + + if not destination_object_type_ids: + # TODO report that there are not matching custom attributes + return { + "success": True, + "message": "Nothing has changed." + } + entities = session.query(self.entities_query.format( project_entity["id"], self.join_keys(destination_object_type_ids) From 94c4b3c8b2e72105d40a400ffffdf7adadd687f3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:40:43 +0100 Subject: [PATCH 29/47] removed report --- .../action_push_frame_values_to_task.py | 46 ------------------- 1 file changed, 46 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index c2841ea4f7..b081495cb6 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -196,59 +196,13 @@ class PushFrameValuesToTaskAction(ServerAction): non_task_entities, hier_values_by_entity_id ) - if task_missing_keys: - missing_keys_by_object_name["Task"] = task_missing_keys - if missing_keys_by_object_name: - self.report(missing_keys_by_object_name, event) - return True - def report(self, missing_keys_by_object_name, event): - splitter = {"type": "label", "value": "---"} - title = "Push Custom Attribute values report:" - items = [] - items.append({ - "type": "label", - "value": "# Pushing values was not complete" - }) - items.append({ - "type": "label", - "value": ( - "
<br/><br/>It was due to missing custom"
-                " attribute configurations for specific entity type/s."
-                " These configurations are not created automatically.<br/><br/>"
-            )
-        })
-
-        log_message_items = []
-        log_message_item_template = (
-            "Entity type \"{}\" does not have created Custom Attribute/s: {}"
-        )
-        for object_name, missing_attr_names in (
-            missing_keys_by_object_name.items()
-        ):
-            log_message_items.append(log_message_item_template.format(
-                object_name, self.join_keys(missing_attr_names)
-            ))
-            items.append(splitter)
-            items.append({
-                "type": "label",
-                "value": "## Entity type: {}".format(object_name)
-            })
-            items.append({
-                "type": "label",
-                "value": "<br/><br/>{}<br/><br/>".format("<br/>
".join(missing_attr_names)) - }) - self.log.warning(( - "Couldn't finish pushing attribute values because" - " few entity types miss Custom attribute configurations:\n{}" - ).format("\n".join(log_message_items))) - - self.show_interface(items, title, event) def get_hier_values(self, session, hier_attrs, focus_entity_ids): joined_entity_ids = self.join_keys(focus_entity_ids) From 3a26805df72a7ad2d0b13d429d91e3e338cb6f1a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:41:09 +0100 Subject: [PATCH 30/47] implemented function to return project's hiearrchy by entity ids --- .../ftrack/events/action_push_frame_values_to_task.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index b081495cb6..d31ad9c870 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -198,8 +198,18 @@ class PushFrameValuesToTaskAction(ServerAction): ) + def all_hierarchy_ids(self, session, project_entity): + parent_id_by_entity_id = {} + hierarchy_entities = session.query( + self.hierarchy_entities_query.format(project_entity["id"]) ) + for hierarchy_entity in hierarchy_entities: + entity_id = hierarchy_entity["id"] + parent_id = hierarchy_entity["parent_id"] + parent_id_by_entity_id[entity_id] = parent_id + return parent_id_by_entity_id + From daceb4ed1796674c8b8b8eec1568686d1d917844 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:41:34 +0100 Subject: [PATCH 31/47] added filtering process of entities by selection --- .../action_push_frame_values_to_task.py | 33 ++++++++++++++++--- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index d31ad9c870..6070e9df77 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -151,12 +151,19 @@ class PushFrameValuesToTaskAction(ServerAction): entities = session.query(self.entities_query.format( project_entity["id"], - self.join_keys(destination_object_type_ids) + self.join_query_keys(destination_object_type_ids) )).all() - entities_by_id = { - entity["id"]: entity - for entity in entities + self.log.debug("Preparing whole project hierarchy by ids.") + parent_id_by_entity_id = self.all_hierarchy_ids( + session, project_entity + ) + filtered_entities = self.filter_entities_by_selection( + entities, selected_ids, parent_id_by_entity_id + ) + entities_by_obj_id = { + obj_id: [] + for obj_id in destination_object_type_ids } self.log.debug("Filtering Task entities.") @@ -210,9 +217,27 @@ class PushFrameValuesToTaskAction(ServerAction): parent_id_by_entity_id[entity_id] = parent_id return parent_id_by_entity_id + def filter_entities_by_selection( + self, entities, selected_ids, parent_id_by_entity_id + ): + filtered_entities = [] + for entity in entities: + entity_id = entity["id"] + if entity_id in selected_ids: + filtered_entities.append(entity) + continue + parent_id = entity["parent_id"] + while True: + if parent_id in selected_ids: + filtered_entities.append(entity) + break + parent_id = parent_id_by_entity_id.get(parent_id) + if parent_id is None: + break + return filtered_entities def get_hier_values(self, session, hier_attrs, focus_entity_ids): joined_entity_ids = 
self.join_keys(focus_entity_ids) From b5a619251b0525fa886695dcb2feddbb1ea97c49 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:42:09 +0100 Subject: [PATCH 32/47] hierarchical values are queried across hierarchy --- .../action_push_frame_values_to_task.py | 90 +++++++++++++------ 1 file changed, 65 insertions(+), 25 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 6070e9df77..5a4e26f0b5 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -167,26 +167,26 @@ class PushFrameValuesToTaskAction(ServerAction): } self.log.debug("Filtering Task entities.") - task_entities_by_parent_id = collections.defaultdict(list) - non_task_entities = [] + focus_entity_ids = [] non_task_entity_ids = [] - for entity in entities: - if entity.entity_type.lower() != "task": - non_task_entities.append(entity) - non_task_entity_ids.append(entity["id"]) - continue + task_entity_ids = [] + for entity in filtered_entities: + entity_id = entity["id"] + focus_entity_ids.append(entity_id) + if entity.entity_type.lower() == "task": + task_entity_ids.append(entity_id) + else: + non_task_entity_ids.append(entity_id) - parent_id = entity["parent_id"] - if parent_id in entities_by_id: - task_entities_by_parent_id[parent_id].append(entity) + obj_id = entity["object_type_id"] + entities_by_obj_id[obj_id].append(entity_id) - task_attr_id_by_keys, hier_attrs = self.task_attributes(session) - - self.log.debug("Getting Custom attribute values from tasks' parents.") + self.log.debug("Getting Hierarchical custom attribute values parents.") hier_values_by_entity_id = self.get_hier_values( session, hier_attrs, - non_task_entity_ids + non_task_entity_ids, + parent_id_by_entity_id ) self.log.debug("Setting parents' values to task.") @@ -239,9 +239,30 @@ class PushFrameValuesToTaskAction(ServerAction): return filtered_entities - def get_hier_values(self, session, hier_attrs, focus_entity_ids): - joined_entity_ids = self.join_keys(focus_entity_ids) - hier_attr_ids = self.join_keys( + def get_hier_values( + self, + session, + hier_attrs, + focus_entity_ids, + parent_id_by_entity_id + ): + all_ids_with_parents = set() + for entity_id in focus_entity_ids: + all_ids_with_parents.add(entity_id) + _entity_id = entity_id + while True: + parent_id = parent_id_by_entity_id.get(_entity_id) + if ( + not parent_id + or parent_id in all_ids_with_parents + ): + break + all_ids_with_parents.add(parent_id) + _entity_id = parent_id + + joined_entity_ids = self.join_query_keys(all_ids_with_parents) + + hier_attr_ids = self.join_query_keys( tuple(hier_attr["id"] for hier_attr in hier_attrs) ) hier_attrs_key_by_id = { @@ -260,22 +281,41 @@ class PushFrameValuesToTaskAction(ServerAction): [values] = session._call(call_expr) values_per_entity_id = {} + for entity_id in all_ids_with_parents: + values_per_entity_id[entity_id] = {} + for key in hier_attrs_key_by_id.values(): + values_per_entity_id[entity_id][key] = None + for item in values["data"]: entity_id = item["entity_id"] key = hier_attrs_key_by_id[item["configuration_id"]] - if entity_id not in values_per_entity_id: - values_per_entity_id[entity_id] = {} - value = item["value"] - if value is not None: - values_per_entity_id[entity_id][key] = value + values_per_entity_id[entity_id][key] = item["value"] output = {} for entity_id in focus_entity_ids: - value = 
values_per_entity_id.get(entity_id) - if value: - output[entity_id] = value + output[entity_id] = {} + for key in hier_attrs_key_by_id.values(): + value = values_per_entity_id[entity_id][key] + tried_ids = set() + if value is None: + tried_ids.add(entity_id) + _entity_id = entity_id + while value is None: + parent_id = parent_id_by_entity_id.get(_entity_id) + if not parent_id: + break + value = values_per_entity_id[parent_id][key] + if value is not None: + break + _entity_id = parent_id + tried_ids.add(parent_id) + if value is not None: + for ent_id in tried_ids: + values_per_entity_id[ent_id][key] = value + + output[entity_id][key] = value return output def set_task_attr_values( From 13a414ec2d44a058f294eb0326da04e893f82dcd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:42:29 +0100 Subject: [PATCH 33/47] propagate values works differently --- .../ftrack/events/action_push_frame_values_to_task.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 5a4e26f0b5..1d55f6052f 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -61,8 +61,7 @@ class PushFrameValuesToTaskAction(ServerAction): session.commit() try: - project_entity = self.get_project_from_entity(entities[0]) - result = self.propagate_values(session, project_entity, event) + result = self.propagate_values(session, entities) job["status"] = "done" session.commit() @@ -103,10 +102,10 @@ class PushFrameValuesToTaskAction(ServerAction): output[obj_id].append(attr) return output, hiearchical - def join_keys(self, items): - return ",".join(["\"{}\"".format(item) for item in items]) + def propagate_values(self, session, selected_entities): + project_entity = self.get_project_from_entity(selected_entities[0]) + selected_ids = [entity["id"] for entity in selected_entities] - def propagate_values(self, session, project_entity, event): self.log.debug("Querying project's entities \"{}\".".format( project_entity["full_name"] )) From 8ec9bd7672477fa39611cc39618c4280d72a48e2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:42:42 +0100 Subject: [PATCH 34/47] discovery is possible at any hierarchical item --- pype/modules/ftrack/events/action_push_frame_values_to_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 1d55f6052f..a97a124e73 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -41,7 +41,7 @@ class PushFrameValuesToTaskAction(ServerAction): # Check if selection is valid for ent in event["data"]["selection"]: # Ignore entities that are not tasks or projects - if ent["entityType"].lower() == "show": + if ent["entityType"].lower() in ("task", "show"): return True return False From 4afca03a7d6669929d87421fa8ea786b7c67a976 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:43:18 +0100 Subject: [PATCH 35/47] set task values is easier --- .../action_push_frame_values_to_task.py | 60 ++++++++++--------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 
a97a124e73..cd2e4c6976 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -189,11 +189,12 @@ class PushFrameValuesToTaskAction(ServerAction): ) self.log.debug("Setting parents' values to task.") - task_missing_keys = self.set_task_attr_values( + self.set_task_attr_values( session, - task_entities_by_parent_id, + hier_attrs, + task_entity_ids, hier_values_by_entity_id, - task_attr_id_by_keys + parent_id_by_entity_id ) self.log.debug("Setting values to entities themselves.") @@ -320,38 +321,41 @@ class PushFrameValuesToTaskAction(ServerAction): def set_task_attr_values( self, session, - task_entities_by_parent_id, + hier_attrs, + task_entity_ids, hier_values_by_entity_id, - task_attr_id_by_keys + parent_id_by_entity_id ): - missing_keys = set() - for parent_id, values in hier_values_by_entity_id.items(): - task_entities = task_entities_by_parent_id[parent_id] - for hier_key, value in values.items(): - key = self.custom_attribute_mapping[hier_key] - if key not in task_attr_id_by_keys: - missing_keys.add(key) - continue + hier_attr_id_by_key = { + attr["key"]: attr["id"] + for attr in hier_attrs + } + for task_id in task_entity_ids: + parent_id = parent_id_by_entity_id.get(task_id) or {} + parent_values = hier_values_by_entity_id.get(parent_id) + if not parent_values: + continue - for task_entity in task_entities: - _entity_key = collections.OrderedDict({ - "configuration_id": task_attr_id_by_keys[key], - "entity_id": task_entity["id"] - }) + hier_values_by_entity_id[task_id] = {} + for key, value in parent_values.items(): + hier_values_by_entity_id[task_id][key] = value + configuration_id = hier_attr_id_by_key[key] + _entity_key = collections.OrderedDict({ + "configuration_id": configuration_id, + "entity_id": task_id + }) - session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", - _entity_key, - "value", - ftrack_api.symbol.NOT_SET, - value - ) + session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + "ContextCustomAttributeValue", + _entity_key, + "value", + ftrack_api.symbol.NOT_SET, + value ) + ) session.commit() - return missing_keys - def push_values_to_entities( self, session, From f92a918f2398da69c1e46062d0c3ccaec21d97c9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:43:47 +0100 Subject: [PATCH 36/47] push_values_to_entities was simplified with cached data --- .../action_push_frame_values_to_task.py | 79 +++++-------------- 1 file changed, 19 insertions(+), 60 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index cd2e4c6976..627d8ad859 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -198,12 +198,14 @@ class PushFrameValuesToTaskAction(ServerAction): ) self.log.debug("Setting values to entities themselves.") - missing_keys_by_object_name = self.push_values_to_entities( + self.push_values_to_entities( session, - non_task_entities, + entities_by_obj_id, + attrs_by_obj_id, hier_values_by_entity_id ) + return True def all_hierarchy_ids(self, session, project_entity): parent_id_by_entity_id = {} @@ -359,64 +361,28 @@ class PushFrameValuesToTaskAction(ServerAction): def push_values_to_entities( self, session, - non_task_entities, + entities_by_obj_id, + attrs_by_obj_id, 
hier_values_by_entity_id ): - object_types = session.query( - "ObjectType where name in ({})".format( - self.join_keys(self.pushing_entity_types) - ) - ).all() - object_type_names_by_id = { - object_type["id"]: object_type["name"] - for object_type in object_types - } - joined_keys = self.join_keys( - self.custom_attribute_mapping.values() - ) - attribute_entities = session.query( - self.cust_attrs_query.format(joined_keys) - ).all() - - attrs_by_obj_id = {} - for attr in attribute_entities: - if attr["is_hierarchical"]: + for object_id, entity_ids in entities_by_obj_id.items(): + attrs = attrs_by_obj_id.get(object_id) + if not attrs or not entity_ids: continue - obj_id = attr["object_type_id"] - if obj_id not in object_type_names_by_id: - continue - - if obj_id not in attrs_by_obj_id: - attrs_by_obj_id[obj_id] = {} - - attr_key = attr["key"] - attrs_by_obj_id[obj_id][attr_key] = attr["id"] - - entities_by_obj_id = collections.defaultdict(list) - for entity in non_task_entities: - entities_by_obj_id[entity["object_type_id"]].append(entity) - - missing_keys_by_object_id = collections.defaultdict(set) - for obj_type_id, attr_keys in attrs_by_obj_id.items(): - entities = entities_by_obj_id.get(obj_type_id) - if not entities: - continue - - for entity in entities: - values = hier_values_by_entity_id.get(entity["id"]) - if not values: - continue - - for hier_key, value in values.items(): - key = self.custom_attribute_mapping[hier_key] - if key not in attr_keys: - missing_keys_by_object_id[obj_type_id].add(key) + for attr in attrs: + for entity_id in entity_ids: + value = ( + hier_values_by_entity_id + .get(entity_id, {}) + .get(attr["key"]) + ) + if value is None: continue _entity_key = collections.OrderedDict({ - "configuration_id": attr_keys[key], - "entity_id": entity["id"] + "configuration_id": attr["id"], + "entity_id": entity_id }) session.recorded_operations.push( @@ -430,13 +396,6 @@ class PushFrameValuesToTaskAction(ServerAction): ) session.commit() - missing_keys_by_object_name = {} - for obj_id, missing_keys in missing_keys_by_object_id.items(): - obj_name = object_type_names_by_id[obj_id] - missing_keys_by_object_name[obj_name] = missing_keys - - return missing_keys_by_object_name - def register(session, plugins_presets={}): PushFrameValuesToTaskAction(session, plugins_presets).register() From 5595e7f45c039571a0a9e553eae7a7438b69b195 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:43:59 +0100 Subject: [PATCH 37/47] action is not ignored by default --- pype/modules/ftrack/events/action_push_frame_values_to_task.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 627d8ad859..640154f47c 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -7,9 +7,6 @@ from pype.modules.ftrack.lib import ServerAction class PushFrameValuesToTaskAction(ServerAction): """Action for testing purpose or as base for new actions.""" - # Ignore event handler by default - ignore_me = True - identifier = "admin.push_frame_values_to_task" label = "Pype Admin" variant = "- Push Frame values to Task" From 8ba717f8287eb63c4c3d18321ed0adc8d0999553 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:52:16 +0100 Subject: [PATCH 38/47] added docstring --- .../action_push_frame_values_to_task.py | 32 ++++++++++++++++++- 1 file changed, 31 insertions(+), 1 
deletion(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 640154f47c..74fee054f0 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -5,7 +5,37 @@ from pype.modules.ftrack.lib import ServerAction class PushFrameValuesToTaskAction(ServerAction): - """Action for testing purpose or as base for new actions.""" + """Action push hierarchical custom attribute values to non hierarchical. + + Hierarchical value is also pushed to their task entities. + + Action has 3 configurable attributes: + - `role_list`: List of use roles that can discover the action. + - `interest_attributes`: Keys of custom attributes that will be looking + for to push values. Attribute key must have both custom attribute types + hierarchical and on specific object type (entity type). + - `interest_entity_types`: Entity types that will be in focus of pushing + hierarchical to object type's custom attribute. + + EXAMPLE: + * Before action + |_ Project + |_ Shot1 + - hierarchical custom attribute value: `frameStart`: 1001 + - custom attribute for `Shot`: frameStart: 1 + |_ Task1 + - hierarchical custom attribute value: `frameStart`: 10 + - custom attribute for `Task`: frameStart: 0 + + * After action + |_ Project + |_ Shot1 + - hierarchical custom attribute value: `frameStart`: 1001 + - custom attribute for `Shot`: frameStart: 1001 + |_ Task1 + - hierarchical custom attribute value: `frameStart`: 1001 + - custom attribute for `Task`: frameStart: 1001 + """ identifier = "admin.push_frame_values_to_task" label = "Pype Admin" From f8c7cfd292959518d4a3082c2bc9640d37052ebb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 10 Dec 2020 16:53:51 +0100 Subject: [PATCH 39/47] modified name and label --- .../ftrack/events/action_push_frame_values_to_task.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 74fee054f0..1a5f32c9ea 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -4,7 +4,7 @@ import ftrack_api from pype.modules.ftrack.lib import ServerAction -class PushFrameValuesToTaskAction(ServerAction): +class PushHierValuesToNonHier(ServerAction): """Action push hierarchical custom attribute values to non hierarchical. Hierarchical value is also pushed to their task entities. 
@@ -37,9 +37,9 @@ class PushFrameValuesToTaskAction(ServerAction): - custom attribute for `Task`: frameStart: 1001 """ - identifier = "admin.push_frame_values_to_task" + identifier = "admin.push_hier_values_to_non_hier" label = "Pype Admin" - variant = "- Push Frame values to Task" + variant = "- Push Hierarchical values To Non-Hierarchical" hierarchy_entities_query = ( "select id, parent_id from TypedContext where project_id is \"{}\"" @@ -425,4 +425,4 @@ class PushFrameValuesToTaskAction(ServerAction): def register(session, plugins_presets={}): - PushFrameValuesToTaskAction(session, plugins_presets).register() + PushHierValuesToNonHier(session, plugins_presets).register() From 09625b98f85531576300230eb42cc305689701d0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 11 Dec 2020 09:36:39 +0100 Subject: [PATCH 40/47] add selection check after filtering --- .../ftrack/events/action_push_frame_values_to_task.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pype/modules/ftrack/events/action_push_frame_values_to_task.py b/pype/modules/ftrack/events/action_push_frame_values_to_task.py index 1a5f32c9ea..de61728a62 100644 --- a/pype/modules/ftrack/events/action_push_frame_values_to_task.py +++ b/pype/modules/ftrack/events/action_push_frame_values_to_task.py @@ -207,6 +207,12 @@ class PushHierValuesToNonHier(ServerAction): obj_id = entity["object_type_id"] entities_by_obj_id[obj_id].append(entity_id) + if not non_task_entity_ids: + return { + "success": True, + "message": "Nothing to do in your selection." + } + self.log.debug("Getting Hierarchical custom attribute values parents.") hier_values_by_entity_id = self.get_hier_values( session, From aec53f4d803742f6579c76d9cb3c25e312615711 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 11 Dec 2020 12:32:15 +0100 Subject: [PATCH 41/47] fix palette and image sequence loader --- pype/hosts/harmony/js/PypeHarmony.js | 5 ++ .../harmony/js/loaders/ImageSequenceLoader.js | 64 +++++++++++++------ pype/plugins/harmony/load/load_palette.py | 2 +- 3 files changed, 49 insertions(+), 22 deletions(-) diff --git a/pype/hosts/harmony/js/PypeHarmony.js b/pype/hosts/harmony/js/PypeHarmony.js index 504bcc9ba2..a98dbd52cd 100644 --- a/pype/hosts/harmony/js/PypeHarmony.js +++ b/pype/hosts/harmony/js/PypeHarmony.js @@ -1,7 +1,12 @@ +/* global include */ // *************************************************************************** // * Pype Harmony Host * // *************************************************************************** +var LD_OPENHARMONY_PATH = System.getenv('LIB_OPENHARMONY_PATH'); +include(LD_OPENHARMONY_PATH + '/openHarmony.js'); +this.__proto__['$'] = $; + /** * @namespace diff --git a/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js b/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js index 7801f65cdd..3e2c853146 100644 --- a/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js +++ b/pype/hosts/harmony/js/loaders/ImageSequenceLoader.js @@ -24,13 +24,42 @@ var ImageSequenceLoader = function() { }; +ImageSequenceLoader.getCurrentGroup = function () { + var doc = $.scn; + var nodeView = ''; + for (var i = 0; i < 200; i++) { + nodeView = 'View' + (i); + if (view.type(nodeView) == 'Node View') { + break; + } + } + + if (!nodeView) { + $.alert('You must have a Node View open!', + 'No Node View is currently open!\n' + + 'Open a Node View and Try Again.', + 'OK!'); + return; + } + + var currentGroup; + if (!nodeView) { + currentGroup = doc.root; + } else { + currentGroup = doc.$node(view.group(nodeView)); + } + + 
return currentGroup.path; +}; + + /** * Get unique column name. * @function * @param {string} columnPrefix Column name. * @return {string} Unique column name. */ -ImageSequenceLoader.prototype.getUniqueColumnName = function(columnPrefix) { +ImageSequenceLoader.getUniqueColumnName = function(columnPrefix) { var suffix = 0; // finds if unique name for a column var columnName = columnPrefix; @@ -63,6 +92,12 @@ ImageSequenceLoader.prototype.getUniqueColumnName = function(columnPrefix) { * ]; */ ImageSequenceLoader.prototype.importFiles = function(args) { + var PNGTransparencyMode = 0; // Premultiplied wih Black + var TGATransparencyMode = 0; // Premultiplied wih Black + var SGITransparencyMode = 0; // Premultiplied wih Black + var LayeredPSDTransparencyMode = 1; // Straight + var FlatPSDTransparencyMode = 2; // Premultiplied wih White + var doc = $.scn; var files = args[0]; var asset = args[1]; @@ -78,20 +113,8 @@ ImageSequenceLoader.prototype.importFiles = function(args) { } // Get the current group - var nodeViewWidget = $.app.getWidgetByName('Node View'); - if (!nodeViewWidget) { - $.alert('You must have a Node View open!', 'No Node View!', 'OK!'); - return; - } + var currentGroup = doc.$node(ImageSequenceLoader.getCurrentGroup()); - nodeViewWidget.setFocus(); - var nodeView = view.currentView(); - var currentGroup = null; - if (!nodeView) { - currentGroup = doc.root; - } else { - currentGroup = doc.$node(view.group(nodeView)); - } // Get a unique iterative name for the container read node var num = 0; var name = ''; @@ -99,7 +122,6 @@ ImageSequenceLoader.prototype.importFiles = function(args) { name = asset + '_' + (num++) + '_' + subset; } while (currentGroup.getNodeByName(name) != null); - extension = filename.substr(pos+1).toLowerCase(); if (extension == 'jpeg') { extension = 'jpg'; @@ -123,7 +145,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) { return null; // no read to add. 
} - var uniqueColumnName = this.getUniqueColumnName(name); + var uniqueColumnName = ImageSequenceLoader.getUniqueColumnName(name); column.add(uniqueColumnName, 'DRAWING'); column.setElementIdOfDrawing(uniqueColumnName, elemId); var read = node.add(currentGroup, name, 'READ', 0, 0, 0); @@ -139,19 +161,19 @@ ImageSequenceLoader.prototype.importFiles = function(args) { read, frame.current(), 'applyMatteToColor' ); if (extension === 'png') { - transparencyModeAttr.setValue(this.PNGTransparencyMode); + transparencyModeAttr.setValue(PNGTransparencyMode); } if (extension === 'tga') { - transparencyModeAttr.setValue(this.TGATransparencyMode); + transparencyModeAttr.setValue(TGATransparencyMode); } if (extension === 'sgi') { - transparencyModeAttr.setValue(this.SGITransparencyMode); + transparencyModeAttr.setValue(SGITransparencyMode); } if (extension === 'psd') { - transparencyModeAttr.setValue(this.FlatPSDTransparencyMode); + transparencyModeAttr.setValue(FlatPSDTransparencyMode); } if (extension === 'jpg') { - transparencyModeAttr.setValue(this.LayeredPSDTransparencyMode); + transparencyModeAttr.setValue(LayeredPSDTransparencyMode); } var drawingFilePath; diff --git a/pype/plugins/harmony/load/load_palette.py b/pype/plugins/harmony/load/load_palette.py index fd3f99b06d..5dd5171aa1 100644 --- a/pype/plugins/harmony/load/load_palette.py +++ b/pype/plugins/harmony/load/load_palette.py @@ -7,7 +7,7 @@ from avalon import api, harmony class ImportPaletteLoader(api.Loader): """Import palettes.""" - families = ["palette"] + families = ["palette", "harmony.palette"] representations = ["plt"] label = "Import Palette" From 04da60da6f296e12dc5cdb82db7f595627bf8f02 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 11 Dec 2020 14:42:01 +0100 Subject: [PATCH 42/47] imlemented action that check and reset invalid metadata key --- .../tvpaint/publish/collect_workfile_data.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/pype/plugins/tvpaint/publish/collect_workfile_data.py b/pype/plugins/tvpaint/publish/collect_workfile_data.py index c6179b76cf..4116de1796 100644 --- a/pype/plugins/tvpaint/publish/collect_workfile_data.py +++ b/pype/plugins/tvpaint/publish/collect_workfile_data.py @@ -6,6 +6,36 @@ import avalon.api from avalon.tvpaint import pipeline, lib +class ResetTVPaintWorkfileMetadata(pyblish.api.Action): + """Fix invalid metadata in workfile.""" + label = "Reset invalid workfile metadata" + on = "failed" + + def process(self, context, plugin): + metadata_keys = { + pipeline.SECTION_NAME_CONTEXT: {}, + pipeline.SECTION_NAME_INSTANCES: [], + pipeline.SECTION_NAME_CONTAINERS: [] + } + for metadata_key, default in metadata_keys.items(): + json_string = pipeline.get_workfile_metadata_string(metadata_key) + if not json_string: + continue + + try: + return json.loads(json_string) + except Exception: + self.log.warning( + ( + "Couldn't parse metadata from key \"{}\"." + " Will reset to default value \"{}\"." 
+ " Loaded value was: {}" + ).format(metadata_key, default, json_string), + exc_info=True + ) + pipeline.write_workfile_metadata(metadata_key, default) + + class CollectWorkfileData(pyblish.api.ContextPlugin): label = "Collect Workfile Data" order = pyblish.api.CollectorOrder - 1.01 From 7540e5d393d3ae00ad7109bdfe72a897ef45e0c8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 11 Dec 2020 14:42:13 +0100 Subject: [PATCH 43/47] add repair action to tvpaint collector --- pype/plugins/tvpaint/publish/collect_workfile_data.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/tvpaint/publish/collect_workfile_data.py b/pype/plugins/tvpaint/publish/collect_workfile_data.py index 4116de1796..6af659297d 100644 --- a/pype/plugins/tvpaint/publish/collect_workfile_data.py +++ b/pype/plugins/tvpaint/publish/collect_workfile_data.py @@ -40,6 +40,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): label = "Collect Workfile Data" order = pyblish.api.CollectorOrder - 1.01 hosts = ["tvpaint"] + actions = [ResetTVPaintWorkfileMetadata] def process(self, context): current_project_id = lib.execute_george("tv_projectcurrentid") From 5b7cdd5a6c21581dab795a26696de1e9b1c3875f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 15 Dec 2020 16:45:37 +0100 Subject: [PATCH 44/47] take into account vray master aov switch --- pype/hosts/maya/expected_files.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/pype/hosts/maya/expected_files.py b/pype/hosts/maya/expected_files.py index a2ddec1640..52c8893e4b 100644 --- a/pype/hosts/maya/expected_files.py +++ b/pype/hosts/maya/expected_files.py @@ -614,6 +614,15 @@ class ExpectedFilesVray(AExpectedFiles): if default_ext == "exr (multichannel)" or default_ext == "exr (deep)": default_ext = "exr" + enabled_aovs.append( + (u"beauty", default_ext) + ) + + if not self.maya_is_true( + cmds.getAttr("vraySettings.relements_enableall") + ): + return enabled_aovs + # filter all namespace prefixed AOVs - they are pulled in from # references and are not rendered. 
vr_aovs = [ @@ -635,9 +644,7 @@ class ExpectedFilesVray(AExpectedFiles): # todo: find how vray set format for AOVs enabled_aovs.append( (self._get_vray_aov_name(aov), default_ext)) - enabled_aovs.append( - (u"beauty", default_ext) - ) + return enabled_aovs def _get_vray_aov_name(self, node): From c55cbbccddca3ef8a1fb4d0dcca3aef341c51524 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 15 Dec 2020 17:09:22 +0000 Subject: [PATCH 45/47] respecting space in path --- setup/nuke/nuke_path/write_to_read.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup/nuke/nuke_path/write_to_read.py b/setup/nuke/nuke_path/write_to_read.py index 7ea9220ad8..58985947bd 100644 --- a/setup/nuke/nuke_path/write_to_read.py +++ b/setup/nuke/nuke_path/write_to_read.py @@ -76,7 +76,7 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame): def create_read_node(ndata, comp_start): - read = nuke.createNode('Read', 'file ' + ndata['filepath']) + read = nuke.createNode('Read', 'file "' + ndata['filepath'] + '"') read.knob('colorspace').setValue(int(ndata['colorspace'])) read.knob('raw').setValue(ndata['rawdata']) read.knob('first').setValue(int(ndata['firstframe'])) From 3b4002e9e45e9c7f9abffcb50046568b0def4d65 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 16 Dec 2020 20:26:45 +0100 Subject: [PATCH 46/47] fix shared data access violation, check for oiio --- .../global/publish/extract_scanline_exr.py | 17 ++++---- .../maya/publish/submit_maya_deadline.py | 43 ++++++++++--------- 2 files changed, 31 insertions(+), 29 deletions(-) diff --git a/pype/plugins/global/publish/extract_scanline_exr.py b/pype/plugins/global/publish/extract_scanline_exr.py index ca62476ab2..9c3073d61d 100644 --- a/pype/plugins/global/publish/extract_scanline_exr.py +++ b/pype/plugins/global/publish/extract_scanline_exr.py @@ -46,6 +46,10 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin): stagingdir = os.path.normpath(repre.get("stagingDir")) oiio_tool_path = os.getenv("PYPE_OIIO_PATH", "") + if not os.path.exists(oiio_tool_path): + self.log.error( + "OIIO tool not found in {}".format(oiio_tool_path)) + raise AssertionError("OIIO tool not found") for file in input_files: @@ -53,14 +57,11 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin): temp_name = os.path.join(stagingdir, "__{}".format(file)) # move original render to temp location shutil.move(original_name, temp_name) - oiio_cmd = [] - oiio_cmd.append(oiio_tool_path) - oiio_cmd.append( - os.path.join(stagingdir, temp_name) - ) - oiio_cmd.append("--scanline") - oiio_cmd.append("-o") - oiio_cmd.append(os.path.join(stagingdir, original_name)) + oiio_cmd = [ + oiio_tool_path, + os.path.join(stagingdir, temp_name), "--scanline", "-o", + os.path.join(stagingdir, original_name) + ] subprocess_exr = " ".join(oiio_cmd) self.log.info(f"running: {subprocess_exr}") diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py index 0ae19cbb81..1354e3d512 100644 --- a/pype/plugins/maya/publish/submit_maya_deadline.py +++ b/pype/plugins/maya/publish/submit_maya_deadline.py @@ -42,7 +42,7 @@ from pype.hosts.maya import lib # /products/deadline/8.0/1_User%20Manual/manual # /manual-submission.html#job-info-file-options -payload_skeleton = { +payload_skeleton_template = { "JobInfo": { "BatchName": None, # Top-level group name "Name": None, # Job name, as seen in Monitor @@ -268,6 +268,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): """Plugin entry point.""" instance.data["toBeRenderedOn"] 
= "deadline" self._instance = instance + self.payload_skeleton = copy.deepcopy(payload_skeleton_template) self._deadline_url = os.environ.get( "DEADLINE_REST_URL", "http://localhost:8082") assert self._deadline_url, "Requires DEADLINE_REST_URL" @@ -388,32 +389,32 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.log.info("- {}: {}".format(k, v)) self.log.info("-" * 20) - frame_pattern = payload_skeleton["JobInfo"]["Frames"] - payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format( + frame_pattern = self.payload_skeleton["JobInfo"]["Frames"] + self.payload_skeleton["JobInfo"]["Frames"] = frame_pattern.format( start=int(self._instance.data["frameStartHandle"]), end=int(self._instance.data["frameEndHandle"]), step=int(self._instance.data["byFrameStep"])) - payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( + self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( "mayaRenderPlugin", "MayaPype") - payload_skeleton["JobInfo"]["BatchName"] = filename + self.payload_skeleton["JobInfo"]["BatchName"] = filename # Job name, as seen in Monitor - payload_skeleton["JobInfo"]["Name"] = jobname + self.payload_skeleton["JobInfo"]["Name"] = jobname # Arbitrary username, for visualisation in Monitor - payload_skeleton["JobInfo"]["UserName"] = deadline_user + self.payload_skeleton["JobInfo"]["UserName"] = deadline_user # Set job priority - payload_skeleton["JobInfo"]["Priority"] = self._instance.data.get( + self.payload_skeleton["JobInfo"]["Priority"] = self._instance.data.get( "priority", 50) # Optional, enable double-click to preview rendered # frames from Deadline Monitor - payload_skeleton["JobInfo"]["OutputDirectory0"] = \ + self.payload_skeleton["JobInfo"]["OutputDirectory0"] = \ os.path.dirname(output_filename_0).replace("\\", "/") - payload_skeleton["JobInfo"]["OutputFilename0"] = \ + self.payload_skeleton["JobInfo"]["OutputFilename0"] = \ output_filename_0.replace("\\", "/") - payload_skeleton["JobInfo"]["Comment"] = comment - payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer + self.payload_skeleton["JobInfo"]["Comment"] = comment + self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer # Adding file dependencies. 
dependencies = instance.context.data["fileDependencies"] @@ -421,7 +422,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if self.asset_dependencies: for dependency in dependencies: key = "AssetDependency" + str(dependencies.index(dependency)) - payload_skeleton["JobInfo"][key] = dependency + self.payload_skeleton["JobInfo"][key] = dependency # Handle environments ----------------------------------------------- # We need those to pass them to pype for it to set correct context @@ -441,7 +442,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): if key in os.environ}, **api.Session) environment["PYPE_LOG_NO_COLORS"] = "1" environment["PYPE_MAYA_VERSION"] = cmds.about(v=True) - payload_skeleton["JobInfo"].update({ + self.payload_skeleton["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( key=key, value=environment[key] @@ -449,9 +450,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): }) # Add options from RenderGlobals------------------------------------- render_globals = instance.data.get("renderGlobals", {}) - payload_skeleton["JobInfo"].update(render_globals) + self.payload_skeleton["JobInfo"].update(render_globals) - # Submit preceeding export jobs ------------------------------------- + # Submit preceding export jobs ------------------------------------- export_job = None assert not all(x in instance.data["families"] for x in ['vrayscene', 'assscene']), ( @@ -731,7 +732,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): instance.data["deadlineSubmissionJob"] = response.json() def _get_maya_payload(self, data): - payload = copy.deepcopy(payload_skeleton) + payload = copy.deepcopy(self.payload_skeleton) if not self.asset_dependencies: job_info_ext = {} @@ -765,7 +766,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): return payload def _get_vray_export_payload(self, data): - payload = copy.deepcopy(payload_skeleton) + payload = copy.deepcopy(self.payload_skeleton) vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) @@ -816,7 +817,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): script = os.path.normpath(module_path) - payload = copy.deepcopy(payload_skeleton) + payload = copy.deepcopy(self.payload_skeleton) job_info_ext = { # Job name, as seen in Monitor "Name": "Export {} [{}-{}]".format( @@ -870,7 +871,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): return payload def _get_vray_render_payload(self, data): - payload = copy.deepcopy(payload_skeleton) + payload = copy.deepcopy(self.payload_skeleton) vray_settings = cmds.ls(type="VRaySettingsNode") node = vray_settings[0] template = cmds.getAttr("{}.vrscene_filename".format(node)) @@ -905,7 +906,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): return payload def _get_arnold_render_payload(self, data): - payload = copy.deepcopy(payload_skeleton) + payload = copy.deepcopy(self.payload_skeleton) ass_file, _ = os.path.splitext(data["output_filename_0"]) first_file = ass_file + ".ass" job_info_ext = { From 6cd6c7b96631dc127cfb13e7ad4fe2e16cc8ded7 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 16 Dec 2020 22:15:46 +0100 Subject: [PATCH 47/47] bump version --- pype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/version.py b/pype/version.py index abe7e03a96..fc92f01ee6 100644 --- a/pype/version.py +++ b/pype/version.py @@ -1 +1 @@ -__version__ = "2.14.2" +__version__ = "2.14.3"
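
Note on the payload_skeleton change in PATCH 46: the module-level job-info dictionary was previously mutated in place by every submission, so values from one render layer could leak into the next (the "shared data access violation" named in the subject). The commit addresses this by keeping a read-only payload_skeleton_template at module level and giving each plugin run its own copy.deepcopy of it. A minimal, self-contained sketch of that pattern follows; the names (PAYLOAD_TEMPLATE, build_payload_shared, build_payload_copied) are illustrative only and are not part of the patches.

import copy

# Module-level template, analogous to payload_skeleton_template in
# submit_maya_deadline.py. Shared by every submission.
PAYLOAD_TEMPLATE = {
    "JobInfo": {"Name": None, "Frames": "{start}-{end}"},
    "PluginInfo": {"SceneFile": None},
}


def build_payload_shared(name):
    # Buggy variant: mutates the shared module-level dict, so values
    # from one submission leak into the next one.
    payload = PAYLOAD_TEMPLATE
    payload["JobInfo"]["Name"] = name
    return payload


def build_payload_copied(name):
    # Fixed variant (the approach taken in PATCH 46): work on a private
    # deep copy, leaving the template untouched between submissions.
    payload = copy.deepcopy(PAYLOAD_TEMPLATE)
    payload["JobInfo"]["Name"] = name
    return payload


if __name__ == "__main__":
    build_payload_shared("renderlayer_A")
    # The shared template now carries data from the previous call.
    print(PAYLOAD_TEMPLATE["JobInfo"]["Name"])  # renderlayer_A

    build_payload_copied("renderlayer_B")
    # With the per-call deep copy the template stays clean.
    print(PAYLOAD_TEMPLATE["JobInfo"]["Name"])  # still renderlayer_A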