diff --git a/pype/hosts/blender/plugin.py b/pype/hosts/blender/plugin.py index 07080a86c4..d0b81148c3 100644 --- a/pype/hosts/blender/plugin.py +++ b/pype/hosts/blender/plugin.py @@ -7,7 +7,7 @@ import bpy from avalon import api -VALID_EXTENSIONS = [".blend"] +VALID_EXTENSIONS = [".blend", ".json"] def asset_name( @@ -29,15 +29,19 @@ def get_unique_number( c for c in bpy.data.collections if c.name == 'AVALON_CONTAINERS' ] - loaded_assets = [] + containers = [] + # First, add the children of avalon containers for c in avalon_containers: - loaded_assets.extend(c.children) - collections_names = [ - c.name for c in loaded_assets + containers.extend(c.children) + # then keep looping to include all the children + for c in containers: + containers.extend(c.children) + container_names = [ + c.name for c in containers ] count = 1 name = f"{asset}_{count:0>2}_{subset}_CON" - while name in collections_names: + while name in container_names: count += 1 name = f"{asset}_{count:0>2}_{subset}_CON" return f"{count:0>2}" @@ -59,20 +63,20 @@ def create_blender_context(active: Optional[bpy.types.Object] = None, if not isinstance(selected, list): selected = [selected] + override_context = bpy.context.copy() + for win in bpy.context.window_manager.windows: for area in win.screen.areas: if area.type == 'VIEW_3D': for region in area.regions: if region.type == 'WINDOW': - override_context = { - 'window': win, - 'screen': win.screen, - 'area': area, - 'region': region, - 'scene': bpy.context.scene, - 'active_object': active, - 'selected_objects': selected - } + override_context['window'] = win + override_context['screen'] = win.screen + override_context['area'] = area + override_context['region'] = region + override_context['scene'] = bpy.context.scene + override_context['active_object'] = active + override_context['selected_objects'] = selected return override_context raise Exception("Could not create a custom Blender context.") @@ -175,7 +179,17 @@ class AssetLoader(api.Loader): # just re-using the collection assert Path(self.fname).exists(), f"{self.fname} doesn't exist." - self.process_asset( + asset = context["asset"]["name"] + subset = context["subset"]["name"] + unique_number = get_unique_number( + asset, subset + ) + namespace = namespace or f"{asset}_{unique_number}" + name = name or asset_name( + asset, subset, unique_number + ) + + nodes = self.process_asset( context=context, name=name, namespace=namespace, @@ -183,25 +197,24 @@ class AssetLoader(api.Loader): ) # Only containerise if anything was loaded by the Loader. - nodes = self[:] if not nodes: return None # Only containerise if it's not already a collection from a .blend file. 
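+        # The instance collection is looked up by "<asset>_<number>_<subset>_CON" below,
+        # the same naming pattern get_unique_number checks when avoiding clashes.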
- representation = context["representation"]["name"] - if representation != "blend": - from avalon.blender.pipeline import containerise - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__, - ) + # representation = context["representation"]["name"] + # if representation != "blend": + # from avalon.blender.pipeline import containerise + # return containerise( + # name=name, + # namespace=namespace, + # nodes=nodes, + # context=context, + # loader=self.__class__.__name__, + # ) asset = context["asset"]["name"] subset = context["subset"]["name"] - instance_name = asset_name(asset, subset, namespace) + instance_name = asset_name(asset, subset, unique_number) + '_CON' return self._get_instance_collection(instance_name, nodes) diff --git a/pype/plugins/blender/create/create_animation.py b/pype/plugins/blender/create/create_animation.py index de74f9a358..acfd6ac1f3 100644 --- a/pype/plugins/blender/create/create_animation.py +++ b/pype/plugins/blender/create/create_animation.py @@ -2,12 +2,11 @@ import bpy -from avalon import api -from avalon.blender import Creator, lib +from avalon import api, blender import pype.hosts.blender.plugin -class CreateAnimation(Creator): +class CreateAnimation(blender.Creator): """Animation output for character rigs""" name = "animationMain" @@ -16,37 +15,16 @@ class CreateAnimation(Creator): icon = "male" def process(self): - asset = self.data["asset"] subset = self.data["subset"] name = pype.hosts.blender.plugin.asset_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) - - # Add the rig object and all the children meshes to - # a set and link them all at the end to avoid duplicates. - # Blender crashes if trying to link an object that is already linked. - # This links automatically the children meshes if they were not - # selected, and doesn't link them twice if they, insted, - # were manually selected by the user. 
- objects_to_link = set() + blender.lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): - - for obj in lib.get_selection(): - - objects_to_link.add(obj) - - if obj.type == 'ARMATURE': - - for subobj in obj.children: - - objects_to_link.add(subobj) - - for obj in objects_to_link: - - collection.objects.link(obj) + for obj in blender.lib.get_selection(): + collection.objects.link(obj) return collection diff --git a/pype/plugins/blender/create/create_setdress.py b/pype/plugins/blender/create/create_setdress.py new file mode 100644 index 0000000000..06acf716e5 --- /dev/null +++ b/pype/plugins/blender/create/create_setdress.py @@ -0,0 +1,24 @@ +import bpy + +from avalon import api, blender +import pype.hosts.blender.plugin + +class CreateSetDress(blender.Creator): + """A grouped package of loaded content""" + + name = "setdressMain" + label = "Set Dress" + family = "setdress" + icon = "cubes" + defaults = ["Main", "Anim"] + + def process(self): + asset = self.data["asset"] + subset = self.data["subset"] + name = pype.hosts.blender.plugin.asset_name(asset, subset) + collection = bpy.data.collections.new(name=name) + bpy.context.scene.collection.children.link(collection) + self.data['task'] = api.Session.get('AVALON_TASK') + blender.lib.imprint(collection, self.data) + + return collection diff --git a/pype/plugins/blender/load/load_layout.py b/pype/plugins/blender/load/load_layout.py index 2c8948dd48..c8ef73aea3 100644 --- a/pype/plugins/blender/load/load_layout.py +++ b/pype/plugins/blender/load/load_layout.py @@ -1,11 +1,15 @@ """Load a layout in Blender.""" +import json +from logging import log, warning +import math + import logging from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender +from avalon import api, blender, pipeline import bpy import pype.hosts.blender.plugin as plugin @@ -150,8 +154,9 @@ class BlendLayoutLoader(plugin.AssetLoader): # Save the list of objects in the metadata container container_metadata["objects"] = obj_container.all_objects - nodes = list(container.objects) - nodes.append(container) + # nodes = list(container.objects) + # nodes.append(container) + nodes = [container] self[:] = nodes return nodes @@ -192,7 +197,7 @@ class BlendLayoutLoader(plugin.AssetLoader): assert libpath.is_file(), ( f"The file doesn't exist: {libpath}" ) - assert extension in pype.hosts.blender.plugin.VALID_EXTENSIONS, ( + assert extension in plugin.VALID_EXTENSIONS, ( f"Unsupported file: {libpath}" ) @@ -271,3 +276,378 @@ class BlendLayoutLoader(plugin.AssetLoader): bpy.data.collections.remove(collection) return True + + +class UnrealLayoutLoader(plugin.AssetLoader): + """Load layout published from Unreal.""" + + families = ["layout"] + representations = ["json"] + + label = "Link Layout" + icon = "code-fork" + color = "orange" + + def _remove_objects(self, objects): + for obj in list(objects): + if obj.type == 'ARMATURE': + bpy.data.armatures.remove(obj.data) + elif obj.type == 'MESH': + bpy.data.meshes.remove(obj.data) + elif obj.type == 'CAMERA': + bpy.data.cameras.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) + else: + self.log.error( + f"Object {obj.name} of type {obj.type} not recognized.") + + def _remove_collections(self, collection): + if collection.children: + for child in collection.children: + self._remove_collections(child) + bpy.data.collections.remove(child) + + def _remove(self, layout_container): + layout_container_metadata = 
layout_container.get( + blender.pipeline.AVALON_PROPERTY) + + if layout_container.children: + for child in layout_container.children: + child_container = child.get(blender.pipeline.AVALON_PROPERTY) + child_container['objectName'] = child.name + api.remove(child_container) + + for c in bpy.data.collections: + metadata = c.get('avalon') + if metadata: + print("metadata.get('id')") + print(metadata.get('id')) + if metadata and metadata.get('id') == 'pyblish.avalon.instance': + print("metadata.get('dependencies')") + print(metadata.get('dependencies')) + print("layout_container_metadata.get('representation')") + print(layout_container_metadata.get('representation')) + if metadata.get('dependencies') == layout_container_metadata.get('representation'): + + for child in c.children: + bpy.data.collections.remove(child) + bpy.data.collections.remove(c) + break + + def _get_loader(self, loaders, family): + name = "" + if family == 'rig': + name = "BlendRigLoader" + elif family == 'model': + name = "BlendModelLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + def set_transform(self, obj, transform): + location = transform.get('translation') + rotation = transform.get('rotation') + scale = transform.get('scale') + + # Y position is inverted in sign because Unreal and Blender have the + # Y axis mirrored + obj.location = ( + location.get('x'), + -location.get('y'), + location.get('z') + ) + obj.rotation_euler = ( + rotation.get('x'), + -rotation.get('y'), + -rotation.get('z') + ) + obj.scale = ( + scale.get('x'), + scale.get('y'), + scale.get('z') + ) + + def _process( + self, libpath, layout_container, container_name, representation, + actions, parent + ): + with open(libpath, "r") as fp: + data = json.load(fp) + + scene = bpy.context.scene + layout_collection = bpy.data.collections.new(container_name) + scene.collection.children.link(layout_collection) + + all_loaders = api.discover(api.Loader) + + avalon_container = bpy.data.collections.get( + blender.pipeline.AVALON_CONTAINERS) + + for element in data: + reference = element.get('reference') + family = element.get('family') + + loaders = api.loaders_from_representation(all_loaders, reference) + loader = self._get_loader(loaders, family) + + if not loader: + continue + + instance_name = element.get('instance_name') + + element_container = api.load( + loader, + reference, + namespace=instance_name + ) + + if not element_container: + continue + + avalon_container.children.unlink(element_container) + layout_container.children.link(element_container) + + element_metadata = element_container.get( + blender.pipeline.AVALON_PROPERTY) + + # Unlink the object's collection from the scene collection and + # link it in the layout collection + element_collection = element_metadata.get('obj_container') + scene.collection.children.unlink(element_collection) + layout_collection.children.link(element_collection) + + objects = element_metadata.get('objects') + element_metadata['instance_name'] = instance_name + + objects_to_transform = [] + + if family == 'rig': + for o in objects: + if o.type == 'ARMATURE': + objects_to_transform.append(o) + # Create an animation subset for each rig + o.select_set(True) + asset = api.Session["AVALON_ASSET"] + c = api.create( + name="animation_" + element_collection.name, + asset=asset, + family="animation", + options={"useSelection": True}, + data={"dependencies": representation}) + scene.collection.children.unlink(c) + parent.children.link(c) + 
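+                        # Deselect the armature again so the next rig's animation
+                        # instance only picks up its own objects.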
o.select_set(False) + break + elif family == 'model': + objects_to_transform = objects + + for o in objects_to_transform: + self.set_transform(o, element.get('transform')) + + if actions: + if o.type == 'ARMATURE': + action = actions.get(instance_name, None) + + if action: + if o.animation_data is None: + o.animation_data_create() + o.animation_data.action = action + + return layout_collection + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + lib_container = plugin.asset_name( + asset, subset + ) + unique_number = plugin.get_unique_number( + asset, subset + ) + namespace = namespace or f"{asset}_{unique_number}" + container_name = plugin.asset_name( + asset, subset, unique_number + ) + + layout_container = bpy.data.collections.new(container_name) + blender.pipeline.containerise_existing( + layout_container, + name, + namespace, + context, + self.__class__.__name__, + ) + + container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + + container_metadata["libpath"] = libpath + container_metadata["lib_container"] = lib_container + + # Create a setdress subset to contain all the animation for all + # the rigs in the layout + parent = api.create( + name="animation", + asset=api.Session["AVALON_ASSET"], + family="setdress", + options={"useSelection": True}, + data={"dependencies": str(context["representation"]["_id"])}) + + layout_collection = self._process( + libpath, layout_container, container_name, + str(context["representation"]["_id"]), None, parent) + + container_metadata["obj_container"] = layout_collection + + # Save the list of objects in the metadata container + container_metadata["objects"] = layout_collection.all_objects + + nodes = [layout_container] + self[:] = nodes + return nodes + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. 
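+        Actions found on the existing armatures are saved and reassigned
+        to the reloaded rigs.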
+ """ + layout_container = bpy.data.collections.get( + container["objectName"] + ) + if not layout_container: + return False + + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert layout_container, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + layout_container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + collection_libpath = layout_container_metadata["libpath"] + lib_container = layout_container_metadata["lib_container"] + obj_container = plugin.get_local_collection_with_name( + layout_container_metadata["obj_container"].name + ) + objects = obj_container.all_objects + + container_name = obj_container.name + + normalized_collection_libpath = ( + str(Path(bpy.path.abspath(collection_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_collection_libpath, + normalized_libpath, + ) + if normalized_collection_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + actions = {} + + for obj in objects: + if obj.type == 'ARMATURE': + if obj.animation_data and obj.animation_data.action: + obj_cont_name = obj.get( + blender.pipeline.AVALON_PROPERTY).get('container_name') + obj_cont = plugin.get_local_collection_with_name( + obj_cont_name) + element_metadata = obj_cont.get( + blender.pipeline.AVALON_PROPERTY) + instance_name = element_metadata.get('instance_name') + actions[instance_name] = obj.animation_data.action + + self._remove(layout_container) + + bpy.data.collections.remove(obj_container) + + parent = api.create( + name="animation", + asset=api.Session["AVALON_ASSET"], + family="setdress", + options={"useSelection": True}, + data={"dependencies": str(representation["_id"])}) + + layout_collection = self._process( + libpath, layout_container, container_name, + str(representation["_id"]), actions, parent) + + layout_container_metadata["obj_container"] = layout_collection + layout_container_metadata["objects"] = layout_collection.all_objects + layout_container_metadata["libpath"] = str(libpath) + layout_container_metadata["representation"] = str( + representation["_id"]) + + def remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (avalon-core:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. 
+ """ + layout_container = bpy.data.collections.get( + container["objectName"] + ) + if not layout_container: + return False + + layout_container_metadata = layout_container.get( + blender.pipeline.AVALON_PROPERTY) + obj_container = plugin.get_local_collection_with_name( + layout_container_metadata["obj_container"].name + ) + + self._remove(layout_container) + + bpy.data.collections.remove(obj_container) + bpy.data.collections.remove(layout_container) + + return True diff --git a/pype/plugins/blender/load/load_rig.py b/pype/plugins/blender/load/load_rig.py index 7b60b20064..12017fdbb2 100644 --- a/pype/plugins/blender/load/load_rig.py +++ b/pype/plugins/blender/load/load_rig.py @@ -30,14 +30,28 @@ class BlendRigLoader(plugin.AssetLoader): bpy.data.armatures.remove(obj.data) elif obj.type == 'MESH': bpy.data.meshes.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) for child in obj_container.children: bpy.data.collections.remove(child) bpy.data.collections.remove(obj_container) + def make_local_and_metadata(self, obj, collection_name): + local_obj = plugin.prepare_data(obj, collection_name) + plugin.prepare_data(local_obj.data, collection_name) + + if not local_obj.get(blender.pipeline.AVALON_PROPERTY): + local_obj[blender.pipeline.AVALON_PROPERTY] = dict() + + avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] + avalon_info.update({"container_name": collection_name + '_CON'}) + + return local_obj + def _process( - self, libpath, lib_container, container_name, + self, libpath, lib_container, collection_name, action, parent_collection ): relative = bpy.context.preferences.filepaths.use_relative_paths @@ -54,34 +68,53 @@ class BlendRigLoader(plugin.AssetLoader): parent.children.link(bpy.data.collections[lib_container]) rig_container = parent.children[lib_container].make_local() - rig_container.name = container_name + rig_container.name = collection_name - meshes = [] + objects = [] armatures = [ obj for obj in rig_container.objects if obj.type == 'ARMATURE' ] for child in rig_container.children: - local_child = plugin.prepare_data(child, container_name) - meshes.extend(local_child.objects) + local_child = plugin.prepare_data(child, collection_name) + objects.extend(local_child.objects) - # Link meshes first, then armatures. + # for obj in bpy.data.objects: + # obj.select_set(False) + + constraints = [] + + for armature in armatures: + for bone in armature.pose.bones: + for constraint in bone.constraints: + if hasattr(constraint, 'target'): + constraints.append(constraint) + + # Link armatures after other objects. # The armature is unparented for all the non-local meshes, # when it is made local. 
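+        # Constraints that still target the old linked objects are re-pointed
+        # to their local copies below.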
- for obj in meshes + armatures: - local_obj = plugin.prepare_data(obj, container_name) - plugin.prepare_data(local_obj.data, container_name) - - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - if local_obj.type == 'ARMATURE' and action is not None: - local_obj.animation_data.action = action - + for obj in objects: + local_obj = self.make_local_and_metadata(obj, collection_name) + + if obj != local_obj: + for constraint in constraints: + if constraint.target == obj: + constraint.target = local_obj + + for armature in armatures: + local_obj = self.make_local_and_metadata(armature, collection_name) + + if action is not None: + local_obj.animation_data.action = action + + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj + rig_container.pop(blender.pipeline.AVALON_PROPERTY) bpy.ops.object.select_all(action='DESELECT') @@ -99,7 +132,6 @@ class BlendRigLoader(plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname asset = context["asset"]["name"] subset = context["subset"]["name"] @@ -110,12 +142,11 @@ class BlendRigLoader(plugin.AssetLoader): asset, subset ) namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( + collection_name = plugin.asset_name( asset, subset, unique_number ) - container = bpy.data.collections.new(lib_container) - container.name = container_name + container = bpy.data.collections.new(collection_name) blender.pipeline.containerise_existing( container, name, @@ -131,10 +162,9 @@ class BlendRigLoader(plugin.AssetLoader): container_metadata["lib_container"] = lib_container obj_container = self._process( - libpath, lib_container, container_name, None, None) + libpath, lib_container, collection_name, None, None) container_metadata["obj_container"] = obj_container - # Save the list of objects in the metadata container container_metadata["objects"] = obj_container.all_objects @@ -214,9 +244,9 @@ class BlendRigLoader(plugin.AssetLoader): armatures = [obj for obj in objects if obj.type == 'ARMATURE'] assert(len(armatures) == 1) - action = None - if armatures[0].animation_data and armatures[0].animation_data.action: - action = armatures[0].animation_data.action + action = None + if armatures[0].animation_data and armatures[0].animation_data.action: + action = armatures[0].animation_data.action parent = plugin.get_parent_collection(obj_container) diff --git a/pype/plugins/blender/publish/extract_animation_collection.py b/pype/plugins/blender/publish/extract_animation_collection.py new file mode 100644 index 0000000000..e5e0877280 --- /dev/null +++ b/pype/plugins/blender/publish/extract_animation_collection.py @@ -0,0 +1,56 @@ +import os +import json + +import pype.api +import pyblish.api + +import bpy + +class ExtractSetDress(pype.api.Extractor): + """Extract setdress.""" + + label = "Extract SetDress" + hosts = ["blender"] + families = ["setdress"] + optional = True + order = pyblish.api.ExtractorOrder + 0.1 + + def process(self, instance): + stagingdir = self.staging_dir(instance) + + json_data = [] + + for i in instance.context: + collection = i.data.get('name') + container = None + for obj in bpy.data.collections[collection].objects: 
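+                # The armature's avalon metadata stores the name of the rig
+                # container it was loaded from.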
+ if obj.type == 'ARMATURE': + container_name = obj.get('avalon').get('container_name') + container = bpy.data.collections[container_name] + if container: + json_dict = {} + json_dict['subset'] = i.data.get('subset') + json_dict['container'] = container.name + json_dict['instance_name'] = container.get('avalon').get('instance_name') + json_data.append(json_dict) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_filename = f"{instance.name}.json" + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, fp=file, indent=2) + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation) + + self.log.info("Extracted instance '{}' to: {}".format( + instance.name, json_representation)) + diff --git a/pype/plugins/blender/publish/extract_fbx_animation.py b/pype/plugins/blender/publish/extract_fbx_animation.py index d51c641e9c..9c421560f0 100644 --- a/pype/plugins/blender/publish/extract_fbx_animation.py +++ b/pype/plugins/blender/publish/extract_fbx_animation.py @@ -17,14 +17,10 @@ class ExtractAnimationFBX(pype.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = f"{instance.name}.fbx" - filepath = os.path.join(stagingdir, filename) context = bpy.context scene = context.scene - view_layer = context.view_layer # Perform extraction self.log.info("Performing extraction..") @@ -35,22 +31,6 @@ class ExtractAnimationFBX(pype.api.Extractor): assert len(collections) == 1, "There should be one and only one " \ "collection collected for this asset" - old_active_layer_collection = view_layer.active_layer_collection - - layers = view_layer.layer_collection.children - - # Get the layer collection from the collection we need to export. - # This is needed because in Blender you can only set the active - # collection with the layer collection, and there is no way to get - # the layer collection from the collection - # (but there is the vice versa). 
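+        # The explicit armature selection and context override added below
+        # replace the old active-collection based export.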
-        layer_collections = [
-            layer for layer in layers if layer.collection == collections[0]]
-
-        assert len(layer_collections) == 1
-
-        view_layer.active_layer_collection = layer_collections[0]
-
         old_scale = scene.unit_settings.scale_length

         # We set the scale of the scene for the export
@@ -59,6 +39,15 @@ class ExtractAnimationFBX(pype.api.Extractor):
         armatures = [
             obj for obj in collections[0].objects if obj.type == 'ARMATURE']

+        assert len(armatures) == 1, "There should be one and only one " \
+            "armature collected for this asset"
+
+        armature = armatures[0]
+
+        armature_name = armature.name
+        original_name = armature_name.split(':')[0]
+        armature.name = original_name
+
         object_action_pairs = []
         original_actions = []

@@ -66,23 +55,23 @@ class ExtractAnimationFBX(pype.api.Extractor):
         ending_frames = []

         # For each armature, we make a copy of the current action
-        for obj in armatures:
+        curr_action = None
+        copy_action = None

-            curr_action = None
-            copy_action = None
+        if armature.animation_data and armature.animation_data.action:
+            curr_action = armature.animation_data.action
+            copy_action = curr_action.copy()

-            if obj.animation_data and obj.animation_data.action:
+            curr_frame_range = curr_action.frame_range

-                curr_action = obj.animation_data.action
-                copy_action = curr_action.copy()
+            starting_frames.append(curr_frame_range[0])
+            ending_frames.append(curr_frame_range[1])
+        else:
+            self.log.info("Object has no animation.")
+            return

-                curr_frame_range = curr_action.frame_range
-
-                starting_frames.append(curr_frame_range[0])
-                ending_frames.append(curr_frame_range[1])
-
-            object_action_pairs.append((obj, copy_action))
-            original_actions.append(curr_action)
+        object_action_pairs.append((armature, copy_action))
+        original_actions.append(curr_action)

         # We compute the starting and ending frames
         max_frame = min(starting_frames)
@@ -96,44 +85,52 @@ class ExtractAnimationFBX(pype.api.Extractor):
             do_clean=False
         )

-        # We export the fbx
+        for obj in bpy.data.objects:
+            obj.select_set(False)
+
+        armature.select_set(True)
+        fbx_filename = f"{instance.name}_{armature.name}.fbx"
+        filepath = os.path.join(stagingdir, fbx_filename)
+
+        override = bpy.context.copy()
+        override['selected_objects'] = [armature]
         bpy.ops.export_scene.fbx(
+            override,
             filepath=filepath,
-            use_active_collection=True,
+            use_selection=True,
             bake_anim_use_nla_strips=False,
             bake_anim_use_all_actions=False,
-            add_leaf_bones=False
+            add_leaf_bones=False,
+            armature_nodetype='ROOT',
+            object_types={'ARMATURE'}
         )
-
-        view_layer.active_layer_collection = old_active_layer_collection
+        armature.name = armature_name
+        armature.select_set(False)

         scene.unit_settings.scale_length = old_scale

         # We delete the baked action and set the original one back
         for i in range(0, len(object_action_pairs)):
-
             pair = object_action_pairs[i]
             action = original_actions[i]

             if action:
-
                 pair[0].animation_data.action = action

             if pair[1]:
-
                 pair[1].user_clear()
                 bpy.data.actions.remove(pair[1])

         if "representations" not in instance.data:
             instance.data["representations"] = []

-        representation = {
+        fbx_representation = {
             'name': 'fbx',
             'ext': 'fbx',
-            'files': filename,
+            'files': fbx_filename,
             "stagingDir": stagingdir,
         }
-        instance.data["representations"].append(representation)
+        instance.data["representations"].append(fbx_representation)

-        self.log.info("Extracted instance '%s' to: %s",
-                      instance.name, representation)
+        self.log.info("Extracted instance '{}' to: {}".format(
+            instance.name, fbx_representation))
diff --git
a/pype/plugins/blender/publish/integrate_animation.py b/pype/plugins/blender/publish/integrate_animation.py new file mode 100644 index 0000000000..90e94a4aac --- /dev/null +++ b/pype/plugins/blender/publish/integrate_animation.py @@ -0,0 +1,49 @@ +import json + +from avalon import io +import pyblish.api + + +class IntegrateAnimation(pyblish.api.InstancePlugin): + """Generate a JSON file for animation.""" + + label = "Integrate Animation" + order = pyblish.api.IntegratorOrder + 0.1 + optional = True + hosts = ["blender"] + families = ["setdress"] + + def process(self, instance): + self.log.info("Integrate Animation") + + representation = instance.data.get('representations')[0] + json_path = representation.get('publishedFiles')[0] + + with open(json_path, "r") as file: + data = json.load(file) + + # Update the json file for the setdress to add the published + # representations of the animations + for json_dict in data: + i = None + for elem in instance.context: + if elem.data.get('subset') == json_dict['subset']: + i = elem + break + if not i: + continue + rep = None + pub_repr = i.data.get('published_representations') + for elem in pub_repr: + if pub_repr.get(elem).get('representation').get('name') == "fbx": + rep = pub_repr.get(elem) + break + if not rep: + continue + obj_id = rep.get('representation').get('_id') + + if obj_id: + json_dict['_id'] = str(obj_id) + + with open(json_path, "w") as file: + json.dump(data, fp=file, indent=2) diff --git a/pype/plugins/unreal/create/create_layout.py b/pype/plugins/unreal/create/create_layout.py new file mode 100644 index 0000000000..82cef43cee --- /dev/null +++ b/pype/plugins/unreal/create/create_layout.py @@ -0,0 +1,42 @@ +from unreal import EditorLevelLibrary as ell +from pype.hosts.unreal.plugin import Creator +from avalon.unreal import ( + instantiate, +) + + +class CreateLayout(Creator): + """Layout output for character rigs""" + + name = "layoutMain" + label = "Layout" + family = "layout" + icon = "cubes" + + root = "/Game" + suffix = "_INS" + + def __init__(self, *args, **kwargs): + super(CreateLayout, self).__init__(*args, **kwargs) + + def process(self): + data = self.data + + name = data["subset"] + + selection = [] + # if (self.options or {}).get("useSelection"): + # sel_objects = unreal.EditorUtilityLibrary.get_selected_assets() + # selection = [a.get_path_name() for a in sel_objects] + + data["level"] = ell.get_editor_world().get_path_name() + + data["members"] = [] + + if (self.options or {}).get("useSelection"): + # Set as members the selected actors + for actor in ell.get_selected_level_actors(): + data["members"].append("{}.{}".format( + actor.get_outer().get_name(), actor.get_name())) + + instantiate(self.root, name, data, selection, self.suffix) diff --git a/pype/plugins/unreal/load/load_animation.py b/pype/plugins/unreal/load/load_animation.py new file mode 100644 index 0000000000..5e106788ce --- /dev/null +++ b/pype/plugins/unreal/load/load_animation.py @@ -0,0 +1,204 @@ +import os + +from avalon import api, pipeline +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline +import unreal + + +class AnimationFBXLoader(api.Loader): + """Load Unreal SkeletalMesh from FBX""" + + families = ["animation"] + label = "Import FBX Animation" + representations = ["fbx"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, options=None): + """ + Load and containerise representation into Content Browser. + + This is two step process. 
First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + automated = False + actor = None + + task = unreal.AssetImportTask() + task.options = unreal.FbxImportUI() + + # If there are no options, the process cannot be automated + if options: + automated = True + actor_name = 'PersistentLevel.' + options.get('instance_name') + actor = unreal.EditorLevelLibrary.get_actor_reference(actor_name) + skeleton = actor.skeletal_mesh_component.skeletal_mesh.skeleton + task.options.set_editor_property('skeleton', skeleton) + + if not actor: + return None + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', automated) + task.set_editor_property('save', False) + + # set import options here + task.options.set_editor_property( + 'automated_import_should_detect_type', True) + task.options.set_editor_property( + 'original_import_type', unreal.FBXImportType.FBXIT_ANIMATION) + task.options.set_editor_property('import_mesh', False) + task.options.set_editor_property('import_animations', True) + + task.options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS + ) + + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + animation = None + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + imported_asset_data = unreal.EditorAssetLibrary.find_asset_data(a) + imported_asset = unreal.AssetRegistryHelpers.get_asset( + imported_asset_data) + if imported_asset.__class__ == unreal.AnimSequence: + animation = imported_asset + break + 
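+        # Wire the imported sequence to the level actor: enable root motion
+        # and play it as a single-node animation.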
+ if animation: + animation.set_editor_property('enable_root_motion', True) + actor.skeletal_mesh_component.set_editor_property( + 'animation_mode', unreal.AnimationMode.ANIMATION_SINGLE_NODE) + actor.skeletal_mesh_component.animation_data.set_editor_property( + 'anim_to_play', animation) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + task.options = unreal.FbxImportUI() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + # strip suffix + task.set_editor_property('destination_name', name) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', False) + + # set import options here + task.options.set_editor_property( + 'automated_import_should_detect_type', True) + task.options.set_editor_property( + 'original_import_type', unreal.FBXImportType.FBXIT_ANIMATION) + task.options.set_editor_property('import_mesh', False) + task.options.set_editor_property('import_animations', True) + + task.options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_SKINNING_WEIGHTS + ) + + skeletal_mesh = unreal.EditorAssetLibrary.load_asset( + container.get('namespace') + "/" + container.get('asset_name')) + skeleton = skeletal_mesh.get_editor_property('skeleton') + task.options.set_editor_property('skeleton', skeleton) + + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/pype/plugins/unreal/load/load_rig.py b/pype/plugins/unreal/load/load_rig.py new file mode 100644 index 0000000000..56351e388b --- /dev/null +++ b/pype/plugins/unreal/load/load_rig.py @@ -0,0 +1,191 @@ +import os + +from avalon import api, pipeline +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline +import unreal + + +class SkeletalMeshFBXLoader(api.Loader): + """Load Unreal SkeletalMesh from FBX""" + + families = ["rig"] + label = "Import FBX Skeletal Mesh" + representations = ["fbx"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, data): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. 
+ + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', False) + + # set import options here + options = unreal.FbxImportUI() + options.set_editor_property('import_as_skeletal', True) + options.set_editor_property('import_animations', False) + options.set_editor_property('import_mesh', True) + options.set_editor_property('import_materials', True) + options.set_editor_property('import_textures', True) + options.set_editor_property('skeleton', None) + options.set_editor_property('create_physics_asset', False) + + options.set_editor_property('mesh_type_to_import', + unreal.FBXImportType.FBXIT_SKELETAL_MESH) + + options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_ALL + ) + # set to import normals, otherwise Unreal will compute them + # and it will take a long time, depending on the size of the mesh + options.skeletal_mesh_import_data.set_editor_property( + 'normal_import_method', + unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS + ) + + task.options = options + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + task.set_editor_property('destination_name', name) + 
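+        # Re-import over the existing SkeletalMesh in place instead of
+        # creating a new asset.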
task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + options = unreal.FbxImportUI() + options.set_editor_property('import_as_skeletal', True) + options.set_editor_property('import_animations', False) + options.set_editor_property('import_mesh', True) + options.set_editor_property('import_materials', True) + options.set_editor_property('import_textures', True) + options.set_editor_property('skeleton', None) + options.set_editor_property('create_physics_asset', False) + + options.set_editor_property('mesh_type_to_import', + unreal.FBXImportType.FBXIT_SKELETAL_MESH) + + options.skeletal_mesh_import_data.set_editor_property( + 'import_content_type', + unreal.FBXImportContentType.FBXICT_ALL + ) + # set to import normals, otherwise Unreal will compute them + # and it will take a long time, depending on the size of the mesh + options.skeletal_mesh_import_data.set_editor_property( + 'normal_import_method', + unreal.FBXNormalImportMethod.FBXNIM_IMPORT_NORMALS + ) + + task.options = options + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/pype/plugins/unreal/load/load_setdress.py b/pype/plugins/unreal/load/load_setdress.py new file mode 100644 index 0000000000..08330e349b --- /dev/null +++ b/pype/plugins/unreal/load/load_setdress.py @@ -0,0 +1,127 @@ +import json + +from avalon import api +import unreal + + +class AnimationCollectionLoader(api.Loader): + """Load Unreal SkeletalMesh from FBX""" + + families = ["setdress"] + representations = ["json"] + + label = "Load Animation Collection" + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, options): + from avalon import api, pipeline + from avalon.unreal import lib + from avalon.unreal import pipeline as unreal_pipeline + import unreal + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}".format(root, asset), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + libpath = self.fname + + with open(libpath, "r") as fp: + data = json.load(fp) + + all_loaders = api.discover(api.Loader) + + for element in data: + reference = element.get('_id') + + loaders = api.loaders_from_representation(all_loaders, reference) + loader = None + for l in loaders: + if l.__name__ == "AnimationFBXLoader": + loader = l + break + + if not loader: + continue + + instance_name = element.get('instance_name') + + 
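+            # Each element is handed to the FBX animation loader; the
+            # instance_name stored in the JSON becomes the load namespace.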
api.load( + loader, + reference, + namespace=instance_name, + options=element + ) + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + return asset_content + + def update(self, container, representation): + from avalon import api, io + from avalon.unreal import pipeline + + source_path = api.get_representation_path(representation) + + with open(source_path, "r") as fp: + data = json.load(fp) + + animation_containers = [ + i for i in pipeline.ls() if + i.get('asset') == container.get('asset') and + i.get('family') == 'animation'] + + for element in data: + new_version = io.find_one({"_id": io.ObjectId(element.get('_id'))}) + new_version_number = new_version.get('context').get('version') + anim_container = None + for i in animation_containers: + if i.get('container_name') == (element.get('subset') + "_CON"): + anim_container = i + break + if not anim_container: + continue + + api.update(anim_container, new_version_number) + + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + def remove(self, container): + unreal.EditorAssetLibrary.delete_directory(container["namespace"]) diff --git a/pype/plugins/unreal/load/load_staticmeshfbx.py b/pype/plugins/unreal/load/load_staticmeshfbx.py index 4c27f9aa92..149bafcacc 100644 --- a/pype/plugins/unreal/load/load_staticmeshfbx.py +++ b/pype/plugins/unreal/load/load_staticmeshfbx.py @@ -1,12 +1,16 @@ -from avalon import api +import os + +from avalon import api, pipeline from avalon import unreal as avalon_unreal +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline import unreal class StaticMeshFBXLoader(api.Loader): """Load Unreal StaticMesh from FBX""" - families = ["unrealStaticMesh"] + families = ["model", "unrealStaticMesh"] label = "Import FBX Static Mesh" representations = ["fbx"] icon = "cube" @@ -35,67 +39,119 @@ class StaticMeshFBXLoader(api.Loader): list(str): list of container content """ - tools = unreal.AssetToolsHelpers().get_asset_tools() - temp_dir, temp_name = tools.create_unique_asset_name( - "/Game/{}".format(name), "_TMP" - ) + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) - unreal.EditorAssetLibrary.make_directory(temp_dir) + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) task = unreal.AssetImportTask() - task.filename = self.fname - task.destination_path = temp_dir - task.destination_name = name - 
task.replace_existing = False - task.automated = True - task.save = True + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) # set import options here - task.options = unreal.FbxImportUI() - task.options.import_animations = False + options = unreal.FbxImportUI() + options.set_editor_property( + 'automated_import_should_detect_type', False) + options.set_editor_property('import_animations', False) + task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 - imported_assets = unreal.EditorAssetLibrary.list_assets( - temp_dir, recursive=True, include_folder=True - ) - new_dir = avalon_unreal.containerise( - name, namespace, imported_assets, context, self.__class__.__name__) + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) asset_content = unreal.EditorAssetLibrary.list_assets( - new_dir, recursive=True, include_folder=True + asset_dir, recursive=True, include_folder=True ) - unreal.EditorAssetLibrary.delete_directory(temp_dir) + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) return asset_content def update(self, container, representation): - node = container["objectName"] + name = container["name"] source_path = api.get_representation_path(representation) destination_path = container["namespace"] task = unreal.AssetImportTask() - task.filename = source_path - task.destination_path = destination_path + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) # strip suffix - task.destination_name = node[:-4] - task.replace_existing = True - task.automated = True - task.save = True + task.set_editor_property('destination_name', name) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) - task.options = unreal.FbxImportUI() - task.options.import_animations = False + # set import options here + options = unreal.FbxImportUI() + options.set_editor_property( + 'automated_import_should_detect_type', False) + options.set_editor_property('import_animations', False) + task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata - avalon_unreal.imprint( - container_path, {"_id": str(representation["_id"])}) + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) def 
remove(self, container): - unreal.EditorAssetLibrary.delete_directory(container["namespace"]) + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/pype/plugins/unreal/publish/collect_current_file.py b/pype/plugins/unreal/publish/collect_current_file.py new file mode 100644 index 0000000000..4e828933bb --- /dev/null +++ b/pype/plugins/unreal/publish/collect_current_file.py @@ -0,0 +1,19 @@ +import unreal + +import pyblish.api + + +class CollectUnrealCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.5 + label = "Unreal Current File" + hosts = ['unreal'] + + def process(self, context): + """Inject the current working file""" + current_file = unreal.Paths.get_project_file_path() + context.data['currentFile'] = current_file + + assert current_file != '', "Current file is empty. " \ + "Save the file before continuing." diff --git a/pype/plugins/unreal/publish/collect_instances.py b/pype/plugins/unreal/publish/collect_instances.py index 766a73028c..62676f9938 100644 --- a/pype/plugins/unreal/publish/collect_instances.py +++ b/pype/plugins/unreal/publish/collect_instances.py @@ -1,5 +1,5 @@ +import ast import unreal - import pyblish.api @@ -35,13 +35,10 @@ class CollectInstances(pyblish.api.ContextPlugin): ) # content of container - members = unreal.EditorAssetLibrary.list_assets( - asset.get_path_name(), recursive=True, include_folder=True - ) + members = ast.literal_eval(data.get("members")) self.log.debug(members) self.log.debug(asset.get_path_name()) # remove instance container - members.remove(asset.get_path_name()) self.log.info("Creating instance for {}".format(asset.get_name())) instance = context.create_instance(asset.get_name()) @@ -50,6 +47,8 @@ class CollectInstances(pyblish.api.ContextPlugin): # Store the exact members of the object set instance.data["setMembers"] = members instance.data["families"] = [data.get("family")] + instance.data["level"] = data.get("level") + instance.data["parent"] = data.get("parent") label = "{0} ({1})".format(asset.get_name()[:-4], data["asset"]) diff --git a/pype/plugins/unreal/publish/extract_layout.py b/pype/plugins/unreal/publish/extract_layout.py new file mode 100644 index 0000000000..6345b8da51 --- /dev/null +++ b/pype/plugins/unreal/publish/extract_layout.py @@ -0,0 +1,113 @@ +import os +import json +import math + +import unreal +from unreal import EditorLevelLibrary as ell +from unreal import EditorAssetLibrary as eal + +import pype.api +from avalon import io + + +class ExtractLayout(pype.api.Extractor): + """Extract a layout.""" + + label = "Extract Layout" + hosts = ["unreal"] + families = ["layout"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + + # Perform extraction + self.log.info("Performing extraction..") + + # Check if the loaded level is the same of the instance + current_level = ell.get_editor_world().get_path_name() + assert current_level == instance.data.get("level"), \ + "Wrong level loaded" + + json_data = [] + + for member in instance[:]: + actor = ell.get_actor_reference(member) + mesh = None + + # Check type the type of mesh + if actor.get_class().get_name() == 'SkeletalMeshActor': + mesh = 
actor.skeletal_mesh_component.skeletal_mesh + elif actor.get_class().get_name() == 'StaticMeshActor': + mesh = actor.static_mesh_component.static_mesh + + if mesh: + # Search the reference to the Asset Container for the object + path = unreal.Paths.get_path(mesh.get_path_name()) + filter = unreal.ARFilter( + class_names=["AssetContainer"], package_paths=[path]) + ar = unreal.AssetRegistryHelpers.get_asset_registry() + try: + asset_container = ar.get_assets(filter)[0].get_asset() + except IndexError: + self.log.error("AssetContainer not found.") + return + + parent = eal.get_metadata_tag(asset_container, "parent") + family = eal.get_metadata_tag(asset_container, "family") + + self.log.info("Parent: {}".format(parent)) + blend = io.find_one( + { + "type": "representation", + "parent": io.ObjectId(parent), + "name": "blend" + }, + projection={"_id": True}) + blend_id = blend["_id"] + + json_element = {} + json_element["reference"] = str(blend_id) + json_element["family"] = family + json_element["instance_name"] = actor.get_name() + json_element["asset_name"] = mesh.get_name() + import_data = mesh.get_editor_property("asset_import_data") + json_element["file_path"] = import_data.get_first_filename() + transform = actor.get_actor_transform() + + json_element["transform"] = { + "translation": { + "x": transform.translation.x, + "y": transform.translation.y, + "z": transform.translation.z + }, + "rotation": { + "x": math.radians(transform.rotation.euler().x), + "y": math.radians(transform.rotation.euler().y), + "z": math.radians(transform.rotation.euler().z), + }, + "scale": { + "x": transform.scale3d.x, + "y": transform.scale3d.y, + "z": transform.scale3d.z + } + } + json_data.append(json_element) + + json_filename = "{}.json".format(instance.name) + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, fp=file, indent=2) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation)
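Note on the JSON interchange used above: ExtractLayout (Unreal) writes one element per actor with a representation id, family, instance/asset names, the source FBX path and a transform in radians; UnrealLayoutLoader._process reads the same keys back in Blender and flips the Y axis to bridge the two coordinate systems. The sketch below is illustrative only (the values, actor names and output path are made up), but the keys mirror the ones written and read in the code above. The animation/setdress JSON produced by ExtractSetDress is a separate, flatter list (subset, container, instance_name, plus the _id filled in by IntegrateAnimation).

# Illustrative sketch only: hypothetical values, real keys.
import json

layout_element = {
    "reference": "5f1c2e9b8d3a4b6c7d8e9f01",  # "blend" representation id of the asset
    "family": "rig",                           # picks BlendRigLoader vs BlendModelLoader
    "instance_name": "SK_hero_01",             # Unreal level actor name, reused as namespace
    "asset_name": "hero_rig",
    "file_path": "C:/exports/hero_rig.fbx",    # FBX the Unreal asset was imported from
    "transform": {
        "translation": {"x": 0.0, "y": 120.0, "z": 0.0},
        "rotation": {"x": 0.0, "y": 0.0, "z": 1.5708},  # radians; Y/Z get negated on the Blender side
        "scale": {"x": 1.0, "y": 1.0, "z": 1.0},
    },
}

with open("layoutMain.json", "w") as f:
    json.dump([layout_element], f, indent=2)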