From a94ce94ca168dbf13b8356b07ec242e8d2677d4f Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Wed, 13 Nov 2019 19:38:07 +0100
Subject: [PATCH 001/393] move maya ascii, setdress and layout to reference
 loader

---
 pype/plugins/maya/load/load_mayaascii.py | 4 +---
 pype/plugins/maya/load/load_reference.py | 7 ++++++-
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/maya/load/load_mayaascii.py b/pype/plugins/maya/load/load_mayaascii.py
index b9a5de2782..ab7b2daffb 100644
--- a/pype/plugins/maya/load/load_mayaascii.py
+++ b/pype/plugins/maya/load/load_mayaascii.py
@@ -6,9 +6,7 @@ import os
 class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
     """Load the model"""

-    families = ["mayaAscii",
-                "setdress",
-                "layout"]
+    families = []
     representations = ["ma"]

     label = "Reference Maya Ascii"
diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py
index 55db019cf4..f1df584feb 100644
--- a/pype/plugins/maya/load/load_reference.py
+++ b/pype/plugins/maya/load/load_reference.py
@@ -8,7 +8,12 @@ reload(pype.maya.plugin)
 class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
     """Load the model"""

-    families = ["model", "pointcache", "animation"]
+    families = ["model",
+                "pointcache",
+                "animation",
+                "mayaAscii",
+                "setdress",
+                "layout"]
     representations = ["ma", "abc"]

     tool_names = ["loader"]

From ccbef046058b20ada3a609c794705e3cce7da0b5 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 19 Nov 2019 22:41:27 +0100
Subject: [PATCH 002/393] preliminary copy of blender integration from sonar

---
 pype/blender/__init__.py                      |  34 +++
 pype/blender/action.py                        |  42 +++
 pype/blender/plugin.py                        | 135 +++++++++
 .../plugins/blender/create/submarine_model.py |  35 +++
 pype/plugins/blender/load/submarine_model.py  | 264 ++++++++++++++++++
 .../blender/publish/collect_current_file.py   |  16 ++
 pype/plugins/blender/publish/collect_model.py |  52 ++++
 pype/plugins/blender/publish/extract_model.py |  34 +++
 .../blender/publish/validate_mesh_has_uv.py   |  47 ++++
 .../validate_mesh_no_negative_scale.py        |  31 ++
 res/app_icons/blender.png                     | Bin 0 -> 51122 bytes
 11 files changed, 690 insertions(+)
 create mode 100644 pype/blender/__init__.py
 create mode 100644 pype/blender/action.py
 create mode 100644 pype/blender/plugin.py
 create mode 100644 pype/plugins/blender/create/submarine_model.py
 create mode 100644 pype/plugins/blender/load/submarine_model.py
 create mode 100644 pype/plugins/blender/publish/collect_current_file.py
 create mode 100644 pype/plugins/blender/publish/collect_model.py
 create mode 100644 pype/plugins/blender/publish/extract_model.py
 create mode 100644 pype/plugins/blender/publish/validate_mesh_has_uv.py
 create mode 100644 pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
 create mode 100644 res/app_icons/blender.png

diff --git a/pype/blender/__init__.py b/pype/blender/__init__.py
new file mode 100644
index 0000000000..8a29917e40
--- /dev/null
+++ b/pype/blender/__init__.py
@@ -0,0 +1,34 @@
import logging
from pathlib import Path
import os

import bpy

from avalon import api as avalon
from pyblish import api as pyblish

from .plugin import AssetLoader

logger = logging.getLogger("pype.blender")

PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

PUBLISH_PATH = os.path.join(PLUGINS_DIR, "blender", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "blender", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "blender", "create")


def install():
    """Install Blender configuration for Avalon."""
    pyblish.register_plugin_path(str(PUBLISH_PATH))
    avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
    avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))


def uninstall():
    """Uninstall Blender configuration for Avalon."""
    pyblish.deregister_plugin_path(str(PUBLISH_PATH))
    avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
    avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))
diff --git a/pype/blender/action.py b/pype/blender/action.py
new file mode 100644
index 0000000000..948123c3c5
--- /dev/null
+++ b/pype/blender/action.py
@@ -0,0 +1,42 @@
import bpy

import pyblish.api

from ..action import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):
    """Select invalid objects in Blender when a publish plug-in failed."""
    label = "Select Invalid"
    on = "failed"
    icon = "search"

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context)
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes...")
        invalid = list()
        for instance in instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.extend(invalid_nodes)
                else:
                    self.log.warning("Failed plug-in doesn't have any selectable objects.")

        # Make sure every node is only processed once
        invalid = list(set(invalid))

        bpy.ops.object.select_all(action='DESELECT')
        if invalid:
            invalid_names = [obj.name for obj in invalid]
            self.log.info("Selecting invalid objects: %s", ", ".join(invalid_names))
            # Select the objects and also make the last one the active object.
            for obj in invalid:
                obj.select_set(True)
            bpy.context.view_layer.objects.active = invalid[-1]

        else:
            self.log.info("No invalid nodes found.")
diff --git a/pype/blender/plugin.py b/pype/blender/plugin.py
new file mode 100644
index 0000000000..ad5a259785
--- /dev/null
+++ b/pype/blender/plugin.py
@@ -0,0 +1,135 @@
"""Shared functionality for pipeline plugins for Blender."""

from pathlib import Path
from typing import Dict, List, Optional

import bpy

from avalon import api

VALID_EXTENSIONS = [".blend"]


def model_name(asset: str, subset: str, namespace: Optional[str] = None) -> str:
    """Return a consistent name for a model asset."""
    name = f"{asset}_{subset}"
    if namespace:
        name = f"{namespace}:{name}"
    return name


class AssetLoader(api.Loader):
    """A basic AssetLoader for Blender

    This will implement the basic logic for linking/appending assets
    into another Blender scene.

    The `update` method should be implemented by a sub-class, because
    it's different for different types (e.g. model, rig, animation,
    etc.).
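
    Illustrative sketch of a minimal sub-class (the class name and the
    method bodies are hypothetical, not part of this patch):

        class BlendRigLoader(AssetLoader):
            families = ["rig"]
            representations = ["blend"]

            def process_asset(self, context, name,
                              namespace=None, options=None):
                ...  # link the collection from `self.fname` into the scene

            def update(self, container, representation):
                ...  # swap the linked library for the new representation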
+ """ + + @staticmethod + def _get_instance_empty(instance_name: str, nodes: List) -> Optional[bpy.types.Object]: + """Get the 'instance empty' that holds the collection instance.""" + for node in nodes: + if not isinstance(node, bpy.types.Object): + continue + if (node.type == 'EMPTY' and node.instance_type == 'COLLECTION' + and node.instance_collection and node.name == instance_name): + return node + return None + + @staticmethod + def _get_instance_collection(instance_name: str, nodes: List) -> Optional[bpy.types.Collection]: + """Get the 'instance collection' (container) for this asset.""" + for node in nodes: + if not isinstance(node, bpy.types.Collection): + continue + if node.name == instance_name: + return node + return None + + @staticmethod + def _get_library_from_container(container: bpy.types.Collection) -> bpy.types.Library: + """Find the library file from the container. + + It traverses the objects from this collection, checks if there is only + 1 library from which the objects come from and returns the library. + + Warning: + No nested collections are supported at the moment! + """ + assert not container.children, "Nested collections are not supported." + assert container.objects, "The collection doesn't contain any objects." + libraries = set() + for obj in container.objects: + assert obj.library, f"'{obj.name}' is not linked." + libraries.add(obj.library) + + assert len(libraries) == 1, "'{container.name}' contains objects from more then 1 library." + + return list(libraries)[0] + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """Must be implemented by a sub-class""" + raise NotImplementedError("Must be implemented by a sub-class") + + def load(self, + context: dict, + name: Optional[str] = None, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + """Load asset via database + + Arguments: + context: Full parenthood of representation to load + name: Use pre-defined name + namespace: Use pre-defined namespace + options: Additional settings dictionary + """ + # TODO (jasper): make it possible to add the asset several times by + # just re-using the collection + assert Path(self.fname).exists(), f"{self.fname} doesn't exist." + + self.process_asset( + context=context, + name=name, + namespace=namespace, + options=options, + ) + + # Only containerise if anything was loaded by the Loader. + nodes = self[:] + if not nodes: + return None + + # Only containerise if it's not already a collection from a .blend file. 
        representation = context["representation"]["name"]
        if representation != "blend":
            from avalon.blender.pipeline import containerise
            return containerise(
                name=name,
                namespace=namespace,
                nodes=nodes,
                context=context,
                loader=self.__class__.__name__,
            )

        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        instance_name = model_name(asset, subset, namespace)

        return self._get_instance_collection(instance_name, nodes)

    def update(self, container: Dict, representation: Dict):
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")

    def remove(self, container: Dict) -> bool:
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")
diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py
new file mode 100644
index 0000000000..29fcae8fbf
--- /dev/null
+++ b/pype/plugins/blender/create/submarine_model.py
@@ -0,0 +1,35 @@
"""Create a model asset."""

import bpy

import sonar.blender
from avalon import api
from avalon.blender import Creator, lib


class CreateModel(Creator):
    """Polygonal static geometry"""

    name = "model_default"
    label = "Model"
    family = "model"
    icon = "cube"

    def process(self):

        asset = self.data["asset"]
        subset = self.data["subset"]
        name = sonar.blender.plugin.model_name(asset, subset)
        collection = bpy.data.collections.new(name=name)
        bpy.context.scene.collection.children.link(collection)
        self.data['task'] = api.Session.get('AVALON_TASK')
        lib.imprint(collection, self.data)

        if (self.options or {}).get("useSelection"):
            for obj in bpy.context.selected_objects:
                collection.objects.link(obj)

        if bpy.data.workspaces.get('Modeling'):
            bpy.context.window.workspace = bpy.data.workspaces['Modeling']

        return collection
diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py
new file mode 100644
index 0000000000..4535b29065
--- /dev/null
+++ b/pype/plugins/blender/load/submarine_model.py
@@ -0,0 +1,264 @@
"""Load a model asset in Blender."""

import logging
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional

import avalon.blender.pipeline
import bpy
import pype.blender
from avalon import api

logger = logging.getLogger("pype").getChild("blender").getChild("load_model")


class BlendModelLoader(pype.blender.AssetLoader):
    """Load models from a .blend file.

    Because they come from a .blend file we can simply link the collection that
    contains the model. There is no further need to 'containerise' it.

    Warning:
        Loading the same asset more than once is not properly supported at the
        moment.
    """

    families = ["model"]
    representations = ["blend"]

    label = "Link Model"
    icon = "code-fork"
    color = "orange"

    @staticmethod
    def _get_lib_collection(name: str, libpath: Path) -> Optional[bpy.types.Collection]:
        """Find the collection(s) with name, loaded from libpath.

        Note:
            It is assumed that only 1 matching collection is found.
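            Paths are compared after being normalised with
            `bpy.path.abspath()` and `Path.resolve()`, so relative and
            absolute references to the same .blend file match.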
+ """ + for collection in bpy.data.collections: + if collection.name != name: + continue + if collection.library is None: + continue + if not collection.library.filepath: + continue + collection_lib_path = str(Path(bpy.path.abspath(collection.library.filepath)).resolve()) + normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve()) + if collection_lib_path == normalized_libpath: + return collection + return None + + @staticmethod + def _collection_contains_object(collection: bpy.types.Collection, object: bpy.types.Object) -> bool: + """Check if the collection contains the object.""" + for obj in collection.objects: + if obj == object: + return True + return False + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + lib_container = pype.blender.plugin.model_name(asset, subset) + container_name = pype.blender.plugin.model_name(asset, subset, namespace) + relative = bpy.context.preferences.filepaths.use_relative_paths + + with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to): + data_to.collections = [lib_container] + + scene = bpy.context.scene + instance_empty = bpy.data.objects.new(container_name, None) + if not instance_empty.get("avalon"): + instance_empty["avalon"] = dict() + avalon_info = instance_empty["avalon"] + avalon_info.update({"container_name": container_name}) + scene.collection.objects.link(instance_empty) + instance_empty.instance_type = 'COLLECTION' + container = bpy.data.collections[lib_container] + container.name = container_name + instance_empty.instance_collection = container + container.make_local() + avalon.blender.pipeline.containerise_existing( + container, + name, + namespace, + context, + self.__class__.__name__, + ) + + nodes = list(container.objects) + nodes.append(container) + nodes.append(instance_empty) + self[:] = nodes + return nodes + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! + """ + collection = bpy.data.collections.get(container["objectName"]) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + logger.debug( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert collection, f"The asset is not loaded: {container['objectName']}" + assert not (collection.children), "Nested collections are not supported." 
        assert libpath, (f"No existing library file found for {container['objectName']}")
        assert libpath.is_file(), f"The file doesn't exist: {libpath}"
        assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}"
        collection_libpath = self._get_library_from_container(collection).filepath
        normalized_collection_libpath = str(Path(bpy.path.abspath(collection_libpath)).resolve())
        normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve())
        logger.debug(
            "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
            normalized_collection_libpath,
            normalized_libpath,
        )
        if normalized_collection_libpath == normalized_libpath:
            logger.info("Library already loaded, not updating...")
            return
        # Let Blender's garbage collection take care of removing the library
        # itself after removing the objects.
        objects_to_remove = set()
        collection_objects = list()
        collection_objects[:] = collection.objects
        for obj in collection_objects:
            # Unlink every object
            collection.objects.unlink(obj)
            remove_obj = True
            for coll in [coll for coll in bpy.data.collections if coll != collection]:
                if coll.objects and self._collection_contains_object(coll, obj):
                    remove_obj = False
            if remove_obj:
                objects_to_remove.add(obj)
        for obj in objects_to_remove:
            # Only delete objects that are not used elsewhere
            bpy.data.objects.remove(obj)

        instance_empties = [obj for obj in collection.users_dupli_group if obj.name in collection.name]
        if instance_empties:
            instance_empty = instance_empties[0]
            container_name = instance_empty["avalon"]["container_name"]
            relative = bpy.context.preferences.filepaths.use_relative_paths
            with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to):
                data_to.collections = [container_name]
            new_collection = self._get_lib_collection(container_name, libpath)
            if new_collection is None:
                raise ValueError(f"A matching collection '{container_name}' "
                                 f"should have been found in: {libpath}")
            for obj in new_collection.objects:
                collection.objects.link(obj)
            bpy.data.collections.remove(new_collection)
        # Update the representation on the collection
        avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY]
        avalon_prop["representation"] = str(representation["_id"])

    def remove(self, container: Dict) -> bool:
        """Remove an existing container from a Blender scene.

        Arguments:
            container (avalon-core:container-1.0): Container to remove,
                from `host.ls()`.

        Returns:
            bool: Whether the container was deleted.

        Warning:
            No nested collections are supported at the moment!
        """
        collection = bpy.data.collections.get(container["objectName"])
        if not collection:
            return False
        assert not (collection.children), "Nested collections are not supported."
        instance_parents = list(collection.users_dupli_group)
        instance_objects = list(collection.objects)
        for obj in instance_objects + instance_parents:
            bpy.data.objects.remove(obj)
        bpy.data.collections.remove(collection)

        return True


class CacheModelLoader(pype.blender.AssetLoader):
    """Load cache models.

    Stores the imported asset in a collection named after the asset.

    Note:
        At least for now it only supports Alembic files.
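
        A possible implementation (hypothetical, not part of this patch)
        could use Blender's built-in Alembic importer, e.g.
        `bpy.ops.wm.alembic_import(filepath=str(self.fname))`.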
+ """ + + families = ["model"] + representations = ["abc"] + + label = "Link Model" + icon = "code-fork" + color = "orange" + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + raise NotImplementedError("Loading of Alembic files is not yet implemented.") + # TODO (jasper): implement Alembic import. + + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + # TODO (jasper): evaluate use of namespace which is 'alien' to Blender. + lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace) + relative = bpy.context.preferences.filepaths.use_relative_paths + + with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to): + data_to.collections = [lib_container] + + scene = bpy.context.scene + instance_empty = bpy.data.objects.new(container_name, None) + scene.collection.objects.link(instance_empty) + instance_empty.instance_type = 'COLLECTION' + collection = bpy.data.collections[lib_container] + collection.name = container_name + instance_empty.instance_collection = collection + + nodes = list(collection.objects) + nodes.append(collection) + nodes.append(instance_empty) + self[:] = nodes + return nodes diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py new file mode 100644 index 0000000000..a097c72047 --- /dev/null +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -0,0 +1,16 @@ +import bpy + +import pyblish.api + + +class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.5 + label = "Blender Current File" + hosts = ['blender'] + + def process(self, context): + """Inject the current working file""" + current_file = bpy.data.filepath + context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py new file mode 100644 index 0000000000..c60402f9ca --- /dev/null +++ b/pype/plugins/blender/publish/collect_model.py @@ -0,0 +1,52 @@ +import typing +from typing import Generator + +import bpy + +import avalon.api +import pyblish.api +from avalon.blender.pipeline import AVALON_PROPERTY + + +class CollectModel(pyblish.api.ContextPlugin): + """Collect the data of a model.""" + + hosts = ["blender"] + label = "Collect Model" + order = pyblish.api.CollectorOrder + + @staticmethod + def get_model_collections() -> Generator: + """Return all 'model' collections. + + Check if the family is 'model' and if it doesn't have the + representation set. If the representation is set, it is a loaded model + and we don't want to publish it. 
+ """ + for collection in bpy.data.collections: + avalon_prop = collection.get(AVALON_PROPERTY) or dict() + if (avalon_prop.get('family') == 'model' + and not avalon_prop.get('representation')): + yield collection + + def process(self, context): + """Collect the models from the current Blender scene.""" + collections = self.get_model_collections() + for collection in collections: + avalon_prop = collection[AVALON_PROPERTY] + asset = avalon_prop['asset'] + family = avalon_prop['family'] + subset = avalon_prop['subset'] + task = avalon_prop['task'] + name = f"{asset}_{subset}" + instance = context.create_instance( + name=name, + family=family, + subset=subset, + asset=asset, + task=task, + ) + members = list(collection.objects) + members.append(collection) + instance[:] = members + self.log.debug(instance.data) diff --git a/pype/plugins/blender/publish/extract_model.py b/pype/plugins/blender/publish/extract_model.py new file mode 100644 index 0000000000..75ec33fb27 --- /dev/null +++ b/pype/plugins/blender/publish/extract_model.py @@ -0,0 +1,34 @@ +from pathlib import Path +import avalon.blender.workio + +import sonar.api + + +class ExtractModel(sonar.api.Extractor): + """Extract as model.""" + + label = "Model" + hosts = ["blender"] + families = ["model"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = Path(self.staging_dir(instance)) + filename = f"{instance.name}.blend" + filepath = str(stagingdir / filename) + + # Perform extraction + self.log.info("Performing extraction..") + + # Just save the file to a temporary location. At least for now it's no + # problem to have (possibly) extra stuff in the file. + avalon.blender.workio.save_file(filepath, copy=True) + + # Store reference for integration + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"].append(filename) + + self.log.info("Extracted instance '%s' to: %s", instance.name, filepath) diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py new file mode 100644 index 0000000000..79a42a11d5 --- /dev/null +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -0,0 +1,47 @@ +from typing import List + +import bpy + +import pyblish.api +import sonar.blender.action + + +class ValidateMeshHasUvs(pyblish.api.InstancePlugin): + """Validate that the current mesh has UV's.""" + + order = pyblish.api.ValidatorOrder + hosts = ["blender"] + families = ["model"] + category = "geometry" + label = "Mesh Has UV's" + actions = [sonar.blender.action.SelectInvalidAction] + optional = True + + @staticmethod + def has_uvs(obj: bpy.types.Object) -> bool: + """Check if an object has uv's.""" + if not obj.data.uv_layers: + return False + for uv_layer in obj.data.uv_layers: + for polygon in obj.data.polygons: + for loop_index in polygon.loop_indices: + if not uv_layer.data[loop_index].uv: + return False + + return True + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + # TODO (jasper): only check objects in the collection that will be published? + for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + # Make sure we are in object mode. 
            bpy.ops.object.mode_set(mode='OBJECT')
            if not cls.has_uvs(obj):
                invalid.append(obj)
        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")
diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
new file mode 100644
index 0000000000..b2a927a2ed
--- /dev/null
+++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
@@ -0,0 +1,31 @@
from typing import List

import bpy

import pyblish.api
import sonar.blender.action


class ValidateMeshNoNegativeScale(pyblish.api.Validator):
    """Ensure that meshes don't have a negative scale."""

    order = pyblish.api.ValidatorOrder
    hosts = ["blender"]
    families = ["model"]
    label = "Mesh No Negative Scale"
    actions = [sonar.blender.action.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance) -> List:
        invalid = []
        # TODO (jasper): only check objects in the collection that will be published?
        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
            if any(v < 0 for v in obj.scale):
                invalid.append(obj)

        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}")
diff --git a/res/app_icons/blender.png b/res/app_icons/blender.png
new file mode 100644
index 0000000000000000000000000000000000000000..6070a51fae3da0655d1bc14156e8359428d4f6f9
GIT binary patch
literal 51122
[51122 bytes of base85-encoded binary image data omitted]
zH}|yxNjqL=C!qTL(38p%k1Ypzn94c{sLO4(;&#%dvGKuh)j=nM-#qR*^tna;HusX1 z(k<3>OM7g_c=vKkiLjS?xOPSE6TDtzf&fF%X)O-Y5UH_Gx23**zwvdNOqV4`m{i)Q ziJbXh0M_mtkg^O5!IhCCu|&Jmec^5QUvz28T$d?#4;i$k(``%{g(vy6iRmfR8x!c=-`Be6gl2{97;He1Ry(J)@;B+ zAl#bxnp#~{Anp=|BOO1EW;g8@mWWV)W-67Wyyq`|x4YdZOm`Ud_Ds$wY@!SwH{RMd z98?jd#X-7o@B<0Ok4RrR&VNdeVfH0puBDvzerEm71IKEowp-nF*xIwE;B4fycf+MN z(|x7t(=sw2-4YHf7^t|FeAY|Rska{P`Py0)jAVJRkxGeH0uChKMyg7W?E&A;vZDLt zwE8%nN!M=QO-ZH08{>Nd3e^u%HpHAUR4~_e--;tBO#&B8D(29AiG2d19RvJVR9ww@Zr!Jg=*9QMw%A(IVv9zy{H?yf zpS3L-JFe6|+c5@hFM=pu*+b#7@goVGOxbZkDjd~8D0X@_qse0hdFb*7qG!I`Uh9C< zeIqZfHf<%XU%OUz(bqm?LKq^s`nqz*M`Y#?HAHSPm71c}C(P5$nr7Qt{RvPn4vd#x zpk&a=XV$MNX}qQqb6w89HjVf>KbSA?=p8is@T-Qlj^_H%@aAeNK1pju(TS-bu7hS= zj#c(3uBIaGTo@&|X)r(Nl$~ca(@kc!>*P0Zgh2(N{LKATw{fapA3<{59D+)2HIuwZ zG#!2sJ>q)-wd0Tgjr|(HU6*n7Z(gb(_RbAur5p73C`3tDeAY*b%NQa}>+08fELVg=;cvt7LgJ%VlR|)#D2yn mUt&&p3q0rFp(B)L=_&0;E0rjPrLnU;Tdz%R=b@ literal 0 HcmV?d00001 From 43a66826bc45573c3f904fc553ab0608d2ecc5e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 20 Nov 2019 18:23:29 +0100 Subject: [PATCH 003/393] integrate remove components is deactivated by default to not remove thumbnails and movs --- pype/plugins/ftrack/publish/integrate_remove_components.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/ftrack/publish/integrate_remove_components.py b/pype/plugins/ftrack/publish/integrate_remove_components.py index bad50f7200..26cac0f1ae 100644 --- a/pype/plugins/ftrack/publish/integrate_remove_components.py +++ b/pype/plugins/ftrack/publish/integrate_remove_components.py @@ -11,13 +11,13 @@ class IntegrateCleanComponentData(pyblish.api.InstancePlugin): label = 'Clean component data' families = ["ftrack"] optional = True - active = True + active = False def process(self, instance): for comp in instance.data['representations']: self.log.debug('component {}'.format(comp)) - + if "%" in comp['published_path'] or "#" in comp['published_path']: continue From 7f63d864fd56afa13285653b56d9d7b3f8fb4e11 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 20 Nov 2019 18:31:40 +0100 Subject: [PATCH 004/393] created extract scaled thumbnails plugin which creates 3 types of thumbnails small, middle and large --- .../publish/extract_scaled_thumbnails.py | 137 ++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 pype/plugins/global/publish/extract_scaled_thumbnails.py diff --git a/pype/plugins/global/publish/extract_scaled_thumbnails.py b/pype/plugins/global/publish/extract_scaled_thumbnails.py new file mode 100644 index 0000000000..6d6aa6a73c --- /dev/null +++ b/pype/plugins/global/publish/extract_scaled_thumbnails.py @@ -0,0 +1,137 @@ +import os +import pyblish.api +import pype.api + + +class ExtractScaledThumbnails(pyblish.api.InstancePlugin): + """Create scaled thumbnails for GUIs like loader etc. + + Scaled thumbnails creation is based on data in `output_data` attribute. + The dictionary `output_data` store additional filename ending and + filters for ffmpeg. + + Example: + "small": { + "file_end": "S", + "filters": ["scale=160:-1"] + } + + "small" - key is used to store result under represetation + "file_end" - is distinguishing part for files. + - "S" means that source thumbnail "myasset_thumbnail.jpg" + will be converted to "myasset_thumbnail_S.jpg" + "filters" - should contain filters for ffmpeg, key is `scale` filter + which is used to render thumbnails with different + resolution. 
From 7f63d864fd56afa13285653b56d9d7b3f8fb4e11 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 20 Nov 2019 18:31:40 +0100
Subject: [PATCH 004/393] created extract scaled thumbnails plugin which
 creates 3 types of thumbnails: small, middle and large

---
 .../publish/extract_scaled_thumbnails.py      | 137 ++++++++++++++++++
 1 file changed, 137 insertions(+)
 create mode 100644 pype/plugins/global/publish/extract_scaled_thumbnails.py

diff --git a/pype/plugins/global/publish/extract_scaled_thumbnails.py b/pype/plugins/global/publish/extract_scaled_thumbnails.py
new file mode 100644
index 0000000000..6d6aa6a73c
--- /dev/null
+++ b/pype/plugins/global/publish/extract_scaled_thumbnails.py
@@ -0,0 +1,137 @@
+import os
+import pyblish.api
+import pype.api
+
+
+class ExtractScaledThumbnails(pyblish.api.InstancePlugin):
+    """Create scaled thumbnails for GUIs like loader etc.
+
+    Scaled thumbnails creation is based on data in `output_data` attribute.
+    The dictionary `output_data` stores additional filename ending and
+    filters for ffmpeg.
+
+    Example:
+        "small": {
+            "file_end": "S",
+            "filters": ["scale=160:-1"]
+        }
+
+    "small" - key is used to store result under representation
+    "file_end" - is distinguishing part for files.
+               - "S" means that source thumbnail "myasset_thumbnail.jpg"
+                 will be converted to "myasset_thumbnail_S.jpg"
+    "filters" - should contain filters for ffmpeg, key is `scale` filter
+               which is used to render thumbnails with different
+               resolution.
+               - "160:-1" will render thumbnail with 160px width and keep
+                 aspect ratio of source image
+    """
+
+    order = pyblish.api.ExtractorOrder + 0.499
+    label = "Extract scaled thumbnails"
+
+    optional = True
+    active = True
+    hosts = ["nuke", "maya", "shell"]
+    # Default setting for output data
+    output_data = {
+        "small": {
+            "file_end": "S",
+            "filters": ["scale=160:-1"]
+        },
+        "middle": {
+            "file_end": "M",
+            "filters": ["scale=320:-1"]
+        },
+        "large": {
+            "file_end": "L",
+            "filters": ["scale=1024:-1"]
+        }
+    }
+
+    def process(self, instance):
+        for repre in instance.data["representations"]:
+            name = repre.get("name", "")
+            if name:
+                name = " <{}>".format(name)
+            self.log.debug("Checking repre{}: {}".format(name, repre))
+            # Skip if thumbnail not in tags
+            tags = repre.get("tags") or []
+            if (
+                "thumbnail" not in tags and
+                not repre.get("thumbnail")  # backwards compatibility
+            ):
+                continue
+
+            # skip if files are not set or empty
+            files = repre.get("files")
+            if not files:
+                continue
+
+            orig_filename = None
+            if isinstance(files, (str, unicode)):
+                orig_filename = files
+            elif isinstance(files, list):
+                orig_filename = files[0]
+            else:
+                self.log.debug((
+                    "Original `files`{} have invalid type \"{}\" on repre {}"
+                ).format(name, str(type(files)), str(repre)))
+                continue
+
+            staging_dir = repre["stagingDir"]
+            full_input_path = os.path.join(staging_dir, orig_filename)
+
+            orig_basename, orig_ext = os.path.splitext(orig_filename)
+            thumbnail_data = {}
+
+            _input_args = []
+            # Overrides output file
+            _input_args.append("-y")
+            # Set input path
+            _input_args.append("-i \"{}\"".format(full_input_path))
+
+            ffmpeg_path = os.path.join(
+                os.environ.get("FFMPEG_PATH", ""), "ffmpeg"
+            )
+
+            for output_type, single_data in self.output_data.items():
+                # DEBUG remove after testing!
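+                # One ffmpeg call is built per output size: the shared
+                # input arguments are copied, this size's "-vf" filter chain
+                # is appended and the output is rendered into the staging
+                # directory as "<basename>_<file_end><ext>".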
+ self.log.debug(output_type) + file_end = single_data["file_end"] + in_filters = single_data["filters"] + + ffmpeg_filters = [] + if in_filters: + ffmpeg_filters.append("-vf") + ffmpeg_filters.extend([fil for fil in in_filters]) + + # copy _input_args + input_args = [arg for arg in _input_args] + input_args.extend(ffmpeg_filters) + + output_args = [] + filename = "{}_{}{}".format( + orig_basename, file_end, orig_ext + ) + full_output_path = os.path.join(staging_dir, filename) + output_args.append("\"{}\"".format(full_output_path)) + + mov_args = [ + ffmpeg_path, + " ".join(input_args), + " ".join(output_args) + ] + subprcs_cmd = " ".join(mov_args) + + self.log.debug("Executing: {}".format(subprcs_cmd)) + output = pype.api.subprocess(subprcs_cmd) + self.log.debug("Output: {}".format(output)) + + # Store data for integrator + thumbnail_data[output_type] = { + "path": full_output_path, + "filename_append": file_end + } + + repre["thumbnail_data"] = thumbnail_data From c2e5f792f4a62a93fa59a160370ead4500c094fa Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 20 Nov 2019 18:32:20 +0100 Subject: [PATCH 005/393] added processing of thumbnails to integrate new so they can be accesible for guis --- pype/plugins/global/publish/integrate_new.py | 62 ++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 64f6dd5015..a8e6999e8d 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -384,6 +384,65 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre['published_path'] = dst self.log.debug("__ dst: {}".format(dst)) + thumbnail_data = {} + if 'thumbnail' in repre.get('tags', []): + self.log.debug(( + "Looking for scaled thumbnails in <{}>" + ).format(repre["name"])) + # prepare template for thumbnails + # - same as anatomy but keys in basename are replaced with + # one single key `thumb_file_name` + # - template is same for all thumbnails + template_base_name = os.path.basename(template) + thumb_template = template.replace( + template_base_name, "{thumb_file_name}" + ) + self.log.debug( + "Thumbnail template: {}".format(thumb_template) + ) + # get orig thumbnail filename + repre_basename = os.path.basename(dst) + repre_file, repre_ext = os.path.splitext(repre_basename) + # get thumbnail data from reresentation (if there are any) + _thumbnail_data = repre.pop("thumbnail_data", {}) + if _thumbnail_data: + thumbnail_data["template"] = thumb_template + + for thumb_type, thumb_info in _thumbnail_data.items(): + _src = thumb_info["path"] + + # get filename appending "like `S` for small thumb" + filename_append = thumb_info["filename_append"] + thumb_file_name = "{}_{}{}".format( + repre_file, filename_append, repre_ext + ) + _template_data = template_data.copy() + _template_data["thumb_file_name"] = thumb_file_name + # fill thumbnail template with prepared data + self.log.debug( + "Thumbnail <{}> template data: {}".format( + thumb_type, _template_data + ) + ) + template_filled = thumb_template.format( + **_template_data + ) + _dst = os.path.normpath( + template_filled + ).replace("..", ".") + self.log.debug( + "Thumbnail <{}> src: {} || dst: {}".format( + thumb_type, _src, _dst + ) + ) + # add to transfers + instance.data["transfers"].append([_src, _dst]) + # store full path and additional context data + thumbnail_data[thumb_type] = { + "path": _dst, + "context": {"thumb_file_name": thumb_file_name} + } + representation = { "schema": 
"pype:representation-2.0", "type": "representation", @@ -409,6 +468,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } } + if thumbnail_data: + representation["data"]["thumbnail_data"] = thumbnail_data + if sequence_repre and repre.get("frameStart"): representation['context']['frame'] = repre.get("frameStart") From a03bbc924a4d3f25d36b17a9527ca137f5f5438c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 20 Nov 2019 18:33:00 +0100 Subject: [PATCH 006/393] renamed extract review repre name to thumbnail --- pype/plugins/global/publish/extract_jpeg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 10c339e0c6..18d9286b86 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -69,7 +69,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): instance.data["representations"] = [] representation = { - 'name': 'jpg', + 'name': 'thumbnail', 'ext': 'jpg', 'files': jpegFile, "stagingDir": stagingdir, From 1ddf61a7ce98de34b0ddeb9b5ef273acd5f1f489 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 3 Dec 2019 16:24:10 +0100 Subject: [PATCH 007/393] assemblies were not loading correctly --- pype/plugins/global/publish/integrate.py | 2 +- pype/plugins/global/publish/integrate_new.py | 3 ++- pype/plugins/maya/load/load_reference.py | 8 ++++--- pype/plugins/maya/publish/extract_assembly.py | 24 +++++++++++++++---- 4 files changed, 28 insertions(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 59e05ee2aa..b2f273ec5c 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -24,7 +24,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): label = "Integrate Asset" order = pyblish.api.IntegratorOrder - families = ["assembly"] + families = [] exclude_families = ["clip"] def process(self, instance): diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index fce6b0b5c7..0a1a1fd031 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -70,7 +70,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "audio", "yetiRig", "yeticache", - "source" + "source", + "assembly" ] exclude_families = ["clip"] diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index 55db019cf4..c17538c57d 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -43,14 +43,16 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): namespace = cmds.referenceQuery(nodes[0], namespace=True) shapes = cmds.ls(nodes, shapes=True, long=True) - print(shapes) newNodes = (list(set(nodes) - set(shapes))) - print(newNodes) + + current_namespace = pm.namespaceInfo(currentNamespace=True) + + if current_namespace != ":": + groupName = current_namespace + ":" + groupName groupNode = pm.PyNode(groupName) roots = set() - print(nodes) for node in newNodes: try: diff --git a/pype/plugins/maya/publish/extract_assembly.py b/pype/plugins/maya/publish/extract_assembly.py index 26b16a73c4..c12d57e836 100644 --- a/pype/plugins/maya/publish/extract_assembly.py +++ b/pype/plugins/maya/publish/extract_assembly.py @@ -22,11 +22,11 @@ class ExtractAssembly(pype.api.Extractor): def process(self, instance): - parent_dir = self.staging_dir(instance) + staging_dir = 
self.staging_dir(instance)
         hierarchy_filename = "{}.abc".format(instance.name)
-        hierarchy_path = os.path.join(parent_dir, hierarchy_filename)
+        hierarchy_path = os.path.join(staging_dir, hierarchy_filename)
         json_filename = "{}.json".format(instance.name)
-        json_path = os.path.join(parent_dir, json_filename)
+        json_path = os.path.join(staging_dir, json_filename)
 
         self.log.info("Dumping scene data for debugging ..")
         with open(json_path, "w") as filepath:
@@ -46,8 +46,24 @@ class ExtractAssembly(pype.api.Extractor):
                          "uvWrite": True,
                          "selection": True})
 
-        instance.data["files"] = [json_filename, hierarchy_filename]
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
 
+        representation_abc = {
+            'name': 'abc',
+            'ext': 'abc',
+            'files': hierarchy_filename,
+            "stagingDir": staging_dir
+        }
+        instance.data["representations"].append(representation_abc)
+
+        representation_json = {
+            'name': 'json',
+            'ext': 'json',
+            'files': json_filename,
+            "stagingDir": staging_dir
+        }
+        instance.data["representations"].append(representation_json)
 
         # Remove data
         instance.data.pop("scenedata", None)

From 6bc2042cea12869717b318b134ed652cc209ee42 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 4 Dec 2019 18:29:50 +0100
Subject: [PATCH 008/393] removed the previously added integration and
 thumbnail extractor

---
 .../publish/extract_scaled_thumbnails.py      | 137 ------------------
 pype/plugins/global/publish/integrate_new.py  |  65 +----------
 2 files changed, 2 insertions(+), 200 deletions(-)
 delete mode 100644 pype/plugins/global/publish/extract_scaled_thumbnails.py

diff --git a/pype/plugins/global/publish/extract_scaled_thumbnails.py b/pype/plugins/global/publish/extract_scaled_thumbnails.py
deleted file mode 100644
index 6d6aa6a73c..0000000000
--- a/pype/plugins/global/publish/extract_scaled_thumbnails.py
+++ /dev/null
@@ -1,137 +0,0 @@
-import os
-import pyblish.api
-import pype.api
-
-
-class ExtractScaledThumbnails(pyblish.api.InstancePlugin):
-    """Create scaled thumbnails for GUIs like loader etc.
-
-    Scaled thumbnails creation is based on data in `output_data` attribute.
-    The dictionary `output_data` stores additional filename ending and
-    filters for ffmpeg.
-
-    Example:
-        "small": {
-            "file_end": "S",
-            "filters": ["scale=160:-1"]
-        }
-
-    "small" - key is used to store result under representation
-    "file_end" - is distinguishing part for files.
-               - "S" means that source thumbnail "myasset_thumbnail.jpg"
-                 will be converted to "myasset_thumbnail_S.jpg"
-    "filters" - should contain filters for ffmpeg, key is `scale` filter
-               which is used to render thumbnails with different
-               resolution.
- - "160:-1" will render thumbnail with 160px width and keep - aspect ratio of source image - """ - - order = pyblish.api.ExtractorOrder + 0.499 - label = "Extract scaled thumbnails" - - optional = True - active = True - hosts = ["nuke", "maya", "shell"] - # Default setting for output data - output_data = { - "small": { - "file_end": "S", - "filters": ["scale=160:-1"] - }, - "middle": { - "file_end": "M", - "filters": ["scale=320:-1"] - }, - "large": { - "file_end": "L", - "filters": ["scale=1024:-1"] - } - } - - def process(self, instance): - for repre in instance.data["representations"]: - name = repre.get("name", "") - if name: - name = " <{}>".format(name) - self.log.debug("Checking repre{}: {}".format(name, repre)) - # Skip if thumbnail not in tags - tags = repre.get("tags") or [] - if ( - "thumbnail" not in tags and - not repre.get("thumbnail") # backwards compatibility - ): - continue - - # skip if files are not set or empty - files = repre.get("files") - if not files: - continue - - orig_filename = None - if isinstance(files, (str, unicode)): - orig_filename = files - elif isinstance(files, list): - orig_filename = files[0] - else: - self.log.debug(( - "Original `files`{} have invalid type \"{}\" on repre {}" - ).format(name, str(type(files)), str(repre))) - continue - - staging_dir = repre["stagingDir"] - full_input_path = os.path.join(staging_dir, orig_filename) - - orig_basename, orig_ext = os.path.splitext(orig_filename) - thumbnail_data = {} - - _input_args = [] - # Overrides output file - _input_args.append("-y") - # Set input path - _input_args.append("-i \"{}\"".format(full_input_path)) - - ffmpeg_path = os.path.join( - os.environ.get("FFMPEG_PATH", ""), "ffmpeg" - ) - - for output_type, single_data in self.output_data.items(): - # DEBUG remove after testing! 
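-                # One ffmpeg call is built per output size: the shared
-                # input arguments are copied, this size's "-vf" filter chain
-                # is appended and the output is rendered into the staging
-                # directory as "<basename>_<file_end><ext>".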
- self.log.debug(output_type) - file_end = single_data["file_end"] - in_filters = single_data["filters"] - - ffmpeg_filters = [] - if in_filters: - ffmpeg_filters.append("-vf") - ffmpeg_filters.extend([fil for fil in in_filters]) - - # copy _input_args - input_args = [arg for arg in _input_args] - input_args.extend(ffmpeg_filters) - - output_args = [] - filename = "{}_{}{}".format( - orig_basename, file_end, orig_ext - ) - full_output_path = os.path.join(staging_dir, filename) - output_args.append("\"{}\"".format(full_output_path)) - - mov_args = [ - ffmpeg_path, - " ".join(input_args), - " ".join(output_args) - ] - subprcs_cmd = " ".join(mov_args) - - self.log.debug("Executing: {}".format(subprcs_cmd)) - output = pype.api.subprocess(subprcs_cmd) - self.log.debug("Output: {}".format(output)) - - # Store data for integrator - thumbnail_data[output_type] = { - "path": full_output_path, - "filename_append": file_end - } - - repre["thumbnail_data"] = thumbnail_data diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index a8e6999e8d..cc71fce49e 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -384,65 +384,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre['published_path'] = dst self.log.debug("__ dst: {}".format(dst)) - thumbnail_data = {} - if 'thumbnail' in repre.get('tags', []): - self.log.debug(( - "Looking for scaled thumbnails in <{}>" - ).format(repre["name"])) - # prepare template for thumbnails - # - same as anatomy but keys in basename are replaced with - # one single key `thumb_file_name` - # - template is same for all thumbnails - template_base_name = os.path.basename(template) - thumb_template = template.replace( - template_base_name, "{thumb_file_name}" - ) - self.log.debug( - "Thumbnail template: {}".format(thumb_template) - ) - # get orig thumbnail filename - repre_basename = os.path.basename(dst) - repre_file, repre_ext = os.path.splitext(repre_basename) - # get thumbnail data from reresentation (if there are any) - _thumbnail_data = repre.pop("thumbnail_data", {}) - if _thumbnail_data: - thumbnail_data["template"] = thumb_template - - for thumb_type, thumb_info in _thumbnail_data.items(): - _src = thumb_info["path"] - - # get filename appending "like `S` for small thumb" - filename_append = thumb_info["filename_append"] - thumb_file_name = "{}_{}{}".format( - repre_file, filename_append, repre_ext - ) - _template_data = template_data.copy() - _template_data["thumb_file_name"] = thumb_file_name - # fill thumbnail template with prepared data - self.log.debug( - "Thumbnail <{}> template data: {}".format( - thumb_type, _template_data - ) - ) - template_filled = thumb_template.format( - **_template_data - ) - _dst = os.path.normpath( - template_filled - ).replace("..", ".") - self.log.debug( - "Thumbnail <{}> src: {} || dst: {}".format( - thumb_type, _src, _dst - ) - ) - # add to transfers - instance.data["transfers"].append([_src, _dst]) - # store full path and additional context data - thumbnail_data[thumb_type] = { - "path": _dst, - "context": {"thumb_file_name": thumb_file_name} - } - representation = { "schema": "pype:representation-2.0", "type": "representation", @@ -468,9 +409,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } } - if thumbnail_data: - representation["data"]["thumbnail_data"] = thumbnail_data - if sequence_repre and repre.get("frameStart"): representation['context']['frame'] = repre.get("frameStart") @@ -485,7 +423,8 @@ 
class IntegrateAssetNew(pyblish.api.InstancePlugin):
         for rep in instance.data["representations"]:
             self.log.debug("__ represNAME: {}".format(rep['name']))
             self.log.debug("__ represPATH: {}".format(rep['published_path']))
-        io.insert_many(representations)
+        result = io.insert_many(representations)
+        instance.data["published_representation_ids"] = result.inserted_ids
 
         # self.log.debug("Representation: {}".format(representations))
         self.log.info("Registered {} items".format(len(representations)))
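PATCH 009 below copies the published thumbnail next to its version and records it as a `thumbnail` entity. The ordering detail worth noting: the Mongo `ObjectId` is generated before the entity is inserted, so the id can be baked into the destination path produced by the anatomy template. A stripped-down sketch of that ordering, assuming `bson` (shipped with pymongo) is available; `format_template` is a hypothetical stand-in for the real anatomy formatting:

```python
from bson.objectid import ObjectId


def prepare_thumbnail_entity(template, context_data, format_template):
    # Generate the id up front so the filled path can already contain it.
    thumbnail_id = ObjectId()
    fill_data = dict(context_data, _id=str(thumbnail_id))
    # `format_template` stands in for anatomy.format(); it fills keys such
    # as "{thumbnail_root}" and "{_id}" in the template string.
    dst_path = format_template(template, fill_data)
    entity = {
        "_id": thumbnail_id,
        "type": "thumbnail",
        "schema": "pype:thumbnail-1.0",
        "data": {"template": template, "template_data": context_data},
    }
    return dst_path, entity
```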
Path: {}".format( + src_full_path + )) + return + + # Create id for mongo entity now to fill anatomy template + thumbnail_id = ObjectId() + + # Prepare anatomy template fill data + template_data = copy.deepcopy(thumb_repre["context"]) + template_data["_id"] = str(thumbnail_id) + template_data["thumbnail_root"] = os.environ.get( + "AVALON_THUMBNAIL_ROOT" + ) + + anatomy_filled = anatomy.format(template_data) + final_path = anatomy_filled.get("publish", {}).get("thumbnail") + if not final_path: + self.log.warning(( + "Anatomy template was not filled with entered data" + "\nTemplate: {} " + "\nData: {}" + ).format(thumbnail_template, str(template_data))) + return + + dst_full_path = os.path.normpath(final_path) + self.log.debug( + "Copying file .. {} -> {}".format(src_full_path, dst_full_path) + ) + dirname = os.path.dirname(dst_full_path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno != errno.EEXIST: + tp, value, tb = sys.exc_info() + six.reraise(tp, value, tb) + + shutil.copy(src_full_path, dst_full_path) + + # Clean template data from keys that are dynamic + template_data.pop("_id") + template_data.pop("thumbnail_root") + + thumbnail_entity = { + "_id": thumbnail_id, + "type": "thumbnail", + "schema": "pype:thumbnail-1.0", + "data": { + "template": thumbnail_template, + "template_data": template_data + } + } + # Create thumbnail entity + dbio.insert_one(thumbnail_entity) + self.log.debug( + "Creating entity in database {}".format(str(thumbnail_entity)) + ) + # Set thumbnail id for version + dbio.update_one( + {"_id": version["_id"]}, + {"$set": {"data.thumbnail_id": thumbnail_id}} + ) + self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( + version["name"], str(version["_id"]) + )) From 7a6085559ccc2f5df8c033c30946b6b3fd0fe1d9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:18:30 +0100 Subject: [PATCH 010/393] dicts' keys in io methods calling start on the next row to keep same indentations if line lenght changes --- .../global/publish/collect_templates.py | 30 ++++++++++------- pype/plugins/global/publish/integrate.py | 25 ++++++++------ .../publish/integrate_assumed_destination.py | 30 ++++++++++------- .../publish/integrate_rendered_frames.py | 28 ++++++++-------- .../global/publish/submit_publish_job.py | 33 ++++++++++++------- pype/plugins/maya/load/load_look.py | 8 +++-- pype/plugins/maya/publish/extract_look.py | 33 ++++++++++++------- .../maya/publish/validate_node_ids_related.py | 7 ++-- .../maya/publish/validate_renderlayer_aovs.py | 9 ++--- .../nuke/publish/collect_asset_info.py | 6 ++-- .../plugins/nuke/publish/collect_instances.py | 7 ++-- .../nukestudio/publish/extract_effects.py | 33 ++++++++++++------- .../nukestudio/publish/validate_version.py | 17 ++++++---- .../publish/integrate_assumed_destination.py | 30 ++++++++++------- pype/scripts/fusion_switch_shot.py | 6 ++-- pype/setdress_api.py | 4 ++- 16 files changed, 187 insertions(+), 119 deletions(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index b80ca4ae1b..429dbd8eea 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -31,32 +31,38 @@ class CollectTemplates(pyblish.api.InstancePlugin): asset_name = instance.data["asset"] project_name = api.Session["AVALON_PROJECT"] - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) + project = io.find_one( + {"type": "project", "name": 
project_name}, + projection={"config": True, "data": True} + ) template = project["config"]["template"]["publish"] anatomy = instance.context.data['anatomy'] - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project["_id"] + }) assert asset, ("No asset found by the name '{}' " "in project '{}'".format(asset_name, project_name)) silo = asset.get('silo') - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) + subset = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset["_id"] + }) # assume there is no version yet, we start at `1` version = None version_number = 1 if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + sort=[("name", -1)] + ) # if there is a subset there ought to be version if version is not None: diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 59e05ee2aa..33cb1862d0 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -84,9 +84,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): project = io.find_one({"type": "project"}) - asset = io.find_one({"type": "asset", - "name": ASSET, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": ASSET, + "parent": project["_id"] + }) assert all([project, asset]), ("Could not find current project or " "asset '%s'" % ASSET) @@ -94,10 +96,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): subset = self.get_subset(asset, instance) # get next version - latest_version = io.find_one({"type": "version", - "parent": subset["_id"]}, - {"name": True}, - sort=[("name", -1)]) + latest_version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + {"name": True}, + sort=[("name", -1)] + ) next_version = 1 if latest_version is not None: @@ -318,9 +321,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): def get_subset(self, asset, instance): - subset = io.find_one({"type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"]}) + subset = io.find_one({ + "type": "subset", + "parent": asset["_id"], + "name": instance.data["subset"] + }) if subset is None: subset_name = instance.data["subset"] diff --git a/pype/plugins/global/publish/integrate_assumed_destination.py b/pype/plugins/global/publish/integrate_assumed_destination.py index a26529fc2c..25794a4498 100644 --- a/pype/plugins/global/publish/integrate_assumed_destination.py +++ b/pype/plugins/global/publish/integrate_assumed_destination.py @@ -82,31 +82,37 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin): project_name = api.Session["AVALON_PROJECT"] a_template = anatomy.templates - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) + project = io.find_one( + {"type": "project", "name": project_name}, + projection={"config": True, "data": True} + ) template = a_template['publish']['path'] # anatomy = instance.context.data['anatomy'] - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project["_id"] + }) assert asset, ("No asset found by the name '{}' " "in project '{}'".format(asset_name, project_name)) - subset = io.find_one({"type": 
"subset", - "name": subset_name, - "parent": asset["_id"]}) + subset = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset["_id"] + }) # assume there is no version yet, we start at `1` version = None version_number = 1 if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + sort=[("name", -1)] + ) # if there is a subset there ought to be version if version is not None: diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index 086b03802e..69280d272d 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -88,9 +88,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): project = io.find_one({"type": "project"}) - asset = io.find_one({"type": "asset", - "name": ASSET, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": ASSET, + "parent": project["_id"] + }) assert all([project, asset]), ("Could not find current project or " "asset '%s'" % ASSET) @@ -98,10 +100,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): subset = self.get_subset(asset, instance) # get next version - latest_version = io.find_one({"type": "version", - "parent": subset["_id"]}, - {"name": True}, - sort=[("name", -1)]) + latest_version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + {"name": True}, + sort=[("name", -1)] + ) next_version = 1 if latest_version is not None: @@ -251,9 +254,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.log.debug("path_to_save: {}".format(path_to_save)) - - - representation = { "schema": "pype:representation-2.0", "type": "representation", @@ -332,9 +332,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin): def get_subset(self, asset, instance): - subset = io.find_one({"type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"]}) + subset = io.find_one({ + "type": "subset", + "parent": asset["_id"], + "name": instance.data["subset"] + }) if subset is None: subset_name = instance.data["subset"] diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 2a254b015c..311f5274f6 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -27,14 +27,19 @@ def _get_script(): # Logic to retrieve latest files concerning extendFrames def get_latest_version(asset_name, subset_name, family): # Get asset - asset_name = io.find_one({"type": "asset", - "name": asset_name}, - projection={"name": True}) + asset_name = io.find_one( + {"type": "asset", "name": asset_name}, + projection={"name": True} + ) - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset_name["_id"]}, - projection={"_id": True, "name": True}) + subset = io.find_one( + { + "type": "subset", + "name": subset_name, + "parent": asset_name["_id"] + }, + projection={"_id": True, "name": True} + ) # Check if subsets actually exists (pre-run check) assert subset, "No subsets found, please publish with `extendFrames` off" @@ -45,11 +50,15 @@ def get_latest_version(asset_name, subset_name, family): "data.endFrame": True, "parent": True} - version = io.find_one({"type": "version", - "parent": subset["_id"], - "data.families": family}, - projection=version_projection, - sort=[("name", -1)]) + version = 
io.find_one(
+        {
+            "type": "version",
+            "parent": subset["_id"],
+            "data.families": family
+        },
+        projection=version_projection,
+        sort=[("name", -1)]
+    )
 
     assert version, "No version found, this is a bug"
 
diff --git a/pype/plugins/maya/load/load_look.py b/pype/plugins/maya/load/load_look.py
index b1c88bcd18..04ac9b23e4 100644
--- a/pype/plugins/maya/load/load_look.py
+++ b/pype/plugins/maya/load/load_look.py
@@ -116,9 +116,11 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
                              shapes=True))
         nodes = set(nodes_list)
 
-        json_representation = io.find_one({"type": "representation",
-                                           "parent": representation['parent'],
-                                           "name": "json"})
+        json_representation = io.find_one({
+            "type": "representation",
+            "parent": representation['parent'],
+            "name": "json"
+        })
 
         # Load relationships
         shader_relation = api.get_representation_path(json_representation)
diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py
index c7b8058852..02bd80cea2 100644
--- a/pype/plugins/maya/publish/extract_look.py
+++ b/pype/plugins/maya/publish/extract_look.py
@@ -417,32 +417,41 @@ class ExtractLook(pype.api.Extractor):
         project_name = api.Session["AVALON_PROJECT"]
         a_template = anatomy.templates
 
-        project = io.find_one({"type": "project",
-                               "name": project_name},
-                              projection={"config": True, "data": True})
+        project = io.find_one(
+            {
+                "type": "project",
+                "name": project_name
+            },
+            projection={"config": True, "data": True}
+        )
 
         template = a_template['publish']['path']
         # anatomy = instance.context.data['anatomy']
 
-        asset = io.find_one({"type": "asset",
-                             "name": asset_name,
-                             "parent": project["_id"]})
+        asset = io.find_one({
+            "type": "asset",
+            "name": asset_name,
+            "parent": project["_id"]
+        })
 
         assert asset, ("No asset found by the name '{}' "
                        "in project '{}'".format(asset_name, project_name))
         silo = asset.get('silo')
 
-        subset = io.find_one({"type": "subset",
-                              "name": subset_name,
-                              "parent": asset["_id"]})
+        subset = io.find_one({
+            "type": "subset",
+            "name": subset_name,
+            "parent": asset["_id"]
+        })
 
         # assume there is no version yet, we start at `1`
         version = None
         version_number = 1
         if subset is not None:
-            version = io.find_one({"type": "version",
-                                   "parent": subset["_id"]},
-                                  sort=[("name", -1)])
+            version = io.find_one(
+                {"type": "version", "parent": subset["_id"]},
+                sort=[("name", -1)]
+            )
 
         # if there is a subset there ought to be version
         if version is not None:
diff --git a/pype/plugins/maya/publish/validate_node_ids_related.py b/pype/plugins/maya/publish/validate_node_ids_related.py
index 4a154d0b71..9eee63c3ef 100644
--- a/pype/plugins/maya/publish/validate_node_ids_related.py
+++ b/pype/plugins/maya/publish/validate_node_ids_related.py
@@ -38,9 +38,10 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
         invalid = list()
 
         asset = instance.data['asset']
-        asset_data = io.find_one({"name": asset,
-                                  "type": "asset"},
-                                 projection={"_id": True})
+        asset_data = io.find_one(
+            {"name": asset, "type": "asset"},
+            projection={"_id": True}
+        )
         asset_id = str(asset_data['_id'])
 
         # We do want to check the referenced nodes as it might be
diff --git a/pype/plugins/maya/publish/validate_renderlayer_aovs.py b/pype/plugins/maya/publish/validate_renderlayer_aovs.py
index e14c92a8b4..686a11e906 100644
--- a/pype/plugins/maya/publish/validate_renderlayer_aovs.py
+++ b/pype/plugins/maya/publish/validate_renderlayer_aovs.py
@@ -49,9 +49,10 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
         """Check if subset is registered in the database under the asset"""
 
         asset = 
io.find_one({"type": "asset", "name": asset_name}) - is_valid = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) + is_valid = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset["_id"] + }) return is_valid - diff --git a/pype/plugins/nuke/publish/collect_asset_info.py b/pype/plugins/nuke/publish/collect_asset_info.py index 76b93ef3d0..8a8791ec36 100644 --- a/pype/plugins/nuke/publish/collect_asset_info.py +++ b/pype/plugins/nuke/publish/collect_asset_info.py @@ -13,8 +13,10 @@ class CollectAssetInfo(pyblish.api.ContextPlugin): ] def process(self, context): - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}) + asset_data = io.find_one({ + "type": "asset", + "name": api.Session["AVALON_ASSET"] + }) self.log.info("asset_data: {}".format(asset_data)) context.data['handles'] = int(asset_data["data"].get("handles", 0)) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 483f260295..53a9383b39 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -15,9 +15,10 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): hosts = ["nuke", "nukeassist"] def process(self, context): - - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}) + asset_data = io.find_one({ + "type": "asset", + "name": api.Session["AVALON_ASSET"] + }) self.log.debug("asset_data: {}".format(asset_data["data"])) instances = [] diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index 7aa79d6cc3..a337a5cd1d 100644 --- a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -169,32 +169,41 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): project_name = api.Session["AVALON_PROJECT"] a_template = anatomy.templates - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) + project = io.find_one( + { + "type": "project", + "name": project_name + }, + projection={"config": True, "data": True} + ) template = a_template['publish']['path'] # anatomy = instance.context.data['anatomy'] - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project["_id"] + }) assert asset, ("No asset found by the name '{}' " "in project '{}'".format(asset_name, project_name)) silo = asset.get('silo') - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) + subset = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset["_id"] + }) # assume there is no version yet, we start at `1` version = None version_number = 1 if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + sort=[("name", -1)] + ) # if there is a subset there ought to be version if version is not None: diff --git a/pype/plugins/nukestudio/publish/validate_version.py b/pype/plugins/nukestudio/publish/validate_version.py index 194b270d51..ebb8f357f8 100644 --- a/pype/plugins/nukestudio/publish/validate_version.py +++ b/pype/plugins/nukestudio/publish/validate_version.py @@ -3,6 +3,7 @@ from avalon import io from pype.action import get_errored_instances_from_context import 
pype.api as pype + @pyblish.api.log class RepairNukestudioVersionUp(pyblish.api.Action): label = "Version Up Workfile" @@ -53,13 +54,17 @@ class ValidateVersion(pyblish.api.InstancePlugin): io.install() project = io.find_one({"type": "project"}) - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project["_id"] + }) - subset = io.find_one({"type": "subset", - "parent": asset["_id"], - "name": subset_name}) + subset = io.find_one({ + "type": "subset", + "parent": asset["_id"], + "name": subset_name + }) version_db = io.find_one({ 'type': 'version', diff --git a/pype/plugins/premiere/publish/integrate_assumed_destination.py b/pype/plugins/premiere/publish/integrate_assumed_destination.py index c82b70c66f..a8862ff94c 100644 --- a/pype/plugins/premiere/publish/integrate_assumed_destination.py +++ b/pype/plugins/premiere/publish/integrate_assumed_destination.py @@ -77,32 +77,38 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin): asset_name = instance.data["asset"] project_name = api.Session["AVALON_PROJECT"] - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) + project = io.find_one( + {"type": "project", "name": project_name}, + projection={"config": True, "data": True} + ) template = project["config"]["template"]["publish"] # anatomy = instance.context.data['anatomy'] - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project["_id"] + }) assert asset, ("No asset found by the name '{}' " "in project '{}'".format(asset_name, project_name)) silo = asset.get('silo') - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) + subset = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset["_id"] + }) # assume there is no version yet, we start at `1` version = None version_number = 1 if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + sort=[("name", -1)] + ) # if there is a subset there ought to be version if version is not None: diff --git a/pype/scripts/fusion_switch_shot.py b/pype/scripts/fusion_switch_shot.py index 26a93b9b9a..539bcf4f68 100644 --- a/pype/scripts/fusion_switch_shot.py +++ b/pype/scripts/fusion_switch_shot.py @@ -170,8 +170,10 @@ def switch(asset_name, filepath=None, new=True): assert asset, "Could not find '%s' in the database" % asset_name # Get current project - self._project = io.find_one({"type": "project", - "name": api.Session["AVALON_PROJECT"]}) + self._project = io.find_one({ + "type": "project", + "name": api.Session["AVALON_PROJECT"] + }) # Go to comp if not filepath: diff --git a/pype/setdress_api.py b/pype/setdress_api.py index c6de0a4f74..c58f259676 100644 --- a/pype/setdress_api.py +++ b/pype/setdress_api.py @@ -463,7 +463,9 @@ def update_scene(set_container, containers, current_data, new_data, new_file): # Check whether the conversion can be done by the Loader. # They *must* use the same asset, subset and Loader for # `api.update` to make sense. 
- old = io.find_one({"_id": io.ObjectId(representation_current)}) + old = io.find_one( + {"_id": io.ObjectId(representation_current)} + ) new = io.find_one({"_id": io.ObjectId(representation_new)}) is_valid = compare_representations(old=old, new=new) if not is_valid: From 8b02fcebf78dd0a7b60d18a8c167f35f952d5f39 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:20:08 +0100 Subject: [PATCH 011/393] another indentation change --- pype/lib.py | 48 ++++++++++++++++++++++++++++-------------------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index e41f9eb8bc..72874bad76 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -181,9 +181,10 @@ def any_outdated(): if representation in checked: continue - representation_doc = io.find_one({"_id": io.ObjectId(representation), - "type": "representation"}, - projection={"parent": True}) + representation_doc = io.find_one( + {"_id": io.ObjectId(representation), "type": "representation"}, + projection={"parent": True} + ) if representation_doc and not is_latest(representation_doc): return True elif not representation_doc: @@ -297,23 +298,28 @@ def switch_item(container, assert asset, ("Could not find asset in the database with the name " "'%s'" % asset_name) - subset = io.find_one({"name": subset_name, - "type": "subset", - "parent": asset["_id"]}) + subset = io.find_one({ + "name": subset_name, + "type": "subset", + "parent": asset["_id"] + }) assert subset, ("Could not find subset in the database with the name " "'%s'" % subset_name) - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[('name', -1)]) + version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + sort=[('name', -1)] + ) assert version, "Could not find a version for {}.{}".format( asset_name, subset_name ) - representation = io.find_one({"name": representation_name, - "type": "representation", - "parent": version["_id"]}) + representation = io.find_one({ + "name": representation_name, + "type": "representation", + "parent": version["_id"]} + ) assert representation, ("Could not find representation in the database with" " the name '%s'" % representation_name) @@ -523,8 +529,7 @@ def get_subsets(asset_name, from avalon import io # query asset from db - asset_io = io.find_one({"type": "asset", - "name": asset_name}) + asset_io = io.find_one({"type": "asset", "name": asset_name}) # check if anything returned assert asset_io, "Asset not existing. 
\ @@ -548,14 +553,17 @@ def get_subsets(asset_name, # Process subsets for subset in subsets: if not version: - version_sel = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version_sel = io.find_one( + {"type": "version", "parent": subset["_id"]}, + sort=[("name", -1)] + ) else: assert isinstance(version, int), "version needs to be `int` type" - version_sel = io.find_one({"type": "version", - "parent": subset["_id"], - "name": int(version)}) + version_sel = io.find_one({ + "type": "version", + "parent": subset["_id"], + "name": int(version) + }) find_dict = {"type": "representation", "parent": version_sel["_id"]} From 77a6ebc9fd8c7b958473776e3b693932ef625405 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:21:38 +0100 Subject: [PATCH 012/393] integrate new indentation change --- pype/plugins/global/publish/integrate_new.py | 25 ++++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 5a00437a6f..bdcebeb5fe 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -150,9 +150,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): io.install() project = io.find_one({"type": "project"}) - asset = io.find_one({"type": "asset", - "name": ASSET, - "parent": project["_id"]}) + asset = io.find_one({ + "type": "asset", + "name": ASSET, + "parent": project["_id"] + }) assert all([project, asset]), ("Could not find current project or " "asset '%s'" % ASSET) @@ -160,10 +162,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset, instance) # get next version - latest_version = io.find_one({"type": "version", - "parent": subset["_id"]}, - {"name": True}, - sort=[("name", -1)]) + latest_version = io.find_one( + {"type": "version", "parent": subset["_id"]}, + {"name": True}, + sort=[("name", -1)] + ) next_version = 1 if latest_version is not None: @@ -500,9 +503,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): filelink.create(src, dst, filelink.HARDLINK) def get_subset(self, asset, instance): - subset = io.find_one({"type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"]}) + subset = io.find_one({ + "type": "subset", + "parent": asset["_id"], + "name": instance.data["subset"] + }) if subset is None: subset_name = instance.data["subset"] From 7b2e5f1b9e5bbc90ad1fdf947e21b323e83f44a9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 19:31:58 +0100 Subject: [PATCH 013/393] integrate thumbnails now works --- .../global/publish/integrate_thumbnail.py | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index 9a7418eebe..08157187df 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -8,7 +8,7 @@ import six import pyblish.api from bson.objectid import ObjectId -from avalon import api, dbio +from avalon import api, io class IntegrateThumbnails(pyblish.api.InstancePlugin): @@ -19,7 +19,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): families = ["review"] def process(self, instance): - repre_ids = instance.get("published_representation_ids") + repre_ids = instance.data.get("published_representation_ids") if not repre_ids: self.log.debug( "There are not published representation ids on 
the instance." @@ -30,27 +30,24 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: - self.log.error("Anatomy does not have set publish key!") - return + raise AssertionError("Anatomy does not have set publish key!") if "thumbnail" not in anatomy.templates["publish"]: - self.log.warning(( - "There is not set \"thumbnail\" template for project {}" + raise AssertionError(( + "There is not set \"thumbnail\" template for project \"{}\"" ).format(project_name)) - return thumbnail_template = anatomy.templates["publish"]["thumbnail"] - dbio.install() - repres = dbio.find({ + io.install() + repres = io.find({ "_id": {"$in": repre_ids}, "type": "representation" }) if not repres: - self.log.debug(( + raise AssertionError(( "There are not representations in database with ids {}" ).format(str(repre_ids))) - return thumb_repre = None for repre in repres: @@ -64,12 +61,13 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - version = dbio.find_one({"_id": thumb_repre["parent"]}) + version = io.find_one({"_id": thumb_repre["parent"]}) if not version: - self.log.warning("There does not exist version with id {}".format( - str(thumb_repre["parent"]) - )) - return + raise AssertionError( + "There does not exist version with id {}".format( + str(thumb_repre["parent"]) + ) + ) # Get full path to thumbnail file from representation src_full_path = os.path.normpath(thumb_repre["data"]["path"]) @@ -79,25 +77,27 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): )) return + filename, file_extension = os.path.splitext(src_full_path) # Create id for mongo entity now to fill anatomy template thumbnail_id = ObjectId() # Prepare anatomy template fill data template_data = copy.deepcopy(thumb_repre["context"]) - template_data["_id"] = str(thumbnail_id) - template_data["thumbnail_root"] = os.environ.get( - "AVALON_THUMBNAIL_ROOT" - ) + template_data.update({ + "_id": str(thumbnail_id), + "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"), + "ext": file_extension, + "thumbnail_type": "thumbnail" + }) anatomy_filled = anatomy.format(template_data) final_path = anatomy_filled.get("publish", {}).get("thumbnail") if not final_path: - self.log.warning(( + raise AssertionError(( "Anatomy template was not filled with entered data" "\nTemplate: {} " "\nData: {}" ).format(thumbnail_template, str(template_data))) - return dst_full_path = os.path.normpath(final_path) self.log.debug( @@ -127,12 +127,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): } } # Create thumbnail entity - dbio.insert_one(thumbnail_entity) + io.insert_one(thumbnail_entity) self.log.debug( "Creating entity in database {}".format(str(thumbnail_entity)) ) # Set thumbnail id for version - dbio.update_one( + io.update_many( {"_id": version["_id"]}, {"$set": {"data.thumbnail_id": thumbnail_id}} ) From eea4df6b518414a5eed2ce3b818e4fc475a85b64 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 6 Dec 2019 18:34:00 +0100 Subject: [PATCH 014/393] changes to keep indentation consistent --- pype/lib.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index 72874bad76..62bdc02242 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -294,7 +294,10 @@ def switch_item(container, representation_name = representation["name"] # Find the new one - asset = io.find_one({"name": asset_name, "type": "asset"}) + asset = io.find_one({ + "name": asset_name, + "type": "asset" + }) assert asset, ("Could not 
find asset in the database with the name " "'%s'" % asset_name) @@ -307,7 +310,10 @@ def switch_item(container, "'%s'" % subset_name) version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[('name', -1)] ) @@ -357,7 +363,10 @@ def get_asset(asset_name=None): if not asset_name: asset_name = avalon.api.Session["AVALON_ASSET"] - asset_document = io.find_one({"name": asset_name, "type": "asset"}) + asset_document = io.find_one({ + "name": asset_name, + "type": "asset" + }) if not asset_document: raise TypeError("Entity \"{}\" was not found in DB".format(asset_name)) @@ -554,7 +563,10 @@ def get_subsets(asset_name, for subset in subsets: if not version: version_sel = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] ) else: From 7f9a4e952832b73abdb006a9959c2bd6baf52b45 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 6 Dec 2019 18:41:54 +0100 Subject: [PATCH 015/393] changes to keep indentation consistend 2 --- pype/plugins/global/publish/collect_templates.py | 10 ++++++++-- pype/plugins/global/publish/integrate.py | 5 ++++- .../global/publish/integrate_assumed_destination.py | 5 ++++- pype/plugins/global/publish/integrate_new.py | 5 ++++- .../global/publish/integrate_rendered_frames.py | 5 ++++- pype/plugins/global/publish/submit_publish_job.py | 5 ++++- pype/plugins/maya/publish/extract_look.py | 5 ++++- pype/plugins/maya/publish/validate_node_ids_related.py | 5 ++++- pype/plugins/nukestudio/publish/extract_effects.py | 5 ++++- .../premiere/publish/integrate_assumed_destination.py | 10 ++++++++-- pype/setdress_api.py | 10 ++++++---- 11 files changed, 54 insertions(+), 16 deletions(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 429dbd8eea..42b547b4ef 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -32,7 +32,10 @@ class CollectTemplates(pyblish.api.InstancePlugin): project_name = api.Session["AVALON_PROJECT"] project = io.find_one( - {"type": "project", "name": project_name}, + { + "type": "project", + "name": project_name + }, projection={"config": True, "data": True} ) @@ -60,7 +63,10 @@ class CollectTemplates(pyblish.api.InstancePlugin): version_number = 1 if subset is not None: version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] ) diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 33cb1862d0..e24bad362d 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -97,7 +97,10 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # get next version latest_version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, {"name": True}, sort=[("name", -1)] ) diff --git a/pype/plugins/global/publish/integrate_assumed_destination.py b/pype/plugins/global/publish/integrate_assumed_destination.py index 25794a4498..d090e2711a 100644 --- a/pype/plugins/global/publish/integrate_assumed_destination.py +++ b/pype/plugins/global/publish/integrate_assumed_destination.py @@ -110,7 +110,10 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin): version_number = 1 if subset is not None: version = io.find_one( - {"type": "version", "parent": 
subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] ) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index bdcebeb5fe..da2ce5b457 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -163,7 +163,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # get next version latest_version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, {"name": True}, sort=[("name", -1)] ) diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py index 69280d272d..5819051146 100644 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ b/pype/plugins/global/publish/integrate_rendered_frames.py @@ -101,7 +101,10 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # get next version latest_version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, {"name": True}, sort=[("name", -1)] ) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 311f5274f6..ddf1d948a3 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -28,7 +28,10 @@ def _get_script(): def get_latest_version(asset_name, subset_name, family): # Get asset asset_name = io.find_one( - {"type": "asset", "name": asset_name}, + { + "type": "asset", + "name": asset_name + }, projection={"name": True} ) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index 02bd80cea2..4da28da2a1 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -449,7 +449,10 @@ class ExtractLook(pype.api.Extractor): version_number = 1 if subset is not None: version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] ) diff --git a/pype/plugins/maya/publish/validate_node_ids_related.py b/pype/plugins/maya/publish/validate_node_ids_related.py index 9eee63c3ef..7e8565c297 100644 --- a/pype/plugins/maya/publish/validate_node_ids_related.py +++ b/pype/plugins/maya/publish/validate_node_ids_related.py @@ -39,7 +39,10 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin): asset = instance.data['asset'] asset_data = io.find_one( - {"name": asset, "type": "asset"}, + { + "name": asset, + "type": "asset" + }, projection={"_id": True} ) asset_id = str(asset_data['_id']) diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index a337a5cd1d..15d2a80a55 100644 --- a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -201,7 +201,10 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): version_number = 1 if subset is not None: version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] ) diff --git a/pype/plugins/premiere/publish/integrate_assumed_destination.py b/pype/plugins/premiere/publish/integrate_assumed_destination.py index a8862ff94c..a0393e8a43 100644 --- a/pype/plugins/premiere/publish/integrate_assumed_destination.py +++ 
b/pype/plugins/premiere/publish/integrate_assumed_destination.py @@ -78,7 +78,10 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin): project_name = api.Session["AVALON_PROJECT"] project = io.find_one( - {"type": "project", "name": project_name}, + { + "type": "project", + "name": project_name + }, projection={"config": True, "data": True} ) @@ -106,7 +109,10 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin): version_number = 1 if subset is not None: version = io.find_one( - {"type": "version", "parent": subset["_id"]}, + { + "type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] ) diff --git a/pype/setdress_api.py b/pype/setdress_api.py index c58f259676..63b3967828 100644 --- a/pype/setdress_api.py +++ b/pype/setdress_api.py @@ -463,10 +463,12 @@ def update_scene(set_container, containers, current_data, new_data, new_file): # Check whether the conversion can be done by the Loader. # They *must* use the same asset, subset and Loader for # `api.update` to make sense. - old = io.find_one( - {"_id": io.ObjectId(representation_current)} - ) - new = io.find_one({"_id": io.ObjectId(representation_new)}) + old = io.find_one({ + "_id": io.ObjectId(representation_current) + }) + new = io.find_one({ + "_id": io.ObjectId(representation_new) + }) is_valid = compare_representations(old=old, new=new) if not is_valid: log.error("Skipping: %s. See log for details.", From d0132a1fa055e07ae3780b7d900ea3eeebe19a54 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 7 Dec 2019 11:29:07 +0000 Subject: [PATCH 016/393] lib.py edited online with Bitbucket --- pype/lib.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/lib.py b/pype/lib.py index 62bdc02242..cb238459d1 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -182,7 +182,10 @@ def any_outdated(): continue representation_doc = io.find_one( - {"_id": io.ObjectId(representation), "type": "representation"}, + { + "_id": io.ObjectId(representation), + "type": "representation" + }, projection={"parent": True} ) if representation_doc and not is_latest(representation_doc): From 8b33b22d3081d7bd876d1930509f759d9e460caa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Sat, 7 Dec 2019 13:03:44 +0100 Subject: [PATCH 017/393] create _id in representation before insert to DB to not require query them after --- pype/plugins/global/publish/integrate_new.py | 5 +++-- pype/plugins/global/publish/integrate_thumbnail.py | 14 +++----------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index cc71fce49e..3422c95d73 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -385,6 +385,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("__ dst: {}".format(dst)) representation = { + "_id": io.ObjectId(), "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, @@ -423,8 +424,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for rep in instance.data["representations"]: self.log.debug("__ represNAME: {}".format(rep['name'])) self.log.debug("__ represPATH: {}".format(rep['published_path'])) - result = io.insert_many(representations) - instance.data["published_representation_ids"] = result.inserted_ids + io.insert_many(representations) + instance.data["published_representations"] = representations # self.log.debug("Representation: {}".format(representations)) self.log.info("Registered {} 
items".format(len(representations))) diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index 08157187df..bf6c62155f 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -19,8 +19,8 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): families = ["review"] def process(self, instance): - repre_ids = instance.data.get("published_representation_ids") - if not repre_ids: + published_repres = instance.data.get("published_representations") + if not published_repres: self.log.debug( "There are not published representation ids on the instance." ) @@ -40,17 +40,9 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): thumbnail_template = anatomy.templates["publish"]["thumbnail"] io.install() - repres = io.find({ - "_id": {"$in": repre_ids}, - "type": "representation" - }) - if not repres: - raise AssertionError(( - "There are not representations in database with ids {}" - ).format(str(repre_ids))) thumb_repre = None - for repre in repres: + for repre in published_repres: if repre["name"].lower() == "thumbnail": thumb_repre = repre break From 8159b029d573ffdc22e556caafae6bc96843986e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 10 Dec 2019 12:25:24 +0000 Subject: [PATCH 018/393] (genera) fixing subprocess function to allow pass through Popen.subprocess arguments. Also adding better output for running subprocess --- pype/lib.py | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index c8fade7f4a..8772608b38 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -14,24 +14,35 @@ log = logging.getLogger(__name__) # Special naming case for subprocess since its a built-in method. 
-def _subprocess(args):
+def _subprocess(*args, **kwargs):
     """Convenience method for getting output errors for subprocess."""
 
     # make sure environment contains only strings
-    env = {k: str(v) for k, v in os.environ.items()}
+    filtered_env = {k: str(v) for k, v in os.environ.items()}
 
-    proc = subprocess.Popen(
-        args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        stdin=subprocess.PIPE,
-        env=env
-    )
+    # set overrides
+    kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
+    kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT)
+    kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
+    kwargs['env'] = kwargs.get('env', filtered_env)
 
-    output = proc.communicate()[0]
+    proc = subprocess.Popen(*args, **kwargs)
+
+    output, error = proc.communicate()
+
+    if output:
+        output = output.decode("utf-8")
+        output += "\n"
+        for line in output.strip().split("\n"):
+            log.info(line)
+
+    if error:
+        error = error.decode("utf-8")
+        error += "\n"
+        for line in error.strip().split("\n"):
+            log.error(line)
 
     if proc.returncode != 0:
-        log.error(output)
         raise ValueError("\"{}\" was not successful: {}".format(args, output))
 
     return output

From a4ae644e35aec8fdadd361401c96f321e4fd9eb9 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 10 Dec 2019 14:05:05 +0100
Subject: [PATCH 019/393] feat(nuke): Loader plugin for nukenodes
---
 pype/nuke/lib.py                        |  67 +++++
 pype/plugins/nuke/load/load_backdrop.py | 319 ++++++++++++++++++++++++
 2 files changed, 386 insertions(+)
 create mode 100644 pype/plugins/nuke/load/load_backdrop.py

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 816a7d5116..202798893a 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -1230,3 +1230,70 @@ def get_dependent_nodes(nodes):
         })
 
     return connections_in, connections_out
+
+
+def find_free_space_to_paste_nodes(
+        nodes,
+        group=nuke.root(),
+        direction="right",
+        offset=300):
+    """
+    Get coordinates in the DAG (node graph) for placing new nodes
+
+    Arguments:
+        nodes (list): list of nuke.Node objects
+        group (nuke.Node) [optional]: group in whose context to work
+        direction (str) [optional]: where we want it to be placed
+                                    [left, right, top, bottom]
+        offset (int) [optional]: offset from the rest of the nodes
+
+    Returns:
+        xpos (int): x coordinate in DAG
+        ypos (int): y coordinate in DAG
+    """
+    if len(nodes) == 0:
+        return 0, 0
+
+    group_xpos = list()
+    group_ypos = list()
+
+    # get local coordinates of all nodes
+    nodes_xpos = [n.xpos() for n in nodes] + \
+                 [n.xpos() + n.screenWidth() for n in nodes]
+
+    nodes_ypos = [n.ypos() for n in nodes] + \
+                 [n.ypos() + n.screenHeight() for n in nodes]
+
+    # get complete screen size of all nodes to be placed in
+    nodes_screen_width = max(nodes_xpos) - min(nodes_xpos)
+    nodes_screen_heigth = max(nodes_ypos) - min(nodes_ypos)
+
+    # get screen size (r,l,t,b) of all nodes in `group`
+    with group:
+        group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \
+                     [n.xpos() + n.screenWidth() for n in nuke.allNodes()
+                      if n not in nodes]
+        group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \
+                     [n.ypos() + n.screenHeight() for n in nuke.allNodes()
+                      if n not in nodes]
+
+    # calc output left
+    if direction in "left":
+        xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset)
+        ypos = min(group_ypos)
+        return xpos, ypos
+    # calc output right
+    if direction in "right":
+        xpos = max(group_xpos) + abs(offset)
+        ypos = min(group_ypos)
+        return xpos, ypos
+    # calc output top
+    if direction in "top":
+        xpos = min(group_xpos)
+        ypos = min(group_ypos) - 
abs(nodes_screen_heigth) - abs(offset)
+        return xpos, ypos
+    # calc output bottom
+    if direction in "bottom":
+        xpos = min(group_xpos)
+        ypos = max(group_ypos) + abs(offset)
+        return xpos, ypos

diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py
new file mode 100644
index 0000000000..7f58d4e9ec
--- /dev/null
+++ b/pype/plugins/nuke/load/load_backdrop.py
@@ -0,0 +1,319 @@
+from avalon import api, style, io
+import nuke
+import nukescripts
+from pype.nuke import lib as pnlib
+from avalon.nuke import lib as anlib
+from avalon.nuke import containerise, update_container
+reload(pnlib)
+
+class LoadBackdropNodes(api.Loader):
+    """Loading Published Backdrop nodes (workfile, nukenodes)"""
+
+    representations = ["nk"]
+    families = ["workfile", "nukenodes"]
+
+    label = "Import Nuke Nodes"
+    order = 0
+    icon = "eye"
+    color = style.colors.light
+    node_color = "0x7533c1ff"
+
+    def load(self, context, name, namespace, data):
+        """
+        Loading function to import a .nk file into the script and wrap
+        it in a backdrop
+
+        Arguments:
+            context (dict): context of version
+            name (str): name of the version
+            namespace (str): asset name
+            data (dict): compulsory attribute > not used
+
+        Returns:
+            nuke node: containerised nuke node object
+        """
+
+        # get main variables
+        version = context['version']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        namespace = namespace or context['asset']['name']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        # prepare data for imprinting
+        # add additional metadata from the version to imprint to Avalon knob
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # getting file path
+        file = self.fname.replace("\\", "/")
+
+        # adding nodes to node graph
+        # just in case we are in group lets jump out of it
+        nuke.endGroup()
+
+        # Get mouse position
+        n = nuke.createNode("NoOp")
+        xcursor, ycursor = (n.xpos(), n.ypos())
+        anlib.reset_selection()
+        nuke.delete(n)
+
+        bdn_frame = 50
+
+        with anlib.maintained_selection():
+
+            # add group from nk
+            nuke.nodePaste(file)
+
+            # get all pasted nodes
+            new_nodes = list()
+            nodes = nuke.selectedNodes()
+
+            # get pointer position in DAG
+            xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(nodes, direction="right", offset=200+bdn_frame)
+
+            # reset position to all nodes and replace inputs and output
+            for n in nodes:
+                anlib.reset_selection()
+                xpos = (n.xpos() - xcursor) + xpointer
+                ypos = (n.ypos() - ycursor) + ypointer
+                n.setXYpos(xpos, ypos)
+
+                # replace Input nodes with dots
+                if n.Class() in "Input":
+                    dot = nuke.createNode("Dot")
+                    new_name = n.name().replace("INP", "DOT")
+                    dot.setName(new_name)
+                    dot["label"].setValue(new_name)
+                    dot.setXYpos(xpos, ypos)
+                    new_nodes.append(dot)
+
+                    # rewire
+                    dep = n.dependent()
+                    for d in dep:
+                        index = next((i for i, dpcy in enumerate(
+                                      d.dependencies())
+                                      if n is dpcy), 0)
+                        d.setInput(index, dot)
+
+                    # remove Input node
+                    anlib.reset_selection()
+                    nuke.delete(n)
+                    continue
+
+                # replace Output nodes with dots
+                elif n.Class() in "Output":
+                    dot = nuke.createNode("Dot")
+                    new_name = n.name() + "_DOT"
+                    dot.setName(new_name)
+                    dot["label"].setValue(new_name)
+                    dot.setXYpos(xpos, ypos)
+                    new_nodes.append(dot)
+
+                    # rewire
+                    dep = next((d for d in n.dependencies()), None)
+                    if dep:
+                        dot.setInput(0, dep)
+
+                    # remove Output node
+                    anlib.reset_selection()
+                    nuke.delete(n)
+                    continue
+                else:
+                    new_nodes.append(n)
+
+            # reselect nodes with new Dot instead of Inputs and Output
+            anlib.reset_selection()
+            anlib.select_nodes(new_nodes)
+            # place on backdrop
+            bdn = nukescripts.autoBackdrop()
+
+            # add frame offset
+            xpos = bdn.xpos() - bdn_frame
+            ypos = bdn.ypos() - bdn_frame
+            bdwidth = bdn["bdwidth"].value() + (bdn_frame*2)
+            bdheight = bdn["bdheight"].value() + (bdn_frame*2)
+
+            bdn["xpos"].setValue(xpos)
+            bdn["ypos"].setValue(ypos)
+            bdn["bdwidth"].setValue(bdwidth)
+            bdn["bdheight"].setValue(bdheight)
+
+            bdn["name"].setValue(object_name)
+            bdn["label"].setValue("Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!".format(object_name))
+            bdn["note_font_size"].setValue(20)
+
+            return containerise(
+                node=bdn,
+                name=name,
+                namespace=namespace,
+                context=context,
+                loader=self.__class__.__name__,
+                data=data_imprint)
+
+    def update(self, container, representation):
+        """Update the Loader's path
+
+        Nuke automatically tries to reset some variables when changing
+        the loader's path to a new file. These automatic changes are to its
+        inputs:
+
+        """
+
+        # get main variables
+        # Get version from io
+        version = io.find_one({
+            "type": "version",
+            "_id": representation["parent"]
+        })
+        # get corresponding node
+        GN = nuke.toNode(container['objectName'])
+
+        file = api.get_representation_path(representation).replace("\\", "/")
+        context = representation["context"]
+        name = container['name']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        namespace = container['namespace']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"representation": str(representation["_id"]),
+                        "frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # adding nodes to node graph
+        # just in case we are in group lets jump out of it
+        nuke.endGroup()
+
+        with anlib.maintained_selection():
+            xpos = GN.xpos()
+            ypos = GN.ypos()
+            avalon_data = anlib.get_avalon_knob_data(GN)
+            nuke.delete(GN)
+            # add group from nk
+            nuke.nodePaste(file)
+
+            GN = nuke.selectedNode()
+            anlib.set_avalon_knob_data(GN, avalon_data)
+            GN.setXYpos(xpos, ypos)
+            GN["name"].setValue(object_name)
+
+        # get all versions in list
+        versions = io.find({
+            "type": "version",
+            "parent": version["parent"]
+        }).distinct('name')
+
+        max_version = max(versions)
+
+        # change color of node
+        if version.get("name") not in [max_version]:
+            GN["tile_color"].setValue(int("0xd88467ff", 16))
+        else:
+            GN["tile_color"].setValue(int(self.node_color, 16))
+
+        self.log.info("updated to version: {}".format(version.get("name")))
+
+        return update_container(GN, data_imprint)
+
+    def connect_active_viewer(self, group_node):
+        """
+        Finds the active viewer,
+        places the node under it, and adds
+        the group's name to the viewer's Input Process
+
+        Arguments:
+            group_node (nuke node): nuke group node object
+
+        """
+        group_node_name = group_node["name"].value()
+
+        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
+        if len(viewer) > 0:
+            viewer = viewer[0]
+        else:
+            self.log.error("Please create Viewer node before you "
+                           "run this action again")
+            return None
+
+        # get coordinates of Viewer1
+        xpos = viewer["xpos"].value()
+        ypos = viewer["ypos"].value()
+
+        ypos += 150
+
+        viewer["ypos"].setValue(ypos)
+
+        # set coordinates to group node
+        group_node["xpos"].setValue(xpos)
+        group_node["ypos"].setValue(ypos + 50)
+
+        # add group node name to Viewer Input Process
+        viewer["input_process_node"].setValue(group_node_name)
+
+        # put backdrop under
+        pnlib.create_backdrop(label="Input Process", layer=2,
+                              nodes=[viewer, group_node], color="0x7c7faaff")
+
+        return True
+
+    def get_item(self, data, trackIndex, subTrackIndex):
+        return {key: val for key, val in data.items()
+                if subTrackIndex == val["subTrackIndex"]
+                if trackIndex == val["trackIndex"]}
+
+    def byteify(self, input):
+        """
+        Converts unicode strings to strings
+        It goes through the whole dictionary
+
+        Arguments:
+            input (dict/str): input
+
+        Returns:
+            dict: with fixed values and keys
+
+        """
+
+        if isinstance(input, dict):
+            return {self.byteify(key): self.byteify(value)
+                    for key, value in input.iteritems()}
+        elif isinstance(input, list):
+            return [self.byteify(element) for element in input]
+        elif isinstance(input, unicode):
+            return input.encode('utf-8')
+        else:
+            return input
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        from avalon.nuke import viewer_update_and_undo_stop
+        node = nuke.toNode(container['objectName'])
+        with viewer_update_and_undo_stop():
+            nuke.delete(node)

From bc9e7833b0b5403fe3b1fc3778a8a0bbd7c0ffd5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 10 Dec 2019 14:32:28 +0100
Subject: [PATCH 020/393] width of Lighting button is not so complicated to
 set and font size defaults to 8pt
---
 .../widgets/widget_component_item.py | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py
index 78287ccf37..0fd72cc70e 100644
--- a/pype/standalonepublish/widgets/widget_component_item.py
+++ b/pype/standalonepublish/widgets/widget_component_item.py
@@ -308,14 +308,15 @@ class ComponentItem(QtWidgets.QFrame):
 class LightingButton(QtWidgets.QPushButton):
     lightingbtnstyle = """
     QPushButton {
+        font: %(font_size_pt)spt;
         text-align: center;
         color: #777777;
         background-color: transparent;
         border-width: 1px;
         border-color: #777777;
         border-style: solid;
-        padding-top: 2px;
-        padding-bottom: 2px;
+        padding-top: 0px;
+        padding-bottom: 0px;
         padding-left: 3px;
         padding-right: 3px;
         border-radius: 3px;
@@ -351,14 +352,11 @@ class LightingButton(QtWidgets.QPushButton):
             color: #4BF543;
         }
     """
-    def __init__(self, text, *args, **kwargs):
-        super().__init__(text, *args, **kwargs)
-        self.setStyleSheet(self.lightingbtnstyle)
+    def __init__(self, text, font_size_pt=8, *args, **kwargs):
+        super(LightingButton, self).__init__(text, *args, **kwargs)
+        self.setStyleSheet(self.lightingbtnstyle % {
+            "font_size_pt": font_size_pt
+        })
         self.setCheckable(True)
-        preview_font_metrics = self.fontMetrics().boundingRect(text)
-        width = preview_font_metrics.width() + 16
-        height = preview_font_metrics.height() + 5
-        self.setMaximumWidth(width)
-        self.setMaximumHeight(height)

From 8707a527d7d5836a280316d9357de64513ee7bf8 Mon Sep 17 00:00:00 2001
From: 
iLLiCiTiT Date: Wed, 11 Dec 2019 13:49:58 +0100 Subject: [PATCH 021/393] ftrack api server in thread was replaced with subprocess --- pype/ftrack/ftrack_server/lib.py | 31 ++++++ pype/ftrack/tray/ftrack_module.py | 166 +++++++++++++++++++++--------- 2 files changed, 151 insertions(+), 46 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index edd3cee09b..fefba580e0 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -265,6 +265,37 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): return self._send_packet(self._code_name_mapping["heartbeat"]) return super()._handle_packet(code, packet_identifier, path, data) + + +class UserEventHub(ftrack_api.event.hub.EventHub): + def __init__(self, *args, **kwargs): + self.sock = kwargs.pop("sock") + super(UserEventHub, self).__init__(*args, **kwargs) + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "heartbeat": + # Reply with heartbeat. + self.sock.sendall(b"hearbeat") + return self._send_packet(self._code_name_mapping['heartbeat']) + + elif code_name == "connect": + event = ftrack_api.event.base.Event( + topic="pype.storer.started", + data={}, + source={ + "id": self.id, + "user": {"username": self._api_user} + } + ) + self._event_queue.put(event) + + return super(UserEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) + + class SocketSession(ftrack_api.session.Session): '''An isolated session for interaction with an ftrack server.''' def __init__( diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index 8da97da56b..dab751c001 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -1,26 +1,27 @@ import os -import json -import threading import time -from Qt import QtCore, QtGui, QtWidgets +import datetime +import threading +from Qt import QtCore, QtWidgets import ftrack_api -from pypeapp import style -from pype.ftrack import FtrackServer, check_ftrack_url, credentials +from ..ftrack_server.lib import check_ftrack_url +from ..ftrack_server import socket_thread +from ..lib import credentials from . 
import login_dialog -from pype import api as pype +from pypeapp import Logger -log = pype.Logger().get_logger("FtrackModule", "ftrack") +log = Logger().get_logger("FtrackModule", "ftrack") class FtrackModule: def __init__(self, main_parent=None, parent=None): self.parent = parent self.widget_login = login_dialog.Login_Dialog_ui(self) - self.action_server = FtrackServer('action') self.thread_action_server = None + self.thread_socket_server = None self.thread_timer = None self.bool_logged = False @@ -75,14 +76,6 @@ class FtrackModule: # Actions part def start_action_server(self): - self.bool_action_thread_running = True - self.set_menu_visibility() - if ( - self.thread_action_server is not None and - self.bool_action_thread_running is False - ): - self.stop_action_server() - if self.thread_action_server is None: self.thread_action_server = threading.Thread( target=self.set_action_server @@ -90,35 +83,114 @@ class FtrackModule: self.thread_action_server.start() def set_action_server(self): - first_check = True - while self.bool_action_thread_running is True: - if not check_ftrack_url(os.environ['FTRACK_SERVER']): - if first_check: - log.warning( - "Could not connect to Ftrack server" - ) - first_check = False + if self.bool_action_server_running: + return + + self.bool_action_server_running = True + self.bool_action_thread_running = False + + ftrack_url = os.environ['FTRACK_SERVER'] + + parent_file_path = os.path.dirname( + os.path.dirname(os.path.realpath(__file__)) + ) + + min_fail_seconds = 5 + max_fail_count = 3 + wait_time_after_max_fail = 10 + + # Threads data + thread_name = "ActionServerThread" + thread_port = 10021 + subprocess_path = ( + "{}/ftrack_server/sub_user_server.py".format(parent_file_path) + ) + if self.thread_socket_server is not None: + self.thread_socket_server.stop() + self.thread_socket_server.join() + self.thread_socket_server = None + + last_failed = datetime.datetime.now() + failed_count = 0 + + ftrack_accessible = False + printed_ftrack_error = False + + # Main loop + while True: + if not self.bool_action_server_running: + log.debug("Action server was pushed to stop.") + break + + # Check if accessible Ftrack and Mongo url + if not ftrack_accessible: + ftrack_accessible = check_ftrack_url(ftrack_url) + + # Run threads only if Ftrack is accessible + if not ftrack_accessible: + if not printed_ftrack_error: + log.warning("Can't access Ftrack {}".format(ftrack_url)) + + if self.thread_socket_server is not None: + self.thread_socket_server.stop() + self.thread_socket_server.join() + self.thread_socket_server = None + self.bool_action_thread_running = False + self.set_menu_visibility() + + printed_ftrack_error = True + time.sleep(1) continue - log.info( - "Connected to Ftrack server. Running actions session" - ) - try: - self.bool_action_server_running = True + + printed_ftrack_error = False + + # Run backup thread which does not requeire mongo to work + if self.thread_socket_server is None: + if failed_count < max_fail_count: + self.thread_socket_server = socket_thread.SocketThread( + thread_name, thread_port, subprocess_path + ) + self.thread_socket_server.start() + self.bool_action_thread_running = True + self.set_menu_visibility() + + elif failed_count == max_fail_count: + log.warning(( + "Action server failed {} times." 
+ " I'll try to run again {}s later" + ).format( + str(max_fail_count), str(wait_time_after_max_fail)) + ) + failed_count += 1 + + elif (( + datetime.datetime.now() - last_failed + ).seconds > wait_time_after_max_fail): + failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not self.thread_socket_server.isAlive(): + self.thread_socket_server_thread.join() + self.thread_socket_server = None + ftrack_accessible = False + + self.bool_action_thread_running = False self.set_menu_visibility() - self.action_server.run_server() - if self.bool_action_thread_running: - log.debug("Ftrack action server has stopped") - except Exception: - log.warning( - "Ftrack Action server crashed. Trying to connect again", - exc_info=True - ) - self.bool_action_server_running = False - self.set_menu_visibility() - first_check = True + + _last_failed = datetime.datetime.now() + delta_time = (_last_failed - last_failed).seconds + if delta_time < min_fail_seconds: + failed_count += 1 + else: + failed_count = 0 + last_failed = _last_failed + + time.sleep(1) self.bool_action_thread_running = False + self.bool_action_server_running = False + self.set_menu_visibility() def reset_action_server(self): self.stop_action_server() @@ -126,16 +198,18 @@ class FtrackModule: def stop_action_server(self): try: - self.bool_action_thread_running = False - self.action_server.stop_session() + self.bool_action_server_running = False + if self.thread_socket_server is not None: + self.thread_socket_server.stop() + self.thread_socket_server.join() + self.thread_socket_server = None + if self.thread_action_server is not None: self.thread_action_server.join() self.thread_action_server = None log.info("Ftrack action server was forced to stop") - self.bool_action_server_running = False - self.set_menu_visibility() except Exception: log.warning( "Error has happened during Killing action server", @@ -201,9 +275,9 @@ class FtrackModule: self.stop_timer_thread() return - self.aRunActionS.setVisible(not self.bool_action_thread_running) + self.aRunActionS.setVisible(not self.bool_action_server_running) self.aResetActionS.setVisible(self.bool_action_thread_running) - self.aStopActionS.setVisible(self.bool_action_thread_running) + self.aStopActionS.setVisible(self.bool_action_server_running) if self.bool_timer_event is False: self.start_timer_thread() From 474a0338af07f7bec29e3f3b099584bf23b2fef3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 14:52:58 +0100 Subject: [PATCH 022/393] modified socket thread in ftrack server to be modifiable --- pype/ftrack/ftrack_server/socket_thread.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index 3309f75cd7..fab211d103 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -26,6 +26,8 @@ class SocketThread(threading.Thread): self.mongo_error = False + self._temp_data = {} + def stop(self): self._is_running = False @@ -81,8 +83,9 @@ class SocketThread(threading.Thread): try: if not self._is_running: break + data = None try: - data = connection.recv(16) + data = self.get_data_from_con(connection) time_con = time.time() except socket.timeout: @@ -99,10 +102,7 @@ class SocketThread(threading.Thread): self._is_running = False break - if data: - if data == b"MongoError": - self.mongo_error = True - connection.sendall(data) + self._handle_data(connection, data) except Exception as exc: 
self.log.error( @@ -121,3 +121,14 @@ class SocketThread(threading.Thread): for line in lines: os.write(1, line) self.finished = True + + def get_data_from_con(self, connection): + return connection.recv(16) + + def handle_data(self, connection, data): + if not data: + return + + if data == b"MongoError": + self.mongo_error = True + connection.sendall(data) From 01626176dd1ba2b98c0f5f64659cc28f91ee5cf5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:05:18 +0100 Subject: [PATCH 023/393] fix method name --- pype/ftrack/ftrack_server/socket_thread.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index fab211d103..c688693c77 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -125,7 +125,7 @@ class SocketThread(threading.Thread): def get_data_from_con(self, connection): return connection.recv(16) - def handle_data(self, connection, data): + def _handle_data(self, connection, data): if not data: return From eac2629fcb27728396dc63b55de00f11cd006408 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:26:12 +0100 Subject: [PATCH 024/393] use FFMPEG_PATH environ to get path to ffmpeg and ffprobe --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 01dc76aacf..1b2c2a04aa 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -9,6 +9,21 @@ from pype import api as pype log = pype.Logger().get_logger("BurninWrapper", "burninwrap") +ffmpeg_path = os.environ.get("FFMPEG_PATH") +if ffmpeg_path and os.path.exists(ffmpeg_path): + # add separator "/" or "\" to be prepared for next part + ffmpeg_path += os.path.sep +else: + ffmpeg_path = "" + +FFMPEG = ( + '{} -loglevel panic -i %(input)s %(filters)s %(args)s%(output)s' +).format(os.path.normpath(ffmpeg_path + "ffmpeg")) +FFPROBE = ( + '{} -v quiet -print_format json -show_format -show_streams %(source)s' +).format(os.path.normpath(ffmpeg_path + "ffprobe")) + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. From 54c76b3b7f0358e9a8943524d22ce215cd006740 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:26:55 +0100 Subject: [PATCH 025/393] copied _streams method from otio adapter to be able to use ffprobe full path --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 1b2c2a04aa..dd62c59bec 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -1,5 +1,7 @@ import os import datetime +import subprocess +import json import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins from pypeapp.lib import config from pype import api as pype @@ -24,6 +26,19 @@ FFPROBE = ( ).format(os.path.normpath(ffmpeg_path + "ffprobe")) +def _streams(source): + """Reimplemented from otio burnins to be able use full path to ffprobe + :param str source: source media file + :rtype: [{}, ...] 
+ """ + command = FFPROBE % {'source': source} + proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) + out = proc.communicate()[0] + if proc.returncode != 0: + raise RuntimeError("Failed to run: %s" % command) + return json.loads(out)['streams'] + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. From fdf4182155e53f77e25fa32088f3776a9995f8b4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:27:22 +0100 Subject: [PATCH 026/393] replace FFMPEG string in command method --- pype/scripts/otio_burnin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index dd62c59bec..d215bea55e 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -217,7 +217,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if self.filter_string: filters = '-vf "{}"'.format(self.filter_string) - return (ffmpeg_burnins.FFMPEG % { + return (FFMPEG % { 'input': self.source, 'output': output, 'args': '%s ' % args if args else '', @@ -398,7 +398,7 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) codec_args = '' if codec_data is not []: codec_args = " ".join(codec_data) - + burnin.render(output_path, args=codec_args, overwrite=overwrite) From 978e05b536debcc928d39376690a14558c25a198 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:27:47 +0100 Subject: [PATCH 027/393] get streams before super init in Burnin class is called --- pype/scripts/otio_burnin.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index d215bea55e..3e8cb3b0c4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -91,6 +91,9 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): } def __init__(self, source, streams=None, options_init=None): + if not streams: + streams = _streams(source) + super().__init__(source, streams) if options_init: self.options_init.update(options_init) From b5fe082a59061e5eb14d77966ecefbcf7e91dc08 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 10:51:02 +0100 Subject: [PATCH 028/393] sonar import moved to process because in the time the file is imported, sonar is not available --- pype/plugins/blender/create/submarine_model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py index 29fcae8fbf..1845c9b222 100644 --- a/pype/plugins/blender/create/submarine_model.py +++ b/pype/plugins/blender/create/submarine_model.py @@ -2,7 +2,6 @@ import bpy -import sonar.blender from avalon import api from avalon.blender import Creator, lib @@ -16,7 +15,7 @@ class CreateModel(Creator): icon = "cube" def process(self): - + import sonar.blender asset = self.data["asset"] subset = self.data["subset"] name = sonar.blender.plugin.model_name(asset, subset) From 0841d91eef7584e729b58db8d3b2c8340d7e2b3a Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 16 Dec 2019 14:13:31 +0100 Subject: [PATCH 029/393] fix which import --- pype/scripts/publish_filesequence.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py index 7ad7318831..5517cfeb4c 100644 --- a/pype/scripts/publish_filesequence.py +++ b/pype/scripts/publish_filesequence.py @@ -4,7 +4,16 @@ import os import logging import subprocess import platform 
-from shutil import which +try: + from shutil import which +except ImportError: + # we are in python < 3.3 + def which(command): + path = os.getenv('PATH') + for p in path.split(os.path.pathsep): + p = os.path.join(p, command) + if os.path.exists(p) and os.access(p, os.X_OK): + return p handler = logging.basicConfig() log = logging.getLogger("Publish Image Sequences") From 19cf990fbdf1b24c481ad9ac6b6fa74419331a0d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 17:35:23 +0100 Subject: [PATCH 030/393] added template data to burnins data --- pype/plugins/global/publish/extract_burnin.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 95a7144081..33935b4272 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -32,6 +32,7 @@ class ExtractBurnin(pype.api.Extractor): frame_start = int(instance.data.get("frameStart") or 0) frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 + prep_data = { "username": instance.context.data['user'], "asset": os.environ['AVALON_ASSET'], @@ -39,8 +40,14 @@ class ExtractBurnin(pype.api.Extractor): "frame_start": frame_start, "frame_end": frame_end, "duration": duration, - "version": version + "version": version, + "comment": instance.context.data.get("comment"), + "intent": instance.context.data.get("intent") } + # Update data with template data + template_data = instance.data.get("assumedTemplateData") or {} + prep_data.update(template_data) + self.log.debug("__ prep_data: {}".format(prep_data)) for i, repre in enumerate(instance.data["representations"]): self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre)) From 1286edfc25c717815d16bb8bb18d7d6a98268b30 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 17:36:10 +0100 Subject: [PATCH 031/393] added filled anatomy to burnin data to be able use `anatomy[...][...]` in burnin presets --- pype/plugins/global/publish/extract_burnin.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 33935b4272..06a62dd98b 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -1,5 +1,6 @@ import os import json +import copy import pype.api import pyblish @@ -48,6 +49,9 @@ class ExtractBurnin(pype.api.Extractor): template_data = instance.data.get("assumedTemplateData") or {} prep_data.update(template_data) + # get anatomy project + anatomy = instance.context.data['anatomy'] + self.log.debug("__ prep_data: {}".format(prep_data)) for i, repre in enumerate(instance.data["representations"]): self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre)) @@ -69,11 +73,17 @@ class ExtractBurnin(pype.api.Extractor): ) self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) + # create copy of prep_data for anatomy formatting + _prep_data = copy.deepcopy(prep_data) + _prep_data["representation"] = repre["name"] + _prep_data["anatomy"] = ( + anatomy.format_all(_prep_data).get("solved") or {} + ) burnin_data = { "input": full_movie_path.replace("\\", "/"), "codec": repre.get("codec", []), "output": full_burnin_path.replace("\\", "/"), - "burnin_data": prep_data + "burnin_data": _prep_data } self.log.debug("__ burnin_data2: {}".format(burnin_data)) From d78166a0da72d2a736be3c7b4bfc5da4fa38fff1 Mon Sep 17 
00:00:00 2001
From: iLLiCiTiT
Date: Mon, 16 Dec 2019 17:49:42 +0100
Subject: [PATCH 032/393] replace backslash in hierarchy which may cause
 issues in burnin path
---
 pype/plugins/global/publish/collect_templates.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index 9b0c03fdee..48623eec22 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -75,7 +75,7 @@ class CollectTemplates(pyblish.api.InstancePlugin):
             "asset": asset_name,
             "subset": subset_name,
             "version": version_number,
-            "hierarchy": hierarchy,
+            "hierarchy": hierarchy.replace("\\", "/"),
             "representation": "TEMP"}
 
         instance.data["template"] = template

From 813673dd504eb0f83648daea606a00ae4ac8de86 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 12 Dec 2019 16:04:26 +0100
Subject: [PATCH 033/393] fix(global): missing comma after `gizmo` family

From f06857c42e8376f2f9b63efb77fb37846fd83c05 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 18 Dec 2019 14:56:57 +0100
Subject: [PATCH 034/393] fix(nuke): didn't collect all publishable instances
---
 pype/plugins/nuke/publish/collect_instances.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index cffe415058..c5fb289a1e 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -86,11 +86,13 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                 node.end()
 
             family = avalon_knob_data["family"]
-            families = avalon_knob_data.get("families")
-            if families:
-                families = [families]
+            families = list()
+            families_ak = avalon_knob_data.get("families")
+
+            if families_ak:
+                families.append(families_ak)
             else:
-                families = [family]
+                families.append(family)
 
             # Get format
             format = root['format'].value()
@@ -100,7 +102,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
 
             if node.Class() not in "Read":
                 if "render" not in node.knobs().keys():
-                    families.insert(0, family)
+                    pass
                 elif node["render"].value():
                     self.log.info("flagged for render")
                     add_family = "render.local"

From d16865d96fcfe8b413507ca44535967ffa42140f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 19 Dec 2019 16:08:07 +0100
Subject: [PATCH 035/393] modified sonar creator a little bit
---
 pype/plugins/blender/create/submarine_model.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py
index 1845c9b222..7301073f05 100644
--- a/pype/plugins/blender/create/submarine_model.py
+++ b/pype/plugins/blender/create/submarine_model.py
@@ -9,26 +9,24 @@ from avalon.blender import Creator, lib
 class CreateModel(Creator):
     """Polygonal static geometry"""
 
-    name = "model_default"
+    name = "modelMain"
     label = "Model"
     family = "model"
     icon = "cube"
 
     def process(self):
-        import sonar.blender
+        import pype.blender
+
         asset = self.data["asset"]
         subset = self.data["subset"]
-        name = sonar.blender.plugin.model_name(asset, subset)
+        name = pype.blender.plugin.model_name(asset, subset)
         collection = bpy.data.collections.new(name=name)
         bpy.context.scene.collection.children.link(collection)
         self.data['task'] = api.Session.get('AVALON_TASK')
         lib.imprint(collection, self.data)
 
         if (self.options or {}).get("useSelection"):
-            for obj in bpy.context.selected_objects:
+            for 
obj in lib.get_selection(): collection.objects.link(obj) - if bpy.data.workspaces.get('Modeling'): - bpy.context.window.workspace = bpy.data.workspaces['Modeling'] - return collection From cd79f0654dfc0efbb9f36ee040b83c4a5ce419c7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 16:13:38 +0100 Subject: [PATCH 036/393] added init file to pype setup --- setup/blender/init.py | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 setup/blender/init.py diff --git a/setup/blender/init.py b/setup/blender/init.py new file mode 100644 index 0000000000..05c15eaeb2 --- /dev/null +++ b/setup/blender/init.py @@ -0,0 +1,3 @@ +from pype import blender + +blender.install() From f0918ec7604734673c288e0bc55f1c5723dce7ff Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 17:30:47 +0100 Subject: [PATCH 037/393] blender plugins update --- pype/plugins/blender/load/submarine_model.py | 129 ++++++++++++------ .../blender/publish/collect_current_file.py | 2 +- pype/plugins/blender/publish/collect_model.py | 3 +- pype/plugins/blender/publish/extract_model.py | 33 +++-- .../blender/publish/validate_mesh_has_uv.py | 8 +- .../validate_mesh_no_negative_scale.py | 12 +- 6 files changed, 129 insertions(+), 58 deletions(-) diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py index 4535b29065..99095d74cd 100644 --- a/pype/plugins/blender/load/submarine_model.py +++ b/pype/plugins/blender/load/submarine_model.py @@ -38,7 +38,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Note: It is assumed that only 1 matching collection is found. """ - for collection in bpy.data.collections: + for collection in bpy.context.blend_data.collections: if collection.name != name: continue if collection.library is None: @@ -52,18 +52,19 @@ class BlendModelLoader(pype.blender.AssetLoader): return None @staticmethod - def _collection_contains_object(collection: bpy.types.Collection, object: bpy.types.Object) -> bool: + def _collection_contains_object( + collection: bpy.types.Collection, object: bpy.types.Object + ) -> bool: """Check if the collection contains the object.""" for obj in collection.objects: if obj == object: return True return False - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[List]: + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: """ Arguments: name: Use pre-defined name @@ -76,21 +77,27 @@ class BlendModelLoader(pype.blender.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] lib_container = pype.blender.plugin.model_name(asset, subset) - container_name = pype.blender.plugin.model_name(asset, subset, namespace) + container_name = pype.blender.plugin.model_name( + asset, subset, namespace + ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to): + with bpy.context.blend_data.libraries.load( + libpath, link=True, relative=relative + ) as (_, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.data.objects.new(container_name, None) + instance_empty = bpy.context.blend_data.objects.new( + container_name, None + ) if not instance_empty.get("avalon"): instance_empty["avalon"] = dict() avalon_info = instance_empty["avalon"] avalon_info.update({"container_name": container_name}) 
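# [Editor's aside -- illustrative only, not part of the patch] This patch
# consistently swaps bpy.data for bpy.context.blend_data; both should refer
# to the same loaded blend-file data, e.g. (illustrative only):
#
#     import bpy
#     assert bpy.context.blend_data == bpy.data
#
# so the change is stylistic rather than behavioural.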
scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - container = bpy.data.collections[lib_container] + container = bpy.context.blend_data.collections[lib_container] container.name = container_name instance_empty.instance_collection = container container.make_local() @@ -120,7 +127,9 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get(container["objectName"]) + collection = bpy.context.blend_data.collections.get( + container["objectName"] + ) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -130,14 +139,30 @@ class BlendModelLoader(pype.blender.AssetLoader): pformat(representation, indent=2), ) - assert collection, f"The asset is not loaded: {container['objectName']}" - assert not (collection.children), "Nested collections are not supported." - assert libpath, ("No existing library file found for {container['objectName']}") - assert libpath.is_file(), f"The file doesn't exist: {libpath}" - assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}" - collection_libpath = self._get_library_from_container(collection).filepath - normalized_collection_libpath = str(Path(bpy.path.abspath(collection_libpath)).resolve()) - normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve()) + assert collection, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert not (collection.children), ( + "Nested collections are not supported." + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in pype.blender.plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + collection_libpath = ( + self._get_library_from_container(collection).filepath + ) + normalized_collection_libpath = ( + str(Path(bpy.path.abspath(collection_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) logger.debug( "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", normalized_collection_libpath, @@ -155,29 +180,46 @@ class BlendModelLoader(pype.blender.AssetLoader): # Unlink every object collection.objects.unlink(obj) remove_obj = True - for coll in [coll for coll in bpy.data.collections if coll != collection]: - if coll.objects and self._collection_contains_object(coll, obj): + for coll in [ + coll for coll in bpy.context.blend_data.collections + if coll != collection + ]: + if ( + coll.objects and + self._collection_contains_object(coll, obj) + ): remove_obj = False if remove_obj: objects_to_remove.add(obj) + for obj in objects_to_remove: # Only delete objects that are not used elsewhere - bpy.data.objects.remove(obj) + bpy.context.blend_data.objects.remove(obj) - instance_empties = [obj for obj in collection.users_dupli_group if obj.name in collection.name] + instance_empties = [ + obj for obj in collection.users_dupli_group + if obj.name in collection.name + ] if instance_empties: instance_empty = instance_empties[0] container_name = instance_empty["avalon"]["container_name"] + relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to): + with bpy.context.blend_data.libraries.load( + str(libpath), link=True, relative=relative + ) as (_, data_to): data_to.collections = [container_name] + 
new_collection = self._get_lib_collection(container_name, libpath) if new_collection is None: - raise ValueError("A matching collection '{container_name}' " - "should have been found in: {libpath}") + raise ValueError( + "A matching collection '{container_name}' " + "should have been found in: {libpath}" + ) + for obj in new_collection.objects: collection.objects.link(obj) - bpy.data.collections.remove(new_collection) + bpy.context.blend_data.collections.remove(new_collection) # Update the representation on the collection avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] avalon_prop["representation"] = str(representation["_id"]) @@ -195,10 +237,14 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get(container["objectName"]) + collection = bpy.context.blend_data.collections.get( + container["objectName"] + ) if not collection: return False - assert not (collection.children), "Nested collections are not supported." + assert not (collection.children), ( + "Nested collections are not supported." + ) instance_parents = list(collection.users_dupli_group) instance_objects = list(collection.objects) for obj in instance_objects + instance_parents: @@ -224,11 +270,10 @@ class CacheModelLoader(pype.blender.AssetLoader): icon = "code-fork" color = "orange" - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[List]: + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: """ Arguments: name: Use pre-defined name @@ -243,17 +288,23 @@ class CacheModelLoader(pype.blender.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] # TODO (jasper): evaluate use of namespace which is 'alien' to Blender. 
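# [Editor's aside -- illustrative only, not part of the patch] The
# library-loading pattern these loaders rely on, reduced to a minimal sketch;
# the path and collection name are hypothetical:
#
#     import bpy
#
#     with bpy.context.blend_data.libraries.load(
#         "/mnt/lib/asset.blend", link=True
#     ) as (data_from, data_to):
#         # names assigned to data_to are linked from the library
#         data_to.collections = ["assetA_modelMain"]
#
# After the context manager exits, the linked collection is available in
# bpy.data.collections and can be instanced via an empty, as done above.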
- lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace) + lib_container = container_name = ( + pype.blender.plugin.model_name(asset, subset, namespace) + ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to): + with bpy.context.blend_data.libraries.load( + libpath, link=True, relative=relative + ) as (data_from, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.data.objects.new(container_name, None) + instance_empty = bpy.context.blend_data.objects.new( + container_name, None + ) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - collection = bpy.data.collections[lib_container] + collection = bpy.context.blend_data.collections[lib_container] collection.name = container_name instance_empty.instance_collection = collection diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py index a097c72047..5756431314 100644 --- a/pype/plugins/blender/publish/collect_current_file.py +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -12,5 +12,5 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" - current_file = bpy.data.filepath + current_file = bpy.context.blend_data.filepath context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py index c60402f9ca..4c7e840c17 100644 --- a/pype/plugins/blender/publish/collect_model.py +++ b/pype/plugins/blender/publish/collect_model.py @@ -23,7 +23,7 @@ class CollectModel(pyblish.api.ContextPlugin): representation set. If the representation is set, it is a loaded model and we don't want to publish it. """ - for collection in bpy.data.collections: + for collection in bpy.context.blend_data.collections: avalon_prop = collection.get(AVALON_PROPERTY) or dict() if (avalon_prop.get('family') == 'model' and not avalon_prop.get('representation')): @@ -42,6 +42,7 @@ class CollectModel(pyblish.api.ContextPlugin): instance = context.create_instance( name=name, family=family, + families=[family], subset=subset, asset=asset, task=task, diff --git a/pype/plugins/blender/publish/extract_model.py b/pype/plugins/blender/publish/extract_model.py index 75ec33fb27..501c4d9d5c 100644 --- a/pype/plugins/blender/publish/extract_model.py +++ b/pype/plugins/blender/publish/extract_model.py @@ -1,10 +1,10 @@ -from pathlib import Path +import os import avalon.blender.workio -import sonar.api +import pype.api -class ExtractModel(sonar.api.Extractor): +class ExtractModel(pype.api.Extractor): """Extract as model.""" label = "Model" @@ -14,9 +14,10 @@ class ExtractModel(sonar.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = Path(self.staging_dir(instance)) + + stagingdir = self.staging_dir(instance) filename = f"{instance.name}.blend" - filepath = str(stagingdir / filename) + filepath = os.path.join(stagingdir, filename) # Perform extraction self.log.info("Performing extraction..") @@ -24,11 +25,23 @@ class ExtractModel(sonar.api.Extractor): # Just save the file to a temporary location. At least for now it's no # problem to have (possibly) extra stuff in the file. 
avalon.blender.workio.save_file(filepath, copy=True) + # + # # Store reference for integration + # if "files" not in instance.data: + # instance.data["files"] = list() + # + # # instance.data["files"].append(filename) - # Store reference for integration - if "files" not in instance.data: - instance.data["files"] = list() + if "representations" not in instance.data: + instance.data["representations"] = [] - instance.data["files"].append(filename) + representation = { + 'name': 'blend', + 'ext': 'blend', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) - self.log.info("Extracted instance '%s' to: %s", instance.name, filepath) + + self.log.info("Extracted instance '%s' to: %s", instance.name, representation) diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py index 79a42a11d5..f8c5092ab7 100644 --- a/pype/plugins/blender/publish/validate_mesh_has_uv.py +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import sonar.blender.action +import pype.blender.action class ValidateMeshHasUvs(pyblish.api.InstancePlugin): @@ -14,7 +14,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): families = ["model"] category = "geometry" label = "Mesh Has UV's" - actions = [sonar.blender.action.SelectInvalidAction] + actions = [pype.blender.action.SelectInvalidAction] optional = True @staticmethod @@ -34,7 +34,9 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): def get_invalid(cls, instance) -> List: invalid = [] # TODO (jasper): only check objects in the collection that will be published? - for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + for obj in [ + obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + ]: # Make sure we are in object mode. bpy.ops.object.mode_set(mode='OBJECT') if not cls.has_uvs(obj): diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py index b2a927a2ed..1f050f6844 100644 --- a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py +++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import sonar.blender.action +import pype.blender.action class ValidateMeshNoNegativeScale(pyblish.api.Validator): @@ -13,13 +13,15 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): hosts = ["blender"] families = ["model"] label = "Mesh No Negative Scale" - actions = [sonar.blender.action.SelectInvalidAction] + actions = [pype.blender.action.SelectInvalidAction] @staticmethod def get_invalid(instance) -> List: invalid = [] # TODO (jasper): only check objects in the collection that will be published? 
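        # NOTE: a minimal sketch for the TODO above, assuming the pyblish
        # instance name matches its source collection (as collect_model
        # sets it up):
        #
        #     collection = bpy.data.collections.get(instance.name)
        #     meshes = [obj for obj in collection.objects
        #               if obj.type == 'MESH']
        #
        # which would limit the check to the objects actually being published.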
-        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
+        for obj in [
+            obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH'
+        ]:
             if any(v < 0 for v in obj.scale):
                 invalid.append(obj)
 
@@ -28,4 +30,6 @@ def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}")
+            raise RuntimeError(
+                f"Meshes found in instance with negative scale: {invalid}"
+            )

From 2635268a494f612a7a75a8a873e4b211b1fca20e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 19 Dec 2019 17:32:44 +0100
Subject: [PATCH 038/393] modified blender action

---
 pype/blender/action.py | 29 +++++++++++++++++------------
 1 file changed, 17 insertions(+), 12 deletions(-)

diff --git a/pype/blender/action.py b/pype/blender/action.py
index 948123c3c5..4bd7e303fc 100644
--- a/pype/blender/action.py
+++ b/pype/blender/action.py
@@ -24,19 +24,24 @@ class SelectInvalidAction(pyblish.api.Action):
                 if isinstance(invalid_nodes, (list, tuple)):
                     invalid.extend(invalid_nodes)
             else:
-                self.log.warning("Failed plug-in doesn't have any selectable objects.")
+                self.log.warning(
+                    "Failed plug-in doesn't have any selectable objects."
+                )
+
+        bpy.ops.object.select_all(action='DESELECT')
 
         # Make sure every node is only processed once
         invalid = list(set(invalid))
-
-        bpy.ops.object.select_all(action='DESELECT')
-        if invalid:
-            invalid_names = [obj.name for obj in invalid]
-            self.log.info("Selecting invalid objects: %s", ", ".join(invalid_names))
-            # Select the objects and also make the last one the active object.
+        if not invalid:
+            self.log.info("No invalid nodes found.")
+            return
+
+        invalid_names = [obj.name for obj in invalid]
+        self.log.info(
+            "Selecting invalid objects: %s", ", ".join(invalid_names)
+        )
+        # Select the objects and also make the last one the active object.
+ for obj in invalid: + obj.select_set(True) + + bpy.context.view_layer.objects.active = invalid[-1] From bba0d10e9165b859a0cfd050adf4d8a1c886abfb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 30 Dec 2019 15:09:05 +0100 Subject: [PATCH 039/393] feat(nuke): adding back plugin renaming to only mov creation in running nuke session --- .../nuke/publish/extract_review_mov.py | 181 ++++++++++++++++++ 1 file changed, 181 insertions(+) create mode 100644 pype/plugins/nuke/publish/extract_review_mov.py diff --git a/pype/plugins/nuke/publish/extract_review_mov.py b/pype/plugins/nuke/publish/extract_review_mov.py new file mode 100644 index 0000000000..ed3101951c --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review_mov.py @@ -0,0 +1,181 @@ +import os +import nuke +import pyblish.api +import pype\ + +class ExtractReviewData(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Extract Review Data" + + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + + # Store selection + selection = [i for i in nuke.allNodes() if i["selected"].getValue()] + # Deselect all nodes to prevent external connections + [i["selected"].setValue(False) for i in nuke.allNodes()] + self.log.debug("creating staging dir:") + self.staging_dir(instance) + + self.log.debug("instance: {}".format(instance)) + self.log.debug("instance.data[families]: {}".format( + instance.data["families"])) + + self.render_review_representation(instance, representation="mov") + + # Restore selection + [i["selected"].setValue(False) for i in nuke.allNodes()] + [i["selected"].setValue(True) for i in selection] + + def render_review_representation(self, + instance, + representation="mov"): + + assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" + + temporary_nodes = [] + stagingDir = instance.data[ + 'representations'][0]["stagingDir"].replace("\\", "/") + self.log.debug("StagingDir `{0}`...".format(stagingDir)) + + collection = instance.data.get("collection", None) + + if collection: + # get path + fname = os.path.basename(collection.format( + "{head}{padding}{tail}")) + fhead = collection.format("{head}") + + # get first and last frame + first_frame = min(collection.indexes) + last_frame = max(collection.indexes) + else: + fname = os.path.basename(instance.data.get("path", None)) + fhead = os.path.splitext(fname)[0] + "." 
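            # NOTE: the trailing "." keeps the head ready for suffix
            # concatenation below, e.g. "shot_v001.mov" -> "shot_v001."
            # so that fhead + "baked.mov" yields "shot_v001.baked.mov".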
+ first_frame = instance.data.get("frameStart", None) + last_frame = instance.data.get("frameEnd", None) + + rnode = nuke.createNode("Read") + + rnode["file"].setValue( + os.path.join(stagingDir, fname).replace("\\", "/")) + + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + + # get input process and connect it to baking + ipn = self.get_view_process_node() + if ipn is not None: + ipn.setInput(0, previous_node) + previous_node = ipn + temporary_nodes.append(ipn) + + reformat_node = nuke.createNode("Reformat") + + ref_node = self.nodes.get("Reformat", None) + if ref_node: + for k, v in ref_node: + self.log.debug("k,v: {0}:{1}".format(k,v)) + if isinstance(v, unicode): + v = str(v) + reformat_node[k].setValue(v) + + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + + # create write node + write_node = nuke.createNode("Write") + + if representation in "mov": + file = fhead + "baked.mov" + name = "baked" + path = os.path.join(stagingDir, file).replace("\\", "/") + self.log.debug("Path: {}".format(path)) + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["review", "delete"] + + elif representation in "jpeg": + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(stagingDir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] + + # retime for + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 + + repre = { + 'name': name, + 'ext': representation, + 'files': file, + "stagingDir": stagingDir, + "frameStart": first_frame, + "frameEnd": last_frame, + "anatomy_template": "render", + "tags": tags + } + instance.data["representations"].append(repre) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + self.log.debug("representations: {}".format(instance.data["representations"])) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + def get_view_process_node(self): + + # Select only the target node + if nuke.selectedNodes(): + [n.setSelected(False) for n in nuke.selectedNodes()] + + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + nuke.nodeCopy('%clipboard%') + + [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all + + nuke.nodePaste('%clipboard%') + + ipn = nuke.selectedNode() + + return ipn From cd4ad045e6e53bb2ad9963e56d2acfac3c045ea2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 4 Jan 2020 17:14:31 +0100 Subject: [PATCH 040/393] fix(nks): workio on save_as if Untitled didnt do anything --- pype/nukestudio/workio.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff 
--git a/pype/nukestudio/workio.py b/pype/nukestudio/workio.py index 1681d8a2ab..c7484b826b 100644 --- a/pype/nukestudio/workio.py +++ b/pype/nukestudio/workio.py @@ -22,19 +22,16 @@ def has_unsaved_changes(): def save_file(filepath): + file = os.path.basename(filepath) project = hiero.core.projects()[-1] - # close `Untitled` project - if "Untitled" not in project.name(): - log.info("Saving project: `{}`".format(project.name())) + if project: + log.info("Saving project: `{}` as '{}'".format(project.name(), file)) project.saveAs(filepath) - elif not project: + else: log.info("Creating new project...") project = hiero.core.newProject() project.saveAs(filepath) - else: - log.info("Dropping `Untitled` project...") - return def open_file(filepath): From 9dae5afa2e7de9a3f47c591b70cf86dc8e2533e5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 4 Jan 2020 19:10:44 +0100 Subject: [PATCH 041/393] feat(nuke): added knobscripter for nuke --- setup/nuke/nuke_path/KnobScripter | 1 + setup/nuke/nuke_path/menu.py | 1 + 2 files changed, 2 insertions(+) create mode 160000 setup/nuke/nuke_path/KnobScripter diff --git a/setup/nuke/nuke_path/KnobScripter b/setup/nuke/nuke_path/KnobScripter new file mode 160000 index 0000000000..a1812f4159 --- /dev/null +++ b/setup/nuke/nuke_path/KnobScripter @@ -0,0 +1 @@ +Subproject commit a1812f415949719bd67cc6d387af3d26238d966b diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index fd87c98246..5a0cf8362b 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -1,4 +1,5 @@ import atom_server +import KnobScripter from pype.nuke.lib import ( writes_version_sync, From e60333d996e51630a66d5206bee85bdb8f0f31ad Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 4 Jan 2020 19:23:33 +0100 Subject: [PATCH 042/393] fix(nuke): fixing git submodule --- .gitmodules | 3 +++ setup/nuke/nuke_path/{KnobScripter => KnobScripter-github} | 0 2 files changed, 3 insertions(+) create mode 100644 .gitmodules rename setup/nuke/nuke_path/{KnobScripter => KnobScripter-github} (100%) diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..d29bd038aa --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "setup/nuke/nuke_path/KnobScripter-github"] + path = setup/nuke/nuke_path/KnobScripter-github + url = https://github.com/pypeclub/KnobScripter diff --git a/setup/nuke/nuke_path/KnobScripter b/setup/nuke/nuke_path/KnobScripter-github similarity index 100% rename from setup/nuke/nuke_path/KnobScripter rename to setup/nuke/nuke_path/KnobScripter-github From aa3d976caad3cfe28f66aeaca80c02621957ec65 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 4 Jan 2020 19:35:27 +0100 Subject: [PATCH 043/393] feat(nuke): activating KnobSripter in nuke --- setup/nuke/nuke_path/menu.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 5a0cf8362b..1155d2ea2e 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -1,5 +1,6 @@ +import os +import sys import atom_server -import KnobScripter from pype.nuke.lib import ( writes_version_sync, @@ -20,3 +21,12 @@ nuke.addOnScriptSave(checkInventoryVersions) nuke.addOnScriptSave(writes_version_sync) log.info('Automatic syncing of write file knob to script version') + +def adding_knobscripter_to_nukepath(): + nuke_path_dir = os.path.dirname(__file__) + knobscripter_path = os.path.join(nuke_path_dir, "KnobScripter-github") + sys.path.append(knobscripter_path) + import KnobScripter + 
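    # NOTE: the import is deliberately done inside the function, after the
    # sys.path.append() above, since KnobScripter only becomes importable
    # once its submodule directory is on sys.path; a top-level import would
    # raise ImportError before menu.py finished loading.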
log.info('Adding `KnobScripter`') + +adding_knobscripter_to_nukepath() From 752b30f8b7b8e5417bde552c3c3f2484c9580221 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 4 Jan 2020 21:38:06 +0100 Subject: [PATCH 044/393] fix(nuke): update submodule --- setup/nuke/nuke_path/KnobScripter-github | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup/nuke/nuke_path/KnobScripter-github b/setup/nuke/nuke_path/KnobScripter-github index a1812f4159..ada32b0144 160000 --- a/setup/nuke/nuke_path/KnobScripter-github +++ b/setup/nuke/nuke_path/KnobScripter-github @@ -1 +1 @@ -Subproject commit a1812f415949719bd67cc6d387af3d26238d966b +Subproject commit ada32b014470dd283ec52df09295e7b56c7e14b2 From 73e50fa03fcc6efccbcf49cfac120e3dbb4bf01a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 5 Jan 2020 14:24:56 +0100 Subject: [PATCH 045/393] change label to see whole label value --- pype/ftrack/actions/action_seed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py index cf0a4b0445..5cbc5d1cec 100644 --- a/pype/ftrack/actions/action_seed.py +++ b/pype/ftrack/actions/action_seed.py @@ -9,7 +9,7 @@ class SeedDebugProject(BaseAction): #: Action identifier. identifier = "seed.debug.project" #: Action label. - label = "SeedDebugProject" + label = "Seed Debug Project" #: Action description. description = "Description" #: priority From 0024688a449a81919ab4b3331126a4f451a112ff Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 5 Jan 2020 14:25:50 +0100 Subject: [PATCH 046/393] convert input values to integer and set to 0 if not successful --- pype/ftrack/actions/action_seed.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py index 5cbc5d1cec..260e854d14 100644 --- a/pype/ftrack/actions/action_seed.py +++ b/pype/ftrack/actions/action_seed.py @@ -265,6 +265,11 @@ class SeedDebugProject(BaseAction): def create_assets(self, project, asset_count): self.log.debug("*** Creating assets:") + try: + asset_count = int(asset_count) + except ValueError: + asset_count = 0 + main_entity = self.session.create("Folder", { "name": "Assets", "parent": project @@ -305,6 +310,19 @@ class SeedDebugProject(BaseAction): def create_shots(self, project, seq_count, shots_count): self.log.debug("*** Creating shots:") + + # Convert counts to integers + try: + seq_count = int(seq_count) + except ValueError: + seq_count = 0 + + try: + shots_count = int(shots_count) + except ValueError: + shots_count = 0 + + # Create Folder "Shots" main_entity = self.session.create("Folder", { "name": "Shots", "parent": project From 080f1f6819d09b5c7d9ca8c3f3bc061998e9933b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 5 Jan 2020 14:26:13 +0100 Subject: [PATCH 047/393] check if input values of seeder are greater than 0 --- pype/ftrack/actions/action_seed.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py index 260e854d14..1238e73e72 100644 --- a/pype/ftrack/actions/action_seed.py +++ b/pype/ftrack/actions/action_seed.py @@ -270,6 +270,10 @@ class SeedDebugProject(BaseAction): except ValueError: asset_count = 0 + if asset_count <= 0: + self.log.debug("No assets to create") + return + main_entity = self.session.create("Folder", { "name": "Assets", "parent": project @@ -322,6 +326,18 @@ class SeedDebugProject(BaseAction): except ValueError: shots_count = 0 + # Check if 
both are higher than 0
+        missing = []
+        if seq_count <= 0:
+            missing.append("sequences")
+
+        if shots_count <= 0:
+            missing.append("shots")
+
+        if missing:
+            self.log.debug("No {} to create".format(" and ".join(missing)))
+            return
+
         # Create Folder "Shots"
         main_entity = self.session.create("Folder", {
             "name": "Shots",

From 730fbdd5090d06c55a9890d73e62c91e30ab1453 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 6 Jan 2020 00:45:22 +0100
Subject: [PATCH 048/393] fix(global): reformat didn't return correct data

---
 pype/plugins/global/publish/extract_review.py | 46 ++++++++++++-------
 1 file changed, 30 insertions(+), 16 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index f621df0c66..0c39af64ed 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -1,5 +1,4 @@
 import os
-import math
 import pyblish.api
 import clique
 import pype.api
@@ -25,14 +24,16 @@ class ExtractReview(pyblish.api.InstancePlugin):
     ext_filter = []
 
     def process(self, instance):
+        to_width = 1920
+        to_height = 1080
 
         output_profiles = self.outputs or {}
 
         inst_data = instance.data
         fps = inst_data.get("fps")
         start_frame = inst_data.get("frameStart")
-        resolution_height = instance.data.get("resolutionHeight", 1080)
-        resolution_width = instance.data.get("resolutionWidth", 1920)
+        resolution_width = instance.data.get("resolutionWidth", to_width)
+        resolution_height = instance.data.get("resolutionHeight", to_height)
         pixel_aspect = instance.data.get("pixelAspect", 1)
 
         self.log.debug("Families In: `{}`".format(instance.data["families"]))
@@ -172,22 +173,35 @@ class ExtractReview(pyblish.api.InstancePlugin):
         self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect))
         self.log.debug("__ resolution_width: `{}`".format(resolution_width))
         self.log.debug("__ resolution_height: `{}`".format(resolution_height))
+
         # scaling non-square pixels and 1920 width
         if "reformat" in p_tags:
-            width_scale = 1920
-            width_half_pad = 0
-            res_w = int(float(resolution_width) * pixel_aspect)
-            height_half_pad = int((
-                (res_w - 1920) / (
-                    res_w * .01) * (
-                        1080 * .01)) / 2
-            )
-            height_scale = 1080 - (height_half_pad * 2)
-            if height_scale > 1080:
+            resolution_ratio = float(resolution_width / (
+                resolution_height * pixel_aspect))
+            delivery_ratio = float(to_width) / float(to_height)
+            self.log.debug(resolution_ratio)
+            self.log.debug(delivery_ratio)
+
+            if resolution_ratio < delivery_ratio:
+                self.log.debug("lower than delivery")
+                scale_factor = to_height / (
+                    resolution_height * pixel_aspect)
+                self.log.debug(scale_factor)
+                width_scale = int(to_width * scale_factor)
+                width_half_pad = int((
+                    to_width - width_scale)/2)
+                height_scale = to_height
                 height_half_pad = 0
-                height_scale = 1080
-                width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height))) ) / 2
-                width_scale = int(1920 - (width_half_pad * 2))
+            else:
+                self.log.debug("higher than delivery")
+                width_scale = to_width
+                width_half_pad = 0
+                scale_factor = to_width / resolution_width
+                self.log.debug(scale_factor)
+                height_scale = int(
+                    resolution_height * scale_factor)
+                height_half_pad = int(
+                    (to_height - height_scale)/2)
 
         self.log.debug("__ width_scale: `{}`".format(width_scale))
         self.log.debug("__ width_half_pad: `{}`".format(width_half_pad))

From f20c4025c5b1f0df30f659b13d2734c9e3ec3ae6 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 10:09:52 +0100
Subject: [PATCH 049/393] replace bpy.context.blend_data with
bpy.data --- pype/plugins/blender/load/submarine_model.py | 26 +++++++++---------- .../blender/publish/collect_current_file.py | 2 +- pype/plugins/blender/publish/collect_model.py | 2 +- .../blender/publish/validate_mesh_has_uv.py | 2 +- .../validate_mesh_no_negative_scale.py | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py index 99095d74cd..bd6db17650 100644 --- a/pype/plugins/blender/load/submarine_model.py +++ b/pype/plugins/blender/load/submarine_model.py @@ -38,7 +38,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Note: It is assumed that only 1 matching collection is found. """ - for collection in bpy.context.blend_data.collections: + for collection in bpy.data.collections: if collection.name != name: continue if collection.library is None: @@ -82,13 +82,13 @@ class BlendModelLoader(pype.blender.AssetLoader): ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( libpath, link=True, relative=relative ) as (_, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.context.blend_data.objects.new( + instance_empty = bpy.data.objects.new( container_name, None ) if not instance_empty.get("avalon"): @@ -97,7 +97,7 @@ class BlendModelLoader(pype.blender.AssetLoader): avalon_info.update({"container_name": container_name}) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - container = bpy.context.blend_data.collections[lib_container] + container = bpy.data.collections[lib_container] container.name = container_name instance_empty.instance_collection = container container.make_local() @@ -127,7 +127,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! 
""" - collection = bpy.context.blend_data.collections.get( + collection = bpy.data.collections.get( container["objectName"] ) libpath = Path(api.get_representation_path(representation)) @@ -181,7 +181,7 @@ class BlendModelLoader(pype.blender.AssetLoader): collection.objects.unlink(obj) remove_obj = True for coll in [ - coll for coll in bpy.context.blend_data.collections + coll for coll in bpy.data.collections if coll != collection ]: if ( @@ -194,7 +194,7 @@ class BlendModelLoader(pype.blender.AssetLoader): for obj in objects_to_remove: # Only delete objects that are not used elsewhere - bpy.context.blend_data.objects.remove(obj) + bpy.data.objects.remove(obj) instance_empties = [ obj for obj in collection.users_dupli_group @@ -205,7 +205,7 @@ class BlendModelLoader(pype.blender.AssetLoader): container_name = instance_empty["avalon"]["container_name"] relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( str(libpath), link=True, relative=relative ) as (_, data_to): data_to.collections = [container_name] @@ -219,7 +219,7 @@ class BlendModelLoader(pype.blender.AssetLoader): for obj in new_collection.objects: collection.objects.link(obj) - bpy.context.blend_data.collections.remove(new_collection) + bpy.data.collections.remove(new_collection) # Update the representation on the collection avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] avalon_prop["representation"] = str(representation["_id"]) @@ -237,7 +237,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.context.blend_data.collections.get( + collection = bpy.data.collections.get( container["objectName"] ) if not collection: @@ -293,18 +293,18 @@ class CacheModelLoader(pype.blender.AssetLoader): ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( libpath, link=True, relative=relative ) as (data_from, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.context.blend_data.objects.new( + instance_empty = bpy.data.objects.new( container_name, None ) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - collection = bpy.context.blend_data.collections[lib_container] + collection = bpy.data.collections[lib_container] collection.name = container_name instance_empty.instance_collection = collection diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py index 5756431314..a097c72047 100644 --- a/pype/plugins/blender/publish/collect_current_file.py +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -12,5 +12,5 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" - current_file = bpy.context.blend_data.filepath + current_file = bpy.data.filepath context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py index 4c7e840c17..ee10eaf7f2 100644 --- a/pype/plugins/blender/publish/collect_model.py +++ b/pype/plugins/blender/publish/collect_model.py @@ -23,7 +23,7 @@ class CollectModel(pyblish.api.ContextPlugin): representation set. If the representation is set, it is a loaded model and we don't want to publish it. 
""" - for collection in bpy.context.blend_data.collections: + for collection in bpy.data.collections: avalon_prop = collection.get(AVALON_PROPERTY) or dict() if (avalon_prop.get('family') == 'model' and not avalon_prop.get('representation')): diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py index f8c5092ab7..b71a40ad8f 100644 --- a/pype/plugins/blender/publish/validate_mesh_has_uv.py +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -35,7 +35,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): invalid = [] # TODO (jasper): only check objects in the collection that will be published? for obj in [ - obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + obj for obj in bpy.data.objects if obj.type == 'MESH' ]: # Make sure we are in object mode. bpy.ops.object.mode_set(mode='OBJECT') diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py index 1f050f6844..7e3b38dd19 100644 --- a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py +++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py @@ -20,7 +20,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): invalid = [] # TODO (jasper): only check objects in the collection that will be published? for obj in [ - obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + obj for obj in bpy.data.objects if obj.type == 'MESH' ]: if any(v < 0 for v in obj.scale): invalid.append(obj) From 6d1b064d0b2593bcdba2914e40b75a2cf820f3fb Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 6 Jan 2020 12:19:56 +0100 Subject: [PATCH 050/393] add previous behaviour as default --- .../ftrack/events/event_version_to_task_statuses.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index 1f5f1514d7..0d2a3130c0 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -62,9 +62,12 @@ class VersionToTaskStatus(BaseEvent): # Lower version status name and check if has mapping version_status = version_status_orig.lower() - new_status_names = status_mapping.get(version_status) - if not new_status_names: - continue + new_status_names = [] + mapped = status_mapping.get(version_status) + if mapped: + new_status_names.extend(list(mapped)) + + new_status_names.append(version_status) self.log.debug( "Processing AssetVersion status change: [ {} ]".format( @@ -72,10 +75,6 @@ class VersionToTaskStatus(BaseEvent): ) ) - # Backwards compatibility (convert string to list) - if isinstance(new_status_names, str): - new_status_names = [new_status_names] - # Lower all names from presets new_status_names = [name.lower() for name in new_status_names] From 68c8a253bfd3f82c3d535b4c5810324b9c88fa16 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 6 Jan 2020 12:43:43 +0100 Subject: [PATCH 051/393] feat(nuke): lock range on setting frame ranges --- pype/nuke/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index f213b596ad..12a083eca1 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -707,9 +707,11 @@ class WorkfileSettings(object): frame_start = int(data["frameStart"]) - handle_start frame_end = int(data["frameEnd"]) + handle_end + self._root_node["lock_range"].setValue(False) 
self._root_node["fps"].setValue(fps) self._root_node["first_frame"].setValue(frame_start) self._root_node["last_frame"].setValue(frame_end) + self._root_node["lock_range"].setValue(True) # setting active viewers try: From 9009e99712e339fb03476780517ff2a0b2e5d0ae Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 6 Jan 2020 14:07:11 +0100 Subject: [PATCH 052/393] fix(global): passing resolution to context --- pype/plugins/global/publish/collect_filesequences.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index d0ff5722a3..e658cd434c 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -150,6 +150,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if instance: instance_family = instance.get("family") pixel_aspect = instance.get("pixelAspect", 1) + resolution_width = instance.get("resolutionWidth", 1920) + resolution_height = instance.get("resolutionHeight", 1080) lut_path = instance.get("lutPath", None) @@ -229,6 +231,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "fps": fps, "source": data.get('source', ''), "pixelAspect": pixel_aspect, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height }) if lut_path: instance.data.update({"lutPath": lut_path}) From 25d2e135d9e78a8c2680b421cc08312f328f1ae6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:48:19 +0100 Subject: [PATCH 053/393] add custom attributes key to assetversion data in integrate frant instances --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 5e680a172a..5b8c195730 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -125,6 +125,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "thumbnail": comp['thumbnail'] } + # Add custom attributes for AssetVersion + assetversion_cust_attrs = {} + component_item["assetversion_data"]["custom_attributes"] = ( + assetversion_cust_attrs + ) + componentList.append(component_item) # Create copy with ftrack.unmanaged location if thumb or prev if comp.get('thumbnail') or comp.get('preview') \ From abe9334d5d4adf6962983bff4a8fc2939b0f4d9b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:49:18 +0100 Subject: [PATCH 054/393] add intent value from context to custom attributes if is set --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 5b8c195730..78583b0a2f 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -127,6 +127,10 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Add custom attributes for AssetVersion assetversion_cust_attrs = {} + intent_val = instance.context.data.get("intent") + if intent_val: + assetversion_cust_attrs["intent"] = intent_val + component_item["assetversion_data"]["custom_attributes"] = ( assetversion_cust_attrs ) From c71fc909cef6bd6536656a9abfcbd9ce36bc2fad Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:49:34 +0100 
Subject: [PATCH 055/393] set asset version custom attributes if there are any

---
 .../ftrack/publish/integrate_ftrack_api.py    | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 9fe4fddebf..337562c1f5 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -144,8 +144,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             "version": 0,
             "asset": asset_entity,
         }
-
-        assetversion_data.update(data.get("assetversion_data", {}))
+        _assetversion_data = data.get("assetversion_data", {})
+        assetversion_cust_attrs = _assetversion_data.pop(
+            "custom_attributes", {}
+        )
+        assetversion_data.update(_assetversion_data)
 
         assetversion_entity = session.query(
             self.query("AssetVersion", assetversion_data)
@@ -182,6 +185,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             existing_assetversion_metadata.update(assetversion_metadata)
             assetversion_entity["metadata"] = existing_assetversion_metadata
 
+        # Adding Custom Attributes
+        for attr, val in assetversion_cust_attrs.items():
+            if attr in assetversion_entity["custom_attributes"]:
+                assetversion_entity["custom_attributes"][attr] = val
+                continue
+
+            self.log.warning((
+                "Custom Attribute \"{0}\""
+                " is not available for AssetVersion."
+                " Can't set its value to: \"{1}\""
+            ).format(attr, str(val)))
+
         # Have to commit the version and asset, because location can't
         # determine the final location without.
         try:

From 3d33f8fd4ab22eadb27b46ecea8d063f5b856549 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 18:09:46 +0100
Subject: [PATCH 056/393] added get_fps method to burnins class which calculates fps from r_frame_rate

---
 pype/scripts/otio_burnin.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 3e8cb3b0c4..a8c4017c52 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -98,6 +98,24 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if options_init:
             self.options_init.update(options_init)
 
+    def get_fps(str_value):
+        if str_value == "0/0":
+            print("Source has \"r_frame_rate\" value set to \"0/0\".")
+            return "Unknown"
+
+        items = str_value.split("/")
+        if len(items) == 1:
+            fps = float(items[0])
+
+        elif len(items) == 2:
+            fps = float(items[0]) / float(items[1])
+
+        # Check if fps is integer or float number
+        if int(fps) == fps:
+            fps = int(fps)
+
+        return str(fps)
+
     def add_text(self, text, align, options=None):
         """
         Adding static text to a filter.
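Note: as committed above, get_fps is defined on the class without a `self`
parameter, so the `self.get_fps(...)` call the next patch introduces would
receive the instance as `str_value`; patch 058 below moves the function to
module scope, which resolves that. A quick sanity check of the module-level
form (the rational strings are what ffprobe reports for a stream):

    get_fps("25/1")        # -> "25"   (whole rates collapse to int form)
    get_fps("24000/1001")  # -> "23.976..." (fractional rates stay float)
    get_fps("0/0")         # -> "Unknown"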
From bb86c94c184645631906688ba184e29f50363be8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:10:19 +0100 Subject: [PATCH 057/393] width, height and fps values from ffprobe are added to options data --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index a8c4017c52..ea1554876f 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -95,9 +95,24 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): streams = _streams(source) super().__init__(source, streams) + if options_init: self.options_init.update(options_init) + if "resolution_width" not in self.options_init: + self.options_init["resolution_width"] = ( + streams[0].get("width", "Unknown") + ) + + if "resolution_height" not in self.options_init: + self.options_init["resolution_height"] = ( + streams[0].get("height", "Unknown") + ) + + if "fps" not in self.options_init: + fps = self.get_fps(streams[0]["r_frame_rate"]) + self.options_init["fps"] = fps + def get_fps(str_value): if str_value == "0/0": print("Source has \"r_frame_rate\" value set to \"0/0\".") From 6f4d50d41d8b62f57d13e1c3fdc6fd121c5cd8ac Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:25:07 +0100 Subject: [PATCH 058/393] get_fps moved from Burnin class --- pype/scripts/otio_burnin.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ea1554876f..f6b5c34bff 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -39,6 +39,25 @@ def _streams(source): return json.loads(out)['streams'] +def get_fps(str_value): + if str_value == "0/0": + print("Source has \"r_frame_rate\" value set to \"0/0\".") + return "Unknown" + + items = str_value.split("/") + if len(items) == 1: + fps = float(items[0]) + + elif len(items) == 2: + fps = float(items[0]) / float(items[1]) + + # Check if fps is integer or float number + if int(fps) == fps: + fps = int(fps) + + return str(fps) + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. @@ -113,24 +132,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): fps = self.get_fps(streams[0]["r_frame_rate"]) self.options_init["fps"] = fps - def get_fps(str_value): - if str_value == "0/0": - print("Source has \"r_frame_rate\" value set to \"0/0\".") - return "Unknown" - - items = str_value.split("/") - if len(items) == 1: - fps = float(items[0]) - - elif len(items) == 2: - fps = float(items[0]) / float(items[1]) - - # Check if fps is integer or float number - if int(fps) == fps: - fps = int(fps) - - return str(fps) - def add_text(self, text, align, options=None): """ Adding static text to a filter. 
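Note: the `streams` list these patches read from is ffprobe output; a minimal
self-contained sketch of such a probe (assuming an `ffprobe` binary on PATH
and a hypothetical "review.mov"), showing the three stream keys the burnin
code consumes and mirroring what _streams(source) parses:

    import json
    import subprocess

    def probe_first_stream(path):
        # JSON-formatted stream metadata for the first stream of the file
        out = subprocess.check_output([
            "ffprobe", "-v", "error", "-print_format", "json",
            "-show_streams", path
        ])
        return json.loads(out)["streams"][0]

    stream = probe_first_stream("review.mov")
    print(stream.get("width"), stream.get("height"),
          stream.get("r_frame_rate"))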
From 3dac4c1b69da68a850e1be4730f37b45b46fabd4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 18:30:17 +0100
Subject: [PATCH 059/393] data from ffprobe is stored in data, not in options

---
 pype/scripts/otio_burnin.py | 25 +++++++++++--------------
 1 file changed, 11 insertions(+), 14 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index f6b5c34bff..0c985a0faf 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -118,20 +118,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if options_init:
             self.options_init.update(options_init)
 
-        if "resolution_width" not in self.options_init:
-            self.options_init["resolution_width"] = (
-                streams[0].get("width", "Unknown")
-            )
-
-        if "resolution_height" not in self.options_init:
-            self.options_init["resolution_height"] = (
-                streams[0].get("height", "Unknown")
-            )
-
-        if "fps" not in self.options_init:
-            fps = self.get_fps(streams[0]["r_frame_rate"])
-            self.options_init["fps"] = fps
-
     def add_text(self, text, align, options=None):
         """
         Adding static text to a filter.
@@ -362,6 +348,17 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
     frame_start = data.get("frame_start")
     frame_start_tc = data.get('frame_start_tc', frame_start)
+
+    stream = burnin._streams[0]
+    if "resolution_width" not in data:
+        data["resolution_width"] = stream.get("width", "Unknown")
+
+    if "resolution_height" not in data:
+        data["resolution_height"] = stream.get("height", "Unknown")
+
+    if "fps" not in data:
+        data["fps"] = get_fps(stream.get("r_frame_rate", "0/0"))
+
     for align_text, preset in presets.get('burnins', {}).items():
         align = None
         if align_text == 'TOP_LEFT':

From f84f1537def6d65e0e9c399083e84111e940c83a Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 18:30:24 +0100
Subject: [PATCH 060/393] formatting changes

---
 pype/scripts/otio_burnin.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 0c985a0faf..b3d0e544db 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -413,12 +413,14 @@
 
         elif bi_func == 'timecode':
             burnin.add_timecode(align, start_frame=frame_start_tc)
+
         elif bi_func == 'text':
             if not preset.get('text'):
                 log.error('Text is not set for text function burnin!')
                 return
             text = preset['text'].format(**data)
             burnin.add_text(text, align)
+
         elif bi_func == "datetime":
             date_format = preset["format"]
             burnin.add_datetime(date_format, align)
@@ -445,4 +447,4 @@ if __name__ == '__main__':
         data['codec'],
         data['output'],
         data['burnin_data']
-    )
+    )

From a6af3ca90bb72c4bf430fa2d41f71590ab77ef04 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 7 Jan 2020 11:12:42 +0100
Subject: [PATCH 061/393] fix(global): reformat didn't compare resolution float and int properly

---
 pype/plugins/global/publish/extract_review.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 0c39af64ed..deceaa93a5 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -196,7 +196,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 self.log.debug("higher than delivery")
                 width_scale = to_width
                 width_half_pad = 0
-                scale_factor = to_width / resolution_width
+                scale_factor = float(to_width) / float(resolution_width)
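                # NOTE: under Python 2, `to_width / resolution_width` floors
                # to an int when both operands are ints (1920 / 2048 == 0),
                # zeroing the scale factor; the float() casts restore true
                # division (1920.0 / 2048.0 == 0.9375).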
self.log.debug(scale_factor) height_scale = int( resolution_height * scale_factor) From 26f2f882e2997f8e10f8098216edbe241b0cc144 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 7 Jan 2020 13:12:29 +0100 Subject: [PATCH 062/393] fix(otio): burnin right side didnt format properly --- pype/scripts/otio_burnin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 3e8cb3b0c4..89b74e258e 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -139,12 +139,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): options['frame_offset'] = start_frame expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] + _text = str(int(self.end_frame + options['frame_offset'])) if text and isinstance(text, str): text = r"{}".format(text) expr = text.replace("{current_frame}", expr) + text = text.replace("{current_frame}", _text) options['expression'] = expr - text = str(int(self.end_frame + options['frame_offset'])) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) def add_timecode(self, align, options=None, start_frame=None): From ade2a26e84b80c01fd3ea4b39bc216b483f786ab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 8 Jan 2020 00:02:14 +0100 Subject: [PATCH 063/393] feat(nuke): adding back baking mov from nuke --- pype/nuke/lib.py | 275 ++++++++++++++---- .../global/publish/collect_filesequences.py | 2 + .../nuke/publish/extract_review_data_lut.py | 3 +- .../nuke/publish/extract_review_data_mov.py | 57 ++++ .../nuke/publish/extract_review_mov.py | 181 ------------ 5 files changed, 273 insertions(+), 245 deletions(-) create mode 100644 pype/plugins/nuke/publish/extract_review_data_mov.py delete mode 100644 pype/plugins/nuke/publish/extract_review_mov.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 12a083eca1..9201e9c63e 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1199,13 +1199,13 @@ class BuildWorkfile(WorkfileSettings): self.ypos -= (self.ypos_size * multiply) + self.ypos_gap -class Exporter_review_lut: +class Exporter_review: """ - Generator object for review lut from Nuke + Base class object for generating review data from Nuke Args: klass (pyblish.plugin): pyblish plugin parent - + instance (pyblish.context.instance): """ _temp_nodes = [] @@ -1213,6 +1213,101 @@ class Exporter_review_lut: "representations": list() }) + def __init__(self, + klass, + instance + ): + + self.log = klass.log + self.instance = instance + self.path_in = self.instance.data.get("path", None) + self.staging_dir = self.instance.data["stagingDir"] + self.collection = self.instance.data.get("collection", None) + + def get_file_info(self): + if self.collection: + self.log.debug("Collection: `{}`".format(self.collection)) + # get path + self.fname = os.path.basename(self.collection.format( + "{head}{padding}{tail}")) + self.fhead = self.collection.format("{head}") + + # get first and last frame + self.first_frame = min(self.collection.indexes) + self.last_frame = max(self.collection.indexes) + else: + self.fname = os.path.basename(self.path_in) + self.fhead = os.path.splitext(self.fname)[0] + "." 
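    # NOTE: _temp_nodes and data are class attributes, so every instance of
    # Exporter_review (and its subclasses) shares the same list and dict
    # objects; nodes appended by one exporter remain visible to the next.
    # Assigning fresh containers inside __init__ would avoid that.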
+ self.first_frame = self.instance.data.get("frameStart", None) + self.last_frame = self.instance.data.get("frameEnd", None) + + if "#" in self.fhead: + self.fhead = self.fhead.replace("#", "")[:-1] + + def get_representation_data(self, tags=None, range=False): + add_tags = [] + if tags: + add_tags = tags + + repre = { + 'name': self.name, + 'ext': self.ext, + 'files': self.file, + "stagingDir": self.staging_dir, + "anatomy_template": "publish", + "tags": [self.name.replace("_", "-")] + add_tags + } + + if range: + repre.update({ + "frameStart": self.first_frame, + "frameEnd": self.last_frame, + }) + + self.data["representations"].append(repre) + + def get_view_process_node(self): + """ + Will get any active view process. + + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + anlib.reset_selection() + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn + + +class Exporter_review_lut(Exporter_review): + """ + Generator object for review lut from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + + + """ def __init__(self, klass, instance, @@ -1221,9 +1316,8 @@ class Exporter_review_lut: cube_size=None, lut_size=None, lut_style=None): - - self.log = klass.log - self.instance = instance + # initialize parent class + Exporter_review.__init__(self, klass, instance) self.name = name or "baked_lut" self.ext = ext or "cube" @@ -1231,16 +1325,13 @@ class Exporter_review_lut: self.lut_size = lut_size or 1024 self.lut_style = lut_style or "linear" - self.stagingDir = self.instance.data["stagingDir"] - self.collection = self.instance.data.get("collection", None) - # set frame start / end and file name to self self.get_file_info() self.log.info("File info was set...") self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/") + self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") def generate_lut(self): # ---------- start nodes creation @@ -1303,70 +1394,128 @@ class Exporter_review_lut: return self.data - def get_file_info(self): - if self.collection: - self.log.debug("Collection: `{}`".format(self.collection)) - # get path - self.fname = os.path.basename(self.collection.format( - "{head}{padding}{tail}")) - self.fhead = self.collection.format("{head}") - # get first and last frame - self.first_frame = min(self.collection.indexes) - self.last_frame = max(self.collection.indexes) +class Exporter_review_mov(Exporter_review): + """ + Metaclass for generating review mov files + + Args: + klass (pyblish.plugin): pyblish plugin parent + + + """ + def __init__(self, + klass, + instance, + name=None, + ext=None, + ): + # initialize parent class + Exporter_review.__init__(self, klass, instance) + + # passing presets for nodes to self + if hasattr(klass, "nodes"): + self.nodes = klass.nodes else: - self.fname = os.path.basename(self.instance.data.get("path", None)) - self.fhead = os.path.splitext(self.fname)[0] + "." 
- self.first_frame = self.instance.data.get("frameStart", None) - self.last_frame = self.instance.data.get("frameEnd", None) + self.nodes = {} - if "#" in self.fhead: - self.fhead = self.fhead.replace("#", "")[:-1] + self.name = name or "baked" + self.ext = ext or "mov" - def get_representation_data(self): + # set frame start / end and file name to self + self.get_file_info() - repre = { - 'name': self.name, - 'ext': self.ext, - 'files': self.file, - "stagingDir": self.stagingDir, - "anatomy_template": "publish", - "tags": [self.name.replace("_", "-")] - } + self.log.info("File info was set...") - self.data["representations"].append(repre) + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") - def get_view_process_node(self): - """ - Will get any active view process. + def generate_mov(self, farm=False): + # ---------- start nodes creation - Arguments: - self (class): in object definition + # Read node + r_node = nuke.createNode("Read") + r_node["file"].setValue(self.path_in) + r_node["first"].setValue(self.first_frame) + r_node["origfirst"].setValue(self.first_frame) + r_node["last"].setValue(self.last_frame) + r_node["origlast"].setValue(self.last_frame) + # connect + self._temp_nodes.append(r_node) + self.previous_node = r_node + self.log.debug("Read... `{}`".format(self._temp_nodes)) - Returns: - nuke.Node: copy node of Input Process node - """ - anlib.reset_selection() - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) + # View Process node + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - if ipn_orig: - # copy selected to clipboard - nuke.nodeCopy('%clipboard%') - # reset selection - anlib.reset_selection() - # paste node and selection is on it only - nuke.nodePaste('%clipboard%') - # assign to variable - ipn = nuke.selectedNode() + # reformat_node = nuke.createNode("Reformat") + # rn_preset = self.nodes.get("Reformat", None) + # if rn_preset: + # self.log.debug("Reformat preset") + # for k, v in rn_preset: + # self.log.debug("k, v: {0}:{1}".format(k, v)) + # if isinstance(v, unicode): + # v = str(v) + # reformat_node[k].setValue(v) + # # connect + # reformat_node.setInput(0, self.previous_node) + # self._temp_nodes.append(reformat_node) + # self.previous_node = reformat_node + # self.log.debug("Reformat... `{}`".format(self._temp_nodes)) + + # OCIODisplay node + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + + # Write node + write_node = nuke.createNode("Write") + self.log.debug("Path: {}".format(self.path)) + self.instance.data["baked_colorspace_movie"] = self.path + write_node["file"].setValue(self.path) + write_node["file_type"].setValue(self.ext) + write_node["raw"].setValue(1) + # connect + write_node.setInput(0, self.previous_node) + self._temp_nodes.append(write_node) + self.log.debug("Write... `{}`".format(self._temp_nodes)) + + # ---------- end nodes creation + + if not farm: + self.log.info("Rendering... 
") + # Render Write node + nuke.execute( + write_node.name(), + int(self.first_frame), + int(self.last_frame)) + + self.log.info("Rendered...") + + # ---------- generate representation data + self.get_representation_data( + tags=["review", "delete"], + range=True + ) + + self.log.debug("Representation... `{}`".format(self.data)) + + # ---------- Clean up + # for node in self._temp_nodes: + # nuke.delete(node) + # self.log.info("Deleted nodes...") + + return self.data - return ipn def get_dependent_nodes(nodes): """Get all dependent nodes connected to the list of nodes. diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index e658cd434c..6a59f5dffc 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -148,6 +148,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): os.environ.update(session) instance = metadata.get("instance") if instance: + # here is the place to add ability for nuke noninteractive + # ______________________________________ instance_family = instance.get("family") pixel_aspect = instance.get("pixelAspect", 1) resolution_width = instance.get("resolutionWidth", 1920) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index dfc10952cd..f5fc3e59db 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -6,7 +6,7 @@ import pype reload(pnlib) -class ExtractReviewLutData(pype.api.Extractor): +class ExtractReviewDataLut(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py @@ -37,6 +37,7 @@ class ExtractReviewLutData(pype.api.Extractor): self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) + # generate data with anlib.maintained_selection(): exporter = pnlib.Exporter_review_lut( self, instance diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py new file mode 100644 index 0000000000..585bd3f108 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -0,0 +1,57 @@ +import os +import nuke +import pyblish.api +from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib +import pype +reload(pnlib) + + +class ExtractReviewDataMov(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Extract Review Data Mov" + + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + families = instance.data["families"] + self.log.info("Creating staging dir...") + if "representations" in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = [] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) + + # generate data + with anlib.maintained_selection(): + exporter = pnlib.Exporter_review_mov( + self, instance) + + if "render.farm" in families: + instance.data["families"].remove("review") + 
instance.data["families"].remove("ftrack") + data = exporter.generate_mov(farm=True) + else: + data = exporter.generate_mov() + + # assign to representations + instance.data["representations"] += data["representations"] + + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) diff --git a/pype/plugins/nuke/publish/extract_review_mov.py b/pype/plugins/nuke/publish/extract_review_mov.py deleted file mode 100644 index ed3101951c..0000000000 --- a/pype/plugins/nuke/publish/extract_review_mov.py +++ /dev/null @@ -1,181 +0,0 @@ -import os -import nuke -import pyblish.api -import pype\ - -class ExtractReviewData(pype.api.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review Data" - - families = ["review"] - hosts = ["nuke"] - - def process(self, instance): - - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("creating staging dir:") - self.staging_dir(instance) - - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) - - self.render_review_representation(instance, representation="mov") - - # Restore selection - [i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] - - def render_review_representation(self, - instance, - representation="mov"): - - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" - - temporary_nodes = [] - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) - - collection = instance.data.get("collection", None) - - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") - - # get first and last frame - first_frame = min(collection.indexes) - last_frame = max(collection.indexes) - else: - fname = os.path.basename(instance.data.get("path", None)) - fhead = os.path.splitext(fname)[0] + "." 
- first_frame = instance.data.get("frameStart", None) - last_frame = instance.data.get("frameEnd", None) - - rnode = nuke.createNode("Read") - - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) - - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode - - # get input process and connect it to baking - ipn = self.get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - reformat_node = nuke.createNode("Reformat") - - ref_node = self.nodes.get("Reformat", None) - if ref_node: - for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) - if isinstance(v, unicode): - v = str(v) - reformat_node[k].setValue(v) - - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - # create write node - write_node = nuke.createNode("Write") - - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 - - repre = { - 'name': name, - 'ext': representation, - 'files': file, - "stagingDir": stagingDir, - "frameStart": first_frame, - "frameEnd": last_frame, - "anatomy_template": "render", - "tags": tags - } - instance.data["representations"].append(repre) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - self.log.debug("representations: {}".format(instance.data["representations"])) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def get_view_process_node(self): - - # Select only the target node - if nuke.selectedNodes(): - [n.setSelected(False) for n in nuke.selectedNodes()] - - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) - - if ipn_orig: - nuke.nodeCopy('%clipboard%') - - [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all - - nuke.nodePaste('%clipboard%') - - ipn = nuke.selectedNode() - - return ipn From fbb4c247f60d2d6210e38287f8206c2729e72779 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 8 Jan 2020 00:38:08 +0100 Subject: [PATCH 064/393] fix(global): fixing reformat and letter box --- pype/nuke/lib.py | 23 ++--------- pype/plugins/global/publish/extract_review.py | 38 ++++++++++++------- 2 
files changed, 29 insertions(+), 32 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 9201e9c63e..c468343545 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1454,21 +1454,6 @@ class Exporter_review_mov(Exporter_review): self.previous_node = ipn self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - # reformat_node = nuke.createNode("Reformat") - # rn_preset = self.nodes.get("Reformat", None) - # if rn_preset: - # self.log.debug("Reformat preset") - # for k, v in rn_preset: - # self.log.debug("k, v: {0}:{1}".format(k, v)) - # if isinstance(v, unicode): - # v = str(v) - # reformat_node[k].setValue(v) - # # connect - # reformat_node.setInput(0, self.previous_node) - # self._temp_nodes.append(reformat_node) - # self.previous_node = reformat_node - # self.log.debug("Reformat... `{}`".format(self._temp_nodes)) - # OCIODisplay node dag_node = nuke.createNode("OCIODisplay") # connect @@ -1509,10 +1494,10 @@ class Exporter_review_mov(Exporter_review): self.log.debug("Representation... `{}`".format(self.data)) - # ---------- Clean up - # for node in self._temp_nodes: - # nuke.delete(node) - # self.log.info("Deleted nodes...") + # ---------- Clean up + for node in self._temp_nodes: + nuke.delete(node) + self.log.info("Deleted nodes...") return self.data diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index deceaa93a5..28eb0289fa 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -156,13 +156,34 @@ class ExtractReview(pyblish.api.InstancePlugin): # preset's output data output_args.extend(profile.get('output', [])) + # defining image ratios + resolution_ratio = float(resolution_width / ( + resolution_height * pixel_aspect)) + delivery_ratio = float(to_width) / float(to_height) + self.log.debug(resolution_ratio) + self.log.debug(delivery_ratio) + + # get scale factor + scale_factor = to_height / ( + resolution_height * pixel_aspect) + self.log.debug(scale_factor) + # letter_box lb = profile.get('letter_box', 0) - if lb is not 0: + if lb != 0: + ffmpet_width = to_width + ffmpet_height = to_height if "reformat" not in p_tags: lb /= pixel_aspect + if resolution_ratio != delivery_ratio: + ffmpet_width = resolution_width + ffmpet_height = int( + resolution_height * pixel_aspect) + else: + lb /= scale_factor + output_args.append( - "-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) + "-filter:v scale={0}x{1}:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{2})))/2):iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black".format(ffmpet_width, ffmpet_height, lb)) # In case audio is longer than video. 
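# Editorial aside, not part of the original patch: ffmpeg's documented "-shortest" output option finishes encoding when the shortest input stream ends, so a longer audio track cannot pad the review past the last rendered frame.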
output_args.append("-shortest") @@ -176,17 +197,8 @@ class ExtractReview(pyblish.api.InstancePlugin): # scaling none square pixels and 1920 width if "reformat" in p_tags: - resolution_ratio = float(resolution_width / ( - resolution_height * pixel_aspect)) - delivery_ratio = float(to_width) / float(to_height) - self.log.debug(resolution_ratio) - self.log.debug(delivery_ratio) - if resolution_ratio < delivery_ratio: self.log.debug("lower then delivery") - scale_factor = to_height / ( - resolution_height * pixel_aspect) - self.log.debug(scale_factor) width_scale = int(to_width * scale_factor) width_half_pad = int(( to_width - width_scale)/2) @@ -209,8 +221,8 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("__ height_half_pad: `{}`".format(height_half_pad)) - scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format( - width_scale, height_scale, width_half_pad, height_half_pad + scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format( + width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad ) vf_back = self.add_video_filter_args( From 5bf0f2973dad63d690d2201443159879b5326f22 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:48:19 +0100 Subject: [PATCH 065/393] add custom attributes key to assetversion data in integrate frant instances --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 5e680a172a..5b8c195730 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -125,6 +125,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "thumbnail": comp['thumbnail'] } + # Add custom attributes for AssetVersion + assetversion_cust_attrs = {} + component_item["assetversion_data"]["custom_attributes"] = ( + assetversion_cust_attrs + ) + componentList.append(component_item) # Create copy with ftrack.unmanaged location if thumb or prev if comp.get('thumbnail') or comp.get('preview') \ From 19f2b8148cd4ab2ced775491318ff1a2190bfd3f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:49:18 +0100 Subject: [PATCH 066/393] add intent value from context to custom attributes if is set --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 5b8c195730..78583b0a2f 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -127,6 +127,10 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Add custom attributes for AssetVersion assetversion_cust_attrs = {} + intent_val = instance.context.data.get("intent") + if intent_val: + assetversion_cust_attrs["intent"] = intent_val + component_item["assetversion_data"]["custom_attributes"] = ( assetversion_cust_attrs ) From 264a7c177ba985d3d5b72a0c5cdd4628754426d9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:49:34 +0100 Subject: [PATCH 067/393] set asset version custom attributes if there are any --- .../ftrack/publish/integrate_ftrack_api.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py 
b/pype/plugins/ftrack/publish/integrate_ftrack_api.py index 9fe4fddebf..337562c1f5 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py @@ -144,8 +144,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): "version": 0, "asset": asset_entity, } - - assetversion_data.update(data.get("assetversion_data", {})) + _assetversion_data = data.get("assetversion_data", {}) + assetversion_cust_attrs = _assetversion_data.pop( + "custom_attributes", {} + ) + assetversion_data.update(_assetversion_data) assetversion_entity = session.query( self.query("AssetVersion", assetversion_data) @@ -182,6 +185,18 @@ existing_assetversion_metadata.update(assetversion_metadata) assetversion_entity["metadata"] = existing_assetversion_metadata + # Adding Custom Attributes + for attr, val in assetversion_cust_attrs.items(): + if attr in assetversion_entity["custom_attributes"]: + assetversion_entity["custom_attributes"][attr] = val + continue + + self.log.warning(( + "Custom Attribute \"{0}\"" + " is not available for AssetVersion." + " Can't set its value to: \"{1}\"" + ).format(attr, str(val))) + # Have to commit the version and asset, because location can't # determine the final location without. try: From 023aec0a61d6f239970cd848f0fb3cac19ab1a15 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 17:35:23 +0100 Subject: [PATCH 068/393] added template data to burnin data --- pype/plugins/global/publish/extract_burnin.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 95a7144081..33935b4272 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -32,6 +32,7 @@ class ExtractBurnin(pype.api.Extractor): frame_start = int(instance.data.get("frameStart") or 0) frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 + prep_data = { "username": instance.context.data['user'], "asset": os.environ['AVALON_ASSET'], @@ -39,8 +40,14 @@ class ExtractBurnin(pype.api.Extractor): "frame_start": frame_start, "frame_end": frame_end, "duration": duration, - "version": version + "version": version, + "comment": instance.context.data.get("comment"), + "intent": instance.context.data.get("intent") } + # Update data with template data + template_data = instance.data.get("assumedTemplateData") or {} + prep_data.update(template_data) + self.log.debug("__ prep_data: {}".format(prep_data)) for i, repre in enumerate(instance.data["representations"]): self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre)) From f89c1d3dbc28d2f533eb4828e889ece1f68a33f0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 17:36:10 +0100 Subject: [PATCH 069/393] added filled anatomy to burnin data to be able to use `anatomy[...][...]` in burnin presets --- pype/plugins/global/publish/extract_burnin.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 33935b4272..06a62dd98b 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -1,5 +1,6 @@ import os import json +import copy import pype.api import pyblish @@ -48,6 +49,9 @@ class ExtractBurnin(pype.api.Extractor): template_data = 
instance.data.get("assumedTemplateData") or {} prep_data.update(template_data) + # get anatomy project + anatomy = instance.context.data['anatomy'] + self.log.debug("__ prep_data: {}".format(prep_data)) for i, repre in enumerate(instance.data["representations"]): self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre)) @@ -69,11 +73,17 @@ class ExtractBurnin(pype.api.Extractor): ) self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) + # create copy of prep_data for anatomy formatting + _prep_data = copy.deepcopy(prep_data) + _prep_data["representation"] = repre["name"] + _prep_data["anatomy"] = ( + anatomy.format_all(_prep_data).get("solved") or {} + ) burnin_data = { "input": full_movie_path.replace("\\", "/"), "codec": repre.get("codec", []), "output": full_burnin_path.replace("\\", "/"), - "burnin_data": prep_data + "burnin_data": _prep_data } self.log.debug("__ burnin_data2: {}".format(burnin_data)) From dde70634e1d8789b17db595560143d03ddd459a3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 17:49:42 +0100 Subject: [PATCH 070/393] replace backslash in hierararchy which may cause issues in burnin path --- pype/plugins/global/publish/collect_templates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 9b0c03fdee..48623eec22 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -75,7 +75,7 @@ class CollectTemplates(pyblish.api.InstancePlugin): "asset": asset_name, "subset": subset_name, "version": version_number, - "hierarchy": hierarchy, + "hierarchy": hierarchy.replace("\\", "/"), "representation": "TEMP"} instance.data["template"] = template From 75cb30fe1da52f124ab25ed084ea1e63fab1a677 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:11:27 +0100 Subject: [PATCH 071/393] inital version of delivery action in ftrack --- pype/ftrack/actions/action_delivery.py | 421 +++++++++++++++++++++++++ 1 file changed, 421 insertions(+) create mode 100644 pype/ftrack/actions/action_delivery.py diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py new file mode 100644 index 0000000000..e23e35f91c --- /dev/null +++ b/pype/ftrack/actions/action_delivery.py @@ -0,0 +1,421 @@ +import os +import copy +import shutil + +import clique +from bson.objectid import ObjectId +from avalon import pipeline +from avalon.vendor import filelink +from avalon.tools.libraryloader.io_nonsingleton import DbConnector + +from pypeapp import Anatomy +from pype.ftrack import BaseAction +from pype.ftrack.lib.avalon_sync import CustAttrIdKey + + +class Delivery(BaseAction): + '''Edit meta data action.''' + + #: Action identifier. + identifier = "delivery.action" + #: Action label. + label = "Delivery" + #: Action description. 
+ description = "Deliver data to client" + #: roles that are allowed to register this action + role_list = ["Pypeclub", "Administrator", "Project manager"] + # icon = '{}/ftrack/action_icons/TestAction.svg'.format( + # os.environ.get('PYPE_STATICS_SERVER', '') + # ) + + db_con = DbConnector() + + def discover(self, session, entities, event): + ''' Validation ''' + for entity in entities: + if entity.entity_type.lower() == "assetversion": + return True + + return False + + def interface(self, session, entities, event): + if event["data"].get("values", {}): + return + + title = "Delivery data to Client" + + items = [] + item_splitter = {"type": "label", "value": "---"} + + # Prepare component names for processing + components = None + project = None + for entity in entities: + if project is None: + project_id = None + for ent_info in entity["link"]: + if ent_info["type"].lower() == "project": + project_id = ent_info["id"] + break + + if project_id is None: + project = entity["asset"]["parent"]["project"] + else: + project = session.query(( + "select id, full_name from Project where id is \"{}\"" + ).format(project_id)).one() + + _components = set( + [component["name"] for component in entity["components"]] + ) + if components is None: + components = _components + continue + + components = components.intersection(_components) + if not components: + break + + project_name = project["full_name"] + items.append({ + "type": "hidden", + "name": "__project_name__", + "value": project_name + }) + + # Prpeare anatomy data + anatomy = Anatomy(project_name) + new_anatomies = [] + first = None + for key in (anatomy.templates.get("delivery") or {}): + new_anatomies.append({ + "label": key, + "value": key + }) + if first is None: + first = key + + skipped = False + # Add message if there are any common components + if not components or not new_anatomies: + skipped = True + items.append({ + "type": "label", + "value": "
<h1>Something went wrong:</h1>
" + }) + + items.append({ + "type": "hidden", + "name": "__skipped__", + "value": skipped + }) + + if not components: + if len(entities) == 1: + items.append({ + "type": "label", + "value": ( + "- Selected entity doesn't have components to deliver." + ) + }) + else: + items.append({ + "type": "label", + "value": ( + "- Selected entities don't have common components." + ) + }) + + # Add message if delivery anatomies are not set + if not new_anatomies: + items.append({ + "type": "label", + "value": ( + "- `\"delivery\"` anatomy key is not set in config." + ) + }) + + # Skip if there are any data shortcomings + if skipped: + return { + "items": items, + "title": title + } + + items.append({ + "value": "
<h1>Choose Components to deliver</h1>
", + "type": "label" + }) + + for component in components: + items.append({ + "type": "boolean", + "value": False, + "label": component, + "name": component + }) + + items.append(item_splitter) + + items.append({ + "value": "
<h1>Location for delivery</h1>
", + "type": "label" + }) + + items.append({ + "type": "text", + "name": "__location_path__", + "empty_text": "Type location path here..." + }) + + items.append(item_splitter) + + items.append({ + "value": "
<h1>Anatomy of delivery files</h1>
", + "type": "label" + }) + + items.append({ + "type": "label", + "value": ( + "
<i>NOTE: These can be set in Anatomy.yaml" + " within `delivery` key.</i>
" + ) + }) + + items.append({ + "type": "enumerator", + "name": "__new_anatomies__", + "data": new_anatomies, + "value": first + }) + + return { + "items": items, + "title": title + } + + def launch(self, session, entities, event): + if "values" not in event["data"]: + return + + values = event["data"]["values"] + skipped = values.pop("__skipped__") + if skipped: + return None + + component_names = [] + location_path = values.pop("__location_path__") + anatomy_name = values.pop("__new_anatomies__") + project_name = values.pop("__project_name__") + + for key, value in values.items(): + if value is True: + component_names.append(key) + + if not component_names: + return None + + location_path = os.path.normpath(location_path.strip()) + if location_path and not os.path.exists(location_path): + return { + "success": False, + "message": ( + "Entered location path does not exists. \"{}\"" + ).format(location_path) + } + + self.db_con.install() + self.db_con.Session["AVALON_PROJECT"] = project_name + + components = [] + repres_to_deliver = [] + for entity in entities: + asset = entity["asset"] + subset_name = asset["name"] + version = entity["version"] + + parent = asset["parent"] + parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) + if not parent_mongo_id: + # TODO log error (much better) + self.log.warning(( + "Seems like entity <{}> is not synchronized to avalon" + ).format(parent["name"])) + continue + + parent_mongo_id = ObjectId(parent_mongo_id) + subset_ent = self.db_con.find_one({ + "type": "subset", + "parent": parent_mongo_id, + "name": subset_name + }) + + version_ent = self.db_con.find_one({ + "type": "version", + "name": version, + "parent": subset_ent["_id"] + }) + + repre_ents = self.db_con.find({ + "type": "representation", + "parent": version_ent["_id"] + }) + + repres_by_name = {} + for repre in repre_ents: + repre_name = repre["name"] + repres_by_name[repre_name] = repre + + for component in entity["components"]: + comp_name = component["name"] + if comp_name not in component_names: + continue + + repre = repres_by_name.get(comp_name) + repres_to_deliver.append(repre) + + src_dst_files = {} + anatomy = Anatomy(project_name) + for repre in repres_to_deliver: + # Get destination repre path + anatomy_data = copy.deepcopy(repre["context"]) + if location_path: + anatomy_data["root"] = location_path + else: + anatomy_data["root"] = pipeline.registered_root() + + # Get source repre path + repre_path = self.path_from_represenation(repre) + # TODO add backup solution where root of path from component + # is repalced with AVALON_PROJECTS root + + if repre_path and os.path.exists(repre_path): + self.process_single_file( + repre_path, anatomy, anatomy_name, anatomy_data + ) + + else: + self.process_sequence( + repre_path, anatomy, anatomy_name, anatomy_data + ) + + self.db_con.uninstall() + + def process_single_file( + self, repre_path, anatomy, anatomy_name, anatomy_data + ): + anatomy_filled = anatomy.format(anatomy_data) + delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) + if not delivery_path: + # TODO log error! 
- missing keys in anatomy + return + + delivery_folder = os.path.dirname(delivery_path) + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + self.copy_file(repre_path, delivery_path) + + def process_sequence( + self, repre_path, anatomy, anatomy_name, anatomy_data + ): + dir_path, file_name = os.path.split(repre_path) + if not os.path.exists(dir_path): + # TODO log if folder don't exist + return + + base_name, ext = os.path.splitext(file_name) + file_name_items = None + if "#" in base_name: + file_name_items = [part for part in base_name.split("#") if part] + + elif "%" in base_name: + file_name_items = base_name.split("%") + + if not file_name_items: + # TODO log if file does not exists + return + + src_collections, remainder = clique.assemble(os.listdir(dir_path)) + src_collection = None + for col in src_collections: + if col.tail != ext: + continue + + # skip if collection don't have same basename + if not col.head.startswith(file_name_items[0]): + continue + + src_collection = col + break + + if src_collection is None: + # TODO log error! + return + + anatomy_data["frame"] = "{frame}" + anatomy_filled = anatomy.format(anatomy_data) + delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) + if not delivery_path: + # TODO log error! - missing keys in anatomy + return + + delivery_folder = os.path.dirname(delivery_path) + dst_head, dst_tail = delivery_path.split("{frame}") + dst_padding = src_collection.padding + dst_collection = clique.Collection( + head=dst_head, + tail=dst_tail, + padding=dst_padding + ) + + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + src_head = src_collection.head + src_tail = src_collection.tail + for index in src_collection.indexes: + src_padding = src_collection.format("{padding}") % index + src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + + dst_padding = dst_collection.format("{padding}") % index + dst_file_name = "{}{}{}".format(dst_head, dst_padding, dst_tail) + + self.copy_file(src, dst) + + def path_from_represenation(self, representation): + try: + template = representation["data"]["template"] + + except KeyError: + return None + + try: + context = representation["context"] + context["root"] = os.environ.get("AVALON_PROJECTS") or "" + path = pipeline.format_template_with_optional_keys( + context, template + ) + + except KeyError: + # Template references unavailable data + return None + + if os.path.exists(path): + return os.path.normpath(path) + + def copy_file(self, src_path, dst_path): + try: + filelink.create( + src_path, + dst_path, + filelink.HARDLINK + ) + except OSError: + shutil.copyfile(src_path, dst_path) + +def register(session, plugins_presets={}): + '''Register plugin. 
Called when used as an plugin.''' + + Delivery(session, plugins_presets).register() From 830373f3d5c35c298285236a3a36b9eed0aaf5c4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:19:35 +0100 Subject: [PATCH 072/393] added delivery icon --- pype/ftrack/actions/action_delivery.py | 6 ++--- res/ftrack/action_icons/Delivery.svg | 34 ++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 res/ftrack/action_icons/Delivery.svg diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index e23e35f91c..572a9bc8e0 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -24,9 +24,9 @@ class Delivery(BaseAction): description = "Deliver data to client" #: roles that are allowed to register this action role_list = ["Pypeclub", "Administrator", "Project manager"] - # icon = '{}/ftrack/action_icons/TestAction.svg'.format( - # os.environ.get('PYPE_STATICS_SERVER', '') - # ) + icon = '{}/ftrack/action_icons/Delivery.svg'.format( + os.environ.get('PYPE_STATICS_SERVER', '') + ) db_con = DbConnector() diff --git a/res/ftrack/action_icons/Delivery.svg b/res/ftrack/action_icons/Delivery.svg new file mode 100644 index 0000000000..3380487c31 --- /dev/null +++ b/res/ftrack/action_icons/Delivery.svg @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + From cbbb074a25c929582a26807691bf00a27c7325a4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:24:35 +0100 Subject: [PATCH 073/393] fix source filepath --- pype/ftrack/actions/action_delivery.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index 572a9bc8e0..ad3d6ef6cc 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -228,7 +228,6 @@ class Delivery(BaseAction): self.db_con.install() self.db_con.Session["AVALON_PROJECT"] = project_name - components = [] repres_to_deliver = [] for entity in entities: asset = entity["asset"] @@ -275,7 +274,6 @@ class Delivery(BaseAction): repre = repres_by_name.get(comp_name) repres_to_deliver.append(repre) - src_dst_files = {} anatomy = Anatomy(project_name) for repre in repres_to_deliver: # Get destination repre path @@ -302,6 +300,8 @@ class Delivery(BaseAction): self.db_con.uninstall() + return True + def process_single_file( self, repre_path, anatomy, anatomy_name, anatomy_data ): @@ -378,9 +378,12 @@ class Delivery(BaseAction): for index in src_collection.indexes: src_padding = src_collection.format("{padding}") % index src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + src = os.path.normpath( + os.path.join(dir_path, src_file_name) + ) dst_padding = dst_collection.format("{padding}") % index - dst_file_name = "{}{}{}".format(dst_head, dst_padding, dst_tail) + dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) self.copy_file(src, dst) From 5e31299c2441ba57c323245b067062279817f24d Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 8 Jan 2020 17:38:03 +0100 Subject: [PATCH 074/393] add resolution and fps to anatomy keys --- pype/plugins/global/publish/collect_templates.py | 5 ++++- pype/plugins/global/publish/extract_review.py | 4 +++- pype/plugins/global/publish/integrate_new.py | 5 ++++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 48623eec22..d57d416dea 100644 --- 
a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -76,7 +76,10 @@ class CollectTemplates(pyblish.api.InstancePlugin): "subset": subset_name, "version": version_number, "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} + "representation": "TEMP", + "resolution_width": instance.data.get("resolutionWidth", ""), + "resolution_height": instance.data.get("resolutionHeight", ""), + "fps": str(instance.data.get("fps", ""))} instance.data["template"] = template instance.data["assumedTemplateData"] = template_data diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index f621df0c66..c75bb488a2 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -249,7 +249,9 @@ class ExtractReview(pyblish.api.InstancePlugin): 'files': repr_file, "tags": new_tags, "outputName": name, - "codec": codec_args + "codec": codec_args, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height }) if repre_new.get('preview'): repre_new.pop("preview") diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index faade613f2..ee18347703 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -267,7 +267,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "family": instance.data['family'], "subset": subset["name"], "version": int(version["name"]), - "hierarchy": hierarchy} + "hierarchy": hierarchy, + "resolution_width": repre.get("resolutionWidth", ""), + "resolution_height": repre.get("resolutionHeight", ""), + "fps": str(instance.data.get("fps", ""))} files = repre['files'] if repre.get('stagingDir'): From cfd9823abc0c8109f4c5e18e2a6f1a55e2977047 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:41:35 +0100 Subject: [PATCH 075/393] replaced {frame} with <> --- pype/ftrack/actions/action_delivery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index ad3d6ef6cc..22fb15198b 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -354,7 +354,7 @@ class Delivery(BaseAction): # TODO log error! 
return - anatomy_data["frame"] = "{frame}" + anatomy_data["frame"] = "<>" anatomy_filled = anatomy.format(anatomy_data) delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) if not delivery_path: @@ -362,7 +362,7 @@ class Delivery(BaseAction): return delivery_folder = os.path.dirname(delivery_path) - dst_head, dst_tail = delivery_path.split("{frame}") + dst_head, dst_tail = delivery_path.split("<>") dst_padding = src_collection.padding dst_collection = clique.Collection( head=dst_head, From ccd491d99e436c2d9ea91a4b58b0f9115ddb2f19 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 8 Jan 2020 18:24:35 +0100 Subject: [PATCH 076/393] add remapping from mounted to network path to render publish job --- pype/plugins/global/publish/submit_publish_job.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 2a254b015c..9c72ece73c 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -21,6 +21,12 @@ def _get_script(): if module_path.endswith(".pyc"): module_path = module_path[:-len(".pyc")] + ".py" + module_path = os.path.normpath(module_path) + mount_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_MOUNT']) + network_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_PATH']) + + module_path = module_path.replace(mount_root, network_root) + return module_path @@ -164,6 +170,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): output_dir = instance.data["outputDir"] metadata_path = os.path.join(output_dir, metadata_filename) + metadata_path = os.path.normpath(metadata_path) + mount_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_MOUNT']) + network_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_PATH']) + + metadata_path = metadata_path.replace(mount_root, network_root) + # Generate the payload for Deadline submission payload = { "JobInfo": { From 3cf559afba5058eae3e96cbb1d873e1b7403affe Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 19:21:15 +0100 Subject: [PATCH 077/393] better reporting and logging --- pype/ftrack/actions/action_delivery.py | 144 +++++++++++++++++++++---- 1 file changed, 121 insertions(+), 23 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index 22fb15198b..e698c371e1 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -1,9 +1,12 @@ import os import copy import shutil +import collections +import string import clique from bson.objectid import ObjectId + from avalon import pipeline from avalon.vendor import filelink from avalon.tools.libraryloader.io_nonsingleton import DbConnector @@ -162,10 +165,17 @@ class Delivery(BaseAction): "type": "label" }) + items.append({ + "type": "label", + "value": ( + "NOTE: It is possible to replace `root` key in anatomy." + ) + }) + items.append({ "type": "text", "name": "__location_path__", - "empty_text": "Type location path here..." 
+ "empty_text": "Type location path here...(Optional)" }) items.append(item_splitter) @@ -199,6 +209,8 @@ class Delivery(BaseAction): if "values" not in event["data"]: return + self.report_items = collections.defaultdict(list) + values = event["data"]["values"] skipped = values.pop("__skipped__") if skipped: @@ -214,7 +226,10 @@ class Delivery(BaseAction): component_names.append(key) if not component_names: - return None + return { + "success": True, + "message": "Not selected components to deliver." + } location_path = os.path.normpath(location_path.strip()) if location_path and not os.path.exists(location_path): @@ -236,14 +251,24 @@ class Delivery(BaseAction): parent = asset["parent"] parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) - if not parent_mongo_id: - # TODO log error (much better) - self.log.warning(( - "Seems like entity <{}> is not synchronized to avalon" - ).format(parent["name"])) - continue + if parent_mongo_id: + parent_mongo_id = ObjectId(parent_mongo_id) + else: + asset_ent = self.db_con.find_one({ + "type": "asset", + "data.ftrackId": parent["id"] + }) + if not asset_ent: + ent_path = "/".join( + [ent["name"] for ent in parent["link"]] + ) + msg = "Not synchronized entities to avalon" + self.report_items[msg].append(ent_path) + self.log.warning("{} <{}>".format(msg, ent_path)) + continue + + parent_mongo_id = asset_ent["_id"] - parent_mongo_id = ObjectId(parent_mongo_id) subset_ent = self.db_con.find_one({ "type": "subset", "parent": parent_mongo_id, @@ -283,6 +308,50 @@ class Delivery(BaseAction): else: anatomy_data["root"] = pipeline.registered_root() + anatomy_filled = anatomy.format(anatomy_data) + test_path = ( + anatomy_filled + .get("delivery", {}) + .get(anatomy_name) + ) + + if not test_path: + msg = ( + "Missing keys in Representation's context" + " for anatomy template \"{}\"." + ).format(anatomy_name) + + all_anatomies = anatomy.format_all(anatomy_data) + result = None + for anatomies in all_anatomies.values(): + for key, temp in anatomies.get("delivery", {}).items(): + if key != anatomy_name: + continue + + result = temp + break + + # TODO log error! - missing keys in anatomy + if result: + missing_keys = [ + key[1] for key in string.Formatter().parse(result) + if key[1] is not None + ] + else: + missing_keys = ["unknown"] + + keys = ", ".join(missing_keys) + sub_msg = ( + "Representation: {}
- Missing keys: \"{}\"
" + ).format(str(repre["_id"]), keys) + self.report_items[msg].append(sub_msg) + self.log.warning( + "{} Representation: \"{}\" Filled: <{}>".format( + msg, str(repre["_id"]), str(result) + ) + ) + continue + # Get source repre path repre_path = self.path_from_represenation(repre) # TODO add backup solution where root of path from component @@ -300,17 +369,13 @@ class Delivery(BaseAction): self.db_con.uninstall() - return True + return self.report() def process_single_file( self, repre_path, anatomy, anatomy_name, anatomy_data ): anatomy_filled = anatomy.format(anatomy_data) - delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) - if not delivery_path: - # TODO log error! - missing keys in anatomy - return - + delivery_path = anatomy_filled["delivery"][anatomy_name] delivery_folder = os.path.dirname(delivery_path) if not os.path.exists(delivery_folder): os.makedirs(delivery_folder) @@ -321,9 +386,6 @@ class Delivery(BaseAction): self, repre_path, anatomy, anatomy_name, anatomy_data ): dir_path, file_name = os.path.split(repre_path) - if not os.path.exists(dir_path): - # TODO log if folder don't exist - return base_name, ext = os.path.splitext(file_name) file_name_items = None @@ -334,7 +396,9 @@ class Delivery(BaseAction): file_name_items = base_name.split("%") if not file_name_items: - # TODO log if file does not exists + msg = "Source file was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) return src_collections, remainder = clique.assemble(os.listdir(dir_path)) @@ -352,15 +416,15 @@ class Delivery(BaseAction): if src_collection is None: # TODO log error! + msg = "Source collection of files was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) return anatomy_data["frame"] = "<>" anatomy_filled = anatomy.format(anatomy_data) - delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) - if not delivery_path: - # TODO log error! - missing keys in anatomy - return + delivery_path = anatomy_filled["delivery"][anatomy_name] delivery_folder = os.path.dirname(delivery_path) dst_head, dst_tail = delivery_path.split("<>") dst_padding = src_collection.padding @@ -418,6 +482,40 @@ class Delivery(BaseAction): except OSError: shutil.copyfile(src_path, dst_path) + def report(self): + items = [] + title = "Delivery report" + for msg, _items in self.report_items.items(): + if not _items: + continue + + if items: + items.append({"type": "label", "value": "---"}) + + items.append({ + "type": "label", + "value": "# {}".format(msg) + }) + if isinstance(_items, str): + _items = [_items] + items.append({ + "type": "label", + "value": '
<p>{}</p>'.format("<br>
".join(_items)) + }) + + if not items: + return { + "success": True, + "message": "Delivery Finished" + } + + return { + "items": items, + "title": title, + "success": False, + "message": "Delivery Finished" + } + def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' From bf24580b6f87ded4672661fb055a85ba92fd8b78 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 19:31:58 +0100 Subject: [PATCH 078/393] fix root path --- pype/ftrack/actions/action_delivery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index e698c371e1..9edb7a5964 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -171,7 +171,7 @@ class Delivery(BaseAction): "NOTE: It is possible to replace `root` key in anatomy." ) }) - + items.append({ "type": "text", "name": "__location_path__", @@ -306,7 +306,7 @@ class Delivery(BaseAction): if location_path: anatomy_data["root"] = location_path else: - anatomy_data["root"] = pipeline.registered_root() + anatomy_data["root"] = os.environ.get("AVALON_PROJECTS") or "" anatomy_filled = anatomy.format(anatomy_data) test_path = ( From e6dc7c29a3dde61a8d27c03a862ef2dfce7a71c7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 8 Jan 2020 22:13:48 +0100 Subject: [PATCH 079/393] feat(): --- .../global/publish/collect_filesequences.py | 249 +++++++++++++----- .../global/publish/submit_publish_job.py | 13 + 2 files changed, 192 insertions(+), 70 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 6a59f5dffc..1214657856 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -54,10 +54,6 @@ def collect(root, patterns=[pattern], minimum_items=1) - # Ignore any remainders - if remainder: - print("Skipping remainder {}".format(remainder)) - # Exclude any frames outside start and end frame. 
for collection in collections: for index in list(collection.indexes): @@ -71,7 +67,7 @@ def collect(root, # Keep only collections that have at least a single frame collections = [c for c in collections if c.indexes] - return collections + return collections, remainder class CollectRenderedFrames(pyblish.api.ContextPlugin): @@ -119,8 +115,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): try: data = json.load(f) except Exception as exc: - self.log.error("Error loading json: " - "{} - Exception: {}".format(path, exc)) + self.log.error( + "Error loading json: " + "{} - Exception: {}".format(path, exc) + ) raise cwd = os.path.dirname(path) @@ -156,7 +154,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): resolution_height = instance.get("resolutionHeight", 1080) lut_path = instance.get("lutPath", None) - else: # Search in directory data = dict() @@ -167,14 +164,17 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if regex: self.log.info("Using regex: {}".format(regex)) - collections = collect(root=root, - regex=regex, - exclude_regex=data.get("exclude_regex"), - frame_start=data.get("frameStart"), - frame_end=data.get("frameEnd")) + collections, remainder = collect( + root=root, + regex=regex, + exclude_regex=data.get("exclude_regex"), + frame_start=data.get("frameStart"), + frame_end=data.get("frameEnd"), + ) self.log.info("Found collections: {}".format(collections)) + """ if data.get("subset"): # If subset is provided for this json then it must be a single # collection. @@ -182,81 +182,190 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.error("Forced subset can only work with a single " "found sequence") raise RuntimeError("Invalid sequence") + """ fps = data.get("fps", 25) + if data.get("user"): + context.data["user"] = data["user"] + # Get family from the data families = data.get("families", ["render"]) if "render" not in families: families.append("render") if "ftrack" not in families: families.append("ftrack") - if "review" not in families: - families.append("review") if "write" in instance_family: families.append("write") - for collection in collections: - instance = context.create_instance(str(collection)) - self.log.info("Collection: %s" % list(collection)) + if data.get("attachTo"): + # we need to attach found collections to existing + # subset version as review represenation. 
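# Editorial aside, an assumed illustration of the metadata shape consumed here: "attachTo": [{"subset": "renderMain", "version": 3, "family": "render"}]; each such entry becomes its own review instance pointing at the rendered collections.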
- # Ensure each instance gets a unique reference to the data + for attach in data.get("attachTo"): + self.log.info( + "Attaching render {}:v{}".format( + attach["subset"], attach["version"])) + instance = context.create_instance( + attach["subset"]) + instance.data.update( + { + "name": attach["subset"], + "version": attach["version"], + "family": 'review', + "families": ['review', 'ftrack'], + "asset": data.get( + "asset", api.Session["AVALON_ASSET"]), + "stagingDir": root, + "frameStart": data.get("frameStart"), + "frameEnd": data.get("frameEnd"), + "fps": fps, + "source": data.get("source", ""), + "pixelAspect": pixel_aspect + }) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + for collection in collections: + self.log.info( + " - adding representation: {}".format( + str(collection)) + ) + ext = collection.tail.lstrip(".") + + representation = { + "name": ext, + "ext": "{}".format(ext), + "files": list(collection), + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append( + representation) + + elif data.get("subset"): + # if we have subset - add all collections and known + # reminder as representations + + self.log.info( + "Adding representations to subset {}".format( + data.get("subset"))) + + instance = context.create_instance(data.get("subset")) data = copy.deepcopy(data) - # If no subset provided, get it from collection's head - subset = data.get("subset", collection.head.rstrip("_. ")) - - # If no start or end frame provided, get it from collection - indices = list(collection.indexes) - start = data.get("frameStart", indices[0]) - end = data.get("frameEnd", indices[-1]) - - self.log.debug("Collected pixel_aspect:\n" - "{}".format(pixel_aspect)) - self.log.debug("type pixel_aspect:\n" - "{}".format(type(pixel_aspect))) - - # root = os.path.normpath(root) - # self.log.info("Source: {}}".format(data.get("source", ""))) - - ext = list(collection)[0].split('.')[-1] - - instance.data.update({ - "name": str(collection), - "family": families[0], # backwards compatibility / pyblish - "families": list(families), - "subset": subset, - "asset": data.get("asset", api.Session["AVALON_ASSET"]), - "stagingDir": root, - "frameStart": start, - "frameEnd": end, - "fps": fps, - "source": data.get('source', ''), - "pixelAspect": pixel_aspect, - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height - }) - if lut_path: - instance.data.update({"lutPath": lut_path}) - instance.append(collection) - instance.context.data['fps'] = fps + instance.data.update( + { + "name": data.get("subset"), + "family": families[0], + "families": list(families), + "subset": data.get("subset"), + "asset": data.get( + "asset", api.Session["AVALON_ASSET"]), + "stagingDir": root, + "frameStart": data.get("frameStart"), + "frameEnd": data.get("frameEnd"), + "fps": fps, + "source": data.get("source", ""), + "pixelAspect": pixel_aspect, + } + ) if "representations" not in instance.data: instance.data["representations"] = [] - representation = { - 'name': ext, - 'ext': '{}'.format(ext), - 'files': list(collection), - "stagingDir": root, - "anatomy_template": "render", - "fps": fps, - "tags": ['review'] - } - instance.data["representations"].append(representation) + for collection in collections: + self.log.info(" - {}".format(str(collection))) - if data.get('user'): - context.data["user"] = data['user'] + ext = collection.tail.lstrip(".") - self.log.debug("Collected instance:\n" - 
"{}".format(pformat(instance.data))) + representation = { + "name": ext, + "ext": "{}".format(ext), + "files": list(collection), + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append( + representation) + + # process reminders + for rem in remainder: + # add only known types to representation + if rem.split(".")[-1] in ['mov', 'jpg', 'mp4']: + self.log.info(" . {}".format(rem)) + representation = { + "name": rem.split(".")[-1], + "ext": "{}".format(rem.split(".")[-1]), + "files": rem, + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append( + representation) + + else: + # we have no subset so we take every collection and create one + # from it + for collection in collections: + instance = context.create_instance(str(collection)) + self.log.info("Creating subset from: %s" % str(collection)) + + # Ensure each instance gets a unique reference to the data + data = copy.deepcopy(data) + + # If no subset provided, get it from collection's head + subset = data.get("subset", collection.head.rstrip("_. ")) + + # If no start or end frame provided, get it from collection + indices = list(collection.indexes) + start = data.get("frameStart", indices[0]) + end = data.get("frameEnd", indices[-1]) + + ext = list(collection)[0].split(".")[-1] + + if "review" not in families: + families.append("review") + + instance.data.update( + { + "name": str(collection), + "family": families[0], # backwards compatibility + "families": list(families), + "subset": subset, + "asset": data.get( + "asset", api.Session["AVALON_ASSET"]), + "stagingDir": root, + "frameStart": start, + "frameEnd": end, + "fps": fps, + "source": data.get("source", ""), + "pixelAspect": pixel_aspect, + } + ) + if lut_path: + instance.data.update({"lutPath": lut_path}) + + instance.append(collection) + instance.context.data["fps"] = fps + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": "{}".format(ext), + "files": list(collection), + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append(representation) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 2a254b015c..e7d5fe3147 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -282,6 +282,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): relative_path = os.path.relpath(source, api.registered_root()) source = os.path.join("{root}", relative_path).replace("\\", "/") + # find subsets and version to attach render to + attach_to = instance.data.get("attachTo") + attach_subset_versions = [] + if attach_to: + for subset in attach_to: + for instance in context: + if instance.data["subset"] != subset["subset"]: + continue + attach_subset_versions.append( + {"version": instance.data["version"], + "subset": subset["subset"], + "family": subset["family"]}) + # Write metadata for publish job metadata = { "asset": asset, From b2dfb6c95b77bf327291eccc6b50e9937e4c71a7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 9 Jan 2020 10:36:35 +0100 Subject: [PATCH 080/393] be specific about task custom attributes to avoid asset version's cust attrs --- pype/ftrack/events/event_sync_to_avalon.py | 11 +++++++---- pype/ftrack/lib/avalon_sync.py | 4 
++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 606866aba2..91355c6068 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1438,9 +1438,11 @@ class SyncToAvalonEvent(BaseEvent): if attr["entity_type"] != ent_info["entityType"]: continue - if ent_info["entityType"] != "show": - if attr["object_type_id"] != ent_info["objectTypeId"]: - continue + if ( + ent_info["entityType"] == "task" and + attr["object_type_id"] != ent_info["objectTypeId"] + ): + continue configuration_id = attr["id"] entity_type_conf_ids[entity_type] = configuration_id @@ -1712,7 +1714,8 @@ class SyncToAvalonEvent(BaseEvent): if ca_ent_type == "show": cust_attrs_by_obj_id[ca_ent_type][key] = cust_attr - else: + + elif ca_ent_type == "task": obj_id = cust_attr["object_type_id"] cust_attrs_by_obj_id[obj_id][key] = cust_attr diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 064ea1adb8..5839d36e64 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -699,7 +699,7 @@ class SyncEntitiesFactory: if ca_ent_type == "show": avalon_attrs[ca_ent_type][key] = cust_attr["default"] avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"] - else: + elif ca_ent_type == "task": obj_id = cust_attr["object_type_id"] avalon_attrs[obj_id][key] = cust_attr["default"] avalon_attrs_ca_id[obj_id][key] = cust_attr["id"] @@ -708,7 +708,7 @@ class SyncEntitiesFactory: if ca_ent_type == "show": attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"] attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"] - else: + elif ca_ent_type == "task": obj_id = cust_attr["object_type_id"] attrs_per_entity_type[obj_id][key] = cust_attr["default"] attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"] From 64a0360ce90a699d86c4ee166c36268f9857dae8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 9 Jan 2020 11:08:35 +0100 Subject: [PATCH 081/393] fix(global): letter box not created properly --- pype/plugins/global/publish/extract_review.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 28eb0289fa..4eb7fa16ed 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -180,7 +180,11 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpet_height = int( resolution_height * pixel_aspect) else: - lb /= scale_factor + # TODO: it might still be failing in some cases + if resolution_ratio != delivery_ratio: + lb /= scale_factor + else: + lb /= pixel_aspect output_args.append( "-filter:v scale={0}x{1}:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{2})))/2):iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black".format(ffmpet_width, ffmpet_height, lb)) From 69015fb7fc08970c8a9619466556eb02f8a76ab7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 9 Jan 2020 11:15:57 +0100 Subject: [PATCH 082/393] fix(nuke): updating nuke.lib and review data mov --- pype/nuke/lib.py | 121 ++++++++++++------ .../nuke/publish/extract_review_data_mov.py | 1 - 2 files changed, 81 insertions(+), 41 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index c468343545..9ded8b75d0 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1205,7 +1205,7 @@ class Exporter_review: Args: klass (pyblish.plugin): 
pyblish plugin parent - instance (pyblish.context.instance): + instance (pyblish.instance): instance of pyblish context """ _temp_nodes = [] @@ -1298,6 +1298,11 @@ class Exporter_review: return ipn + def clean_nodes(self): + for node in self._temp_nodes: + nuke.delete(node) + self.log.info("Deleted nodes...") + class Exporter_review_lut(Exporter_review): """ @@ -1305,6 +1310,7 @@ class Exporter_review_lut(Exporter_review): Args: klass (pyblish.plugin): pyblish plugin parent + instance (pyblish.instance): instance of pyblish context """ @@ -1319,6 +1325,12 @@ class Exporter_review_lut(Exporter_review): # initialize parent class Exporter_review.__init__(self, klass, instance) + # deal with now lut defined in viewer lut + if hasattr(klass, "viewer_lut_raw"): + self.viewer_lut_raw = klass.viewer_lut_raw + else: + self.viewer_lut_raw = False + self.name = name or "baked_lut" self.ext = ext or "cube" self.cube_size = cube_size or 32 @@ -1331,7 +1343,8 @@ class Exporter_review_lut(Exporter_review): self.log.info("File info was set...") self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") + self.path = os.path.join( + self.staging_dir, self.file).replace("\\", "/") def generate_lut(self): # ---------- start nodes creation @@ -1353,13 +1366,14 @@ class Exporter_review_lut(Exporter_review): self.previous_node = ipn self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + if not self.viewer_lut_raw: + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) # GenerateLUT gen_lut_node = nuke.createNode("GenerateLUT") @@ -1388,9 +1402,7 @@ class Exporter_review_lut(Exporter_review): self.log.debug("Representation... `{}`".format(self.data)) # ---------- Clean up - for node in self._temp_nodes: - nuke.delete(node) - self.log.info("Deleted nodes...") + self.clean_nodes() return self.data @@ -1401,7 +1413,7 @@ class Exporter_review_mov(Exporter_review): Args: klass (pyblish.plugin): pyblish plugin parent - + instance (pyblish.instance): instance of pyblish context """ def __init__(self, @@ -1419,6 +1431,12 @@ class Exporter_review_mov(Exporter_review): else: self.nodes = {} + # deal with now lut defined in viewer lut + if hasattr(klass, "viewer_lut_raw"): + self.viewer_lut_raw = klass.viewer_lut_raw + else: + self.viewer_lut_raw = False + self.name = name or "baked" self.ext = ext or "mov" @@ -1428,7 +1446,31 @@ class Exporter_review_mov(Exporter_review): self.log.info("File info was set...") self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") + self.path = os.path.join( + self.staging_dir, self.file).replace("\\", "/") + + def render(self, render_node_name): + self.log.info("Rendering... ") + # Render Write node + nuke.execute( + render_node_name, + int(self.first_frame), + int(self.last_frame)) + + self.log.info("Rendered...") + + def save_file(self): + with anlib.maintained_selection(): + self.log.info("Saving nodes as file... 
") + # select temp nodes + anlib.select_nodes(self._temp_nodes) + # create nk path + path = os.path.splitext(self.path)[0] + ".nk" + # save file to the path + nuke.nodeCopy(path) + + self.log.info("Nodes exported...") + return path def generate_mov(self, farm=False): # ---------- start nodes creation @@ -1454,13 +1496,14 @@ class Exporter_review_mov(Exporter_review): self.previous_node = ipn self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - # OCIODisplay node - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + if not self.viewer_lut_raw: + # OCIODisplay node + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) # Write node write_node = nuke.createNode("Write") @@ -1476,28 +1519,26 @@ class Exporter_review_mov(Exporter_review): # ---------- end nodes creation - if not farm: - self.log.info("Rendering... ") - # Render Write node - nuke.execute( - write_node.name(), - int(self.first_frame), - int(self.last_frame)) - - self.log.info("Rendered...") - - # ---------- generate representation data - self.get_representation_data( - tags=["review", "delete"], - range=True - ) + # ---------- render or save to nk + if farm: + path_nk = self.save_file() + self.data.update({ + "bakeScriptPath": path_nk, + "bakeWriteNodeName": write_node.name(), + "bakeRenderPath": self.path + }) + else: + self.render(write_node.name()) + # ---------- generate representation data + self.get_representation_data( + tags=["review", "delete"], + range=True + ) self.log.debug("Representation... 
`{}`".format(self.data))

-        # ---------- Clean up
-        for node in self._temp_nodes:
-            nuke.delete(node)
-        self.log.info("Deleted nodes...")
+        #---------- Clean up
+        self.clean_nodes()

         return self.data

diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py
index 585bd3f108..2208f8fa31 100644
--- a/pype/plugins/nuke/publish/extract_review_data_mov.py
+++ b/pype/plugins/nuke/publish/extract_review_data_mov.py
@@ -1,5 +1,4 @@
 import os
-import nuke
 import pyblish.api
 from avalon.nuke import lib as anlib
 from pype.nuke import lib as pnlib

From 235079038965f1f3e038b60487e07447ed0bf039 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 9 Jan 2020 12:02:04 +0100
Subject: [PATCH 083/393] remove obsolete logger

---
 pype/plugins/nuke/create/create_read.py  | 3 ---
 pype/plugins/nuke/create/create_write.py | 4 ----
 2 files changed, 7 deletions(-)

diff --git a/pype/plugins/nuke/create/create_read.py b/pype/plugins/nuke/create/create_read.py
index 87bb45a6ad..1aa7e68746 100644
--- a/pype/plugins/nuke/create/create_read.py
+++ b/pype/plugins/nuke/create/create_read.py
@@ -6,9 +6,6 @@ from pype import api as pype
 
 import nuke
 
-log = pype.Logger().get_logger(__name__, "nuke")
-
-
 class CrateRead(avalon.nuke.Creator):
     # change this to template preset
     name = "ReadCopy"

diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py
index 042826d4d9..f522c50511 100644
--- a/pype/plugins/nuke/create/create_write.py
+++ b/pype/plugins/nuke/create/create_write.py
@@ -7,10 +7,6 @@ from pypeapp import config
 
 import nuke
 
-
-log = pype.Logger().get_logger(__name__, "nuke")
-
-
 class CreateWriteRender(plugin.PypeCreator):
     # change this to template preset
     name = "WriteRender"

From 3a4a6782abdf74e9278c029c0291abd889b1aa74 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 9 Jan 2020 15:07:44 +0100
Subject: [PATCH 084/393] pep8 class names

---
 pype/nuke/lib.py                                     | 10 +++++-----
 pype/plugins/nuke/publish/extract_review_data_lut.py |  2 +-
 pype/plugins/nuke/publish/extract_review_data_mov.py |  2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 9ded8b75d0..4faea1da36 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -1199,7 +1199,7 @@ class BuildWorkfile(WorkfileSettings):
         self.ypos -= (self.ypos_size * multiply) + self.ypos_gap
 
 
-class Exporter_review:
+class ExporterReview:
     """
     Base class object for generating review data from Nuke
 
@@ -1304,7 +1304,7 @@ class Exporter_review:
         self.log.info("Deleted nodes...")
 
 
-class Exporter_review_lut(Exporter_review):
+class ExporterReviewLut(ExporterReview):
     """
     Generator object for review lut from Nuke
 
@@ -1323,7 +1323,7 @@ class Exporter_review_lut(Exporter_review):
                  lut_size=None,
                  lut_style=None):
         # initialize parent class
-        Exporter_review.__init__(self, klass, instance)
+        ExporterReview.__init__(self, klass, instance)
 
         # deal with now lut defined in viewer lut
         if hasattr(klass, "viewer_lut_raw"):
@@ -1407,7 +1407,7 @@ class Exporter_review_lut(Exporter_review):
         return self.data
 
 
-class Exporter_review_mov(Exporter_review):
+class ExporterReviewMov(ExporterReview):
     """
     Metaclass for generating review mov files
 
@@ -1423,7 +1423,7 @@ class Exporter_review_mov(Exporter_review):
                  ext=None,
                  ):
         # initialize parent class
-        Exporter_review.__init__(self, klass, instance)
+        ExporterReview.__init__(self, klass, instance)
 
         # passing presets for nodes to self
         if hasattr(klass, "nodes"):
diff --git
a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index f5fc3e59db..4373309363 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -39,7 +39,7 @@ class ExtractReviewDataLut(pype.api.Extractor): # generate data with anlib.maintained_selection(): - exporter = pnlib.Exporter_review_lut( + exporter = pnlib.ExporterReviewLut( self, instance ) data = exporter.generate_lut() diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 2208f8fa31..333774bcd7 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -39,7 +39,7 @@ class ExtractReviewDataMov(pype.api.Extractor): # generate data with anlib.maintained_selection(): - exporter = pnlib.Exporter_review_mov( + exporter = pnlib.ExporterReviewMov( self, instance) if "render.farm" in families: From 5ace134b646dfb3a756859984236807a9ddd47aa Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 9 Jan 2020 15:24:51 +0100 Subject: [PATCH 085/393] add pathlib path resolve --- pype/plugins/global/publish/integrate_new.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index faade613f2..9bfaf2e417 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -7,6 +7,7 @@ import errno import pyblish.api from avalon import api, io from avalon.vendor import filelink +from pathlib import Path # this is needed until speedcopy for linux is fixed if sys.platform == "win32": from speedcopy import copyfile @@ -468,8 +469,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: None """ - src = os.path.normpath(src) - dst = os.path.normpath(dst) + src = Path(src).resolve() + dst = Path(dst).resolve() self.log.debug("Copying file .. {} -> {}".format(src, dst)) dirname = os.path.dirname(dst) @@ -490,6 +491,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def hardlink_file(self, src, dst): dirname = os.path.dirname(dst) + src = Path(src).resolve() + dst = Path(dst).resolve() try: os.makedirs(dirname) except OSError as e: From b3321a92ee4c0b05df0bd3f08684fcd632696f80 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 9 Jan 2020 23:06:57 +0100 Subject: [PATCH 086/393] fix(global): pathlib changed to pathlib2 --- pype/plugins/global/publish/integrate_new.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9bfaf2e417..c2812880c7 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -7,7 +7,7 @@ import errno import pyblish.api from avalon import api, io from avalon.vendor import filelink -from pathlib import Path +from pathlib2 import Path # this is needed until speedcopy for linux is fixed if sys.platform == "win32": from speedcopy import copyfile @@ -469,8 +469,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: None """ + src = Path(src).resolve() - dst = Path(dst).resolve() + drive, _path = os.path.splitdrive(dst) + unc = Path(drive).resolve() + dst = str(unc / _path) self.log.debug("Copying file .. 
{} -> {}".format(src, dst)) dirname = os.path.dirname(dst) From 218405841c3540a66c7ac84d02f9e371ff115de1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 10 Jan 2020 00:15:58 +0100 Subject: [PATCH 087/393] fix(global, nuke): updating deadline submission --- pype/nuke/lib.py | 2 +- .../global/publish/collect_filesequences.py | 10 +- .../global/publish/submit_publish_job.py | 2 - .../nuke/publish/extract_review_data_mov.py | 9 ++ .../nuke/publish/submit_nuke_deadline.py | 119 +++++++++++------- 5 files changed, 88 insertions(+), 54 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 4faea1da36..5058a19472 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1537,7 +1537,7 @@ class ExporterReviewMov(ExporterReview): self.log.debug("Representation... `{}`".format(self.data)) - #---------- Clean up + # ---------- Clean up self.clean_nodes() return self.data diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 1214657856..43df6dfb8b 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -146,8 +146,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): os.environ.update(session) instance = metadata.get("instance") if instance: - # here is the place to add ability for nuke noninteractive - # ______________________________________ instance_family = instance.get("family") pixel_aspect = instance.get("pixelAspect", 1) resolution_width = instance.get("resolutionWidth", 1920) @@ -221,7 +219,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "frameEnd": data.get("frameEnd"), "fps": fps, "source": data.get("source", ""), - "pixelAspect": pixel_aspect + "pixelAspect": pixel_aspect, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height }) if "representations" not in instance.data: @@ -271,6 +271,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "fps": fps, "source": data.get("source", ""), "pixelAspect": pixel_aspect, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height } ) @@ -348,6 +350,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "fps": fps, "source": data.get("source", ""), "pixelAspect": pixel_aspect, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height } ) if lut_path: diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 12737880d0..03bd25fb49 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -160,7 +160,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ data = instance.data.copy() subset = data["subset"] - state = data.get("publishJobState", "Suspended") job_name = "{batch} - {subset} [publish image sequence]".format( batch=job["Props"]["Name"], subset=subset @@ -186,7 +185,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "JobDependency0": job["_id"], "UserName": job["Props"]["User"], "Comment": instance.context.data.get("comment", ""), - "InitialStatus": state, "Priority": job["Props"]["Pri"] }, "PluginInfo": { diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 333774bcd7..34bb28e9b8 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -46,6 +46,15 @@ class 
ExtractReviewDataMov(pype.api.Extractor): instance.data["families"].remove("review") instance.data["families"].remove("ftrack") data = exporter.generate_mov(farm=True) + + self.log.debug( + "_ data: {}".format(data)) + + instance.data.update({ + "bakeRenderPath": data.get("bakeRenderPath"), + "bakeScriptPath": data.get("bakeScriptPath"), + "bakeWriteNodeName": data.get("bakeWriteNodeName") + }) else: data = exporter.generate_mov() diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py index d9207d2bfc..82cce892e3 100644 --- a/pype/plugins/nuke/publish/submit_nuke_deadline.py +++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py @@ -1,7 +1,7 @@ import os import json import getpass - + from avalon import api from avalon.vendor import requests import re @@ -26,31 +26,61 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): def process(self, instance): node = instance[0] - # for x in instance: - # if x.Class() == "Write": - # node = x - # - # if node is None: - # return + context = instance.context DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL", "http://localhost:8082") assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL" - context = instance.context + self.deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL) + self._comment = context.data.get("comment", "") + self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion")) + self._deadline_user = context.data.get( + "deadlineUser", getpass.getuser()) + self._frame_start = int(instance.data["frameStart"]) + self._frame_end = int(instance.data["frameEnd"]) # get output path render_path = instance.data['path'] - render_dir = os.path.normpath(os.path.dirname(render_path)) - script_path = context.data["currentFile"] - script_name = os.path.basename(script_path) - comment = context.data.get("comment", "") + response = self.payload_submit(instance, + script_path, + render_path, + node.name() + ) + # Store output dir for unified publisher (filesequence) + instance.data["deadlineSubmissionJob"] = response.json() + instance.data["publishJobState"] = "Active" - deadline_user = context.data.get("deadlineUser", getpass.getuser()) + if instance.data.get("bakeScriptPath"): + render_path = instance.data.get("bakeRenderPath") + script_path = instance.data.get("bakeScriptPath") + exe_node_name = instance.data.get("bakeWriteNodeName") + + resp = self.payload_submit(instance, + script_path, + render_path, + exe_node_name, + response.json() + ) + # Store output dir for unified publisher (filesequence) + instance.data["deadlineSubmissionJob"] = resp.json() + instance.data["publishJobState"] = "Suspended" + + def payload_submit(self, + instance, + script_path, + render_path, + exe_node_name, + responce_data=None + ): + render_dir = os.path.normpath(os.path.dirname(render_path)) + script_name = os.path.basename(script_path) jobname = "%s - %s" % (script_name, instance.name) - ver = re.search(r"\d+\.\d+", context.data.get("hostVersion")) + + if not responce_data: + responce_data = {} try: # Ensure render folder exists @@ -58,10 +88,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): except OSError: pass - # Documentation for keys available at: - # https://docs.thinkboxsoftware.com - # /products/deadline/8.0/1_User%20Manual/manual - # /manual-submission.html#job-info-file-options payload = { "JobInfo": { # Top-level group name @@ -71,21 +97,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "Name": jobname, # Arbitrary username, for visualisation in Monitor - "UserName": 
deadline_user, + "UserName": self._deadline_user, + + "Priority": instance.data["deadlinePriority"], + + "Pool": "2d", + "SecondaryPool": "2d", "Plugin": "Nuke", "Frames": "{start}-{end}".format( - start=int(instance.data["frameStart"]), - end=int(instance.data["frameEnd"]) + start=self._frame_start, + end=self._frame_end ), - "ChunkSize": instance.data["deadlineChunkSize"], - "Priority": instance.data["deadlinePriority"], + "Comment": self._comment, - "Comment": comment, - - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - # "OutputFilename0": output_filename_0.replace("\\", "/"), }, "PluginInfo": { # Input @@ -96,27 +121,29 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): # "OutputFilePrefix": render_variables["filename_prefix"], # Mandatory for Deadline - "Version": ver.group(), + "Version": self._ver.group(), # Resolve relative references "ProjectPath": script_path, "AWSAssetFile0": render_path, # Only the specific write node is rendered. - "WriteNode": node.name() + "WriteNode": exe_node_name }, # Mandatory for Deadline, may be empty "AuxFiles": [] } + if responce_data.get("_id"): + payload["JobInfo"].update({ + "JobType": "Normal", + "BatchName": responce_data["Props"]["Batch"], + "JobDependency0": responce_data["_id"], + "ChunkSize": 99999999 + }) + # Include critical environment variables with submission keys = [ - # This will trigger `userSetup.py` on the slave - # such that proper initialisation happens the same - # way as it does on a local machine. - # TODO(marcus): This won't work if the slaves don't - # have accesss to these paths, such as if slaves are - # running Linux and the submitter is on Windows. "PYTHONPATH", "PATH", "AVALON_SCHEMA", @@ -162,11 +189,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): if key == "PYTHONPATH": clean_path = clean_path.replace('python2', 'python3') + clean_path = clean_path.replace( - os.path.normpath( - environment['PYPE_STUDIO_CORE_MOUNT']), # noqa - os.path.normpath( - environment['PYPE_STUDIO_CORE_PATH'])) # noqa + os.path.normpath( + environment['PYPE_STUDIO_CORE_MOUNT']), # noqa + os.path.normpath( + environment['PYPE_STUDIO_CORE_PATH'])) # noqa clean_environment[key] = clean_path environment = clean_environment @@ -181,20 +209,15 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - self.preflight_check(instance) - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) - # E.g. 
http://192.168.0.1:8082/api/jobs
-        url = "{}/api/jobs".format(DEADLINE_REST_URL)
-        response = requests.post(url, json=payload)
+        response = requests.post(self.deadline_url, json=payload)
+
         if not response.ok:
             raise Exception(response.text)
 
-        # Store output dir for unified publisher (filesequence)
-        instance.data["deadlineSubmissionJob"] = response.json()
-        instance.data["publishJobState"] = "Active"
+        return response
 
     def preflight_check(self, instance):
         """Ensure the startFrame, endFrame and byFrameStep are integers"""

From ce64e6fa0706f5db01ce147f510b34074d6936fe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 10 Jan 2020 00:30:20 +0000
Subject: [PATCH 088/393] fixing environment filtering

---
 pype/lib.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/pype/lib.py b/pype/lib.py
index 8772608b38..b19491adeb 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -18,13 +18,16 @@ def _subprocess(*args, **kwargs):
     """Convenience method for getting output and errors from subprocess."""
 
     # make sure environment contains only strings
-    filtered_env = {k: str(v) for k, v in os.environ.items()}
+    if not kwargs.get("env"):
+        filtered_env = {k: str(v) for k, v in os.environ.items()}
+    else:
+        filtered_env = {k: str(v) for k, v in kwargs.get("env").items()}
 
     # set overrides
     kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
     kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT)
     kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
-    kwargs['env'] = kwargs.get('env',filtered_env)
+    kwargs['env'] = filtered_env
 
     proc = subprocess.Popen(*args, **kwargs)
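A quick usage sketch may help here; it is not part of the patch series, and the command, path, and import are illustrative assumptions. With this fix a caller-supplied env mapping is stringified and actually handed to Popen, where previously it bypassed the string conversion entirely:

    import os
    from pype.lib import _subprocess  # the helper patched above; assumes pype is importable

    env = dict(os.environ)
    env["NUKE_PATH"] = "/studio/nuke/plugins"  # hypothetical override for the child process

    # The explicit mapping is now converted to {str: str} and passed to Popen,
    # so non-string values (e.g. ints coming from config) no longer crash the call.
    output = _subprocess(["nuke", "-t", "render_script.py"], env=env)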
From 9bc2f557a39efb7aa1ebefbdb7025ff87b8c7515 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 10 Jan 2020 11:40:29 +0100
Subject: [PATCH 089/393] added new entityType `appointment` to ignored entity types

---
 pype/ftrack/events/event_sync_to_avalon.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 91355c6068..8d75d932f8 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -28,7 +28,7 @@ class SyncToAvalonEvent(BaseEvent):
     ignore_entTypes = [
         "socialfeed", "socialnotification", "note",
         "assetversion", "job", "user", "reviewsessionobject", "timer",
-        "timelog", "auth_userrole"
+        "timelog", "auth_userrole", "appointment"
     ]
     ignore_ent_types = ["Milestone"]
     ignore_keys = ["statusid"]

From 4bb66af2016951942f4cdc2c0ecd004c82681df2 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 10 Jan 2020 11:40:53 +0100
Subject: [PATCH 090/393] added debug with project name to sync to avalon action

---
 pype/ftrack/lib/avalon_sync.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py
index 5839d36e64..8cebd12a59 100644
--- a/pype/ftrack/lib/avalon_sync.py
+++ b/pype/ftrack/lib/avalon_sync.py
@@ -314,6 +314,9 @@ class SyncEntitiesFactory:
             self.log.warning(msg)
             return {"success": False, "message": msg}
 
+        self.log.debug((
+            "*** Synchronization initialization started <{}>."
+        ).format(project_full_name))
         # Check if `avalon_mongo_id` custom attribute exist or is accessible
         if CustAttrIdKey not in ft_project["custom_attributes"]:
             items = []

From 77d71d4bf356f40ce2a06cf27899529e8df2613c Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 10 Jan 2020 11:43:07 +0100
Subject: [PATCH 091/393] try to set intent value on ftrack entity without crashing pyblish in integrate_ftrack_api

---
 .../plugins/ftrack/publish/integrate_ftrack_api.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 337562c1f5..c51685f84d 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -188,14 +188,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
         # Adding Custom Attributes
         for attr, val in assetversion_cust_attrs.items():
             if attr in assetversion_entity["custom_attributes"]:
-                assetversion_entity["custom_attributes"][attr] = val
-                continue
+                try:
+                    assetversion_entity["custom_attributes"][attr] = val
+                    session.commit()
+                    continue
+                except Exception:
+                    session.rollback()
 
             self.log.warning((
                 "Custom Attribute \"{0}\""
-                " is not available for AssetVersion."
-                " Can't set its value to: \"{1}\""
-            ).format(attr, str(val)))
+                " is not available for AssetVersion <{1}>."
+                " Can't set its value to: \"{2}\""
+            ).format(attr, assetversion_entity["id"], str(val)))
 
         # Have to commit the version and asset, because location can't
         # determine the final location without.
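The commit-per-attribute pattern above can be read in isolation as follows. This is a sketch, not code from the series; it assumes an `ftrack_api` session (which provides `commit()` and `rollback()`) and a standard logger:

    def set_custom_attribute(session, entity, attr, value, log):
        """Try to set one custom attribute without poisoning the session."""
        if attr not in entity["custom_attributes"]:
            log.warning("Custom Attribute \"%s\" is not available", attr)
            return False
        try:
            entity["custom_attributes"][attr] = value
            session.commit()  # commit per attribute so one rejection cannot block the rest
            return True
        except Exception:
            session.rollback()  # discard only the failed change, keep the session usable
            log.warning("Can't set \"%s\" to \"%s\"", attr, value)
            return False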
From d4bf25f01a823b042777730d6e09333223841656 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 10 Jan 2020 13:07:11 +0000
Subject: [PATCH 092/393] resolving `${TOKEN}` variables in PATH to env variables

---
 pype/plugins/maya/publish/collect_yeti_rig.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py
index 7ab5649c0b..3b05e19fdb 100644
--- a/pype/plugins/maya/publish/collect_yeti_rig.py
+++ b/pype/plugins/maya/publish/collect_yeti_rig.py
@@ -140,9 +140,21 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
                            "attribute'" % node)
 
         # Collect all texture files
+        # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
+        env_re = re.compile(r"\$\{(\w+)\}")
         for texture in texture_filenames:
             files = []
+
+            matches = re.finditer(env_re, texture)
+            for m in matches:
+                try:
+                    texture = texture.replace(m.group(), os.environ[m.group(1)])
+                except KeyError:
+                    msg = "Cannot find requested {} in environment".format(1)
+                    self.log.error(msg)
+                    raise RuntimeError(msg)
+
             if os.path.isabs(texture):
                 self.log.debug("Texture is absolute path, ignoring "
                                "image search paths for: %s" % texture)

From 9a7f36023b5f9f9d9a29ff4ae9a6c88c7a01069b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 10 Jan 2020 13:16:41 +0000
Subject: [PATCH 093/393] fixed error message

---
 pype/plugins/maya/publish/collect_yeti_rig.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py
index 3b05e19fdb..831bc5e0ca 100644
--- a/pype/plugins/maya/publish/collect_yeti_rig.py
+++ b/pype/plugins/maya/publish/collect_yeti_rig.py
@@ -151,7 +151,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
                 try:
                     texture = texture.replace(m.group(), os.environ[m.group(1)])
                 except KeyError:
-                    msg = "Cannot find requested {} in environment".format(1)
+                    msg = "Cannot find requested {} in environment".format(
+                        m.group(1))
                     self.log.error(msg)
                     raise RuntimeError(msg)

From acdc0fed0cbb1463c6f0b354c92d293f9cc1f13f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 10 Jan 2020 13:23:07 +0000
Subject: [PATCH 094/393] refactored to class method

---
 pype/plugins/maya/publish/collect_yeti_rig.py | 33 ++++++++++++-------
 1 file changed, 21 insertions(+), 12 deletions(-)

diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py
index 831bc5e0ca..39426ea623 100644
--- a/pype/plugins/maya/publish/collect_yeti_rig.py
+++ b/pype/plugins/maya/publish/collect_yeti_rig.py
@@ -119,6 +119,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
 
         texture_filenames = []
         if image_search_paths:
+            # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
+            image_search_paths = self._replace_tokens(image_search_paths)
             # TODO: Somehow this uses OS environment path separator, `:` vs `;`
             # Later on check whether this is pipeline OS cross-compatible.
             image_search_paths = [p for p in
                                   image_search_paths.split(os.path.pathsep)
                                   if p]
@@ -141,21 +143,11 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
 
         # Collect all texture files
        # find all ${TOKEN} tokens and replace them with $TOKEN env.
variable - env_re = re.compile(r"\$\{(\w+)\}") + texture_filenames = self._replace_tokens(texture_filenames) for texture in texture_filenames: files = [] - - matches = re.finditer(env_re, texture) - for m in matches: - try: - texture = texture.replace(m.group(), os.environ[m.group(1)]) - except KeyError: - msg = "Cannot find requested {} in environment".format( - m.group(1)) - self.log.error(msg) - raise RuntimeError(msg) - + if os.path.isabs(texture): self.log.debug("Texture is absolute path, ignoring " "image search paths for: %s" % texture) @@ -296,3 +288,20 @@ class CollectYetiRig(pyblish.api.InstancePlugin): collection, remainder = clique.assemble(files, patterns=pattern) return collection + + def _replace_tokens(self, strings): + env_re = re.compile(r"\$\{(\w+)\}") + + replaced = [] + for s in strings: + matches = re.finditer(env_re, s) + for m in matches: + try: + s = s.replace(m.group(), os.environ[m.group(1)]) + except KeyError: + msg = "Cannot find requested {} in environment".format( + m.group(1)) + self.log.error(msg) + raise RuntimeError(msg) + replaced.append(s) + return replaced From d20853f617303acb6134f4863899a0246e43412b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 10 Jan 2020 14:35:19 +0100 Subject: [PATCH 095/393] feat(global): finalizing the nuke to deadline feature --- pype/nuke/lib.py | 2 - .../global/publish/collect_filesequences.py | 47 ++++++++++++------- 2 files changed, 31 insertions(+), 18 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 5058a19472..b523613afb 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1508,7 +1508,6 @@ class ExporterReviewMov(ExporterReview): # Write node write_node = nuke.createNode("Write") self.log.debug("Path: {}".format(self.path)) - self.instance.data["baked_colorspace_movie"] = self.path write_node["file"].setValue(self.path) write_node["file_type"].setValue(self.ext) write_node["raw"].setValue(1) @@ -1516,7 +1515,6 @@ class ExporterReviewMov(ExporterReview): write_node.setInput(0, self.previous_node) self._temp_nodes.append(write_node) self.log.debug("Write... 
`{}`".format(self._temp_nodes)) - # ---------- end nodes creation # ---------- render or save to nk diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 43df6dfb8b..13a593dbac 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -97,7 +97,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): def process(self, context): pixel_aspect = 1 + resolution_width = 1920 + resolution_height = 1080 lut_path = None + subset = None if os.environ.get("PYPE_PUBLISH_PATHS"): paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep) self.log.info("Collecting paths: {}".format(paths)) @@ -151,6 +154,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): resolution_width = instance.get("resolutionWidth", 1920) resolution_height = instance.get("resolutionHeight", 1080) lut_path = instance.get("lutPath", None) + baked_mov_path = instance.get("bakeRenderPath") + subset = instance.get("subset") else: # Search in directory @@ -158,7 +163,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): root = path self.log.info("Collecting: {}".format(root)) + regex = data.get("regex") + if baked_mov_path: + regex = "^{}.*$".format(subset) + if regex: self.log.info("Using regex: {}".format(regex)) @@ -171,16 +180,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): ) self.log.info("Found collections: {}".format(collections)) - - """ - if data.get("subset"): - # If subset is provided for this json then it must be a single - # collection. - if len(collections) > 1: - self.log.error("Forced subset can only work with a single " - "found sequence") - raise RuntimeError("Invalid sequence") - """ + self.log.info("Found remainder: {}".format(remainder)) fps = data.get("fps", 25) @@ -246,23 +246,31 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): instance.data["representations"].append( representation) - elif data.get("subset"): + elif subset: # if we have subset - add all collections and known # reminder as representations + # take out review family if mov path + # this will make imagesequence none review + if baked_mov_path: + self.log.info( + "Baked mov is available {}".format( + baked_mov_path)) + families.append("review") + self.log.info( "Adding representations to subset {}".format( - data.get("subset"))) + subset)) - instance = context.create_instance(data.get("subset")) + instance = context.create_instance(subset) data = copy.deepcopy(data) instance.data.update( { - "name": data.get("subset"), + "name": subset, "family": families[0], "families": list(families), - "subset": data.get("subset"), + "subset": subset, "asset": data.get( "asset", api.Session["AVALON_ASSET"]), "stagingDir": root, @@ -291,11 +299,18 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "stagingDir": root, "anatomy_template": "render", "fps": fps, - "tags": ["review"], + "tags": ["review"] if not baked_mov_path else [], } instance.data["representations"].append( representation) + # filter out only relevant mov in case baked available + self.log.debug("__ remainder {}".format(remainder)) + if baked_mov_path: + remainder = [r for r in remainder + if r in baked_mov_path] + self.log.debug("__ remainder {}".format(remainder)) + # process reminders for rem in remainder: # add only known types to representation From efd71c7ef72090f6f989d9b237dce53333c8f6ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:53:49 +0000 Subject: 
[PATCH 096/393] changed place where tokens are replaced for `image_search_path`

---
 pype/plugins/maya/publish/collect_yeti_rig.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py
index 39426ea623..c743b2c00b 100644
--- a/pype/plugins/maya/publish/collect_yeti_rig.py
+++ b/pype/plugins/maya/publish/collect_yeti_rig.py
@@ -119,13 +119,15 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
 
         texture_filenames = []
         if image_search_paths:
-            # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
-            image_search_paths = self._replace_tokens(image_search_paths)
-
+
             # TODO: Somehow this uses OS environment path separator, `:` vs `;`
             # Later on check whether this is pipeline OS cross-compatible.
             image_search_paths = [p for p in
                                   image_search_paths.split(os.path.pathsep)
                                   if p]
 
+            # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
+            image_search_paths = self._replace_tokens(image_search_paths)
+
             # List all related textures
             texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
             self.log.info("Found %i texture(s)" % len(texture_filenames))

From 59305a12106aa81ffc19e5b92a2b3eb8aafec2c5 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 10 Jan 2020 16:48:23 +0100
Subject: [PATCH 097/393] make sure template keys exist only when needed

---
 pype/plugins/global/publish/integrate_new.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index ee18347703..01dc58dc1f 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -267,10 +267,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             "family": instance.data['family'],
             "subset": subset["name"],
             "version": int(version["name"]),
-            "hierarchy": hierarchy,
-            "resolution_width": repre.get("resolutionWidth", ""),
-            "resolution_height": repre.get("resolutionHeight", ""),
-            "fps": str(instance.data.get("fps", ""))}
+            "hierarchy": hierarchy}
+
+        resolution_width = repre.get("resolutionWidth")
+        resolution_height = repre.get("resolutionHeight")
+        fps = instance.data.get("fps")
+
+
+        if resolution_width:
+            template_data["resolution_width"] = resolution_width
+        if resolution_height:
+            template_data["resolution_height"] = resolution_height
+        if fps:
+            template_data["fps"] = fps
 
         files = repre['files']
         if repre.get('stagingDir'):
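The guarded keys above keep anatomy templates that lack the optional tokens formattable. The same idea can be written as one loop, which ties each condition to its own value; this is a minimal editorial sketch, not part of the patch:

    def fill_optional_keys(template_data, instance, repre):
        """Add optional template keys only when they carry a real value."""
        optional = {
            "resolution_width": repre.get("resolutionWidth"),
            "resolution_height": repre.get("resolutionHeight"),
            "fps": instance.data.get("fps"),
        }
        for key, value in optional.items():
            if value:  # skip None/empty so the template is not fed blanks
                template_data[key] = value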
From 2ae8dc7582b14fc243a53ed1d76679d179fd2ac7 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 10 Jan 2020 17:47:23 +0100
Subject: [PATCH 098/393] fix(global): unc pathlib2 was not used correctly

---
 pype/plugins/global/publish/integrate_new.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c2812880c7..36e27cdb3a 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -474,6 +474,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         drive, _path = os.path.splitdrive(dst)
         unc = Path(drive).resolve()
         dst = str(unc / _path)
+        src = str(src)
 
         self.log.debug("Copying file .. {} -> {}".format(src, dst))
         dirname = os.path.dirname(dst)
@@ -494,8 +495,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
     def hardlink_file(self, src, dst):
         dirname = os.path.dirname(dst)
+
         src = Path(src).resolve()
-        dst = Path(dst).resolve()
+        drive, _path = os.path.splitdrive(dst)
+        unc = Path(drive).resolve()
+        dst = str(unc / _path)
+        src = str(src)
+
         try:
             os.makedirs(dirname)
         except OSError as e:

From 791bb63f97f9a74c7520ff19ea2a4e8fcd9283d2 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 10 Jan 2020 18:11:33 +0100
Subject: [PATCH 099/393] collect templates fps fix

---
 pype/plugins/global/publish/collect_templates.py | 16 ++++++++++++----
 pype/plugins/global/publish/integrate_new.py     |  1 -
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index d57d416dea..e27af82595 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -76,10 +76,18 @@ class CollectTemplates(pyblish.api.InstancePlugin):
             "subset": subset_name,
             "version": version_number,
             "hierarchy": hierarchy.replace("\\", "/"),
-            "representation": "TEMP",
-            "resolution_width": instance.data.get("resolutionWidth", ""),
-            "resolution_height": instance.data.get("resolutionHeight", ""),
-            "fps": str(instance.data.get("fps", ""))}}
+            "representation": "TEMP"}}
+
+        resolution_width = instance.data.get("resolutionWidth")
+        resolution_height = instance.data.get("resolutionHeight")
+        fps = instance.data.get("fps")
+
+        if resolution_width:
+            template_data["resolution_width"] = resolution_width
+        if resolution_height:
+            template_data["resolution_height"] = resolution_height
+        if fps:
+            template_data["fps"] = fps
 
         instance.data["template"] = template
         instance.data["assumedTemplateData"] = template_data

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 01dc58dc1f..8efec94013 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -273,7 +273,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         resolution_height = repre.get("resolutionHeight")
         fps = instance.data.get("fps")
 
-
         if resolution_width:
             template_data["resolution_width"] = resolution_width

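The delivery fixes in the next patch hinge on remapping a padded frame sequence onto the destination template with `clique`. A condensed sketch of that mechanism follows; it is editorial, and the paths, names, and frame range are made up:

    import clique

    # source representation: sh010_comp.1001.exr ... sh010_comp.1005.exr
    src = clique.Collection(head="/projects/show/sh010_comp.", tail=".exr", padding=4)
    src.indexes.update(range(1001, 1006))

    # delivery path resolved with a frame placeholder, then split around it
    delivery_path = "/delivery/show/sh010_comp.@####@.exr"
    dst_head, dst_tail = delivery_path.split("@####@")
    dst = clique.Collection(head=dst_head, tail=dst_tail, padding=src.padding)
    dst.indexes.update(src.indexes)  # same frame numbers on the destination side

    for src_file, dst_file in zip(src, dst):
        print(src_file, "->", dst_file)  # the action copies/links each pair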
From 271a935ee754672d1b34592e86db7ca3b0f24360 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 11 Jan 2020 14:11:04 +0100
Subject: [PATCH 100/393] fixes to getting the path

---
 pype/ftrack/actions/action_delivery.py | 58 ++++++++++++++----------
 1 file changed, 37 insertions(+), 21 deletions(-)

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index 9edb7a5964..afd20d12d1 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -231,14 +231,16 @@ class Delivery(BaseAction):
                 "message": "Not selected components to deliver."
             }
 
-        location_path = os.path.normpath(location_path.strip())
-        if location_path and not os.path.exists(location_path):
-            return {
-                "success": False,
-                "message": (
-                    "Entered location path does not exist. \"{}\""
-                ).format(location_path)
-            }
+        location_path = location_path.strip()
+        if location_path:
+            location_path = os.path.normpath(location_path)
+            if not os.path.exists(location_path):
+                return {
+                    "success": False,
+                    "message": (
+                        "Entered location path does not exist. \"{}\""
+                    ).format(location_path)
+                }
 
         self.db_con.install()
         self.db_con.Session["AVALON_PROJECT"] = project_name
@@ -299,14 +301,16 @@ class Delivery(BaseAction):
             repre = repres_by_name.get(comp_name)
             repres_to_deliver.append(repre)
 
+        if not location_path:
+            location_path = os.environ.get("AVALON_PROJECTS") or ""
+
+        print(location_path)
+
         anatomy = Anatomy(project_name)
         for repre in repres_to_deliver:
             # Get destination repre path
             anatomy_data = copy.deepcopy(repre["context"])
-            if location_path:
-                anatomy_data["root"] = location_path
-            else:
-                anatomy_data["root"] = os.environ.get("AVALON_PROJECTS") or ""
+            anatomy_data["root"] = location_path
 
             anatomy_filled = anatomy.format(anatomy_data)
             test_path = (
@@ -353,11 +357,15 @@ class Delivery(BaseAction):
                 continue
 
             # Get source repre path
+            frame = repre['context'].get('frame')
+
+            if frame:
+                repre["context"]["frame"] = len(str(frame)) * "#"
+
             repre_path = self.path_from_represenation(repre)
             # TODO add backup solution where root of path from component
             # is replaced with AVALON_PROJECTS root
-
-            if repre_path and os.path.exists(repre_path):
+            if not frame:
                 self.process_single_file(
                     repre_path, anatomy, anatomy_name, anatomy_data
                 )
@@ -385,7 +393,7 @@ class Delivery(BaseAction):
     def process_sequence(
         self, repre_path, anatomy, anatomy_name, anatomy_data
    ):
-        dir_path, file_name = os.path.split(repre_path)
+        dir_path, file_name = os.path.split(str(repre_path))
 
         base_name, ext = os.path.splitext(file_name)
         file_name_items = None
@@ -421,12 +429,15 @@ class Delivery(BaseAction):
             self.log.warning("{} <{}>".format(msg, repre_path))
             return
 
-        anatomy_data["frame"] = "<>"
+        frame_indicator = "@####@"
+
+        anatomy_data["frame"] = frame_indicator
         anatomy_filled = anatomy.format(anatomy_data)
         delivery_path = anatomy_filled["delivery"][anatomy_name]
+        print(delivery_path)
 
         delivery_folder = os.path.dirname(delivery_path)
-        dst_head, dst_tail = delivery_path.split("<>")
+        dst_head, dst_tail = delivery_path.split(frame_indicator)
         dst_padding = src_collection.padding
        dst_collection = clique.Collection(
             head=dst_head,
@@ -469,10 +480,11 @@ class Delivery(BaseAction):
                 # Template references unavailable data
                 return None
 
-        if os.path.exists(path):
-            return os.path.normpath(path)
+        return os.path.normpath(path)
 
     def copy_file(self, src_path, dst_path):
+        if os.path.exists(dst_path):
+            return
         try:
             filelink.create(
                 src_path,
@@ -496,11 +508,15 @@ class Delivery(BaseAction):
                 "type": "label",
                 "value": "# {}".format(msg)
             })
-            if isinstance(_items, str):
+            if not isinstance(_items, (list, tuple)):
                 _items = [_items]
 
+            __items = []
+            for item in _items:
+                __items.append(str(item))
+
             items.append({
                 "type": "label",
-                "value": '<p>{}</p>'.format("<br>".join(_items))
+                "value": '<p>{}</p>'.format("<br>".join(__items))
             })
 
         if not items:

From cc4857a5d87a39430b3d0b72fb72e7a824621a41 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 11 Jan 2020 14:56:48 +0100
Subject: [PATCH 101/393] hotfix/pathlib in integration

---
 pype/plugins/global/publish/integrate_new.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c2812880c7..6e7a8d13a9 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -470,7 +470,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             None
         """
 
-        src = Path(src).resolve()
+        src = str(Path(src).resolve())
         drive, _path = os.path.splitdrive(dst)
         unc = Path(drive).resolve()
         dst = str(unc / _path)
@@ -495,7 +495,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     def hardlink_file(self, src, dst):
         dirname = os.path.dirname(dst)
         src = Path(src).resolve()
-        dst = Path(dst).resolve()
+        drive, _path = os.path.splitdrive(dst)
+        unc = Path(drive).resolve()
+        dst = str(unc / _path)
         try:
             os.makedirs(dirname)
         except OSError as e:

From 035cba879d569e2115513071e66790b8819fc8a0 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sun, 12 Jan 2020 14:41:16 +0100
Subject: [PATCH 102/393] fix(global): integrate new error with pathlib2

---
 pype/plugins/global/publish/integrate_new.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c2812880c7..d982cefd8a 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -474,6 +474,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         drive, _path = os.path.splitdrive(dst)
         unc = Path(drive).resolve()
         dst = str(unc / _path)
+        src = str(src)
 
         self.log.debug("Copying file .. {} -> {}".format(src, dst))
         dirname = os.path.dirname(dst)
@@ -494,8 +495,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
     def hardlink_file(self, src, dst):
         dirname = os.path.dirname(dst)
+
         src = Path(src).resolve()
-        dst = Path(dst).resolve()
+        drive, _path = os.path.splitdrive(dst)
+        unc = Path(drive).resolve()
+        dst = str(unc / _path)
+        src = str(src)
+
         try:
             os.makedirs(dirname)
         except OSError as e:
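Before the slate work below, it is worth unpacking what the repeated pathlib dance in the last few patches does. A standalone sketch with made-up paths, assuming `P:` is a mapped network drive and that `resolve()` expands it to its UNC root, which is the behaviour these fixes rely on:

    import os
    from pathlib2 import Path  # pathlib2 backports pathlib to Python 2

    dst = "P:\\projects\\show\\publish\\render\\v001\\sh010.exr"

    drive, _path = os.path.splitdrive(dst)  # "P:" and "\\projects\\show\\..."
    unc = Path(drive).resolve()             # e.g. "\\\\server\\share" for a mapped drive
    dst = str(unc / _path)                  # "\\\\server\\share\\projects\\show\\..."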
From 518d4b2cfe0dd6d946b0d4b13fca3cf26628eccb Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sun, 12 Jan 2020 14:41:40 +0100
Subject: [PATCH 103/393] feat(nuke): adding slate node integration

---
 .../nuke/publish/collect_slate_node.py        | 39 +++++++++++++++++++
 pype/plugins/nuke/publish/collect_writes.py   | 14 +++++++
 .../nuke/publish/extract_render_local.py      |  9 +++++
 3 files changed, 62 insertions(+)
 create mode 100644 pype/plugins/nuke/publish/collect_slate_node.py

diff --git a/pype/plugins/nuke/publish/collect_slate_node.py b/pype/plugins/nuke/publish/collect_slate_node.py
new file mode 100644
index 0000000000..5253f29889
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_slate_node.py
@@ -0,0 +1,39 @@
+import pyblish.api
+import nuke
+
+class CollectSlate(pyblish.api.InstancePlugin):
+    """Check if SLATE node is in scene and connected to rendering tree"""
+
+    order = pyblish.api.CollectorOrder + 0.09
+    label = "Collect Slate Node"
+    hosts = ["nuke"]
+    families = ["write"]
+
+    def process(self, instance):
+        node = instance[0]
+
+        slate = next((n for n in nuke.allNodes()
+                      if "slate" in n.name().lower()
+                      if not n["disable"].getValue()),
+                     None)
+
+        if slate:
+            # check if slate node is connected to write node tree
+            slate_check = 0
+            slate_node = None
+            while slate_check == 0:
+                try:
+                    node = node.dependencies()[0]
+                    if slate.name() in node.name():
+                        slate_node = node
+                        slate_check = 1
+                except IndexError:
+                    break
+
+            if slate_node:
+                instance.data["slateNodeName"] = slate_node.name()
+                instance.data["families"].append("slate")
+                self.log.info(
+                    "Slate node is in node graph: `{}`".format(slate.name()))
+                self.log.debug(
+                    "__ instance: `{}`".format(instance))

diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py
index dd3049834d..67b6e2e189 100644
--- a/pype/plugins/nuke/publish/collect_writes.py
+++ b/pype/plugins/nuke/publish/collect_writes.py
@@ -25,6 +25,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
 
         self.log.debug("checking instance: {}".format(instance))
 
+        # check if slate node available
+        slate_node = instance.data.get("slateNodeName")
+
         # Determine defined file type
         ext = node["file_type"].value()
 
@@ -40,6 +43,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         first_frame = int(nuke.root()["first_frame"].getValue())
         last_frame = int(nuke.root()["last_frame"].getValue())
 
+        # remove one frame at beginning if slate
+        if slate_node:
+            first_frame -= 1
+
         if node["use_limit"].getValue():
             handles = 0
             first_frame = int(node["first"].getValue())
@@ -100,6 +107,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "subset": instance.data["subset"],
             "fps": instance.context.data["fps"]
         }
+
+        # if slate node then remove one frame from version data
+        if slate_node:
+            version_data.update({
+                "frameStart": (first_frame + 1) + handle_start,
+            })
+
         instance.data["family"] = "write"
         group_node = [x for x in instance if x.Class() == "Group"][0]
         deadlineChunkSize = 1

diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py
index 825db67e9d 100644
--- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -27,6 +27,9 @@ class NukeRenderLocal(pype.api.Extractor): self.log.debug("instance collected: {}".format(instance.data)) + # check if slate node available + slate_node = instance.data.get("slateNodeName") + first_frame = instance.data.get("frameStart", None) last_frame = instance.data.get("frameEnd", None) node_subset_name = instance.data.get("name", None) @@ -80,5 +83,11 @@ class NukeRenderLocal(pype.api.Extractor): collection = collections[0] instance.data['collection'] = collection + if slate_node: + instance.data['frameStart'] = first_frame + 1 + self.log.info( + 'Removing slate frame: `{}`'.format( + instance.data['frameStart'])) + self.log.info('Finished render') return From 6580d1881ae86cfa4fd61ae2ebf81b873a48d885 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jan 2020 02:26:09 +0100 Subject: [PATCH 104/393] fix(global): dealing with extension properly --- pype/plugins/global/publish/extract_burnin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 06a62dd98b..a87fd47ea1 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -63,7 +63,8 @@ class ExtractBurnin(pype.api.Extractor): filename = "{0}".format(repre["files"]) name = "_burnin" - movieFileBurnin = filename.replace(".mov", "") + name + ".mov" + ext = os.path.splitext(filename)[1] + movieFileBurnin = filename.replace(ext, "") + name + ext full_movie_path = os.path.join( os.path.normpath(stagingdir), repre["files"] From b714353983b48dc1807348dff4bf7f91bc55a589 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jan 2020 02:27:05 +0100 Subject: [PATCH 105/393] fix(global): cleaning code and adding repre to render template --- pype/plugins/global/publish/extract_review.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 4eb7fa16ed..5e30658f68 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -32,13 +32,13 @@ class ExtractReview(pyblish.api.InstancePlugin): inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - resolution_width = instance.data.get("resolutionWidth", to_width) - resolution_height = instance.data.get("resolutionHeight", to_height) - pixel_aspect = instance.data.get("pixelAspect", 1) - self.log.debug("Families In: `{}`".format(instance.data["families"])) + resolution_width = inst_data.get("resolutionWidth", to_width) + resolution_height = inst_data.get("resolutionHeight", to_height) + pixel_aspect = inst_data.get("pixelAspect", 1) + self.log.debug("Families In: `{}`".format(inst_data["families"])) # get representation and loop them - representations = instance.data["representations"] + representations = inst_data["representations"] # filter out mov and img sequences representations_new = representations[:] @@ -224,7 +224,6 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("__ height_scale: `{}`".format(height_scale)) self.log.debug("__ height_half_pad: `{}`".format(height_half_pad)) - scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format( width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad ) @@ -279,7 
+278,9 @@ class ExtractReview(pyblish.api.InstancePlugin): 'files': repr_file, "tags": new_tags, "outputName": name, - "codec": codec_args + "codec": codec_args, + "_profile": profile, + "anatomy_template": "render" }) if repre_new.get('preview'): repre_new.pop("preview") From 09a38234e8f157b1991f00b8055d0ea0750577f5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jan 2020 02:27:44 +0100 Subject: [PATCH 106/393] feat(global): adding slate to video files --- .../global/publish/extract_review_slate.py | 244 ++++++++++++++++++ 1 file changed, 244 insertions(+) create mode 100644 pype/plugins/global/publish/extract_review_slate.py diff --git a/pype/plugins/global/publish/extract_review_slate.py b/pype/plugins/global/publish/extract_review_slate.py new file mode 100644 index 0000000000..8224cd51a8 --- /dev/null +++ b/pype/plugins/global/publish/extract_review_slate.py @@ -0,0 +1,244 @@ +import os +import pype.api +import pyblish + + +class ExtractReviewSlate(pype.api.Extractor): + """ + Will add slate frame at the start of the video files + """ + + label = "Review with Slate frame" + order = pyblish.api.ExtractorOrder + 0.031 + families = ["slate"] + hosts = ["nuke", "maya", "shell"] + optional = True + + def process(self, instance): + inst_data = instance.data + if "representations" not in inst_data: + raise RuntimeError("Burnin needs already created mov to work on.") + + suffix = "_slate" + slate_path = inst_data.get("slateFrame") + ffmpeg_path = os.path.join(os.environ.get("FFMPEG_PATH", ""), "ffmpeg") + + to_width = 1920 + to_height = 1080 + resolution_width = inst_data.get("resolutionWidth", to_width) + resolution_height = inst_data.get("resolutionHeight", to_height) + pixel_aspect = inst_data.get("pixelAspect", 1) + fps = inst_data.get("fps") + + # defining image ratios + resolution_ratio = float(resolution_width / ( + resolution_height * pixel_aspect)) + delivery_ratio = float(to_width) / float(to_height) + self.log.debug(resolution_ratio) + self.log.debug(delivery_ratio) + + # get scale factor + scale_factor = to_height / ( + resolution_height * pixel_aspect) + self.log.debug(scale_factor) + + for i, repre in enumerate(inst_data["representations"]): + _remove_at_end = [] + self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre)) + + p_tags = repre.get("tags", []) + + if "slate-frame" not in p_tags: + continue + + stagingdir = repre["stagingDir"] + input_file = "{0}".format(repre["files"]) + + ext = os.path.splitext(input_file)[1] + output_file = input_file.replace(ext, "") + suffix + ext + + input_path = os.path.join( + os.path.normpath(stagingdir), repre["files"]) + self.log.debug("__ input_path: {}".format(input_path)) + _remove_at_end.append(input_path) + + output_path = os.path.join( + os.path.normpath(stagingdir), output_file) + self.log.debug("__ output_path: {}".format(output_path)) + + input_args = [] + output_args = [] + # overrides output file + input_args.append("-y") + # preset's input data + input_args.extend(repre["_profile"].get('input', [])) + input_args.append("-loop 1 -i {}".format(slate_path)) + input_args.extend([ + "-r {}".format(fps), + "-t 0.04"] + ) + + # output args + codec_args = repre["_profile"].get('codec', []) + output_args.extend(codec_args) + # preset's output data + output_args.extend(repre["_profile"].get('output', [])) + + # make sure colors are correct + output_args.extend([ + "-vf scale=out_color_matrix=bt709", + "-color_primaries bt709", + "-color_trc bt709", + "-colorspace bt709" + ]) + + # scaling none square pixels and 1920 width + if 
"reformat" in p_tags: + if resolution_ratio < delivery_ratio: + self.log.debug("lower then delivery") + width_scale = int(to_width * scale_factor) + width_half_pad = int(( + to_width - width_scale)/2) + height_scale = to_height + height_half_pad = 0 + else: + self.log.debug("heigher then delivery") + width_scale = to_width + width_half_pad = 0 + scale_factor = float(to_width) / float(resolution_width) + self.log.debug(scale_factor) + height_scale = int( + resolution_height * scale_factor) + height_half_pad = int( + (to_height - height_scale)/2) + + self.log.debug( + "__ width_scale: `{}`".format(width_scale)) + self.log.debug( + "__ width_half_pad: `{}`".format(width_half_pad)) + self.log.debug( + "__ height_scale: `{}`".format(height_scale)) + self.log.debug( + "__ height_half_pad: `{}`".format(height_half_pad)) + + scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format( + width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad + ) + + vf_back = self.add_video_filter_args( + output_args, scaling_arg) + # add it to output_args + output_args.insert(0, vf_back) + + slate_v_path = slate_path.replace(".png", ext) + output_args.append(slate_v_path) + _remove_at_end.append(slate_v_path) + + slate_args = [ + ffmpeg_path, + " ".join(input_args), + " ".join(output_args) + ] + slate_subprcs_cmd = " ".join(slate_args) + + # run slate generation subprocess + self.log.debug("Slate Executing: {}".format(slate_subprcs_cmd)) + slate_output = pype.api.subprocess(slate_subprcs_cmd) + self.log.debug("Slate Output: {}".format(slate_output)) + + # create ffmpeg concat text file path + conc_text_file = input_file.replace(ext, "") + "_concat" + ".txt" + conc_text_path = os.path.join( + os.path.normpath(stagingdir), conc_text_file) + _remove_at_end.append(conc_text_path) + self.log.debug("__ conc_text_path: {}".format(conc_text_path)) + + new_line = "\n" + with open(conc_text_path, "w") as conc_text_f: + conc_text_f.writelines([ + "file {}".format( + slate_v_path.replace("\\", "/")), + new_line, + "file {}".format(input_path.replace("\\", "/")) + ]) + + # concat slate and videos together + conc_input_args = ["-y", "-f concat", "-safe 0"] + conc_input_args.append("-i {}".format(conc_text_path)) + + conc_output_args = ["-c copy"] + conc_output_args.append(output_path) + + concat_args = [ + ffmpeg_path, + " ".join(conc_input_args), + " ".join(conc_output_args) + ] + concat_subprcs_cmd = " ".join(concat_args) + + # ffmpeg concat subprocess + self.log.debug("Executing concat: {}".format(concat_subprcs_cmd)) + concat_output = pype.api.subprocess(concat_subprcs_cmd) + self.log.debug("Output concat: {}".format(concat_output)) + + self.log.debug("__ repre[tags]: {}".format(repre["tags"])) + repre_update = { + "files": output_file, + "name": repre["name"], + "tags": [x for x in repre["tags"] if x != "delete"], + "anatomy_template": "render" + } + inst_data["representations"][i].update(repre_update) + self.log.debug( + "_ representation {}: `{}`".format( + i, inst_data["representations"][i])) + + # removing temp files + for f in _remove_at_end: + os.remove(f) + self.log.debug("Removed: `{}`".format(f)) + + # Remove any representations tagged for deletion. 
+        for repre in list(inst_data.get("representations", [])):
+            if "delete" in repre.get("tags", []):
+                self.log.debug("Removing representation: {}".format(repre))
+                inst_data["representations"].remove(repre)
+
+        self.log.debug(inst_data["representations"])
+
+    def add_video_filter_args(self, args, inserting_arg):
+        """
+        Fixing video filter arguments to be one long string
+
+        Args:
+            args (list): list of string arguments
+            inserting_arg (str): string argument we want to add
+                (without flag `-vf`)
+
+        Returns:
+            str: long joined argument to be added back to list of arguments
+
+        """
+        # find all video format settings
+        vf_settings = [p for p in args
+                       for v in ["-filter:v", "-vf"]
+                       if v in p]
+        self.log.debug("_ vf_settings: `{}`".format(vf_settings))
+
+        # remove them from output args list
+        for p in vf_settings:
+            self.log.debug("_ remove p: `{}`".format(p))
+            args.remove(p)
+            self.log.debug("_ args: `{}`".format(args))
+
+        # strip them from all flags
+        vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "")
+                    for p in vf_settings]
+
+        self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
+        vf_fixed.insert(0, inserting_arg)
+        self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
+        # create new video filter setting
+        vf_back = "-vf " + ",".join(vf_fixed)
+
+        return vf_back

From 559ca48e187d65178c215de64fbaef565704e1e5 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 13 Jan 2020 02:28:31 +0100
Subject: [PATCH 107/393] feat(nuke): reworking slate process

---
 pype/plugins/nuke/publish/collect_writes.py | 14 --------------
 1 file changed, 14 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py
index 67b6e2e189..ff315a12ea 100644
--- a/pype/plugins/nuke/publish/collect_writes.py
+++ b/pype/plugins/nuke/publish/collect_writes.py
@@ -25,9 +25,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
 
         self.log.debug("checking instance: {}".format(instance))
 
-        # check if slate node available
-        slate_node = instance.data.get("slateNodeName")
-
         # Determine defined file type
         ext = node["file_type"].value()
 
@@ -43,10 +40,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         first_frame = int(nuke.root()["first_frame"].getValue())
         last_frame = int(nuke.root()["last_frame"].getValue())
 
-        # remove one frame at beginning if slate
-        if slate_node:
-            first_frame -= 1
-
        if node["use_limit"].getValue():
            handles = 0
            first_frame = int(node["first"].getValue())
@@ -108,12 +101,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "fps": instance.context.data["fps"]
         }
 
-        # if slate node then remove one frame from version data
-        if slate_node:
-            version_data.update({
-                "frameStart": (first_frame + 1) + handle_start,
-            })
-
         instance.data["family"] = "write"
         group_node = [x for x in instance if x.Class() == "Group"][0]
         deadlineChunkSize = 1
@@ -143,5 +130,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "subsetGroup": "renders"
         })
 
-
         self.log.debug("instance.data: {}".format(instance.data))

From d2455ee210893baa5176f124bbdaea7b7fe08913 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 13 Jan 2020 02:28:47 +0100
Subject: [PATCH 108/393] feat(nuke): reworking slate process

---
 pype/plugins/nuke/publish/extract_render_local.py | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py
index 19acc86273..825db67e9d 100644
--- a/pype/plugins/nuke/publish/extract_render_local.py
+++ b/pype/plugins/nuke/publish/extract_render_local.py
@@ -27,9 +27,6 @@ class NukeRenderLocal(pype.api.Extractor):
 
         self.log.debug("instance collected: {}".format(instance.data))
 
-        # check if slate node available
-        slate_node = instance.data.get("slateNodeName")
-
         first_frame = instance.data.get("frameStart", None)
         last_frame = instance.data.get("frameEnd", None)
         node_subset_name = instance.data.get("name", None)
@@ -83,11 +80,5 @@ class NukeRenderLocal(pype.api.Extractor):
         collection = collections[0]
         instance.data['collection'] = collection
 
-        if slate_node:
-            instance.data['frameStart'] = first_frame + 1
-            self.log.info(
-                'Removing slate frame: `{}`'.format(
-                    instance.data['frameStart']))
-
         self.log.info('Finished render')
         return

From 975683cd083b7c5346e4e04bdccbc94fc5066753 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 13 Jan 2020 02:29:04 +0100
Subject: [PATCH 109/393] feat(nuke): adding plugin extracting slate file

---
 .../nuke/publish/extract_slate_frame.py       | 141 ++++++++++++++++++
 1 file changed, 141 insertions(+)
 create mode 100644 pype/plugins/nuke/publish/extract_slate_frame.py

diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py
new file mode 100644
index 0000000000..6ad6451176
--- /dev/null
+++ b/pype/plugins/nuke/publish/extract_slate_frame.py
@@ -0,0 +1,141 @@
+import os
+import nuke
+from avalon.nuke import lib as anlib
+import pyblish.api
+import pype
+
+
+class ExtractSlateFrame(pype.api.Extractor):
+    """Extracts the slate frame with baked-in luts
+
+    must be run after extract_render_local.py
+
+    """
+
+    order = pyblish.api.ExtractorOrder + 0.01
+    label = "Extract Slate Frame"
+
+    families = ["slate"]
+    hosts = ["nuke"]
+
+    def process(self, instance):
+
+        with anlib.maintained_selection():
+            self.log.debug("instance: {}".format(instance))
+            self.log.debug("instance.data[families]: {}".format(
+                instance.data["families"]))
+
+            self.render_slate(instance)
+
+    def render_slate(self, instance):
+        node = instance[0]  # group node
+        self.log.info("Creating staging dir...")
+        if "representations" in instance.data:
+            staging_dir = instance.data[
+                "representations"][0]["stagingDir"].replace("\\", "/")
+            instance.data["stagingDir"] = staging_dir
+        else:
+            instance.data["representations"] = []
+            # get output path
+            render_path = instance.data['path']
+            staging_dir = os.path.normpath(os.path.dirname(render_path))
+            instance.data["stagingDir"] = staging_dir
+
+        self.log.info(
+            "StagingDir `{0}`...".format(instance.data["stagingDir"]))
+
+        temporary_nodes = []
+        collection = instance.data.get("collection", None)
+
+        if collection:
+            # get path
+            fname = os.path.basename(collection.format(
+                "{head}{padding}{tail}"))
+            fhead = collection.format("{head}")
+
+            # get first and last frame
+            first_frame = min(collection.indexes) - 1
+            last_frame = first_frame
+        else:
+            fname = os.path.basename(instance.data.get("path", None))
+            fhead = os.path.splitext(fname)[0] + "."
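+            # single file render: the slate sits one frame before frameStart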
+            first_frame = instance.data.get("frameStart", None) - 1
+            last_frame = first_frame
+
+        if "#" in fhead:
+            fhead = fhead.replace("#", "")[:-1]
+
+        previous_node = node
+
+        # get input process and connect it to baking
+        ipn = self.get_view_process_node()
+        if ipn is not None:
+            ipn.setInput(0, previous_node)
+            previous_node = ipn
+            temporary_nodes.append(ipn)
+
+        dag_node = nuke.createNode("OCIODisplay")
+        dag_node.setInput(0, previous_node)
+        previous_node = dag_node
+        temporary_nodes.append(dag_node)
+
+        # create write node
+        write_node = nuke.createNode("Write")
+        file = fhead + "slate.png"
+        name = "slate"
+        path = os.path.join(staging_dir, file).replace("\\", "/")
+        instance.data["slateFrame"] = path
+        write_node["file"].setValue(path)
+        write_node["file_type"].setValue("png")
+        write_node["raw"].setValue(1)
+        write_node.setInput(0, previous_node)
+        temporary_nodes.append(write_node)
+
+        repre = {
+            'name': name,
+            'ext': "png",
+            'files': file,
+            "stagingDir": staging_dir,
+            "frameStart": first_frame,
+            "frameEnd": last_frame,
+            "anatomy_template": "render"
+        }
+        instance.data["representations"].append(repre)
+
+        # Render frames
+        nuke.execute(write_node.name(), int(first_frame), int(last_frame))
+
+        self.log.debug(
+            "representations: {}".format(instance.data["representations"]))
+        self.log.debug(
+            "slate frame path: {}".format(instance.data["slateFrame"]))
+
+        # Clean up
+        for node in temporary_nodes:
+            nuke.delete(node)
+
+    def get_view_process_node(self):
+
+        # Select only the target node
+        if nuke.selectedNodes():
+            [n.setSelected(False) for n in nuke.selectedNodes()]
+
+        ipn_orig = None
+        for v in [n for n in nuke.allNodes()
+                  if "Viewer" in n.Class()]:
+            ip = v['input_process'].getValue()
+            ipn = v['input_process_node'].getValue()
+            if "VIEWER_INPUT" not in ipn and ip:
+                ipn_orig = nuke.toNode(ipn)
+                ipn_orig.setSelected(True)
+
+        if ipn_orig:
+            nuke.nodeCopy('%clipboard%')
+
+            [n.setSelected(False) for n in nuke.selectedNodes()]  # Deselect all
+
+            nuke.nodePaste('%clipboard%')
+
+            ipn = nuke.selectedNode()
+
+            return ipn

From fcde886e0af56a96d599e2e4556155c4a52f44ab Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Mon, 13 Jan 2020 09:42:03 +0100
Subject: [PATCH 110/393] hotfix - string conversion for pathlib path

---
 pype/plugins/global/publish/integrate_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c2812880c7..c78e9c6442 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -470,7 +470,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         None
         """
 
-        src = Path(src).resolve()
+        src = str(Path(src).resolve())
         drive, _path = os.path.splitdrive(dst)
         unc = Path(drive).resolve()
         dst = str(unc / _path)

From 68b33cbb9f0ef170da50547ccba5e34adbec2c1f Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 13 Jan 2020 12:55:46 +0100
Subject: [PATCH 111/393] fix(nuke): thumbnail `review` order error

---
 pype/plugins/nuke/publish/extract_thumbnail.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py
index 450bb39928..cdc3835d60 100644
--- a/pype/plugins/nuke/publish/extract_thumbnail.py
+++ b/pype/plugins/nuke/publish/extract_thumbnail.py
@@ -34,7 +34,6 @@ class ExtractThumbnail(pype.api.Extractor):
             staging_dir = instance.data[
                 "representations"][0]["stagingDir"].replace("\\", "/")
instance.data["stagingDir"] = staging_dir - instance.data["representations"][0]["tags"] = ["review"] else: instance.data["representations"] = [] # get output path @@ -119,7 +118,7 @@ class ExtractThumbnail(pype.api.Extractor): write_node["raw"].setValue(1) write_node.setInput(0, previous_node) temporary_nodes.append(write_node) - tags = ["thumbnail"] + tags = ["thumbnail", "review"] # retime for first_frame = int(last_frame) / 2 From 880a1132a547485e76e9613f0990d96522e554c1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jan 2020 13:07:06 +0100 Subject: [PATCH 112/393] fix(nuke): thumbnail doesn't need to add `review` tag --- pype/plugins/nuke/publish/extract_thumbnail.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index cdc3835d60..1fa6231f72 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -28,7 +28,7 @@ class ExtractThumbnail(pype.api.Extractor): self.render_thumbnail(instance) def render_thumbnail(self, instance): - node = instance[0] # group node + node = instance[0] # group node self.log.info("Creating staging dir...") if "representations" in instance.data: staging_dir = instance.data[ @@ -118,7 +118,7 @@ class ExtractThumbnail(pype.api.Extractor): write_node["raw"].setValue(1) write_node.setInput(0, previous_node) temporary_nodes.append(write_node) - tags = ["thumbnail", "review"] + tags = ["thumbnail"] # retime for first_frame = int(last_frame) / 2 From 4de66d1bc8cfc84d0b7255d7f3f82aa4c85bf675 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jan 2020 14:12:09 +0100 Subject: [PATCH 113/393] fix(global, nuke): review in thumbnail, string from path --- pype/plugins/global/publish/integrate_new.py | 6 ++---- pype/plugins/nuke/publish/extract_thumbnail.py | 5 ++--- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 36e27cdb3a..36e993c0e4 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -470,11 +470,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): None """ - src = Path(src).resolve() + src = str(Path(src).resolve()) drive, _path = os.path.splitdrive(dst) unc = Path(drive).resolve() dst = str(unc / _path) - src = str(src) self.log.debug("Copying file .. 
{} -> {}".format(src, dst)) dirname = os.path.dirname(dst) @@ -496,11 +495,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def hardlink_file(self, src, dst): dirname = os.path.dirname(dst) - src = Path(src).resolve() + src = str(Path(src).resolve()) drive, _path = os.path.splitdrive(dst) unc = Path(drive).resolve() dst = str(unc / _path) - src = str(src) try: os.makedirs(dirname) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index 450bb39928..efcfb4f87a 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -28,13 +28,12 @@ class ExtractThumbnail(pype.api.Extractor): self.render_thumbnail(instance) def render_thumbnail(self, instance): - node = instance[0] # group node + node = instance[0] # group node self.log.info("Creating staging dir...") if "representations" in instance.data: staging_dir = instance.data[ "representations"][0]["stagingDir"].replace("\\", "/") instance.data["stagingDir"] = staging_dir - instance.data["representations"][0]["tags"] = ["review"] else: instance.data["representations"] = [] # get output path @@ -165,7 +164,7 @@ class ExtractThumbnail(pype.api.Extractor): if ipn_orig: nuke.nodeCopy('%clipboard%') - [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all + [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all nuke.nodePaste('%clipboard%') From e964d466a0aff6bf04c385aab97406213e3b4786 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jan 2020 15:11:09 +0100 Subject: [PATCH 114/393] fix(nuke): validation preset was not defined correctly --- .../nuke/publish/validate_write_knobs.py | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/pype/plugins/nuke/publish/validate_write_knobs.py b/pype/plugins/nuke/publish/validate_write_knobs.py index 072ffd4b17..24572bedb3 100644 --- a/pype/plugins/nuke/publish/validate_write_knobs.py +++ b/pype/plugins/nuke/publish/validate_write_knobs.py @@ -8,24 +8,31 @@ class ValidateNukeWriteKnobs(pyblish.api.ContextPlugin): """Ensure knobs are consistent. Knobs to validate and their values comes from the - "nuke/knobs.json" preset, which needs this structure: - { - "family": { - "knob_name": knob_value - } - } + + Example for presets in config: + "presets/plugins/nuke/publish.json" preset, which needs this structure: + "ValidateNukeWriteKnobs": { + "enabled": true, + "knobs": { + "family": { + "knob_name": knob_value + } + } + } """ order = pyblish.api.ValidatorOrder - label = "Knobs" + label = "Validate Write Knobs" hosts = ["nuke"] actions = [pype.api.RepairContextAction] optional = True def process(self, context): # Check for preset existence. - if not context.data["presets"]["nuke"].get("knobs"): + if not getattr(self, "knobs"): return + + self.log.debug("__ self.knobs: {}".format(self.knobs)) invalid = self.get_invalid(context, compute=True) if invalid: @@ -43,7 +50,6 @@ class ValidateNukeWriteKnobs(pyblish.api.ContextPlugin): @classmethod def get_invalid_knobs(cls, context): - presets = context.data["presets"]["nuke"]["knobs"] invalid_knobs = [] for instance in context: # Filter publisable instances. @@ -53,15 +59,15 @@ class ValidateNukeWriteKnobs(pyblish.api.ContextPlugin): # Filter families. 
families = [instance.data["family"]] families += instance.data.get("families", []) - families = list(set(families) & set(presets.keys())) + families = list(set(families) & set(cls.knobs.keys())) if not families: continue # Get all knobs to validate. knobs = {} for family in families: - for preset in presets[family]: - knobs.update({preset: presets[family][preset]}) + for preset in cls.knobs[family]: + knobs.update({preset: cls.knobs[family][preset]}) # Get invalid knobs. nodes = [] From c1fbd21f098547670193bb04292a764a466caa0f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 14 Jan 2020 09:47:39 +0100 Subject: [PATCH 115/393] feat(nuke): create plugin `prerender` --- pype/plugins/nuke/create/create_write.py | 208 ++++++++++++++--------- 1 file changed, 124 insertions(+), 84 deletions(-) diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index f522c50511..a85408cab3 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -1,18 +1,14 @@ from collections import OrderedDict -import avalon.api -import avalon.nuke -from pype import api as pype from pype.nuke import plugin -from pypeapp import config - import nuke + class CreateWriteRender(plugin.PypeCreator): # change this to template preset name = "WriteRender" label = "Create Write Render" hosts = ["nuke"] - nClass = "write" + n_class = "write" family = "render" icon = "sign-out" defaults = ["Main", "Mask"] @@ -23,7 +19,7 @@ class CreateWriteRender(plugin.PypeCreator): data = OrderedDict() data["family"] = self.family - data["families"] = self.nClass + data["families"] = self.n_class for k, v in self.data.items(): if k not in data.keys(): @@ -31,7 +27,100 @@ class CreateWriteRender(plugin.PypeCreator): self.data = data self.nodes = nuke.selectedNodes() - self.log.info("self.data: '{}'".format(self.data)) + self.log.debug("_ self.data: '{}'".format(self.data)) + + def process(self): + from pype.nuke import lib as pnlib + + inputs = [] + outputs = [] + instance = nuke.toNode(self.data["subset"]) + selected_node = None + + # use selection + if (self.options or {}).get("useSelection"): + nodes = self.nodes + + assert len(nodes) < 2, self.log.error( + "Select only one node. 
The node you want to connect to, " + "or tick off `Use selection`") + + selected_node = nodes[0] + inputs = [selected_node] + outputs = selected_node.dependent() + + if instance: + if (instance.name() in selected_node.name()): + selected_node = instance.dependencies()[0] + + # if node already exist + if instance: + # collect input / outputs + inputs = instance.dependencies() + outputs = instance.dependent() + selected_node = inputs[0] + # remove old one + nuke.delete(instance) + + # recreate new + write_data = { + "class": self.n_class, + "families": [self.family], + "avalon": self.data + } + + if self.presets.get('fpath_template'): + self.log.info("Adding template path from preset") + write_data.update( + {"fpath_template": self.presets["fpath_template"]} + ) + else: + self.log.info("Adding template path from plugin") + write_data.update({ + "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"}) + + write_node = pnlib.create_write_node( + self.data["subset"], + write_data, + input=selected_node) + + # relinking to collected connections + for i, input in enumerate(inputs): + write_node.setInput(i, input) + + write_node.autoplace() + + for output in outputs: + output.setInput(0, write_node) + + return write_node + + +class CreateWritePrerender(plugin.PypeCreator): + # change this to template preset + name = "WritePrerender" + label = "Create Write Prerender" + hosts = ["nuke"] + n_class = "write" + family = "prerender" + icon = "sign-out" + defaults = ["Key01", "Bg01", "Fg01", "Branch01", "Part01"] + + def __init__(self, *args, **kwargs): + super(CreateWritePrerender, self).__init__(*args, **kwargs) + + data = OrderedDict() + + data["family"] = self.family + data["families"] = self.n_class + + for k, v in self.data.items(): + if k not in data.keys(): + data.update({k: v}) + + self.data = data + self.nodes = nuke.selectedNodes() + self.log.debug("_ self.data: '{}'".format(self.data)) def process(self): from pype.nuke import lib as pnlib @@ -66,7 +155,7 @@ class CreateWriteRender(plugin.PypeCreator): # recreate new write_data = { - "class": self.nClass, + "class": self.n_class, "families": [self.family], "avalon": self.data } @@ -79,12 +168,13 @@ class CreateWriteRender(plugin.PypeCreator): else: self.log.info("Adding template path from plugin") write_data.update({ - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"}) + "fpath_template": "{work}/prerenders/nuke/{subset}/{subset}.{frame}.{ext}"}) write_node = pnlib.create_write_node( self.data["subset"], write_data, - input=selected_node) + input=selected_node, + prenodes=[]) # relinking to collected connections for i, input in enumerate(inputs): @@ -95,77 +185,27 @@ class CreateWriteRender(plugin.PypeCreator): for output in outputs: output.setInput(0, write_node) - return write_node + # open group node + write_node.begin() + for n in nuke.allNodes(): + # get write node + if n.Class() in "Write": + w_node = n + write_node.end() -# -# class CreateWritePrerender(avalon.nuke.Creator): -# # change this to template preset -# preset = "prerender" -# -# name = "WritePrerender" -# label = "Create Write Prerender" -# hosts = ["nuke"] -# family = "{}_write".format(preset) -# families = preset -# icon = "sign-out" -# defaults = ["Main", "Mask"] -# -# def __init__(self, *args, **kwargs): -# super(CreateWritePrerender, self).__init__(*args, **kwargs) -# self.presets = config.get_presets()['plugins']["nuke"]["create"].get( -# self.__class__.__name__, {} -# ) -# -# data = OrderedDict() -# -# data["family"] = 
self.family.split("_")[1] -# data["families"] = self.families -# -# {data.update({k: v}) for k, v in self.data.items() -# if k not in data.keys()} -# self.data = data -# -# def process(self): -# self.name = self.data["subset"] -# -# instance = nuke.toNode(self.data["subset"]) -# node = 'write' -# -# if not instance: -# write_data = { -# "class": node, -# "preset": self.preset, -# "avalon": self.data -# } -# -# if self.presets.get('fpath_template'): -# self.log.info("Adding template path from preset") -# write_data.update( -# {"fpath_template": self.presets["fpath_template"]} -# ) -# else: -# self.log.info("Adding template path from plugin") -# write_data.update({ -# "fpath_template": "{work}/prerenders/{subset}/{subset}.{frame}.{ext}"}) -# -# # get group node -# group_node = create_write_node(self.data["subset"], write_data) -# -# # open group node -# group_node.begin() -# for n in nuke.allNodes(): -# # get write node -# if n.Class() in "Write": -# write_node = n -# group_node.end() -# -# # linking knobs to group property panel -# linking_knobs = ["first", "last", "use_limit"] -# for k in linking_knobs: -# lnk = nuke.Link_Knob(k) -# lnk.makeLink(write_node.name(), k) -# lnk.setName(k.replace('_', ' ').capitalize()) -# lnk.clearFlag(nuke.STARTLINE) -# group_node.addKnob(lnk) -# -# return + # add inner write node Tab + write_node.addKnob(nuke.Tab_Knob("WriteLinkedKnobs")) + + # linking knobs to group property panel + linking_knobs = ["channels", "___", "first", "last", "use_limit"] + for k in linking_knobs: + if "___" in k: + write_node.addKnob(nuke.Text_Knob('')) + else: + lnk = nuke.Link_Knob(k) + lnk.makeLink(w_node.name(), k) + lnk.setName(k.replace('_', ' ').capitalize()) + lnk.clearFlag(nuke.STARTLINE) + write_node.addKnob(lnk) + + return write_node From 033d5fe92e754d71204cffa72ead689e955479fc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 14 Jan 2020 09:48:26 +0100 Subject: [PATCH 116/393] clean(nuke): redundant file --- pype/plugins/nuke/create/create_read_plate | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 pype/plugins/nuke/create/create_read_plate diff --git a/pype/plugins/nuke/create/create_read_plate b/pype/plugins/nuke/create/create_read_plate deleted file mode 100644 index 90a47cb55e..0000000000 --- a/pype/plugins/nuke/create/create_read_plate +++ /dev/null @@ -1,8 +0,0 @@ -# create publishable read node usually used for enabling version tracking -# also useful for sharing across shots or assets - -# if read nodes are selected it will convert them to centainer -# if no read node selected it will create read node and offer browser to shot resource folder - -# type movie > mov or imagesequence -# type still > matpaint .psd, .tif, .png, From 968ba9e68d6fdbdfc08992dc5845a056d844eebb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 14 Jan 2020 11:54:57 +0100 Subject: [PATCH 117/393] feat(nuke): generate_mov adding specification of mov to prores444 --- pype/nuke/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 4faea1da36..b8d484c701 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1511,6 +1511,9 @@ class ExporterReviewMov(ExporterReview): self.instance.data["baked_colorspace_movie"] = self.path write_node["file"].setValue(self.path) write_node["file_type"].setValue(self.ext) + write_node["meta_codec"].setValue("ap4h") + write_node["mov64_codec"].setValue("ap4h") + write_node["mov64_write_timecode"].setValue(1) write_node["raw"].setValue(1) # connect write_node.setInput(0, self.previous_node) 
From 7d14c01bbad7158d406c5262841f1107445059a4 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 14 Jan 2020 11:55:22 +0100
Subject: [PATCH 118/393] feat(nuke): adding mov default to sRGB

---
 setup/nuke/nuke_path/init.py | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 setup/nuke/nuke_path/init.py

diff --git a/setup/nuke/nuke_path/init.py b/setup/nuke/nuke_path/init.py
new file mode 100644
index 0000000000..0ea5d1ad7d
--- /dev/null
+++ b/setup/nuke/nuke_path/init.py
@@ -0,0 +1,2 @@
+# default write mov
+nuke.knobDefault('Write.mov.colorspace', 'sRGB')

From 54c3fa9dac5ff8ee3244dc0ec9c5865a91b429e8 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 14 Jan 2020 13:56:12 +0100
Subject: [PATCH 119/393] feat(nuke): slate does not need to be a representation

---
 pype/plugins/nuke/publish/extract_slate_frame.py | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py
index 6ad6451176..be2dbdaf04 100644
--- a/pype/plugins/nuke/publish/extract_slate_frame.py
+++ b/pype/plugins/nuke/publish/extract_slate_frame.py
@@ -82,7 +82,6 @@ class ExtractSlateFrame(pype.api.Extractor):
         # create write node
         write_node = nuke.createNode("Write")
         file = fhead + "slate.png"
-        name = "slate"
         path = os.path.join(staging_dir, file).replace("\\", "/")
         instance.data["slateFrame"] = path
         write_node["file"].setValue(path)
@@ -91,17 +90,6 @@ class ExtractSlateFrame(pype.api.Extractor):
         write_node.setInput(0, previous_node)
         temporary_nodes.append(write_node)
 
-        repre = {
-            'name': name,
-            'ext': "png",
-            'files': file,
-            "stagingDir": staging_dir,
-            "frameStart": first_frame,
-            "frameEnd": last_frame,
-            "anatomy_template": "render"
-        }
-        instance.data["representations"].append(repre)
-
         # Render frames
         nuke.execute(write_node.name(), int(first_frame), int(last_frame))
 

From 56943e77b329a0d4b727d2f570132f46bb8f7ddc Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 14 Jan 2020 14:06:45 +0100
Subject: [PATCH 120/393] feat(global): accepting slate frame in farm collector

---
 pype/plugins/global/publish/collect_filesequences.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 1214657856..48651ec508 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -98,6 +98,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
     def process(self, context):
         pixel_aspect = 1
         lut_path = None
+        slate_frame = None
         if os.environ.get("PYPE_PUBLISH_PATHS"):
             paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
             self.log.info("Collecting paths: {}".format(paths))
@@ -146,13 +147,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                     os.environ.update(session)
                     instance = metadata.get("instance")
                     if instance:
-                        # here is the place to add ability for nuke noninteractive
-                        # ______________________________________
                         instance_family = instance.get("family")
                         pixel_aspect = instance.get("pixelAspect", 1)
                         resolution_width = instance.get("resolutionWidth", 1920)
                         resolution_height = instance.get("resolutionHeight", 1080)
                         lut_path = instance.get("lutPath", None)
+                        slate_frame = instance.get("slateFrame", None)
 
                 else:
                     # Search in directory
@@ -271,6 +271,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                     "fps": fps,
                     "source": data.get("source", ""),
                     "pixelAspect": pixel_aspect,
+                    "slateFrame": slate_frame
                 }
             )

From 
0ae3c2dc9c34724dc5a3d4e8b86fe769cafe1a7d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 14 Jan 2020 14:36:36 +0100 Subject: [PATCH 121/393] feat(nuke): comments to slate_node --- .../nuke/publish/collect_slate_node.py | 2 +- .../nuke/publish/extract_slate_frame.py | 21 +++++++++++++++++-- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_slate_node.py b/pype/plugins/nuke/publish/collect_slate_node.py index 5253f29889..33e1302314 100644 --- a/pype/plugins/nuke/publish/collect_slate_node.py +++ b/pype/plugins/nuke/publish/collect_slate_node.py @@ -31,7 +31,7 @@ class CollectSlate(pyblish.api.InstancePlugin): break if slate_node: - instance.data["slateNodeName"] = slate_node.name() + instance.data["slateNodeName"] = slate_node instance.data["families"].append("slate") self.log.info( "Slate node is in node graph: `{}`".format(slate.name())) diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py index be2dbdaf04..753707ec65 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -100,7 +100,10 @@ class ExtractSlateFrame(pype.api.Extractor): # Clean up for node in temporary_nodes: - nuke.delete(node) + nuke.delete(node + + # fill slate node with comments + self.add_comment_slate_node(instance) def get_view_process_node(self): @@ -120,10 +123,24 @@ class ExtractSlateFrame(pype.api.Extractor): if ipn_orig: nuke.nodeCopy('%clipboard%') - [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all + [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all nuke.nodePaste('%clipboard%') ipn = nuke.selectedNode() return ipn + + def add_comment_slate_node(self, instance): + node = instance.data.get("slateNodeName") + if not node: + return + + comment = instance.context.data.get("comment") + intent = instance.context.data.get("intent") + + try: + node["f_submission_note"].setValue(comment) + node["f_submitting_for"].setValue(intent) + except NameError: + return From d5bb42212174d805c643f3614b2546980d950d19 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 14 Jan 2020 15:11:48 +0100 Subject: [PATCH 122/393] fix(nuke): rename attribute, missing bracket --- pype/plugins/nuke/publish/collect_slate_node.py | 2 +- pype/plugins/nuke/publish/extract_slate_frame.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_slate_node.py b/pype/plugins/nuke/publish/collect_slate_node.py index 33e1302314..1b6cef4bef 100644 --- a/pype/plugins/nuke/publish/collect_slate_node.py +++ b/pype/plugins/nuke/publish/collect_slate_node.py @@ -31,7 +31,7 @@ class CollectSlate(pyblish.api.InstancePlugin): break if slate_node: - instance.data["slateNodeName"] = slate_node + instance.data["slateNode"] = slate_node instance.data["families"].append("slate") self.log.info( "Slate node is in node graph: `{}`".format(slate.name())) diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py index 753707ec65..779745b2e1 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -100,7 +100,7 @@ class ExtractSlateFrame(pype.api.Extractor): # Clean up for node in temporary_nodes: - nuke.delete(node + nuke.delete(node) # fill slate node with comments self.add_comment_slate_node(instance) @@ -132,7 +132,7 @@ class ExtractSlateFrame(pype.api.Extractor): return ipn def 
add_comment_slate_node(self, instance): - node = instance.data.get("slateNodeName") + node = instance.data.get("slateNode") if not node: return From c55a182ee89a8940d9ba879e38e40de1f8e83f33 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 14 Jan 2020 15:42:03 +0100 Subject: [PATCH 123/393] fix(global): fix integrate_new p27 nukestudio pathlib error --- pype/plugins/global/publish/integrate_new.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 92614b3c90..9936cd1671 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -473,8 +473,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): src = str(Path(src).resolve()) drive, _path = os.path.splitdrive(dst) unc = Path(drive).resolve() - dst = str(unc / _path) - src = str(src) + dst = str(unc) + _path self.log.debug("Copying file .. {} -> {}".format(src, dst)) dirname = os.path.dirname(dst) @@ -495,12 +494,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def hardlink_file(self, src, dst): dirname = os.path.dirname(dst) - + src = Path(src).resolve() drive, _path = os.path.splitdrive(dst) unc = Path(drive).resolve() - dst = str(unc / _path) - src = str(src) + dst = str(unc) + _path try: os.makedirs(dirname) From c43ae7cb5f49de1db34584e312c6d83a5b781793 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 14 Jan 2020 15:43:26 +0100 Subject: [PATCH 124/393] allow exporting multiple arnold standins from single scene. --- pype/plugins/maya/publish/collect_ass.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pype/plugins/maya/publish/collect_ass.py b/pype/plugins/maya/publish/collect_ass.py index c0174e7026..8e6691120a 100644 --- a/pype/plugins/maya/publish/collect_ass.py +++ b/pype/plugins/maya/publish/collect_ass.py @@ -21,15 +21,17 @@ class CollectAssData(pyblish.api.InstancePlugin): objsets = instance.data['setMembers'] for objset in objsets: + objset = str(objset) members = cmds.sets(objset, query=True) if members is None: self.log.warning("Skipped empty instance: \"%s\" " % objset) continue - if objset == "content_SET": + if "content_SET" in objset: instance.data['setMembers'] = members - elif objset == "proxy_SET": + self.log.debug('content members: {}'.format(members)) + elif objset.startswith("proxy_SET"): assert len(members) == 1, "You have multiple proxy meshes, please only use one" instance.data['proxy'] = members - + self.log.debug('proxy members: {}'.format(members)) self.log.debug("data: {}".format(instance.data)) From d6b9ac36d50cc4175814d378298cb4a0fb2c5675 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 14 Jan 2020 15:50:48 +0100 Subject: [PATCH 125/393] rename creator and collector --- .../blender/create/{submarine_model.py => create_model.py} | 0 pype/plugins/blender/load/{submarine_model.py => load_model.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename pype/plugins/blender/create/{submarine_model.py => create_model.py} (100%) rename pype/plugins/blender/load/{submarine_model.py => load_model.py} (100%) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/create_model.py similarity index 100% rename from pype/plugins/blender/create/submarine_model.py rename to pype/plugins/blender/create/create_model.py diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/load_model.py similarity index 100% rename from 
pype/plugins/blender/load/submarine_model.py rename to pype/plugins/blender/load/load_model.py From 1a6462d35872a81dfa15c83d36e5fd28bfe618cc Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 14 Jan 2020 16:16:19 +0100 Subject: [PATCH 126/393] submitting jobs with limited env, pype location determination --- .../global/publish/submit_publish_job.py | 6 +- .../maya/publish/submit_maya_deadline.py | 75 ++----------------- pype/scripts/publish_filesequence.py | 29 ++++++- 3 files changed, 37 insertions(+), 73 deletions(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 9c72ece73c..8d189cc7b3 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -149,7 +149,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", - "PYPE_ROOT" + "PYPE_ROOT", + "PYPE_METADATA_FILE" ] def _submit_deadline_post_job(self, instance, job): @@ -192,7 +193,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "PluginInfo": { "Version": "3.6", "ScriptFile": _get_script(), - "Arguments": '--paths "{}"'.format(metadata_path), + "Arguments": "", "SingleFrameOnly": "True" }, @@ -204,6 +205,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # job so they use the same environment environment = job["Props"].get("Env", {}) + environment["PYPE_METADATA_FILE"] = metadata_path i = 0 for index, key in enumerate(environment): self.log.info("KEY: {}".format(key)) diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py index 55c04e9c41..e3fa79b1c8 100644 --- a/pype/plugins/maya/publish/submit_maya_deadline.py +++ b/pype/plugins/maya/publish/submit_maya_deadline.py @@ -228,80 +228,19 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "AuxFiles": [] } - # Include critical environment variables with submission + # We need those to pass them to pype for it to set correct context keys = [ - # This will trigger `userSetup.py` on the slave - # such that proper initialisation happens the same - # way as it does on a local machine. - # TODO(marcus): This won't work if the slaves don't - # have accesss to these paths, such as if slaves are - # running Linux and the submitter is on Windows. 
- "PYTHONPATH", - "PATH", - - "MTOA_EXTENSIONS_PATH", - "MTOA_EXTENSIONS", - "DYLD_LIBRARY_PATH", - "MAYA_RENDER_DESC_PATH", - "MAYA_MODULE_PATH", - "ARNOLD_PLUGIN_PATH", - "AVALON_SCHEMA", "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", - "PYBLISHPLUGINPATH", - - # todo: This is a temporary fix for yeti variables - "PEREGRINEL_LICENSE", - "SOLIDANGLE_LICENSE", - "ARNOLD_LICENSE" - "MAYA_MODULE_PATH", - "TOOL_ENV" + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "PYPE_USERNAME" ] + environment = dict({key: os.environ[key] for key in keys if key in os.environ}, **api.Session) - # self.log.debug("enviro: {}".format(pprint(environment))) - for path in os.environ: - if path.lower().startswith('pype_'): - environment[path] = os.environ[path] - - environment["PATH"] = os.environ["PATH"] - # self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS'])) - clean_environment = {} - for key in environment: - clean_path = "" - self.log.debug("key: {}".format(key)) - self.log.debug("value: {}".format(environment[key])) - to_process = str(environment[key]) - if key == "PYPE_STUDIO_CORE_MOUNT": - clean_path = to_process - elif "://" in to_process: - clean_path = to_process - elif os.pathsep not in str(to_process): - try: - path = to_process - path.decode('UTF-8', 'strict') - clean_path = os.path.normpath(path) - except UnicodeDecodeError: - print('path contains non UTF characters') - else: - for path in to_process.split(os.pathsep): - try: - path.decode('UTF-8', 'strict') - clean_path += os.path.normpath(path) + os.pathsep - except UnicodeDecodeError: - print('path contains non UTF characters') - - if key == "PYTHONPATH": - clean_path = clean_path.replace('python2', 'python3') - clean_path = clean_path.replace( - os.path.normpath( - environment['PYPE_STUDIO_CORE_MOUNT']), # noqa - os.path.normpath( - environment['PYPE_STUDIO_CORE_PATH'])) # noqa - clean_environment[key] = clean_path - - environment = clean_environment payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( @@ -319,7 +258,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): self.preflight_check(instance) - self.log.info("Submitting..") + self.log.info("Submitting ...") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) # E.g. http://192.168.0.1:8082/api/jobs diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py index 5517cfeb4c..167f7bea17 100644 --- a/pype/scripts/publish_filesequence.py +++ b/pype/scripts/publish_filesequence.py @@ -1,9 +1,12 @@ """This module is used for command line publishing of image sequences.""" import os +import sys +import argparse import logging import subprocess import platform + try: from shutil import which except ImportError: @@ -23,7 +26,7 @@ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" def __main__(): - import argparse + parser = argparse.ArgumentParser() parser.add_argument("--paths", nargs="*", @@ -43,7 +46,27 @@ def __main__(): print("Running pype ...") auto_pype_root = os.path.dirname(os.path.abspath(__file__)) auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..") - auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root + # we need to use `auto_pype_root` to be able to remap locations. + # This is egg and chicken case: we need to know our storage locations + # to be able to remap them on different platforms but if we got `PYPE_ROOT` + # variable, we cannot be sure it originated on different platform and is + # therefor invalid. 
+ # So we use auto_pype_root to get to `PypeLauncher.path_remapper()`. This + # will load Storage environments and is able to remap environment to + # correct paths. + sys.path.append(auto_pype_root) + try: + from pypeapp import PypeLauncher + except ImportError: + print("!!! Error: cannot determine Pype location.") + print("--- we are looking at {}, but this is not Pype.".format( + auto_pype_root)) + + remapped_env = PypeLauncher.path_remapper() + auto_pype_root = remapped_env.get('PYPE_ROOT') or auto_pype_root + if remapped_env.get('PYPE_ROOT'): + print("Got Pype location from environment: {}".format( + remapped_env.get('PYPE_ROOT'))) pype_command = "pype.ps1" if platform.system().lower() == "linux": @@ -81,7 +104,7 @@ def __main__(): # Forcing forwaring the environment because environment inheritance does # not always work. # Cast all values in environment to str to be safe - env = {k: str(v) for k, v in os.environ.items()} + env = {k: str(v) for k, v in remapped_env.items()} exit_code = subprocess.call(args, env=env) if exit_code != 0: raise RuntimeError("Publishing failed.") From 71e63ec3e971e275f0dec3a930f85175c7532f37 Mon Sep 17 00:00:00 2001 From: "jakub@orbi.tools" Date: Tue, 14 Jan 2020 16:40:50 +0100 Subject: [PATCH 127/393] fix(nuke): better place for intent to add to slatenode --- .../nuke/publish/extract_slate_frame.py | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py index 779745b2e1..333cd9f7c1 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -18,7 +18,12 @@ class ExtractSlateFrame(pype.api.Extractor): families = ["slate"] hosts = ["nuke"] + def process(self, instance): + if hasattr(self, "viewer_lut_raw"): + self.viewer_lut_raw = self.viewer_lut_raw + else: + self.viewer_lut_raw = False with anlib.maintained_selection(): self.log.debug("instance: {}".format(instance)) @@ -74,10 +79,11 @@ class ExtractSlateFrame(pype.api.Extractor): previous_node = ipn temporary_nodes.append(ipn) - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) + if not self.viewer_lut_raw: + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) # create write node write_node = nuke.createNode("Write") @@ -90,11 +96,12 @@ class ExtractSlateFrame(pype.api.Extractor): write_node.setInput(0, previous_node) temporary_nodes.append(write_node) + # fill slate node with comments + self.add_comment_slate_node(instance) + # Render frames nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - self.log.debug( - "representations: {}".format(instance.data["representations"])) self.log.debug( "slate frame path: {}".format(instance.data["slateFrame"])) @@ -102,8 +109,6 @@ class ExtractSlateFrame(pype.api.Extractor): for node in temporary_nodes: nuke.delete(node) - # fill slate node with comments - self.add_comment_slate_node(instance) def get_view_process_node(self): From a9257df48ebc14e2f3c91c703bd89f996d585173 Mon Sep 17 00:00:00 2001 From: "jakub@orbi.tools" Date: Tue, 14 Jan 2020 19:51:18 +0100 Subject: [PATCH 128/393] nuke fixing deadline submission --- pype/nuke/lib.py | 8 +++--- .../nuke/publish/extract_review_data_mov.py | 25 +++++++++++-------- .../nuke/publish/extract_slate_frame.py | 3 ++- 
.../plugins/nuke/publish/extract_thumbnail.py | 10 ++++++-- setup/nuke/nuke_path/menu.py | 2 +- 5 files changed, 29 insertions(+), 19 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index b8d484c701..36270f37df 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1460,14 +1460,13 @@ class ExporterReviewMov(ExporterReview): self.log.info("Rendered...") def save_file(self): + import shutil with anlib.maintained_selection(): self.log.info("Saving nodes as file... ") - # select temp nodes - anlib.select_nodes(self._temp_nodes) # create nk path path = os.path.splitext(self.path)[0] + ".nk" # save file to the path - nuke.nodeCopy(path) + shutil.copyfile(self.instance.context.data["currentFile"], path) self.log.info("Nodes exported...") return path @@ -1524,6 +1523,7 @@ class ExporterReviewMov(ExporterReview): # ---------- render or save to nk if farm: + nuke.scriptSave() path_nk = self.save_file() self.data.update({ "bakeScriptPath": path_nk, @@ -1542,7 +1542,7 @@ class ExporterReviewMov(ExporterReview): #---------- Clean up self.clean_nodes() - + nuke.scriptSave() return self.data diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 333774bcd7..69347bfafc 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -16,23 +16,26 @@ class ExtractReviewDataMov(pype.api.Extractor): order = pyblish.api.ExtractorOrder + 0.01 label = "Extract Review Data Mov" - families = ["review"] + families = ["review", "render", "render.local"] hosts = ["nuke"] def process(self, instance): families = instance.data["families"] + self.log.info("Creating staging dir...") + self.log.debug( + "__ representations: `{}`".format( + instance.data["representations"])) if "representations" in instance.data: - staging_dir = instance.data[ - "representations"][0]["stagingDir"].replace("\\", "/") - instance.data["stagingDir"] = staging_dir - instance.data["representations"][0]["tags"] = [] - else: - instance.data["representations"] = [] - # get output path - render_path = instance.data['path'] - staging_dir = os.path.normpath(os.path.dirname(render_path)) - instance.data["stagingDir"] = staging_dir + if instance.data["representations"] == []: + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + else: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["representations"][0]["tags"] = [] + instance.data["stagingDir"] = staging_dir self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py index 333cd9f7c1..a0f1c06828 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -98,7 +98,7 @@ class ExtractSlateFrame(pype.api.Extractor): # fill slate node with comments self.add_comment_slate_node(instance) - + # Render frames nuke.execute(write_node.name(), int(first_frame), int(last_frame)) @@ -149,3 +149,4 @@ class ExtractSlateFrame(pype.api.Extractor): node["f_submitting_for"].setValue(intent) except NameError: return + instance.data.pop("slateNode") diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index 1fa6231f72..dee6453671 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py 
+++ b/pype/plugins/nuke/publish/extract_thumbnail.py
@@ -30,9 +30,15 @@ class ExtractThumbnail(pype.api.Extractor):
     def render_thumbnail(self, instance):
         node = instance[0]  # group node
         self.log.info("Creating staging dir...")
+        self.log.debug(
+            "_ representations `{0}`".format(instance.data["representations"]))
         if "representations" in instance.data:
-            staging_dir = instance.data[
-                "representations"][0]["stagingDir"].replace("\\", "/")
+            try:
+                staging_dir = instance.data[
+                    "representations"][0]["stagingDir"].replace("\\", "/")
+            except IndexError:
+                path = instance.data["path"]
+                staging_dir = os.path.dirname(path)
             instance.data["stagingDir"] = staging_dir
         else:
             instance.data["representations"] = []
diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py
index fd87c98246..8c92b77e80 100644
--- a/setup/nuke/nuke_path/menu.py
+++ b/setup/nuke/nuke_path/menu.py
@@ -16,6 +16,6 @@ log = Logger().get_logger(__name__, "nuke")
 nuke.addOnScriptSave(onScriptLoad)
 nuke.addOnScriptLoad(checkInventoryVersions)
 nuke.addOnScriptSave(checkInventoryVersions)
-nuke.addOnScriptSave(writes_version_sync)
+# nuke.addOnScriptSave(writes_version_sync)
 
 log.info('Automatic syncing of write file knob to script version')

From 3f1d5100e7baa3147b1d5eb2dac287ef6f3d0eb2 Mon Sep 17 00:00:00 2001
From: "jakub@orbi.tools"
Date: Tue, 14 Jan 2020 19:51:50 +0100
Subject: [PATCH 129/393] global fixing unc path submission and deadline
 integration

---
 .../global/publish/collect_filesequences.py  |  9 ++++-
 pype/plugins/global/publish/integrate_new.py | 38 +++++++++++++------
 .../global/publish/submit_publish_job.py     | 26 ++++++++-----
 3 files changed, 52 insertions(+), 21 deletions(-)

diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 48651ec508..9d065946ed 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -99,6 +99,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
         pixel_aspect = 1
         lut_path = None
         slate_frame = None
+        families_data = None
+        subset = None
         if os.environ.get("PYPE_PUBLISH_PATHS"):
             paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
             self.log.info("Collecting paths: {}".format(paths))
@@ -152,7 +154,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                         resolution_width = instance.get("resolutionWidth", 1920)
                         resolution_height = instance.get("resolutionHeight", 1080)
                         lut_path = instance.get("lutPath", None)
-                        slate_frame = instance.get("slateFrame", None)
+                        baked_mov_path = instance.get("bakeRenderPath")
+                        subset = instance.get("subset")
+                        families_data = instance.get("families")
+                        slate_frame = instance.get("slateFrame")
 
                 else:
                     # Search in directory
@@ -197,6 +202,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                     families.append("ftrack")
                 if "write" in instance_family:
                     families.append("write")
+                if families_data and "slate" in families_data:
+                    families.append("slate")
 
                 if data.get("attachTo"):
                     # we need to attach found collections to existing
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 9936cd1671..a15c296b11 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -357,7 +357,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 dst_head,
                 dst_start_frame,
                 dst_tail).replace("..", ".")
-            repre['published_path'] = dst
+            repre['published_path'] = self.unc_convert(dst)
 
         else:
             # Single file
@@
-386,7 +386,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): instance.data["transfers"].append([src, dst]) - repre['published_path'] = dst + repre['published_path'] = self.unc_convert(dst) self.log.debug("__ dst: {}".format(dst)) representation = { @@ -460,6 +460,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("Hardlinking file .. {} -> {}".format(src, dest)) self.hardlink_file(src, dest) + def unc_convert(self, path): + self.log.debug("_ path .. `{}`".format(path)) + drive, _path = os.path.splitdrive(path) + self.log.debug("_ drive, _path .. `{}`, `{}`".format(drive, _path)) + unc = Path(drive).resolve() + self.log.debug("_ unc.resolved .. `{}`".format(unc)) + path = str(unc) + _path + self.log.debug("_ path.resolved .. `{}`".format(path)) + + if not os.path.exists(str(unc)): + self.log.info("_ converting to unc from environments ..") + path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH") + path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT") + self.log.debug("_ path_replace .. `{}`".format(path_replace)) + self.log.debug("_ path_mount .. `{}`".format(path_mount)) + if "/" in path_mount: + path = path.replace(path_mount[0:-1], path_replace) + else: + path = path.replace(path_mount, path_replace) + return path + def copy_file(self, src, dst): """ Copy given source to destination @@ -469,11 +490,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: None """ - - src = str(Path(src).resolve()) - drive, _path = os.path.splitdrive(dst) - unc = Path(drive).resolve() - dst = str(unc) + _path + src = self.unc_convert(src) + dst = self.unc_convert(dst) self.log.debug("Copying file .. {} -> {}".format(src, dst)) dirname = os.path.dirname(dst) @@ -495,10 +513,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def hardlink_file(self, src, dst): dirname = os.path.dirname(dst) - src = Path(src).resolve() - drive, _path = os.path.splitdrive(dst) - unc = Path(drive).resolve() - dst = str(unc) + _path + src = self.unc_convert(src) + dst = self.unc_convert(dst) try: os.makedirs(dirname) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 12737880d0..11d4b15f9d 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -204,19 +204,27 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # job so they use the same environment environment = job["Props"].get("Env", {}) + + environment = dict( + {key: os.environ[key] for key in self.enviro_filter + if key in environment}, **api.Session) + + self.log.debug("___> enviro: {}".format(environment)) + for _key in os.environ: + if _key.lower().startswith('pype_'): + environment[_key] = os.environ[_key] + i = 0 for index, key in enumerate(environment): self.log.info("KEY: {}".format(key)) - self.log.info("FILTER: {}".format(self.enviro_filter)) - if key.upper() in self.enviro_filter: - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % i: "{key}={value}".format( - key=key, - value=environment[key] - ) - }) - i += 1 + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % i: "{key}={value}".format( + key=key, + value=environment[key] + ) + }) + i += 1 # Avoid copied pools and remove secondary pool payload["JobInfo"]["Pool"] = "none" From b080f73a396e867a6b27719c7fb1b87674ac58e1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 15 Jan 2020 10:57:15 +0100 Subject: [PATCH 130/393] style(nuke): cleaning code --- pype/plugins/nuke/publish/collect_slate_node.py | 1 + 1 
From b080f73a396e867a6b27719c7fb1b87674ac58e1 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 15 Jan 2020 10:57:15 +0100
Subject: [PATCH 130/393] style(nuke): cleaning code

---
 pype/plugins/nuke/publish/collect_slate_node.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pype/plugins/nuke/publish/collect_slate_node.py b/pype/plugins/nuke/publish/collect_slate_node.py
index 1b6cef4bef..d8d6b50f05 100644
--- a/pype/plugins/nuke/publish/collect_slate_node.py
+++ b/pype/plugins/nuke/publish/collect_slate_node.py
@@ -1,6 +1,7 @@
 import pyblish.api
 import nuke

+
 class CollectSlate(pyblish.api.InstancePlugin):
     """Check if SLATE node is in scene and connected to rendering tree"""

From b0d338fdb2a3f448fe343b746b2bb5b97fe25961 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 15 Jan 2020 10:57:52 +0100
Subject: [PATCH 131/393] fix(global): refactoring environment collection

---
 .../global/publish/submit_publish_job.py | 29 ++++++++-----------
 1 file changed, 12 insertions(+), 17 deletions(-)

diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index 11d4b15f9d..49ae958cd1 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -149,7 +149,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         "FTRACK_API_USER",
         "FTRACK_API_KEY",
         "FTRACK_SERVER",
-        "PYPE_ROOT"
+        "PYPE_ROOT",
+        "PYPE_STUDIO_PROJECTS_PATH",
+        "PYPE_STUDIO_PROJECTS_MOUNT"
     ]

     def _submit_deadline_post_job(self, instance, job):
@@ -205,26 +207,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

         environment = job["Props"].get("Env", {})

-        environment = dict(
-            {key: os.environ[key] for key in self.enviro_filter
-             if key in environment}, **api.Session)
-
-        self.log.debug("___> enviro: {}".format(environment))
-        for _key in os.environ:
-            if _key.lower().startswith('pype_'):
-                environment[_key] = os.environ[_key]
-
         i = 0
         for index, key in enumerate(environment):
             self.log.info("KEY: {}".format(key))
+            self.log.info("FILTER: {}".format(self.enviro_filter))

-            payload["JobInfo"].update({
-                "EnvironmentKeyValue%d" % i: "{key}={value}".format(
-                    key=key,
-                    value=environment[key]
-                )
-            })
-            i += 1
+            if key.upper() in self.enviro_filter:
+                payload["JobInfo"].update({
+                    "EnvironmentKeyValue%d" % i: "{key}={value}".format(
+                        key=key,
+                        value=environment[key]
+                    )
+                })
+                i += 1

         # Avoid copied pools and remove secondary pool
         payload["JobInfo"]["Pool"] = "none"

From 408c220df80ad0a10951a2da9473f4fb5590926c Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Wed, 15 Jan 2020 15:30:00 +0100
Subject: [PATCH 132/393] remapping handling pushed to pype publish command

---
 pype/scripts/publish_filesequence.py | 27 +++++----------------------
 1 file changed, 5 insertions(+), 22 deletions(-)

diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py
index 167f7bea17..620ee3d851 100644
--- a/pype/scripts/publish_filesequence.py
+++ b/pype/scripts/publish_filesequence.py
@@ -26,7 +26,6 @@ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"


 def __main__():
-
     parser = argparse.ArgumentParser()
     parser.add_argument("--paths",
                         nargs="*",
@@ -46,27 +45,11 @@ def __main__():
     print("Running pype ...")
     auto_pype_root = os.path.dirname(os.path.abspath(__file__))
     auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..")
-    # we need to use `auto_pype_root` to be able to remap locations.
-    # This is egg and chicken case: we need to know our storage locations
-    # to be able to remap them on different platforms but if we got `PYPE_ROOT`
-    # variable, we cannot be sure it originated on different platform and is
-    # therefor invalid.
- # So we use auto_pype_root to get to `PypeLauncher.path_remapper()`. This - # will load Storage environments and is able to remap environment to - # correct paths. - sys.path.append(auto_pype_root) - try: - from pypeapp import PypeLauncher - except ImportError: - print("!!! Error: cannot determine Pype location.") - print("--- we are looking at {}, but this is not Pype.".format( - auto_pype_root)) - remapped_env = PypeLauncher.path_remapper() - auto_pype_root = remapped_env.get('PYPE_ROOT') or auto_pype_root - if remapped_env.get('PYPE_ROOT'): + auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root + if os.environ.get('PYPE_ROOT'): print("Got Pype location from environment: {}".format( - remapped_env.get('PYPE_ROOT'))) + os.environ.get('PYPE_ROOT'))) pype_command = "pype.ps1" if platform.system().lower() == "linux": @@ -92,7 +75,7 @@ def __main__(): print("Set pype root to: {}".format(pype_root)) print("Paths: {}".format(kwargs.paths or [os.getcwd()])) - paths = kwargs.paths or [os.getcwd()] + paths = kwargs.paths or [os.environ.get("PYPE_METADATA_FILE")] or [os.getcwd()] # noqa args = [ os.path.join(pype_root, pype_command), @@ -104,7 +87,7 @@ def __main__(): # Forcing forwaring the environment because environment inheritance does # not always work. # Cast all values in environment to str to be safe - env = {k: str(v) for k, v in remapped_env.items()} + env = {k: str(v) for k, v in os.environ.items()} exit_code = subprocess.call(args, env=env) if exit_code != 0: raise RuntimeError("Publishing failed.") From 315dfe79a4a27a53bba7c32e663755dea68c74ae Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 15 Jan 2020 15:36:54 +0100 Subject: [PATCH 133/393] set ftrackId and entityType on entity when integrate hierarchy ftrack creates one --- .../ftrack/publish/integrate_hierarchy_ftrack.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py index 1deff56d83..a33cf81c28 100644 --- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py +++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py @@ -85,6 +85,18 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): type=entity_type, parent=parent ) + + if entity.entity_type.lower() not in ["task", "project"]: + filter = { + "type": "asset", + "name": entity_name + } + update_data = { + "data.ftrackId": entity["id"], + "data.entityType": entity.entity_type + } + io.update_one(filter, update_data) + # self.log.info('entity: {}'.format(dict(entity))) # CUSTOM ATTRIBUTES custom_attributes = entity_data.get('custom_attributes', []) From 3e129d7a564b035843854868eb1596f265c3a825 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 15 Jan 2020 16:04:49 +0100 Subject: [PATCH 134/393] feat(nuke, global): adding `slate` family conditions --- pype/nuke/lib.py | 2 ++ pype/plugins/global/publish/extract_burnin.py | 24 +++++++++++++------ pype/plugins/global/publish/integrate_new.py | 24 ++++++------------- .../global/publish/submit_publish_job.py | 2 ++ .../nuke/publish/collect_script_version.py | 22 +++++++++++++++++ pype/plugins/nuke/publish/collect_workfile.py | 8 +------ pype/plugins/nuke/publish/collect_writes.py | 9 +++---- .../nuke/publish/extract_render_local.py | 9 +++++++ .../nuke/publish/extract_slate_frame.py | 4 ++++ .../nuke/publish/submit_nuke_deadline.py | 10 ++++++-- .../nuke/publish/validate_rendered_frames.py | 3 +++ 11 files changed, 80 insertions(+), 37 deletions(-) create mode 100644 
pype/plugins/nuke/publish/collect_script_version.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 36270f37df..23c677cc7f 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1235,6 +1235,8 @@ class ExporterReview: # get first and last frame self.first_frame = min(self.collection.indexes) self.last_frame = max(self.collection.indexes) + if "slate" in self.instance.data["families"]: + self.first_frame += 1 else: self.fname = os.path.basename(self.path_in) self.fhead = os.path.splitext(self.fname)[0] + "." diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index a87fd47ea1..dac5c524e7 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -25,11 +25,7 @@ class ExtractBurnin(pype.api.Extractor): if "representations" not in instance.data: raise RuntimeError("Burnin needs already created mov to work on.") - # TODO: expand burnin data list to include all usefull keys - version = '' - if instance.context.data.get('version'): - version = "v" + str(instance.context.data['version']) - + version = instance.context.data.get('version') frame_start = int(instance.data.get("frameStart") or 0) frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 @@ -42,9 +38,23 @@ class ExtractBurnin(pype.api.Extractor): "frame_end": frame_end, "duration": duration, "version": version, - "comment": instance.context.data.get("comment"), - "intent": instance.context.data.get("intent") + "comment": instance.context.data.get("comment", ""), + "intent": instance.context.data.get("intent", "") } + + # exception for slate workflow + if "slate" in instance.data["families"]: + slate_frame_start = frame_start - 1 + slate_frame_end = frame_end + slate_duration = slate_frame_end - slate_frame_start + 1 + + prep_data.update({ + "slate_frame_start": slate_frame_start, + "slate_frame_end": slate_frame_end, + "slate_duration": slate_duration + }) + + # Update data with template data template_data = instance.data.get("assumedTemplateData") or {} prep_data.update(template_data) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index a15c296b11..8751431223 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -175,16 +175,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if instance.data.get('version'): next_version = int(instance.data.get('version')) - # self.log.info("Verifying version from assumed destination") - - # assumed_data = instance.data["assumedTemplateData"] - # assumed_version = assumed_data["version"] - # if assumed_version != next_version: - # raise AttributeError("Assumed version 'v{0:03d}' does not match" - # "next version in database " - # "('v{1:03d}')".format(assumed_version, - # next_version)) - self.log.debug("Next version: v{0:03d}".format(next_version)) version_data = self.create_version_data(context, instance) @@ -323,6 +313,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre.get("frameEnd"))) index_frame_start = int(repre.get("frameStart")) + # exception for slate workflow + if "slate" in instance.data["families"]: + index_frame_start -= 1 + dst_padding_exp = src_padding_exp dst_start_frame = None for i in src_collection.indexes: @@ -461,20 +455,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.hardlink_file(src, dest) def unc_convert(self, path): - self.log.debug("_ path .. 
`{}`".format(path)) drive, _path = os.path.splitdrive(path) - self.log.debug("_ drive, _path .. `{}`, `{}`".format(drive, _path)) unc = Path(drive).resolve() - self.log.debug("_ unc.resolved .. `{}`".format(unc)) path = str(unc) + _path - self.log.debug("_ path.resolved .. `{}`".format(path)) if not os.path.exists(str(unc)): - self.log.info("_ converting to unc from environments ..") + self.log.info("Converting to unc from environments ..") + path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH") path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT") - self.log.debug("_ path_replace .. `{}`".format(path_replace)) - self.log.debug("_ path_mount .. `{}`".format(path_mount)) + if "/" in path_mount: path = path.replace(path_mount[0:-1], path_replace) else: diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 49ae958cd1..0d4018d46f 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -321,6 +321,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "source": source, "user": context.data["user"], "version": context.data["version"], + "intent": context.data["intent"], + "comment": context.data["comment"], # Optional metadata (for debugging) "metadata": { "instance": data, diff --git a/pype/plugins/nuke/publish/collect_script_version.py b/pype/plugins/nuke/publish/collect_script_version.py new file mode 100644 index 0000000000..9a6b5bf572 --- /dev/null +++ b/pype/plugins/nuke/publish/collect_script_version.py @@ -0,0 +1,22 @@ +import os +import pype.api as pype +import pyblish.api + + +class CollectScriptVersion(pyblish. api.ContextPlugin): + """Collect Script Version.""" + + order = pyblish.api.CollectorOrder + label = "Collect Script Version" + hosts = [ + "nuke", + "nukeassist" + ] + + def process(self, context): + file_path = context.data["currentFile"] + base_name = os.path.basename(file_path) + # get version string + version = pype.get_version_from_path(base_name) + + context.data['version'] = version diff --git a/pype/plugins/nuke/publish/collect_workfile.py b/pype/plugins/nuke/publish/collect_workfile.py index aaee554fbf..4fff9f46ed 100644 --- a/pype/plugins/nuke/publish/collect_workfile.py +++ b/pype/plugins/nuke/publish/collect_workfile.py @@ -2,8 +2,6 @@ import nuke import pyblish.api import os -import pype.api as pype - from avalon.nuke import ( get_avalon_knob_data, add_publish_knob @@ -11,7 +9,7 @@ from avalon.nuke import ( class CollectWorkfile(pyblish.api.ContextPlugin): - """Publish current script version.""" + """Collect current script for publish.""" order = pyblish.api.CollectorOrder + 0.1 label = "Collect Workfile" @@ -31,9 +29,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): base_name = os.path.basename(file_path) subset = "{0}_{1}".format(os.getenv("AVALON_TASK", None), family) - # get version string - version = pype.get_version_from_path(base_name) - # Get frame range first_frame = int(root["first_frame"].getValue()) last_frame = int(root["last_frame"].getValue()) @@ -53,7 +48,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): script_data = { "asset": os.getenv("AVALON_ASSET", None), - "version": version, "frameStart": first_frame + handle_start, "frameEnd": last_frame - handle_end, "resolutionWidth": resolution_width, diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index ff315a12ea..37c86978b6 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ 
b/pype/plugins/nuke/publish/collect_writes.py @@ -50,9 +50,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) - # get version - version = pype.get_version_from_path(nuke.root().name()) - instance.data['version'] = version + # get version to instance for integration + instance.data['version'] = instance.context.data.get( + "version", pype.get_version_from_path(nuke.root().name())) + self.log.debug('Write Version: %s' % instance.data('version')) # create label @@ -94,7 +95,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "handleEnd": handle_end, "frameStart": first_frame + handle_start, "frameEnd": last_frame - handle_end, - "version": int(version), + "version": int(instance.data['version']), "colorspace": node["colorspace"].value(), "families": [instance.data["family"]], "subset": instance.data["subset"], diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index 825db67e9d..9b8baa468b 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -28,6 +28,11 @@ class NukeRenderLocal(pype.api.Extractor): self.log.debug("instance collected: {}".format(instance.data)) first_frame = instance.data.get("frameStart", None) + + # exception for slate workflow + if "slate" in instance.data["families"]: + first_frame -= 1 + last_frame = instance.data.get("frameEnd", None) node_subset_name = instance.data.get("name", None) @@ -47,6 +52,10 @@ class NukeRenderLocal(pype.api.Extractor): int(last_frame) ) + # exception for slate workflow + if "slate" in instance.data["families"]: + first_frame += 1 + path = node['file'].value() out_dir = os.path.dirname(path) ext = node["file_type"].value() diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py index a0f1c06828..fd1523447a 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -60,6 +60,10 @@ class ExtractSlateFrame(pype.api.Extractor): # get first and last frame first_frame = min(collection.indexes) - 1 + + if "slate" in instance.data["families"]: + first_frame += 1 + last_frame = first_frame else: fname = os.path.basename(instance.data.get("path", None)) diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py index d9207d2bfc..580a114562 100644 --- a/pype/plugins/nuke/publish/submit_nuke_deadline.py +++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py @@ -1,7 +1,7 @@ import os import json import getpass - + from avalon import api from avalon.vendor import requests import re @@ -43,6 +43,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): render_path = instance.data['path'] render_dir = os.path.normpath(os.path.dirname(render_path)) + # frame start definition + frame_start = int(instance.data["frameStart"]) + # exception for slate workflow + if "slate" in instance.data["families"]: + frame_start -= 1 + script_path = context.data["currentFile"] script_name = os.path.basename(script_path) @@ -75,7 +81,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "Plugin": "Nuke", "Frames": "{start}-{end}".format( - start=int(instance.data["frameStart"]), + start=frame_start, end=int(instance.data["frameEnd"]) ), "ChunkSize": instance.data["deadlineChunkSize"], diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py 
b/pype/plugins/nuke/publish/validate_rendered_frames.py
index 3887b5d5b7..c63c289947 100644
--- a/pype/plugins/nuke/publish/validate_rendered_frames.py
+++ b/pype/plugins/nuke/publish/validate_rendered_frames.py
@@ -75,6 +75,9 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
         self.log.info(
             'len(collection.indexes): {}'.format(collected_frames_len)
         )
+
+        if "slate" in instance.data["families"]:
+            collected_frames_len -= 1

         assert (collected_frames_len == frame_length), (
             "{} missing frames. Use repair to render all frames"
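[editor's note — a small worked example of the slate frame arithmetic that
patch 134 threads through the Nuke plugins above; a minimal sketch, not part
of the patches. A slate occupies one extra frame *before* the first content
frame, so render ranges and collected-frame counts grow by one at the head
while the published frame range stays untouched.]

def render_range(frame_start, frame_end, families):
    if "slate" in families:
        frame_start -= 1  # the slate is rendered on the frame before frameStart
    return frame_start, frame_end

assert render_range(1001, 1100, ["render", "slate"]) == (1000, 1100)
assert render_range(1001, 1100, ["render"]) == (1001, 1100)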
From 96a7c4a06183c2c14a203f7f6c03cee5e8aaf126 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 15 Jan 2020 17:28:28 +0100
Subject: [PATCH 135/393] feat(ftrack): delivery action

---
 pype/ftrack/actions/action_delivery.py | 538 +++++++++++++++++++++++++
 res/ftrack/action_icons/Delivery.svg   |  34 ++
 2 files changed, 572 insertions(+)
 create mode 100644 pype/ftrack/actions/action_delivery.py
 create mode 100644 res/ftrack/action_icons/Delivery.svg

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
new file mode 100644
index 0000000000..afd20d12d1
--- /dev/null
+++ b/pype/ftrack/actions/action_delivery.py
@@ -0,0 +1,538 @@
+import os
+import copy
+import shutil
+import collections
+import string
+
+import clique
+from bson.objectid import ObjectId
+
+from avalon import pipeline
+from avalon.vendor import filelink
+from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+
+from pypeapp import Anatomy
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
+
+
+class Delivery(BaseAction):
+    '''Edit meta data action.'''
+
+    #: Action identifier.
+    identifier = "delivery.action"
+    #: Action label.
+    label = "Delivery"
+    #: Action description.
+    description = "Deliver data to client"
+    #: roles that are allowed to register this action
+    role_list = ["Pypeclub", "Administrator", "Project manager"]
+    icon = '{}/ftrack/action_icons/Delivery.svg'.format(
+        os.environ.get('PYPE_STATICS_SERVER', '')
+    )
+
+    db_con = DbConnector()
+
+    def discover(self, session, entities, event):
+        ''' Validation '''
+        for entity in entities:
+            if entity.entity_type.lower() == "assetversion":
+                return True
+
+        return False
+
+    def interface(self, session, entities, event):
+        if event["data"].get("values", {}):
+            return
+
+        title = "Delivery data to Client"
+
+        items = []
+        item_splitter = {"type": "label", "value": "---"}
+
+        # Prepare component names for processing
+        components = None
+        project = None
+        for entity in entities:
+            if project is None:
+                project_id = None
+                for ent_info in entity["link"]:
+                    if ent_info["type"].lower() == "project":
+                        project_id = ent_info["id"]
+                        break
+
+                if project_id is None:
+                    project = entity["asset"]["parent"]["project"]
+                else:
+                    project = session.query((
+                        "select id, full_name from Project where id is \"{}\""
+                    ).format(project_id)).one()
+
+            _components = set(
+                [component["name"] for component in entity["components"]]
+            )
+            if components is None:
+                components = _components
+                continue
+
+            components = components.intersection(_components)
+            if not components:
+                break
+
+        project_name = project["full_name"]
+        items.append({
+            "type": "hidden",
+            "name": "__project_name__",
+            "value": project_name
+        })
+
+        # Prepare anatomy data
+        anatomy = Anatomy(project_name)
+        new_anatomies = []
+        first = None
+        for key in (anatomy.templates.get("delivery") or {}):
+            new_anatomies.append({
+                "label": key,
+                "value": key
+            })
+            if first is None:
+                first = key
+
+        skipped = False
+        # Add message if there are any common components
+        if not components or not new_anatomies:
+            skipped = True
+            items.append({
+                "type": "label",
+                "value": "<h1>Something went wrong:</h1>"
+            })
+
+        items.append({
+            "type": "hidden",
+            "name": "__skipped__",
+            "value": skipped
+        })
+
+        if not components:
+            if len(entities) == 1:
+                items.append({
+                    "type": "label",
+                    "value": (
+                        "- Selected entity doesn't have components to deliver."
+                    )
+                })
+            else:
+                items.append({
+                    "type": "label",
+                    "value": (
+                        "- Selected entities don't have common components."
+                    )
+                })
+
+        # Add message if delivery anatomies are not set
+        if not new_anatomies:
+            items.append({
+                "type": "label",
+                "value": (
+                    "- `\"delivery\"` anatomy key is not set in config."
+                )
+            })
+
+        # Skip if there are any data shortcomings
+        if skipped:
+            return {
+                "items": items,
+                "title": title
+            }
+
+        items.append({
+            "value": "<h1>Choose Components to deliver</h1>",
+            "type": "label"
+        })
+
+        for component in components:
+            items.append({
+                "type": "boolean",
+                "value": False,
+                "label": component,
+                "name": component
+            })
+
+        items.append(item_splitter)
+
+        items.append({
+            "value": "<h1>Location for delivery</h1>",
+            "type": "label"
+        })
+
+        items.append({
+            "type": "label",
+            "value": (
+                "NOTE: It is possible to replace `root` key in anatomy."
+            )
+        })
+
+        items.append({
+            "type": "text",
+            "name": "__location_path__",
+            "empty_text": "Type location path here...(Optional)"
+        })
+
+        items.append(item_splitter)
+
+        items.append({
+            "value": "<h1>Anatomy of delivery files</h1>",
+            "type": "label"
+        })
+
+        items.append({
+            "type": "label",
+            "value": (
+                "<p>NOTE: These can be set in Anatomy.yaml"
+                " within `delivery` key.</p>"
+            )
+        })
+
+        items.append({
+            "type": "enumerator",
+            "name": "__new_anatomies__",
+            "data": new_anatomies,
+            "value": first
+        })
+
+        return {
+            "items": items,
+            "title": title
+        }
+
+    def launch(self, session, entities, event):
+        if "values" not in event["data"]:
+            return
+
+        self.report_items = collections.defaultdict(list)
+
+        values = event["data"]["values"]
+        skipped = values.pop("__skipped__")
+        if skipped:
+            return None
+
+        component_names = []
+        location_path = values.pop("__location_path__")
+        anatomy_name = values.pop("__new_anatomies__")
+        project_name = values.pop("__project_name__")
+
+        for key, value in values.items():
+            if value is True:
+                component_names.append(key)
+
+        if not component_names:
+            return {
+                "success": True,
+                "message": "Not selected components to deliver."
+            }
+
+        location_path = location_path.strip()
+        if location_path:
+            location_path = os.path.normpath(location_path)
+            if not os.path.exists(location_path):
+                return {
+                    "success": False,
+                    "message": (
+                        "Entered location path does not exists. \"{}\""
+                    ).format(location_path)
+                }
+
+        self.db_con.install()
+        self.db_con.Session["AVALON_PROJECT"] = project_name
+
+        repres_to_deliver = []
+        for entity in entities:
+            asset = entity["asset"]
+            subset_name = asset["name"]
+            version = entity["version"]
+
+            parent = asset["parent"]
+            parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
+            if parent_mongo_id:
+                parent_mongo_id = ObjectId(parent_mongo_id)
+            else:
+                asset_ent = self.db_con.find_one({
+                    "type": "asset",
+                    "data.ftrackId": parent["id"]
+                })
+                if not asset_ent:
+                    ent_path = "/".join(
+                        [ent["name"] for ent in parent["link"]]
+                    )
+                    msg = "Not synchronized entities to avalon"
+                    self.report_items[msg].append(ent_path)
+                    self.log.warning("{} <{}>".format(msg, ent_path))
+                    continue
+
+                parent_mongo_id = asset_ent["_id"]
+
+            subset_ent = self.db_con.find_one({
+                "type": "subset",
+                "parent": parent_mongo_id,
+                "name": subset_name
+            })
+
+            version_ent = self.db_con.find_one({
+                "type": "version",
+                "name": version,
+                "parent": subset_ent["_id"]
+            })
+
+            repre_ents = self.db_con.find({
+                "type": "representation",
+                "parent": version_ent["_id"]
+            })
+
+            repres_by_name = {}
+            for repre in repre_ents:
+                repre_name = repre["name"]
+                repres_by_name[repre_name] = repre
+
+            for component in entity["components"]:
+                comp_name = component["name"]
+                if comp_name not in component_names:
+                    continue
+
+                repre = repres_by_name.get(comp_name)
+                repres_to_deliver.append(repre)
+
+        if not location_path:
+            location_path = os.environ.get("AVALON_PROJECTS") or ""
+
+        print(location_path)
+
+        anatomy = Anatomy(project_name)
+        for repre in repres_to_deliver:
+            # Get destination repre path
+            anatomy_data = copy.deepcopy(repre["context"])
+            anatomy_data["root"] = location_path
+
+            anatomy_filled = anatomy.format(anatomy_data)
+            test_path = (
+                anatomy_filled
+                .get("delivery", {})
+                .get(anatomy_name)
+            )
+
+            if not test_path:
+                msg = (
+                    "Missing keys in Representation's context"
+                    " for anatomy template \"{}\"."
+                ).format(anatomy_name)
+
+                all_anatomies = anatomy.format_all(anatomy_data)
+                result = None
+                for anatomies in all_anatomies.values():
+                    for key, temp in anatomies.get("delivery", {}).items():
+                        if key != anatomy_name:
+                            continue
+
+                        result = temp
+                        break
+
+                # TODO log error! - missing keys in anatomy
+                if result:
+                    missing_keys = [
+                        key[1] for key in string.Formatter().parse(result)
+                        if key[1] is not None
+                    ]
+                else:
+                    missing_keys = ["unknown"]
+
+                keys = ", ".join(missing_keys)
+                sub_msg = (
+                    "Representation: {}<br>- Missing keys: \"{}\"<br>"
+                ).format(str(repre["_id"]), keys)
+                self.report_items[msg].append(sub_msg)
+                self.log.warning(
+                    "{} Representation: \"{}\" Filled: <{}>".format(
+                        msg, str(repre["_id"]), str(result)
+                    )
+                )
+                continue
+
+            # Get source repre path
+            frame = repre['context'].get('frame')
+
+            if frame:
+                repre["context"]["frame"] = len(str(frame)) * "#"
+
+            repre_path = self.path_from_represenation(repre)
+            # TODO add backup solution where root of path from component
+            # is replaced with AVALON_PROJECTS root
+            if not frame:
+                self.process_single_file(
+                    repre_path, anatomy, anatomy_name, anatomy_data
+                )
+
+            else:
+                self.process_sequence(
+                    repre_path, anatomy, anatomy_name, anatomy_data
+                )
+
+        self.db_con.uninstall()
+
+        return self.report()
+
+    def process_single_file(
+        self, repre_path, anatomy, anatomy_name, anatomy_data
+    ):
+        anatomy_filled = anatomy.format(anatomy_data)
+        delivery_path = anatomy_filled["delivery"][anatomy_name]
+        delivery_folder = os.path.dirname(delivery_path)
+        if not os.path.exists(delivery_folder):
+            os.makedirs(delivery_folder)
+
+        self.copy_file(repre_path, delivery_path)
+ msg = "Source collection of files was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) + return + + frame_indicator = "@####@" + + anatomy_data["frame"] = frame_indicator + anatomy_filled = anatomy.format(anatomy_data) + + delivery_path = anatomy_filled["delivery"][anatomy_name] + print(delivery_path) + delivery_folder = os.path.dirname(delivery_path) + dst_head, dst_tail = delivery_path.split(frame_indicator) + dst_padding = src_collection.padding + dst_collection = clique.Collection( + head=dst_head, + tail=dst_tail, + padding=dst_padding + ) + + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + src_head = src_collection.head + src_tail = src_collection.tail + for index in src_collection.indexes: + src_padding = src_collection.format("{padding}") % index + src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + src = os.path.normpath( + os.path.join(dir_path, src_file_name) + ) + + dst_padding = dst_collection.format("{padding}") % index + dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) + + self.copy_file(src, dst) + + def path_from_represenation(self, representation): + try: + template = representation["data"]["template"] + + except KeyError: + return None + + try: + context = representation["context"] + context["root"] = os.environ.get("AVALON_PROJECTS") or "" + path = pipeline.format_template_with_optional_keys( + context, template + ) + + except KeyError: + # Template references unavailable data + return None + + return os.path.normpath(path) + + def copy_file(self, src_path, dst_path): + if os.path.exists(dst_path): + return + try: + filelink.create( + src_path, + dst_path, + filelink.HARDLINK + ) + except OSError: + shutil.copyfile(src_path, dst_path) + + def report(self): + items = [] + title = "Delivery report" + for msg, _items in self.report_items.items(): + if not _items: + continue + + if items: + items.append({"type": "label", "value": "---"}) + + items.append({ + "type": "label", + "value": "# {}".format(msg) + }) + if not isinstance(_items, (list, tuple)): + _items = [_items] + __items = [] + for item in _items: + __items.append(str(item)) + + items.append({ + "type": "label", + "value": '

{}

'.format("
".join(__items)) + }) + + if not items: + return { + "success": True, + "message": "Delivery Finished" + } + + return { + "items": items, + "title": title, + "success": False, + "message": "Delivery Finished" + } + +def register(session, plugins_presets={}): + '''Register plugin. Called when used as an plugin.''' + + Delivery(session, plugins_presets).register() diff --git a/res/ftrack/action_icons/Delivery.svg b/res/ftrack/action_icons/Delivery.svg new file mode 100644 index 0000000000..3380487c31 --- /dev/null +++ b/res/ftrack/action_icons/Delivery.svg @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + From aee72b9eacd4ac25a461a9824e6ebb63afd61e9e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 15 Jan 2020 17:46:09 +0100 Subject: [PATCH 136/393] fix(nuke): deadline submission of `slate` family --- pype/nuke/lib.py | 1 - .../global/publish/submit_publish_job.py | 4 ++-- .../nuke/publish/extract_review_data_mov.py | 22 +++++++---------- .../nuke/publish/extract_slate_frame.py | 18 +++++++------- .../plugins/nuke/publish/extract_thumbnail.py | 24 +++++++------------ 5 files changed, 26 insertions(+), 43 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 23c677cc7f..07f1f08978 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1520,7 +1520,6 @@ class ExporterReviewMov(ExporterReview): write_node.setInput(0, self.previous_node) self._temp_nodes.append(write_node) self.log.debug("Write... `{}`".format(self._temp_nodes)) - # ---------- end nodes creation # ---------- render or save to nk diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 0d4018d46f..0bd71156eb 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -321,8 +321,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "source": source, "user": context.data["user"], "version": context.data["version"], - "intent": context.data["intent"], - "comment": context.data["comment"], + "intent": context.data.get("intent"), + "comment": context.data.get("comment"), # Optional metadata (for debugging) "metadata": { "instance": data, diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 69347bfafc..f41bff570a 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -21,21 +21,15 @@ class ExtractReviewDataMov(pype.api.Extractor): def process(self, instance): families = instance.data["families"] - self.log.info("Creating staging dir...") - self.log.debug( - "__ representations: `{}`".format( - instance.data["representations"])) - if "representations" in instance.data: - if instance.data["representations"] == []: - render_path = instance.data['path'] - staging_dir = os.path.normpath(os.path.dirname(render_path)) - instance.data["stagingDir"] = staging_dir - else: - staging_dir = instance.data[ - "representations"][0]["stagingDir"].replace("\\", "/") - instance.data["representations"][0]["tags"] = [] - instance.data["stagingDir"] = staging_dir + + if "representations" not in instance.data: + instance.data["representations"] = list() + + staging_dir = os.path.normpath( + os.path.dirname(instance.data['path'])) + + instance.data["stagingDir"] = staging_dir self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py 
b/pype/plugins/nuke/publish/extract_slate_frame.py index fd1523447a..7e43b3cd6f 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -35,16 +35,14 @@ class ExtractSlateFrame(pype.api.Extractor): def render_slate(self, instance): node = instance[0] # group node self.log.info("Creating staging dir...") - if "representations" in instance.data: - staging_dir = instance.data[ - "representations"][0]["stagingDir"].replace("\\", "/") - instance.data["stagingDir"] = staging_dir - else: - instance.data["representations"] = [] - # get output path - render_path = instance.data['path'] - staging_dir = os.path.normpath(os.path.dirname(render_path)) - instance.data["stagingDir"] = staging_dir + + if "representations" not in instance.data: + instance.data["representations"] = list() + + staging_dir = os.path.normpath( + os.path.dirname(instance.data['path'])) + + instance.data["stagingDir"] = staging_dir self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index dee6453671..828319e555 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -30,22 +30,14 @@ class ExtractThumbnail(pype.api.Extractor): def render_thumbnail(self, instance): node = instance[0] # group node self.log.info("Creating staging dir...") - self.log.debug( - "_ representations `{0}`".format(instance.data["representations"])) - if "representations" in instance.data: - try: - staging_dir = instance.data[ - "representations"][0]["stagingDir"].replace("\\", "/") - except IndexError: - path = instance.data["path"] - staging_dir = os.path.dirname(path) - instance.data["stagingDir"] = staging_dir - else: - instance.data["representations"] = [] - # get output path - render_path = instance.data['path'] - staging_dir = os.path.normpath(os.path.dirname(render_path)) - instance.data["stagingDir"] = staging_dir + + if "representations" not in instance.data: + instance.data["representations"] = list() + + staging_dir = os.path.normpath( + os.path.dirname(instance.data['path'])) + + instance.data["stagingDir"] = staging_dir self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) From 1f96a74bac663d487d6257df689ee222fb00aed9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 16 Jan 2020 14:30:26 +0100 Subject: [PATCH 137/393] remove added code of previous commit --- .../ftrack/publish/integrate_hierarchy_ftrack.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py index a33cf81c28..1deff56d83 100644 --- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py +++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py @@ -85,18 +85,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): type=entity_type, parent=parent ) - - if entity.entity_type.lower() not in ["task", "project"]: - filter = { - "type": "asset", - "name": entity_name - } - update_data = { - "data.ftrackId": entity["id"], - "data.entityType": entity.entity_type - } - io.update_one(filter, update_data) - # self.log.info('entity: {}'.format(dict(entity))) # CUSTOM ATTRIBUTES custom_attributes = entity_data.get('custom_attributes', []) From 415ec47f9db4746ddb206b9c0d70ed4d95014ccc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 16 Jan 2020 14:31:05 +0100 Subject: 
[PATCH 138/393] added additional check in event sync to avalon for cases
 when avalon ent exists but doesn't have ftrackId

---
 pype/ftrack/events/event_sync_to_avalon.py | 46 +++++++++++++++++++++-
 1 file changed, 45 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 8d75d932f8..6188458645 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -131,7 +131,9 @@ class SyncToAvalonEvent(BaseEvent):
             ftrack_id = proj["data"]["ftrackId"]
             self._avalon_ents_by_ftrack_id[ftrack_id] = proj
             for ent in ents:
-                ftrack_id = ent["data"]["ftrackId"]
+                ftrack_id = ent["data"].get("ftrackId")
+                if ftrack_id is None:
+                    continue
                 self._avalon_ents_by_ftrack_id[ftrack_id] = ent

         return self._avalon_ents_by_ftrack_id
@@ -1427,6 +1429,48 @@ class SyncToAvalonEvent(BaseEvent):
                 parent_id = ent_info["parentId"]
                 new_tasks_by_parent[parent_id].append(ent_info)
                 pop_out_ents.append(ftrack_id)
+                continue
+
+            name = (
+                ent_info
+                .get("changes", {})
+                .get("name", {})
+                .get("new")
+            )
+            avalon_ent_by_name = self.avalon_ents_by_name.get(name)
+            avalon_ent_by_name_ftrack_id = (
+                avalon_ent_by_name
+                .get("data", {})
+                .get("ftrackId")
+            )
+            if avalon_ent_by_name and avalon_ent_by_name_ftrack_id is None:
+                ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id)
+                if not ftrack_ent:
+                    ftrack_ent = self.process_session.query(
+                        self.entities_query_by_id.format(
+                            self.cur_project["id"], ftrack_id
+                        )
+                    ).one()
+                    self.ftrack_ents_by_id[ftrack_id] = ftrack_ent
+
+                ent_path_items = [ent["name"] for ent in ftrack_ent["link"]]
+                parents = ent_path_items[1:len(ent_path_items)-1:]
+
+                avalon_ent_parents = (
+                    avalon_ent_by_name.get("data", {}).get("parents")
+                )
+                if parents == avalon_ent_parents:
+                    self.dbcon.update_one({
+                        "_id": avalon_ent_by_name["_id"]
+                    }, {
+                        "data.ftrackId": ftrack_id,
+                        "data.entityType": entity_type
+                    })
+                    self._avalon_ents_by_ftrack_id[ftrack_id] = (
+                        avalon_ent_by_name
+                    )
+                    pop_out_ents.append(ftrack_id)
+                    continue

             configuration_id = entity_type_conf_ids.get(entity_type)
             if not configuration_id:

From 0c3513283993dcd0af468dceebf14e8e26c97f10 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 16 Jan 2020 15:56:02 +0100
Subject: [PATCH 139/393] feat(nuke): deadline submission with slate and
 dependent noninteractive job

---
 .../nuke/publish/submit_nuke_deadline.py | 131 +++++++++++------
 1 file changed, 78 insertions(+), 53 deletions(-)

diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py
index 580a114562..71108189c0 100644
--- a/pype/plugins/nuke/publish/submit_nuke_deadline.py
+++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py
@@ -26,37 +26,69 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

     def process(self, instance):
         node = instance[0]
-        # for x in instance:
-        #     if x.Class() == "Write":
-        #         node = x
-        #
-        # if node is None:
-        #     return
+        context = instance.context

         DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                            "http://localhost:8082")
         assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

-        context = instance.context
+        self.deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
+        self._comment = context.data.get("comment", "")
+        self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))
+        self._deadline_user = context.data.get(
+            "deadlineUser", getpass.getuser())
+        self._frame_start = int(instance.data["frameStart"])
+        self._frame_end = int(instance.data["frameEnd"])

         # get
output path render_path = instance.data['path'] - render_dir = os.path.normpath(os.path.dirname(render_path)) - - # frame start definition - frame_start = int(instance.data["frameStart"]) - # exception for slate workflow - if "slate" in instance.data["families"]: - frame_start -= 1 - script_path = context.data["currentFile"] - script_name = os.path.basename(script_path) - comment = context.data.get("comment", "") + # exception for slate workflow + if "slate" in instance.data["families"]: + self._frame_start -= 1 - deadline_user = context.data.get("deadlineUser", getpass.getuser()) + response = self.payload_submit(instance, + script_path, + render_path, + node.name() + ) + # Store output dir for unified publisher (filesequence) + instance.data["deadlineSubmissionJob"] = response.json() + instance.data["publishJobState"] = "Active" + + if instance.data.get("bakeScriptPath"): + render_path = instance.data.get("bakeRenderPath") + script_path = instance.data.get("bakeScriptPath") + exe_node_name = instance.data.get("bakeWriteNodeName") + + # exception for slate workflow + if "slate" in instance.data["families"]: + self._frame_start += 1 + + resp = self.payload_submit(instance, + script_path, + render_path, + exe_node_name, + response.json() + ) + # Store output dir for unified publisher (filesequence) + instance.data["deadlineSubmissionJob"] = resp.json() + instance.data["publishJobState"] = "Suspended" + + def payload_submit(self, + instance, + script_path, + render_path, + exe_node_name, + responce_data=None + ): + render_dir = os.path.normpath(os.path.dirname(render_path)) + script_name = os.path.basename(script_path) jobname = "%s - %s" % (script_name, instance.name) - ver = re.search(r"\d+\.\d+", context.data.get("hostVersion")) + + if not responce_data: + responce_data = {} try: # Ensure render folder exists @@ -64,10 +96,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): except OSError: pass - # Documentation for keys available at: - # https://docs.thinkboxsoftware.com - # /products/deadline/8.0/1_User%20Manual/manual - # /manual-submission.html#job-info-file-options payload = { "JobInfo": { # Top-level group name @@ -77,21 +105,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "Name": jobname, # Arbitrary username, for visualisation in Monitor - "UserName": deadline_user, + "UserName": self._deadline_user, + + "Priority": instance.data["deadlinePriority"], + + "Pool": "2d", + "SecondaryPool": "2d", "Plugin": "Nuke", "Frames": "{start}-{end}".format( - start=frame_start, - end=int(instance.data["frameEnd"]) + start=self._frame_start, + end=self._frame_end ), - "ChunkSize": instance.data["deadlineChunkSize"], - "Priority": instance.data["deadlinePriority"], + "Comment": self._comment, - "Comment": comment, - - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - # "OutputFilename0": output_filename_0.replace("\\", "/"), }, "PluginInfo": { # Input @@ -102,27 +129,29 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): # "OutputFilePrefix": render_variables["filename_prefix"], # Mandatory for Deadline - "Version": ver.group(), + "Version": self._ver.group(), # Resolve relative references "ProjectPath": script_path, "AWSAssetFile0": render_path, # Only the specific write node is rendered. 
- "WriteNode": node.name() + "WriteNode": exe_node_name }, # Mandatory for Deadline, may be empty "AuxFiles": [] } + if responce_data.get("_id"): + payload["JobInfo"].update({ + "JobType": "Normal", + "BatchName": responce_data["Props"]["Batch"], + "JobDependency0": responce_data["_id"], + "ChunkSize": 99999999 + }) + # Include critical environment variables with submission keys = [ - # This will trigger `userSetup.py` on the slave - # such that proper initialisation happens the same - # way as it does on a local machine. - # TODO(marcus): This won't work if the slaves don't - # have accesss to these paths, such as if slaves are - # running Linux and the submitter is on Windows. "PYTHONPATH", "PATH", "AVALON_SCHEMA", @@ -168,11 +197,12 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): if key == "PYTHONPATH": clean_path = clean_path.replace('python2', 'python3') + clean_path = clean_path.replace( - os.path.normpath( - environment['PYPE_STUDIO_CORE_MOUNT']), # noqa - os.path.normpath( - environment['PYPE_STUDIO_CORE_PATH'])) # noqa + os.path.normpath( + environment['PYPE_STUDIO_CORE_MOUNT']), # noqa + os.path.normpath( + environment['PYPE_STUDIO_CORE_PATH'])) # noqa clean_environment[key] = clean_path environment = clean_environment @@ -187,20 +217,15 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - self.preflight_check(instance) - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) - # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(DEADLINE_REST_URL) - response = requests.post(url, json=payload) + response = requests.post(self.deadline_url, json=payload) + if not response.ok: raise Exception(response.text) - # Store output dir for unified publisher (filesequence) - instance.data["deadlineSubmissionJob"] = response.json() - instance.data["publishJobState"] = "Active" + return response def preflight_check(self, instance): """Ensure the startFrame, endFrame and byFrameStep are integers""" From 83c501f26dd8d2a57cae5931e210885f95a780ea Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 16 Jan 2020 16:31:24 +0100 Subject: [PATCH 140/393] fixed update query --- pype/ftrack/events/event_sync_to_avalon.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 6188458645..b268372bd6 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1463,8 +1463,10 @@ class SyncToAvalonEvent(BaseEvent): self.dbcon.update_one({ "_id": avalon_ent_by_name["_id"] }, { - "data.ftrackId": ftrack_id, - "data.entityType": entity_type + "$set": { + "data.ftrackId": ftrack_id, + "data.entityType": entity_type + } }) self._avalon_ents_by_ftrack_id[ftrack_id] = ( avalon_ent_by_name From dad87fa395070b5dd667e26b7e39c2087e8c3280 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 16 Jan 2020 16:49:31 +0100 Subject: [PATCH 141/393] fix(global): burnin didnt work if slate was not in families --- pype/plugins/global/publish/extract_burnin.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index dac5c524e7..ecc17b669e 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -42,18 +42,21 @@ class 
ExtractBurnin(pype.api.Extractor): "intent": instance.context.data.get("intent", "") } + slate_frame_start = frame_start + slate_frame_end = frame_end + slate_duration = duration + # exception for slate workflow if "slate" in instance.data["families"]: slate_frame_start = frame_start - 1 slate_frame_end = frame_end slate_duration = slate_frame_end - slate_frame_start + 1 - prep_data.update({ - "slate_frame_start": slate_frame_start, - "slate_frame_end": slate_frame_end, - "slate_duration": slate_duration - }) - + prep_data.update({ + "slate_frame_start": slate_frame_start, + "slate_frame_end": slate_frame_end, + "slate_duration": slate_duration + }) # Update data with template data template_data = instance.data.get("assumedTemplateData") or {} From a84ac3b2db059e47462f06f831d9821e17979cf8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 16 Jan 2020 19:24:09 +0100 Subject: [PATCH 142/393] fix(global): deadline didnt publish all data to representation --- pype/ftrack/actions/action_test.py | 10 +++--- .../global/publish/collect_filesequences.py | 32 +++++++++++++++++-- pype/plugins/global/publish/extract_burnin.py | 3 +- pype/plugins/global/publish/integrate_new.py | 14 +++++--- 4 files changed, 46 insertions(+), 13 deletions(-) diff --git a/pype/ftrack/actions/action_test.py b/pype/ftrack/actions/action_test.py index 502373e626..a1e633bf1b 100644 --- a/pype/ftrack/actions/action_test.py +++ b/pype/ftrack/actions/action_test.py @@ -9,12 +9,11 @@ import re import ftrack_api from pype.ftrack import BaseAction from avalon import io, inventory, schema - +from pypeapp import Anatomy class TestAction(BaseAction): '''Edit meta data action.''' - ignore_me = True #: Action identifier. identifier = 'test.action' #: Action label. @@ -35,8 +34,11 @@ class TestAction(BaseAction): return True def launch(self, session, entities, event): - self.log.info(event) - + anatomy = Anatomy() + data = { + "version": None + } + print(anatomy.format(data)) return True diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 9d065946ed..a76c8f3c33 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -101,6 +101,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): slate_frame = None families_data = None subset = None + version = None if os.environ.get("PYPE_PUBLISH_PATHS"): paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep) self.log.info("Collecting paths: {}".format(paths)) @@ -158,6 +159,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): subset = instance.get("subset") families_data = instance.get("families") slate_frame = instance.get("slateFrame") + version = instance.get("version") else: # Search in directory @@ -257,6 +259,20 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # if we have subset - add all collections and known # reminder as representations + # take out review family if mov path + # this will make imagesequence none review + frame_start = data.get("frameStart") + frame_end = data.get("frameEnd") + + if baked_mov_path: + self.log.info( + "Baked mov is available {}".format( + baked_mov_path)) + families.append("review") + + if "slate" in families: + frame_start -= 1 + self.log.info( "Adding representations to subset {}".format( data.get("subset"))) @@ -273,12 +289,15 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "asset": data.get( "asset", api.Session["AVALON_ASSET"]), "stagingDir": root, - "frameStart": 
data.get("frameStart"), - "frameEnd": data.get("frameEnd"), + "frameStart": frame_start, + "frameEnd": frame_end, "fps": fps, "source": data.get("source", ""), "pixelAspect": pixel_aspect, - "slateFrame": slate_frame + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "slateFrame": slate_frame, + "version": version } ) @@ -294,6 +313,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "name": ext, "ext": "{}".format(ext), "files": list(collection), + "frameStart": frame_start, + "frameEnd": frame_end, "stagingDir": root, "anatomy_template": "render", "fps": fps, @@ -307,11 +328,16 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # add only known types to representation if rem.split(".")[-1] in ['mov', 'jpg', 'mp4']: self.log.info(" . {}".format(rem)) + + if "slate" in instance.data["families"]: + frame_start += 1 + representation = { "name": rem.split(".")[-1], "ext": "{}".format(rem.split(".")[-1]), "files": rem, "stagingDir": root, + "frameStart": frame_start, "anatomy_template": "render", "fps": fps, "tags": ["review"], diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index ecc17b669e..99bf8a4672 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -25,7 +25,8 @@ class ExtractBurnin(pype.api.Extractor): if "representations" not in instance.data: raise RuntimeError("Burnin needs already created mov to work on.") - version = instance.context.data.get('version') + version = instance.context.data.get( + 'version', instance.data.get('version')) frame_start = int(instance.data.get("frameStart") or 0) frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 8751431223..f1a125c4b2 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -316,7 +316,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # exception for slate workflow if "slate" in instance.data["families"]: index_frame_start -= 1 - + dst_padding_exp = src_padding_exp dst_start_frame = None for i in src_collection.indexes: @@ -408,6 +408,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } } + if repre.get("outputName"): + representation["context"]["output"] = repre['outputName'] + if sequence_repre and repre.get("frameStart"): representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart")) @@ -455,11 +458,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.hardlink_file(src, dest) def unc_convert(self, path): + self.log.debug("> __ path: `{}`".format(path)) drive, _path = os.path.splitdrive(path) - unc = Path(drive).resolve() - path = str(unc) + _path + self.log.debug("> __ drive, _path: `{}`, `{}`".format(drive, _path)) - if not os.path.exists(str(unc)): + if not os.path.exists(drive + "/"): self.log.info("Converting to unc from environments ..") path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH") @@ -611,7 +614,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "source": source, "comment": context.data.get("comment"), "machine": context.data.get("machine"), - "fps": context.data.get("fps")} + "fps": context.data.get( + "fps", instance.data.get("fps"))} # Include optional data if present in optionals = [ From 06ed617d9bb25d3dc78d09c49f7a6904fef724c6 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 17 Jan 2020 
09:34:23 +0100 Subject: [PATCH 143/393] make ascii ass configurable via presets --- pype/plugins/maya/publish/extract_ass.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py index 71f3e0d84c..4cf394aefe 100644 --- a/pype/plugins/maya/publish/extract_ass.py +++ b/pype/plugins/maya/publish/extract_ass.py @@ -17,6 +17,7 @@ class ExtractAssStandin(pype.api.Extractor): label = "Ass Standin (.ass)" hosts = ["maya"] families = ["ass"] + asciiAss = False def process(self, instance): @@ -47,7 +48,7 @@ class ExtractAssStandin(pype.api.Extractor): exported_files = cmds.arnoldExportAss(filename=file_path, selected=True, - asciiAss=True, + asciiAss=self.asciiAss, shadowLinks=True, lightLinks=True, boundingBox=True, @@ -59,13 +60,15 @@ class ExtractAssStandin(pype.api.Extractor): filenames.append(os.path.split(file)[1]) self.log.info("Exported: {}".format(filenames)) else: + self.log.info("Extracting ass") cmds.arnoldExportAss(filename=file_path, selected=True, - asciiAss=True, + asciiAss=False, shadowLinks=True, lightLinks=True, boundingBox=True ) + self.log.info("Extracted {}".format(filename)) filenames = filename optionals = [ "frameStart", "frameEnd", "step", "handles", From 846777bff00231031ef7ee86cbb5b3602c75e360 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 17 Jan 2020 12:40:35 +0100 Subject: [PATCH 144/393] update all avalon ent data if ftrack id was updated --- pype/ftrack/events/event_sync_to_avalon.py | 41 ++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index b268372bd6..3f625da1ac 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1468,9 +1468,50 @@ class SyncToAvalonEvent(BaseEvent): "data.entityType": entity_type } }) + + avalon_ent_by_name["data"]["ftrackId"] = ftrack_id + avalon_ent_by_name["data"]["entityType"] = entity_type + self._avalon_ents_by_ftrack_id[ftrack_id] = ( avalon_ent_by_name ) + if self._avalon_ents_by_parent_id: + found = None + for _parent_id_, _entities_ in ( + self._avalon_ents_by_parent_id.items() + ): + for _idx_, entity in enumerate(_entities_): + if entity["_id"] == avalon_ent_by_name["_id"]: + found = (_parent_id_, _idx_) + break + + if found: + break + + if found: + _parent_id_, _idx_ = found + self._avalon_ents_by_parent_id[_parent_id_][ + _idx_] = avalon_ent_by_name + + if self._avalon_ents_by_id: + self._avalon_ents_by_id[avalon_ent_by_name["_id"]] = ( + avalon_ent_by_name + ) + + if self._avalon_ents_by_name: + self._avalon_ents_by_name[name] = avalon_ent_by_name + + if self._avalon_ents: + found = None + for _idx_, _ent_ in enumerate(self._avalon_ents): + if _ent_["_id"] != avalon_ent_by_name["_id"]: + continue + found = _idx_ + break + + if found is not None: + self._avalon_ents[found] = avalon_ent_by_name + pop_out_ents.append(ftrack_id) continue From 7029588793f013d751cb3d3d8c5bbd40e106e002 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 17 Jan 2020 15:03:26 +0100 Subject: [PATCH 145/393] correct setting --- pype/ftrack/events/event_sync_to_avalon.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 3f625da1ac..23284a2ae6 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1503,14 +1503,16 @@ class 
SyncToAvalonEvent(BaseEvent): if self._avalon_ents: found = None - for _idx_, _ent_ in enumerate(self._avalon_ents): + project, entities = self._avalon_ents + for _idx_, _ent_ in enumerate(entities): if _ent_["_id"] != avalon_ent_by_name["_id"]: continue found = _idx_ break if found is not None: - self._avalon_ents[found] = avalon_ent_by_name + entities[found] = avalon_ent_by_name + self._avalon_ents = project, entities pop_out_ents.append(ftrack_id) continue From 40cae28e76154385b2543f5c02c7b84877e26e3d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 17 Jan 2020 16:05:59 +0100 Subject: [PATCH 146/393] sync to avalon actions return information about not available avalon_mongo_id custom attribute(only for project) --- pype/ftrack/actions/action_sync_to_avalon.py | 5 ++++- pype/ftrack/events/action_sync_to_avalon.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_sync_to_avalon.py b/pype/ftrack/actions/action_sync_to_avalon.py index 01d0b866bf..d2fcfb372f 100644 --- a/pype/ftrack/actions/action_sync_to_avalon.py +++ b/pype/ftrack/actions/action_sync_to_avalon.py @@ -70,7 +70,10 @@ class SyncToAvalonLocal(BaseAction): ft_project_name = in_entities[0]["project"]["full_name"] try: - self.entities_factory.launch_setup(ft_project_name) + output = self.entities_factory.launch_setup(ft_project_name) + if output is not None: + return output + time_1 = time.time() self.entities_factory.set_cutom_attributes() diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py index 9f9deeab95..79ab1b5f7a 100644 --- a/pype/ftrack/events/action_sync_to_avalon.py +++ b/pype/ftrack/events/action_sync_to_avalon.py @@ -105,7 +105,10 @@ class SyncToAvalonServer(BaseAction): ft_project_name = in_entities[0]["project"]["full_name"] try: - self.entities_factory.launch_setup(ft_project_name) + output = self.entities_factory.launch_setup(ft_project_name) + if output is not None: + return output + time_1 = time.time() self.entities_factory.set_cutom_attributes() From bab5fb45fffe7a97c96fa173fbf04a23eba8e718 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 17 Jan 2020 17:30:22 +0100 Subject: [PATCH 147/393] fix order of collect filesequence and anatomy collect --- pype/plugins/global/publish/collect_filesequences.py | 2 +- pype/plugins/global/publish/extract_burnin.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 2ab1ab9b19..c4ba064088 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -91,7 +91,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): """ - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 0.0001 targets = ["filesequence"] label = "RenderedFrames" diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 99bf8a4672..bab517a107 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -38,7 +38,7 @@ class ExtractBurnin(pype.api.Extractor): "frame_start": frame_start, "frame_end": frame_end, "duration": duration, - "version": version, + "version": int(version), "comment": instance.context.data.get("comment", ""), "intent": instance.context.data.get("intent", "") } From 57d53f3397fec744d973269619af2f4ef03fd393 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 
17 Jan 2020 18:12:25 +0100 Subject: [PATCH 148/393] added datetime data to burnins data --- pype/plugins/global/publish/extract_burnin.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 06a62dd98b..fcc338702f 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -4,6 +4,7 @@ import copy import pype.api import pyblish +from pypeapp import config class ExtractBurnin(pype.api.Extractor): @@ -45,6 +46,9 @@ class ExtractBurnin(pype.api.Extractor): "comment": instance.context.data.get("comment"), "intent": instance.context.data.get("intent") } + + # Add datetime data to preparation data + prep_data.update(config.get_datetime_data()) # Update data with template data template_data = instance.data.get("assumedTemplateData") or {} prep_data.update(template_data) From 4df0ca8e62931bee36fd38ba9d2d3f4ad78fb6d8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 17 Jan 2020 18:23:28 +0100 Subject: [PATCH 149/393] fix: offset if slate in collect filesequence --- .../global/publish/collect_filesequences.py | 30 ++++++++++++------- pype/plugins/global/publish/integrate_new.py | 2 +- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index c4ba064088..2d8dd1b1c2 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -12,7 +12,6 @@ import os import re import copy import json -from pprint import pformat import pyblish.api from avalon import api @@ -104,6 +103,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): families_data = None subset = None version = None + frame_start = 0 + frame_end = 0 if os.environ.get("PYPE_PUBLISH_PATHS"): paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep) self.log.info("Collecting paths: {}".format(paths)) @@ -129,6 +130,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): cwd = os.path.dirname(path) root_override = data.get("root") + frame_start = int(data.get("frameStart")) + frame_end = int(data.get("frameEnd")) + if root_override: if os.path.isabs(root_override): root = root_override @@ -177,12 +181,15 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if regex: self.log.info("Using regex: {}".format(regex)) + if "slate" in families_data: + frame_start -= 1 + collections, remainder = collect( root=root, regex=regex, exclude_regex=data.get("exclude_regex"), - frame_start=data.get("frameStart"), - frame_end=data.get("frameEnd"), + frame_start=frame_start, + frame_end=frame_end, ) self.log.info("Found collections: {}".format(collections)) @@ -223,8 +230,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "asset": data.get( "asset", api.Session["AVALON_ASSET"]), "stagingDir": root, - "frameStart": data.get("frameStart"), - "frameEnd": data.get("frameEnd"), + "frameStart": frame_start, + "frameEnd": frame_end, "fps": fps, "source": data.get("source", ""), "pixelAspect": pixel_aspect, @@ -260,8 +267,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # take out review family if mov path # this will make imagesequence none review - frame_start = data.get("frameStart") - frame_end = data.get("frameEnd") if baked_mov_path: self.log.info( @@ -269,9 +274,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): baked_mov_path)) families.append("review") - if "slate" in families: - 
frame_start -= 1 - self.log.info( "Adding representations to subset {}".format( subset)) @@ -307,6 +309,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.info(" - {}".format(str(collection))) ext = collection.tail.lstrip(".") + + if "slate" in instance.data["families"]: + frame_start += 1 representation = { "name": ext, @@ -411,3 +416,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "tags": ["review"], } instance.data["representations"].append(representation) + self.log.debug( + "__ representations {}".format( + instance.data["representations"])) + self.log.debug( + "__ instance.data {}".format(instance.data)) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index f1a125c4b2..a3a4996ca4 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -7,7 +7,7 @@ import errno import pyblish.api from avalon import api, io from avalon.vendor import filelink -from pathlib2 import Path + # this is needed until speedcopy for linux is fixed if sys.platform == "win32": from speedcopy import copyfile From e52687a6b75cbf493592f977a1f944d72a9f865c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jan 2020 17:33:03 +0100 Subject: [PATCH 150/393] fix(global): brushing the code --- pype/plugins/global/publish/extract_review.py | 72 +++++++++++++------ .../global/publish/extract_review_slate.py | 3 +- 2 files changed, 51 insertions(+), 24 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index d497d62ed3..b1dfec6740 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -57,10 +57,14 @@ class ExtractReview(pyblish.api.InstancePlugin): if not ext: ext = "mov" self.log.warning( - "`ext` attribute not in output profile. Setting to default ext: `mov`") + str("`ext` attribute not in output " + "profile. 
Setting to default ext: `mov`")) - self.log.debug("instance.families: {}".format(instance.data['families'])) - self.log.debug("profile.families: {}".format(profile['families'])) + self.log.debug( + "instance.families: {}".format( + instance.data['families'])) + self.log.debug( + "profile.families: {}".format(profile['families'])) if any(item in instance.data['families'] for item in profile['families']): if isinstance(repre["files"], list): @@ -115,8 +119,9 @@ class ExtractReview(pyblish.api.InstancePlugin): # necessary input data # adds start arg only if image sequence if isinstance(repre["files"], list): - input_args.append("-start_number {0} -framerate {1}".format( - start_frame, fps)) + input_args.append( + "-start_number {0} -framerate {1}".format( + start_frame, fps)) input_args.append("-i {}".format(full_input_path)) @@ -180,14 +185,19 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpet_height = int( resolution_height * pixel_aspect) else: - # TODO: it might still be failing in some cases if resolution_ratio != delivery_ratio: lb /= scale_factor else: lb /= pixel_aspect - output_args.append( - "-filter:v scale={0}x{1}:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{2})))/2):iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black".format(ffmpet_width, ffmpet_height, lb)) + output_args.append(str( + "-filter:v scale={0}x{1}:flags=lanczos," + "setsar=1,drawbox=0:0:iw:" + "round((ih-(iw*(1/{2})))/2):t=fill:" + "c=black,drawbox=0:ih-round((ih-(iw*(" + "1/{2})))/2):iw:round((ih-(iw*(1/{2})))" + "/2):t=fill:c=black").format( + ffmpet_width, ffmpet_height, lb)) # In case audio is longer than video. output_args.append("-shortest") @@ -195,9 +205,14 @@ class ExtractReview(pyblish.api.InstancePlugin): # output filename output_args.append(full_output_path) - self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect)) - self.log.debug("__ resolution_width: `{}`".format(resolution_width)) - self.log.debug("__ resolution_height: `{}`".format(resolution_height)) + self.log.debug( + "__ pixel_aspect: `{}`".format(pixel_aspect)) + self.log.debug( + "__ resolution_width: `{}`".format( + resolution_width)) + self.log.debug( + "__ resolution_height: `{}`".format( + resolution_height)) # scaling none square pixels and 1920 width if "reformat" in p_tags: @@ -212,21 +227,34 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("heigher then delivery") width_scale = to_width width_half_pad = 0 - scale_factor = float(to_width) / float(resolution_width) + scale_factor = float(to_width) / float( + resolution_width) self.log.debug(scale_factor) height_scale = int( resolution_height * scale_factor) height_half_pad = int( (to_height - height_scale)/2) - self.log.debug("__ width_scale: `{}`".format(width_scale)) - self.log.debug("__ width_half_pad: `{}`".format(width_half_pad)) - self.log.debug("__ height_scale: `{}`".format(height_scale)) - self.log.debug("__ height_half_pad: `{}`".format(height_half_pad)) + self.log.debug( + "__ width_scale: `{}`".format(width_scale)) + self.log.debug( + "__ width_half_pad: `{}`".format( + width_half_pad)) + self.log.debug( + "__ height_scale: `{}`".format( + height_scale)) + self.log.debug( + "__ height_half_pad: `{}`".format( + height_half_pad)) - scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format( - width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad - ) + scaling_arg = str( + "scale={0}x{1}:flags=lanczos," + 
"pad={2}:{3}:{4}:{5}:black,setsar=1" + ).format(width_scale, height_scale, + to_width, to_height, + width_half_pad, + height_half_pad + ) vf_back = self.add_video_filter_args( output_args, scaling_arg) @@ -254,7 +282,8 @@ class ExtractReview(pyblish.api.InstancePlugin): # add it to output_args output_args.insert(0, vf_back) self.log.info("Added Lut to ffmpeg command") - self.log.debug("_ output_args: `{}`".format(output_args)) + self.log.debug( + "_ output_args: `{}`".format(output_args)) mov_args = [ os.path.join( @@ -280,8 +309,7 @@ class ExtractReview(pyblish.api.InstancePlugin): "outputName": name, "codec": codec_args, "_profile": profile, - "anatomy_template": "render" - "resolutionWidth": resolution_height + "resolutionHeight": resolution_height, "resolutionWidth": resolution_width, }) if repre_new.get('preview'): diff --git a/pype/plugins/global/publish/extract_review_slate.py b/pype/plugins/global/publish/extract_review_slate.py index 8224cd51a8..9a720b77a9 100644 --- a/pype/plugins/global/publish/extract_review_slate.py +++ b/pype/plugins/global/publish/extract_review_slate.py @@ -185,8 +185,7 @@ class ExtractReviewSlate(pype.api.Extractor): repre_update = { "files": output_file, "name": repre["name"], - "tags": [x for x in repre["tags"] if x != "delete"], - "anatomy_template": "render" + "tags": [x for x in repre["tags"] if x != "delete"] } inst_data["representations"][i].update(repre_update) self.log.debug( From 9de6b0407d03ea46f761cf1c4df968ebfdfe7875 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jan 2020 17:39:42 +0100 Subject: [PATCH 151/393] feat(ftrack): reversing changes on action_test --- pype/ftrack/actions/action_test.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/actions/action_test.py b/pype/ftrack/actions/action_test.py index a1e633bf1b..502373e626 100644 --- a/pype/ftrack/actions/action_test.py +++ b/pype/ftrack/actions/action_test.py @@ -9,11 +9,12 @@ import re import ftrack_api from pype.ftrack import BaseAction from avalon import io, inventory, schema -from pypeapp import Anatomy + class TestAction(BaseAction): '''Edit meta data action.''' + ignore_me = True #: Action identifier. identifier = 'test.action' #: Action label. 
@@ -34,11 +35,8 @@ class TestAction(BaseAction):
         return True
 
     def launch(self, session, entities, event):
-        anatomy = Anatomy()
-        data = {
-            "version": None
-        }
-        print(anatomy.format(data))
+        self.log.info(event)
+
         return True
 
 
From f7f0fa5ed9def798de8ed6e60a8b03de3210d27a Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 20 Jan 2020 17:55:18 +0100
Subject: [PATCH 152/393] feat(nuke): KnobScripter added as package, not
 submodule

---
 .gitmodules                                   |    3 -
 setup/nuke/nuke_path/KnobScripter-github     |    1 -
 setup/nuke/nuke_path/KnobScripter/__init__.py |    1 +
 .../KnobScripter/icons/icon_clearConsole.png  |  Bin 0 -> 1860 bytes
 .../KnobScripter/icons/icon_download.png      |  Bin 0 -> 1225 bytes
 .../KnobScripter/icons/icon_exitnode.png      |  Bin 0 -> 1883 bytes
 .../KnobScripter/icons/icon_pick.png          |  Bin 0 -> 2184 bytes
 .../KnobScripter/icons/icon_prefs.png         |  Bin 0 -> 2277 bytes
 .../KnobScripter/icons/icon_prefs2.png        |  Bin 0 -> 2758 bytes
 .../KnobScripter/icons/icon_refresh.png       |  Bin 0 -> 1778 bytes
 .../nuke_path/KnobScripter/icons/icon_run.png |  Bin 0 -> 2341 bytes
 .../KnobScripter/icons/icon_save.png          |  Bin 0 -> 1784 bytes
 .../KnobScripter/icons/icon_search.png        |  Bin 0 -> 2400 bytes
 .../KnobScripter/icons/icon_snippets.png      |  Bin 0 -> 1415 bytes
 .../nuke_path/KnobScripter/knob_scripter.py   | 4196 +++++++++++++++++
 setup/nuke/nuke_path/menu.py                  |   10 +-
 16 files changed, 4198 insertions(+), 13 deletions(-)
 delete mode 100644 .gitmodules
 delete mode 160000 setup/nuke/nuke_path/KnobScripter-github
 create mode 100644 setup/nuke/nuke_path/KnobScripter/__init__.py
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_clearConsole.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_download.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_exitnode.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_pick.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_prefs.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_prefs2.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_refresh.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_run.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_save.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_search.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/icons/icon_snippets.png
 create mode 100644 setup/nuke/nuke_path/KnobScripter/knob_scripter.py

diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index d29bd038aa..0000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "setup/nuke/nuke_path/KnobScripter-github"]
-	path = setup/nuke/nuke_path/KnobScripter-github
-	url = https://github.com/pypeclub/KnobScripter
diff --git a/setup/nuke/nuke_path/KnobScripter-github b/setup/nuke/nuke_path/KnobScripter-github
deleted file mode 160000
index ada32b0144..0000000000
--- a/setup/nuke/nuke_path/KnobScripter-github
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit ada32b014470dd283ec52df09295e7b56c7e14b2
diff --git a/setup/nuke/nuke_path/KnobScripter/__init__.py b/setup/nuke/nuke_path/KnobScripter/__init__.py
new file mode 100644
index 0000000000..8fe91d63f5
--- /dev/null
+++ b/setup/nuke/nuke_path/KnobScripter/__init__.py
@@ -0,0 +1 @@
+import knob_scripter
\ No newline at end of file
diff --git a/setup/nuke/nuke_path/KnobScripter/icons/icon_clearConsole.png b/setup/nuke/nuke_path/KnobScripter/icons/icon_clearConsole.png
new file mode 100644
index 
0000000000000000000000000000000000000000..75ac04ef84b7235c071b512d1dc60410c32c5834 GIT binary patch literal 1860 zcmV-K2fO%*P)<#DIs?bN`PzW0}2v_#Jl&xLld=NpC!8j9&8?voWv4HeW-6& zV%O#g6+#LK;he7RCJ^x#SI6>R>uc+m-hJ=glhZFx|L($t3ujvvu=R&qJDz{>`O~-d?z&UNca}b; zaLeNAv-}81;cvdV_|?eB=+CRw>M8)xW&%bY9i80OyX(%71VD(oTg34M0%6=*~;x#@O)G)D%w}OQiZZ2=S3l2*_kIU5CdHA0uiLLuHWq z+}GC!N@)OK7{FY>`H<&&W#4CN2#B0dvG4nEJr_I(od+;XDSfT4ukY<>q?QZ>1V|01 z3K$s~@ov1~##{6OQFTmZ0?RUC+xFinHRk1q0q1zF99h>R*&W66YYv*<3E4 z&F1ozrKNcxgb1~Z`K9Hhd^VfQpFejF?&M_cwt-{YvsR%nsE$<}@uUy6KF!RW0Dvb6 z01Old#t)AjdbXurrwKEEu{`N|LjmW33KFHp9Ed1xbl#aY3ufeeE4`3nt567~_Vo1Z zn3kiSpz)bNn5BlP-1C-Y4ywy!wLq(^cSaOICfe&l%khep#~>hD z*>A=~;2uGs1Sk=K{4PDmZ(t$=mgGblJ$m#g*4Njs#L2NH-v}bJRx3#zlw@5|mM zh{jV&4Qd#X5YBnvy54D@En-9W+WO4ly#X9rSorh7m6a79g(BKD=^X8QSzuHEneSuL zbrIAQDcf;oosvDP*9p$~kn4G6f01e3sGP15Bhd=M0B}(V@%7o2kG387<$-%C(TX!6 zNxh_|SJ$|a^99o!%;s`GZB!?EbGf>;h6v3ISd&07;qGLOe2+Oki zCnhHD)_-e+FLD0VeNtv%qy78LnKS=5aPZ)-wrtsgMEfickj|7)?C;zElhxJLvkyP= zux(k^V01#Fmvqkc=JL6nxjZ$fQ7V=GNGUbqYEQH|H}VO225R)gRxB1j7#kb=S#7FJ zV#e1hK{mrg#Lt{UAM5j_izo6vsb5HDgS4Q~#AV?m<~ha_FV2aW7k1*xl;E!nTXu(nFy=2=-7acquKEieR1Q z+>@t*dXSA7u{|u$xQlCJGi}u42Iy*bRO(YUWqobqip-`yFOt9z_qBjM&tx2z}IdmKTje6m2N3M zZz8UtK;*KaK;*K)bs_?gZX4koh)4`FZljP|mkk9Xmkk9Xmkk9XFNA#qIfu=}JtFok z{T#OU5~m76jg8mub##25+T8qeczAeVZ*OnM{$3!EC$I>GLQNeV9o-un8;?guMg|TJ z4)&e+;fRQ$C|>ouSUeE8QX>R3H#fiC)6;WjcXxN&93ugdlUgk;Ep2^$ed8XFQVNoi zk`mA0;NX`|d~uXofk2=p5V%r<5FjBO4mZbQvFY;i@|UbZ_Bz2%!c%JAcGcI{-vi2= z_~M92DwSHFpP!#V%5*&o!jbUnu~=-{@An75h=vpviAsw^I^}d-XKZXN41DFp7sqRf zqA1t;`uZM6qfxAi5RlqTJ*lmIIVglgO4#1s#_#v*9*?KUG_C6zsyaYCJ_Ya-aQCc! zIKH6MH0|%%+1cxnNF-cW7eu7f%7<&d%P5L?YLM!Jy^5iCkINbyQWgllTM}bK*n3u1t?KO&iT3V`i-Yq$Q;zp)wVZ zLx#i;P94bS-ETITx*+CiX6}cXNaU9Ug9#HaF4!cBM;MVV4n zRVI^3nMfpLYip~-2+R@iQN9gYRoK+jbld!|Y1+}!($b7W-CjjeCVIc=y)!*Com<#v zibPda)hi7R4X>GUDwSGaTU%Sn8kC$#%9-ShOeT|Za&mI}n3l7_D}?xkJVoNx?OPw3 zJ|>e%+1=g!z4`96oxKEj%Zbr6o=Yk30zZ?7n#gKEjTPi75CMDBXm7c}4~zqup6Tp( z4z!bxM*jG*;2mHc_%u5r?wuR*qcx`iwKrJ*v7_$kN6jMJ7-{C8qA2I#)xU;$egWfy zx0IPcTQ&0Z+0(zOtE+Y)0k&`}a#F()gS#K*;PB!!8@MlPNGEMAmHOQB~z7dT^ z8+~Oy!x5Y97q#Y2WMMq8$vL)jFqW>WS9*^PR!0%<(i+Dx(daFIv9KQjFt1K#5lya{YzsJYC)2_TS(+R5+&?*h$0 n`Du(V27&-703T?<;XO-kSwPNO(j+oAb=z78ck2u&Kf@S6saWx6gHe5Ys5htCIAySJ@h67 zbxAoDd1x9tMQ`rKEsTm(B_vQnN)3u03R{=d1(q#}N=j(%ICOilS&YZrmtjvsr%W(xtTT;6Z=8J_g#kIU0>7Upn{Qw?LqQ z0|N(6F+F0M=B8m7x7vMhwW+Gg5{bm}(9qE10B~&V*l}f2(N@=1e^V-zc3Sn*GEnrj zX!6XNvoU~!05~6{Gc9ad-9f2}%HoOmQXmi*7S0L4(b3Q|%7k(%x0buUTiR_2swJRk zG@3kn_DsxHWPg%#kt7?c!$>Kr$`XnAk{k#OyZ*_UGytK{=(wUxp31GRUEggXQcZzg zdF8@S&YgSdqR>dUVlZtrY=NdPzp1cj|dc*>e2;H`@ne=$8XbG{*rqOf>8z7z<^!)_qN zyf@Gf9zOUHngpTY#hp@V`|jPl|6)3OL4s6%N>xf!Xd=>eJyI+dH}d&>O{7#+W${F! 
zTmZQDyL;<}^a9cg=_djF5kS5pAOOhc^MAH1>!PNy0U}hTT9u~q>SDScF^k0w!vm$L zDoezx3jjP*Q(rG;7LiJ>E!_(#V;0JaUo*t9Gw!k9)POWhRbRH;Ro07%pI7X_4n z5o^XX6Y%Qt{Mf%OE-qqz{-+{0-gAETv{v__b;B^WEX#^$8iOQxhhi59?wwah698Rj z5#K?dKN^iD19D*4yU@K8P)}zT7m=Kw7clPwc(vI;+vyZ;I#6N8SY=l5@@&^?q7&)~ z{jtHveC~CSyXGsjyL1TY#SD`3NjO#deE{DtJM9fnC6vbO5O@Y52I%Sv&K5O*OeTY5 zvVE9NfGVIg#z-)^dC#kK_4Pfq4V1|^kwN<~odVT`f*6`R(cWF(Gi<82{%mbW#$7X4 z0sNpHm~Mb7peoYO-A8y^I+ICbJ~=Oj-w{l=K($an^EHK9E!Ir8FhKV}fCJ?}34q(|Y%W}P)zIlZsXgcGjX&1Kpv`XTwBmi3Q8c-^v(~7 zUh5c0RaF*`$CqVU9^zG9LR%WPCJ^bm-ZfN5K#Hoexp;gzAj?DGo-WRlX3cfENO&~q z8Vj4I)iG4tK&qm!xw*Mze;_dAT^G*u*XW9#x<;dpCej!Sn`UvtFlx_JJq1z~mBr^0 z%d%e{sx8TUPrbgrj$kkd36eN%xc60PvYS*QG(05s1jv2WB#XmY+gkWM3&N`Z3@!!WX@X^t~p_q=bZ zPFYlFl0dq~UM!mCM!rR;ra-FVz&M99@>HlsI-SWNl}w7OR{)$d3}ef*tO(OI8-^+h z`kuO-NHqi+931rh?A>?&aQN`>*Wfv{*~Q9Jv8r@BlSXPj<%N0G`SMM}Fg8ug3^Pq9 zNqo)Qk9)P#i4#dQHFYwa%jK@$zklCqH0Fk_&&bHg6Gx65Im$gP3m{%;)o*DQ(rKiU zDY4ghVTAp1>32&P-g)OTe*{q&?M;@0Dm51^F2}%u0crf{@z)!T7ibcpe?NHe&xz-b zpM2`6W6!$Vp(nM<70T8NnWiwpZW)HLSu~3go#`mQA)~qx2LYdLeTJ)7u6|>uwDW5K zdq%108v6YwpZrOgR9-te8ghqT?`%^}v!*aERNlyMnx+|M8Y7a(yCOYZ1W+gx@Wz{O z;NHD^5`Zj#+Z_Y#mUh2bU0a=+oS1xebab@bwt(CmPm5~SI*bct7{-QKG$TxBZtT64 zp-?Cw7K`D|ojU;VR{%c*u-!2b0F+9l8*6K8rzR#Q#zUcy-~n+e!|=9Y%20V@!%4r2 z7cYg+3y8hx!u%b;Hvtse@ZHe-KA$gl`SN9+%jS4Cm*ufoj1v)e0NyJPr6W5(Gc)sO zWo3nDvsr%a+BF^w28Hjx0eHFx3^nE_i9VV-ImJ~~6%XqLjIi@#V`Gm_pFYhG9Xhn{ zFv9!*K13Pj=jaWLu=4*!3^nLRG9DwftkoU=d-}=_KzWtBde*ntF VW=lh7gf9R9002ovPDHLkV1jt?fvf-k literal 0 HcmV?d00001 diff --git a/setup/nuke/nuke_path/KnobScripter/icons/icon_pick.png b/setup/nuke/nuke_path/KnobScripter/icons/icon_pick.png new file mode 100644 index 0000000000000000000000000000000000000000..239553755060aa93162d8748796d7ea29bb7b07e GIT binary patch literal 2184 zcmV;32zU31P)+O)@S-n@C;?RGcT)YN=b zU0uDWzrWuX8Z#llVPRn=hr{vfg$ozHE=dwXh(VyB-`?Kt*}8S>56+)Ie_9{=z=2I> zb3|#$_5-o8aW;^U5C{nnnx{|+@cDebnVFf1&CShq69Q~Dnqg2Jk` zx3^zfy?XUie!ssD!0@18O=fe%j?&WVSgSQj(@6qJs0Z39&6j}5lP5pa-Q8_yYHIof zKpro^=;-K~>ajTpYqeUF1R$y7DnRpvc1rVAos*N39e`=vfY-dbX6v3kdm3yu+X}PE z9I<0Z>0hkY*d%QN3V~i#jZE+BCac9_F$1r|ILzT!*R0vP=C#)f5rRM<5b$_BjWIDX z&uSt{5(vTF`2PNWQc_Zm0Q1LGVT;A`%qy!`=PM7CkPWgN6BF~Say6)^(8)QXuba?9 zaOKJsfM;Ngr-a+>ZY(Y?P7eeEf!mDoZG3_9%8J$1)d4&-=D+|oHAl|Y*49eHQ3;`2 zaChhfR8>_0w2nEjgd}BUIewIokRYKI0CjlPG%)BZBR;xcb>IL^O-%qt0E}Y=E6cJW zJ3ITm)YR0qTH;p~77Wbyv>_GNL{pOt$q!C z{ngi$mzNU=1ct(@#|-uLYp|iAfz6xq7#J7;C2|w+=56)b5)%_8S(4DC)@p}VqZQaqPJDenfX{%%fYxT- zGqA~QjwmiJ{`2$k@r$%kl_c6Ox8ZO&XlrW&xD3n(_Iuo(Ca>4KEHN=rlI5W@QIeE3 zdgja-Hf`EOUtgaJy9l_czxSSi&1Q3C$@Y>1&pr2Dy8^4|;(7{umR_T zVPAMWgV2&B2Vs@WaPi_r91h2*uy7BHE;2H5>f3L>eJCz2&Ng^f(9+UER#p~Qu5;OI!OaF@Nrl*yAH`T0BV9Qw}FPtVrZ{R_>_ z{%drIN0UOS7c|c&)lu114)8@{{VqN05v~UR8-)0yJJS< zxFdxlBO|Alm6iR?VzJDE!CgUPV;2^Q`^;i96VKg|v1 zS*10QI9Xduety2XqW=!00zE^1Sdyf--h6Y%ikDx`Rm+zI-1Pa_oR>#^eLX-Q@DdOK zR0d%SN8=e)VVljiT!RfNRCoF`E|&|x-w#Sdk_PmK!e(Y>mMvYnbe-DYN|NBljb5Bi zCykAbpk#q%z@cDRBk;v&JnBmiYvJMH=E};-3y(%cO&7sR(|N6%~d`OHhV%5LT8A&P~p}DJdzdgb-Q>*3;8NPEHONE?fX*>ZGDfrMtEff#c)j z7wY}}`}cF{(j|a@p(r~9)@U?_xm=q+OirG^Tx~BU3A(zjl9RImkEaFTD$3&g{9a&3 z1)e^A`Zx6}`_!pZpfn-N2w7l8qcMEjwrwBHnKS1_5ZZk2=;$OTCkLOh2o`Uy#TU%MbK1+r5qEx^8 zhJ_InUiXuAyVP!4LfNVWbMBXBlPSXTRLm^!$33)$J(ba!`y z(gZCC{v8whFW{9cS32Y46BemGbwCKJs;Wp^ym(-N-TwZ}nKR>!Mq{}0Ho*;;7;L@^ z3JSP(?HVX;r5#1#F|#3g*lxF{yIigc{rRpgr8S6(iW&?W=8NOOg9q8UbEjHR&IV!I z#u9c2a8h#eO9lD)`wX&d80nLdB!sr_QvJ5}cFM}iIDY)NdgU{eOmcOsVL>+~o=-?f zm1WrwI?Q@s00RR9T)upn=H_OO9zDv56DL$v0X_iU0Q$!R7IX^B2HBA3%scWuyFF3p zH;DoS6926I2YY|Fm!@+~=yX=i17reK;{~dkrf}d^;8saVi5p0?+wIys4G2Mb`TOkK 
zw@)9}4_pLJ1AhcQCZzH`C^Hxga;3}V5=V~Ih{J~ui#f{q*kg-H60x7OpO)6Kx#;9qP5~wBTi$g4s_ZyGo~Hu z_@hp%787YIluooo(6abP(ll0*nvTB`;}}odN z?%KKQr4K&*;AphRjyfSpmgJi1n)ZYd3CVgAzNBt<-@duz*(GCL!sUwQ_;NIr0gN9v z-jpyhAsHmx93%lqNJ9CKkOU(~B#s_8e%zX9j~{hHlP6Ek1dr=g4w`e^A}Rmf5^Woz zPRL|3&6I>{yvj-vAaud4VUx);956+5{FrDdN={B5w_?SL*Yfi6RvTht<0MJ)a)~5) zFIP#Dm^g7F2?+@qO-)V5`}+ENqG3nGNg*jI`LTj$^Vd1P=>PmgLqqMAD_1@SFc=K6 zFD_g5%NKsQwA2u1h;w@f2_&I-!;=lXuq1&H;B-3stE&F`+sev{*YDlC=L98mv$M1F z1$caT}ep* zLi^YKVap}UsgwUbd2HU?dCyHqoA9{y%x1G)EGjC>bUK~=Q6W@VSh!*BOKZ13jIn-L zk`TfZli_e#S=s9c4<7v0LlYV~V&v$*R8?L!8ix*7<3$S?ys#EOJPIt)+t=$__TsX% zuCA_-p%*sGty)#|8{?26!vUS~>O(ztdm<7X*m$>EKMX^RL*!LOMb+W#4N1r}Wy*|& zIXNp4c>B^1tKRa#ZXSMEdEXZY*RSA(3v*_fOjEua%I1(Ew{z$3-kb5w%hvjYw%;XV100^0QY`@0*a(XdGrX zoA0>IbWdk!>~J{PyLT@~jvVn?)OwL+nZ=72Q(Rn(!C-KkB1r+Dvt2})b4?|ou+!nBw6v7k+FFgwd0;*8BrsIHzV33l*4Nh7ep*seg45|# z-xdf}qlN>;oz-f^Zntj^W_QR+;pEAa$7ju&wKgFkK}n;!tv?XnwQCp0j~@s40(co% z1)KzK0S@)L1$+eTYinz})pMuk>Df=sc2BINEv^8#xw)Cr(o%YRdozF?fZx6pGAXpT zw_hk;x6X3@lTVZxmpn5lgzze7u3o*0*=$A~BnQ|FglY|3z@DT3IGST^vI?(BawiQB zK7al^#l^+6x3>da0PY4-8ZK>%o;!CIY}mBX(%RaJBs8CBDasRkW^)}brM3Af@Lm|b z9#$^bJLdZO!-_g((m>nd6x`a{%7zUa=;`SJxCG?lUA#w32*B#-cRU4rF5Dgx>aPc% zK63`(Q{c@=$!K!=)TzyGT`DQ}3Q{M{;cx()0KN@agXuB^d*CCt6GtVwyxIa!wgw4q zw%St{PRA!_>JPx zsmJa+A<4jf5bEsgY|{qu?>z;h$BY56MXFCPW5$e`plL#)8Uy}S&^s;C|NUS&PT1T-M;X_v9WOx=?Y6|*zn7*nY)V@}GHTZF)w=lOE5brfzRMZQ8U!d-m-4PfAKkdMJHCYdJ}h zq__V3)`j%6^r`OMM*^Yi1$?p7=@ca;B~PC@bLQPh$Yf<@&EK+ROP#@Bi1V~pf!lQr zH(9Np{dncdCxR7Z@1ff`}_Ot219K8`0?XR^XJchVbi9KZ!TEyy&uW493yGrtMj_9C-#K5 ziqD=qhqW`^@ieLrhJ_HUUi}j;Uv2~# z2K4&c95N|bE?;gqR$t#1Z12&{s}I)y>j5QzvSao28kv6)fDA;Cw)gKZUw*gmZor5R zWxOBOCB=xa&-;CM@3OzV9F)>(Loj>866)&idbi@hfsg!(9(9w57>6jD{IP#(I8aeR zS63InpMbLuObFm`P0jah*Q_ptq!f&R?b5|8Sp!=>m{XW>Ukt*0?xx|5r zN^CZ(yBZOI&1U6rbq&BB;1{864Ve`5l9ZIZxu78bRfoetLqh|Njg0`r#Kf?C`Er&n zUFtp`2)ADa*uSW%s-m*8l6&{=0WfLOB<9S?CN?&n!!^}(c60)4MCnu?o)8cRoCBun z?tKRQ4Op0zl$2FiSV(SeF2=z|f}IBGPkHQpcIxWtICSU`9UUE-&QriKV2RII7lBLy zwA~L%2;ebb9ZDHrs}cmIuX+_IU9)CQ-19$pUg>K*=THIh+XvsKtgKAa-4AR5ehp|R zC)0s^U=Z*Jpf#LUEpLh+ErVgV)nz5(o1 z_3Pzh^@>=c{lV*cV`Jk+&%vZ3{#aqCuER2vG#t_Al5i6BFL*7@Etc-??mJ=99^GAC zG`BPZoQ~%BQTu|Yi9m>ETl4>>PoKWXXfy)&eZA}G=wRFSZL~DEfU@>F{1ko3LPu&}#6pnf*?|p)~T#EqE1`Qq!W$Cs)>nO>u5WZ5z-NfNjsF? 
z5I=~?k0vHmTQt!oIvs`7PjJK{iy}y(qkO1{QBX1Ha(DOcp8mL>>@K@^H9PHed}nlg z)_c!+pZ9(5InR6U1s>TWdt~7>63)Rb7E9H0&pkJbQYuYI2pEPLC@Ly?!Dh1^4AEfQ1ks<&<2q>k{Xf!cXrc8M;Tyq0qBxW!gGazP)aLi;lHPyHRKz_L94;!)M zFvuA5O9*FIUN*1T5r{~O@_wt)2Q`!V6F$5q~s6rpi zFbs3}@R7gm+Er;CKR)ZHt5>fskBQOh{oP|bWjnmSxM&W;ID%#_D6zD(w5;+i<;S&p zZLF{W5KvH#TgaG4LTp4#DfJLSRNlXPqC%9!7=YJ5r0*TDUSyR>{z?D0R7*84*+ii zI1yfGlI@_f>hlarxVM*zSY0PRYA4M|eXn>+XSAONK@XZ&FP zM>L!|h0k~IM)TRTaJe|a5FZ~8v)PP=3m4+aC!P?4BLra(<`<}hgak~RI<5KOp+gl) zeN~(>lgTu}U^HeD+3F=e0$$j~x?r_hQBYWjMw?9_6mHO*fdFXVyIHSRkkzz26GZ>8-hUDaNrqQEwhQ-A_8Jw6%11UvGE2-E2@|R;#sY(xgeV#0&VYR}m&aLwy4Z3JbuK_zi&10t7a8_bsw34PRHjS_0ihxq5<8qu#$vI&cI3#BUGjl_O7U5Nl(7F$X`-N*0=svAAq=%5 z2q^&g{@S%`GwbT=dIvvYUgrH@G8`2Z6`hrxZT7y!hnU|Amq~*_m}Oa^Ooo>Mm>-asI8x}rLtkGX_;TMAn9E?hapPvA;HK2>_{Y+O zgKz|Zyr9GaX^Ejo0R)k1HNZVvM?)c|iPmTdKN=PNRgMYq9n&u{BHR6yV~V)PA4hzA z{A~b=x?I7G24e;t6e8IoB2gF;`ztRWVhqFFA6&~axuXD^&1QWlqzECzY&Opo9OeW7 zg@mQ{h43288#^3MpK*QE{kPwquXi|@#ZIRadc7VRO{7FqXd?pPbW?iN{{8!Rx?HYX zN{G#vF=Nr}*|S$j^ek zZD?;pc>9vcC?DS)B+v3dY@0L;C6_u{&`x}sZJS}=F+OHdJ&7!M+wT*3rN zNu!?5nJ{5`b8|~ge}DgN-{A?#&!4{Vop;{-hfb%B74DP8$p=>*sK$;RJJ8e90~dEt zMFDgGs0Hu^fXx8zi!%g@sLTuiGB>=v;REwDvq2OGAfS@|@xd(1I_v7{{%Nz>4)*l) zT#br~iZL3EnR$76ze-I_%~GmrCh+jX#Zk zW^}HoG(?#k2vaX7L&N?Sk(DcdTVgaC$NwP2va_=D=Dj@c&7q~Y0<5&33xYM8sEFd? z;%b#j6|~ZV63fWUm`n&!$#xZ~U64guDfaj3mcv9Qr-sH!Nl7DOV`C>jG_fzg{Ic9; zv)QD+i~HTXH)?8X!ri-fgAF&NH8$FC^JWhK*{Uh*k36Ox56a5kM_*sxM+mY$1WhT+ zvaZ_WwTa`#j-57q_y~R`A$J7!0XsHt+KdmjZpGfcdvWVlFZ6mno_gwOkRbZ6zrP;` z4<5wEO&d{Jxf5SkRU=_|0#XgBA}cQG1eCgAwU(i}x(Wc|0el2b9X%K^0C2NzTWw9v zPsWZLYaB6rq~IgYwYH*Q?OL2Zb4CclvGeCI=&P!#w0U`E#19+BD4C6(T0Xo zU|AO1KK&HGTeAk;-PZw5+Pn^68aVm2>hk4I?ky$Xt8xG4NfAz!w6%8tz-<730q`ui zuL!aaA3n6}^`%R3`b-m87mKp8_vEDBK=8Aa&?FVOHyrz-qob!Y3?uwj2VfDrPG}b| zSyH_8)mPsUGvs}&1`mGzi(g{EZePI@^R`z2d=#yTj$<55NTh(!zZ$Ruc!6Pdsa@pg$no{>zQcgs4se3jpH%|LVjRp-ey(LuND2t?h?_S%fLY-B zwfjR|)X~||I{0;~_)-=iKu22#z;QhnV6H0weg>{m6n`-QJlqE?Ly`i3YhBkaI-Cv{ z-@S8t5~SL{)Yb+7oe0o+$SfpMz=LH>k-6P&)?%^zAtxuN4B%Ea!pnY#!-0MK_5wqf z;s3m;0StiOU;X@fH=GlP(^vqnf!hYWZ2;UR8x{-OM`HhN`@bdjADgo))(LOj5&!@I M07*qoM6N<$f-?anq5uE@ literal 0 HcmV?d00001 diff --git a/setup/nuke/nuke_path/KnobScripter/icons/icon_refresh.png b/setup/nuke/nuke_path/KnobScripter/icons/icon_refresh.png new file mode 100644 index 0000000000000000000000000000000000000000..559bfd74ab8b550503194ba2409795e77a52c83b GIT binary patch literal 1778 zcmV%no+>XdU+wjJ?^&>5L4iOZ zg#>9CnO#2f)tS$>ZQbU}7r>G$mr6ni(b(9yZR4hmL2a?Nbf%*}g|Y^57rBdfwD0Jv ztE+n&LB@Fr^Ok3;2KYPR$f#rmA`qf|SNmW6wf=xWAf*HWQc47Zsi`TZ{x^k?#t6;# z_v+QFPWYc2NMOT;wz^t>KngG!xO8cZXf#SJ9w#w6>Zo(sNExKGwDhh`n>Gd|fCM4n z?Aal9?b^lo__+PmYrv0+&Hh*2)%m3ivVQ&g-+MeBF8~5ACN8pb=T0UjCIJ2e>;c{b zhVyl*va<5AbLYdDI{&g@nx38Da=WBQ+^h(s_autDL!?LFeqdc|d&?Vqv9`ykOA`}XxArKhx#uiEI z{B<{lQ3Men%!aLq4Oen=JtLpb*AVImMO}p^1Oj1>`pmN5?{5qSJE8>z1#|ASiXakc z0tl|}t-K+MTt!H8H$)=SeP%BkJ?Hn=Hg<$M-kA$VeQKEoG6RyIs;a7c?iWA$=)+@3NTpJQ z!(n3aIGDK@z~tm4t!=FajDV4mf7905<{29s>t;qUR|auvB+~6kDgWHr`TgO;hY9Zv z6N|+_kTOQy%z=Q3@rf_n+FHN);`1+Pd+8+-i3GqJV5z-5P zfh3YYf!JZp;k31j;}a9VZEtTs4y*#c2Y!K(7NwQb!OB5N;00g`hyypu?ME@3-w(6^ z|C;%N8i){?9&&zYB(TM7``76wvo(s54$UdGH6k-}*73!1ogG_L&r z_qGEuQj+ZJ-4MjR(7ni8=Dpi0TWQJ66lKYhCHIQ-0?JcXD>-Z?bAl{fxNyhA9P)a&({Fd3AUD_0hEb#)Gum6a_E1Olxsn_If_9vR#1Ox_?(O-+AzWbGrrvPO5s ziskpe@y4E`W-bZ{N=rR-c6Kf+)4a0}FetQXa%0g!HVNGNoK)zJQp9iN#}P zHyQhqMAF~qJj7y6Taq3R3F8yv^H;FxW|4hb*Y)ObBy#9e^YkXAxMd^pU}fdwlFS;p zaNz<#Ql+j+8b6)X_2%8-@PQ6!I)DEBErHN9ZE1adeY1TU4-O6jTu`a6da#;KCX>J0 z{p#)mDVb_ApE(90gb>?b-u|Yi#G^@!8;(RG!P%j+0RL2JBbyWAbV{b0fdjzTI0WG> 
z^WMGvqwVAgh0QsU97s@AT<3~0iZH_a>q zEMBZFUA5|gM{8^Sf#Tvto=gCS!Bc~LeB>w?xBFkJIOf<{ue`i`bx%*v8LQRnu069T z4-XHsb?a8^lKeTKSH-?_=MmdYq;+I9d{i%=Jb98Wo3~gHp8)-;<{iI9kygGm<3s7J za{9VYGBPqkZ*MOH0|Ul;aE=>H;ad)J<;s=GSS)r_NU`o_Uv!f)$=Iba5~B%Doj%34 z-+XIt>X->J=9F)_Xf?(p%jNeS@GLn_cc-V;B=D}{yGe}O!eJl=3_9$~9sQ*GKkH)C Uw!?IYy#N3J07*qoM6N<$f)br{CIA2c literal 0 HcmV?d00001 diff --git a/setup/nuke/nuke_path/KnobScripter/icons/icon_run.png b/setup/nuke/nuke_path/KnobScripter/icons/icon_run.png new file mode 100644 index 0000000000000000000000000000000000000000..6b2e4ddc2348254824b75b9d14b03c5eb28430fc GIT binary patch literal 2341 zcmV+=3EK9FP)@28X)BXK4_L%0;0i^ZWhi?r}hV z*v}W9YceC#urMm2nF3QtEQ$hS=7ZC_J@-hGib}X~Arr76AbjgV-n) zZOk#|FV0?U@^GH1!GXc+9LG(?HrsGyW@J8-zcRn1udlabe0<`LT3u023Q3x&0OeyT zj%C@U-5R$X0v(3q36f(l!~@Av!9wVpeE^!G=9HZ?glraA)#mOQ&8@9>escD+t- z7@HWIuvS~k@7}%J9bu|b=|>^bq9I|0n+q1Is;b`p<^KJjvLPAx1f{7l-mxTtt@R#w&$3LrIAn^Ia@)&gLaSbpWo)pH!@@rqHE z(PVqSh&!Ttz2Ho0x0{p=N+!WMl*Yo`(>c zj*N`BcJHqLacODk@f$a8XzX@709=ud(bm@1ym8Z}@AH0_!eX)D*s)`;0O*}5E(!W) z8kTZKDX=`2mUoblkeDc@b`*Sa0l*=A;Og(UyZZZo0H9jZ>`Q4M-QC?My1Tna0Bi+t z2|%l~ytA{j#mjL%tyY^%2!YXPgvn&uF+4oHS3X~t)O0;PEnWX$`az(suI@)pr}Kn-9Iw~wvDs{AH*VVaviLNjqM{yS)jg9`=H{L)`Pfq~AdDdKnAmlS3WV*Szxp@FqvL~)6 ztFpk5q=#zj_0BuL-uT{;BUd?&n^Cc=>pEV0?WeeP>((gv>z9DE?a0W;g;S?aecS1D z?gp?PD)(ilPoF*%2uuekrMP$R9vlt_02DIk9gOAKP>8f>xSc47OhIXLbMx9WXU^;? zDk|EOot>@qcsy`8dU5mSFuAf~eaG^>{o7E?>U<8vvUDm;p=z_$PqANWw%5m&^4Bm&IsVOO_tgK82NKK^I7zmN3XfiIotOgdJ+1Kn|p-{IDwDTF9QpsBmat?uzrs(B^It-^E93X3?xCx@ z3-|r^zW^VC`lA;(RK6a-jK?AK7!v?40`NWQiNp~{9C5@EM+E#Ie<;2-)aFSU00000 LNkvXXu0mjfRQGr7 literal 0 HcmV?d00001 diff --git a/setup/nuke/nuke_path/KnobScripter/icons/icon_save.png b/setup/nuke/nuke_path/KnobScripter/icons/icon_save.png new file mode 100644 index 0000000000000000000000000000000000000000..e29c667f3455f4afa92f63ba8de4ce3c4b53a866 GIT binary patch literal 1784 zcmV+f?nK~v}qSZF0fhEyVF|mpGAYx!?s%GY?F1qL{o1(s$SZj9$F?o=Z*azEe z5&XkG>C)YIg^qROn{~A<2#Zi#vx3;hAVph<+Lq2Fo%#82&$)ljJ(IbU%ox}07l!0} z&)jo=|IY9He&-Hc!4+IVBcbdISEbV}PmhjH-+SMEkF~eAUk?hDpajI+QiUrehy;&V z4WF5r`TPF;`=2V8%cWWrYql@!*zwFO5B=f6XVd9)tM?og5;{4@$pa%K<+t2=^POFt zn{GUE^jKa*!ZR<6lzrj;`|o|syIlc-LIO`9%)CFt8?V2?#fuk{KBH!2G8y*l*~6wy zT_|8Uo82xbH9fueiBc(<=VBfk8y%fCmID*39xs(j?0R+=mo8nZivp?U6m7q0Bb~mA zRB9bdOG^MOEG$qg78xA8$9pu8$=p29xvBG}V@HlA@?4XBK>(%jV=K%Hg#yK55#RuD zt*7zIJ{vb~{O!T~!5{PaJo)^)y!Wp|jE#={)r_AzcW!4i%!O<=H!?mx{*R`nrkF#j zDuz-7D_NT>YM{%&4&aAslP`zP&d#3J*0y$K(w9JYPxnAbkki33K(AF{IGg>`_{7A4 z<|@j?6%#>B0BUoIQP9Ihpf0hf5JZ$A06AdVHKsyx<+8&gFN{wdSk7~C#n7Qg;HYqD z))QWs3!uXD_qoQxTm-V&;gN}n%F4wQ^CM5WdEFt}YS#STbAvG#A0Ez*j8BX^!?Hs1 zFe_GByxvfV(g9}8FJ?~z5T(V5^W1QDPGZf>gQ8mb8rebl;A0E?NkAQ^a zGW>=cI-bDVy4;FIG9zZMZU{tq8^3FQdrWj@bjW!t@O=2*3LevoN?E+#@UtC3#tIKx zW3lf>vK!sudn)NlUz%)t>&>@*clPYrZT9q4JFq0apwxpa zrj&%LM4jy;%gW-emEx#8yl+YkJXES$d|Ytz7&~+dL}~Pm=CUa`y}wBvqTAIdg{d=g-F`tQVA0bai#n*4CyYBz&x7^EezE zv%u=+Bj5=7L&gZWHvZ{Ib!T+Wv(R$4h+=G>r7)Qg{uJYHN{Vs>^G;0xd(;Pb|4 z6oEQ^{P;g}x!j1^#Lb58^CaoszY%;F8@{x-NV!}Fc#@w-DF9hqEY3t%Ql&yFfK#zZ zGF7n-2v!eIh(HvZ#}ThQtexEEYl9$p zdCJ&44#$BhwT5%Uu@aG-B~`q*Tw9k#JCk@nk*K87HHyB+uBO)pri1q@`pB_(h=<_z zn6FTXuc6btc8JP>fd2me4JbDFub6a4ZzC3pWs!1_cJx1M zoEd{-ZCxq)JCH7hlXTEu&&|#D^z__rl<_AONTpH?4Gpam%BWj*4|-K@JdPedTu^$4Ew_1;NZ5Ym!|UTn%4!{MnU}8Idqt!4|=Ev_l=p1&&|y- zHa13~Q1~10Ohv^2GMUVmTeoh_v|rQivh-zy!}ah)HSPt9pgi6QY&N`1nfbYS4!w7X zLZJY#3wYh7PZ-;PMS_+2OaGh%+QJGfmlEy(9sxE~v%(sp4Ez^(6YqyLKHRTre+Br} aD)tkwJPu`6Ec-S90000yBjY@lTK?(Xe}yLVYEo0pr= zD#QQVvv=<~&u{N}&U5a07H;T+INX�v)R0;sIYMD(p#66WSO%Tr>3PYNSu{8 
z8yQAMd;{nH)z{P2-R*X{T!)SvJ#ygUg^MFI;deY~2Gi+u`n&G>-n#Yc%b!`aDEAH_ zK$caLGV#WeB>C-j`<|U|zWL(GFTZS`8I1pyCa@)omlRb!UG=Ih$CgiI@&XxIVVKky z!t%TCzSrE;)U?It^9{}fOsu8^CWH_b6%TK%tbC+Sr_<{rvy42A$gBkj83K}o!C*w_ zl(Ec}*VXaBz(DV|ZQC}sx3@e0FJOAT-mrP|mgY70lvOAbi`*_`!od(Ow;QL^iQDbL z>m6Wtcm#vNKuSsq3l=P-pkM_hCB<8>XAJ7jNfq6gfE{M%11R}^0pNb%w}5vFe!l>kdU|?vUaxo6%9SgH5Xvu<^6iwH z0)aq^$K!cV8&YKM<6SH*E!|+V+45!8s~o1jp@F`$X8}F~3V=fzeTRVOTU%Q5#p{O$52|abALU7XIUVN_{q?m!zE(9lIvN?u zBqt~9@4Ij9pQmFqE|}SD$;->jy#tgzDijLR{QB#PXV}2ZKmjm%Vf2-qJ9i#cQi$T< zo_p4m0V!H-#sw=XDqJhe%H|6)pML7*tP+B6p*{TK>VpqHs0@cg$jTm$)n?VFrKLTs z)m~h%Wy_Y8#3s!nN7?`+;CZdi#?#Qy(20L^bexP6Fe6n#JoWeYACE3FVBWlW z0C#G!byYASgfN**iAt6iNPz%8pAW9V+KScC(9k(qmLuyLX3a_hSggg?Rl#IgR>K?s zys7MtTva@Ve1)yQ%r{esPEO{l2&>0h)sil@ZH#H7ueH#h5a%6@_@^Tml1 zYNhi&5YS?G+$`tydQYXNr&r!`OU@j%ZMJ-Q{;elZp6obv>Qtu|hp|$7DYxH#dzo5F z$zhqg`a1gi`vHCd^k}g?Vft=wZ}*gyt*M+ndzLyKN=iz8G%zr5rmL&VHSW#hC@~>1 zsivmpjneOzmaCr$1bg@I<(+rl0r&){(Q1D}Fu&j5bNu-6!n^NYontU4(W2Mub;ZRc z52mEtl-kkJaWoJJj7`O<&1TErv17;Ex7~JInOgCx+t>N?{WzVDa5yZl1I|nxNztS> z+;iv7z2Db&_VME4l0>2A4gqblp_Ws90;ZUgepJ%@C`~8>3vy+{jz4VDEp7_l# ztEzsVHg|4Dw0lYvkF0D97PF<>>2&@&91e%If=#6pd}qn3RUg)F-D)r>s{IMiSQ_hT2 zt^L})UT=GAYwLrOBpJS&n@fVx7`Y^3BMa31FV6VHi4(l|(n};IC1EyOh$#`H-?dmQ zc@~Rhsk5zZUpO2NT}v>4&qJY5tHZs4h&8ex(Nl8)hh$V;}OOk}s;pEjn zyh>A36TQ8?IP4CrHY=HCbJT%AD3Oi=mu0amZJQ$8bPoO!foFkV0XNP6&V2Hh-XAN>OZbar%b{PWMbG!TX*VR2xTwHwh@J8Tp9Nxce zdt-TUl3_U$Cz^ABO+W>(bQ0h9qqG?Q1pE!<(VC0~Mgm5!*R2w4;N{=EB<;{0?1Y{8C1&#tAqtt+3mC0x{?5?Y;SKzKV3B4TJPN(zj zhK7a>!Ql9CGYvLfnT$r`?z%`=s#1@wlmWCk+FI%x>er74cTL}jH7FDghxgbWj^#G1 zHP>u5U%AJjJc&%RIoD>hEq6E^d*UUOuNxRd;2aLea;w#vdquVsBDy7*&E{OI)fyLW z#^0@GNC8Qb)-^OV9B6B6i+-rI<}RZe%CS^bUuQ5-;p~p(R+}}~Y*AW_ZnvB2>T0#`cn;Wwa<1@=lgVH-wlp?2 z$}hkCvYeQZAfx1zL%>x-@tY)*(P(T*xXz$dnNa?faER*&N|lN05bB1mTm1)mZLGb( S3=G}?0000A0%^ literal 0 HcmV?d00001 diff --git a/setup/nuke/nuke_path/KnobScripter/icons/icon_snippets.png b/setup/nuke/nuke_path/KnobScripter/icons/icon_snippets.png new file mode 100644 index 0000000000000000000000000000000000000000..479c44f19e3c0c824c0491b4c176123eb1547197 GIT binary patch literal 1415 zcmV;21$g?2P)KAK4h#sKNeKn0cL4TArF8tM(uGMt}G2RbOnLZ=zHx zSMSNqJt>xm{eAib042}P^L*}=0s!LuM`SWtz*40$z45-m)Ya8xnAi~>9v-dHXnvPU zrQFeD$Nn7X>l<6YVZ*ypsgyrFJnX)F`EtjVD_1(6JbB_AbB(#>tXvo~H%7i@^_n@c zF|h%4cj~(K74B;+DlS?a8yzdB0sH{rv+@mgOV>#pu|m zr|EuE&%%U-K_-*2f6JDv#mwHld1u4I!lGJRTWz;**Y)^B0D_vDs}`ft*bX2Yz##yw z02}~D0Camio*U)mWs5pH%{G>0CEgMeFoYm4KdKiV!w6y45F4riicMTaI zJ32ZZewmT6-DEN?gD)+BNF@>hUmzR-s6Y9xprB^Shsh~6TbENTmbj%-sm$kmLMXgt zd^N7uH{4XI_Gefumd7C>A#atJm){Hu3Jz;*)ITMDe7>-xxVSm~{RMNp-lflMJcWC+ z^x2qBvvy!$V5qRLaQ(O0*~LLKgB7hUZT9Wkx7{HCz_M(xTCHwYC=@eZ_kzBNTc277 zGdE|}_1JrwCtikdg<`=E2M^Y9GMTX5WE{M7@sg31v%&y*fPiH=ft+InLb2G-Un=2A zB>sG{SmY-Z3K*Y5y}k9_U4o$@himKBZMWOn+S&l*OnHLh;^NXuN=km_^ZCp{wffdC z=g+GEumHI6Q3gN+Kng&@Fg#IIRAf-nlBAGzYuCNwC-7rH0k_LdcjxY|Z>VoD0@w}U zFWzJ!tJV6@Y&H)kCnqmUNJxm_@pwo~Oq3)gC51#rMh*-O4OQCh_M-qU0Qi+sdakFZ z=h%Y>53VVfDp$!lIZFsZsjBpkT3u}ifIFMbe8#iwJ8a@k87>nR~P0fXy zw{D#n931RtSvD{xCT7tGNlBlF&YCrc0yuu+M9Y!Ghg$%Y1DJSJpDP3aSy@?CX=!OI zynQ8@koy;Zc9_b_%J#CdvL*m$0i2o~Z%XMVL$BALi;j*?3k?ko27nv2wU*)0Q8z;f zal2f!%Vrxk7z{mOVPXCZ!;pLT?%4{9iW)s0PZfY8)5U!*ghGM8T7A%@(`o7H)2E#r z#}xqB58${+BtBPBQPHi{YN%yH=;8DwV2}$KwSu z48tobD!QoCYU!CXX9fai%%}!X3P3nroM}tv!^p_U-v)!>#4c^AT z{j&Wp&BzC<)!N+N-rg4*8~c&N;c%6fmNwPb*P8+4188~U3|@dl06zivaeV&Y!av`O VU4IwAq!s`G002ovPDHLkV1g(>ppyUq literal 0 HcmV?d00001 diff --git a/setup/nuke/nuke_path/KnobScripter/knob_scripter.py b/setup/nuke/nuke_path/KnobScripter/knob_scripter.py new file mode 100644 index 
0000000000..f03067aa4b
--- /dev/null
+++ b/setup/nuke/nuke_path/KnobScripter/knob_scripter.py
@@ -0,0 +1,4196 @@
+# -------------------------------------------------
+# KnobScripter by Adrian Pueyo
+# Complete python script editor for Nuke
+# adrianpueyo.com, 2016-2019
+import string
+import traceback
+from webbrowser import open as openUrl
+from threading import Event, Thread
+import platform
+import subprocess
+from functools import partial
+import re
+import sys
+from nukescripts import panels
+import json
+import os
+import nuke
+version = "2.3 wip"
+date = "Aug 12 2019"
+# -------------------------------------------------
+
+
+# Symlinks on windows...
+if os.name == "nt":
+    def symlink_ms(source, link_name):
+        import ctypes
+        csl = ctypes.windll.kernel32.CreateSymbolicLinkW
+        csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
+        csl.restype = ctypes.c_ubyte
+        flags = 1 if os.path.isdir(source) else 0
+        try:
+            if csl(link_name, source.replace('/', '\\'), flags) == 0:
+                raise ctypes.WinError()
+        except:
+            pass
+    os.symlink = symlink_ms
+
+try:
+    if nuke.NUKE_VERSION_MAJOR < 11:
+        from PySide import QtCore, QtGui, QtGui as QtWidgets
+        from PySide.QtCore import Qt
+    else:
+        from PySide2 import QtWidgets, QtGui, QtCore
+        from PySide2.QtCore import Qt
+except ImportError:
+    from Qt import QtCore, QtGui, QtWidgets
+
+KS_DIR = os.path.dirname(__file__)
+icons_path = KS_DIR + "/icons/"
+DebugMode = False
+AllKnobScripters = []  # All open instances at a given time
+
+PrefsPanel = ""
+SnippetEditPanel = ""
+
+nuke.tprint('KnobScripter v{}, built {}.\nCopyright (c) 2016-2019 Adrian Pueyo. All Rights Reserved.'.format(version, date))
+
+
+class KnobScripter(QtWidgets.QWidget):
+
+    def __init__(self, node="", knob="knobChanged"):
+        super(KnobScripter, self).__init__()
+
+        # Autosave the other knobscripters and add this one
+        for ks in AllKnobScripters:
+            try:
+                ks.autosave()
+            except:
+                pass
+        if self not in AllKnobScripters:
+            AllKnobScripters.append(self)
+
+        self.nodeMode = (node != "")
+        if node == "":
+            self.node = nuke.toNode("root")
+        else:
+            self.node = node
+
+        self.isPane = False
+        self.knob = knob
+        # For the option to also display the knob labels on the knob dropdown
+        self.show_labels = False
+        self.unsavedKnobs = {}
+        self.modifiedKnobs = set()
+        self.scrollPos = {}
+        self.cursorPos = {}
+        self.fontSize = 10
+        self.font = "Monospace"
+        self.tabSpaces = 4
+        self.windowDefaultSize = [500, 300]
+        self.color_scheme = "sublime"  # Can be nuke or sublime
+        self.pinned = 1
+        self.toLoadKnob = True
+        self.frw_open = False  # Find replace widget closed by default
+        self.icon_size = 17
+        self.btn_size = 24
+        self.qt_icon_size = QtCore.QSize(self.icon_size, self.icon_size)
+        self.qt_btn_size = QtCore.QSize(self.btn_size, self.btn_size)
+        self.origConsoleText = ""
+        self.nukeSE = self.findSE()
+        self.nukeSEOutput = self.findSEOutput(self.nukeSE)
+        self.nukeSEInput = self.findSEInput(self.nukeSE)
+        self.nukeSERunBtn = self.findSERunBtn(self.nukeSE)
+
+        self.scripts_dir = os.path.expandvars(
+            os.path.expanduser("~/.nuke/KnobScripter_Scripts"))
+        self.current_folder = "scripts"
+        self.folder_index = 0
+        self.current_script = "Untitled.py"
+        self.current_script_modified = False
+        self.script_index = 0
+        self.toAutosave = False
+
+        # Load prefs
+        self.prefs_txt = os.path.expandvars(
+            os.path.expanduser("~/.nuke/KnobScripter_Prefs.txt"))
+        self.loadedPrefs = self.loadPrefs()
+        if self.loadedPrefs != []:
+            try:
+                if "font_size" in self.loadedPrefs:
+                    self.fontSize = 
self.loadedPrefs['font_size'] + self.windowDefaultSize = [ + self.loadedPrefs['window_default_w'], self.loadedPrefs['window_default_h']] + self.tabSpaces = self.loadedPrefs['tab_spaces'] + self.pinned = self.loadedPrefs['pin_default'] + if "font" in self.loadedPrefs: + self.font = self.loadedPrefs['font'] + if "color_scheme" in self.loadedPrefs: + self.color_scheme = self.loadedPrefs['color_scheme'] + if "show_labels" in self.loadedPrefs: + self.show_labels = self.loadedPrefs['show_labels'] + except TypeError: + log("KnobScripter: Failed to load preferences.") + + # Load snippets + self.snippets_txt_path = os.path.expandvars( + os.path.expanduser("~/.nuke/KnobScripter_Snippets.txt")) + self.snippets = self.loadSnippets(maxDepth=5) + + # Current state of script (loaded when exiting node mode) + self.state_txt_path = os.path.expandvars( + os.path.expanduser("~/.nuke/KnobScripter_State.txt")) + + # Init UI + self.initUI() + + # Talk to Nuke's Script Editor + self.setSEOutputEvent() # Make the output windowS listen! + self.clearConsole() + + def initUI(self): + ''' Initializes the tool UI''' + # ------------------- + # 1. MAIN WINDOW + # ------------------- + self.resize(self.windowDefaultSize[0], self.windowDefaultSize[1]) + self.setWindowTitle("KnobScripter - %s %s" % + (self.node.fullName(), self.knob)) + self.setObjectName("com.adrianpueyo.knobscripter") + self.move(QtGui.QCursor().pos() - QtCore.QPoint(32, 74)) + + # --------------------- + # 2. TOP BAR + # --------------------- + # --- + # 2.1. Left buttons + self.change_btn = QtWidgets.QToolButton() + # self.exit_node_btn.setIcon(QtGui.QIcon(KS_DIR+"/KnobScripter/icons/icons8-delete-26.png")) + self.change_btn.setIcon(QtGui.QIcon(icons_path + "icon_pick.png")) + self.change_btn.setIconSize(self.qt_icon_size) + self.change_btn.setFixedSize(self.qt_btn_size) + self.change_btn.setToolTip( + "Change to node if selected. Otherwise, change to Script Mode.") + self.change_btn.clicked.connect(self.changeClicked) + + # --- + # 2.2.A. Node mode UI + self.exit_node_btn = QtWidgets.QToolButton() + self.exit_node_btn.setIcon(QtGui.QIcon( + icons_path + "icon_exitnode.png")) + self.exit_node_btn.setIconSize(self.qt_icon_size) + self.exit_node_btn.setFixedSize(self.qt_btn_size) + self.exit_node_btn.setToolTip( + "Exit the node, and change to Script Mode.") + self.exit_node_btn.clicked.connect(self.exitNodeMode) + self.current_node_label_node = QtWidgets.QLabel(" Node:") + self.current_node_label_name = QtWidgets.QLabel(self.node.fullName()) + self.current_node_label_name.setStyleSheet("font-weight:bold;") + self.current_knob_label = QtWidgets.QLabel("Knob: ") + self.current_knob_dropdown = QtWidgets.QComboBox() + self.current_knob_dropdown.setSizeAdjustPolicy( + QtWidgets.QComboBox.AdjustToContents) + self.updateKnobDropdown() + self.current_knob_dropdown.currentIndexChanged.connect( + lambda: self.loadKnobValue(False, updateDict=True)) + + # Layout + self.node_mode_bar_layout = QtWidgets.QHBoxLayout() + self.node_mode_bar_layout.addWidget(self.exit_node_btn) + self.node_mode_bar_layout.addSpacing(2) + self.node_mode_bar_layout.addWidget(self.current_node_label_node) + self.node_mode_bar_layout.addWidget(self.current_node_label_name) + self.node_mode_bar_layout.addSpacing(2) + self.node_mode_bar_layout.addWidget(self.current_knob_dropdown) + self.node_mode_bar = QtWidgets.QWidget() + self.node_mode_bar.setLayout(self.node_mode_bar_layout) + + self.node_mode_bar_layout.setContentsMargins(0, 0, 0, 0) + + # --- + # 2.2.B. 
Script mode UI + self.script_label = QtWidgets.QLabel("Script: ") + + self.current_folder_dropdown = QtWidgets.QComboBox() + self.current_folder_dropdown.setSizeAdjustPolicy( + QtWidgets.QComboBox.AdjustToContents) + self.current_folder_dropdown.currentIndexChanged.connect( + self.folderDropdownChanged) + # self.current_folder_dropdown.setEditable(True) + # self.current_folder_dropdown.lineEdit().setReadOnly(True) + # self.current_folder_dropdown.lineEdit().setAlignment(Qt.AlignRight) + + self.current_script_dropdown = QtWidgets.QComboBox() + self.current_script_dropdown.setSizeAdjustPolicy( + QtWidgets.QComboBox.AdjustToContents) + self.updateFoldersDropdown() + self.updateScriptsDropdown() + self.current_script_dropdown.currentIndexChanged.connect( + self.scriptDropdownChanged) + + # Layout + self.script_mode_bar_layout = QtWidgets.QHBoxLayout() + self.script_mode_bar_layout.addWidget(self.script_label) + self.script_mode_bar_layout.addSpacing(2) + self.script_mode_bar_layout.addWidget(self.current_folder_dropdown) + self.script_mode_bar_layout.addWidget(self.current_script_dropdown) + self.script_mode_bar = QtWidgets.QWidget() + self.script_mode_bar.setLayout(self.script_mode_bar_layout) + + self.script_mode_bar_layout.setContentsMargins(0, 0, 0, 0) + + # --- + # 2.3. File-system buttons + # Refresh dropdowns + self.refresh_btn = QtWidgets.QToolButton() + self.refresh_btn.setIcon(QtGui.QIcon(icons_path + "icon_refresh.png")) + self.refresh_btn.setIconSize(QtCore.QSize(50, 50)) + self.refresh_btn.setIconSize(self.qt_icon_size) + self.refresh_btn.setFixedSize(self.qt_btn_size) + self.refresh_btn.setToolTip("Refresh the dropdowns.\nShortcut: F5") + self.refresh_btn.setShortcut('F5') + self.refresh_btn.clicked.connect(self.refreshClicked) + + # Reload script + self.reload_btn = QtWidgets.QToolButton() + self.reload_btn.setIcon(QtGui.QIcon(icons_path + "icon_download.png")) + self.reload_btn.setIconSize(QtCore.QSize(50, 50)) + self.reload_btn.setIconSize(self.qt_icon_size) + self.reload_btn.setFixedSize(self.qt_btn_size) + self.reload_btn.setToolTip( + "Reload the current script. Will overwrite any changes made to it.\nShortcut: Ctrl+R") + self.reload_btn.setShortcut('Ctrl+R') + self.reload_btn.clicked.connect(self.reloadClicked) + + # Save script + self.save_btn = QtWidgets.QToolButton() + self.save_btn.setIcon(QtGui.QIcon(icons_path + "icon_save.png")) + self.save_btn.setIconSize(QtCore.QSize(50, 50)) + self.save_btn.setIconSize(self.qt_icon_size) + self.save_btn.setFixedSize(self.qt_btn_size) + self.save_btn.setToolTip( + "Save the script into the selected knob or python file.\nShortcut: Ctrl+S") + self.save_btn.setShortcut('Ctrl+S') + self.save_btn.clicked.connect(self.saveClicked) + + # Layout + self.top_file_bar_layout = QtWidgets.QHBoxLayout() + self.top_file_bar_layout.addWidget(self.refresh_btn) + self.top_file_bar_layout.addWidget(self.reload_btn) + self.top_file_bar_layout.addWidget(self.save_btn) + + # --- + # 2.4. 
Right Side buttons + + # Run script + self.run_script_button = QtWidgets.QToolButton() + self.run_script_button.setIcon( + QtGui.QIcon(icons_path + "icon_run.png")) + self.run_script_button.setIconSize(self.qt_icon_size) + # self.run_script_button.setIconSize(self.qt_icon_size) + self.run_script_button.setFixedSize(self.qt_btn_size) + self.run_script_button.setToolTip( + "Execute the current selection on the KnobScripter, or the whole script if no selection.\nShortcut: Ctrl+Enter") + self.run_script_button.clicked.connect(self.runScript) + + # Clear console + self.clear_console_button = QtWidgets.QToolButton() + self.clear_console_button.setIcon( + QtGui.QIcon(icons_path + "icon_clearConsole.png")) + self.clear_console_button.setIconSize(QtCore.QSize(50, 50)) + self.clear_console_button.setIconSize(self.qt_icon_size) + self.clear_console_button.setFixedSize(self.qt_btn_size) + self.clear_console_button.setToolTip( + "Clear the text in the console window.\nShortcut: Click Backspace on the console.") + self.clear_console_button.clicked.connect(self.clearConsole) + + # FindReplace button + self.find_button = QtWidgets.QToolButton() + self.find_button.setIcon(QtGui.QIcon(icons_path + "icon_search.png")) + self.find_button.setIconSize(self.qt_icon_size) + self.find_button.setFixedSize(self.qt_btn_size) + self.find_button.setToolTip( + "Call the snippets by writing the shortcut and pressing Tab.\nShortcut: Ctrl+F") + self.find_button.setShortcut('Ctrl+F') + #self.find_button.setMaximumWidth(self.find_button.fontMetrics().boundingRect("Find").width() + 20) + self.find_button.setCheckable(True) + self.find_button.setFocusPolicy(QtCore.Qt.NoFocus) + self.find_button.clicked[bool].connect(self.toggleFRW) + if self.frw_open: + self.find_button.toggle() + + # Snippets + self.snippets_button = QtWidgets.QToolButton() + self.snippets_button.setIcon( + QtGui.QIcon(icons_path + "icon_snippets.png")) + self.snippets_button.setIconSize(QtCore.QSize(50, 50)) + self.snippets_button.setIconSize(self.qt_icon_size) + self.snippets_button.setFixedSize(self.qt_btn_size) + self.snippets_button.setToolTip( + "Call the snippets by writing the shortcut and pressing Tab.") + self.snippets_button.clicked.connect(self.openSnippets) + + # PIN + ''' + self.pin_button = QtWidgets.QPushButton("P") + self.pin_button.setCheckable(True) + if self.pinned: + self.setWindowFlags(self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint) + self.pin_button.toggle() + self.pin_button.setToolTip("Toggle 'Always On Top'. 
Keeps the KnobScripter on top of all other windows.") + self.pin_button.setFocusPolicy(QtCore.Qt.NoFocus) + self.pin_button.setFixedSize(self.qt_btn_size) + self.pin_button.clicked[bool].connect(self.pin) + ''' + + # Prefs + self.createPrefsMenu() + self.prefs_button = QtWidgets.QPushButton() + self.prefs_button.setIcon(QtGui.QIcon(icons_path + "icon_prefs.png")) + self.prefs_button.setIconSize(self.qt_icon_size) + self.prefs_button.setFixedSize( + QtCore.QSize(self.btn_size + 10, self.btn_size)) + # self.prefs_button.clicked.connect(self.openPrefs) + self.prefs_button.setMenu(self.prefsMenu) + self.prefs_button.setStyleSheet("text-align:left;padding-left:2px;") + #self.prefs_button.setMaximumWidth(self.prefs_button.fontMetrics().boundingRect("Prefs").width() + 12) + + # Layout + self.top_right_bar_layout = QtWidgets.QHBoxLayout() + self.top_right_bar_layout.addWidget(self.run_script_button) + self.top_right_bar_layout.addWidget(self.clear_console_button) + self.top_right_bar_layout.addWidget(self.find_button) + # self.top_right_bar_layout.addWidget(self.snippets_button) + # self.top_right_bar_layout.addWidget(self.pin_button) + # self.top_right_bar_layout.addSpacing(10) + self.top_right_bar_layout.addWidget(self.prefs_button) + + # --- + # Layout + self.top_layout = QtWidgets.QHBoxLayout() + self.top_layout.setContentsMargins(0, 0, 0, 0) + # self.top_layout.setSpacing(10) + self.top_layout.addWidget(self.change_btn) + self.top_layout.addWidget(self.node_mode_bar) + self.top_layout.addWidget(self.script_mode_bar) + self.node_mode_bar.setVisible(False) + # self.top_layout.addSpacing(10) + self.top_layout.addLayout(self.top_file_bar_layout) + self.top_layout.addStretch() + self.top_layout.addLayout(self.top_right_bar_layout) + + # ---------------------- + # 3. 
SCRIPTING SECTION + # ---------------------- + # Splitter + self.splitter = QtWidgets.QSplitter(Qt.Vertical) + + # Output widget + self.script_output = ScriptOutputWidget(parent=self) + self.script_output.setReadOnly(1) + self.script_output.setAcceptRichText(0) + self.script_output.setTabStopWidth( + self.script_output.tabStopWidth() / 4) + self.script_output.setFocusPolicy(Qt.ClickFocus) + self.script_output.setAutoFillBackground(0) + self.script_output.installEventFilter(self) + + # Script Editor + self.script_editor = KnobScripterTextEditMain(self, self.script_output) + self.script_editor.setMinimumHeight(30) + self.script_editor.setStyleSheet( + 'background:#282828;color:#EEE;') # Main Colors + self.script_editor.textChanged.connect(self.setModified) + self.highlighter = KSScriptEditorHighlighter( + self.script_editor.document(), self) + self.script_editor.cursorPositionChanged.connect(self.setTextSelection) + self.script_editor_font = QtGui.QFont() + self.script_editor_font.setFamily(self.font) + self.script_editor_font.setStyleHint(QtGui.QFont.Monospace) + self.script_editor_font.setFixedPitch(True) + self.script_editor_font.setPointSize(self.fontSize) + self.script_editor.setFont(self.script_editor_font) + self.script_editor.setTabStopWidth( + self.tabSpaces * QtGui.QFontMetrics(self.script_editor_font).width(' ')) + + # Add input and output to splitter + self.splitter.addWidget(self.script_output) + self.splitter.addWidget(self.script_editor) + self.splitter.setStretchFactor(0, 0) + + # FindReplace widget + self.frw = FindReplaceWidget(self) + self.frw.setVisible(self.frw_open) + + # --- + # Layout + self.scripting_layout = QtWidgets.QVBoxLayout() + self.scripting_layout.setContentsMargins(0, 0, 0, 0) + self.scripting_layout.setSpacing(0) + self.scripting_layout.addWidget(self.splitter) + self.scripting_layout.addWidget(self.frw) + + # --------------- + # MASTER LAYOUT + # --------------- + self.master_layout = QtWidgets.QVBoxLayout() + self.master_layout.setSpacing(5) + self.master_layout.setContentsMargins(8, 8, 8, 8) + self.master_layout.addLayout(self.top_layout) + self.master_layout.addLayout(self.scripting_layout) + # self.master_layout.addLayout(self.bottom_layout) + self.setLayout(self.master_layout) + + # ---------------- + # MAIN WINDOW UI + # ---------------- + size_policy = QtWidgets.QSizePolicy( + QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum) + self.setSizePolicy(size_policy) + self.setMinimumWidth(160) + + if self.pinned: + self.setWindowFlags(self.windowFlags() | + QtCore.Qt.WindowStaysOnTopHint) + + # Set default values based on mode + if self.nodeMode: + self.current_knob_dropdown.blockSignals(True) + self.node_mode_bar.setVisible(True) + self.script_mode_bar.setVisible(False) + self.setCurrentKnob(self.knob) + self.loadKnobValue(check=False) + self.setKnobModified(False) + self.current_knob_dropdown.blockSignals(False) + self.splitter.setSizes([0, 1]) + else: + self.exitNodeMode() + self.script_editor.setFocus() + + # Preferences submenus + def createPrefsMenu(self): + + # Actions + self.echoAct = QtWidgets.QAction("Echo python commands", self, checkable=True, + statusTip="Toggle nuke's 'Echo all python commands to ScriptEditor'", triggered=self.toggleEcho) + if nuke.toNode("preferences").knob("echoAllCommands").value(): + self.echoAct.toggle() + self.pinAct = QtWidgets.QAction("Always on top", self, checkable=True, + statusTip="Keeps the KnobScripter window always on top or not.", triggered=self.togglePin) + if self.pinned: + 
self.setWindowFlags(self.windowFlags() |
+                                QtCore.Qt.WindowStaysOnTopHint)
+            self.pinAct.toggle()
+        self.helpAct = QtWidgets.QAction(
+            "&Help", self, statusTip="Open the KnobScripter help in your browser.", shortcut="F1", triggered=self.showHelp)
+        self.nukepediaAct = QtWidgets.QAction(
+            "Show in Nukepedia", self, statusTip="Open the KnobScripter download page on Nukepedia.", triggered=self.showInNukepedia)
+        self.githubAct = QtWidgets.QAction(
+            "Show in GitHub", self, statusTip="Open the KnobScripter repo on GitHub.", triggered=self.showInGithub)
+        self.snippetsAct = QtWidgets.QAction(
+            "Snippets", self, statusTip="Open the Snippets editor.", triggered=self.openSnippets)
+        self.snippetsAct.setIcon(QtGui.QIcon(icons_path + "icon_snippets.png"))
+        # self.snippetsAct = QtWidgets.QAction("Keywords", self, statusTip="Add custom keywords.", triggered=self.openSnippets) #TODO THIS
+        self.prefsAct = QtWidgets.QAction(
+            "Preferences", self, statusTip="Open the Preferences panel.", triggered=self.openPrefs)
+        self.prefsAct.setIcon(QtGui.QIcon(icons_path + "icon_prefs.png"))
+
+        # Menus
+        self.prefsMenu = QtWidgets.QMenu("Preferences")
+        self.prefsMenu.addAction(self.echoAct)
+        self.prefsMenu.addAction(self.pinAct)
+        self.prefsMenu.addSeparator()
+        self.prefsMenu.addAction(self.nukepediaAct)
+        self.prefsMenu.addAction(self.githubAct)
+        self.prefsMenu.addSeparator()
+        self.prefsMenu.addAction(self.helpAct)
+        self.prefsMenu.addSeparator()
+        self.prefsMenu.addAction(self.snippetsAct)
+        self.prefsMenu.addAction(self.prefsAct)
+
+    def initEcho(self):
+        ''' Initializes the echo checkable QAction based on nuke's state '''
+        echo_knob = nuke.toNode("preferences").knob("echoAllCommands")
+        self.echoAct.setChecked(echo_knob.value())
+
+    def toggleEcho(self):
+        ''' Toggle the "Echo python commands" from Nuke '''
+        echo_knob = nuke.toNode("preferences").knob("echoAllCommands")
+        echo_knob.setValue(self.echoAct.isChecked())
+
+    def togglePin(self):
+        ''' Toggle "always on top" based on the submenu button '''
+        self.pin(self.pinAct.isChecked())
+
+    def showInNukepedia(self):
+        openUrl("http://www.nukepedia.com/python/ui/knobscripter")
+
+    def showInGithub(self):
+        openUrl("https://github.com/adrianpueyo/KnobScripter")
+
+    def showHelp(self):
+        openUrl("https://vimeo.com/adrianpueyo/knobscripter2")
+
+    # Node Mode
+
+    def updateKnobDropdown(self):
+        ''' Populate knob dropdown list '''
+        self.current_knob_dropdown.clear()  # First remove all items
+        defaultKnobs = ["knobChanged", "onCreate", "onScriptLoad", "onScriptSave", "onScriptClose", "onDestroy",
+                        "updateUI", "autolabel", "beforeRender", "beforeFrameRender", "afterFrameRender", "afterRender"]
+        permittedKnobClasses = ["PyScript_Knob", "PythonCustomKnob"]
+        counter = 0
+        for i in self.node.knobs():
+            if i not in defaultKnobs and self.node.knob(i).Class() in permittedKnobClasses:
+                if self.show_labels:
+                    i_full = "{} ({})".format(self.node.knob(i).label(), i)
+                else:
+                    i_full = i
+
+                if i in self.unsavedKnobs.keys():
+                    self.current_knob_dropdown.addItem(i_full + "(*)", i)
+                else:
+                    self.current_knob_dropdown.addItem(i_full, i)
+
+                counter += 1
+        if counter > 0:
+            self.current_knob_dropdown.insertSeparator(counter)
+            counter += 1
+            self.current_knob_dropdown.insertSeparator(counter)
+            counter += 1
+        for i in self.node.knobs():
+            if i in defaultKnobs:
+                if i in self.unsavedKnobs.keys():
+                    self.current_knob_dropdown.addItem(i + "(*)", i)
+                else:
+                    self.current_knob_dropdown.addItem(i, i)
+                counter += 1
+        return
+
+    def loadKnobValue(self, check=True, 
updateDict=False):
        ''' Get the content of the selected knob and populate the editor '''
        if not self.toLoadKnob:
            return
        dropdown_value = self.current_knob_dropdown.itemData(
            self.current_knob_dropdown.currentIndex())  # e.g. "knobChanged"
        try:
            obtained_knobValue = str(self.node[dropdown_value].value())
            obtained_scrollValue = 0
            edited_knobValue = self.script_editor.toPlainText()
        except:
            # QMessageBox.information() is static and returns a button code,
            # so build the dialog explicitly in order to set window flags.
            error_message = QtWidgets.QMessageBox()
            error_message.setText("Unable to find %s.%s" %
                                  (self.node.name(), dropdown_value))
            error_message.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            error_message.exec_()
            return
        # If there were changes to the previous knob, update the dictionary
        if updateDict:
            self.unsavedKnobs[self.knob] = edited_knobValue
            self.scrollPos[self.knob] = self.script_editor.verticalScrollBar(
            ).value()
        prev_knob = self.knob  # e.g. "knobChanged"

        self.knob = self.current_knob_dropdown.itemData(
            self.current_knob_dropdown.currentIndex())

        if check and obtained_knobValue != edited_knobValue:
            msgBox = QtWidgets.QMessageBox()
            msgBox.setText("The Script Editor has been modified.")
            msgBox.setInformativeText(
                "Do you want to overwrite the current code on this editor?")
            msgBox.setStandardButtons(
                QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
            msgBox.setIcon(QtWidgets.QMessageBox.Question)
            msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
            reply = msgBox.exec_()
            if reply == QtWidgets.QMessageBox.No:
                self.setCurrentKnob(prev_knob)
                return
        # If the change comes from a dropdown update, restore the value from
        # the dictionary when possible; otherwise load it normally
        self.setWindowTitle("KnobScripter - %s %s" %
                            (self.node.name(), self.knob))
        if updateDict:
            if self.knob in self.unsavedKnobs:
                if self.unsavedKnobs[self.knob] == obtained_knobValue:
                    self.script_editor.setPlainText(obtained_knobValue)
                    self.setKnobModified(False)
                else:
                    obtained_knobValue = self.unsavedKnobs[self.knob]
                    self.script_editor.setPlainText(obtained_knobValue)
                    self.setKnobModified(True)
            else:
                self.script_editor.setPlainText(obtained_knobValue)
                self.setKnobModified(False)

            if self.knob in self.scrollPos:
                obtained_scrollValue = self.scrollPos[self.knob]
        else:
            self.script_editor.setPlainText(obtained_knobValue)

        # Re-setting the cursor looks redundant, but it nudges the view so the
        # scroll restore below lands in the right place.
        cursor = self.script_editor.textCursor()
        self.script_editor.setTextCursor(cursor)
        self.script_editor.verticalScrollBar().setValue(obtained_scrollValue)
        return

    def loadAllKnobValues(self):
        ''' Handler for the "load all knobs" button '''
        if len(self.unsavedKnobs) >= 1:
            msgBox = QtWidgets.QMessageBox()
            msgBox.setText(
                "Do you want to reload all python and callback knobs?")
            msgBox.setInformativeText(
                "Unsaved changes on this editor will be lost.")
            msgBox.setStandardButtons(
                QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
            msgBox.setIcon(QtWidgets.QMessageBox.Question)
            msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
            reply = msgBox.exec_()
            if reply == QtWidgets.QMessageBox.No:
                return
        self.unsavedKnobs = {}
        return

    def saveKnobValue(self, check=True):
        ''' Save the text from the editor to the node's selected knob '''
        dropdown_value = self.current_knob_dropdown.itemData(
            self.current_knob_dropdown.currentIndex())
        try:
            obtained_knobValue = str(self.node[dropdown_value].value())
            self.knob = dropdown_value
        except:
            # As above: build the QMessageBox explicitly instead of using the
            # static information() helper, which only returns a button code.
            error_message = QtWidgets.QMessageBox()
            error_message.setText("Unable to find %s.%s" %
                                  (self.node.name(), dropdown_value))
            error_message.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            error_message.exec_()
            return
        edited_knobValue = self.script_editor.toPlainText()
        if check and obtained_knobValue != edited_knobValue:
            msgBox = QtWidgets.QMessageBox()
            msgBox.setText("Do you want to overwrite %s.%s?" %
                           (self.node.name(), dropdown_value))
            msgBox.setStandardButtons(
                QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
            msgBox.setIcon(QtWidgets.QMessageBox.Question)
            msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
            reply = msgBox.exec_()
            if reply == QtWidgets.QMessageBox.No:
                return
        self.node[dropdown_value].setValue(edited_knobValue)
        self.setKnobModified(
            modified=False, knob=dropdown_value, changeTitle=True)
        nuke.tcl("modified 1")
        if self.knob in self.unsavedKnobs:
            del self.unsavedKnobs[self.knob]
        return

    def saveAllKnobValues(self, check=True):
        ''' Handler for the "save all knobs" button '''
        if self.updateUnsavedKnobs() > 0 and check:
            msgBox = QtWidgets.QMessageBox()
            msgBox.setText(
                "Do you want to save all modified python and callback knobs?")
            msgBox.setStandardButtons(
                QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
            msgBox.setIcon(QtWidgets.QMessageBox.Question)
            msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
            reply = msgBox.exec_()
            if reply == QtWidgets.QMessageBox.No:
                return
        saveErrors = 0
        savedCount = 0
        for k in self.unsavedKnobs.copy():
            try:
                self.node.knob(k).setValue(self.unsavedKnobs[k])
                del self.unsavedKnobs[k]
                savedCount += 1
                nuke.tcl("modified 1")
            except:
                saveErrors += 1
        if saveErrors > 0:
            errorBox = QtWidgets.QMessageBox()
            errorBox.setText("Error saving %s knob%s." %
                             (str(saveErrors), int(saveErrors > 1) * "s"))
            errorBox.setIcon(QtWidgets.QMessageBox.Warning)
            errorBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            errorBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
            reply = errorBox.exec_()
        else:
            log("KnobScripter: %s knobs saved" % str(savedCount))
        return

    def setCurrentKnob(self, knobToSet):
        ''' Set the current knob on the dropdown '''
        KnobDropdownItems = []
        for i in range(self.current_knob_dropdown.count()):
            if self.current_knob_dropdown.itemData(i) is not None:
                KnobDropdownItems.append(
                    self.current_knob_dropdown.itemData(i))
            else:
                KnobDropdownItems.append("---")
        if knobToSet in KnobDropdownItems:
            index = KnobDropdownItems.index(knobToSet)
            self.current_knob_dropdown.setCurrentIndex(index)
        return

    def updateUnsavedKnobs(self, first_time=False):
        ''' Clear unchanged knobs from the dict and return the number of unsaved knobs '''
        if not self.node:
            # Node has been deleted; nothing to track, so report 0.
            return 0
        edited_knobValue = self.script_editor.toPlainText()
        self.unsavedKnobs[self.knob] = edited_knobValue
        if len(self.unsavedKnobs) > 0:
            for k in self.unsavedKnobs.copy():
                if self.node.knob(k):
                    if str(self.node.knob(k).value()) == str(self.unsavedKnobs[k]):
                        del self.unsavedKnobs[k]
                else:
                    del self.unsavedKnobs[k]
        # Set the appropriate modified-markers on the dropdown entries:
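        # For context: self.unsavedKnobs maps knob names to editor text that
        # has not been written back to the node yet. The loop above prunes
        # every entry that already matches the knob's current value (or whose
        # knob no longer exists), so e.g. a hypothetical leftover entry
        #     {"knobChanged": "print 'hi'"}
        # disappears once the node's knobChanged knob holds that same text.
        # The block below then refreshes the "(*)" markers in the dropdown.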
        knobs_dropdown = self.current_knob_dropdown
        all_knobs = [knobs_dropdown.itemData(i)
                     for i in range(knobs_dropdown.count())]
        for key in all_knobs:
            if key in self.unsavedKnobs.keys():
                self.setKnobModified(
                    modified=True, knob=key, changeTitle=False)
            else:
                self.setKnobModified(
                    modified=False, knob=key, changeTitle=False)

        return len(self.unsavedKnobs)

    def setKnobModified(self, modified=True, knob="", changeTitle=True):
        ''' Flag the given knob (the current one by default) as modified or
        unmodified, and update the window title and dropdown label to match '''
        if knob == "":
            knob = self.knob
        if modified:
            self.modifiedKnobs.add(knob)
        else:
            self.modifiedKnobs.discard(knob)

        if changeTitle:
            title_modified_string = " [modified]"
            windowTitle = self.windowTitle().split(title_modified_string)[0]
            if modified:
                windowTitle += title_modified_string
            self.setWindowTitle(windowTitle)

        try:
            knobs_dropdown = self.current_knob_dropdown
            kd_index = knobs_dropdown.currentIndex()
            kd_data = knobs_dropdown.itemData(kd_index)
            defaultKnobs = ["knobChanged", "onCreate", "onScriptLoad", "onScriptSave", "onScriptClose", "onDestroy",
                            "updateUI", "autolabel", "beforeRender", "beforeFrameRender", "afterFrameRender", "afterRender"]
            if self.show_labels and kd_data not in defaultKnobs:
                kd_data = "{} ({})".format(
                    self.node.knob(kd_data).label(), kd_data)
            if not modified:
                knobs_dropdown.setItemText(kd_index, kd_data)
            else:
                knobs_dropdown.setItemText(kd_index, kd_data + "(*)")
        except:
            pass

    # Script Mode
    def updateFoldersDropdown(self):
        ''' Populate the folders dropdown list '''
        self.current_folder_dropdown.blockSignals(True)
        self.current_folder_dropdown.clear()  # First remove all items
        defaultFolders = ["scripts"]
        scriptFolders = []
        counter = 0
        for f in defaultFolders:
            self.makeScriptFolder(f)
            self.current_folder_dropdown.addItem(f + "/", f)
            counter += 1

        try:
            scriptFolders = sorted([f for f in os.listdir(self.scripts_dir) if os.path.isdir(
                os.path.join(self.scripts_dir, f))])  # Accepts symlinks
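            # Folders added via "Add custom" are created as symlinks inside
            # scripts_dir (see folderDropdownChanged below), and
            # os.path.isdir() follows symlinks, so they show up here too.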
+ except: + log("Couldn't read any script folders.") + + for f in scriptFolders: + fname = f.split("/")[-1] + if fname in defaultFolders: + continue + self.current_folder_dropdown.addItem(fname + "/", fname) + counter += 1 + + # print scriptFolders + if counter > 0: + self.current_folder_dropdown.insertSeparator(counter) + counter += 1 + # self.current_folder_dropdown.insertSeparator(counter) + #counter += 1 + self.current_folder_dropdown.addItem("New", "create new") + self.current_folder_dropdown.addItem("Open...", "open in browser") + self.current_folder_dropdown.addItem("Add custom", "add custom path") + self.folder_index = self.current_folder_dropdown.currentIndex() + self.current_folder = self.current_folder_dropdown.itemData( + self.folder_index) + self.current_folder_dropdown.blockSignals(False) + return + + def updateScriptsDropdown(self): + ''' Populate py scripts dropdown list ''' + self.current_script_dropdown.blockSignals(True) + self.current_script_dropdown.clear() # First remove all items + QtWidgets.QApplication.processEvents() + log("# Updating scripts dropdown...") + log("scripts dir:" + self.scripts_dir) + log("current folder:" + self.current_folder) + log("previous current script:" + self.current_script) + #current_folder = self.current_folder_dropdown.itemData(self.current_folder_dropdown.currentIndex()) + current_folder_path = os.path.join( + self.scripts_dir, self.current_folder) + defaultScripts = ["Untitled.py"] + found_scripts = [] + counter = 0 + # All files and folders inside of the folder + dir_list = os.listdir(current_folder_path) + try: + found_scripts = sorted([f for f in dir_list if f.endswith(".py")]) + found_temp_scripts = [ + f for f in dir_list if f.endswith(".py.autosave")] + except: + log("Couldn't find any scripts in the selected folder.") + if not len(found_scripts): + for s in defaultScripts: + if s + ".autosave" in found_temp_scripts: + self.current_script_dropdown.addItem(s + "(*)", s) + else: + self.current_script_dropdown.addItem(s, s) + counter += 1 + else: + for s in defaultScripts: + if s + ".autosave" in found_temp_scripts: + self.current_script_dropdown.addItem(s + "(*)", s) + elif s in found_scripts: + self.current_script_dropdown.addItem(s, s) + for s in found_scripts: + if s in defaultScripts: + continue + sname = s.split("/")[-1] + if s + ".autosave" in found_temp_scripts: + self.current_script_dropdown.addItem(sname + "(*)", sname) + else: + self.current_script_dropdown.addItem(sname, sname) + counter += 1 + # else: #Add the found scripts to the dropdown + if counter > 0: + counter += 1 + self.current_script_dropdown.insertSeparator(counter) + counter += 1 + self.current_script_dropdown.insertSeparator(counter) + self.current_script_dropdown.addItem("New", "create new") + self.current_script_dropdown.addItem("Duplicate", "create duplicate") + self.current_script_dropdown.addItem("Delete", "delete script") + self.current_script_dropdown.addItem("Open", "open in browser") + #self.script_index = self.current_script_dropdown.currentIndex() + self.script_index = 0 + self.current_script = self.current_script_dropdown.itemData( + self.script_index) + log("Finished updating scripts dropdown.") + log("current_script:" + self.current_script) + self.current_script_dropdown.blockSignals(False) + return + + def makeScriptFolder(self, name="scripts"): + folder_path = os.path.join(self.scripts_dir, name) + if not os.path.exists(folder_path): + try: + os.makedirs(folder_path) + return True + except: + print "Couldn't create the scripting 
folders.\nPlease check your OS write permissions."
                return False

    def makeScriptFile(self, name="Untitled.py", folder="scripts", empty=True):
        script_path = os.path.join(self.scripts_dir, self.current_folder, name)
        if not os.path.isfile(script_path):
            try:
                # Create the file and close the handle right away so it isn't
                # left open for writing.
                self.current_script_file = open(script_path, 'w')
                self.current_script_file.close()
                return True
            except:
                print "Couldn't create the script file.\nPlease check your OS write permissions."
                return False

    def setCurrentFolder(self, folderName):
        ''' Set the current folder ON THE DROPDOWN ONLY '''
        folderList = [self.current_folder_dropdown.itemData(
            i) for i in range(self.current_folder_dropdown.count())]
        if folderName in folderList:
            index = folderList.index(folderName)
            self.current_folder_dropdown.setCurrentIndex(index)
            self.current_folder = folderName
        self.folder_index = self.current_folder_dropdown.currentIndex()
        self.current_folder = self.current_folder_dropdown.itemData(
            self.folder_index)
        return

    def setCurrentScript(self, scriptName):
        ''' Set the current script ON THE DROPDOWN ONLY '''
        scriptList = [self.current_script_dropdown.itemData(
            i) for i in range(self.current_script_dropdown.count())]
        if scriptName in scriptList:
            index = scriptList.index(scriptName)
            self.current_script_dropdown.setCurrentIndex(index)
            self.current_script = scriptName
        self.script_index = self.current_script_dropdown.currentIndex()
        self.current_script = self.current_script_dropdown.itemData(
            self.script_index)
        return

    def loadScriptContents(self, check=False, pyOnly=False, folder=""):
        ''' Get the contents of the selected script and populate the editor '''
        log("# About to load script contents now.")
        obtained_scrollValue = 0
        obtained_cursorPosValue = [0, 0]  # Position, anchor
        if folder == "":
            folder = self.current_folder
        script_path = os.path.join(
            self.scripts_dir, folder, self.current_script)
        script_path_temp = script_path + ".autosave"
        if (self.current_folder + "/" + self.current_script) in self.scrollPos:
            obtained_scrollValue = self.scrollPos[self.current_folder +
                                                  "/" + self.current_script]
        if (self.current_folder + "/" + self.current_script) in self.cursorPos:
            obtained_cursorPosValue = self.cursorPos[self.current_folder +
                                                     "/" + self.current_script]

        # 1: If an autosave exists and pyOnly is False, load the autosave
        if os.path.isfile(script_path_temp) and not pyOnly:
            log("Loading .py.autosave file\n---")
            with open(script_path_temp, 'r') as script:
                content = script.read()
            self.script_editor.setPlainText(content)
            self.setScriptModified(True)
            self.script_editor.verticalScrollBar().setValue(obtained_scrollValue)

        # 2: Otherwise load the .py file itself, if it exists
        elif os.path.isfile(script_path):
            log("Loading .py file\n---")
            with open(script_path, 'r') as script:
                content = script.read()
            current_text = self.script_editor.toPlainText().encode("utf8")
            if check and current_text != content and current_text.strip() != "":
                msgBox = QtWidgets.QMessageBox()
                msgBox.setText("The script has been modified.")
                msgBox.setInformativeText(
                    "Do you want to overwrite the current code on this editor?")
                msgBox.setStandardButtons(
                    QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
                msgBox.setIcon(QtWidgets.QMessageBox.Question)
                msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
                msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
                reply = msgBox.exec_()
                if reply == QtWidgets.QMessageBox.No:
                    return
            # Clear the now stale autosave file
            if os.path.isfile(script_path_temp):
os.remove(script_path_temp) + log("Removed " + script_path_temp) + self.setScriptModified(False) + self.script_editor.setPlainText(content) + self.script_editor.verticalScrollBar().setValue(obtained_scrollValue) + self.setScriptModified(False) + self.loadScriptState() + self.setScriptState() + + # 3: If .py doesn't exist... only then stick to the autosave + elif os.path.isfile(script_path_temp): + with open(script_path_temp, 'r') as script: + content = script.read() + + msgBox = QtWidgets.QMessageBox() + msgBox.setText("The .py file hasn't been found.") + msgBox.setInformativeText( + "Do you want to clear the current code on this editor?") + msgBox.setStandardButtons( + QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No) + msgBox.setIcon(QtWidgets.QMessageBox.Question) + msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) + msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes) + reply = msgBox.exec_() + if reply == QtWidgets.QMessageBox.No: + return + + # Clear trash + os.remove(script_path_temp) + log("Removed " + script_path_temp) + self.script_editor.setPlainText("") + self.updateScriptsDropdown() + self.loadScriptContents(check=False) + self.loadScriptState() + self.setScriptState() + + else: + content = "" + self.script_editor.setPlainText(content) + self.setScriptModified(False) + if self.current_folder + "/" + self.current_script in self.scrollPos: + del self.scrollPos[self.current_folder + + "/" + self.current_script] + if self.current_folder + "/" + self.current_script in self.cursorPos: + del self.cursorPos[self.current_folder + + "/" + self.current_script] + + self.setWindowTitle("KnobScripter - %s/%s" % + (self.current_folder, self.current_script)) + return + + def saveScriptContents(self, temp=True): + ''' Save the current contents of the editor into the python file. If temp == True, saves a .py.autosave file ''' + log("\n# About to save script contents now.") + log("Temp mode is: " + str(temp)) + log("self.current_folder: " + self.current_folder) + log("self.current_script: " + self.current_script) + script_path = os.path.join( + self.scripts_dir, self.current_folder, self.current_script) + script_path_temp = script_path + ".autosave" + orig_content = "" + content = self.script_editor.toPlainText().encode('utf8') + + if temp == True: + if os.path.isfile(script_path): + with open(script_path, 'r') as script: + orig_content = script.read() + # If script path doesn't exist and autosave does but the script is empty... 
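            # The autosave scheme writes pending edits to a sidecar file named
            # "<script>.py.autosave" next to the real .py, and removes it again
            # once the sidecar would match the saved file or the user saves for
            # real (temp=False). For example, editing the default
            # "scripts/Untitled.py" leaves "scripts/Untitled.py.autosave"
            # around until the next explicit save.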
+ elif content == "" and os.path.isfile(script_path_temp): + os.remove(script_path_temp) + return + if content != orig_content: + with open(script_path_temp, 'w') as script: + script.write(content) + else: + if os.path.isfile(script_path_temp): + os.remove(script_path_temp) + log("Nothing to save") + return + else: + with open(script_path, 'w') as script: + script.write(self.script_editor.toPlainText().encode('utf8')) + # Clear trash + if os.path.isfile(script_path_temp): + os.remove(script_path_temp) + log("Removed " + script_path_temp) + self.setScriptModified(False) + self.saveScrollValue() + self.saveCursorPosValue() + log("Saved " + script_path + "\n---") + return + + def deleteScript(self, check=True, folder=""): + ''' Get the contents of the selected script and populate the editor ''' + log("# About to delete the .py and/or autosave script now.") + if folder == "": + folder = self.current_folder + script_path = os.path.join( + self.scripts_dir, folder, self.current_script) + script_path_temp = script_path + ".autosave" + if check: + msgBox = QtWidgets.QMessageBox() + msgBox.setText("You're about to delete this script.") + msgBox.setInformativeText( + "Are you sure you want to delete {}?".format(self.current_script)) + msgBox.setStandardButtons( + QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No) + msgBox.setIcon(QtWidgets.QMessageBox.Question) + msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) + msgBox.setDefaultButton(QtWidgets.QMessageBox.No) + reply = msgBox.exec_() + if reply == QtWidgets.QMessageBox.No: + return False + + if os.path.isfile(script_path_temp): + os.remove(script_path_temp) + log("Removed " + script_path_temp) + + if os.path.isfile(script_path): + os.remove(script_path) + log("Removed " + script_path) + + return True + + def folderDropdownChanged(self): + '''Executed when the current folder dropdown is changed''' + self.saveScriptState() + log("# folder dropdown changed") + folders_dropdown = self.current_folder_dropdown + fd_value = folders_dropdown.currentText() + fd_index = folders_dropdown.currentIndex() + fd_data = folders_dropdown.itemData(fd_index) + if fd_data == "create new": + panel = FileNameDialog(self, mode="folder") + # panel.setWidth(260) + # panel.addSingleLineInput("Name:","") + if panel.exec_(): + # Accepted + folder_name = panel.text + if os.path.isdir(os.path.join(self.scripts_dir, folder_name)): + self.messageBox("Folder already exists.") + self.setCurrentFolder(self.current_folder) + if self.makeScriptFolder(name=folder_name): + self.saveScriptContents(temp=True) + # Success creating the folder + self.current_folder = folder_name + self.updateFoldersDropdown() + self.setCurrentFolder(folder_name) + self.updateScriptsDropdown() + self.loadScriptContents(check=False) + else: + self.messageBox("There was a problem creating the folder.") + self.current_folder_dropdown.blockSignals(True) + self.current_folder_dropdown.setCurrentIndex( + self.folder_index) + self.current_folder_dropdown.blockSignals(False) + else: + # Canceled/rejected + self.current_folder_dropdown.blockSignals(True) + self.current_folder_dropdown.setCurrentIndex(self.folder_index) + self.current_folder_dropdown.blockSignals(False) + return + + elif fd_data == "open in browser": + current_folder_path = os.path.join( + self.scripts_dir, self.current_folder) + self.openInFileBrowser(current_folder_path) + self.current_folder_dropdown.blockSignals(True) + self.current_folder_dropdown.setCurrentIndex(self.folder_index) + self.current_folder_dropdown.blockSignals(False) + 
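            # Note the pattern used by every special entry in this dropdown:
            # the itemData strings "create new", "open in browser" and
            # "add custom path" are sentinels that trigger an action rather
            # than select a folder, after which the previous index is restored
            # with signals blocked so this handler doesn't fire again.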
return + + elif fd_data == "add custom path": + folder_path = nuke.getFilename('Select custom folder.') + if folder_path is not None: + if folder_path.endswith("/"): + aliasName = folder_path.split("/")[-2] + else: + aliasName = folder_path.split("/")[-1] + if not os.path.isdir(folder_path): + self.messageBox( + "Folder not found. Please try again with the full path to a folder.") + elif not len(aliasName): + self.messageBox( + "Folder with the same name already exists. Please delete or rename it first.") + else: + # All good + os.symlink(folder_path, os.path.join( + self.scripts_dir, aliasName)) + self.saveScriptContents(temp=True) + self.current_folder = aliasName + self.updateFoldersDropdown() + self.setCurrentFolder(aliasName) + self.updateScriptsDropdown() + self.loadScriptContents(check=False) + self.script_editor.setFocus() + return + self.current_folder_dropdown.blockSignals(True) + self.current_folder_dropdown.setCurrentIndex(self.folder_index) + self.current_folder_dropdown.blockSignals(False) + else: + # 1: Save current script as temp if needed + self.saveScriptContents(temp=True) + # 2: Set the new folder in the variables + self.current_folder = fd_data + self.folder_index = fd_index + # 3: Update the scripts dropdown + self.updateScriptsDropdown() + # 4: Load the current script! + self.loadScriptContents() + self.script_editor.setFocus() + + self.loadScriptState() + self.setScriptState() + + return + + def scriptDropdownChanged(self): + '''Executed when the current script dropdown is changed. Should only be called by the manual dropdown change. Not by other functions.''' + self.saveScriptState() + scripts_dropdown = self.current_script_dropdown + sd_value = scripts_dropdown.currentText() + sd_index = scripts_dropdown.currentIndex() + sd_data = scripts_dropdown.itemData(sd_index) + if sd_data == "create new": + self.current_script_dropdown.blockSignals(True) + panel = FileNameDialog(self, mode="script") + if panel.exec_(): + # Accepted + script_name = panel.text + ".py" + script_path = os.path.join( + self.scripts_dir, self.current_folder, script_name) + log(script_name) + log(script_path) + if os.path.isfile(script_path): + self.messageBox("Script already exists.") + self.current_script_dropdown.setCurrentIndex( + self.script_index) + if self.makeScriptFile(name=script_name, folder=self.current_folder): + # Success creating the folder + self.saveScriptContents(temp=True) + self.updateScriptsDropdown() + if self.current_script != "Untitled.py": + self.script_editor.setPlainText("") + self.current_script = script_name + self.setCurrentScript(script_name) + self.saveScriptContents(temp=False) + # self.loadScriptContents() + else: + self.messageBox("There was a problem creating the script.") + self.current_script_dropdown.setCurrentIndex( + self.script_index) + else: + # Canceled/rejected + self.current_script_dropdown.setCurrentIndex(self.script_index) + return + self.current_script_dropdown.blockSignals(False) + + elif sd_data == "create duplicate": + self.current_script_dropdown.blockSignals(True) + current_folder_path = os.path.join( + self.scripts_dir, self.current_folder, self.current_script) + current_script_path = os.path.join( + self.scripts_dir, self.current_folder, self.current_script) + + current_name = self.current_script + if self.current_script.endswith(".py"): + current_name = current_name[:-3] + + test_name = current_name + while True: + test_name += "_copy" + new_script_path = os.path.join( + self.scripts_dir, self.current_folder, test_name + ".py") + if not 
os.path.isfile(new_script_path): + break + + script_name = test_name + ".py" + + if self.makeScriptFile(name=script_name, folder=self.current_folder): + # Success creating the folder + self.saveScriptContents(temp=True) + self.updateScriptsDropdown() + # self.script_editor.setPlainText("") + self.current_script = script_name + self.setCurrentScript(script_name) + self.script_editor.setFocus() + else: + self.messageBox("There was a problem duplicating the script.") + self.current_script_dropdown.setCurrentIndex(self.script_index) + + self.current_script_dropdown.blockSignals(False) + + elif sd_data == "open in browser": + current_script_path = os.path.join( + self.scripts_dir, self.current_folder, self.current_script) + self.openInFileBrowser(current_script_path) + self.current_script_dropdown.blockSignals(True) + self.current_script_dropdown.setCurrentIndex(self.script_index) + self.current_script_dropdown.blockSignals(False) + return + + elif sd_data == "delete script": + if self.deleteScript(): + self.updateScriptsDropdown() + self.loadScriptContents() + else: + self.current_script_dropdown.blockSignals(True) + self.current_script_dropdown.setCurrentIndex(self.script_index) + self.current_script_dropdown.blockSignals(False) + + else: + self.saveScriptContents() + self.current_script = sd_data + self.script_index = sd_index + self.setCurrentScript(self.current_script) + self.loadScriptContents() + self.script_editor.setFocus() + self.loadScriptState() + self.setScriptState() + return + + def setScriptModified(self, modified=True): + ''' Sets self.current_script_modified, title and whatever else we need ''' + self.current_script_modified = modified + title_modified_string = " [modified]" + windowTitle = self.windowTitle().split(title_modified_string)[0] + if modified == True: + windowTitle += title_modified_string + self.setWindowTitle(windowTitle) + try: + scripts_dropdown = self.current_script_dropdown + sd_index = scripts_dropdown.currentIndex() + sd_data = scripts_dropdown.itemData(sd_index) + if modified == False: + scripts_dropdown.setItemText(sd_index, sd_data) + else: + scripts_dropdown.setItemText(sd_index, sd_data + "(*)") + except: + pass + + def openInFileBrowser(self, path=""): + OS = platform.system() + if not os.path.exists(path): + path = KS_DIR + if OS == "Windows": + os.startfile(path) + elif OS == "Darwin": + subprocess.Popen(["open", path]) + else: + subprocess.Popen(["xdg-open", path]) + + def loadScriptState(self): + ''' + Loads the last state of the script from a file inside the SE directory's root. 
+ SAVES self.scroll_pos, self.cursor_pos, self.last_open_script + ''' + self.state_dict = {} + if not os.path.isfile(self.state_txt_path): + return False + else: + with open(self.state_txt_path, "r") as f: + self.state_dict = json.load(f) + + log("Loading script state into self.state_dict, self.scrollPos, self.cursorPos") + log(self.state_dict) + + if "scroll_pos" in self.state_dict: + self.scrollPos = self.state_dict["scroll_pos"] + if "cursor_pos" in self.state_dict: + self.cursorPos = self.state_dict["cursor_pos"] + + def setScriptState(self): + ''' + Sets the already script state from self.state_dict into the current script if applicable + ''' + script_fullname = self.current_folder + "/" + self.current_script + + if "scroll_pos" in self.state_dict: + if script_fullname in self.state_dict["scroll_pos"]: + self.script_editor.verticalScrollBar().setValue( + int(self.state_dict["scroll_pos"][script_fullname])) + + if "cursor_pos" in self.state_dict: + if script_fullname in self.state_dict["cursor_pos"]: + cursor = self.script_editor.textCursor() + cursor.setPosition(int( + self.state_dict["cursor_pos"][script_fullname][1]), QtGui.QTextCursor.MoveAnchor) + cursor.setPosition(int( + self.state_dict["cursor_pos"][script_fullname][0]), QtGui.QTextCursor.KeepAnchor) + self.script_editor.setTextCursor(cursor) + + if 'splitter_sizes' in self.state_dict: + self.splitter.setSizes(self.state_dict['splitter_sizes']) + + def setLastScript(self): + if 'last_folder' in self.state_dict and 'last_script' in self.state_dict: + self.updateFoldersDropdown() + self.setCurrentFolder(self.state_dict['last_folder']) + self.updateScriptsDropdown() + self.setCurrentScript(self.state_dict['last_script']) + self.loadScriptContents() + self.script_editor.setFocus() + + def saveScriptState(self): + ''' Stores the current state of the script into a file inside the SE directory's root ''' + log("About to save script state...") + ''' + # self.state_dict = {} + if os.path.isfile(self.state_txt_path): + with open(self.state_txt_path, "r") as f: + self.state_dict = json.load(f) + + if "scroll_pos" in self.state_dict: + self.scrollPos = self.state_dict["scroll_pos"] + if "cursor_pos" in self.state_dict: + self.cursorPos = self.state_dict["cursor_pos"] + + ''' + self.loadScriptState() + + # Overwrite current values into the scriptState + self.saveScrollValue() + self.saveCursorPosValue() + + self.state_dict['scroll_pos'] = self.scrollPos + self.state_dict['cursor_pos'] = self.cursorPos + self.state_dict['last_folder'] = self.current_folder + self.state_dict['last_script'] = self.current_script + self.state_dict['splitter_sizes'] = self.splitter.sizes() + + with open(self.state_txt_path, "w") as f: + state = json.dump(self.state_dict, f, sort_keys=True, indent=4) + return state + + # Autosave background loop + def autosave(self): + if self.toAutosave: + # Save the script... 
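            # toAutosave is flipped on by setModified() whenever the editor
            # text changes in script mode; autosave() is assumed to be invoked
            # periodically by a trigger registered outside this excerpt, so a
            # hypothetical sequence looks like:
            #     ks.toAutosave = True  # after an edit
            #     ks.autosave()         # persists the .py.autosave sidecar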
+ self.saveScriptContents() + self.toAutosave = False + self.saveScriptState() + log("autosaving...") + return + + # Global stuff + def setTextSelection(self): + self.highlighter.selected_text = self.script_editor.textCursor().selection().toPlainText() + return + + def eventFilter(self, object, event): + if event.type() == QtCore.QEvent.KeyPress: + return QtWidgets.QWidget.eventFilter(self, object, event) + else: + return QtWidgets.QWidget.eventFilter(self, object, event) + + def resizeEvent(self, res_event): + w = self.frameGeometry().width() + self.current_node_label_node.setVisible(w > 460) + self.script_label.setVisible(w > 460) + return super(KnobScripter, self).resizeEvent(res_event) + + def changeClicked(self, newNode=""): + ''' Change node ''' + try: + print "Changing from " + self.node.name() + except: + self.node = None + if not len(nuke.selectedNodes()): + self.exitNodeMode() + return + nuke.menu("Nuke").findItem( + "Edit/Node/Update KnobScripter Context").invoke() + selection = knobScripterSelectedNodes + if self.nodeMode: # Only update the number of unsaved knobs if we were already in node mode + if self.node is not None: + updatedCount = self.updateUnsavedKnobs() + else: + updatedCount = 0 + else: + updatedCount = 0 + self.autosave() + if newNode != "" and nuke.exists(newNode): + selection = [newNode] + elif not len(selection): + node_dialog = ChooseNodeDialog(self) + if node_dialog.exec_(): + # Accepted + selection = [nuke.toNode(node_dialog.name)] + else: + return + + # Change to node mode... + self.node_mode_bar.setVisible(True) + self.script_mode_bar.setVisible(False) + if not self.nodeMode: + self.saveScriptContents() + self.toAutosave = False + self.saveScriptState() + self.splitter.setSizes([0, 1]) + self.nodeMode = True + + # If already selected, pass + if self.node is not None and selection[0].fullName() == self.node.fullName(): + self.messageBox("Please select a different node first!") + return + elif updatedCount > 0: + msgBox = QtWidgets.QMessageBox() + msgBox.setText( + "Save changes to %s knob%s before changing the node?" % (str(updatedCount), int(updatedCount > 1) * "s")) + msgBox.setStandardButtons( + QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No | QtWidgets.QMessageBox.Cancel) + msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) + msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes) + reply = msgBox.exec_() + if reply == QtWidgets.QMessageBox.Yes: + self.saveAllKnobValues(check=False) + elif reply == QtWidgets.QMessageBox.Cancel: + return + if len(selection) > 1: + self.messageBox( + "More than one node selected.\nChanging knobChanged editor to %s" % selection[0].fullName()) + # Reinitialise everything, wooo! 
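        # From here on, the panel is rebound to the chosen node: signals are
        # blocked while the knob dropdown is rebuilt, the editor and the
        # per-node caches (unsavedKnobs, scrollPos) are cleared, and the
        # current knob is loaded again before signals are re-enabled.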
+ self.current_knob_dropdown.blockSignals(True) + self.node = selection[0] + + self.script_editor.setPlainText("") + self.unsavedKnobs = {} + self.scrollPos = {} + self.setWindowTitle("KnobScripter - %s %s" % + (self.node.fullName(), self.knob)) + self.current_node_label_name.setText(self.node.fullName()) + + self.toLoadKnob = False + self.updateKnobDropdown() # onee + # self.current_knob_dropdown.repaint() + # self.current_knob_dropdown.setMinimumWidth(self.current_knob_dropdown.minimumSizeHint().width()) + self.toLoadKnob = True + self.setCurrentKnob(self.knob) + self.loadKnobValue(False) + self.script_editor.setFocus() + self.setKnobModified(False) + self.current_knob_dropdown.blockSignals(False) + # self.current_knob_dropdown.setMinimumContentsLength(80) + return + + def exitNodeMode(self): + self.nodeMode = False + self.setWindowTitle("KnobScripter - Script Mode") + self.node_mode_bar.setVisible(False) + self.script_mode_bar.setVisible(True) + self.node = nuke.toNode("root") + # self.updateFoldersDropdown() + # self.updateScriptsDropdown() + self.splitter.setSizes([1, 1]) + self.loadScriptState() + self.setLastScript() + + self.loadScriptContents(check=False) + self.setScriptState() + + def clearConsole(self): + self.origConsoleText = self.nukeSEOutput.document().toPlainText().encode("utf8") + self.script_output.setPlainText("") + + def toggleFRW(self, frw_pressed): + self.frw_open = frw_pressed + self.frw.setVisible(self.frw_open) + if self.frw_open: + self.frw.find_lineEdit.setFocus() + self.frw.find_lineEdit.selectAll() + else: + self.script_editor.setFocus() + return + + def openSnippets(self): + ''' Whenever the 'snippets' button is pressed... open the panel ''' + global SnippetEditPanel + if SnippetEditPanel == "": + SnippetEditPanel = SnippetsPanel(self) + + if not SnippetEditPanel.isVisible(): + SnippetEditPanel.reload() + + if SnippetEditPanel.show(): + self.snippets = self.loadSnippets(maxDepth=5) + SnippetEditPanel = "" + + def loadSnippets(self, path="", maxDepth=5, depth=0): + ''' + Load prefs recursive. When maximum recursion depth, ignores paths. + ''' + max_depth = maxDepth + cur_depth = depth + if path == "": + path = self.snippets_txt_path + if not os.path.isfile(path): + return {} + else: + loaded_snippets = {} + with open(path, "r") as f: + file = json.load(f) + for i, (key, val) in enumerate(file.items()): + if re.match(r"\[custom-path-[0-9]+\]$", key): + if cur_depth < max_depth: + new_dict = self.loadSnippets( + path=val, maxDepth=max_depth, depth=cur_depth + 1) + loaded_snippets.update(new_dict) + else: + loaded_snippets[key] = val + return loaded_snippets + + def messageBox(self, the_text=""): + ''' Just a simple message box ''' + if self.isPane: + msgBox = QtWidgets.QMessageBox() + else: + msgBox = QtWidgets.QMessageBox(self) + msgBox.setText(the_text) + msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) + msgBox.exec_() + + def openPrefs(self): + ''' Open the preferences panel ''' + global PrefsPanel + if PrefsPanel == "": + PrefsPanel = KnobScripterPrefs(self) + + if PrefsPanel.show(): + PrefsPanel = "" + + def loadPrefs(self): + ''' Load prefs ''' + if not os.path.isfile(self.prefs_txt): + return [] + else: + with open(self.prefs_txt, "r") as f: + prefs = json.load(f) + return prefs + + def runScript(self): + ''' Run the current script... 
'''
        self.script_editor.runScript()

    def saveScrollValue(self):
        ''' Store the current scroll value for the active knob or script '''
        if self.nodeMode:
            self.scrollPos[self.knob] = self.script_editor.verticalScrollBar(
            ).value()
        else:
            self.scrollPos[self.current_folder + "/" +
                           self.current_script] = self.script_editor.verticalScrollBar().value()

    def saveCursorPosValue(self):
        ''' Store the cursor position and anchor for the current script '''
        self.cursorPos[self.current_folder + "/" + self.current_script] = [
            self.script_editor.textCursor().position(), self.script_editor.textCursor().anchor()]

    def closeEvent(self, close_event):
        if self.nodeMode:
            updatedCount = self.updateUnsavedKnobs()
            if updatedCount > 0:
                msgBox = QtWidgets.QMessageBox()
                msgBox.setText("Save changes to %s knob%s before closing?" % (
                    str(updatedCount), int(updatedCount > 1) * "s"))
                msgBox.setStandardButtons(
                    QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No | QtWidgets.QMessageBox.Cancel)
                msgBox.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
                msgBox.setDefaultButton(QtWidgets.QMessageBox.Yes)
                reply = msgBox.exec_()
                if reply == QtWidgets.QMessageBox.Yes:
                    self.saveAllKnobValues(check=False)
                    close_event.accept()
                    return
                elif reply == QtWidgets.QMessageBox.Cancel:
                    close_event.ignore()
                    return
            else:
                close_event.accept()
        else:
            self.autosave()
            if self in AllKnobScripters:
                AllKnobScripters.remove(self)
            close_event.accept()

    # Landing functions

    def refreshClicked(self):
        ''' Refresh the dropdowns '''
        if self.nodeMode:
            knob = self.current_knob_dropdown.itemData(
                self.current_knob_dropdown.currentIndex()).encode('UTF8')
            self.current_knob_dropdown.blockSignals(True)
            self.current_knob_dropdown.clear()  # First remove all items
            self.updateKnobDropdown()
            availableKnobs = []
            for i in range(self.current_knob_dropdown.count()):
                if self.current_knob_dropdown.itemData(i) is not None:
                    availableKnobs.append(
                        self.current_knob_dropdown.itemData(i).encode('UTF8'))
            if knob in availableKnobs:
                self.setCurrentKnob(knob)
            self.current_knob_dropdown.blockSignals(False)
        else:
            folder = self.current_folder
            script = self.current_script
            self.autosave()
            self.updateFoldersDropdown()
            self.setCurrentFolder(folder)
            self.updateScriptsDropdown()
            self.setCurrentScript(script)
            self.script_editor.setFocus()

    def reloadClicked(self):
        if self.nodeMode:
            self.loadKnobValue()
        else:
            log("Node mode is off")
            self.loadScriptContents(check=True, pyOnly=True)

    def saveClicked(self):
        if self.nodeMode:
            self.saveKnobValue(False)
        else:
            self.saveScriptContents(temp=False)

    def setModified(self):
        if self.nodeMode:
            self.setKnobModified(True)
        elif not self.current_script_modified:
            self.setScriptModified(True)
        if not self.nodeMode:
            self.toAutosave = True

    def pin(self, pressed):
        if pressed:
            self.setWindowFlags(self.windowFlags() |
                                QtCore.Qt.WindowStaysOnTopHint)
            self.pinned = True
            self.show()
        else:
            self.setWindowFlags(self.windowFlags() & ~
                                QtCore.Qt.WindowStaysOnTopHint)
            self.pinned = False
            self.show()

    def findSE(self):
        for widget in QtWidgets.QApplication.allWidgets():
            if "Script Editor" in widget.windowTitle():
                return widget

    # Functions for Nuke's Script Editor
    def findScriptEditors(self):
        script_editors = []
        for widget in QtWidgets.QApplication.allWidgets():
            if "Script Editor" in widget.windowTitle() and len(widget.children()) > 5:
                script_editors.append(widget)
        return script_editors

    def findSEInput(self, 
se): + return se.children()[-1].children()[0] + + def findSEOutput(self, se): + return se.children()[-1].children()[1] + + def findSERunBtn(self, se): + for btn in se.children(): + try: + if "Run the current script" in btn.toolTip(): + return btn + except: + pass + return False + + def setSEOutputEvent(self): + nukeScriptEditors = self.findScriptEditors() + # Take the console from the first script editor found... + self.origConsoleText = self.nukeSEOutput.document().toPlainText().encode("utf8") + for se in nukeScriptEditors: + se_output = self.findSEOutput(se) + se_output.textChanged.connect( + partial(consoleChanged, se_output, self)) + consoleChanged(se_output, self) # Initialise. + + +class KnobScripterPane(KnobScripter): + def __init__(self, node="", knob="knobChanged"): + super(KnobScripterPane, self).__init__() + self.isPane = True + + def showEvent(self, the_event): + try: + killPaneMargins(self) + except: + pass + return KnobScripter.showEvent(self, the_event) + + def hideEvent(self, the_event): + self.autosave() + return KnobScripter.hideEvent(self, the_event) + + +def consoleChanged(self, ks): + ''' This will be called every time the ScriptEditor Output text is changed ''' + try: + if ks: # KS exists + ksOutput = ks.script_output # The console TextEdit widget + ksText = self.document().toPlainText().encode("utf8") + # The text from the console that will be omitted + origConsoleText = ks.origConsoleText + if ksText.startswith(origConsoleText): + ksText = ksText[len(origConsoleText):] + else: + ks.origConsoleText = "" + ksOutput.setPlainText(ksText) + ksOutput.verticalScrollBar().setValue(ksOutput.verticalScrollBar().maximum()) + except: + pass + + +def killPaneMargins(widget_object): + if widget_object: + target_widgets = set() + target_widgets.add(widget_object.parentWidget().parentWidget()) + target_widgets.add(widget_object.parentWidget( + ).parentWidget().parentWidget().parentWidget()) + + for widget_layout in target_widgets: + try: + widget_layout.layout().setContentsMargins(0, 0, 0, 0) + except: + pass + + +def debug(lev=0): + ''' Convenience function to set the KnobScripter on debug mode''' + # levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] + # for handler in logging.root.handlers[:]: + # logging.root.removeHandler(handler) + # logging.basicConfig(level=levels[lev]) + # Changed to a shitty way for now + global DebugMode + DebugMode = True + + +def log(text): + ''' Display a debug info message. Yes, in a stupid way. I know.''' + global DebugMode + if DebugMode: + print(text) + + +# --------------------------------------------------------------------- +# Dialogs +# --------------------------------------------------------------------- +class FileNameDialog(QtWidgets.QDialog): + ''' + Dialog for creating new... (mode = "folder", "script" or "knob"). 
+ ''' + + def __init__(self, parent=None, mode="folder", text=""): + if parent.isPane: + super(FileNameDialog, self).__init__() + else: + super(FileNameDialog, self).__init__(parent) + #self.setWindowFlags(self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint) + self.mode = mode + self.text = text + + title = "Create new {}.".format(self.mode) + self.setWindowTitle(title) + + self.initUI() + + def initUI(self): + # Widgets + self.name_label = QtWidgets.QLabel("Name: ") + self.name_label.setAlignment( + QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter) + self.name_lineEdit = QtWidgets.QLineEdit() + self.name_lineEdit.setText(self.text) + self.name_lineEdit.textChanged.connect(self.nameChanged) + + # Buttons + self.button_box = QtWidgets.QDialogButtonBox( + QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel) + self.button_box.button( + QtWidgets.QDialogButtonBox.Ok).setEnabled(self.text != "") + self.button_box.accepted.connect(self.clickedOk) + self.button_box.rejected.connect(self.clickedCancel) + + # Layout + self.master_layout = QtWidgets.QVBoxLayout() + self.name_layout = QtWidgets.QHBoxLayout() + self.name_layout.addWidget(self.name_label) + self.name_layout.addWidget(self.name_lineEdit) + self.master_layout.addLayout(self.name_layout) + self.master_layout.addWidget(self.button_box) + self.setLayout(self.master_layout) + + self.name_lineEdit.setFocus() + self.setMinimumWidth(250) + + def nameChanged(self): + txt = self.name_lineEdit.text() + m = r"[\w]*$" + if self.mode == "knob": # Knobs can't start with a number... + m = r"[a-zA-Z_]+" + m + + if re.match(m, txt) or txt == "": + self.text = txt + else: + self.name_lineEdit.setText(self.text) + + self.button_box.button( + QtWidgets.QDialogButtonBox.Ok).setEnabled(self.text != "") + return + + def clickedOk(self): + self.accept() + return + + def clickedCancel(self): + self.reject() + return + + +class TextInputDialog(QtWidgets.QDialog): + ''' + Simple dialog for a text input. 
+ ''' + + def __init__(self, parent=None, name="", text="", title=""): + if parent.isPane: + super(TextInputDialog, self).__init__() + else: + super(TextInputDialog, self).__init__(parent) + #self.setWindowFlags(self.windowFlags() | QtCore.Qt.WindowStaysOnTopHint) + + self.name = name # title of textinput + self.text = text # default content of textinput + + self.setWindowTitle(title) + + self.initUI() + + def initUI(self): + # Widgets + self.name_label = QtWidgets.QLabel(self.name + ": ") + self.name_label.setAlignment( + QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter) + self.name_lineEdit = QtWidgets.QLineEdit() + self.name_lineEdit.setText(self.text) + self.name_lineEdit.textChanged.connect(self.nameChanged) + + # Buttons + self.button_box = QtWidgets.QDialogButtonBox( + QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel) + #self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(self.text != "") + self.button_box.accepted.connect(self.clickedOk) + self.button_box.rejected.connect(self.clickedCancel) + + # Layout + self.master_layout = QtWidgets.QVBoxLayout() + self.name_layout = QtWidgets.QHBoxLayout() + self.name_layout.addWidget(self.name_label) + self.name_layout.addWidget(self.name_lineEdit) + self.master_layout.addLayout(self.name_layout) + self.master_layout.addWidget(self.button_box) + self.setLayout(self.master_layout) + + self.name_lineEdit.setFocus() + self.setMinimumWidth(250) + + def nameChanged(self): + self.text = self.name_lineEdit.text() + + def clickedOk(self): + self.accept() + return + + def clickedCancel(self): + self.reject() + return + + +class ChooseNodeDialog(QtWidgets.QDialog): + ''' + Dialog for selecting a node by its name. Only admits nodes that exist (including root, preferences...) + ''' + + def __init__(self, parent=None, name=""): + if parent.isPane: + super(ChooseNodeDialog, self).__init__() + else: + super(ChooseNodeDialog, self).__init__(parent) + + self.name = name # Name of node (will be "" by default) + self.allNodes = [] + + self.setWindowTitle("Enter the node's name...") + + self.initUI() + + def initUI(self): + # Widgets + self.name_label = QtWidgets.QLabel("Name: ") + self.name_label.setAlignment( + QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter) + self.name_lineEdit = QtWidgets.QLineEdit() + self.name_lineEdit.setText(self.name) + self.name_lineEdit.textChanged.connect(self.nameChanged) + + self.allNodes = self.getAllNodes() + completer = QtWidgets.QCompleter(self.allNodes, self) + completer.setCaseSensitivity(QtCore.Qt.CaseInsensitive) + self.name_lineEdit.setCompleter(completer) + + # Buttons + self.button_box = QtWidgets.QDialogButtonBox( + QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel) + self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setEnabled( + nuke.exists(self.name)) + self.button_box.accepted.connect(self.clickedOk) + self.button_box.rejected.connect(self.clickedCancel) + + # Layout + self.master_layout = QtWidgets.QVBoxLayout() + self.name_layout = QtWidgets.QHBoxLayout() + self.name_layout.addWidget(self.name_label) + self.name_layout.addWidget(self.name_lineEdit) + self.master_layout.addLayout(self.name_layout) + self.master_layout.addWidget(self.button_box) + self.setLayout(self.master_layout) + + self.name_lineEdit.setFocus() + self.setMinimumWidth(250) + + def getAllNodes(self): + self.allNodes = [n.fullName() for n in nuke.allNodes( + recurseGroups=True)] # if parent is in current context?? 
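        # "root" and "preferences" are not returned by nuke.allNodes() but are
        # valid arguments to nuke.toNode(), so they are appended by hand;
        # e.g. nuke.toNode("preferences").knob("echoAllCommands") is what the
        # echo toggle above relies on.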
+ self.allNodes.extend(["root", "preferences"]) + return self.allNodes + + def nameChanged(self): + self.name = self.name_lineEdit.text() + self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setEnabled( + self.name in self.allNodes) + + def clickedOk(self): + self.accept() + return + + def clickedCancel(self): + self.reject() + return + + +# ------------------------------------------------------------------------------------------------------ +# Script Editor Widget +# Wouter Gilsing built an incredibly useful python script editor for his Hotbox Manager, so I had it +# really easy for this part! +# Starting from his script editor, I changed the style and added the sublime-like functionality. +# I think this bit of code has the potential to get used in many nuke tools. +# Credit to him: http://www.woutergilsing.com/ +# Originally used on W_Hotbox v1.5: http://www.nukepedia.com/python/ui/w_hotbox +# ------------------------------------------------------------------------------------------------------ +class KnobScripterTextEdit(QtWidgets.QPlainTextEdit): + # Signal that will be emitted when the user has changed the text + userChangedEvent = QtCore.Signal() + + def __init__(self, knobScripter=""): + super(KnobScripterTextEdit, self).__init__() + + self.knobScripter = knobScripter + self.selected_text = "" + + # Setup line numbers + if self.knobScripter != "": + self.tabSpaces = self.knobScripter.tabSpaces + else: + self.tabSpaces = 4 + self.lineNumberArea = KSLineNumberArea(self) + self.blockCountChanged.connect(self.updateLineNumberAreaWidth) + self.updateRequest.connect(self.updateLineNumberArea) + self.updateLineNumberAreaWidth() + + # Highlight line + self.cursorPositionChanged.connect(self.highlightCurrentLine) + + # -------------------------------------------------------------------------------------------------- + # This is adapted from an original version by Wouter Gilsing. + # Extract from his original comments: + # While researching the implementation of line number, I had a look at Nuke's Blinkscript node. [..] + # thefoundry.co.uk/products/nuke/developers/100/pythonreference/nukescripts.blinkscripteditor-pysrc.html + # I stripped and modified the useful bits of the line number related parts of the code [..] + # Credits to theFoundry for writing the blinkscripteditor, best example code I could wish for. 
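    # A sizing note on the line number strip implemented below: its width is
    # digits(blockCount) times the width of a '9' plus a 7px margin, so e.g.
    # a 250-line script needs 3 digits and lineNumberAreaWidth() returns
    # 7 + 3 * fontMetrics().width('9').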
+ # -------------------------------------------------------------------------------------------------- + + def lineNumberAreaWidth(self): + digits = 1 + maxNum = max(1, self.blockCount()) + while (maxNum >= 10): + maxNum /= 10 + digits += 1 + + space = 7 + self.fontMetrics().width('9') * digits + return space + + def updateLineNumberAreaWidth(self): + self.setViewportMargins(self.lineNumberAreaWidth(), 0, 0, 0) + + def updateLineNumberArea(self, rect, dy): + + if (dy): + self.lineNumberArea.scroll(0, dy) + else: + self.lineNumberArea.update( + 0, rect.y(), self.lineNumberArea.width(), rect.height()) + + if (rect.contains(self.viewport().rect())): + self.updateLineNumberAreaWidth() + + def resizeEvent(self, event): + QtWidgets.QPlainTextEdit.resizeEvent(self, event) + + cr = self.contentsRect() + self.lineNumberArea.setGeometry(QtCore.QRect( + cr.left(), cr.top(), self.lineNumberAreaWidth(), cr.height())) + + def lineNumberAreaPaintEvent(self, event): + + if self.isReadOnly(): + return + + painter = QtGui.QPainter(self.lineNumberArea) + painter.fillRect(event.rect(), QtGui.QColor(36, 36, 36)) # Number bg + + block = self.firstVisibleBlock() + blockNumber = block.blockNumber() + top = int(self.blockBoundingGeometry( + block).translated(self.contentOffset()).top()) + bottom = top + int(self.blockBoundingRect(block).height()) + currentLine = self.document().findBlock( + self.textCursor().position()).blockNumber() + + painter.setPen(self.palette().color(QtGui.QPalette.Text)) + + painterFont = QtGui.QFont() + painterFont.setFamily("Courier") + painterFont.setStyleHint(QtGui.QFont.Monospace) + painterFont.setFixedPitch(True) + if self.knobScripter != "": + painterFont.setPointSize(self.knobScripter.fontSize) + painter.setFont(self.knobScripter.script_editor_font) + + while (block.isValid() and top <= event.rect().bottom()): + + textColor = QtGui.QColor(110, 110, 110) # Numbers + + if blockNumber == currentLine and self.hasFocus(): + textColor = QtGui.QColor(255, 170, 0) # Number highlighted + + painter.setPen(textColor) + + number = "%s" % str(blockNumber + 1) + painter.drawText(-3, top, self.lineNumberArea.width(), + self.fontMetrics().height(), QtCore.Qt.AlignRight, number) + + # Move to the next block + block = block.next() + top = bottom + bottom = top + int(self.blockBoundingRect(block).height()) + blockNumber += 1 + + def keyPressEvent(self, event): + ''' + Custom actions for specific keystrokes + ''' + key = event.key() + ctrl = bool(event.modifiers() & Qt.ControlModifier) + alt = bool(event.modifiers() & Qt.AltModifier) + shift = bool(event.modifiers() & Qt.ShiftModifier) + pre_scroll = self.verticalScrollBar().value() + #modifiers = QtWidgets.QApplication.keyboardModifiers() + #ctrl = (modifiers == Qt.ControlModifier) + #shift = (modifiers == Qt.ShiftModifier) + + up_arrow = 16777235 + down_arrow = 16777237 + + # if Tab convert to Space + if key == 16777217: + self.indentation('indent') + + # if Shift+Tab remove indent + elif key == 16777218: + self.indentation('unindent') + + # if BackSpace try to snap to previous indent level + elif key == 16777219: + if not self.unindentBackspace(): + QtWidgets.QPlainTextEdit.keyPressEvent(self, event) + else: + # COOL BEHAVIORS SIMILAR TO SUBLIME GO NEXT! 
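                # For reference, the raw key codes used in this handler
                # correspond to these Qt.Key enum values:
                #   16777217 = Qt.Key_Tab        16777218 = Qt.Key_Backtab
                #   16777219 = Qt.Key_Backspace  16777220 = Qt.Key_Return
                #   16777221 = Qt.Key_Enter      16777235 = Qt.Key_Up
                #   16777237 = Qt.Key_Down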
+ cursor = self.textCursor() + cpos = cursor.position() + apos = cursor.anchor() + text_before_cursor = self.toPlainText()[:min(cpos, apos)] + text_after_cursor = self.toPlainText()[max(cpos, apos):] + text_all = self.toPlainText() + to_line_start = text_before_cursor[::-1].find("\n") + if to_line_start == -1: + # Position of the start of the line that includes the cursor selection start + linestart_pos = 0 + else: + linestart_pos = len(text_before_cursor) - to_line_start + + to_line_end = text_after_cursor.find("\n") + if to_line_end == -1: + # Position of the end of the line that includes the cursor selection end + lineend_pos = len(text_all) + else: + lineend_pos = max(cpos, apos) + to_line_end + + text_before_lines = text_all[:linestart_pos] + text_after_lines = text_all[lineend_pos:] + if len(text_after_lines) and text_after_lines.startswith("\n"): + text_after_lines = text_after_lines[1:] + text_lines = text_all[linestart_pos:lineend_pos] + + if cursor.hasSelection(): + selection = cursor.selection().toPlainText() + else: + selection = "" + if key == Qt.Key_ParenLeft and (len(selection) > 0 or re.match(r"[\s)}\];]+", text_after_cursor) or not len(text_after_cursor)): # ( + cursor.insertText("(" + selection + ")") + cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor) + cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor) + self.setTextCursor(cursor) + # ) + elif key == Qt.Key_ParenRight and text_after_cursor.startswith(")"): + cursor.movePosition(QtGui.QTextCursor.NextCharacter) + self.setTextCursor(cursor) + elif key == Qt.Key_BracketLeft and (len(selection) > 0 or re.match(r"[\s)}\];]+", text_after_cursor) or not len(text_after_cursor)): # [ + cursor.insertText("[" + selection + "]") + cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor) + cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor) + self.setTextCursor(cursor) + # ] + elif key in [Qt.Key_BracketRight, 43] and text_after_cursor.startswith("]"): + cursor.movePosition(QtGui.QTextCursor.NextCharacter) + self.setTextCursor(cursor) + elif key == Qt.Key_BraceLeft and (len(selection) > 0 or re.match(r"[\s)}\];]+", text_after_cursor) or not len(text_after_cursor)): # { + cursor.insertText("{" + selection + "}") + cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor) + cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor) + self.setTextCursor(cursor) + # } + elif key in [199, Qt.Key_BraceRight] and text_after_cursor.startswith("}"): + cursor.movePosition(QtGui.QTextCursor.NextCharacter) + self.setTextCursor(cursor) + elif key == 34: # " + if len(selection) > 0: + cursor.insertText('"' + selection + '"') + cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor) + cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor) + # and not re.search(r"(?:[\s)\]]+|$)",text_before_cursor): + elif text_after_cursor.startswith('"') and '"' in text_before_cursor.split("\n")[-1]: + cursor.movePosition(QtGui.QTextCursor.NextCharacter) + # If chars after cursor, act normal + elif not re.match(r"(?:[\s)\]]+|$)", text_after_cursor): + QtWidgets.QPlainTextEdit.keyPressEvent(self, event) + # If chars before cursor, act normal + elif not re.search(r"[\s.({\[,]$", text_before_cursor) and text_before_cursor != "": + QtWidgets.QPlainTextEdit.keyPressEvent(self, event) + else: + cursor.insertText('"' + selection + '"') + cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor) + cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor) + self.setTextCursor(cursor) + elif key == 39: # ' + if len(selection) > 0: + 
                cursor.insertText("'" + selection + "'")
+                cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor)
+                cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor)
+            # and not re.search(r"(?:[\s)\]]+|$)",text_before_cursor):
+            elif text_after_cursor.startswith("'") and "'" in text_before_cursor.split("\n")[-1]:
+                cursor.movePosition(QtGui.QTextCursor.NextCharacter)
+            # If chars after cursor, act normal
+            elif not re.match(r"(?:[\s)\]]+|$)", text_after_cursor):
+                QtWidgets.QPlainTextEdit.keyPressEvent(self, event)
+            # If chars before cursor, act normal
+            elif not re.search(r"[\s.({\[,]$", text_before_cursor) and text_before_cursor != "":
+                QtWidgets.QPlainTextEdit.keyPressEvent(self, event)
+            else:
+                cursor.insertText("'" + selection + "'")
+                cursor.setPosition(apos + 1, QtGui.QTextCursor.MoveAnchor)
+                cursor.setPosition(cpos + 1, QtGui.QTextCursor.KeepAnchor)
+            self.setTextCursor(cursor)
+        elif key == 35 and len(selection):  # (yes, a hash)
+            # Toggle comments on the selection: un-comment if every line is
+            # already commented, otherwise prepend a hash to each line.
+            if selection != "":
+                selection_split = selection.split("\n")
+                if all(i.startswith("#") for i in selection_split):
+                    selection_commented = "\n".join(
+                        [s[1:] for s in selection_split])  # Uncommented
+                else:
+                    selection_commented = "#" + "\n#".join(selection_split)
+                cursor.insertText(selection_commented)
+                if apos > cpos:
+                    cursor.setPosition(
+                        apos + len(selection_commented) - len(selection), QtGui.QTextCursor.MoveAnchor)
+                    cursor.setPosition(cpos, QtGui.QTextCursor.KeepAnchor)
+                else:
+                    cursor.setPosition(apos, QtGui.QTextCursor.MoveAnchor)
+                    cursor.setPosition(
+                        cpos + len(selection_commented) - len(selection), QtGui.QTextCursor.KeepAnchor)
+                self.setTextCursor(cursor)
+
+        elif key == 68 and ctrl and shift:  # Ctrl+Shift+D, to duplicate text or line/s
+
+            if not len(selection):
+                self.setPlainText(
+                    text_before_lines + text_lines + "\n" + text_lines + "\n" + text_after_lines)
+                cursor.setPosition(
+                    apos + len(text_lines) + 1, QtGui.QTextCursor.MoveAnchor)
+                cursor.setPosition(
+                    cpos + len(text_lines) + 1, QtGui.QTextCursor.KeepAnchor)
+                self.setTextCursor(cursor)
+                self.verticalScrollBar().setValue(pre_scroll)
+                self.scrollToCursor()
+            else:
+                if text_before_cursor.endswith("\n") and not selection.startswith("\n"):
+                    cursor.insertText(selection + "\n" + selection)
+                    cursor.setPosition(
+                        apos + len(selection) + 1, QtGui.QTextCursor.MoveAnchor)
+                    cursor.setPosition(
+                        cpos + len(selection) + 1, QtGui.QTextCursor.KeepAnchor)
+                else:
+                    cursor.insertText(selection + selection)
+                    cursor.setPosition(
+                        apos + len(selection), QtGui.QTextCursor.MoveAnchor)
+                    cursor.setPosition(
+                        cpos + len(selection), QtGui.QTextCursor.KeepAnchor)
+                self.setTextCursor(cursor)
+
+        # Ctrl+Shift+Up, to move the selected line/s up
+        elif key == up_arrow and ctrl and shift and len(text_before_lines):
+            prev_line_start_distance = text_before_lines[:-1][::-1].find(
+                "\n")
+            if prev_line_start_distance == -1:
+                prev_line_start_pos = 0  # Position of the start of the previous line
+            else:
+                prev_line_start_pos = len(
+                    text_before_lines) - 1 - prev_line_start_distance
+            prev_line = text_before_lines[prev_line_start_pos:]
+
+            text_before_prev_line = text_before_lines[:prev_line_start_pos]
+
+            if prev_line.endswith("\n"):
+                prev_line = prev_line[:-1]
+
+            if len(text_after_lines):
+                text_after_lines = "\n" + text_after_lines
+
+            self.setPlainText(
+                text_before_prev_line + text_lines + "\n" + prev_line + text_after_lines)
+            cursor.setPosition(apos - len(prev_line) - 1,
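The hash branch above is a comment toggle on the selection. The same logic as a pure-string sketch that can be tested standalone:

    def toggle_comments(selection):
        """Comment the selected lines, or uncomment them if all are commented."""
        lines = selection.split("\n")
        if all(line.startswith("#") for line in lines):
            return "\n".join(line[1:] for line in lines)  # strip one hash
        return "#" + "\n#".join(lines)                    # prepend a hash

    assert toggle_comments("a\nb") == "#a\n#b"
    assert toggle_comments("#a\n#b") == "a\nb"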
QtGui.QTextCursor.MoveAnchor)
+            cursor.setPosition(cpos - len(prev_line) - 1,
+                               QtGui.QTextCursor.KeepAnchor)
+            self.setTextCursor(cursor)
+            self.verticalScrollBar().setValue(pre_scroll)
+            self.scrollToCursor()
+            return
+
+        elif key == down_arrow and ctrl and shift:  # Ctrl+Shift+Down, to move the selected line/s down
+            if not len(text_after_lines):
+                text_after_lines = ""
+            next_line_end_distance = text_after_lines.find("\n")
+            if next_line_end_distance == -1:
+                next_line_end_pos = len(text_all)
+            else:
+                next_line_end_pos = next_line_end_distance
+            next_line = text_after_lines[:next_line_end_pos]
+            text_after_next_line = text_after_lines[next_line_end_pos:]
+
+            self.setPlainText(text_before_lines + next_line +
+                              "\n" + text_lines + text_after_next_line)
+            cursor.setPosition(apos + len(next_line) + 1,
+                               QtGui.QTextCursor.MoveAnchor)
+            cursor.setPosition(cpos + len(next_line) + 1,
+                               QtGui.QTextCursor.KeepAnchor)
+            self.setTextCursor(cursor)
+            self.verticalScrollBar().setValue(pre_scroll)
+            self.scrollToCursor()
+            return
+
+        # If up key on the first line, jump to the start of the document
+        elif key == up_arrow and not len(text_before_lines):
+            if not shift:
+                cursor.setPosition(0, QtGui.QTextCursor.MoveAnchor)
+                self.setTextCursor(cursor)
+            else:
+                cursor.setPosition(0, QtGui.QTextCursor.KeepAnchor)
+                self.setTextCursor(cursor)
+
+        # If down key on the last line, jump to the end of the document
+        elif key == down_arrow and not len(text_after_lines):
+            if not shift:
+                cursor.setPosition(
+                    len(text_all), QtGui.QTextCursor.MoveAnchor)
+                self.setTextCursor(cursor)
+            else:
+                cursor.setPosition(
+                    len(text_all), QtGui.QTextCursor.KeepAnchor)
+                self.setTextCursor(cursor)
+
+        # if enter or return, match indent level
+        elif key in [16777220, 16777221]:
+            self.indentNewLine()
+        else:
+            QtWidgets.QPlainTextEdit.keyPressEvent(self, event)
+
+        self.scrollToCursor()
+
+    def scrollToCursor(self):
+        self.cursor = self.textCursor()
+        # Does nothing, but makes the scroll go to the right place...
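The Ctrl+Shift+Up/Down branches above are plain string surgery on the three segments computed earlier (the text before, inside, and after the selected lines). A pure-string sketch of the "move up" case, under the same assumptions (`before` ends with a newline, `lines` has no trailing newline):

    def move_lines_up(before, lines, after):
        """Swap the selected block of lines with the single line above it."""
        prev_start = before[:-1].rfind("\n") + 1  # start of the previous line
        prev_line = before[prev_start:].rstrip("\n")
        head = before[:prev_start]
        tail = ("\n" + after) if after else ""
        return head + lines + "\n" + prev_line + tail

    assert move_lines_up("a\nb\n", "c", "d") == "a\nc\nb\nd"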
+ self.cursor.movePosition(QtGui.QTextCursor.NoMove) + self.setTextCursor(self.cursor) + + def getCursorInfo(self): + + self.cursor = self.textCursor() + + self.firstChar = self.cursor.selectionStart() + self.lastChar = self.cursor.selectionEnd() + + self.noSelection = False + if self.firstChar == self.lastChar: + self.noSelection = True + + self.originalPosition = self.cursor.position() + self.cursorBlockPos = self.cursor.positionInBlock() + + def unindentBackspace(self): + ''' + #snap to previous indent level + ''' + self.getCursorInfo() + + if not self.noSelection or self.cursorBlockPos == 0: + return False + + # check text in front of cursor + textInFront = self.document().findBlock( + self.firstChar).text()[:self.cursorBlockPos] + + # check whether solely spaces + if textInFront != ' ' * self.cursorBlockPos: + return False + + # snap to previous indent level + spaces = len(textInFront) + for space in range(spaces - ((spaces - 1) / self.tabSpaces) * self.tabSpaces - 1): + self.cursor.deletePreviousChar() + + def indentNewLine(self): + + # in case selection covers multiple line, make it one line first + self.insertPlainText('') + + self.getCursorInfo() + + # check how many spaces after cursor + text = self.document().findBlock(self.firstChar).text() + + textInFront = text[:self.cursorBlockPos] + + if len(textInFront) == 0: + self.insertPlainText('\n') + return + + indentLevel = 0 + for i in textInFront: + if i == ' ': + indentLevel += 1 + else: + break + + indentLevel /= self.tabSpaces + + # find out whether textInFront's last character was a ':' + # if that's the case add another indent. + # ignore any spaces at the end, however also + # make sure textInFront is not just an indent + if textInFront.count(' ') != len(textInFront): + while textInFront[-1] == ' ': + textInFront = textInFront[:-1] + + if textInFront[-1] == ':': + indentLevel += 1 + + # new line + self.insertPlainText('\n') + # match indent + self.insertPlainText(' ' * (self.tabSpaces * indentLevel)) + + def indentation(self, mode): + + pre_scroll = self.verticalScrollBar().value() + self.getCursorInfo() + + # if nothing is selected and mode is set to indent, simply insert as many + # space as needed to reach the next indentation level. 
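indentNewLine above copies the previous line's leading spaces, quantized to the tab width, and opens one extra level after a trailing colon. The same rule as a standalone sketch (`new_line_indent` is a hypothetical helper):

    def new_line_indent(prev_line, tab_spaces=4):
        """Return the text to insert on Enter: newline plus matched indent."""
        spaces = len(prev_line) - len(prev_line.lstrip(" "))
        level = spaces // tab_spaces
        if prev_line.strip() and prev_line.rstrip(" ").endswith(":"):
            level += 1  # open a new block after e.g. "if x:" or "def f():"
        return "\n" + " " * (tab_spaces * level)

    assert new_line_indent("    if x:") == "\n" + " " * 8
    assert new_line_indent("    x = 1") == "\n" + " " * 4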
+        if self.noSelection and mode == 'indent':
+
+            remainingSpaces = self.tabSpaces - \
+                (self.cursorBlockPos % self.tabSpaces)
+            self.insertPlainText(' ' * remainingSpaces)
+            return
+
+        selectedBlocks = self.findBlocks(self.firstChar, self.lastChar)
+        beforeBlocks = self.findBlocks(
+            last=self.firstChar - 1, exclude=selectedBlocks)
+        afterBlocks = self.findBlocks(
+            first=self.lastChar + 1, exclude=selectedBlocks)
+
+        beforeBlocksText = self.blocks2list(beforeBlocks)
+        selectedBlocksText = self.blocks2list(selectedBlocks, mode)
+        afterBlocksText = self.blocks2list(afterBlocks)
+
+        combinedText = '\n'.join(
+            beforeBlocksText + selectedBlocksText + afterBlocksText)
+
+        # make sure the line count stays the same
+        originalBlockCount = len(self.toPlainText().split('\n'))
+        combinedText = '\n'.join(combinedText.split('\n')[:originalBlockCount])
+
+        self.clear()
+        self.setPlainText(combinedText)
+
+        if self.noSelection:
+            self.cursor.setPosition(self.lastChar)
+
+        # check whether the original selection was from top to bottom or vice versa
+        else:
+            if self.originalPosition == self.firstChar:
+                first = self.lastChar
+                last = self.firstChar
+                firstBlockSnap = QtGui.QTextCursor.EndOfBlock
+                lastBlockSnap = QtGui.QTextCursor.StartOfBlock
+            else:
+                first = self.firstChar
+                last = self.lastChar
+                firstBlockSnap = QtGui.QTextCursor.StartOfBlock
+                lastBlockSnap = QtGui.QTextCursor.EndOfBlock
+
+            self.cursor.setPosition(first)
+            self.cursor.movePosition(
+                firstBlockSnap, QtGui.QTextCursor.MoveAnchor)
+            self.cursor.setPosition(last, QtGui.QTextCursor.KeepAnchor)
+            self.cursor.movePosition(
+                lastBlockSnap, QtGui.QTextCursor.KeepAnchor)
+
+        self.setTextCursor(self.cursor)
+        self.verticalScrollBar().setValue(pre_scroll)
+
+    def findBlocks(self, first=0, last=None, exclude=[]):
+        blocks = []
+        if last is None:
+            last = self.document().characterCount()
+        for pos in range(first, last + 1):
+            block = self.document().findBlock(pos)
+            if block not in blocks and block not in exclude:
+                blocks.append(block)
+        return blocks
+
+    def blocks2list(self, blocks, mode=None):
+        text = []
+        for block in blocks:
+            blockText = block.text()
+            if mode == 'unindent':
+                if blockText.startswith(' ' * self.tabSpaces):
+                    blockText = blockText[self.tabSpaces:]
+                    self.lastChar -= self.tabSpaces
+                elif blockText.startswith('\t'):
+                    blockText = blockText[1:]
+                    self.lastChar -= 1
+
+            elif mode == 'indent':
+                blockText = ' ' * self.tabSpaces + blockText
+                self.lastChar += self.tabSpaces
+
+            text.append(blockText)
+
+        return text
+
+    def highlightCurrentLine(self):
+        '''
+        Highlight currently selected line
+        '''
+        extraSelections = []
+
+        selection = QtWidgets.QTextEdit.ExtraSelection()
+
+        lineColor = QtGui.QColor(62, 62, 62, 255)
+
+        selection.format.setBackground(lineColor)
+        selection.format.setProperty(
+            QtGui.QTextFormat.FullWidthSelection, True)
+        selection.cursor = self.textCursor()
+        selection.cursor.clearSelection()
+
+        extraSelections.append(selection)
+
+        self.setExtraSelections(extraSelections)
+        self.scrollToCursor()
+
+    def format(self, rgb, style=''):
+        '''
+        Return a QtGui.QTextCharFormat with the given attributes.
+ ''' + color = QtGui.QColor(*rgb) + textFormat = QtGui.QTextCharFormat() + textFormat.setForeground(color) + + if 'bold' in style: + textFormat.setFontWeight(QtGui.QFont.Bold) + if 'italic' in style: + textFormat.setFontItalic(True) + if 'underline' in style: + textFormat.setUnderlineStyle(QtGui.QTextCharFormat.SingleUnderline) + + return textFormat + + +class KSLineNumberArea(QtWidgets.QWidget): + def __init__(self, scriptEditor): + super(KSLineNumberArea, self).__init__(scriptEditor) + + self.scriptEditor = scriptEditor + self.setStyleSheet("text-align: center;") + + def paintEvent(self, event): + self.scriptEditor.lineNumberAreaPaintEvent(event) + return + + +class KSScriptEditorHighlighter(QtGui.QSyntaxHighlighter): + ''' + This is also adapted from an original version by Wouter Gilsing. His comments: + + Modified, simplified version of some code found I found when researching: + wiki.python.org/moin/PyQt/Python%20syntax%20highlighting + They did an awesome job, so credits to them. I only needed to make some + modifications to make it fit my needs. + ''' + + def __init__(self, document, parent=None): + + super(KSScriptEditorHighlighter, self).__init__(document) + self.knobScripter = parent + self.script_editor = self.knobScripter.script_editor + self.selected_text = "" + self.selected_text_prev = "" + self.rules_sublime = "" + + self.styles = { + 'keyword': self.format([238, 117, 181], 'bold'), + 'string': self.format([242, 136, 135]), + 'comment': self.format([143, 221, 144]), + 'numbers': self.format([174, 129, 255]), + 'custom': self.format([255, 170, 0], 'italic'), + 'selected': self.format([255, 255, 255], 'bold underline'), + 'underline': self.format([240, 240, 240], 'underline'), + } + + self.keywords = [ + 'and', 'assert', 'break', 'class', 'continue', 'def', + 'del', 'elif', 'else', 'except', 'exec', 'finally', + 'for', 'from', 'global', 'if', 'import', 'in', + 'is', 'lambda', 'not', 'or', 'pass', 'print', + 'raise', 'return', 'try', 'while', 'yield', 'with', 'as' + ] + + self.operatorKeywords = [ + '=', '==', '!=', '<', '<=', '>', '>=', + '\+', '-', '\*', '/', '//', '\%', '\*\*', + '\+=', '-=', '\*=', '/=', '\%=', + '\^', '\|', '\&', '\~', '>>', '<<' + ] + + self.variableKeywords = ['int', 'str', + 'float', 'bool', 'list', 'dict', 'set'] + + self.numbers = ['True', 'False', 'None'] + self.loadAltStyles() + + self.tri_single = (QtCore.QRegExp("'''"), 1, self.styles['comment']) + self.tri_double = (QtCore.QRegExp('"""'), 2, self.styles['comment']) + + # rules + rules = [] + + rules += [(r'\b%s\b' % i, 0, self.styles['keyword']) + for i in self.keywords] + rules += [(i, 0, self.styles['keyword']) + for i in self.operatorKeywords] + rules += [(r'\b%s\b' % i, 0, self.styles['numbers']) + for i in self.numbers] + + rules += [ + + # integers + (r'\b[0-9]+\b', 0, self.styles['numbers']), + # Double-quoted string, possibly containing escape sequences + (r'"[^"\\]*(\\.[^"\\]*)*"', 0, self.styles['string']), + # Single-quoted string, possibly containing escape sequences + (r"'[^'\\]*(\\.[^'\\]*)*'", 0, self.styles['string']), + # From '#' until a newline + (r'#[^\n]*', 0, self.styles['comment']), + ] + + # Build a QRegExp for each pattern + self.rules_nuke = [(QtCore.QRegExp(pat), index, fmt) + for (pat, index, fmt) in rules] + self.rules = self.rules_nuke + + def loadAltStyles(self): + ''' Loads other color styles apart from Nuke's default. 
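Each entry in the rule lists above is a `(pattern, nth, format)` triple that `highlightBlock` (further down) walks across every line. A tiny sketch of how one rule matches, assuming the same QRegExp API used here (PySide2's `QtCore.QRegExp`):

    from PySide2 import QtCore

    expression = QtCore.QRegExp(r"\b%s\b" % "def")
    index = expression.indexIn("def foo():", 0)  # position of the match -> 0
    length = len(expression.cap(0))              # matched text "def" -> 3
    assert (index, length) == (0, 3)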
''' + self.styles_sublime = { + 'base': self.format([255, 255, 255]), + 'keyword': self.format([237, 36, 110]), + 'string': self.format([237, 229, 122]), + 'comment': self.format([125, 125, 125]), + 'numbers': self.format([165, 120, 255]), + 'functions': self.format([184, 237, 54]), + 'blue': self.format([130, 226, 255], 'italic'), + 'arguments': self.format([255, 170, 10], 'italic'), + 'custom': self.format([200, 200, 200], 'italic'), + 'underline': self.format([240, 240, 240], 'underline'), + 'selected': self.format([255, 255, 255], 'bold underline'), + } + + self.keywords_sublime = [ + 'and', 'assert', 'break', 'continue', + 'del', 'elif', 'else', 'except', 'exec', 'finally', + 'for', 'from', 'global', 'if', 'import', 'in', + 'is', 'lambda', 'not', 'or', 'pass', 'print', + 'raise', 'return', 'try', 'while', 'yield', 'with', 'as' + ] + self.operatorKeywords_sublime = [ + '=', '==', '!=', '<', '<=', '>', '>=', + '\+', '-', '\*', '/', '//', '\%', '\*\*', + '\+=', '-=', '\*=', '/=', '\%=', + '\^', '\|', '\&', '\~', '>>', '<<' + ] + + self.baseKeywords_sublime = [ + ',', + ] + + self.customKeywords_sublime = [ + 'nuke', + ] + + self.blueKeywords_sublime = [ + 'def', 'class', 'int', 'str', 'float', 'bool', 'list', 'dict', 'set' + ] + + self.argKeywords_sublime = [ + 'self', + ] + + self.tri_single_sublime = (QtCore.QRegExp( + "'''"), 1, self.styles_sublime['comment']) + self.tri_double_sublime = (QtCore.QRegExp( + '"""'), 2, self.styles_sublime['comment']) + self.numbers_sublime = ['True', 'False', 'None'] + + # rules + + rules = [] + # First turn everything inside parentheses orange + rules += [(r"def [\w]+[\s]*\((.*)\)", 1, + self.styles_sublime['arguments'])] + # Now restore unwanted stuff... + rules += [(i, 0, self.styles_sublime['base']) + for i in self.baseKeywords_sublime] + rules += [(r"[^\(\w),.][\s]*[\w]+", 0, self.styles_sublime['base'])] + + # Everything else + rules += [(r'\b%s\b' % i, 0, self.styles_sublime['keyword']) + for i in self.keywords_sublime] + rules += [(i, 0, self.styles_sublime['keyword']) + for i in self.operatorKeywords_sublime] + rules += [(i, 0, self.styles_sublime['custom']) + for i in self.customKeywords_sublime] + rules += [(r'\b%s\b' % i, 0, self.styles_sublime['blue']) + for i in self.blueKeywords_sublime] + rules += [(i, 0, self.styles_sublime['arguments']) + for i in self.argKeywords_sublime] + rules += [(r'\b%s\b' % i, 0, self.styles_sublime['numbers']) + for i in self.numbers_sublime] + + rules += [ + + # integers + (r'\b[0-9]+\b', 0, self.styles_sublime['numbers']), + # Double-quoted string, possibly containing escape sequences + (r'"[^"\\]*(\\.[^"\\]*)*"', 0, self.styles_sublime['string']), + # Single-quoted string, possibly containing escape sequences + (r"'[^'\\]*(\\.[^'\\]*)*'", 0, self.styles_sublime['string']), + # From '#' until a newline + (r'#[^\n]*', 0, self.styles_sublime['comment']), + # Function definitions + (r"def[\s]+([\w\.]+)", 1, self.styles_sublime['functions']), + # Class definitions + (r"class[\s]+([\w\.]+)", 1, self.styles_sublime['functions']), + # Class argument (which is also a class so must be green) + (r"class[\s]+[\w\.]+[\s]*\((.*)\)", + 1, self.styles_sublime['functions']), + # Function arguments also pick their style... 
+ (r"def[\s]+[\w]+[\s]*\(([\w]+)", 1, + self.styles_sublime['arguments']), + ] + + # Build a QRegExp for each pattern + self.rules_sublime = [(QtCore.QRegExp(pat), index, fmt) + for (pat, index, fmt) in rules] + + def format(self, rgb, style=''): + ''' + Return a QtWidgets.QTextCharFormat with the given attributes. + ''' + + color = QtGui.QColor(*rgb) + textFormat = QtGui.QTextCharFormat() + textFormat.setForeground(color) + + if 'bold' in style: + textFormat.setFontWeight(QtGui.QFont.Bold) + if 'italic' in style: + textFormat.setFontItalic(True) + if 'underline' in style: + textFormat.setUnderlineStyle(QtGui.QTextCharFormat.SingleUnderline) + + return textFormat + + def highlightBlock(self, text): + ''' + Apply syntax highlighting to the given block of text. + ''' + # Do other syntax formatting + + if self.knobScripter.color_scheme: + self.color_scheme = self.knobScripter.color_scheme + else: + self.color_scheme = "nuke" + + if self.color_scheme == "nuke": + self.rules = self.rules_nuke + elif self.color_scheme == "sublime": + self.rules = self.rules_sublime + + for expression, nth, format in self.rules: + index = expression.indexIn(text, 0) + + while index >= 0: + # We actually want the index of the nth match + index = expression.pos(nth) + length = len(expression.cap(nth)) + self.setFormat(index, length, format) + index = expression.indexIn(text, index + length) + + self.setCurrentBlockState(0) + + # Multi-line strings etc. based on selected scheme + if self.color_scheme == "nuke": + in_multiline = self.match_multiline(text, *self.tri_single) + if not in_multiline: + in_multiline = self.match_multiline(text, *self.tri_double) + elif self.color_scheme == "sublime": + in_multiline = self.match_multiline(text, *self.tri_single_sublime) + if not in_multiline: + in_multiline = self.match_multiline( + text, *self.tri_double_sublime) + + # TODO if there's a selection, highlight same occurrences in the full document. If no selection but something highlighted, unhighlight full document. (do it thru regex or sth) + + def match_multiline(self, text, delimiter, in_state, style): + ''' + Check whether highlighting requires multiple lines. + ''' + # If inside triple-single quotes, start at 0 + if self.previousBlockState() == in_state: + start = 0 + add = 0 + # Otherwise, look for the delimiter on this line + else: + start = delimiter.indexIn(text) + # Move past this match + add = delimiter.matchedLength() + + # As long as there's a delimiter match on this line... + while start >= 0: + # Look for the ending delimiter + end = delimiter.indexIn(text, start + add) + # Ending delimiter on this line? 
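QSyntaxHighlighter only ever sees one line at a time, so match_multiline above tracks triple-quoted strings by stamping each block with a state (1 for ''' and 2 for triple double quotes) that the following line reads back through previousBlockState(). The same bookkeeping as a pure-python sketch:

    def multiline_states(lines, delim="'''"):
        """Return, per line, whether the NEXT line starts inside the string."""
        inside = False
        states = []
        for line in lines:
            if line.count(delim) % 2:  # an unmatched delimiter toggles the state
                inside = not inside
            states.append(inside)
        return states

    assert multiline_states(
        ["x = 1", "s = '''", "still text", "'''"]
    ) == [False, True, True, False]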
+            if end >= add:
+                length = end - start + add + delimiter.matchedLength()
+                self.setCurrentBlockState(0)
+            # No; multi-line string
+            else:
+                self.setCurrentBlockState(in_state)
+                length = len(text) - start + add
+            # Apply formatting
+            self.setFormat(start, length, style)
+            # Look for the next match
+            start = delimiter.indexIn(text, start + length)
+
+        # Return True if still inside a multi-line string, False otherwise
+        if self.currentBlockState() == in_state:
+            return True
+        else:
+            return False
+
+# --------------------------------------------------------------------------------------
+# Script Output Widget
+# The output logger works the same way as Nuke's python script editor output window
+# --------------------------------------------------------------------------------------
+
+
+class ScriptOutputWidget(QtWidgets.QTextEdit):
+    def __init__(self, parent=None):
+        super(ScriptOutputWidget, self).__init__(parent)
+        self.knobScripter = parent
+        self.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
+                           QtWidgets.QSizePolicy.Expanding)
+        self.setMinimumHeight(20)
+
+    def keyPressEvent(self, event):
+        ctrl = bool(event.modifiers() & Qt.ControlModifier)
+        alt = bool(event.modifiers() & Qt.AltModifier)
+        shift = bool(event.modifiers() & Qt.ShiftModifier)
+        key = event.key()
+        if type(event) == QtGui.QKeyEvent:
+            # print event.key()
+            if key in [32]:  # Space
+                return KnobScripter.keyPressEvent(self.knobScripter, event)
+            elif key in [Qt.Key_Backspace, Qt.Key_Delete]:
+                self.knobScripter.clearConsole()
+        return QtWidgets.QTextEdit.keyPressEvent(self, event)
+
+    # def mousePressEvent(self, QMouseEvent):
+    #     if QMouseEvent.button() == Qt.RightButton:
+    #         self.knobScripter.clearConsole()
+    #     QtWidgets.QTextEdit.mousePressEvent(self, QMouseEvent)
+
+# ---------------------------------------------------------------------
+# Modified KnobScripterTextEdit to include snippets etc.
+# ---------------------------------------------------------------------
+
+
+class KnobScripterTextEditMain(KnobScripterTextEdit):
+    def __init__(self, knobScripter, output=None, parent=None):
+        super(KnobScripterTextEditMain, self).__init__(knobScripter)
+        self.knobScripter = knobScripter
+        self.script_output = output
+        self.nukeCompleter = None
+        self.currentNukeCompletion = None
+
+        ########
+        # FROM NUKE's SCRIPT EDITOR START
+        ########
+        self.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
+                           QtWidgets.QSizePolicy.Expanding)
+
+        # Setup completer
+        self.nukeCompleter = QtWidgets.QCompleter(self)
+        self.nukeCompleter.setWidget(self)
+        self.nukeCompleter.setCompletionMode(
+            QtWidgets.QCompleter.UnfilteredPopupCompletion)
+        self.nukeCompleter.setCaseSensitivity(Qt.CaseSensitive)
+        try:
+            self.nukeCompleter.setModel(QtGui.QStringListModel())
+        except:
+            self.nukeCompleter.setModel(QtCore.QStringListModel())
+
+        self.nukeCompleter.activated.connect(self.insertNukeCompletion)
+        self.nukeCompleter.highlighted.connect(self.completerHighlightChanged)
+        ########
+        # FROM NUKE's SCRIPT EDITOR END
+        ########
+
+    def findLongestEndingMatch(self, text, dic):
+        '''
+        If the text ends with a key in the dictionary, it returns the key and value.
+        If there are several matches, returns the longest one.
+        False if no matches.
+        '''
+        longest = 0  # len of longest match
+        match_key = None
+        match_snippet = ""
+        for key, val in dic.items():
+            #match = re.search(r"[\s\.({\[,;=+-]"+key+r"(?:[\s)\]\"]+|$)",text)
+            match = re.search(r"[\s\.({\[,;=+-]" + key + r"$", text)
+            if match or text == key:
+                if len(key) > longest:
+                    longest = len(key)
+                    match_key = key
+                    match_snippet = val
+        if match_key is None:
+            return False
+        return match_key, match_snippet
+
+    def placeholderToEnd(self, text, placeholder):
+        '''Returns distance (int) from the first occurrence of the placeholder, to the end of the string with placeholders removed'''
+        search = re.search(placeholder, text)
+        if not search:
+            return -1
+        from_start = search.start()
+        total = len(re.sub(placeholder, "", text))
+        to_end = total - from_start
+        return to_end
+
+    def addSnippetText(self, snippet_text):
+        ''' Adds the selected text as a snippet (taking care of $$, $name$ etc) to the script editor '''
+        cursor_placeholder_find = r"(?<!\\)(\$\$)"  # Matches $$
+        text = snippet_text
+        placeholder_to_end = self.placeholderToEnd(
+            text, cursor_placeholder_find)
+        cursor_len = 0
+        positions = [m.start()
+                     for m in re.finditer(cursor_placeholder_find, text)]
+        if len(positions) > 1:
+            cursor_len = positions[1] - positions[0] - 2
+
+        text = re.sub(cursor_placeholder_find, "", text)
+        self.cursor.insertText(text)
+        if placeholder_to_end >= 0:
+            for i in range(placeholder_to_end):
+                self.cursor.movePosition(QtGui.QTextCursor.PreviousCharacter)
+            for i in range(cursor_len):
+                self.cursor.movePosition(
+                    QtGui.QTextCursor.NextCharacter, QtGui.QTextCursor.KeepAnchor)
+            self.setTextCursor(self.cursor)
+
+    def keyPressEvent(self, event):
+
+        ctrl = bool(event.modifiers() & Qt.ControlModifier)
+        alt = bool(event.modifiers() & Qt.AltModifier)
+        shift = bool(event.modifiers() & Qt.ShiftModifier)
+        key = event.key()
+
+        # ADAPTED FROM NUKE's SCRIPT EDITOR:
+        # Get completer state
+        self.nukeCompleterShowing = self.nukeCompleter.popup().isVisible()
+
+        # BEFORE ANYTHING ELSE, IF SPECIAL MODIFIERS SIMPLY IGNORE THE REST
+        if not self.nukeCompleterShowing and (ctrl or shift or alt):
+            # Bypassed!
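addSnippetText above turns a `$$` into the landing spot for the cursor, and a pair of `$$` into a ready-made selection. A worked example of the arithmetic, using the `$$` pattern from above on a made-up snippet:

    import re

    pattern = r"(?<!\\)(\$\$)"
    snippet = "for $$i$$ in range(10):"
    positions = [m.start() for m in re.finditer(pattern, snippet)]
    cursor_len = positions[1] - positions[0] - 2   # 1 char ("i") gets selected
    inserted = re.sub(pattern, "", snippet)
    assert (positions, cursor_len, inserted) == (
        [4, 7], 1, "for i in range(10):")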
+ if key not in [Qt.Key_Return, Qt.Key_Enter, Qt.Key_Tab]: + KnobScripterTextEdit.keyPressEvent(self, event) + return + + # If the completer is showing + if self.nukeCompleterShowing: + tc = self.textCursor() + # If we're hitting enter, do completion + if key in [Qt.Key_Return, Qt.Key_Enter, Qt.Key_Tab]: + if not self.currentNukeCompletion: + self.nukeCompleter.setCurrentRow(0) + self.currentNukeCompletion = self.nukeCompleter.currentCompletion() + # print str(self.nukeCompleter.completionModel[0]) + self.insertNukeCompletion(self.currentNukeCompletion) + self.nukeCompleter.popup().hide() + self.nukeCompleterShowing = False + # If you're hitting right or escape, hide the popup + elif key == Qt.Key_Right or key == Qt.Key_Escape: + self.nukeCompleter.popup().hide() + self.nukeCompleterShowing = False + # If you hit tab, escape or ctrl-space, hide the completer + elif key == Qt.Key_Tab or key == Qt.Key_Escape or (ctrl and key == Qt.Key_Space): + self.currentNukeCompletion = "" + self.nukeCompleter.popup().hide() + self.nukeCompleterShowing = False + # If none of the above, update the completion model + else: + QtWidgets.QPlainTextEdit.keyPressEvent(self, event) + # Edit completion model + colNum = tc.columnNumber() + posNum = tc.position() + inputText = self.toPlainText() + inputTextSplit = inputText.splitlines() + runningLength = 0 + currentLine = None + for line in inputTextSplit: + length = len(line) + runningLength += length + if runningLength >= posNum: + currentLine = line + break + runningLength += 1 + if currentLine: + completionPart = currentLine.split(" ")[-1] + if "(" in completionPart: + completionPart = completionPart.split("(")[-1] + self.completeNukePartUnderCursor(completionPart) + return + + if type(event) == QtGui.QKeyEvent: + if key == Qt.Key_Escape: # Close the knobscripter... + self.knobScripter.close() + elif not ctrl and not alt and not shift and event.key() == Qt.Key_Tab: + self.placeholder = "$$" + # 1. Set the cursor + self.cursor = self.textCursor() + + # 2. Save text before and after + cpos = self.cursor.position() + text_before_cursor = self.toPlainText()[:cpos] + line_before_cursor = text_before_cursor.split('\n')[-1] + text_after_cursor = self.toPlainText()[cpos:] + + # 3. Check coincidences in snippets dicts + try: # Meaning snippet found + match_key, match_snippet = self.findLongestEndingMatch( + line_before_cursor, self.knobScripter.snippets) + for i in range(len(match_key)): + self.cursor.deletePreviousChar() + # This function takes care of adding the appropriate snippet and moving the cursor... + self.addSnippetText(match_snippet) + except: # Meaning snippet not found... 
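Both here and in the tab-completion fallback below, the fragment handed to the completer is simply the last space-separated token of the current line, trimmed to whatever follows the last opening paren. As a standalone sketch:

    def completion_part(line):
        """Extract the token the completer should match, e.g. 'nuke.thisNo'."""
        part = line.split(" ")[-1]
        if "(" in part:
            part = part.split("(")[-1]
        return part

    assert completion_part("node = nuke.toNo") == "nuke.toNo"
    assert completion_part("print(nuke.thisNo") == "nuke.thisNo"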
+ # ADAPTED FROM NUKE's SCRIPT EDITOR: + tc = self.textCursor() + allCode = self.toPlainText() + colNum = tc.columnNumber() + posNum = tc.position() + + # ...and if there's text in the editor + if len(allCode.split()) > 0: + # There is text in the editor + currentLine = tc.block().text() + + # If you're not at the end of the line just add a tab + if colNum < len(currentLine): + # If there isn't a ')' directly to the right of the cursor add a tab + if currentLine[colNum:colNum + 1] != ')': + KnobScripterTextEdit.keyPressEvent(self, event) + return + # Else show the completer + else: + completionPart = currentLine[:colNum].split( + " ")[-1] + if "(" in completionPart: + completionPart = completionPart.split( + "(")[-1] + + self.completeNukePartUnderCursor( + completionPart) + + return + + # If you are at the end of the line, + else: + # If there's nothing to the right of you add a tab + if currentLine[colNum - 1:] == "" or currentLine.endswith(" "): + KnobScripterTextEdit.keyPressEvent(self, event) + return + # Else update completionPart and show the completer + completionPart = currentLine.split(" ")[-1] + if "(" in completionPart: + completionPart = completionPart.split("(")[-1] + + self.completeNukePartUnderCursor(completionPart) + return + + KnobScripterTextEdit.keyPressEvent(self, event) + elif event.key() in [Qt.Key_Enter, Qt.Key_Return]: + modifiers = QtWidgets.QApplication.keyboardModifiers() + if modifiers == QtCore.Qt.ControlModifier: + self.runScript() + else: + KnobScripterTextEdit.keyPressEvent(self, event) + else: + KnobScripterTextEdit.keyPressEvent(self, event) + + def getPyObjects(self, text): + ''' Returns a list containing all the functions, classes and variables found within the selected python text (code) ''' + matches = [] + # 1: Remove text inside triple quotes (leaving the quotes) + text_clean = '""'.join(text.split('"""')[::2]) + text_clean = '""'.join(text_clean.split("'''")[::2]) + + # 2: Remove text inside of quotes (leaving the quotes) except if \" + lines = text_clean.split("\n") + text_clean = "" + for line in lines: + line_clean = '""'.join(line.split('"')[::2]) + line_clean = '""'.join(line_clean.split("'")[::2]) + line_clean = line_clean.split("#")[0] + text_clean += line_clean + "\n" + + # 3. Split into segments (lines plus ";") + segments = re.findall(r"[^\n;]+", text_clean) + + # 4. Go case by case. 
+ for s in segments: + # Declared vars + matches += re.findall(r"([\w\.]+)(?=[,\s\w]*=[^=]+$)", s) + # Def functions and arguments + function = re.findall(r"[\s]*def[\s]+([\w\.]+)[\s]*\([\s]*", s) + if len(function): + matches += function + args = re.split(r"[\s]*def[\s]+([\w\.]+)[\s]*\([\s]*", s) + if len(args) > 1: + args = args[-1] + matches += re.findall( + r"(?adrianpueyo.com, 2016-2019') + kspSignature.setOpenExternalLinks(True) + kspSignature.setStyleSheet('''color:#555;font-size:9px;''') + kspSignature.setAlignment(QtCore.Qt.AlignRight) + + fontLabel = QtWidgets.QLabel("Font:") + self.fontBox = QtWidgets.QFontComboBox() + self.fontBox.setCurrentFont(QtGui.QFont(self.font)) + self.fontBox.currentFontChanged.connect(self.fontChanged) + + fontSizeLabel = QtWidgets.QLabel("Font size:") + self.fontSizeBox = QtWidgets.QSpinBox() + self.fontSizeBox.setValue(self.oldFontSize) + self.fontSizeBox.setMinimum(6) + self.fontSizeBox.setMaximum(100) + self.fontSizeBox.valueChanged.connect(self.fontSizeChanged) + + windowWLabel = QtWidgets.QLabel("Width (px):") + windowWLabel.setToolTip("Default window width in pixels") + self.windowWBox = QtWidgets.QSpinBox() + self.windowWBox.setValue(self.knobScripter.windowDefaultSize[0]) + self.windowWBox.setMinimum(200) + self.windowWBox.setMaximum(4000) + self.windowWBox.setToolTip("Default window width in pixels") + + windowHLabel = QtWidgets.QLabel("Height (px):") + windowHLabel.setToolTip("Default window height in pixels") + self.windowHBox = QtWidgets.QSpinBox() + self.windowHBox.setValue(self.knobScripter.windowDefaultSize[1]) + self.windowHBox.setMinimum(100) + self.windowHBox.setMaximum(2000) + self.windowHBox.setToolTip("Default window height in pixels") + + # TODO: "Grab current dimensions" button + + tabSpaceLabel = QtWidgets.QLabel("Tab spaces:") + tabSpaceLabel.setToolTip("Number of spaces to add with the tab key.") + self.tabSpace2 = QtWidgets.QRadioButton("2") + self.tabSpace4 = QtWidgets.QRadioButton("4") + tabSpaceButtonGroup = QtWidgets.QButtonGroup(self) + tabSpaceButtonGroup.addButton(self.tabSpace2) + tabSpaceButtonGroup.addButton(self.tabSpace4) + self.tabSpace2.setChecked(self.knobScripter.tabSpaces == 2) + self.tabSpace4.setChecked(self.knobScripter.tabSpaces == 4) + + pinDefaultLabel = QtWidgets.QLabel("Always on top:") + pinDefaultLabel.setToolTip("Default mode of the PIN toggle.") + self.pinDefaultOn = QtWidgets.QRadioButton("On") + self.pinDefaultOff = QtWidgets.QRadioButton("Off") + pinDefaultButtonGroup = QtWidgets.QButtonGroup(self) + pinDefaultButtonGroup.addButton(self.pinDefaultOn) + pinDefaultButtonGroup.addButton(self.pinDefaultOff) + self.pinDefaultOn.setChecked(self.knobScripter.pinned == True) + self.pinDefaultOff.setChecked(self.knobScripter.pinned == False) + self.pinDefaultOn.clicked.connect(lambda: self.knobScripter.pin(True)) + self.pinDefaultOff.clicked.connect( + lambda: self.knobScripter.pin(False)) + + colorSchemeLabel = QtWidgets.QLabel("Color scheme:") + colorSchemeLabel.setToolTip("Syntax highlighting text style.") + self.colorSchemeSublime = QtWidgets.QRadioButton("subl") + self.colorSchemeNuke = QtWidgets.QRadioButton("nuke") + colorSchemeButtonGroup = QtWidgets.QButtonGroup(self) + colorSchemeButtonGroup.addButton(self.colorSchemeSublime) + colorSchemeButtonGroup.addButton(self.colorSchemeNuke) + colorSchemeButtonGroup.buttonClicked.connect(self.colorSchemeChanged) + self.colorSchemeSublime.setChecked( + self.knobScripter.color_scheme == "sublime") + self.colorSchemeNuke.setChecked( + 
self.knobScripter.color_scheme == "nuke")
+
+        showLabelsLabel = QtWidgets.QLabel("Show labels:")
+        showLabelsLabel.setToolTip(
+            "Display knob labels on the knob dropdown\nOtherwise, shows the internal name only.")
+        self.showLabelsOn = QtWidgets.QRadioButton("On")
+        self.showLabelsOff = QtWidgets.QRadioButton("Off")
+        showLabelsButtonGroup = QtWidgets.QButtonGroup(self)
+        showLabelsButtonGroup.addButton(self.showLabelsOn)
+        showLabelsButtonGroup.addButton(self.showLabelsOff)
+        self.showLabelsOn.setChecked(self.knobScripter.show_labels == True)
+        self.showLabelsOff.setChecked(self.knobScripter.show_labels == False)
+
+        self.buttonBox = QtWidgets.QDialogButtonBox(
+            QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel)
+        self.buttonBox.accepted.connect(self.savePrefs)
+        self.buttonBox.rejected.connect(self.cancelPrefs)
+
+        # Loaded custom values
+        self.ksPrefs = self.knobScripter.loadPrefs()
+        if self.ksPrefs != []:
+            try:
+                self.fontSizeBox.setValue(self.ksPrefs['font_size'])
+                self.windowWBox.setValue(self.ksPrefs['window_default_w'])
+                self.windowHBox.setValue(self.ksPrefs['window_default_h'])
+                self.tabSpace2.setChecked(self.ksPrefs['tab_spaces'] == 2)
+                self.tabSpace4.setChecked(self.ksPrefs['tab_spaces'] == 4)
+                self.pinDefaultOn.setChecked(self.ksPrefs['pin_default'] == 1)
+                self.pinDefaultOff.setChecked(self.ksPrefs['pin_default'] == 0)
+                self.showLabelsOn.setChecked(self.ksPrefs['show_labels'] == 1)
+                self.showLabelsOff.setChecked(self.ksPrefs['show_labels'] == 0)
+                self.colorSchemeSublime.setChecked(
+                    self.ksPrefs['color_scheme'] == "sublime")
+                self.colorSchemeNuke.setChecked(
+                    self.ksPrefs['color_scheme'] == "nuke")
+            except:
+                pass
+
+        # Layouts
+        font_layout = QtWidgets.QHBoxLayout()
+        font_layout.addWidget(fontLabel)
+        font_layout.addWidget(self.fontBox)
+
+        fontSize_layout = QtWidgets.QHBoxLayout()
+        fontSize_layout.addWidget(fontSizeLabel)
+        fontSize_layout.addWidget(self.fontSizeBox)
+
+        windowW_layout = QtWidgets.QHBoxLayout()
+        windowW_layout.addWidget(windowWLabel)
+        windowW_layout.addWidget(self.windowWBox)
+
+        windowH_layout = QtWidgets.QHBoxLayout()
+        windowH_layout.addWidget(windowHLabel)
+        windowH_layout.addWidget(self.windowHBox)
+
+        tabSpacesButtons_layout = QtWidgets.QHBoxLayout()
+        tabSpacesButtons_layout.addWidget(self.tabSpace2)
+        tabSpacesButtons_layout.addWidget(self.tabSpace4)
+        tabSpaces_layout = QtWidgets.QHBoxLayout()
+        tabSpaces_layout.addWidget(tabSpaceLabel)
+        tabSpaces_layout.addLayout(tabSpacesButtons_layout)
+
+        pinDefaultButtons_layout = QtWidgets.QHBoxLayout()
+        pinDefaultButtons_layout.addWidget(self.pinDefaultOn)
+        pinDefaultButtons_layout.addWidget(self.pinDefaultOff)
+        pinDefault_layout = QtWidgets.QHBoxLayout()
+        pinDefault_layout.addWidget(pinDefaultLabel)
+        pinDefault_layout.addLayout(pinDefaultButtons_layout)
+
+        showLabelsButtons_layout = QtWidgets.QHBoxLayout()
+        showLabelsButtons_layout.addWidget(self.showLabelsOn)
+        showLabelsButtons_layout.addWidget(self.showLabelsOff)
+        showLabels_layout = QtWidgets.QHBoxLayout()
+        showLabels_layout.addWidget(showLabelsLabel)
+        showLabels_layout.addLayout(showLabelsButtons_layout)
+
+        colorSchemeButtons_layout = QtWidgets.QHBoxLayout()
+        colorSchemeButtons_layout.addWidget(self.colorSchemeSublime)
+        colorSchemeButtons_layout.addWidget(self.colorSchemeNuke)
+        colorScheme_layout = QtWidgets.QHBoxLayout()
colorScheme_layout.addWidget(colorSchemeLabel) + colorScheme_layout.addLayout(colorSchemeButtons_layout) + + self.master_layout = QtWidgets.QVBoxLayout() + self.master_layout.addWidget(kspTitle) + self.master_layout.addWidget(kspSignature) + self.master_layout.addWidget(kspLine) + self.master_layout.addLayout(font_layout) + self.master_layout.addLayout(fontSize_layout) + self.master_layout.addLayout(windowW_layout) + self.master_layout.addLayout(windowH_layout) + self.master_layout.addLayout(tabSpaces_layout) + self.master_layout.addLayout(pinDefault_layout) + self.master_layout.addLayout(showLabels_layout) + self.master_layout.addLayout(colorScheme_layout) + self.master_layout.addWidget(self.buttonBox) + self.setLayout(self.master_layout) + self.setFixedSize(self.minimumSize()) + + def savePrefs(self): + self.font = self.fontBox.currentFont().family() + ks_prefs = { + 'font_size': self.fontSizeBox.value(), + 'window_default_w': self.windowWBox.value(), + 'window_default_h': self.windowHBox.value(), + 'tab_spaces': self.tabSpaceValue(), + 'pin_default': self.pinDefaultValue(), + 'show_labels': self.showLabelsValue(), + 'font': self.font, + 'color_scheme': self.colorSchemeValue(), + } + self.knobScripter.script_editor_font.setFamily(self.font) + self.knobScripter.script_editor.setFont( + self.knobScripter.script_editor_font) + self.knobScripter.font = self.font + self.knobScripter.color_scheme = self.colorSchemeValue() + self.knobScripter.tabSpaces = self.tabSpaceValue() + self.knobScripter.script_editor.tabSpaces = self.tabSpaceValue() + with open(self.prefs_txt, "w") as f: + prefs = json.dump(ks_prefs, f, sort_keys=True, indent=4) + self.accept() + self.knobScripter.highlighter.rehighlight() + self.knobScripter.show_labels = self.showLabelsValue() + if self.knobScripter.nodeMode: + self.knobScripter.refreshClicked() + return prefs + + def cancelPrefs(self): + self.knobScripter.script_editor_font.setPointSize(self.oldFontSize) + self.knobScripter.script_editor.setFont( + self.knobScripter.script_editor_font) + self.knobScripter.color_scheme = self.oldScheme + self.knobScripter.highlighter.rehighlight() + self.reject() + + def fontSizeChanged(self): + self.knobScripter.script_editor_font.setPointSize( + self.fontSizeBox.value()) + self.knobScripter.script_editor.setFont( + self.knobScripter.script_editor_font) + return + + def fontChanged(self): + self.font = self.fontBox.currentFont().family() + self.knobScripter.script_editor_font.setFamily(self.font) + self.knobScripter.script_editor.setFont( + self.knobScripter.script_editor_font) + return + + def colorSchemeChanged(self): + self.knobScripter.color_scheme = self.colorSchemeValue() + self.knobScripter.highlighter.rehighlight() + return + + def tabSpaceValue(self): + return 2 if self.tabSpace2.isChecked() else 4 + + def pinDefaultValue(self): + return 1 if self.pinDefaultOn.isChecked() else 0 + + def showLabelsValue(self): + return 1 if self.showLabelsOn.isChecked() else 0 + + def colorSchemeValue(self): + return "nuke" if self.colorSchemeNuke.isChecked() else "sublime" + + def closeEvent(self, event): + self.cancelPrefs() + self.close() + + +def updateContext(): + ''' + Get the current selection of nodes with their appropiate context + Doing this outside the KnobScripter -> forces context update inside groups when needed + ''' + global knobScripterSelectedNodes + knobScripterSelectedNodes = nuke.selectedNodes() + return + +# -------------------------------- +# FindReplace +# -------------------------------- + + +class 
FindReplaceWidget(QtWidgets.QWidget): + ''' SearchReplace Widget for the knobscripter. FindReplaceWidget(editor = QPlainTextEdit) ''' + + def __init__(self, parent): + super(FindReplaceWidget, self).__init__(parent) + + self.editor = parent.script_editor + + self.initUI() + + def initUI(self): + + # -------------- + # Find Row + # -------------- + + # Widgets + self.find_label = QtWidgets.QLabel("Find:") + # self.find_label.setSizePolicy(QtWidgets.QSizePolicy.Fixed,QtWidgets.QSizePolicy.Fixed) + self.find_label.setFixedWidth(50) + self.find_label.setAlignment( + QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter) + self.find_lineEdit = QtWidgets.QLineEdit() + self.find_next_button = QtWidgets.QPushButton("Next") + self.find_next_button.clicked.connect(self.find) + self.find_prev_button = QtWidgets.QPushButton("Previous") + self.find_prev_button.clicked.connect(self.findBack) + self.find_lineEdit.returnPressed.connect(self.find_next_button.click) + + # Layout + self.find_layout = QtWidgets.QHBoxLayout() + self.find_layout.addWidget(self.find_label) + self.find_layout.addWidget(self.find_lineEdit, stretch=1) + self.find_layout.addWidget(self.find_next_button) + self.find_layout.addWidget(self.find_prev_button) + + # -------------- + # Replace Row + # -------------- + + # Widgets + self.replace_label = QtWidgets.QLabel("Replace:") + # self.replace_label.setSizePolicy(QtWidgets.QSizePolicy.Fixed,QtWidgets.QSizePolicy.Fixed) + self.replace_label.setFixedWidth(50) + self.replace_label.setAlignment( + QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter) + self.replace_lineEdit = QtWidgets.QLineEdit() + self.replace_button = QtWidgets.QPushButton("Replace") + self.replace_button.clicked.connect(self.replace) + self.replace_all_button = QtWidgets.QPushButton("Replace All") + self.replace_all_button.clicked.connect( + lambda: self.replace(rep_all=True)) + self.replace_lineEdit.returnPressed.connect(self.replace_button.click) + + # Layout + self.replace_layout = QtWidgets.QHBoxLayout() + self.replace_layout.addWidget(self.replace_label) + self.replace_layout.addWidget(self.replace_lineEdit, stretch=1) + self.replace_layout.addWidget(self.replace_button) + self.replace_layout.addWidget(self.replace_all_button) + + # Info text + self.info_text = QtWidgets.QLabel("") + self.info_text.setVisible(False) + self.info_text.mousePressEvent = lambda x: self.info_text.setVisible( + False) + #f = self.info_text.font() + # f.setItalic(True) + # self.info_text.setFont(f) + # self.info_text.clicked.connect(lambda:self.info_text.setVisible(False)) + + # Divider line + line = QtWidgets.QFrame() + line.setFrameShape(QtWidgets.QFrame.HLine) + line.setFrameShadow(QtWidgets.QFrame.Sunken) + line.setLineWidth(0) + line.setMidLineWidth(1) + line.setFrameShadow(QtWidgets.QFrame.Sunken) + + # -------------- + # Main Layout + # -------------- + + self.layout = QtWidgets.QVBoxLayout() + self.layout.addSpacing(4) + self.layout.addWidget(self.info_text) + self.layout.addLayout(self.find_layout) + self.layout.addLayout(self.replace_layout) + self.layout.setSpacing(4) + try: # >n11 + self.layout.setMargin(2) + except: # 0: # If not found but there are matches, start over + cursor.movePosition(QtGui.QTextCursor.Start) + self.editor.setTextCursor(cursor) + self.editor.find(find_str, flags) + else: + cursor.insertText(rep_str) + self.editor.find( + rep_str, flags | QtGui.QTextDocument.FindBackward) + + cursor.endEditBlock() + self.replace_lineEdit.setFocus() + return + + +# -------------------------------- +# Snippets +# 
-------------------------------- +class SnippetsPanel(QtWidgets.QDialog): + def __init__(self, parent): + super(SnippetsPanel, self).__init__(parent) + + self.knobScripter = parent + + self.setWindowFlags(self.windowFlags() | + QtCore.Qt.WindowStaysOnTopHint) + self.setWindowTitle("Snippet editor") + + self.snippets_txt_path = self.knobScripter.snippets_txt_path + self.snippets_dict = self.loadSnippetsDict(path=self.snippets_txt_path) + #self.snippets_dict = snippets_dic + + # self.saveSnippets(snippets_dic) + + self.initUI() + self.resize(500, 300) + + def initUI(self): + self.layout = QtWidgets.QVBoxLayout() + + # First Area (Titles) + title_layout = QtWidgets.QHBoxLayout() + shortcuts_label = QtWidgets.QLabel("Shortcut") + code_label = QtWidgets.QLabel("Code snippet") + title_layout.addWidget(shortcuts_label, stretch=1) + title_layout.addWidget(code_label, stretch=2) + self.layout.addLayout(title_layout) + + # Main Scroll area + self.scroll_content = QtWidgets.QWidget() + self.scroll_layout = QtWidgets.QVBoxLayout() + + self.buildSnippetWidgets() + + self.scroll_content.setLayout(self.scroll_layout) + + # Scroll Area Properties + self.scroll = QtWidgets.QScrollArea() + self.scroll.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn) + self.scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff) + self.scroll.setWidgetResizable(True) + self.scroll.setWidget(self.scroll_content) + + self.layout.addWidget(self.scroll) + + # File knob test + #self.filePath_lineEdit = SnippetFilePath(self) + # self.filePath_lineEdit + # self.layout.addWidget(self.filePath_lineEdit) + + # Lower buttons + self.bottom_layout = QtWidgets.QHBoxLayout() + + self.add_btn = QtWidgets.QPushButton("Add snippet") + self.add_btn.setToolTip("Create empty fields for an extra snippet.") + self.add_btn.clicked.connect(self.addSnippet) + self.bottom_layout.addWidget(self.add_btn) + + self.addPath_btn = QtWidgets.QPushButton("Add custom path") + self.addPath_btn.setToolTip( + "Add a custom path to an external snippets .txt file.") + self.addPath_btn.clicked.connect(self.addCustomPath) + self.bottom_layout.addWidget(self.addPath_btn) + + self.bottom_layout.addStretch() + + self.save_btn = QtWidgets.QPushButton('OK') + self.save_btn.setToolTip( + "Save the snippets into a json file and close the panel.") + self.save_btn.clicked.connect(self.okPressed) + self.bottom_layout.addWidget(self.save_btn) + + self.cancel_btn = QtWidgets.QPushButton("Cancel") + self.cancel_btn.setToolTip("Cancel any new snippets or modifications.") + self.cancel_btn.clicked.connect(self.close) + self.bottom_layout.addWidget(self.cancel_btn) + + self.apply_btn = QtWidgets.QPushButton('Apply') + self.apply_btn.setToolTip("Save the snippets into a json file.") + self.apply_btn.setShortcut('Ctrl+S') + self.apply_btn.clicked.connect(self.applySnippets) + self.bottom_layout.addWidget(self.apply_btn) + + self.help_btn = QtWidgets.QPushButton('Help') + self.help_btn.setShortcut('F1') + self.help_btn.clicked.connect(self.showHelp) + self.bottom_layout.addWidget(self.help_btn) + + self.layout.addLayout(self.bottom_layout) + + self.setLayout(self.layout) + + def reload(self): + ''' + Clears everything without saving and redoes the widgets etc. + Only to be called if the panel isn't shown meaning it's closed. 
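The snippets file read by loadSnippetsDict is plain JSON mapping shortcuts to code, with `[custom-path-N]` keys pointing at extra snippet files. A sketch of what the file might contain (the shortcut names and path are hypothetical), mirroring the round trip saveSnippets performs below:

    import json

    snippets = {
        "blink": "nuke.createNode('BlinkScript')",            # shortcut -> code
        "imp": "import nuke\nimport nukescripts",
        "[custom-path-1]": "/studio/tools/nuke/snippets.txt",  # extra file
    }
    with open("snippets.txt", "w") as f:
        json.dump(snippets, f, sort_keys=True, indent=4)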
+ ''' + for i in reversed(range(self.scroll_layout.count())): + self.scroll_layout.itemAt(i).widget().deleteLater() + + self.snippets_dict = self.loadSnippetsDict(path=self.snippets_txt_path) + + self.buildSnippetWidgets() + + def buildSnippetWidgets(self): + for i, (key, val) in enumerate(self.snippets_dict.items()): + if re.match(r"\[custom-path-[0-9]+\]$", key): + file_edit = SnippetFilePath(val) + self.scroll_layout.insertWidget(-1, file_edit) + else: + snippet_edit = SnippetEdit(key, val, parent=self) + self.scroll_layout.insertWidget(-1, snippet_edit) + + def loadSnippetsDict(self, path=""): + ''' Load prefs. TO REMOVE ''' + if path == "": + path = self.knobScripter.snippets_txt_path + if not os.path.isfile(self.snippets_txt_path): + return {} + else: + with open(self.snippets_txt_path, "r") as f: + self.snippets = json.load(f) + return self.snippets + + def getSnippetsAsDict(self): + dic = {} + num_snippets = self.scroll_layout.count() + path_i = 1 + for s in range(num_snippets): + se = self.scroll_layout.itemAt(s).widget() + if se.__class__.__name__ == "SnippetEdit": + key = se.shortcut_editor.text() + val = se.script_editor.toPlainText() + if key != "": + dic[key] = val + else: + path = se.filepath_lineEdit.text() + if path != "": + dic["[custom-path-{}]".format(str(path_i))] = path + path_i += 1 + return dic + + def saveSnippets(self, snippets=""): + if snippets == "": + snippets = self.getSnippetsAsDict() + with open(self.snippets_txt_path, "w") as f: + prefs = json.dump(snippets, f, sort_keys=True, indent=4) + return prefs + + def applySnippets(self): + self.saveSnippets() + self.knobScripter.snippets = self.knobScripter.loadSnippets(maxDepth=5) + self.knobScripter.loadSnippets() + + def okPressed(self): + self.applySnippets() + self.accept() + + def addSnippet(self, key="", val=""): + se = SnippetEdit(key, val, parent=self) + self.scroll_layout.insertWidget(0, se) + self.show() + return se + + def addCustomPath(self, path=""): + cpe = SnippetFilePath(path) + self.scroll_layout.insertWidget(0, cpe) + self.show() + cpe.browseSnippets() + return cpe + + def showHelp(self): + ''' Create a new snippet, auto-completed with the help ''' + help_key = "help" + help_val = """Snippets are a convenient way to have code blocks that you can call through a shortcut.\n\n1. Simply write a shortcut on the text input field on the left. You can see this one is set to "test".\n\n2. Then, write a code or whatever in this script editor. You can include $$ as the placeholder for where you'll want the mouse cursor to appear.\n\n3. Finally, click OK or Apply to save the snippets. 
On the main script editor, you'll be able to call any snippet by writing the shortcut (in this example: help) and pressing the Tab key.\n\nIn order to remove a snippet, simply leave the shortcut and contents blank, and save the snippets.""" + help_se = self.addSnippet(help_key, help_val) + help_se.script_editor.resize(160, 160) + + +class SnippetEdit(QtWidgets.QWidget): + ''' Simple widget containing two fields, for the snippet shortcut and content ''' + + def __init__(self, key="", val="", parent=None): + super(SnippetEdit, self).__init__(parent) + + self.knobScripter = parent.knobScripter + self.color_scheme = self.knobScripter.color_scheme + self.layout = QtWidgets.QHBoxLayout() + + self.shortcut_editor = QtWidgets.QLineEdit(self) + f = self.shortcut_editor.font() + f.setWeight(QtGui.QFont.Bold) + self.shortcut_editor.setFont(f) + self.shortcut_editor.setText(str(key)) + #self.script_editor = QtWidgets.QTextEdit(self) + self.script_editor = KnobScripterTextEdit() + self.script_editor.setMinimumHeight(100) + self.script_editor.setStyleSheet( + 'background:#282828;color:#EEE;') # Main Colors + self.highlighter = KSScriptEditorHighlighter( + self.script_editor.document(), self) + self.script_editor_font = self.knobScripter.script_editor_font + self.script_editor.setFont(self.script_editor_font) + self.script_editor.resize(90, 90) + self.script_editor.setPlainText(str(val)) + self.layout.addWidget(self.shortcut_editor, + stretch=1, alignment=Qt.AlignTop) + self.layout.addWidget(self.script_editor, stretch=2) + self.layout.setContentsMargins(0, 0, 0, 0) + + self.setLayout(self.layout) + + +class SnippetFilePath(QtWidgets.QWidget): + ''' Simple widget containing a filepath lineEdit and a button to open the file browser ''' + + def __init__(self, path="", parent=None): + super(SnippetFilePath, self).__init__(parent) + + self.layout = QtWidgets.QHBoxLayout() + + self.custompath_label = QtWidgets.QLabel(self) + self.custompath_label.setText("Custom path: ") + + self.filepath_lineEdit = QtWidgets.QLineEdit(self) + self.filepath_lineEdit.setText(str(path)) + #self.script_editor = QtWidgets.QTextEdit(self) + self.filepath_lineEdit.setStyleSheet( + 'background:#282828;color:#EEE;') # Main Colors + self.script_editor_font = QtGui.QFont() + self.script_editor_font.setFamily("Courier") + self.script_editor_font.setStyleHint(QtGui.QFont.Monospace) + self.script_editor_font.setFixedPitch(True) + self.script_editor_font.setPointSize(11) + self.filepath_lineEdit.setFont(self.script_editor_font) + + self.file_button = QtWidgets.QPushButton(self) + self.file_button.setText("Browse...") + self.file_button.clicked.connect(self.browseSnippets) + + self.layout.addWidget(self.custompath_label) + self.layout.addWidget(self.filepath_lineEdit) + self.layout.addWidget(self.file_button) + self.layout.setContentsMargins(0, 10, 0, 10) + + self.setLayout(self.layout) + + def browseSnippets(self): + ''' Opens file panel for ...snippets.txt ''' + browseLocation = nuke.getFilename('Select snippets file', '*.txt') + + if not browseLocation: + return + + self.filepath_lineEdit.setText(browseLocation) + return + + +# -------------------------------- +# Implementation +# -------------------------------- + +def showKnobScripter(knob="knobChanged"): + selection = nuke.selectedNodes() + if not len(selection): + pan = KnobScripter() + else: + pan = KnobScripter(selection[0], knob) + pan.show() + + +def addKnobScripterPanel(): + global knobScripterPanel + try: + knobScripterPanel = 
panels.registerWidgetAsPanel('nuke.KnobScripterPane', 'Knob Scripter', + 'com.adrianpueyo.KnobScripterPane') + knobScripterPanel.addToPane(nuke.getPaneFor('Properties.1')) + + except: + knobScripterPanel = panels.registerWidgetAsPanel( + 'nuke.KnobScripterPane', 'Knob Scripter', 'com.adrianpueyo.KnobScripterPane') + + +nuke.KnobScripterPane = KnobScripterPane +log("KS LOADED") +ksShortcut = "alt+z" +addKnobScripterPanel() +nuke.menu('Nuke').addCommand( + 'Edit/Node/Open Floating Knob Scripter', showKnobScripter, ksShortcut) +nuke.menu('Nuke').addCommand('Edit/Node/Update KnobScripter Context', + updateContext).setVisible(False) diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py index 1155d2ea2e..34bfa294aa 100644 --- a/setup/nuke/nuke_path/menu.py +++ b/setup/nuke/nuke_path/menu.py @@ -1,6 +1,7 @@ import os import sys import atom_server +import KnobScripter from pype.nuke.lib import ( writes_version_sync, @@ -21,12 +22,3 @@ nuke.addOnScriptSave(checkInventoryVersions) nuke.addOnScriptSave(writes_version_sync) log.info('Automatic syncing of write file knob to script version') - -def adding_knobscripter_to_nukepath(): - nuke_path_dir = os.path.dirname(__file__) - knobscripter_path = os.path.join(nuke_path_dir, "KnobScripter-github") - sys.path.append(knobscripter_path) - import KnobScripter - log.info('Adding `KnobScripter`') - -adding_knobscripter_to_nukepath() From cca738f003f3e4182339951fba2f72961b773314 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 20 Jan 2020 17:56:52 +0100 Subject: [PATCH 153/393] reimplement sub user server --- pype/ftrack/ftrack_server/sub_user_server.py | 51 ++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 pype/ftrack/ftrack_server/sub_user_server.py diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py new file mode 100644 index 0000000000..68066b33ce --- /dev/null +++ b/pype/ftrack/ftrack_server/sub_user_server.py @@ -0,0 +1,51 @@ +import sys +import signal +import socket + +from ftrack_server import FtrackServer +from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub + +from pypeapp import Logger + +log = Logger().get_logger(__name__) + + +def main(args): + port = int(args[-1]) + + # Create a TCP/IP socket + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + # Connect the socket to the port where the server is listening + server_address = ("localhost", port) + log.debug("Storer connected to {} port {}".format(*server_address)) + sock.connect(server_address) + sock.sendall(b"CreatedUser") + + try: + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub + ) + server = FtrackServer("action") + log.debug("Launched Ftrack Event storer") + server.run_server(session=session) + + finally: + log.debug("Closing socket") + sock.close() + return 1 + + +if __name__ == "__main__": + # Register interupt signal + def signal_handler(sig, frame): + log.info( + "Process was forced to stop. Process ended." 
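The subprocess above dials back to a socket the main pype process opened, and identifies itself with a one-off `b"CreatedUser"` handshake before wrapping the connection in a SocketSession. A minimal sketch of the other end of that handshake (a hypothetical listener, not the actual pype event server):

    import socket

    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.bind(("localhost", 0))   # any free port; pype passes the real one
    listener.listen(1)
    port = listener.getsockname()[1]  # what sub_user_server receives as args[-1]

    # ... after launching the subprocess with `port` on its command line ...
    conn, _ = listener.accept()
    assert conn.recv(1024) == b"CreatedUser"  # the handshake sent above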
+ ) + log.info("Process ended.") + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + sys.exit(main(sys.argv)) From 4ddc5079dae9394f295239677584512e844550d5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jan 2020 18:07:24 +0100 Subject: [PATCH 154/393] fix(global): if baked mov then do not integrate it if on farm --- pype/plugins/global/publish/collect_filesequences.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 2d8dd1b1c2..b7b41ea88b 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -309,7 +309,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.info(" - {}".format(str(collection))) ext = collection.tail.lstrip(".") - + if "slate" in instance.data["families"]: frame_start += 1 @@ -343,6 +343,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if "slate" in instance.data["families"]: frame_start += 1 + tags = ["review"] + + if baked_mov_path: + tags.append("delete") + representation = { "name": rem.split(".")[-1], "ext": "{}".format(rem.split(".")[-1]), @@ -351,7 +356,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "frameStart": frame_start, "anatomy_template": "render", "fps": fps, - "tags": ["review"], + "tags": tags } instance.data["representations"].append( representation) From 82ac325448541be57fb6e352419cb8398643bef1 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 20 Jan 2020 23:42:09 +0100 Subject: [PATCH 155/393] Fix CG render publishing Eallin production cherry picked and merged with current develop --- .../global/publish/collect_filesequences.py | 22 +++- pype/plugins/global/publish/extract_jpeg.py | 102 +++++++++++------- pype/plugins/global/publish/extract_review.py | 3 + .../global/publish/submit_publish_job.py | 3 + 4 files changed, 89 insertions(+), 41 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index b7b41ea88b..a04de4fdd7 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -132,6 +132,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): root_override = data.get("root") frame_start = int(data.get("frameStart")) frame_end = int(data.get("frameEnd")) + subset = data.get("subset") if root_override: if os.path.isabs(root_override): @@ -162,11 +163,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): resolution_height = instance.get("resolutionHeight", 1080) lut_path = instance.get("lutPath", None) baked_mov_path = instance.get("bakeRenderPath") - subset = instance.get("subset") families_data = instance.get("families") slate_frame = instance.get("slateFrame") version = instance.get("version") + else: # Search in directory data = dict() @@ -200,6 +201,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if data.get("user"): context.data["user"] = data["user"] + if data.get("version"): + version = data.get("version") + # Get family from the data families = data.get("families", ["render"]) if "render" not in families: @@ -274,6 +278,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): baked_mov_path)) families.append("review") + if session['AVALON_APP'] == "maya": + families.append("review") + self.log.info( "Adding representations to subset 
{}".format( subset)) @@ -399,7 +406,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "source": data.get("source", ""), "pixelAspect": pixel_aspect, "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height + "resolutionHeight": resolution_height, + "version": version } ) if lut_path: @@ -421,6 +429,16 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "tags": ["review"], } instance.data["representations"].append(representation) + + # temporary ... allow only beauty on ftrack + if session['AVALON_APP'] == "maya": + AOV_filter = ['beauty'] + for aov in AOV_filter: + if aov not in instance.data['subset']: + instance.data['families'].remove('review') + instance.data['families'].remove('ftrack') + representation["tags"].remove('review') + self.log.debug( "__ representations {}".format( instance.data["representations"])) diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 8a1a0b5e68..00e8a6fedf 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -20,6 +20,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): hosts = ["shell"] order = pyblish.api.ExtractorOrder families = ["imagesequence", "render", "write", "source"] + enabled = False def process(self, instance): start = instance.data.get("frameStart") @@ -28,51 +29,74 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): collected_frames = os.listdir(stagingdir) collections, remainder = clique.assemble(collected_frames) - input_file = ( - collections[0].format('{head}{padding}{tail}') % start - ) - full_input_path = os.path.join(stagingdir, input_file) - self.log.info("input {}".format(full_input_path)) + self.log.info("subset {}".format(instance.data['subset'])) + if 'crypto' in instance.data['subset']: + return - filename = collections[0].format('{head}') - if not filename.endswith('.'): - filename += "." - jpegFile = filename + "jpg" - full_output_path = os.path.join(stagingdir, jpegFile) + # get representation and loop them + representations = instance.data["representations"] - self.log.info("output {}".format(full_output_path)) + # filter out mov and img sequences + representations_new = representations[:] - config_data = instance.context.data['output_repre_config'] + for repre in representations: + self.log.debug(repre) + if 'review' not in repre['tags']: + return - proj_name = os.environ.get('AVALON_PROJECT', '__default__') - profile = config_data.get(proj_name, config_data['__default__']) + input_file = repre['files'][0] - jpeg_items = [] - jpeg_items.append( - os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) - # override file if already exists - jpeg_items.append("-y") - # use same input args like with mov - jpeg_items.extend(profile.get('input', [])) - # input file - jpeg_items.append("-i {}".format(full_input_path)) - # output file - jpeg_items.append(full_output_path) + # input_file = ( + # collections[0].format('{head}{padding}{tail}') % start + # ) + full_input_path = os.path.join(stagingdir, input_file) + self.log.info("input {}".format(full_input_path)) - subprocess_jpeg = " ".join(jpeg_items) + filename = os.path.splitext(input_file)[0] + if not filename.endswith('.'): + filename += "." 
+ jpegFile = filename + "jpg" + full_output_path = os.path.join(stagingdir, jpegFile) - # run subprocess - self.log.debug("{}".format(subprocess_jpeg)) - pype.api.subprocess(subprocess_jpeg) + self.log.info("output {}".format(full_output_path)) - if "representations" not in instance.data: - instance.data["representations"] = [] + config_data = instance.context.data['output_repre_config'] - representation = { - 'name': 'jpg', - 'ext': 'jpg', - 'files': jpegFile, - "stagingDir": stagingdir, - "thumbnail": True - } - instance.data["representations"].append(representation) + proj_name = os.environ.get('AVALON_PROJECT', '__default__') + profile = config_data.get(proj_name, config_data['__default__']) + + jpeg_items = [] + jpeg_items.append( + os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) + # override file if already exists + jpeg_items.append("-y") + # use same input args like with mov + jpeg_items.extend(profile.get('input', [])) + # input file + jpeg_items.append("-i {}".format(full_input_path)) + # output file + jpeg_items.append(full_output_path) + + subprocess_jpeg = " ".join(jpeg_items) + + # run subprocess + self.log.debug("{}".format(subprocess_jpeg)) + pype.api.subprocess(subprocess_jpeg) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'jpg', + 'ext': 'jpg', + 'files': jpegFile, + "stagingDir": stagingdir, + "thumbnail": True, + "tags": ['thumbnail'] + } + + # adding representation + self.log.debug("Adding: {}".format(representation)) + representations_new.append(representation) + + instance.data["representations"] = representations_new diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index b1dfec6740..a11f681e61 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -46,6 +46,9 @@ class ExtractReview(pyblish.api.InstancePlugin): if repre['ext'] in self.ext_filter: tags = repre.get("tags", []) + if "thumbnail" in tags: + continue + self.log.info("Try repre: {}".format(repre)) if "review" in tags: diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 5c9c1dbd2e..c01cb379d4 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -341,6 +341,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): } } + if api.Session["AVALON_APP"] == "nuke": + metadata['subset'] = subset + if submission_type == "muster": ftrack = { "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"), From 0052ed1d34f60cf4f46f1cf4afa3c12de45d0647 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Jan 2020 08:55:30 +0100 Subject: [PATCH 156/393] fix(nuke): adding publish comment before rendering --- pype/plugins/global/publish/collect_filesequences.py | 4 ++++ pype/plugins/nuke/publish/extract_review_data_mov.py | 1 - pype/plugins/nuke/publish/extract_slate_frame.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index b7b41ea88b..121afa23ea 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -197,6 +197,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): fps = data.get("fps", 25) + # adding publish comment and intent to context + context.data["comment"] = 
data.get("comment", "") + context.data["intent"] = data.get("intent", "") + if data.get("user"): context.data["user"] = data["user"] diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 39c338b62c..8b204680a7 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -3,7 +3,6 @@ import pyblish.api from avalon.nuke import lib as anlib from pype.nuke import lib as pnlib import pype -reload(pnlib) class ExtractReviewDataMov(pype.api.Extractor): diff --git a/pype/plugins/nuke/publish/extract_slate_frame.py b/pype/plugins/nuke/publish/extract_slate_frame.py index 7e43b3cd6f..4d43f38859 100644 --- a/pype/plugins/nuke/publish/extract_slate_frame.py +++ b/pype/plugins/nuke/publish/extract_slate_frame.py @@ -12,7 +12,7 @@ class ExtractSlateFrame(pype.api.Extractor): """ - order = pyblish.api.ExtractorOrder + 0.01 + order = pyblish.api.ExtractorOrder - 0.001 label = "Extract Slate Frame" families = ["slate"] From c51769b68c11866b73aaf921f2162c31656fcc77 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Jan 2020 11:56:01 +0100 Subject: [PATCH 157/393] fix(ftrack): plugin is unfinished and causing troubles --- pype/plugins/ftrack/publish/integrate_ftrack_comments.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py index 9d0b7b3ab9..4f7afb4346 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py @@ -9,6 +9,7 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder label = "Integrate Comments to Ftrack." families = ["shot"] + enabled = False def process(self, instance): session = instance.context.data["ftrackSession"] From 0fbc2aeceaf67e0db5c95608dfa68af43d76833b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Jan 2020 12:17:46 +0100 Subject: [PATCH 158/393] feat(ftrack): disable ftrack comment integration --- pype/plugins/ftrack/publish/integrate_ftrack_comments.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py index 9d0b7b3ab9..4f7afb4346 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py @@ -9,6 +9,7 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder label = "Integrate Comments to Ftrack." 
families = ["shot"] + enabled = False def process(self, instance): session = instance.context.data["ftrackSession"] From a1d5622cde2b2935a8acd000b86beb6f97c2e0c2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Jan 2020 12:38:36 +0100 Subject: [PATCH 159/393] fix(ftrack): enabled: False was not enough --- .../{publish => _unused_publish}/integrate_ftrack_comments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename pype/plugins/ftrack/{publish => _unused_publish}/integrate_ftrack_comments.py (94%) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py b/pype/plugins/ftrack/_unused_publish/integrate_ftrack_comments.py similarity index 94% rename from pype/plugins/ftrack/publish/integrate_ftrack_comments.py rename to pype/plugins/ftrack/_unused_publish/integrate_ftrack_comments.py index 4f7afb4346..4be9f7fc3a 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py +++ b/pype/plugins/ftrack/_unused_publish/integrate_ftrack_comments.py @@ -7,7 +7,7 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin): """Create comments in Ftrack.""" order = pyblish.api.IntegratorOrder - label = "Integrate Comments to Ftrack." + label = "Integrate Comments to Ftrack" families = ["shot"] enabled = False From 0b81a4f04bd2140d4524946f9743bc144a097c30 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 23 Jan 2020 14:49:12 +0100 Subject: [PATCH 160/393] (fix): use correct(existing) variable name --- pype/ftrack/tray/ftrack_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index dab751c001..250872f239 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -171,7 +171,7 @@ class FtrackModule: # If thread failed test Ftrack and Mongo connection elif not self.thread_socket_server.isAlive(): - self.thread_socket_server_thread.join() + self.thread_socket_server.join() self.thread_socket_server = None ftrack_accessible = False From 7b460a515e908e81aead12d391b395718a0793e8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 23 Jan 2020 14:49:45 +0100 Subject: [PATCH 161/393] set logger name in action subprocess because is launched as __main__ --- pype/ftrack/ftrack_server/sub_user_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py index 68066b33ce..b2ca17f929 100644 --- a/pype/ftrack/ftrack_server/sub_user_server.py +++ b/pype/ftrack/ftrack_server/sub_user_server.py @@ -7,7 +7,7 @@ from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub from pypeapp import Logger -log = Logger().get_logger(__name__) +log = Logger().get_logger("FtrackUserServer") def main(args): From 81ccb152f58caa4063f6944597059fe81587953c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 23 Jan 2020 14:50:04 +0100 Subject: [PATCH 162/393] modified startup and end log messages --- pype/ftrack/ftrack_server/sub_user_server.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py index b2ca17f929..8b2a9277cf 100644 --- a/pype/ftrack/ftrack_server/sub_user_server.py +++ b/pype/ftrack/ftrack_server/sub_user_server.py @@ -18,7 +18,9 @@ def main(args): # Connect the socket to the port where the server is listening server_address = ("localhost", port) - log.debug("Storer connected to {} port {}".format(*server_address)) + log.debug( + "User 
Ftrack Server connected to {} port {}".format(*server_address)
+    )
     sock.connect(server_address)
     sock.sendall(b"CreatedUser")
 
@@ -27,7 +29,7 @@ def main(args):
             auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub
         )
         server = FtrackServer("action")
-        log.debug("Launched Ftrack Event storer")
+        log.debug("Launched User Ftrack Server")
         server.run_server(session=session)
 
     finally:
@@ -42,7 +44,6 @@ if __name__ == "__main__":
         log.info(
             "Process was forced to stop. Process ended."
         )
-        log.info("Process ended.")
         sys.exit(0)
 
     signal.signal(signal.SIGINT, signal_handler)

From f35f2f466958e2a85233c13c88c3cc98509a6f66 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 23 Jan 2020 14:52:21 +0100
Subject: [PATCH 163/393] modified collect ftrack api to log used ftrack user
 and to query project, asset and task entities separately, logging queries
 and results
---
 .../ftrack/publish/collect_ftrack_api.py     | 48 +++++++++++++------
 1 file changed, 33 insertions(+), 15 deletions(-)

diff --git a/pype/plugins/ftrack/publish/collect_ftrack_api.py b/pype/plugins/ftrack/publish/collect_ftrack_api.py
index d09baec676..f79d74453b 100644
--- a/pype/plugins/ftrack/publish/collect_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/collect_ftrack_api.py
@@ -23,25 +23,43 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
 
         # Collect session
         session = ftrack_api.Session()
+        self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
         context.data["ftrackSession"] = session
 
         # Collect task
-        project = os.environ.get('AVALON_PROJECT', '')
-        asset = os.environ.get('AVALON_ASSET', '')
-        task = os.environ.get('AVALON_TASK', None)
-        self.log.debug(task)
+        project_name = os.environ.get('AVALON_PROJECT', '')
+        asset_name = os.environ.get('AVALON_ASSET', '')
+        task_name = os.environ.get('AVALON_TASK', None)
+
+        # Find project entity
+        project_query = 'Project where full_name is "{0}"'.format(project_name)
+        self.log.debug("Project query: < {0} >".format(project_query))
+        project_entity = session.query(project_query).one()
+        self.log.debug("Project found: {0}".format(project_entity))
+
+        # Find asset entity
+        entity_query = (
+            'TypedContext where project_id is "{0}"'
+            ' and name is "{1}"'
+        ).format(project_entity["id"], asset_name)
+        self.log.debug("Asset entity query: < {0} >".format(entity_query))
+        asset_entity = session.query(entity_query).one()
+        self.log.debug("Asset found: {0}".format(asset_entity))
+
+        # Find task entity if task is set
+        if task_name:
+            task_query = (
+                'Task where name is "{0}" and parent_id is "{1}"'
+            ).format(task_name, asset_entity["id"])
+            self.log.debug("Task entity query: < {0} >".format(task_query))
+            task_entity = session.query(task_query).one()
+            self.log.debug("Task entity found: {0}".format(task_entity))
 
-        if task:
-            result = session.query('Task where\
-                project.full_name is "{0}" and\
-                name is "{1}" and\
-                parent.name is "{2}"'.format(project, task, asset)).one()
-            context.data["ftrackTask"] = result
         else:
-            result = session.query('TypedContext where\
-                project.full_name is "{0}" and\
-                name is "{1}"'.format(project, asset)).one()
-            context.data["ftrackEntity"] = result
+            task_entity = None
+            self.log.warning("Task name is not set.")
 
-        self.log.info(result)
+        context.data["ftrackProject"] = project_entity
+        context.data["ftrackEntity"] = asset_entity
+        context.data["ftrackTask"] = task_entity

From da990057f45124d52463c51e21983b7e36d933bc Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 24 Jan 2020 14:26:46 +0100
Subject: [PATCH 164/393] feat(nuke): adding validator for output resolution
---
 .../publish/validate_output_resolution.py | 78 +++++++++++++++++++
 1 file changed, 78 insertions(+)
 create mode 100644 pype/plugins/nuke/publish/validate_output_resolution.py

diff --git a/pype/plugins/nuke/publish/validate_output_resolution.py b/pype/plugins/nuke/publish/validate_output_resolution.py
new file mode 100644
index 0000000000..2563ee929f
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_output_resolution.py
@@ -0,0 +1,78 @@
+import nuke
+
+import pyblish.api
+
+
+class RepairWriteResolutionDifference(pyblish.api.Action):
+
+    label = "Repair"
+    icon = "wrench"
+    on = "failed"
+
+    def process(self, context, plugin):
+
+        # Get the errored instances
+        failed = []
+        for result in context.data["results"]:
+            if (result["error"] is not None and result["instance"] is not None
+               and result["instance"] not in failed):
+                failed.append(result["instance"])
+
+        # Apply pyblish.logic to get the instances for the plug-in
+        instances = pyblish.api.instances_by_plugin(failed, plugin)
+
+        for instance in instances:
+            reformat = instance[0].dependencies()[0]
+            if reformat.Class() != "Reformat":
+                reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)])
+
+                xpos = instance[0].xpos()
+                ypos = instance[0].ypos() - 26
+
+                dependent_ypos = instance[0].dependencies()[0].ypos()
+                if (instance[0].ypos() - dependent_ypos) <= 51:
+                    xpos += 110
+
+                reformat.setXYpos(xpos, ypos)
+
+                instance[0].setInput(0, reformat)
+
+            reformat["resize"].setValue("none")
+
+
+class ValidateOutputResolution(pyblish.api.InstancePlugin):
+    """Validates output resolution.
+
+    Makes sure the resolution of the Write node's input matches the
+    format defined on the script's Root node.
+    """
+
+    order = pyblish.api.ValidatorOrder
+    optional = True
+    families = ["render", "render.local", "render.farm"]
+    label = "Write Resolution"
+    hosts = ["nuke"]
+    actions = [RepairWriteResolutionDifference]
+
+    def process(self, instance):
+
+        # Skip the resolution check if a Crop node feeds the Write node.
+        if instance[0].dependencies()[0].Class() == "Crop":
+            return
+
+        msg = "Write input resolution does not match the root format."
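+        # (pyblish treats a raised AssertionError as a failed validation,
+        # so the assert below is what reports `msg` for this instance)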
+ assert self.check_resolution(instance), msg + + def check_resolution(self, instance): + node = instance[0] + + root_width = instance.data["resolutionWidth"] + root_height = instance.data["resolutionHeight"] + + write_width = node.format().width() + write_height = node.format().height() + + if (root_width != write_width) or (root_height != write_height): + return None + else: + return True From 675da4d1359898ea4e0cc6311b90882373de44f8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 24 Jan 2020 14:27:16 +0100 Subject: [PATCH 165/393] fix(nuke): making sure validator will run on all render families --- pype/plugins/nuke/publish/validate_write_bounding_box.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/validate_write_bounding_box.py b/pype/plugins/nuke/publish/validate_write_bounding_box.py index 417d4ab004..e4b7c77a25 100644 --- a/pype/plugins/nuke/publish/validate_write_bounding_box.py +++ b/pype/plugins/nuke/publish/validate_write_bounding_box.py @@ -57,7 +57,7 @@ class ValidateNukeWriteBoundingBox(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder optional = True - families = ["render"] + families = ["render", "render.local", "render.farm"] label = "Write Bounding Box" hosts = ["nuke"] actions = [RepairNukeBoundingBoxAction] From 6b36d72b06a134ef1a1a03d82a10317becd31716 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 24 Jan 2020 15:27:15 +0100 Subject: [PATCH 166/393] visual parent was checked in wrong variable --- pype/ftrack/lib/avalon_sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 8cebd12a59..2240e42d36 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -1991,7 +1991,7 @@ class SyncEntitiesFactory: vis_par = ent["data"]["visualParent"] if ( vis_par is not None and - str(vis_par) in self.deleted_entities + str(vis_par) in _deleted_entities ): continue _ready.append(mongo_id) From 0b1caf955a4353c59aa59e7262350fbd0e018c69 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 24 Jan 2020 16:23:29 +0100 Subject: [PATCH 167/393] added new version of py2 ftrack-api --- pype/vendor/ftrack_api_old/_version.py | 2 +- pype/vendor/ftrack_api_old/_weakref.py | 66 +++++++ pype/vendor/ftrack_api_old/attribute.py | 12 +- pype/vendor/ftrack_api_old/entity/factory.py | 16 +- pype/vendor/ftrack_api_old/entity/location.py | 3 +- pype/vendor/ftrack_api_old/entity/note.py | 55 +++++- .../vendor/ftrack_api_old/event/expression.py | 7 +- pype/vendor/ftrack_api_old/event/hub.py | 68 +++++-- pype/vendor/ftrack_api_old/logging.py | 17 ++ pype/vendor/ftrack_api_old/session.py | 170 +++++++++++++++--- pype/vendor/ftrack_api_old/symbol.py | 6 +- 11 files changed, 358 insertions(+), 64 deletions(-) create mode 100644 pype/vendor/ftrack_api_old/_weakref.py diff --git a/pype/vendor/ftrack_api_old/_version.py b/pype/vendor/ftrack_api_old/_version.py index 07f744ca5d..aa1a8c4aba 100644 --- a/pype/vendor/ftrack_api_old/_version.py +++ b/pype/vendor/ftrack_api_old/_version.py @@ -1 +1 @@ -__version__ = '1.3.3' +__version__ = '1.8.2' diff --git a/pype/vendor/ftrack_api_old/_weakref.py b/pype/vendor/ftrack_api_old/_weakref.py new file mode 100644 index 0000000000..69cc6f4b4f --- /dev/null +++ b/pype/vendor/ftrack_api_old/_weakref.py @@ -0,0 +1,66 @@ +""" +Yet another backport of WeakMethod for Python 2.7. +Changes include removing exception chaining and adding args to super() calls. 
+ +Copyright (c) 2001-2019 Python Software Foundation.All rights reserved. + +Full license available in LICENSE.python. +""" +from weakref import ref + + +class WeakMethod(ref): + """ + A custom `weakref.ref` subclass which simulates a weak reference to + a bound method, working around the lifetime problem of bound methods. + """ + + __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" + + def __new__(cls, meth, callback=None): + try: + obj = meth.__self__ + func = meth.__func__ + except AttributeError: + raise TypeError( + "argument should be a bound method, not {}".format(type(meth)) + ) + + def _cb(arg): + # The self-weakref trick is needed to avoid creating a reference + # cycle. + self = self_wr() + if self._alive: + self._alive = False + if callback is not None: + callback(self) + + self = ref.__new__(cls, obj, _cb) + self._func_ref = ref(func, _cb) + self._meth_type = type(meth) + self._alive = True + self_wr = ref(self) + return self + + def __call__(self): + obj = super(WeakMethod, self).__call__() + func = self._func_ref() + if obj is None or func is None: + return None + return self._meth_type(func, obj) + + def __eq__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is other + return ref.__eq__(self, other) and self._func_ref == other._func_ref + return NotImplemented + + def __ne__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is not other + return ref.__ne__(self, other) or self._func_ref != other._func_ref + return NotImplemented + + __hash__ = ref.__hash__ diff --git a/pype/vendor/ftrack_api_old/attribute.py b/pype/vendor/ftrack_api_old/attribute.py index 66840bed66..47fd6c9616 100644 --- a/pype/vendor/ftrack_api_old/attribute.py +++ b/pype/vendor/ftrack_api_old/attribute.py @@ -148,7 +148,8 @@ class Attribute(object): '''A name and value pair persisted remotely.''' def __init__( - self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True + self, name, default_value=ftrack_api_old.symbol.NOT_SET, mutable=True, + computed=False ): '''Initialise attribute with *name*. @@ -161,10 +162,14 @@ class Attribute(object): are :attr:`ftrack_api_old.symbol.NOT_SET`. The exception to this is when the target value is also :attr:`ftrack_api_old.symbol.NOT_SET`. + If *computed* is set to True the value is a remote side computed value + and should not be long-term cached. + ''' super(Attribute, self).__init__() self._name = name self._mutable = mutable + self._computed = computed self.default_value = default_value self._local_key = 'local' @@ -205,6 +210,11 @@ class Attribute(object): '''Return whether attribute is mutable.''' return self._mutable + @property + def computed(self): + '''Return whether attribute is computed.''' + return self._computed + def get_value(self, entity): '''Return current value for *entity*. diff --git a/pype/vendor/ftrack_api_old/entity/factory.py b/pype/vendor/ftrack_api_old/entity/factory.py index 16721514bd..f47c92e563 100644 --- a/pype/vendor/ftrack_api_old/entity/factory.py +++ b/pype/vendor/ftrack_api_old/entity/factory.py @@ -49,9 +49,11 @@ class Factory(object): # Build attributes for class. 
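         # (each property in the entity schema becomes an Attribute instance;
         # names listed in the schema's "immutable" and "computed" arrays
         # toggle the matching attribute flags)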
attributes = ftrack_api_old.attribute.Attributes() - immutable = schema.get('immutable', []) + immutable_properties = schema.get('immutable', []) + computed_properties = schema.get('computed', []) for name, fragment in schema.get('properties', {}).items(): - mutable = name not in immutable + mutable = name not in immutable_properties + computed = name in computed_properties default = fragment.get('default', ftrack_api_old.symbol.NOT_SET) if default == '{uid}': @@ -62,7 +64,8 @@ class Factory(object): if data_type is not ftrack_api_old.symbol.NOT_SET: if data_type in ( - 'string', 'boolean', 'integer', 'number', 'variable' + 'string', 'boolean', 'integer', 'number', 'variable', + 'object' ): # Basic scalar attribute. if data_type == 'number': @@ -74,7 +77,7 @@ class Factory(object): data_type = 'datetime' attribute = self.create_scalar_attribute( - class_name, name, mutable, default, data_type + class_name, name, mutable, computed, default, data_type ) if attribute: attributes.add(attribute) @@ -139,11 +142,12 @@ class Factory(object): return cls def create_scalar_attribute( - self, class_name, name, mutable, default, data_type + self, class_name, name, mutable, computed, default, data_type ): '''Return appropriate scalar attribute instance.''' return ftrack_api_old.attribute.ScalarAttribute( - name, data_type=data_type, default_value=default, mutable=mutable + name, data_type=data_type, default_value=default, mutable=mutable, + computed=computed ) def create_reference_attribute(self, class_name, name, mutable, reference): diff --git a/pype/vendor/ftrack_api_old/entity/location.py b/pype/vendor/ftrack_api_old/entity/location.py index d48264abc2..8d9d52c654 100644 --- a/pype/vendor/ftrack_api_old/entity/location.py +++ b/pype/vendor/ftrack_api_old/entity/location.py @@ -526,7 +526,8 @@ class Location(ftrack_api_old.entity.base.Entity): for index, resource_identifier in enumerate(resource_identifiers): resource_identifiers[index] = ( self.resource_identifier_transformer.decode( - resource_identifier + resource_identifier, + context={'component': components[index]} ) ) diff --git a/pype/vendor/ftrack_api_old/entity/note.py b/pype/vendor/ftrack_api_old/entity/note.py index 4cacf6ac8a..c628886fd9 100644 --- a/pype/vendor/ftrack_api_old/entity/note.py +++ b/pype/vendor/ftrack_api_old/entity/note.py @@ -1,6 +1,8 @@ # :coding: utf-8 # :copyright: Copyright (c) 2015 ftrack +import warnings + import ftrack_api_old.entity.base @@ -33,26 +35,52 @@ class Note(ftrack_api_old.entity.base.Entity): class CreateNoteMixin(object): '''Mixin to add create_note method on entity class.''' - def create_note(self, content, author, recipients=None, category=None): + def create_note( + self, content, author, recipients=None, category=None, labels=None + ): '''Create note with *content*, *author*. - Note category can be set by including *category* and *recipients* - can be specified as a list of user or group instances. + NoteLabels can be set by including *labels*. + + Note category can be set by including *category*. + + *recipients* can be specified as a list of user or group instances. ''' + note_label_support = 'NoteLabel' in self.session.types + + if not labels: + labels = [] + + if labels and not note_label_support: + raise ValueError( + 'NoteLabel is not supported by the current server version.' + ) + + if category and labels: + raise ValueError( + 'Both category and labels cannot be set at the same time.' 
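+                # (category is being superseded by labels; the deprecation
+                # warning further below covers the category-only code path)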
+ ) + if not recipients: recipients = [] - category_id = None - if category: - category_id = category['id'] - data = { 'content': content, - 'author': author, - 'category_id': category_id + 'author': author } + if category: + if note_label_support: + labels = [category] + warnings.warn( + 'category argument will be removed in an upcoming version, ' + 'please use labels instead.', + PendingDeprecationWarning + ) + else: + data['category_id'] = category['id'] + note = self.session.create('Note', data) self['notes'].append(note) @@ -65,4 +93,13 @@ class CreateNoteMixin(object): note['recipients'].append(recipient) + for label in labels: + self.session.create( + 'NoteLabelLink', + { + 'label_id': label['id'], + 'note_id': note['id'] + } + ) + return note diff --git a/pype/vendor/ftrack_api_old/event/expression.py b/pype/vendor/ftrack_api_old/event/expression.py index e10cd85844..8de4be0d71 100644 --- a/pype/vendor/ftrack_api_old/event/expression.py +++ b/pype/vendor/ftrack_api_old/event/expression.py @@ -3,14 +3,15 @@ from operator import eq, ne, ge, le, gt, lt -from pyparsing import (ParserElement, Group, Word, CaselessKeyword, Forward, +from pyparsing import (Group, Word, CaselessKeyword, Forward, FollowedBy, Suppress, oneOf, OneOrMore, Optional, alphanums, quotedString, removeQuotes) import ftrack_api_old.exception -# Optimise parsing using packrat memoisation feature. -ParserElement.enablePackrat() +# Do not enable packrat since it is not thread-safe and will result in parsing +# exceptions in a multi threaded environment. +# ParserElement.enablePackrat() class Parser(object): diff --git a/pype/vendor/ftrack_api_old/event/hub.py b/pype/vendor/ftrack_api_old/event/hub.py index 25410aa1e1..3ffbd38056 100644 --- a/pype/vendor/ftrack_api_old/event/hub.py +++ b/pype/vendor/ftrack_api_old/event/hub.py @@ -14,6 +14,7 @@ import operator import functools import json import socket +import warnings import requests import requests.exceptions @@ -40,9 +41,20 @@ ServerDetails = collections.namedtuple('ServerDetails', [ ]) + + class EventHub(object): '''Manage routing of events.''' + _future_signature_warning = ( + 'When constructing your Session object you did not explicitly define ' + 'auto_connect_event_hub as True even though you appear to be publishing ' + 'and / or subscribing to asynchronous events. In version version 2.0 of ' + 'the ftrack-python-api the default behavior will change from True ' + 'to False. Please make sure to update your tools. You can read more at ' + 'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html' + ) + def __init__(self, server_url, api_user, api_key): '''Initialise hub, connecting to ftrack *server_url*. @@ -76,6 +88,8 @@ class EventHub(object): self._auto_reconnect_attempts = 30 self._auto_reconnect_delay = 10 + self._deprecation_warning_auto_connect = False + # Mapping of Socket.IO codes to meaning. self._code_name_mapping = { '0': 'disconnect', @@ -134,6 +148,9 @@ class EventHub(object): connected or connection fails. ''' + + self._deprecation_warning_auto_connect = False + if self.connected: raise ftrack_api_old.exception.EventHubConnectionError( 'Already connected.' @@ -164,17 +181,26 @@ class EventHub(object): # https://docs.python.org/2/library/socket.html#socket.socket.setblocking self._connection = websocket.create_connection(url, timeout=60) - except Exception: + except Exception as error: + error_message = ( + 'Failed to connect to event server at {server_url} with ' + 'error: "{error}".' 
+ ) + + error_details = { + 'error': unicode(error), + 'server_url': self.get_server_url() + } + self.logger.debug( L( - 'Error connecting to event server at {0}.', - self.get_server_url() + error_message, **error_details ), exc_info=1 ) raise ftrack_api_old.exception.EventHubConnectionError( - 'Failed to connect to event server at {0}.' - .format(self.get_server_url()) + error_message, + details=error_details ) # Start background processing thread. @@ -543,6 +569,11 @@ class EventHub(object): event will be caught by this method and ignored. ''' + if self._deprecation_warning_auto_connect and not synchronous: + warnings.warn( + self._future_signature_warning, FutureWarning + ) + try: return self._publish( event, synchronous=synchronous, on_reply=on_reply @@ -700,18 +731,23 @@ class EventHub(object): # Automatically publish a non None response as a reply when not in # synchronous mode. - if not synchronous and response is not None: - - try: - self.publish_reply( - event, data=response, source=subscriber.metadata + if not synchronous: + if self._deprecation_warning_auto_connect: + warnings.warn( + self._future_signature_warning, FutureWarning ) - except Exception: - self.logger.exception(L( - 'Error publishing response {0} from subscriber {1} ' - 'for event {2}.', response, subscriber, event - )) + if response is not None: + try: + self.publish_reply( + event, data=response, source=subscriber.metadata + ) + + except Exception: + self.logger.exception(L( + 'Error publishing response {0} from subscriber {1} ' + 'for event {2}.', response, subscriber, event + )) # Check whether to continue processing topic event. if event.is_stopped(): @@ -881,6 +917,7 @@ class EventHub(object): if code_name == 'connect': self.logger.debug('Connected to event server.') event = ftrack_api_old.event.base.Event('ftrack.meta.connected') + self._prepare_event(event) self._event_queue.put(event) elif code_name == 'disconnect': @@ -901,6 +938,7 @@ class EventHub(object): if not self.connected: event = ftrack_api_old.event.base.Event('ftrack.meta.disconnected') + self._prepare_event(event) self._event_queue.put(event) elif code_name == 'heartbeat': diff --git a/pype/vendor/ftrack_api_old/logging.py b/pype/vendor/ftrack_api_old/logging.py index 2b28ce900b..41969c5b2a 100644 --- a/pype/vendor/ftrack_api_old/logging.py +++ b/pype/vendor/ftrack_api_old/logging.py @@ -1,6 +1,23 @@ # :coding: utf-8 # :copyright: Copyright (c) 2016 ftrack +import functools +import warnings + + +def deprecation_warning(message): + def decorator(function): + @functools.wraps(function) + def wrapper(*args, **kwargs): + warnings.warn( + message, + PendingDeprecationWarning + ) + return function(*args, **kwargs) + return wrapper + + return decorator + class LazyLogMessage(object): '''A log message that can be evaluated lazily for improved performance. 
diff --git a/pype/vendor/ftrack_api_old/session.py b/pype/vendor/ftrack_api_old/session.py index c313203a0c..0986962ca4 100644 --- a/pype/vendor/ftrack_api_old/session.py +++ b/pype/vendor/ftrack_api_old/session.py @@ -16,6 +16,7 @@ import hashlib import tempfile import threading import atexit +import warnings import requests import requests.auth @@ -42,8 +43,14 @@ import ftrack_api_old.structure.origin import ftrack_api_old.structure.entity_id import ftrack_api_old.accessor.server import ftrack_api_old._centralized_storage_scenario +import ftrack_api_old.logging from ftrack_api_old.logging import LazyLogMessage as L +try: + from weakref import WeakMethod +except ImportError: + from ftrack_api_old._weakref import WeakMethod + class SessionAuthentication(requests.auth.AuthBase): '''Attach ftrack session authentication information to requests.''' @@ -69,7 +76,7 @@ class Session(object): def __init__( self, server_url=None, api_key=None, api_user=None, auto_populate=True, plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=True, schema_cache_path=None, + auto_connect_event_hub=None, schema_cache_path=None, plugin_arguments=None ): '''Initialise session. @@ -233,7 +240,8 @@ class Session(object): self._api_key ) - if auto_connect_event_hub: + self._auto_connect_event_hub_thread = None + if auto_connect_event_hub in (None, True): # Connect to event hub in background thread so as not to block main # session usage waiting for event hub connection. self._auto_connect_event_hub_thread = threading.Thread( @@ -242,8 +250,14 @@ class Session(object): self._auto_connect_event_hub_thread.daemon = True self._auto_connect_event_hub_thread.start() + # To help with migration from auto_connect_event_hub default changing + # from True to False. + self._event_hub._deprecation_warning_auto_connect = ( + auto_connect_event_hub is None + ) + # Register to auto-close session on exit. - atexit.register(self.close) + atexit.register(WeakMethod(self.close)) self._plugin_paths = plugin_paths if self._plugin_paths is None: @@ -271,6 +285,15 @@ class Session(object): ftrack_api_old._centralized_storage_scenario.register(self) self._configure_locations() + self.event_hub.publish( + ftrack_api_old.event.base.Event( + topic='ftrack.api.session.ready', + data=dict( + session=self + ) + ), + synchronous=True + ) def __enter__(self): '''Return session as context manager.''' @@ -389,7 +412,8 @@ class Session(object): try: self.event_hub.disconnect() - self._auto_connect_event_hub_thread.join() + if self._auto_connect_event_hub_thread: + self._auto_connect_event_hub_thread.join() except ftrack_api_old.exception.EventHubConnectionError: pass @@ -428,6 +452,16 @@ class Session(object): # Re-configure certain session aspects that may be dependant on cache. self._configure_locations() + self.event_hub.publish( + ftrack_api_old.event.base.Event( + topic='ftrack.api.session.reset', + data=dict( + session=self + ) + ), + synchronous=True + ) + def auto_populating(self, auto_populate): '''Temporarily set auto populate to *auto_populate*. @@ -508,7 +542,7 @@ class Session(object): 'entity_key': entity.get('id') }) - result = self._call( + result = self.call( [payload] ) @@ -790,12 +824,13 @@ class Session(object): }] # TODO: When should this execute? How to handle background=True? - results = self._call(batch) + results = self.call(batch) # Merge entities into local cache and return merged entities. 
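         # (a single `merged` mapping is shared across the whole batch so an
         # entity referenced by several results is only merged once)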
data = [] + merged = dict() for entity in results[0]['data']: - data.append(self.merge(entity)) + data.append(self._merge_recursive(entity, merged)) return data, results[0]['metadata'] @@ -856,6 +891,48 @@ class Session(object): else: return value + def _merge_recursive(self, entity, merged=None): + '''Merge *entity* and all its attributes recursivly.''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + attached = self.merge(entity, merged) + + for attribute in entity.attributes: + # Remote attributes. + remote_value = attribute.get_remote_value(entity) + + if isinstance( + remote_value, + ( + ftrack_api_old.entity.base.Entity, + ftrack_api_old.collection.Collection, + ftrack_api_old.collection.MappedCollectionProxy + ) + ): + log_debug and self.logger.debug( + 'Merging remote value for attribute {0}.'.format(attribute) + ) + + if isinstance(remote_value, ftrack_api_old.entity.base.Entity): + self._merge_recursive(remote_value, merged=merged) + + elif isinstance( + remote_value, ftrack_api_old.collection.Collection + ): + for entry in remote_value: + self._merge_recursive(entry, merged=merged) + + elif isinstance( + remote_value, ftrack_api_old.collection.MappedCollectionProxy + ): + for entry in remote_value.collection: + self._merge_recursive(entry, merged=merged) + + return attached + def _merge_entity(self, entity, merged=None): '''Merge *entity* into session returning merged entity. @@ -1185,7 +1262,7 @@ class Session(object): # Process batch. if batch: - result = self._call(batch) + result = self.call(batch) # Clear recorded operations. self.recorded_operations.clear() @@ -1260,7 +1337,7 @@ class Session(object): def _fetch_server_information(self): '''Return server information.''' - result = self._call([{'action': 'query_server_information'}]) + result = self.call([{'action': 'query_server_information'}]) return result[0] def _discover_plugins(self, plugin_arguments=None): @@ -1362,7 +1439,7 @@ class Session(object): 'Loading schemas from server due to hash not matching.' 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash )) - schemas = self._call([{'action': 'query_schemas'}])[0] + schemas = self.call([{'action': 'query_schemas'}])[0] if schema_cache_path: try: @@ -1525,8 +1602,24 @@ class Session(object): synchronous=True ) + @ftrack_api_old.logging.deprecation_warning( + 'Session._call is now available as public method Session.call. The ' + 'private method will be removed in version 2.0.' + ) def _call(self, data): - '''Make request to server with *data*.''' + '''Make request to server with *data* batch describing the actions. + + .. note:: + + This private method is now available as public method + :meth:`entity_reference`. This alias remains for backwards + compatibility, but will be removed in version 2.0. + + ''' + return self.call(data) + + def call(self, data): + '''Make request to server with *data* batch describing the actions.''' url = self._server_url + '/api' headers = { 'content-type': 'application/json', @@ -1553,7 +1646,7 @@ class Session(object): 'Server reported error in unexpected format. 
Raw error was: {0}' .format(response.text) ) - self.logger.error(error_message) + self.logger.exception(error_message) raise ftrack_api_old.exception.ServerError(error_message) else: @@ -1562,7 +1655,7 @@ class Session(object): error_message = 'Server reported error: {0}({1})'.format( result['exception'], result['content'] ) - self.logger.error(error_message) + self.logger.exception(error_message) raise ftrack_api_old.exception.ServerError(error_message) return result @@ -1620,12 +1713,12 @@ class Session(object): if "entity_data" in data: for key, value in data["entity_data"].items(): if isinstance(value, ftrack_api_old.entity.base.Entity): - data["entity_data"][key] = self._entity_reference(value) + data["entity_data"][key] = self.entity_reference(value) return data if isinstance(item, ftrack_api_old.entity.base.Entity): - data = self._entity_reference(item) + data = self.entity_reference(item) with self.auto_populating(True): @@ -1646,14 +1739,15 @@ class Session(object): value = attribute.get_local_value(item) elif entity_attribute_strategy == 'persisted_only': - value = attribute.get_remote_value(item) + if not attribute.computed: + value = attribute.get_remote_value(item) if value is not ftrack_api_old.symbol.NOT_SET: if isinstance( attribute, ftrack_api_old.attribute.ReferenceAttribute ): if isinstance(value, ftrack_api_old.entity.base.Entity): - value = self._entity_reference(value) + value = self.entity_reference(value) data[attribute.name] = value @@ -1668,14 +1762,14 @@ class Session(object): if isinstance(item, ftrack_api_old.collection.Collection): data = [] for entity in item: - data.append(self._entity_reference(entity)) + data.append(self.entity_reference(entity)) return data raise TypeError('{0!r} is not JSON serializable'.format(item)) - def _entity_reference(self, entity): - '''Return reference to *entity*. + def entity_reference(self, entity): + '''Return entity reference that uniquely identifies *entity*. Return a mapping containing the __entity_type__ of the entity along with the key, value pairs that make up it's primary key. @@ -1689,6 +1783,26 @@ class Session(object): return reference + @ftrack_api_old.logging.deprecation_warning( + 'Session._entity_reference is now available as public method ' + 'Session.entity_reference. The private method will be removed ' + 'in version 2.0.' + ) + def _entity_reference(self, entity): + '''Return entity reference that uniquely identifies *entity*. + + Return a mapping containing the __entity_type__ of the entity along + with the key, value pairs that make up it's primary key. + + .. note:: + + This private method is now available as public method + :meth:`entity_reference`. This alias remains for backwards + compatibility, but will be removed in version 2.0. + + ''' + return self.entity_reference(entity) + def decode(self, string): '''Return decoded JSON *string* as Python object.''' with self.operation_recording(False): @@ -2016,6 +2130,10 @@ class Session(object): return availabilities + @ftrack_api_old.logging.deprecation_warning( + 'Session.delayed_job has been deprecated in favour of session.call. ' + 'Please refer to the release notes for more information.' + ) def delayed_job(self, job_type): '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. 
@@ -2033,7 +2151,7 @@ class Session(object): } try: - result = self._call( + result = self.call( [operation] )[0] @@ -2070,7 +2188,7 @@ class Session(object): ) try: - result = self._call([operation]) + result = self.call([operation]) except ftrack_api_old.exception.ServerError as error: # Raise informative error if the action is not supported. @@ -2172,7 +2290,7 @@ class Session(object): } try: - result = self._call([operation]) + result = self.call([operation]) except ftrack_api_old.exception.ServerError as error: # Raise informative error if the action is not supported. @@ -2212,7 +2330,7 @@ class Session(object): } try: - result = self._call([operation]) + result = self.call([operation]) except ftrack_api_old.exception.ServerError as error: # Raise informative error if the action is not supported. @@ -2258,7 +2376,7 @@ class Session(object): ) try: - self._call(operations) + self.call(operations) except ftrack_api_old.exception.ServerError as error: # Raise informative error if the action is not supported. @@ -2306,7 +2424,7 @@ class Session(object): ) try: - self._call(operations) + self.call(operations) except ftrack_api_old.exception.ServerError as error: # Raise informative error if the action is not supported. if 'Invalid action u\'send_review_session_invite\'' in error.message: diff --git a/pype/vendor/ftrack_api_old/symbol.py b/pype/vendor/ftrack_api_old/symbol.py index 10b3f55bd5..f46760f634 100644 --- a/pype/vendor/ftrack_api_old/symbol.py +++ b/pype/vendor/ftrack_api_old/symbol.py @@ -1,6 +1,8 @@ # :coding: utf-8 # :copyright: Copyright (c) 2014 ftrack +import os + class Symbol(object): '''A constant symbol.''' @@ -68,8 +70,8 @@ CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' #: Identifier of builtin server location. SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' -#: Chunk size used when working with data. -CHUNK_SIZE = 8192 +#: Chunk size used when working with data, default to 1Mb. 
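+#: (an unset or "0" FTRACK_API_FILE_CHUNK_SIZE falls back to 1024*1024 bytes)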
+CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 #: Symbol representing syncing users with ldap JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') From b69e839cfc4e7842e04e7ed477e88ebe68aebde6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 25 Jan 2020 14:18:39 +0100 Subject: [PATCH 168/393] feat(global): adding review to sequence functionality --- pype/plugins/global/publish/extract_review.py | 41 ++++++++++++++++--- 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index a11f681e61..2e79d86c38 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -53,10 +53,21 @@ class ExtractReview(pyblish.api.InstancePlugin): if "review" in tags: staging_dir = repre["stagingDir"] + + # iterating preset output profiles for name, profile in output_profiles.items(): + repre_new = repre.copy() + ext = profile.get("ext", None) + p_tags = profile.get('tags', []) + self.log.info("p_tags: `{}`".format(p_tags)) + + # adding control for presets to be sequence + # or single file + is_sequence = ("sequence" in p_tags) and (ext in ( + "png", "jpg", "jpeg")) + self.log.debug("Profile name: {}".format(name)) - ext = profile.get("ext", None) if not ext: ext = "mov" self.log.warning( @@ -88,18 +99,22 @@ class ExtractReview(pyblish.api.InstancePlugin): filename = repre["files"].split(".")[0] repr_file = filename + "_{0}.{1}".format(name, ext) - full_output_path = os.path.join( staging_dir, repr_file) + if is_sequence: + filename_base = filename + "_{0}".format(name) + repr_file = filename_base + ".%08d.{0}".format( + ext) + repre_new["sequence_file"] = repr_file + full_output_path = os.path.join( + staging_dir, filename_base, repr_file) + self.log.info("input {}".format(full_input_path)) self.log.info("output {}".format(full_output_path)) - repre_new = repre.copy() - new_tags = [x for x in tags if x != "delete"] - p_tags = profile.get('tags', []) - self.log.info("p_tags: `{}`".format(p_tags)) + # add families [instance.data["families"].append(t) for t in p_tags @@ -288,6 +303,14 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug( "_ output_args: `{}`".format(output_args)) + if is_sequence: + stg_dir = os.path.dirname(full_output_path) + + if not os.path.exists(stg_dir): + self.log.debug( + "creating dir: {}".format(stg_dir)) + os.mkdir(stg_dir) + mov_args = [ os.path.join( os.environ.get( @@ -315,6 +338,12 @@ class ExtractReview(pyblish.api.InstancePlugin): "resolutionHeight": resolution_height, "resolutionWidth": resolution_width, }) + if is_sequence: + repre_new.update({ + "stagingDir": stg_dir, + "files": os.listdir(stg_dir) + }) + if repre_new.get('preview'): repre_new.pop("preview") if repre_new.get('thumbnail'): From 533037b0c407c6035af97ae6b5d1648a7e971017 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 25 Jan 2020 14:19:21 +0100 Subject: [PATCH 169/393] wip(global): extract burnins to sequence --- pype/plugins/global/publish/extract_burnin.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 8f5a4aa000..4988f0d042 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -77,19 +77,31 @@ class ExtractBurnin(pype.api.Extractor): if "burnin" not in repre.get("tags", []): continue + is_sequence = "sequence" in 
repre.get("tags", []) + stagingdir = repre["stagingDir"] filename = "{0}".format(repre["files"]) + if is_sequence: + filename = repre["sequence_file"] + name = "_burnin" ext = os.path.splitext(filename)[1] movieFileBurnin = filename.replace(ext, "") + name + ext + if is_sequence: + fn_splt = filename.split(".") + movieFileBurnin = ".".join( + ((fn_splt[0] + name), fn_splt[-2], fn_splt[-1])) + + self.log.debug("__ movieFileBurnin: `{}`".format(movieFileBurnin)) + full_movie_path = os.path.join( - os.path.normpath(stagingdir), repre["files"] - ) + os.path.normpath(stagingdir), filename) full_burnin_path = os.path.join( - os.path.normpath(stagingdir), movieFileBurnin - ) + os.path.normpath(stagingdir), movieFileBurnin) + + self.log.debug("__ full_movie_path: {}".format(full_movie_path)) self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) # create copy of prep_data for anatomy formatting From dc459e593446eab6c6818fbf58040ffe28fcbe53 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 27 Jan 2020 12:45:02 +0100 Subject: [PATCH 170/393] hotfix - maya 2020 compatibility --- pype/maya/menu.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/maya/menu.py b/pype/maya/menu.py index 5254337f03..806944c117 100644 --- a/pype/maya/menu.py +++ b/pype/maya/menu.py @@ -15,12 +15,13 @@ log = logging.getLogger(__name__) def _get_menu(): """Return the menu instance if it currently exists in Maya""" - app = QtWidgets.QApplication.instance() - widgets = dict((w.objectName(), w) for w in app.allWidgets()) + widgets = dict(( + w.objectName(), w) for w in QtWidgets.QApplication.allWidgets()) menu = widgets.get(self._menu) return menu + def deferred(): log.info("Attempting to install scripts menu..") From 07997219d7920e8530e6200559c78989649ae33e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 27 Jan 2020 13:31:13 +0000 Subject: [PATCH 171/393] fixing unassigned variable --- pype/plugins/global/publish/collect_filesequences.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 9aa96b0e33..5c7ba41a5b 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -101,6 +101,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): lut_path = None slate_frame = None families_data = None + baked_mov_path = None subset = None version = None frame_start = 0 From 6e1f4de58dc8571f079867fe2ecd60ecc9edb897 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 27 Jan 2020 23:59:36 +0100 Subject: [PATCH 172/393] fix environment filter typo --- pype/plugins/global/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index afb0bcab0c..faf4aaef93 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -162,7 +162,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "FTRACK_API_KEY", "FTRACK_SERVER", "PYPE_ROOT", - "PYPE_METADATA_FILE" + "PYPE_METADATA_FILE", "PYPE_STUDIO_PROJECTS_PATH", "PYPE_STUDIO_PROJECTS_MOUNT" ] From ee71d2420d7454f272d6ce19e319d80098288829 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jan 2020 00:00:19 +0100 Subject: [PATCH 173/393] add start and end frame to collection --- pype/plugins/global/publish/collect_filesequences.py | 2 
++ 1 file changed, 2 insertions(+) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index a04de4fdd7..564c5d528f 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -423,6 +423,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "name": ext, "ext": "{}".format(ext), "files": list(collection), + "frameStart": start, + "frameEnd": end, "stagingDir": root, "anatomy_template": "render", "fps": fps, From b9269512deab6e9bff99a13e563baa1a05441b40 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 28 Jan 2020 14:26:14 +0100 Subject: [PATCH 174/393] comment should not be in a query of asset version --- .../ftrack/publish/integrate_ftrack_api.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py index c51685f84d..adb22aabba 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py @@ -148,6 +148,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): assetversion_cust_attrs = _assetversion_data.pop( "custom_attributes", {} ) + asset_version_comment = _assetversion_data.pop( + "comment", None + ) assetversion_data.update(_assetversion_data) assetversion_entity = session.query( @@ -185,6 +188,20 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): existing_assetversion_metadata.update(assetversion_metadata) assetversion_entity["metadata"] = existing_assetversion_metadata + # Add comment + if asset_version_comment: + assetversion_entity["comment"] = asset_version_comment + try: + session.commit() + except Exception: + session.rollback() + self.log.warning(( + "Comment was not possible to set for AssetVersion" + "\"{0}\". 
Can't set its value to: \"{1}\"" ).format( assetversion_entity["id"], str(asset_version_comment) )) + # Adding Custom Attributes for attr, val in assetversion_cust_attrs.items(): if attr in assetversion_entity["custom_attributes"]: From 5d654e8de13e7c83d6190970d30157eacbca79c6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 28 Jan 2020 14:26:51 +0100 Subject: [PATCH 175/393] sync to avalon action allows synchronizing empty projects --- pype/ftrack/lib/avalon_sync.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 8cebd12a59..b0482c2ab9 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -1722,7 +1722,11 @@ class SyncEntitiesFactory: self.avalon_project_id = new_id self._avalon_ents_by_id[str(new_id)] = project_item + if self._avalon_ents_by_ftrack_id is None: + self._avalon_ents_by_ftrack_id = {} self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id) + if self._avalon_ents_by_name is None: + self._avalon_ents_by_name = {} self._avalon_ents_by_name[project_item["name"]] = str(new_id) self.create_list.append(project_item) From 2487a07e01e1cfb6b46a69b40cf5df16beb44b06 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 28 Jan 2020 14:27:15 +0100 Subject: [PATCH 176/393] action server starts subprocess with same executable as tray has --- pype/ftrack/ftrack_server/socket_thread.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index c688693c77..1bf9d69ad7 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -1,4 +1,5 @@ import os +import sys import time import socket import threading @@ -52,7 +53,7 @@ class SocketThread(threading.Thread): ) self.subproc = subprocess.Popen( - ["python", self.filepath, "-port", str(self.port)], + [sys.executable, self.filepath, "-port", str(self.port)], stdout=subprocess.PIPE ) From 6c70f3fcbff1d22feded66e65c474e908f88992f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 28 Jan 2020 15:06:48 +0100 Subject: [PATCH 177/393] fix(nuke): remove annoying message window that appeared every time log.error happened --- pype/nuke/__init__.py | 69 ++++++++-------- pype/nuke/lib.py | 78 +++++++++++-------- pype/nuke/presets.py | 10 ++- pype/plugins/nuke/create/create_backdrop.py | 6 +- pype/plugins/nuke/create/create_gizmo.py | 12 ++- pype/plugins/nuke/create/create_read.py | 8 +- pype/plugins/nuke/create/create_write.py | 14 +++- pype/plugins/nuke/load/load_backdrop.py | 7 +- pype/plugins/nuke/load/load_gizmo_ip.py | 6 +- pype/plugins/nuke/load/load_luts_ip.py | 5 +- pype/plugins/nuke/load/load_matchmove.py | 5 +- .../nuke/publish/validate_rendered_frames.py | 4 +- 12 files changed, 133 insertions(+), 91 deletions(-) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 141cf4c13d..dfd61f4b39 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -33,40 +33,41 @@ if os.getenv("PYBLISH_GUI", None): pyblish.register_gui(os.getenv("PYBLISH_GUI", None)) -class NukeHandler(logging.Handler): - ''' - Nuke Handler - emits logs into nuke's script editor. - warning will emit nuke.warning() - critical and fatal would popup msg dialog to alert of the error. - ''' +# class NukeHandler(logging.Handler): +# ''' +# Nuke Handler - emits logs into nuke's script editor. +# warning will emit nuke.warning() +# critical and fatal would popup msg dialog to alert of the error.
+# ''' +# +# def __init__(self): +# logging.Handler.__init__(self) +# self.set_name("Pype_Nuke_Handler") +# +# def emit(self, record): +# # Formated message: +# msg = self.format(record) +# +# if record.levelname.lower() in [ +# # "warning", +# "critical", +# "fatal", +# "error" +# ]: +# msg = self.format(record) +# nuke.message(msg) +# +# +# '''Adding Nuke Logging Handler''' +# log.info([handler.get_name() for handler in logging.root.handlers[:]]) +# nuke_handler = NukeHandler() +# if nuke_handler.get_name() \ +# not in [handler.get_name() +# for handler in logging.root.handlers[:]]: +# logging.getLogger().addHandler(nuke_handler) +# logging.getLogger().setLevel(logging.INFO) +# log.info([handler.get_name() for handler in logging.root.handlers[:]]) - def __init__(self): - logging.Handler.__init__(self) - self.set_name("Pype_Nuke_Handler") - - def emit(self, record): - # Formated message: - msg = self.format(record) - - if record.levelname.lower() in [ - # "warning", - "critical", - "fatal", - "error" - ]: - msg = self.format(record) - nuke.message(msg) - - -'''Adding Nuke Logging Handler''' -log.info([handler.get_name() for handler in logging.root.handlers[:]]) -nuke_handler = NukeHandler() -if nuke_handler.get_name() \ - not in [handler.get_name() - for handler in logging.root.handlers[:]]: - logging.getLogger().addHandler(nuke_handler) - logging.getLogger().setLevel(logging.INFO) -log.info([handler.get_name() for handler in logging.root.handlers[:]]) def reload_config(): """Attempt to reload pipeline at run-time. @@ -113,7 +114,7 @@ def install(): family_states = [ "write", "review", - "nukenodes" + "nukenodes" "gizmo" ] diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 7aa0395da5..9282443fcf 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -21,7 +21,6 @@ from .presets import ( from .presets import ( get_anatomy ) -# TODO: remove get_anatomy and import directly Anatomy() here from pypeapp import Logger log = Logger().get_logger(__name__, "nuke") @@ -50,8 +49,6 @@ def checkInventoryVersions(): and check if the node is having actual version. If not then it will color it to red. """ - # TODO: make it for all nodes not just Read (Loader - # get all Loader nodes by avalon attribute metadata for each in nuke.allNodes(): if each.Class() == 'Read': @@ -93,7 +90,6 @@ def checkInventoryVersions(): def writes_version_sync(): ''' Callback synchronizing version of publishable write nodes ''' - # TODO: make it work with new write node group try: rootVersion = pype.get_version_from_path(nuke.root().name()) padding = len(rootVersion) @@ -130,7 +126,8 @@ def writes_version_sync(): os.makedirs(os.path.dirname(node_new_file), 0o766) except Exception as e: log.warning( - "Write node: `{}` has no version in path: {}".format(each.name(), e)) + "Write node: `{}` has no version in path: {}".format( + each.name(), e)) def version_up_script(): @@ -183,9 +180,11 @@ def format_anatomy(data): try: padding = int(anatomy.templates['render']['padding']) except KeyError as e: - log.error("`padding` key is not in `render` " - "Anatomy template. Please, add it there and restart " - "the pipeline (padding: \"4\"): `{}`".format(e)) + msg = "`padding` key is not in `render` " + "Anatomy template. 
Please, add it there and restart " + "the pipeline (padding: \"4\"): `{}`".format(e) + log.error(msg) + nuke.message(msg) version = data.get("version", None) if not version: @@ -265,7 +264,9 @@ def create_write_node(name, data, input=None, prenodes=None): anatomy_filled = format_anatomy(data) except Exception as e: - log.error("problem with resolving anatomy tepmlate: {}".format(e)) + msg = "problem with resolving anatomy tepmlate: {}".format(e) + log.error(msg) + nuke.message(msg) # build file path to workfiles fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/") @@ -543,8 +544,11 @@ class WorkfileSettings(object): viewer_dict (dict): adjustments from presets ''' - assert isinstance(viewer_dict, dict), log.error( - "set_viewers_colorspace(): argument should be dictionary") + if not isinstance(viewer_dict, dict): + msg = "set_viewers_colorspace(): argument should be dictionary" + log.error(msg) + nuke.message(msg) + return filter_knobs = [ "viewerProcess", @@ -592,8 +596,10 @@ class WorkfileSettings(object): root_dict (dict): adjustmensts from presets ''' - assert isinstance(root_dict, dict), log.error( - "set_root_colorspace(): argument should be dictionary") + if not isinstance(root_dict, dict): + msg = "set_root_colorspace(): argument should be dictionary" + log.error(msg) + nuke.message(msg) log.debug(">> root_dict: {}".format(root_dict)) @@ -640,8 +646,11 @@ class WorkfileSettings(object): ''' # TODO: complete this function so any write node in # scene will have fixed colorspace following presets for the project - assert isinstance(write_dict, dict), log.error( - "set_root_colorspace(): argument should be dictionary") + if not isinstance(write_dict, dict): + msg = "set_root_colorspace(): argument should be dictionary" + nuke.message(msg) + log.error(msg) + return log.debug("__ set_writes_colorspace(): {}".format(write_dict)) @@ -653,25 +662,28 @@ class WorkfileSettings(object): try: self.set_root_colorspace(nuke_colorspace["root"]) except AttributeError: - log.error( - "set_colorspace(): missing `root` settings in template") + msg = "set_colorspace(): missing `root` settings in template" + try: self.set_viewers_colorspace(nuke_colorspace["viewer"]) except AttributeError: - log.error( - "set_colorspace(): missing `viewer` settings in template") + msg = "set_colorspace(): missing `viewer` settings in template" + nuke.message(msg) + log.error(msg) try: self.set_writes_colorspace(nuke_colorspace["write"]) except AttributeError: - log.error( - "set_colorspace(): missing `write` settings in template") + msg = "set_colorspace(): missing `write` settings in template" + nuke.message(msg) + log.error(msg) try: for key in nuke_colorspace: log.debug("Preset's colorspace key: {}".format(key)) except TypeError: - log.error("Nuke is not in templates! \n\n\n" - "contact your supervisor!") + msg = "Nuke is not in templates! Contact your supervisor!" + nuke.message(msg) + log.error(msg) def reset_frame_range_handles(self): """Set frame range to current asset""" @@ -758,13 +770,13 @@ class WorkfileSettings(object): } if any(x for x in data.values() if x is None): - log.error( - "Missing set shot attributes in DB." - "\nContact your supervisor!." - "\n\nWidth: `{width}`" - "\nHeight: `{height}`" - "\nPixel Asspect: `{pixel_aspect}`".format(**data) - ) + msg = "Missing set shot attributes in DB." + "\nContact your supervisor!." 
+ "\n\nWidth: `{width}`" + "\nHeight: `{height}`" + "\nPixel Asspect: `{pixel_aspect}`".format(**data) + log.error(msg) + nuke.message(msg) bbox = self._asset_entity.get('data', {}).get('crop') @@ -781,10 +793,10 @@ class WorkfileSettings(object): ) except Exception as e: bbox = None - log.error( - "{}: {} \nFormat:Crop need to be set with dots, example: " + msg = "{}: {} \nFormat:Crop need to be set with dots, example: " "0.0.1920.1080, /nSetting to default".format(__name__, e) - ) + log.error(msg) + nuke.message(msg) existing_format = None for format in nuke.formats(): diff --git a/pype/nuke/presets.py b/pype/nuke/presets.py index e0c12e2671..a413ccc878 100644 --- a/pype/nuke/presets.py +++ b/pype/nuke/presets.py @@ -1,6 +1,6 @@ from pype import api as pype from pypeapp import Anatomy, config - +import nuke log = pype.Logger().get_logger(__name__, "nuke") @@ -28,7 +28,7 @@ def get_node_dataflow_preset(**kwarg): families = kwarg.get("families", []) preset = kwarg.get("preset", None) # omit < 2.0.0v - assert any([host, cls]), log.error( + assert any([host, cls]), nuke.message( "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) nuke_dataflow = get_dataflow_preset().get(str(host), None) @@ -56,8 +56,10 @@ def get_node_colorspace_preset(**kwarg): families = kwarg.get("families", []) preset = kwarg.get("preset", None) # omit < 2.0.0v - assert any([host, cls]), log.error( - "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) + if not any([host, cls]): + msg = "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__) + log.error(msg) + nuke.message(msg) nuke_colorspace = get_colorspace_preset().get(str(host), None) nuke_colorspace_node = nuke_colorspace.get(str(cls), None) diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py index 767e92b592..2016c66095 100644 --- a/pype/plugins/nuke/create/create_backdrop.py +++ b/pype/plugins/nuke/create/create_backdrop.py @@ -35,8 +35,10 @@ class CreateBackdrop(Creator): return instance else: - nuke.message("Please select nodes you " - "wish to add to a container") + msg = "Please select nodes you " + "wish to add to a container" + self.log.error(msg) + nuke.message(msg) return else: bckd_node = autoBackdrop() diff --git a/pype/plugins/nuke/create/create_gizmo.py b/pype/plugins/nuke/create/create_gizmo.py index 41229862e3..93fbbcf144 100644 --- a/pype/plugins/nuke/create/create_gizmo.py +++ b/pype/plugins/nuke/create/create_gizmo.py @@ -36,8 +36,10 @@ class CreateGizmo(Creator): node["tile_color"].setValue(int(self.node_color, 16)) return anlib.imprint(node, self.data) else: - nuke.message("Please select a group node " - "you wish to publish as the gizmo") + msg = "Please select a group node " + "you wish to publish as the gizmo" + self.log.error(msg) + nuke.message(msg) if len(nodes) >= 2: anlib.select_nodes(nodes) @@ -58,8 +60,10 @@ class CreateGizmo(Creator): return anlib.imprint(gizmo_node, self.data) else: - nuke.message("Please select nodes you " - "wish to add to the gizmo") + msg = "Please select nodes you " + "wish to add to the gizmo" + self.log.error(msg) + nuke.message(msg) return else: with anlib.maintained_selection(): diff --git a/pype/plugins/nuke/create/create_read.py b/pype/plugins/nuke/create/create_read.py index 1aa7e68746..70db580a7e 100644 --- a/pype/plugins/nuke/create/create_read.py +++ b/pype/plugins/nuke/create/create_read.py @@ -34,7 +34,9 @@ class CrateRead(avalon.nuke.Creator): nodes = self.nodes if not nodes or len(nodes) == 0: - 
nuke.message('Please select Read node') + msg = "Please select Read node" + self.log.error(msg) + nuke.message(msg) else: count_reads = 0 for node in nodes: @@ -46,7 +48,9 @@ class CrateRead(avalon.nuke.Creator): count_reads += 1 if count_reads < 1: - nuke.message('Please select Read node') + msg = "Please select Read node" + self.log.error(msg) + nuke.message(msg) return def change_read_node(self, name, node, data): diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index a85408cab3..c5c7d659e3 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -41,9 +41,11 @@ class CreateWriteRender(plugin.PypeCreator): if (self.options or {}).get("useSelection"): nodes = self.nodes - assert len(nodes) < 2, self.log.error( - "Select only one node. The node you want to connect to, " - "or tick off `Use selection`") + if not (len(nodes) < 2): + msg = "Select only one node. The node you want to connect to, " + "or tick off `Use selection`" + log.error(msg) + nuke.message(msg) selected_node = nodes[0] inputs = [selected_node] @@ -134,7 +136,11 @@ class CreateWritePrerender(plugin.PypeCreator): if (self.options or {}).get("useSelection"): nodes = self.nodes - assert len(nodes) < 2, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`") + if not (len(nodes) < 2): + msg = "Select only one node. The node you want to connect to, " + "or tick off `Use selection`" + self.log.error(msg) + nuke.message(msg) selected_node = nodes[0] inputs = [selected_node] diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py index 7f58d4e9ec..07a6724771 100644 --- a/pype/plugins/nuke/load/load_backdrop.py +++ b/pype/plugins/nuke/load/load_backdrop.py @@ -256,8 +256,11 @@ class LoadBackdropNodes(api.Loader): if len(viewer) > 0: viewer = viewer[0] else: - self.log.error("Please create Viewer node before you " - "run this action again") + if not (len(nodes) < 2): + msg = "Please create Viewer node before you " + "run this action again" + self.log.error(msg) + nuke.message(msg) return None # get coordinates of Viewer1 diff --git a/pype/plugins/nuke/load/load_gizmo_ip.py b/pype/plugins/nuke/load/load_gizmo_ip.py index 0d78c14214..23d7ef2f4a 100644 --- a/pype/plugins/nuke/load/load_gizmo_ip.py +++ b/pype/plugins/nuke/load/load_gizmo_ip.py @@ -176,8 +176,10 @@ class LoadGizmoInputProcess(api.Loader): if len(viewer) > 0: viewer = viewer[0] else: - self.log.error("Please create Viewer node before you " - "run this action again") + msg = "Please create Viewer node before you " + "run this action again" + self.log.error(msg) + nuke.message(msg) return None # get coordinates of Viewer1 diff --git a/pype/plugins/nuke/load/load_luts_ip.py b/pype/plugins/nuke/load/load_luts_ip.py index 5f09adb05f..2b38a9ff08 100644 --- a/pype/plugins/nuke/load/load_luts_ip.py +++ b/pype/plugins/nuke/load/load_luts_ip.py @@ -276,7 +276,10 @@ class LoadLutsInputProcess(api.Loader): if len(viewer) > 0: viewer = viewer[0] else: - self.log.error("Please create Viewer node before you run this action again") + msg = "Please create Viewer node before you " + "run this action again" + self.log.error(msg) + nuke.message(msg) return None # get coordinates of Viewer1 diff --git a/pype/plugins/nuke/load/load_matchmove.py b/pype/plugins/nuke/load/load_matchmove.py index 6a674368fb..60d5dc026f 100644 --- a/pype/plugins/nuke/load/load_matchmove.py +++ b/pype/plugins/nuke/load/load_matchmove.py @@ 
-1,4 +1,5 @@ from avalon import api +import nuke class MatchmoveLoader(api.Loader): @@ -19,6 +20,8 @@ class MatchmoveLoader(api.Loader): exec(open(self.fname).read()) else: - self.log.error("Unsupported script type") + msg = "Unsupported script type" + self.log.error(msg) + nuke.message(msg) return True diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py index c63c289947..169ea1ecb5 100644 --- a/pype/plugins/nuke/publish/validate_rendered_frames.py +++ b/pype/plugins/nuke/publish/validate_rendered_frames.py @@ -41,7 +41,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): if not repre.get('files'): msg = ("no frames were collected, " "you need to render them") - self.log.warning(msg) + self.log.error(msg) raise ValidationException(msg) collections, remainder = clique.assemble(repre["files"]) @@ -75,7 +75,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): self.log.info( 'len(collection.indexes): {}'.format(collected_frames_len) ) - + if "slate" in instance.data["families"]: collected_frames_len -= 1
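Review note on PATCH 177/393 above: several of its rewrites lean on Python's implicit string-literal concatenation, and it misfires twice. In pype/nuke/__init__.py the family_states list loses the comma between "nukenodes" and "gizmo", so the two literals fuse into one family name, "nukenodesgizmo". And the multi-line msg = "..." rewrites in pype/nuke/lib.py assign only the first literal, because each continuation line parses as a separate, discarded expression statement. PATCH 180/393 below ("syntax fixes") parenthesizes the msg strings, but the missing list comma survives it. A minimal sketch of both pitfalls, in plain Python:

    # Adjacent string literals fuse silently when a comma is missing.
    family_states = ["write", "review", "nukenodes" "gizmo"]
    print(family_states)  # ['write', 'review', 'nukenodesgizmo']

    # Without parentheses only the first literal is assigned; the
    # continuation line is a complete, discarded expression statement.
    msg = "`padding` key is not in `render` "
    "Anatomy template."  # no-op
    print(msg)  # prints only: `padding` key is not in `render`

    # Parenthesizing keeps all literals inside one expression, which is
    # what the later "syntax fixes" patch does for the msg strings.
    msg = ("`padding` key is not in `render` "
           "Anatomy template.")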
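One more note before the next patch: the triplet of building msg, calling self.log.error(msg) and then nuke.message(msg) is now copy-pasted across a dozen create/load plugins. A small shared helper would keep that behaviour in one place; a minimal sketch, assuming a hypothetical log_and_alert added somewhere like pype/nuke/lib.py (no such helper exists in these patches):

    import nuke

    def log_and_alert(log, msg):
        """Log an error and pop the same text up to the artist."""
        log.error(msg)
        nuke.message(msg)

    # A plugin body would then collapse to:
    #     log_and_alert(self.log, "Please select Read node")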
From ad6d5a1d55389a277e9b4fd45c761d372c2a8438 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jan 2020 23:50:52 +0100 Subject: [PATCH 179/393] fix zerotransform Pivots --- pype/plugins/maya/load/load_reference.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index 376fcc2c01..e5b0c0e238 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -40,14 +40,11 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): namespace = cmds.referenceQuery(nodes[0], namespace=True) shapes = cmds.ls(nodes, shapes=True, long=True) - print(shapes) newNodes = (list(set(nodes) - set(shapes))) - print(newNodes) groupNode = pm.PyNode(groupName) roots = set() - print(nodes) for node in newNodes: try: @@ -57,7 +54,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): for root in roots: root.setParent(world=True) - groupNode.root().zeroTransformPivots() + groupNode.zeroTransformPivots() for root in roots: root.setParent(groupNode) From 837807d5357406dcce1e42a2dadbd50e2e609173 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 29 Jan 2020 00:50:58 +0100 Subject: [PATCH 180/393] syntax fixes --- pype/nuke/lib.py | 13 +++++++------ pype/plugins/nuke/create/create_gizmo.py | 8 ++++---- pype/plugins/nuke/create/create_write.py | 8 ++++---- .../plugins/nuke/publish/extract_review_data_lut.py | 2 +- 4 files changed, 16 insertions(+), 15 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 9282443fcf..db1a5919c3 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -180,9 +180,10 @@ def format_anatomy(data): try: padding = int(anatomy.templates['render']['padding']) except KeyError as e: - msg = "`padding` key is not in `render` " + msg = ("`padding` key is not in `render` " "Anatomy template. Please, add it there and restart " "the pipeline (padding: \"4\"): `{}`").format(e) + log.error(msg) nuke.message(msg) version = data.get("version", None) @@ -770,11 +771,11 @@ class WorkfileSettings(object): } if any(x for x in data.values() if x is None): - msg = ("Missing set shot attributes in DB." + msg = ("Missing set shot attributes in DB." "\nContact your supervisor!."
"\n\nWidth: `{width}`" "\nHeight: `{height}`" - "\nPixel Asspect: `{pixel_aspect}`".format(**data) + "\nPixel Asspect: `{pixel_aspect}`").format(**data) log.error(msg) nuke.message(msg) @@ -793,8 +794,8 @@ class WorkfileSettings(object): ) except Exception as e: bbox = None - msg = "{}: {} \nFormat:Crop need to be set with dots, example: " - "0.0.1920.1080, /nSetting to default".format(__name__, e) + msg = ("{}:{} \nFormat:Crop need to be set with dots, example: " + "0.0.1920.1080, /nSetting to default").format(__name__, e) log.error(msg) nuke.message(msg) diff --git a/pype/plugins/nuke/create/create_gizmo.py b/pype/plugins/nuke/create/create_gizmo.py index 93fbbcf144..ca199b8800 100644 --- a/pype/plugins/nuke/create/create_gizmo.py +++ b/pype/plugins/nuke/create/create_gizmo.py @@ -36,8 +36,8 @@ class CreateGizmo(Creator): node["tile_color"].setValue(int(self.node_color, 16)) return anlib.imprint(node, self.data) else: - msg = "Please select a group node " - "you wish to publish as the gizmo" + msg = ("Please select a group node " + "you wish to publish as the gizmo") self.log.error(msg) nuke.message(msg) @@ -60,8 +60,8 @@ class CreateGizmo(Creator): return anlib.imprint(gizmo_node, self.data) else: - msg = "Please select nodes you " - "wish to add to the gizmo" + msg = ("Please select nodes you " + "wish to add to the gizmo") self.log.error(msg) nuke.message(msg) return diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index c5c7d659e3..74e450f267 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -42,8 +42,8 @@ class CreateWriteRender(plugin.PypeCreator): nodes = self.nodes if not (len(nodes) < 2): - msg = "Select only one node. The node you want to connect to, " - "or tick off `Use selection`" + msg = ("Select only one node. The node you want to connect to, " + "or tick off `Use selection`") log.error(msg) nuke.message(msg) @@ -137,8 +137,8 @@ class CreateWritePrerender(plugin.PypeCreator): nodes = self.nodes if not (len(nodes) < 2): - msg = "Select only one node. The node you want to connect to, " - "or tick off `Use selection`" + msg = ("Select only one node. 
The node you want to connect to, " + "or tick off `Use selection`") self.log.error(msg) nuke.message(msg) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index 4373309363..90b1fda1ec 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -41,7 +41,7 @@ class ExtractReviewDataLut(pype.api.Extractor): with anlib.maintained_selection(): exporter = pnlib.ExporterReviewLut( self, instance - ) + ) data = exporter.generate_lut() # assign to representations From b2b6a0e79013eb8de5875a35c69cdb8c20db0b12 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 29 Jan 2020 01:22:49 +0100 Subject: [PATCH 181/393] remove capture_gui dependency --- .../plugins/maya/publish/extract_quicktime.py | 59 +++++++++- .../plugins/maya/publish/extract_thumbnail.py | 104 +++++++++--------- 2 files changed, 107 insertions(+), 56 deletions(-) diff --git a/pype/plugins/maya/publish/extract_quicktime.py b/pype/plugins/maya/publish/extract_quicktime.py index 1031955260..94b5a716a2 100644 --- a/pype/plugins/maya/publish/extract_quicktime.py +++ b/pype/plugins/maya/publish/extract_quicktime.py @@ -1,16 +1,14 @@ import os +import glob import contextlib -import capture_gui import clique +import capture # import pype.maya.lib as lib import pype.api # from maya import cmds, mel import pymel.core as pm -# import ffmpeg -# # from pype.scripts import otio_burnin -# reload(ffmpeg) # TODO: move codec settings to presets @@ -93,7 +91,18 @@ class ExtractQuicktime(pype.api.Extractor): pm.currentTime(refreshFrameInt, edit=True) with maintained_time(): - playblast = capture_gui.lib.capture_scene(preset) + filename = preset.get("filename", "%TEMP%") + + # Force viewer to False in call to capture because we have our own + # viewer opening call to allow a signal to trigger between playblast + # and viewer + preset['viewer'] = False + + # Remove panel key since it's internal value to capture_gui + preset.pop("panel", None) + + path = capture.capture(**preset) + playblast = self._fix_playblast_output_path(path) self.log.info("file list {}".format(playblast)) @@ -119,6 +128,46 @@ class ExtractQuicktime(pype.api.Extractor): } instance.data["representations"].append(representation) + def _fix_playblast_output_path(self, filepath): + """Workaround a bug in maya.cmds.playblast to return correct filepath. + + When the `viewer` argument is set to False and maya.cmds.playblast + does not automatically open the playblasted file the returned + filepath does not have the file's extension added correctly. + + To workaround this we just glob.glob() for any file extensions and + assume the latest modified file is the correct file and return it. + + """ + # Catch cancelled playblast + if filepath is None: + self.log.warning("Playblast did not result in output path. " + "Playblast is probably interrupted.") + return None + + # Fix: playblast not returning correct filename (with extension) + # Lets assume the most recently modified file is the correct one. 
+ if not os.path.exists(filepath): + directory = os.path.dirname(filepath) + filename = os.path.basename(filepath) + # check if the filepath is has frame based filename + # example : capture.####.png + parts = filename.split(".") + if len(parts) == 3: + query = os.path.join(directory, "{}.*.{}".format(parts[0], + parts[-1])) + files = glob.glob(query) + else: + files = glob.glob("{}.*".format(filepath)) + + if not files: + raise RuntimeError("Couldn't find playblast from: " + "{0}".format(filepath)) + filepath = max(files, key=os.path.getmtime) + + return filepath + + @contextlib.contextmanager def maintained_time(): diff --git a/pype/plugins/maya/publish/extract_thumbnail.py b/pype/plugins/maya/publish/extract_thumbnail.py index dc8044cf19..8377af1ac0 100644 --- a/pype/plugins/maya/publish/extract_thumbnail.py +++ b/pype/plugins/maya/publish/extract_thumbnail.py @@ -1,31 +1,14 @@ import os import contextlib -import time -import sys +import glob -import capture_gui -import clique +import capture import pype.maya.lib as lib import pype.api from maya import cmds import pymel.core as pm -# import ffmpeg -# reload(ffmpeg) - -import avalon.maya - -# import maya_utils as mu - -# from tweakHUD import master -# from tweakHUD import draft_hud as dHUD -# from tweakHUD import ftrackStrings as fStrings - -# -# def soundOffsetFunc(oSF, SF, H): -# tmOff = (oSF - H) - SF -# return tmOff class ExtractThumbnail(pype.api.Extractor): @@ -47,39 +30,8 @@ class ExtractThumbnail(pype.api.Extractor): end = cmds.currentTime(query=True) self.log.info("start: {}, end: {}".format(start, end)) - members = instance.data['setMembers'] camera = instance.data['review_camera'] - # project_code = ftrack_data['Project']['code'] - # task_type = ftrack_data['Task']['type'] - # - # # load Preset - # studio_repos = os.path.abspath(os.environ.get('studio_repos')) - # shot_preset_path = os.path.join(studio_repos, 'maya', - # 'capture_gui_presets', - # (project_code + '_' + task_type + '_' + asset + '.json')) - # - # task_preset_path = os.path.join(studio_repos, 'maya', - # 'capture_gui_presets', - # (project_code + '_' + task_type + '.json')) - # - # project_preset_path = os.path.join(studio_repos, 'maya', - # 'capture_gui_presets', - # (project_code + '.json')) - # - # default_preset_path = os.path.join(studio_repos, 'maya', - # 'capture_gui_presets', - # 'default.json') - # - # if os.path.isfile(shot_preset_path): - # preset_to_use = shot_preset_path - # elif os.path.isfile(task_preset_path): - # preset_to_use = task_preset_path - # elif os.path.isfile(project_preset_path): - # preset_to_use = project_preset_path - # else: - # preset_to_use = default_preset_path - capture_preset = "" capture_preset = instance.context.data['presets']['maya']['capture'] try: @@ -126,7 +78,18 @@ class ExtractThumbnail(pype.api.Extractor): pm.currentTime(refreshFrameInt, edit=True) with maintained_time(): - playblast = capture_gui.lib.capture_scene(preset) + filename = preset.get("filename", "%TEMP%") + + # Force viewer to False in call to capture because we have our own + # viewer opening call to allow a signal to trigger between + # playblast and viewer + preset['viewer'] = False + + # Remove panel key since it's internal value to capture_gui + preset.pop("panel", None) + + path = capture.capture(**preset) + playblast = self._fix_playblast_output_path(path) _, thumbnail = os.path.split(playblast) @@ -144,6 +107,45 @@ class ExtractThumbnail(pype.api.Extractor): } instance.data["representations"].append(representation) + def 
_fix_playblast_output_path(self, filepath): + """Workaround a bug in maya.cmds.playblast to return correct filepath. + + When the `viewer` argument is set to False and maya.cmds.playblast + does not automatically open the playblasted file the returned + filepath does not have the file's extension added correctly. + + To workaround this we just glob.glob() for any file extensions and + assume the latest modified file is the correct file and return it. + + """ + # Catch cancelled playblast + if filepath is None: + self.log.warning("Playblast did not result in output path. " + "Playblast is probably interrupted.") + return None + + # Fix: playblast not returning correct filename (with extension) + # Lets assume the most recently modified file is the correct one. + if not os.path.exists(filepath): + directory = os.path.dirname(filepath) + filename = os.path.basename(filepath) + # check if the filepath is has frame based filename + # example : capture.####.png + parts = filename.split(".") + if len(parts) == 3: + query = os.path.join(directory, "{}.*.{}".format(parts[0], + parts[-1])) + files = glob.glob(query) + else: + files = glob.glob("{}.*".format(filepath)) + + if not files: + raise RuntimeError("Couldn't find playblast from: " + "{0}".format(filepath)) + filepath = max(files, key=os.path.getmtime) + + return filepath + @contextlib.contextmanager def maintained_time(): From 3e4fa756568037848c6ad78d24ba4fd71d1c979b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 29 Jan 2020 13:46:42 +0100 Subject: [PATCH 182/393] fix(global): integrate new was mixing padding number src to dst --- pype/plugins/global/publish/integrate_new.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index e577c477c3..1be712c14a 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -339,10 +339,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): for i in src_collection.indexes: src_padding = src_padding_exp % i - # for adding first frame into db - if not dst_start_frame: - dst_start_frame = src_padding - src_file_name = "{0}{1}{2}".format( src_head, src_padding, src_tail) @@ -364,6 +360,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("source: {}".format(src)) instance.data["transfers"].append([src, dst]) + # for adding first frame into db + if not dst_start_frame: + dst_start_frame = dst_padding + + dst = "{0}{1}{2}".format( dst_head, dst_start_frame, From 153dcba79cf8a0aba9869e84a0a61c3a9be255f8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 29 Jan 2020 13:53:36 +0100 Subject: [PATCH 183/393] feat(scripts): otio_burnin is able to render image sequence --- pype/scripts/otio_burnin.py | 33 +++++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index d5bc2594a4..f128352974 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -5,6 +5,7 @@ import json import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins from pypeapp.lib import config from pype import api as pype +from subprocess import Popen, PIPE # FFmpeg in PATH is required @@ -21,6 +22,7 @@ else: FFMPEG = ( '{} -loglevel panic -i %(input)s %(filters)s %(args)s%(output)s' ).format(os.path.normpath(ffmpeg_path + "ffmpeg")) + FFPROBE = ( '{} -v quiet -print_format json -show_format -show_streams %(source)s' 
).format(os.path.normpath(ffmpeg_path + "ffprobe")) @@ -248,6 +250,33 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): 'filters': filters }).strip() + def render(self, output, args=None, overwrite=False, **kwargs): + """ + Render the media to a specified destination. + + :param str output: output file + :param str args: additional FFMPEG arguments + :param bool overwrite: overwrite the output if it exists + """ + if not overwrite and os.path.exists(output): + raise RuntimeError("Destination '%s' exists, please " + "use overwrite" % output) + + is_sequence = "%" in output + + command = self.command(output=output, + args=args, + overwrite=overwrite) + proc = Popen(command, shell=True) + proc.communicate() + if proc.returncode != 0: + raise RuntimeError("Failed to render '%s': %s'" + % (output, command)) + if is_sequence: + output = output % kwargs.get("duration") + if not os.path.exists(output): + raise RuntimeError("Failed to generate file '%s'" % output) def example(input_path, output_path): options_init = { @@ -349,7 +378,7 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) frame_start = data.get("frame_start") frame_start_tc = data.get('frame_start_tc', frame_start) - + stream = burnin._streams[0] if "resolution_width" not in data: data["resolution_width"] = stream.get("width", "Unknown") @@ -436,7 +465,7 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) if codec_data is not []: codec_args = " ".join(codec_data) - burnin.render(output_path, args=codec_args, overwrite=overwrite) + burnin.render(output_path, args=codec_args, overwrite=overwrite, **data) if __name__ == '__main__': From 862faa8325446ba8d734832be8b0deee1d236624 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 29 Jan 2020 13:54:27 +0100 Subject: [PATCH 184/393] feat(global): burnin extraction into image sequence --- pype/plugins/global/publish/extract_burnin.py | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 4988f0d042..26f6d34e91 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -156,15 +156,35 @@ class ExtractBurnin(pype.api.Extractor): self.log.debug("Output: {}".format(output)) repre_update = { + "anatomy_template": "render", "files": movieFileBurnin, "name": repre["name"], "tags": [x for x in repre["tags"] if x != "delete"] } + + if is_sequence: + burnin_seq_files = list() + for frame_index in range(_prep_data["duration"] + 1): + if frame_index == 0: + continue + burnin_seq_files.append(movieFileBurnin % frame_index) + repre_update.update({ + "files": burnin_seq_files + }) + instance.data["representations"][i].update(repre_update) # removing the source mov file - os.remove(full_movie_path) - self.log.debug("Removed: `{}`".format(full_movie_path)) + if is_sequence: + for frame_index in range(_prep_data["duration"] + 1): + if frame_index == 0: + continue + rm_file = full_movie_path % frame_index + os.remove(rm_file) + self.log.debug("Removed: `{}`".format(rm_file)) + else: + os.remove(full_movie_path) + self.log.debug("Removed: `{}`".format(full_movie_path)) # Remove any representations tagged for deletion.
for repre in instance.data["representations"]: From 17483ed05fab449bf9ce92eae2750d897f87af5b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 29 Jan 2020 17:20:01 +0100 Subject: [PATCH 185/393] fix(nuke): removing subsetgroups from publish plugins --- pype/plugins/nuke/publish/collect_workfile.py | 3 +-- pype/plugins/nuke/publish/collect_writes.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_workfile.py b/pype/plugins/nuke/publish/collect_workfile.py index 4fff9f46ed..9c01a3ec97 100644 --- a/pype/plugins/nuke/publish/collect_workfile.py +++ b/pype/plugins/nuke/publish/collect_workfile.py @@ -72,8 +72,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "publish": root.knob('publish').value(), "family": family, "families": [family], - "representations": list(), - "subsetGroup": "workfiles" + "representations": list() }) # adding basic script data diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 37c86978b6..3eff527d47 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -127,8 +127,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "families": families, "colorspace": node["colorspace"].value(), "deadlineChunkSize": deadlineChunkSize, - "deadlinePriority": deadlinePriority, - "subsetGroup": "renders" + "deadlinePriority": deadlinePriority }) self.log.debug("instance.data: {}".format(instance.data)) From d9c59dced9926def2c238b09a71175e0f4a1a8e7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 29 Jan 2020 17:24:04 +0100 Subject: [PATCH 186/393] feat(nk): adding png to loader plugin sequence --- pype/plugins/nuke/load/load_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 8f01d4511b..76599c3351 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -73,7 +73,7 @@ class LoadSequence(api.Loader): """Load image sequence into Nuke""" families = ["write", "source", "plate", "render"] - representations = ["exr", "dpx", "jpg", "jpeg"] + representations = ["exr", "dpx", "jpg", "jpeg", "png"] label = "Load sequence" order = -10 From fc28acb88fead4178480e8873f645c35415db114 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 29 Jan 2020 18:14:25 +0100 Subject: [PATCH 187/393] updated event_user_assignment and extract_burnin where format_all is used --- pype/ftrack/events/event_user_assigment.py | 7 ++++++- pype/plugins/global/publish/extract_burnin.py | 11 ++++++++--- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py index 87994d34b2..efdfb7665d 100644 --- a/pype/ftrack/events/event_user_assigment.py +++ b/pype/ftrack/events/event_user_assigment.py @@ -207,7 +207,12 @@ class UserAssigmentEvent(BaseEvent): # formatting work dir is easiest part as we can use whole path work_dir = anatomy.format(data)['avalon']['work'] # we also need publish but not whole - publish = anatomy.format_all(data)['partial']['avalon']['publish'] + filled_all = anatomy.format_all(data) + if "partial" not in filled_all: + publish = filled_all['avalon']['publish'] + else: + # Backwards compatibility + publish = filled_all["partial"]['avalon']['publish'] # now find path to {asset} m = re.search("(^.+?{})".format(data['asset']), publish) diff --git 
a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 8f5a4aa000..a3df47518c 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -95,9 +95,14 @@ class ExtractBurnin(pype.api.Extractor): # create copy of prep_data for anatomy formatting _prep_data = copy.deepcopy(prep_data) _prep_data["representation"] = repre["name"] - _prep_data["anatomy"] = ( - anatomy.format_all(_prep_data).get("solved") or {} - ) + filled_anatomy = anatomy.format_all(_prep_data) + if hasattr(filled_anatomy, "get_solved"): + _filled_anatomy = filled_anatomy.get_solved() + else: + # Backwards compatibility + _filled_anatomy = filled_anatomy.get("solved") + _prep_data["anatomy"] = _filled_anatomy or {} + burnin_data = { "input": full_movie_path.replace("\\", "/"), "codec": repre.get("codec", []), From 3aaa524f79c340b7ec47734fd26e4779b3b5a2c8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 29 Jan 2020 18:14:56 +0100 Subject: [PATCH 188/393] updated action_delivery, this change is NOT backwards compatible --- pype/ftrack/actions/action_delivery.py | 44 ++++++++++---------------- 1 file changed, 17 insertions(+), 27 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index afd20d12d1..29fdfe39ae 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -312,42 +312,32 @@ class Delivery(BaseAction): anatomy_data = copy.deepcopy(repre["context"]) anatomy_data["root"] = location_path - anatomy_filled = anatomy.format(anatomy_data) - test_path = ( - anatomy_filled - .get("delivery", {}) - .get(anatomy_name) - ) + anatomy_filled = anatomy.format_all(anatomy_data) + test_path = anatomy_filled["delivery"][anatomy_name] - if not test_path: + if not test_path.solved: msg = ( "Missing keys in Representation's context" " for anatomy template \"{}\"." ).format(anatomy_name) - all_anatomies = anatomy.format_all(anatomy_data) - result = None - for anatomies in all_anatomies.values(): - for key, temp in anatomies.get("delivery", {}).items(): - if key != anatomy_name: - continue + if test_path.missing_keys: + keys = ", ".join(test_path.missing_keys) + sub_msg = ( + "Representation: {}
- Missing keys: \"{}\"
" + ).format(str(repre["_id"]), keys) - result = temp - break + if test_path.invalid_types: + items = [] + for key, value in test_path.invalid_types.items(): + items.append("\"{}\" {}".format(key, str(value))) - # TODO log error! - missing keys in anatomy - if result: - missing_keys = [ - key[1] for key in string.Formatter().parse(result) - if key[1] is not None - ] - else: - missing_keys = ["unknown"] + keys = ", ".join(items) + sub_msg = ( + "Representation: {}
" + "- Invalid value DataType: \"{}\"
" + ).format(str(repre["_id"]), keys) - keys = ", ".join(missing_keys) - sub_msg = ( - "Representation: {}
- Missing keys: \"{}\"
" ).format(str(repre["_id"]), keys)
            self.report_items[msg].append(sub_msg)
            self.log.warning(
                "{} Representation: \"{}\" Filled: <{}>".format(

From 2d1bd6227fe1e1a00529dee4100d973addc7ae75 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 29 Jan 2020 18:34:29 +0100
Subject: [PATCH 189/393] fix(nks): handles was taken from wrong attribute

---
 pype/plugins/nukestudio/publish/collect_clips.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py
index 0729f20957..3759d50f6a 100644
--- a/pype/plugins/nukestudio/publish/collect_clips.py
+++ b/pype/plugins/nukestudio/publish/collect_clips.py
@@ -106,8 +106,8 @@ class CollectClips(api.ContextPlugin):
                 "family": "clip",
                 "families": [],
                 "handles": 0,
-                "handleStart": projectdata.get("handles", 0),
-                "handleEnd": projectdata.get("handles", 0),
+                "handleStart": projectdata.get("handleStart", 0),
+                "handleEnd": projectdata.get("handleEnd", 0),
                 "version": int(version)})
 
             instance = context.create_instance(**data)

From 8f2f88aeae9f4a91ef77b3edfff4e3731a1d4c03 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 30 Jan 2020 11:27:09 +0100
Subject: [PATCH 190/393] user server is now launched without stdout override
 to be able to get its output

---
 pype/ftrack/ftrack_server/socket_thread.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py
index 1bf9d69ad7..8e217870ba 100644
--- a/pype/ftrack/ftrack_server/socket_thread.py
+++ b/pype/ftrack/ftrack_server/socket_thread.py
@@ -53,8 +53,7 @@ class SocketThread(threading.Thread):
         )
 
         self.subproc = subprocess.Popen(
-            [sys.executable, self.filepath, "-port", str(self.port)],
-            stdout=subprocess.PIPE
+            [sys.executable, self.filepath, "-port", str(self.port)]
         )
 
         # Listen for incoming connections
@@ -116,11 +115,6 @@ class SocketThread(threading.Thread):
         if self.subproc.poll() is None:
             self.subproc.terminate()
 
-        lines = self.subproc.stdout.readlines()
-        if lines:
-            print("*** Socked Thread stdout ***")
-            for line in lines:
-                os.write(1, line)
         self.finished = True
 
     def get_data_from_con(self, connection):

From 2f992a6ea4b1f97e6bc87f40f90265753503577f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 30 Jan 2020 11:28:30 +0100
Subject: [PATCH 191/393] sub_user_server print out exception on crash

---
 pype/ftrack/ftrack_server/sub_user_server.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py
index 8b2a9277cf..f0d39447a8 100644
--- a/pype/ftrack/ftrack_server/sub_user_server.py
+++ b/pype/ftrack/ftrack_server/sub_user_server.py
@@ -2,6 +2,8 @@ import sys
 import signal
 import socket
 
+import traceback
+
 from ftrack_server import FtrackServer
 from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub
 
@@ -31,6 +33,8 @@ def main(args):
         server = FtrackServer("action")
         log.debug("Launched User Ftrack Server")
         server.run_server(session=session)
+    except Exception:
+        traceback.print_exception(*sys.exc_info())
 
     finally:
         log.debug("Closing socket")

From cc35ed7ea9b606f7ded44d1d722f1d7e32853621 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 30 Jan 2020 11:29:17 +0100
Subject: [PATCH 192/393] thumbid key is ignored in event_sync_to_avalon

---
 pype/ftrack/events/event_sync_to_avalon.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py
b/pype/ftrack/events/event_sync_to_avalon.py index 23284a2ae6..f74abaf8cb 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -31,7 +31,7 @@ class SyncToAvalonEvent(BaseEvent): "timelog", "auth_userrole", "appointment" ] ignore_ent_types = ["Milestone"] - ignore_keys = ["statusid"] + ignore_keys = ["statusid", "thumbid"] project_query = ( "select full_name, name, custom_attributes" From 8eb14bade235f4c063f533410bf34069294d1130 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 30 Jan 2020 11:37:15 +0100 Subject: [PATCH 193/393] added warning message for cases when entityId is set to list (happened in client) --- pype/ftrack/events/event_sync_to_avalon.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index f74abaf8cb..67e0bee9d7 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -486,6 +486,14 @@ class SyncToAvalonEvent(BaseEvent): action = ent_info["action"] ftrack_id = ent_info["entityId"] + if isinstance(ftrack_id, list): + self.log.warning(( + "BUG REPORT: Entity info has `entityId` as `list` \"{}\"" + ).format(ent_info)) + if len(ftrack_id) == 0: + continue + ftrack_id = ftrack_id[0] + if action == "move": ent_keys = ent_info["keys"] # Seprate update info from move action From b715990e1e081741408358f572055bfa4e8a10a5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 30 Jan 2020 14:10:36 +0100 Subject: [PATCH 194/393] fix(nuke): created too many backdrops --- pype/nuke/lib.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index db1a5919c3..01b63392cd 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1103,9 +1103,9 @@ class BuildWorkfile(WorkfileSettings): # move position self.position_right() - bdn = self.create_backdrop(label="Loaded Reads", - color='0x2d7702ff', layer=-1, - nodes=nodes_backdrop) + self.create_backdrop(label="Loaded Reads", + color='0x2d7702ff', layer=-1, + nodes=nodes_backdrop) def read_loader(self, representation): """ From a46773450f9a618ad757b233d1ab48fe107a39f2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 30 Jan 2020 14:11:00 +0100 Subject: [PATCH 195/393] feat(nuke): added support for `png` --- pype/nuke/lib.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 01b63392cd..64ac83ba81 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1013,7 +1013,8 @@ class BuildWorkfile(WorkfileSettings): def process(self, regex_filter=None, version=None, - representations=["exr", "dpx", "lutJson", "mov", "preview"]): + representations=["exr", "dpx", "lutJson", "mov", + "preview", "png"]): """ A short description. 
From 34515cf14965b96a8658d90e3316f474de769bab Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 30 Jan 2020 14:11:36 +0100
Subject: [PATCH 196/393] feat(nuke): reads mov are now in colorspace presets

---
 pype/plugins/nuke/load/load_mov.py | 25 +++++++++++++++++++------
 1 file changed, 19 insertions(+), 6 deletions(-)

diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py
index e598839405..d4a3a7f6b9 100644
--- a/pype/plugins/nuke/load/load_mov.py
+++ b/pype/plugins/nuke/load/load_mov.py
@@ -1,8 +1,9 @@
+import re
+import nuke
 import contextlib
 
 from avalon import api, io
-
-import nuke
+from pype.nuke import presets
 from pype.api import Logger
 
 log = Logger().get_logger(__name__, "nuke")
@@ -24,7 +25,7 @@ def preserve_trim(node):
     offset_frame = None
     if node['frame_mode'].value() == "start at":
         start_at_frame = node['frame'].value()
-    if node['frame_mode'].value() is "offset":
+    if node['frame_mode'].value() == "offset":
         offset_frame = node['frame'].value()
 
     try:
@@ -122,7 +123,6 @@ class LoadMov(api.Loader):
             repr_cont["subset"],
             repr_cont["representation"])
 
-
         # Create the Loader with the filename path set
         with viewer_update_and_undo_stop():
             # TODO: it might be universal read to img/geo/camera
@@ -139,7 +139,20 @@ class LoadMov(api.Loader):
             read_node["last"].setValue(last)
             read_node["frame_mode"].setValue("start at")
             read_node["frame"].setValue(str(offset_frame))
-            # add additional metadata from the version to imprint to Avalon knob
+
+            # load nuke presets for Read's colorspace
+            read_clrs_presets = presets.get_colorspace_preset().get(
+                "nuke", {}).get("read", {})
+
+            # check if any colorspace presets for read is matching
+            preset_clrsp = next((read_clrs_presets[k]
+                                for k in read_clrs_presets
+                                if bool(re.search(k, file))),
+                                None)
+            if preset_clrsp is not None:
+                read_node["colorspace"].setValue(str(preset_clrsp))
+
+            # add additional metadata from the version to imprint Avalon knob
             add_keys = [
                 "frameStart", "frameEnd", "handles", "source", "author",
                 "fps", "version", "handleStart", "handleEnd"
             ]
 
             data_imprint = {}
             for key in add_keys:
-                if key is 'version':
+                if key == 'version':
                     data_imprint.update({
                         key: context["version"]['name']
                     })
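[Editor's note: this patch and the next one key the Read node's colorspace
off the first regex in the preset mapping that matches the loaded file path.
A minimal sketch of that lookup (the preset keys and values below are
hypothetical; real ones come from presets.get_colorspace_preset()):

    import re

    read_presets = {"_plate": "AlexaV3LogC", "_render": "linear"}

    def match_colorspace(path, presets):
        """Return the colorspace of the first preset regex matching path."""
        return next(
            (colorspace for pattern, colorspace in presets.items()
             if re.search(pattern, path)),
            None
        )

    # e.g. match_colorspace("/shots/sh010_plate.%04d.exr", read_presets)
    # -> "AlexaV3LogC"

Note that when several patterns match, dict ordering decides the winner.]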
From a35969f0df62542bfd9f2b870c9566335254d913 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 30 Jan 2020 14:11:56 +0100
Subject: [PATCH 197/393] feat(nuke): reads sequences are now in colorspace
 presets

---
 pype/plugins/nuke/load/load_sequence.py | 26 +++++++++++++++++++------
 1 file changed, 20 insertions(+), 6 deletions(-)

diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py
index 8f01d4511b..5d853d10d3 100644
--- a/pype/plugins/nuke/load/load_sequence.py
+++ b/pype/plugins/nuke/load/load_sequence.py
@@ -1,10 +1,12 @@
+import re
+import nuke
 import contextlib
 
 from avalon import api, io
-
-import nuke
+from pype.nuke import presets
 from pype.api import Logger
 
+
 log = Logger().get_logger(__name__, "nuke")
 
 
@@ -24,7 +26,7 @@ def preserve_trim(node):
     offset_frame = None
     if node['frame_mode'].value() == "start at":
         start_at_frame = node['frame'].value()
-    if node['frame_mode'].value() is "offset":
+    if node['frame_mode'].value() == "offset":
         offset_frame = node['frame'].value()
 
     try:
@@ -134,20 +136,32 @@ class LoadSequence(api.Loader):
             if colorspace is not None:
                 r["colorspace"].setValue(str(colorspace))
 
+            # load nuke presets for Read's colorspace
+            read_clrs_presets = presets.get_colorspace_preset().get(
+                "nuke", {}).get("read", {})
+
+            # check if any colorspace presets for read is matching
+            preset_clrsp = next((read_clrs_presets[k]
+                                for k in read_clrs_presets
+                                if bool(re.search(k, file))),
+                                None)
+            if preset_clrsp is not None:
+                r["colorspace"].setValue(str(preset_clrsp))
+
             loader_shift(r, first, relative=True)
             r["origfirst"].setValue(int(first))
             r["first"].setValue(int(first))
             r["origlast"].setValue(int(last))
             r["last"].setValue(int(last))
 
-            # add additional metadata from the version to imprint to Avalon knob
+            # add additional metadata from the version to imprint Avalon knob
             add_keys = ["frameStart", "frameEnd", "source", "colorspace",
                         "author", "fps", "version", "handleStart",
                         "handleEnd"]
 
             data_imprint = {}
             for k in add_keys:
-                if k is 'version':
+                if k == 'version':
                     data_imprint.update({k: context["version"]['name']})
                 else:
                     data_imprint.update(
@@ -179,7 +193,7 @@ class LoadSequence(api.Loader):
                 rtn["after"].setValue("continue")
                 rtn["input.first_lock"].setValue(True)
                 rtn["input.first"].setValue(
-                   self.handle_start + self.first_frame
+                    self.handle_start + self.first_frame
                 )
 
             if time_warp_nodes != []:

From be7dbb115172357fdfcdef6587429eff8a948bff Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 30 Jan 2020 18:18:01 +0100
Subject: [PATCH 198/393] fix(nuke): printing objects and docstring

---
 pype/nuke/lib.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 64ac83ba81..fdbd578a76 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -1055,9 +1056,10 @@ class BuildWorkfile(WorkfileSettings):
         wn["render"].setValue(True)
         vn.setInput(0, wn)
 
-        bdn = self.create_backdrop(label="Render write \n\n\n\nOUTPUT",
-                                   color='0xcc1102ff', layer=-1,
-                                   nodes=[wn])
+        # adding backdrop under write
+        self.create_backdrop(label="Render write \n\n\n\nOUTPUT",
+                             color='0xcc1102ff', layer=-1,
+                             nodes=[wn])
 
         # move position
         self.position_up(4)
@@ -1071,10 +1072,12 @@ class BuildWorkfile(WorkfileSettings):
             version=version,
             representations=representations)
 
-        log.info("__ subsets: `{}`".format(subsets))
+        for name, subset in subsets.items():
+            log.debug("___________________")
+            log.debug(name)
+            log.debug(subset["version"])
 
         nodes_backdrop = list()
-        for name, subset in subsets.items():
             if "lut" in name:
                 continue
@@ -1104,6 +1107,7 @@ class BuildWorkfile(WorkfileSettings):
             # move position
             self.position_right()
 
+        # adding backdrop under all read nodes
         self.create_backdrop(label="Loaded Reads",
                              color='0x2d7702ff', layer=-1,
                              nodes=nodes_backdrop)

From 0f6c79967a0fb8a3fd7fa4d9b3fd08b663fc7b11 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 30 Jan 2020 18:48:58 +0100
Subject: [PATCH 199/393] added warning messages for cases when entity does
 not have custom attributes

---
 pype/ftrack/events/event_sync_to_avalon.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 67e0bee9d7..8d25b5b801 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1828,6 +1828,13 @@ class SyncToAvalonEvent(BaseEvent):
             obj_type_id = ent_info["objectTypeId"]
             ent_cust_attrs = cust_attrs_by_obj_id.get(obj_type_id)
 
+            if ent_cust_attrs is None:
+                self.log.warning((
+                    "BUG REPORT: Entity has ent type without"
+                    " custom attributes <{}> \"{}\""
+                ).format(entType, ent_info))
+                continue
+
             for key, values in ent_info["changes"].items():
                 if key in hier_attrs_keys:
                     self.hier_cust_attrs_changes[key].append(ftrack_id)

From 
a51604bf6f98f39119a9bb9b40f73cea2162896d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 30 Jan 2020 19:19:52 +0100 Subject: [PATCH 200/393] store asset version objects to instance data after ftrack integration --- pype/plugins/ftrack/publish/integrate_ftrack_api.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py index adb22aabba..9dd803aafd 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py @@ -77,6 +77,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): info_msg = "Created new {entity_type} with data: {data}" info_msg += ", metadata: {metadata}." + used_asset_versions = [] # Iterate over components and publish for data in instance.data.get("ftrackComponentsList", []): @@ -386,3 +387,14 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): tp, value, tb = sys.exc_info() session.rollback() six.reraise(tp, value, tb) + + if assetversion_entity not in used_asset_versions: + used_asset_versions.append(assettype_entity) + + asset_versions_key = "ftrackIntegratedAssetVersions" + if asset_versions_key not in instance.context.data: + instance.context.data[asset_versions_key] = [] + + for asset_version in used_asset_versions: + if asset_version not in instance.context.data[asset_versions_key]: + instance.context.data[asset_versions_key].append(asset_version) From a3922a3b8193b4e9654e9f2fed074bc89738c9b4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 30 Jan 2020 19:27:51 +0100 Subject: [PATCH 201/393] added integrate ftrack note plugin --- .../ftrack/publish/integrate_ftrack_note.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 pype/plugins/ftrack/publish/integrate_ftrack_note.py diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py new file mode 100644 index 0000000000..e24c839be2 --- /dev/null +++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py @@ -0,0 +1,35 @@ +import sys +import pyblish.api +import six + + +class IntegrateFtrackNote(pyblish.api.InstancePlugin): + """Create comments in Ftrack.""" + + order = pyblish.api.IntegratorOrder + label = "Integrate Comments to Ftrack." 
+ families = ["ftrack"] + optional = True + + def process(self, instance): + comment = (instance.context.data.get("comment") or "").strip() + if not comment: + return + + asset_versions_key = "ftrackIntegratedAssetVersions" + asset_versions = instance.data.get(asset_versions_key) + if not asset_versions: + return + + session = context.data["ftrackSession"] + + note = session.create("Note", {"content": comment}) + for asset_version in asset_versions: + asset_version["notes"].extend(note) + + try: + session.commit() + except Exception: + tp, value, tb = sys.exc_info() + session.rollback() + six.reraise(tp, value, tb) From 8f8bfeb14937697b03cb632dc1015f7fac4a2f74 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 11:33:44 +0100 Subject: [PATCH 202/393] remove maya deprecated loaders --- pype/plugins/maya/load/load_reference.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index e5b0c0e238..b2544222c0 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -91,19 +91,3 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): def switch(self, container, representation): self.update(container, representation) - - -# for backwards compatibility -class AbcLoader(ReferenceLoader): - label = "Deprecated loader (don't use)" - families = ["pointcache", "animation"] - representations = ["abc"] - tool_names = [] - - -# for backwards compatibility -class ModelLoader(ReferenceLoader): - label = "Deprecated loader (don't use)" - families = ["model", "pointcache"] - representations = ["abc"] - tool_names = [] From 0e807f05dac35655dd1793523f8674edd3b7a74c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 12:05:07 +0100 Subject: [PATCH 203/393] remove obsolete backwards compatibility --- pype/ftrack/events/event_user_assigment.py | 7 ++----- pype/plugins/global/publish/extract_burnin.py | 7 +------ 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py index efdfb7665d..eaacfd959a 100644 --- a/pype/ftrack/events/event_user_assigment.py +++ b/pype/ftrack/events/event_user_assigment.py @@ -208,11 +208,8 @@ class UserAssigmentEvent(BaseEvent): work_dir = anatomy.format(data)['avalon']['work'] # we also need publish but not whole filled_all = anatomy.format_all(data) - if "partial" not in filled_all: - publish = filled_all['avalon']['publish'] - else: - # Backwards compatibility - publish = filled_all["partial"]['avalon']['publish'] + publish = filled_all['avalon']['publish'] + # now find path to {asset} m = re.search("(^.+?{})".format(data['asset']), publish) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index a3df47518c..8a96e66d27 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -96,12 +96,7 @@ class ExtractBurnin(pype.api.Extractor): _prep_data = copy.deepcopy(prep_data) _prep_data["representation"] = repre["name"] filled_anatomy = anatomy.format_all(_prep_data) - if hasattr(filled_anatomy, "get_solved"): - _filled_anatomy = filled_anatomy.get_solved() - else: - # Backwards compatibility - _filled_anatomy = filled_anatomy.get("solved") - _prep_data["anatomy"] = _filled_anatomy or {} + _prep_data["anatomy"] = filled_anatomy.get_solved() burnin_data = { "input": full_movie_path.replace("\\", "/"), From 
45c76919c0df080af5905b2a5c7288298a6135cc Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 31 Jan 2020 12:09:08 +0100
Subject: [PATCH 204/393] store asset versions to instance.data instead of
 context.data

---
 pype/plugins/ftrack/publish/integrate_ftrack_api.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 9dd803aafd..922c8e119f 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -392,9 +392,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             used_asset_versions.append(assettype_entity)
 
         asset_versions_key = "ftrackIntegratedAssetVersions"
-        if asset_versions_key not in instance.context.data:
-            instance.context.data[asset_versions_key] = []
+        if asset_versions_key not in instance.data:
+            instance.data[asset_versions_key] = []
 
-        for asset_version in used_asset_versions:
-            if asset_version not in instance.context.data[asset_versions_key]:
-                instance.context.data[asset_versions_key].append(asset_version)
+        for asset_version in used_asset_versions:
+            if asset_version not in instance.data[asset_versions_key]:
+                instance.data[asset_versions_key].append(asset_version)

From eb50cd369d6dd31f914bd16820f4a3c7078d6f28 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 31 Jan 2020 12:09:17 +0100
Subject: [PATCH 205/393] store assetversion instead of assettype

---
 pype/plugins/ftrack/publish/integrate_ftrack_api.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 922c8e119f..cd94b2a150 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -389,7 +389,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             six.reraise(tp, value, tb)
 
         if assetversion_entity not in used_asset_versions:
-            used_asset_versions.append(assettype_entity)
+            used_asset_versions.append(assetversion_entity)
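[Editor's note: the next patch reworks the note plugin around the
AssetVersion.create_note() helper from ftrack_api instead of creating Note
entities directly. A usage sketch of that flow (the session, comment text
and asset_versions list are assumed to already exist in the publish
context):

    user = session.query(
        "User where username is \"{}\"".format(session.api_user)
    ).first()

    for asset_version in asset_versions:
        asset_version.create_note("Publish comment.", author=user)

    session.commit()
]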
From e4ba53ac15358dbc648bfa6f8e4a507dfe513072 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 31 Jan 2020 12:13:20 +0100
Subject: [PATCH 206/393] integrate ftrack note adds notes to each integrated
 asset version

---
 .../ftrack/publish/integrate_ftrack_note.py | 26 +++++++++++++++----
 1 file changed, 21 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index e24c839be2..f7fb5addbb 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -6,29 +6,45 @@ import six
 class IntegrateFtrackNote(pyblish.api.InstancePlugin):
     """Create comments in Ftrack."""
 
-    order = pyblish.api.IntegratorOrder
-    label = "Integrate Comments to Ftrack."
+    # Must be after integrate asset new
+    order = pyblish.api.IntegratorOrder + 0.4999
+    label = "Integrate Ftrack note"
     families = ["ftrack"]
     optional = True
 
     def process(self, instance):
         comment = (instance.context.data.get("comment") or "").strip()
         if not comment:
+            self.log.info("Comment is not set.")
             return
 
+        self.log.debug("Comment is set to {}".format(comment))
+
         asset_versions_key = "ftrackIntegratedAssetVersions"
         asset_versions = instance.data.get(asset_versions_key)
         if not asset_versions:
+            self.log.info("There are no integrated AssetVersions")
            return
 
-        session = context.data["ftrackSession"]
+        session = instance.context.data["ftrackSession"]
+        user = session.query(
+            "User where username is \"{}\"".format(session.api_user)
+        ).first()
+        if not user:
+            self.log.warning(
+                "Was not able to query current User {}".format(
+                    session.api_user
+                )
+            )
 
-        note = session.create("Note", {"content": comment})
         for asset_version in asset_versions:
-            asset_version["notes"].extend(note)
+            asset_version.create_note(comment, author=user)
 
         try:
             session.commit()
+            self.log.debug("Note added to AssetVersion \"{}\"".format(
+                str(asset_version)
+            ))
         except Exception:
             tp, value, tb = sys.exc_info()
             session.rollback()
             six.reraise(tp, value, tb)

From 1afd2b40cef4446dc0fcfcab98aea0e732f970b4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 31 Jan 2020 12:13:39 +0100
Subject: [PATCH 207/393] comment is not overridden with an empty string if it
 is already set

---
 pype/plugins/global/publish/collect_comment.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_comment.py b/pype/plugins/global/publish/collect_comment.py
index 22970665a1..062142ace9 100644
--- a/pype/plugins/global/publish/collect_comment.py
+++ b/pype/plugins/global/publish/collect_comment.py
@@ -15,4 +15,5 @@ class CollectComment(pyblish.api.ContextPlugin):
     order = pyblish.api.CollectorOrder
 
     def process(self, context):
-        context.data["comment"] = ""
+        comment = (context.data.get("comment") or "").strip()
+        context.data["comment"] = comment

From 607ede0c0752fd6761401a08dba75c070c3a7875 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 31 Jan 2020 12:13:52 +0100
Subject: [PATCH 208/393] collect matchmove family filtering was fixed

---
 pype/plugins/standalonepublisher/publish/collect_matchmove.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/standalonepublisher/publish/collect_matchmove.py b/pype/plugins/standalonepublisher/publish/collect_matchmove.py
index b46efc1cf3..5d9e8ddfb4 100644
--- a/pype/plugins/standalonepublisher/publish/collect_matchmove.py
+++ b/pype/plugins/standalonepublisher/publish/collect_matchmove.py
@@ -21,7 +21,7 @@ class CollectMatchmovePublish(pyblish.api.InstancePlugin):
     label = "Collect Matchmove - SA Publish"
     order = pyblish.api.CollectorOrder
 
-    family = ["matchmove"]
+    families = ["matchmove"]
     hosts = ["standalonepublisher"]
 
     def process(self, instance):

From 35a7040930ee9a5013ffefb82f5165e5615a53da Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 31 Jan 2020 14:43:02 +0100
Subject: [PATCH 209/393] fix(nuke): new way of imprinting data

---
 pype/nuke/lib.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index db1a5919c3..1c5601f34f 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -374,7 +374,7 @@ def create_write_node(name, data, input=None, prenodes=None):
         now_node.setInput(0, prev_node)
 
     # imprinting group node
-    GN = avalon.nuke.imprint(GN, data["avalon"])
+    avalon.nuke.imprint(GN, 
data["avalon"], tab="Pype") divider = nuke.Text_Knob('') GN.addKnob(divider) From e05356bb904e2cef66acbb9726461d714a4fc420 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 15:05:49 +0100 Subject: [PATCH 210/393] gracefully skip missing thumbnail path --- pype/plugins/global/publish/extract_jpeg.py | 10 +++++----- pype/plugins/global/publish/integrate_thumbnail.py | 6 ++++++ 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 00e8a6fedf..4978649ba2 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -6,7 +6,7 @@ import pype.api class ExtractJpegEXR(pyblish.api.InstancePlugin): - """Resolve any dependency issies + """Resolve any dependency issues This plug-in resolves any paths which, if not updated might break the published file. @@ -55,8 +55,8 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): filename = os.path.splitext(input_file)[0] if not filename.endswith('.'): filename += "." - jpegFile = filename + "jpg" - full_output_path = os.path.join(stagingdir, jpegFile) + jpeg_file = filename + "jpg" + full_output_path = os.path.join(stagingdir, jpeg_file) self.log.info("output {}".format(full_output_path)) @@ -87,9 +87,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): instance.data["representations"] = [] representation = { - 'name': 'jpg', + 'name': 'thumbnail', 'ext': 'jpg', - 'files': jpegFile, + 'files': jpeg_file, "stagingDir": stagingdir, "thumbnail": True, "tags": ['thumbnail'] diff --git a/pype/plugins/global/publish/integrate_thumbnail.py b/pype/plugins/global/publish/integrate_thumbnail.py index bf6c62155f..1c4399b386 100644 --- a/pype/plugins/global/publish/integrate_thumbnail.py +++ b/pype/plugins/global/publish/integrate_thumbnail.py @@ -19,6 +19,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): families = ["review"] def process(self, instance): + + if not os.environ.get("AVALON_THUMBNAIL_ROOT"): + self.log.info("AVALON_THUMBNAIL_ROOT is not set." 
+                          " Skipping thumbnail integration.")
+            return
+
         published_repres = instance.data.get("published_representations")
         if not published_repres:
             self.log.debug(

From 65fd98b1fad6a67a94d2393fa09487d02e9159fd Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 31 Jan 2020 17:39:16 +0100
Subject: [PATCH 211/393] unify loaders to a single reference loader

---
 pype/maya/__init__.py                    |  1 +
 pype/maya/lib.py                         | 17 +++++
 pype/plugins/maya/load/load_camera.py    | 62 ----------------
 pype/plugins/maya/load/load_fbx.py       | 54 --------------
 pype/plugins/maya/load/load_mayaascii.py | 66 ----------------
 pype/plugins/maya/load/load_reference.py | 42 ++++++++++-
 pype/plugins/maya/load/load_rig.py       | 95 ------------------------
 pype/plugins/maya/load/load_vrayproxy.py |  2 +-
 8 files changed, 58 insertions(+), 281 deletions(-)
 delete mode 100644 pype/plugins/maya/load/load_camera.py
 delete mode 100644 pype/plugins/maya/load/load_fbx.py
 delete mode 100644 pype/plugins/maya/load/load_mayaascii.py
 delete mode 100644 pype/plugins/maya/load/load_rig.py

diff --git a/pype/maya/__init__.py b/pype/maya/__init__.py
index b4dbc52bc8..f027893a0e 100644
--- a/pype/maya/__init__.py
+++ b/pype/maya/__init__.py
@@ -162,6 +162,7 @@ def on_open(_):
     # Validate FPS after update_task_from_path to
     # ensure it is using correct FPS for the asset
     lib.validate_fps()
+    lib.fix_incompatible_containers()
 
     if any_outdated():
         log.warning("Scene has outdated content.")
diff --git a/pype/maya/lib.py b/pype/maya/lib.py
index 0890d3863e..e1a72b5929 100644
--- a/pype/maya/lib.py
+++ b/pype/maya/lib.py
@@ -2318,6 +2318,23 @@ def get_attr_in_layer(attr, layer):
     return cmds.getAttr(attr)
 
 
+def fix_incompatible_containers():
+    """Fix containers that were loaded with now-removed loaders."""
+
+    host = avalon.api.registered_host()
+    for container in host.ls():
+        loader = container['loader']
+
+        print(container['loader'])
+
+        if loader in ["MayaAsciiLoader",
+                      "AbcLoader",
+                      "ModelLoader",
+                      "CameraLoader"]:
+            cmds.setAttr(container["objectName"] + ".loader",
+                         "ReferenceLoader", type="string")
+
+
 def _null(*args):
     pass
 
diff --git a/pype/plugins/maya/load/load_camera.py b/pype/plugins/maya/load/load_camera.py
deleted file mode 100644
index e9bf265b98..0000000000
--- a/pype/plugins/maya/load/load_camera.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import pype.maya.plugin
-import os
-from pypeapp import config
-
-
-class CameraLoader(pype.maya.plugin.ReferenceLoader):
-    """Specific loader of Alembic for the pype.camera family"""
-
-    families = ["camera"]
-    label = "Reference camera"
-    representations = ["abc", "ma"]
-    order = -10
-    icon = "code-fork"
-    color = "orange"
-
-    def process_reference(self, context, name, namespace, data):
-
-        import maya.cmds as cmds
-        # Get family type from the context
-
-        try:
-            family = context["representation"]["context"]["family"]
-        except ValueError:
-            family = "camera"
-
-        cmds.loadPlugin("AbcImport.mll", quiet=True)
-        groupName = "{}:{}".format(namespace, name)
-        nodes = cmds.file(self.fname,
-                          namespace=namespace,
-                          sharedReferenceFile=False,
-                          groupReference=True,
-                          groupName="{}:{}".format(namespace, name),
-                          reference=True,
-                          returnNewNodes=True)
-
-        cameras = cmds.ls(nodes, type="camera")
-
-        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
-        colors = presets['plugins']['maya']['load']['colors']
-
-        c = colors.get(family)
-        if c is not None:
-            cmds.setAttr(groupName + ".useOutlinerColor", 1)
-            cmds.setAttr(groupName + ".outlinerColor",
-                         c[0], c[1], c[2])
-
-        # Check the Maya version, lockTransform has been 
introduced since - # Maya 2016.5 Ext 2 - version = int(cmds.about(version=True)) - if version >= 2016: - for camera in cameras: - cmds.camera(camera, edit=True, lockTransform=True) - else: - self.log.warning("This version of Maya does not support locking of" - " transforms of cameras.") - - self[:] = nodes - - return nodes - - def switch(self, container, representation): - self.update(container, representation) diff --git a/pype/plugins/maya/load/load_fbx.py b/pype/plugins/maya/load/load_fbx.py deleted file mode 100644 index 14df300c3c..0000000000 --- a/pype/plugins/maya/load/load_fbx.py +++ /dev/null @@ -1,54 +0,0 @@ -import pype.maya.plugin -import os -from pypeapp import config - - -class FBXLoader(pype.maya.plugin.ReferenceLoader): - """Load the FBX""" - - families = ["fbx"] - representations = ["fbx"] - - label = "Reference FBX" - order = -10 - icon = "code-fork" - color = "orange" - - def process_reference(self, context, name, namespace, data): - - import maya.cmds as cmds - from avalon import maya - - try: - family = context["representation"]["context"]["family"] - except ValueError: - family = "fbx" - - # Ensure FBX plug-in is loaded - cmds.loadPlugin("fbxmaya", quiet=True) - - with maya.maintained_selection(): - nodes = cmds.file(self.fname, - namespace=namespace, - reference=True, - returnNewNodes=True, - groupReference=True, - groupName="{}:{}".format(namespace, name)) - - groupName = "{}:{}".format(namespace, name) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: - cmds.setAttr(groupName + ".useOutlinerColor", 1) - cmds.setAttr(groupName + ".outlinerColor", - c[0], c[1], c[2]) - - self[:] = nodes - - return nodes - - def switch(self, container, representation): - self.update(container, representation) diff --git a/pype/plugins/maya/load/load_mayaascii.py b/pype/plugins/maya/load/load_mayaascii.py deleted file mode 100644 index ab7b2daffb..0000000000 --- a/pype/plugins/maya/load/load_mayaascii.py +++ /dev/null @@ -1,66 +0,0 @@ -import pype.maya.plugin -from pypeapp import config -import os - - -class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader): - """Load the model""" - - families = [] - representations = ["ma"] - - label = "Reference Maya Ascii" - order = -10 - icon = "code-fork" - color = "orange" - - def process_reference(self, context, name, namespace, data): - - import maya.cmds as cmds - from avalon import maya - - try: - family = context["representation"]["context"]["family"] - except ValueError: - family = "model" - - with maya.maintained_selection(): - nodes = cmds.file(self.fname, - namespace=namespace, - reference=True, - returnNewNodes=True, - groupReference=True, - groupName="{}:{}".format(namespace, name)) - - self[:] = nodes - groupName = "{}:{}".format(namespace, name) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: - cmds.setAttr(groupName + ".useOutlinerColor", 1) - cmds.setAttr(groupName + ".outlinerColor", - c[0], c[1], c[2]) - cmds.setAttr(groupName + ".displayHandle", 1) - # get bounding box - bbox = cmds.exactWorldBoundingBox(groupName) - # get pivot position on world space - pivot = cmds.xform(groupName, q=True, sp=True, ws=True) - # center of bounding box - cx = (bbox[0] + bbox[3]) / 2 - cy = (bbox[1] + bbox[4]) / 2 - cz = (bbox[2] + bbox[5]) / 2 - # add pivot position to calculate offset - cx = cx + 
pivot[0] - cy = cy + pivot[1] - cz = cz + pivot[2] - # set selection handle offset to center of bounding box - cmds.setAttr(groupName + ".selectHandleX", cx) - cmds.setAttr(groupName + ".selectHandleY", cy) - cmds.setAttr(groupName + ".selectHandleZ", cz) - return nodes - - def switch(self, container, representation): - self.update(container, representation) diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index 91f946b728..5104ac26f8 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -1,4 +1,6 @@ import pype.maya.plugin +from avalon import api, maya +from maya import cmds import os from pypeapp import config @@ -11,8 +13,10 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): "animation", "mayaAscii", "setdress", - "layout"] - representations = ["ma", "abc"] + "layout", + "camera", + "rig"] + representations = ["ma", "abc", "fbx"] tool_names = ["loader"] label = "Reference" @@ -42,7 +46,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): reference=True, returnNewNodes=True) - namespace = cmds.referenceQuery(nodes[0], namespace=True) + # namespace = cmds.referenceQuery(nodes[0], namespace=True) shapes = cmds.ls(nodes, shapes=True, long=True) @@ -92,7 +96,39 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): cmds.setAttr(groupName + ".selectHandleY", cy) cmds.setAttr(groupName + ".selectHandleZ", cz) + if data.get("post_process", True): + if family == "rig": + self._post_process_rig(name, namespace, context, data) + return newNodes def switch(self, container, representation): self.update(container, representation) + + def _post_process_rig(self, name, namespace, context, data): + + output = next((node for node in self if + node.endswith("out_SET")), None) + controls = next((node for node in self if + node.endswith("controls_SET")), None) + + assert output, "No out_SET in rig, this is a bug." + assert controls, "No controls_SET in rig, this is a bug." + + # Find the roots amongst the loaded nodes + roots = cmds.ls(self[:], assemblies=True, long=True) + assert roots, "No root nodes in rig, this is a bug." + + asset = api.Session["AVALON_ASSET"] + dependency = str(context["representation"]["_id"]) + + self.log.info("Creating subset: {}".format(namespace)) + + # Create the animation instance + with maya.maintained_selection(): + cmds.select([output, controls] + roots, noExpand=True) + api.create(name=namespace, + asset=asset, + family="animation", + options={"useSelection": True}, + data={"dependencies": dependency}) diff --git a/pype/plugins/maya/load/load_rig.py b/pype/plugins/maya/load/load_rig.py deleted file mode 100644 index fc6e666ac6..0000000000 --- a/pype/plugins/maya/load/load_rig.py +++ /dev/null @@ -1,95 +0,0 @@ -from maya import cmds - -import pype.maya.plugin -from avalon import api, maya -import os -from pypeapp import config - - -class RigLoader(pype.maya.plugin.ReferenceLoader): - """Specific loader for rigs - - This automatically creates an instance for animators upon load. 
- - """ - - families = ["rig"] - representations = ["ma"] - - label = "Reference rig" - order = -10 - icon = "code-fork" - color = "orange" - - def process_reference(self, context, name, namespace, data): - - try: - family = context["representation"]["context"]["family"] - except ValueError: - family = "rig" - - groupName = "{}:{}".format(namespace, name) - nodes = cmds.file(self.fname, - namespace=namespace, - reference=True, - returnNewNodes=True, - groupReference=True, - groupName=groupName) - - cmds.xform(groupName, pivots=(0, 0, 0)) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - - c = colors.get(family) - if c is not None: - cmds.setAttr(groupName + ".useOutlinerColor", 1) - cmds.setAttr(groupName + ".outlinerColor", - c[0], c[1], c[2]) - - shapes = cmds.ls(nodes, shapes=True, long=True) - print(shapes) - - newNodes = (list(set(nodes) - set(shapes))) - print(newNodes) - - # Store for post-process - self[:] = newNodes - if data.get("post_process", True): - self._post_process(name, namespace, context, data) - - return newNodes - - def _post_process(self, name, namespace, context, data): - - # TODO(marcus): We are hardcoding the name "out_SET" here. - # Better register this keyword, so that it can be used - # elsewhere, such as in the Integrator plug-in, - # without duplication. - - output = next((node for node in self if - node.endswith("out_SET")), None) - controls = next((node for node in self if - node.endswith("controls_SET")), None) - - assert output, "No out_SET in rig, this is a bug." - assert controls, "No controls_SET in rig, this is a bug." - - # Find the roots amongst the loaded nodes - roots = cmds.ls(self[:], assemblies=True, long=True) - assert roots, "No root nodes in rig, this is a bug." 
- - asset = api.Session["AVALON_ASSET"] - dependency = str(context["representation"]["_id"]) - - # Create the animation instance - with maya.maintained_selection(): - cmds.select([output, controls] + roots, noExpand=True) - api.create(name=namespace, - asset=asset, - family="animation", - options={"useSelection": True}, - data={"dependencies": dependency}) - - def switch(self, container, representation): - self.update(container, representation) diff --git a/pype/plugins/maya/load/load_vrayproxy.py b/pype/plugins/maya/load/load_vrayproxy.py index 9b07dc7e30..35d93676a0 100644 --- a/pype/plugins/maya/load/load_vrayproxy.py +++ b/pype/plugins/maya/load/load_vrayproxy.py @@ -117,7 +117,7 @@ class VRayProxyLoader(api.Loader): vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name)) mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name)) vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True, - name="{}_VRMM".format(name)) + name="{}_VRMM".format(name)) vray_mat_sg = cmds.sets(name="{}_VRSG".format(name), empty=True, renderable=True, From 2e7d4a94670b8eac96c2889ff5f4112da5dc0d37 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 17:45:31 +0100 Subject: [PATCH 212/393] more families in automatic scene upgrade --- pype/maya/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/maya/lib.py b/pype/maya/lib.py index e1a72b5929..ec39b3556e 100644 --- a/pype/maya/lib.py +++ b/pype/maya/lib.py @@ -2330,7 +2330,9 @@ def fix_incompatible_containers(): if loader in ["MayaAsciiLoader", "AbcLoader", "ModelLoader", - "CameraLoader"]: + "CameraLoader", + "RigLoader", + "FBXLoader"]: cmds.setAttr(container["objectName"] + ".loader", "ReferenceLoader", type="string") From 27774a0791cae0a8d9f82fdc69036b63770c8495 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 17:48:36 +0100 Subject: [PATCH 213/393] pep8 --- pype/plugins/maya/load/actions.py | 8 ++++---- pype/plugins/maya/load/load_reference.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pype/plugins/maya/load/actions.py b/pype/plugins/maya/load/actions.py index 9f6a5c4d34..77d18b0ee3 100644 --- a/pype/plugins/maya/load/actions.py +++ b/pype/plugins/maya/load/actions.py @@ -140,9 +140,9 @@ class ImportMayaLoader(api.Loader): message = "Are you sure you want import this" state = QtWidgets.QMessageBox.warning(None, - "Are you sure?", - message, - buttons=buttons, - defaultButton=accept) + "Are you sure?", + message, + buttons=buttons, + defaultButton=accept) return state == accept diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index 5104ac26f8..3de35451a6 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -58,7 +58,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): for node in newNodes: try: roots.add(pm.PyNode(node).getAllParents()[-2]) - except: + except: # noqa: E722 pass for root in roots: root.setParent(world=True) From 68f28ce4574c5234451e945d844fb84b042914d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 31 Jan 2020 18:57:16 +0100 Subject: [PATCH 214/393] fix(nuke): refactoring loaders --- pype/plugins/nuke/load/load_mov.py | 67 +++++++++++++------------ pype/plugins/nuke/load/load_sequence.py | 53 +++++++++---------- 2 files changed, 62 insertions(+), 58 deletions(-) diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py index e598839405..917abdf098 100644 --- 
a/pype/plugins/nuke/load/load_mov.py +++ b/pype/plugins/nuke/load/load_mov.py @@ -24,7 +24,7 @@ def preserve_trim(node): offset_frame = None if node['frame_mode'].value() == "start at": start_at_frame = node['frame'].value() - if node['frame_mode'].value() is "offset": + if node['frame_mode'].value() == "offset": offset_frame = node['frame'].value() try: @@ -85,30 +85,26 @@ class LoadMov(api.Loader): containerise, viewer_update_and_undo_stop ) - version = context['version'] version_data = version.get("data", {}) - orig_first = version_data.get("frameStart", None) - orig_last = version_data.get("frameEnd", None) + orig_first = version_data.get("frameStart") + orig_last = version_data.get("frameEnd") diff = orig_first - 1 - # set first to 1 + first = orig_first - diff last = orig_last - diff - handles = version_data.get("handles", None) - handle_start = version_data.get("handleStart", None) - handle_end = version_data.get("handleEnd", None) - repr_cont = context["representation"]["context"] - # fix handle start and end if none are available - if not handle_start and not handle_end: - handle_start = handles - handle_end = handles + handle_start = version_data.get("handleStart") + handle_end = version_data.get("handleEnd") + + colorspace = version_data.get("colorspace") + repr_cont = context["representation"]["context"] # create handles offset (only to last, because of mov) last += handle_start + handle_end # offset should be with handles so it match orig frame range - offset_frame = orig_first + handle_start + offset_frame = orig_first - handle_start # Fallback to asset name when namespace is None if namespace is None: @@ -122,10 +118,8 @@ class LoadMov(api.Loader): repr_cont["subset"], repr_cont["representation"]) - # Create the Loader with the filename path set with viewer_update_and_undo_stop(): - # TODO: it might be universal read to img/geo/camera read_node = nuke.createNode( "Read", "name {}".format(read_name) @@ -139,7 +133,11 @@ class LoadMov(api.Loader): read_node["last"].setValue(last) read_node["frame_mode"].setValue("start at") read_node["frame"].setValue(str(offset_frame)) - # add additional metadata from the version to imprint to Avalon knob + + if colorspace: + read_node["colorspace"].setValue(str(colorspace)) + + # add additional metadata from the version to imprint Avalon knob add_keys = [ "frameStart", "frameEnd", "handles", "source", "author", "fps", "version", "handleStart", "handleEnd" @@ -147,7 +145,7 @@ class LoadMov(api.Loader): data_imprint = {} for key in add_keys: - if key is 'version': + if key == 'version': data_imprint.update({ key: context["version"]['name'] }) @@ -186,10 +184,10 @@ class LoadMov(api.Loader): ) node = nuke.toNode(container['objectName']) - # TODO: prepare also for other Read img/geo/camera + assert node.Class() == "Read", "Must be Read" - file = api.get_representation_path(representation) + file = self.fname.replace("\\", "/") # Get start frame from version data version = io.find_one({ @@ -207,15 +205,17 @@ class LoadMov(api.Loader): version_data = version.get("data", {}) - orig_first = version_data.get("frameStart", None) - orig_last = version_data.get("frameEnd", None) + orig_first = version_data.get("frameStart") + orig_last = version_data.get("frameEnd") diff = orig_first - 1 + # set first to 1 first = orig_first - diff last = orig_last - diff handles = version_data.get("handles", 0) handle_start = version_data.get("handleStart", 0) handle_end = version_data.get("handleEnd", 0) + colorspace = version_data.get("colorspace") if first is None: 
log.warning("Missing start frame for updated version" @@ -231,11 +231,11 @@ class LoadMov(api.Loader): # create handles offset (only to last, because of mov) last += handle_start + handle_end # offset should be with handles so it match orig frame range - offset_frame = orig_first + handle_start + offset_frame = orig_first - handle_start # Update the loader's path whilst preserving some values with preserve_trim(node): - node["file"].setValue(file["path"]) + node["file"].setValue(file) log.info("__ node['file']: {}".format(node["file"].value())) # Set the global in to the start frame of the sequence @@ -247,19 +247,22 @@ class LoadMov(api.Loader): node["frame_mode"].setValue("start at") node["frame"].setValue(str(offset_frame)) + if colorspace: + node["colorspace"].setValue(str(colorspace)) + updated_dict = {} updated_dict.update({ "representation": str(representation["_id"]), - "frameStart": version_data.get("frameStart"), - "frameEnd": version_data.get("frameEnd"), - "version": version.get("name"), + "frameStart": str(first), + "frameEnd": str(last), + "version": str(version.get("name")), + "colorspace": version_data.get("colorspace"), "source": version_data.get("source"), - "handles": version_data.get("handles"), - "handleStart": version_data.get("handleStart"), - "handleEnd": version_data.get("handleEnd"), - "fps": version_data.get("fps"), + "handleStart": str(handle_start), + "handleEnd": str(handle_end), + "fps": str(version_data.get("fps")), "author": version_data.get("author"), - "outputDir": version_data.get("outputDir"), + "outputDir": version_data.get("outputDir") }) # change color of node diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 76599c3351..67d1f75dc8 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -24,7 +24,7 @@ def preserve_trim(node): offset_frame = None if node['frame_mode'].value() == "start at": start_at_frame = node['frame'].value() - if node['frame_mode'].value() is "offset": + if node['frame_mode'].value() == "offset": offset_frame = node['frame'].value() try: @@ -93,7 +93,6 @@ class LoadSequence(api.Loader): self.first_frame = int(nuke.root()["first_frame"].getValue()) self.handle_start = version_data.get("handleStart", 0) - self.handle_start = version_data.get("handleStart", 0) self.handle_end = version_data.get("handleEnd", 0) first = version_data.get("frameStart", None) @@ -108,7 +107,10 @@ class LoadSequence(api.Loader): file = self.fname.replace("\\", "/") - log.info("file: {}\n".format(self.fname)) + if "#" not in file: + frame = repr_cont.get("frame") + padding = len(frame) + file = file.replace(frame, "#"*padding) repr_cont = context["representation"]["context"] read_name = "Read_{0}_{1}_{2}".format( @@ -116,11 +118,6 @@ class LoadSequence(api.Loader): repr_cont["subset"], repr_cont["representation"]) - if "#" not in file: - frame = repr_cont.get("frame") - padding = len(frame) - file = file.replace(frame, "#"*padding) - # Create the Loader with the filename path set with viewer_update_and_undo_stop(): # TODO: it might be universal read to img/geo/camera @@ -130,8 +127,8 @@ class LoadSequence(api.Loader): r["file"].setValue(file) # Set colorspace defined in version data - colorspace = context["version"]["data"].get("colorspace", None) - if colorspace is not None: + colorspace = context["version"]["data"].get("colorspace") + if colorspace: r["colorspace"].setValue(str(colorspace)) loader_shift(r, first, relative=True) @@ -140,14 +137,14 @@ class 
LoadSequence(api.Loader): r["origlast"].setValue(int(last)) r["last"].setValue(int(last)) - # add additional metadata from the version to imprint to Avalon knob + # add additional metadata from the version to imprint Avalon knob add_keys = ["frameStart", "frameEnd", "source", "colorspace", "author", "fps", "version", "handleStart", "handleEnd"] data_imprint = {} for k in add_keys: - if k is 'version': + if k == 'version': data_imprint.update({k: context["version"]['name']}) else: data_imprint.update( @@ -179,7 +176,7 @@ class LoadSequence(api.Loader): rtn["after"].setValue("continue") rtn["input.first_lock"].setValue(True) rtn["input.first"].setValue( - self.handle_start + self.first_frame + self.handle_start + self.first_frame ) if time_warp_nodes != []: @@ -210,16 +207,20 @@ class LoadSequence(api.Loader): """ from avalon.nuke import ( - ls_img_sequence, update_container ) node = nuke.toNode(container['objectName']) - # TODO: prepare also for other Read img/geo/camera + assert node.Class() == "Read", "Must be Read" - path = api.get_representation_path(representation) - file = ls_img_sequence(path) + repr_cont = representation["context"] + file = self.fname.replace("\\", "/") + + if "#" not in file: + frame = repr_cont.get("frame") + padding = len(frame) + file = file.replace(frame, "#"*padding) # Get start frame from version data version = io.find_one({ @@ -241,8 +242,8 @@ class LoadSequence(api.Loader): self.handle_start = version_data.get("handleStart", 0) self.handle_end = version_data.get("handleEnd", 0) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) + first = version_data.get("frameStart") + last = version_data.get("frameEnd") if first is None: log.warning("Missing start frame for updated version" @@ -255,7 +256,7 @@ class LoadSequence(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): - node["file"].setValue(file["path"]) + node["file"].setValue(file) log.info("__ node['file']: {}".format(node["file"].value())) # Set the global in to the start frame of the sequence @@ -268,14 +269,14 @@ class LoadSequence(api.Loader): updated_dict = {} updated_dict.update({ "representation": str(representation["_id"]), - "frameStart": version_data.get("frameStart"), - "frameEnd": version_data.get("frameEnd"), - "version": version.get("name"), + "frameStart": str(first), + "frameEnd": str(last), + "version": str(version.get("name")), "colorspace": version_data.get("colorspace"), "source": version_data.get("source"), - "handleStart": version_data.get("handleStart"), - "handleEnd": version_data.get("handleEnd"), - "fps": version_data.get("fps"), + "handleStart": str(self.handle_start), + "handleEnd": str(self.handle_end), + "fps": str(version_data.get("fps")), "author": version_data.get("author"), "outputDir": version_data.get("outputDir"), }) From c2124b212da8954e5cf97c619805e2f901bbce48 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 19:54:05 +0100 Subject: [PATCH 215/393] add schemas --- schema/application-1.0.json | 68 ++++++++++++++++ schema/asset-1.0.json | 35 ++++++++ schema/asset-2.0.json | 55 +++++++++++++ schema/asset-3.0.json | 55 +++++++++++++ schema/config-1.0.json | 86 ++++++++++++++++++++ schema/container-1.0.json | 100 +++++++++++++++++++++++ schema/container-2.0.json | 59 ++++++++++++++ schema/inventory-1.0.json | 10 +++ schema/project-2.0.json | 86 ++++++++++++++++++++ schema/representation-1.0.json | 28 +++++++ schema/representation-2.0.json | 78 ++++++++++++++++++ 
schema/session-1.0.json | 143 +++++++++++++++++++++++++++++++++ schema/session-2.0.json | 142 ++++++++++++++++++++++++++++++++ schema/shaders-1.0.json | 32 ++++++++ schema/subset-1.0.json | 35 ++++++++ schema/subset-2.0.json | 51 ++++++++++++ schema/subset-3.0.json | 62 ++++++++++++++ schema/thumbnail-1.0.json | 42 ++++++++++ schema/version-1.0.json | 50 ++++++++++++ schema/version-2.0.json | 92 +++++++++++++++++++++ schema/version-3.0.json | 84 +++++++++++++++++++ 21 files changed, 1393 insertions(+) create mode 100644 schema/application-1.0.json create mode 100644 schema/asset-1.0.json create mode 100644 schema/asset-2.0.json create mode 100644 schema/asset-3.0.json create mode 100644 schema/config-1.0.json create mode 100644 schema/container-1.0.json create mode 100644 schema/container-2.0.json create mode 100644 schema/inventory-1.0.json create mode 100644 schema/project-2.0.json create mode 100644 schema/representation-1.0.json create mode 100644 schema/representation-2.0.json create mode 100644 schema/session-1.0.json create mode 100644 schema/session-2.0.json create mode 100644 schema/shaders-1.0.json create mode 100644 schema/subset-1.0.json create mode 100644 schema/subset-2.0.json create mode 100644 schema/subset-3.0.json create mode 100644 schema/thumbnail-1.0.json create mode 100644 schema/version-1.0.json create mode 100644 schema/version-2.0.json create mode 100644 schema/version-3.0.json diff --git a/schema/application-1.0.json b/schema/application-1.0.json new file mode 100644 index 0000000000..e2418037c6 --- /dev/null +++ b/schema/application-1.0.json @@ -0,0 +1,68 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:application-1.0", + "description": "An application definition.", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "label", + "application_dir", + "executable" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string" + }, + "label": { + "description": "Nice name of application.", + "type": "string" + }, + "application_dir": { + "description": "Name of directory used for application resources.", + "type": "string" + }, + "executable": { + "description": "Name of callable executable, this is called to launch the application", + "type": "string" + }, + "description": { + "description": "Description of application.", + "type": "string" + }, + "environment": { + "description": "Key/value pairs for environment variables related to this application. 
Supports lists for paths, such as PYTHONPATH.", + "type": "object", + "items": { + "oneOf": [ + {"type": "string"}, + {"type": "array", "items": {"type": "string"}} + ] + } + }, + "default_dirs": { + "type": "array", + "items": { + "type": "string" + } + }, + "copy": { + "type": "object", + "patternProperties": { + "^.*$": { + "anyOf": [ + {"type": "string"}, + {"type": "null"} + ] + } + }, + "additionalProperties": false + } + } +} diff --git a/schema/asset-1.0.json b/schema/asset-1.0.json new file mode 100644 index 0000000000..6f3665c628 --- /dev/null +++ b/schema/asset-1.0.json @@ -0,0 +1,35 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:asset-1.0", + "description": "A unit of data", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "name", + "subsets" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string" + }, + "name": { + "description": "Name of directory", + "type": "string" + }, + "subsets": { + "type": "array", + "items": { + "$ref": "subset.json" + } + } + }, + + "definitions": {} +} \ No newline at end of file diff --git a/schema/asset-2.0.json b/schema/asset-2.0.json new file mode 100644 index 0000000000..066cb33498 --- /dev/null +++ b/schema/asset-2.0.json @@ -0,0 +1,55 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:asset-2.0", + "description": "A unit of data", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "name", + "silo", + "data" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string", + "enum": ["avalon-core:asset-2.0"], + "example": "avalon-core:asset-2.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["asset"], + "example": "asset" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Name of asset", + "type": "string", + "pattern": "^[a-zA-Z0-9_.]*$", + "example": "Bruce" + }, + "silo": { + "description": "Group or container of asset", + "type": "string", + "example": "assets" + }, + "data": { + "description": "Document metadata", + "type": "object", + "example": {"key": "value"} + } + }, + + "definitions": {} +} diff --git a/schema/asset-3.0.json b/schema/asset-3.0.json new file mode 100644 index 0000000000..a3a22e917b --- /dev/null +++ b/schema/asset-3.0.json @@ -0,0 +1,55 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:asset-3.0", + "description": "A unit of data", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "name", + "data" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string", + "enum": ["avalon-core:asset-3.0", "pype:asset-3.0"], + "example": "avalon-core:asset-3.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["asset"], + "example": "asset" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Name of asset", + "type": "string", + "pattern": "^[a-zA-Z0-9_.]*$", + "example": "Bruce" + }, + "silo": { + "description": "Group or container of asset", + "type": "string", + "pattern": "^[a-zA-Z0-9_.]*$", + "example": "assets" + }, + "data": { + "description": "Document metadata", + "type": "object", 
+ "example": {"key": "value"} + } + }, + + "definitions": {} +} diff --git a/schema/config-1.0.json b/schema/config-1.0.json new file mode 100644 index 0000000000..b3c4362f41 --- /dev/null +++ b/schema/config-1.0.json @@ -0,0 +1,86 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:config-1.0", + "description": "A project configuration.", + + "type": "object", + + "additionalProperties": false, + "required": [ + "template", + "tasks", + "apps" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string" + }, + "template": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^.*$": { + "type": "string" + } + } + }, + "tasks": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "icon": {"type": "string"}, + "group": {"type": "string"}, + "label": {"type": "string"} + }, + "required": ["name"] + } + }, + "apps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "icon": {"type": "string"}, + "group": {"type": "string"}, + "label": {"type": "string"} + }, + "required": ["name"] + } + }, + "families": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "icon": {"type": "string"}, + "label": {"type": "string"}, + "hideFilter": {"type": "boolean"} + }, + "required": ["name"] + } + }, + "groups": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "icon": {"type": "string"}, + "color": {"type": "string"}, + "order": {"type": ["integer", "number"]} + }, + "required": ["name"] + } + }, + "copy": { + "type": "object" + } + } +} diff --git a/schema/container-1.0.json b/schema/container-1.0.json new file mode 100644 index 0000000000..d9e4e39f7f --- /dev/null +++ b/schema/container-1.0.json @@ -0,0 +1,100 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:container-1.0", + "description": "A loaded asset", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "id", + "objectName", + "name", + "author", + "loader", + "families", + "time", + "subset", + "asset", + "representation", + "version", + "silo", + "path", + "source" + ], + "properties": { + "id": { + "description": "Identifier for finding object in host", + "type": "string", + "enum": ["pyblish.mindbender.container"], + "example": "pyblish.mindbender.container" + }, + "objectName": { + "description": "Name of internal object, such as the objectSet in Maya.", + "type": "string", + "example": "Bruce_:rigDefault_CON" + }, + "name": { + "description": "Full name of application object", + "type": "string", + "example": "modelDefault" + }, + "author": { + "description": "Name of the author of the published version", + "type": "string", + "example": "Marcus Ottosson" + }, + "loader": { + "description": "Name of loader plug-in used to produce this container", + "type": "string", + "example": "ModelLoader" + }, + "families": { + "description": "Families associated with this subset", + "type": "string", + "example": "mindbender.model" + }, + "time": { + "description": "File-system safe, formatted time", + "type": "string", + "example": "20170329T131545Z" + }, + "subset": { + "description": "Name of source subset", + "type": "string", + "example": "modelDefault" + }, + "asset": { + "description": "Name of source asset", + "type": "string" , + "example": "Bruce" + }, + "representation": {
"description": "Name of source representation", + "type": "string" , + "example": ".ma" + }, + "version": { + "description": "Version number", + "type": "number", + "example": 12 + }, + "silo": { + "description": "Silo of parent asset", + "type": "string", + "example": "assets" + }, + "path": { + "description": "Absolute path on disk", + "type": "string", + "example": "{root}/assets/Bruce/publish/rigDefault/v002" + }, + "source": { + "description": "Absolute path to file from which this version was published", + "type": "string", + "example": "{root}/assets/Bruce/work/rigging/maya/scenes/rig_v001.ma" + } + } +} diff --git a/schema/container-2.0.json b/schema/container-2.0.json new file mode 100644 index 0000000000..7b84209ea0 --- /dev/null +++ b/schema/container-2.0.json @@ -0,0 +1,59 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:container-2.0", + "description": "A loaded asset", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "id", + "objectName", + "name", + "namespace", + "loader", + "representation" + ], + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string", + "enum": ["avalon-core:container-2.0", "pype:container-2.0"], + "example": "pype:container-2.0" + }, + "id": { + "description": "Identifier for finding object in host", + "type": "string", + "enum": ["pyblish.avalon.container"], + "example": "pyblish.avalon.container" + }, + "objectName": { + "description": "Name of internal object, such as the objectSet in Maya.", + "type": "string", + "example": "Bruce_:rigDefault_CON" + }, + "loader": { + "description": "Name of loader plug-in used to produce this container", + "type": "string", + "example": "ModelLoader" + }, + "name": { + "description": "Internal object name of container in application", + "type": "string", + "example": "modelDefault_01" + }, + "namespace": { + "description": "Internal namespace of container in application", + "type": "string", + "example": "Bruce_" + }, + "representation": { + "description": "Unique id of representation in database", + "type": "string", + "example": "59523f355f8c1b5f6c5e8348" + } + } +} \ No newline at end of file diff --git a/schema/inventory-1.0.json b/schema/inventory-1.0.json new file mode 100644 index 0000000000..888ba7945a --- /dev/null +++ b/schema/inventory-1.0.json @@ -0,0 +1,10 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:config-1.0", + "description": "A project configuration.", + + "type": "object", + + "additionalProperties": true +} diff --git a/schema/project-2.0.json b/schema/project-2.0.json new file mode 100644 index 0000000000..ad0e460f4d --- /dev/null +++ b/schema/project-2.0.json @@ -0,0 +1,86 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:project-2.0", + "description": "A unit of data", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "name", + "data", + "config" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string", + "enum": ["avalon-core:project-2.0", "pype:project-2.0"], + "example": "avalon-core:project-2.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["project"], + "example": "project" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Name of directory", + "type": "string", + 
"pattern": "^[a-zA-Z0-9_.]*$", + "example": "hulk" + }, + "data": { + "description": "Document metadata", + "type": "object", + "example": { + "fps": 24, + "width": 1920, + "height": 1080 + } + }, + "config": { + "type": "object", + "description": "Document metadata", + "example": { + "schema": "pype:config-1.0", + "apps": [ + { + "name": "maya2016", + "label": "Autodesk Maya 2016" + }, + { + "name": "nuke10", + "label": "The Foundry Nuke 10.0" + } + ], + "tasks": [ + {"name": "model"}, + {"name": "render"}, + {"name": "animate"}, + {"name": "rig"}, + {"name": "lookdev"}, + {"name": "layout"} + ], + "template": { + "work": + "{root}/{project}/{silo}/{asset}/work/{task}/{app}", + "publish": + "{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}" + } + }, + "$ref": "config-1.0.json" + } + }, + + "definitions": {} +} diff --git a/schema/representation-1.0.json b/schema/representation-1.0.json new file mode 100644 index 0000000000..10ae72928e --- /dev/null +++ b/schema/representation-1.0.json @@ -0,0 +1,28 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:representation-1.0", + "description": "The inverse of an instance", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "format", + "path" + ], + + "properties": { + "schema": {"type": "string"}, + "format": { + "description": "File extension, including '.'", + "type": "string" + }, + "path": { + "description": "Unformatted path to version.", + "type": "string" + } + } +} diff --git a/schema/representation-2.0.json b/schema/representation-2.0.json new file mode 100644 index 0000000000..e12dea8564 --- /dev/null +++ b/schema/representation-2.0.json @@ -0,0 +1,78 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:representation-2.0", + "description": "The inverse of an instance", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "parent", + "name", + "data" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string", + "enum": ["avalon-core:representation-2.0", "pype:representation-2.0"], + "example": "pype:representation-2.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["representation"], + "example": "representation" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Name of representation", + "type": "string", + "pattern": "^[a-zA-Z0-9_.]*$", + "example": "abc" + }, + "data": { + "description": "Document metadata", + "type": "object", + "example": { + "label": "Alembic" + } + }, + "dependencies": { + "description": "Other representation that this representation depends on", + "type": "array", + "items": {"type": "string"}, + "example": [ + "592d547a5f8c1b388093c145" + ] + }, + "context": { + "description": "Summary of the context to which this representation belong.", + "type": "object", + "properties": { + "project": {"type": "object"}, + "asset": {"type": "string"}, + "silo": {"type": ["string", "null"]}, + "subset": {"type": "string"}, + "version": {"type": "number"}, + "representation": {"type": "string"} + }, + "example": { + "project": "hulk", + "asset": "Bruce", + "silo": "assets", + "subset": "rigDefault", + "version": 12, + "representation": "ma" + } + } + } +} diff --git a/schema/session-1.0.json b/schema/session-1.0.json new file mode 100644 index 
0000000000..2b201f9c61 --- /dev/null +++ b/schema/session-1.0.json @@ -0,0 +1,143 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:session-1.0", + "description": "The Avalon environment", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "AVALON_PROJECTS", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_SILO", + "AVALON_CONFIG" + ], + + "properties": { + "AVALON_PROJECTS": { + "description": "Absolute path to root of project directories", + "type": "string", + "example": "/nas/projects" + }, + "AVALON_PROJECT": { + "description": "Name of project", + "type": "string", + "pattern": "^\\w*$", + "example": "Hulk" + }, + "AVALON_ASSET": { + "description": "Name of asset", + "type": "string", + "pattern": "^\\w*$", + "example": "Bruce" + }, + "AVALON_SILO": { + "description": "Name of asset group or container", + "type": "string", + "pattern": "^\\w*$", + "example": "assets" + }, + "AVALON_TASK": { + "description": "Name of task", + "type": "string", + "pattern": "^\\w*$", + "example": "modeling" + }, + "AVALON_CONFIG": { + "description": "Name of Avalon configuration", + "type": "string", + "pattern": "^\\w*$", + "example": "polly" + }, + "AVALON_APP": { + "description": "Name of application", + "type": "string", + "pattern": "^\\w*$", + "example": "maya2016" + }, + "AVALON_MONGO": { + "description": "Address to the asset database", + "type": "string", + "pattern": "^mongodb://[\\w/@:.]*$", + "example": "mongodb://localhost:27017", + "default": "mongodb://localhost:27017" + }, + "AVALON_DB": { + "description": "Name of database", + "type": "string", + "pattern": "^\\w*$", + "example": "avalon", + "default": "avalon" + }, + "AVALON_LABEL": { + "description": "Nice name of Avalon, used in e.g. graphical user interfaces", + "type": "string", + "example": "Mindbender", + "default": "Avalon" + }, + "AVALON_SENTRY": { + "description": "Address to Sentry", + "type": "string", + "pattern": "^http[\\w/@:.]*$", + "example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2", + "default": null + }, + "AVALON_DEADLINE": { + "description": "Address to Deadline", + "type": "string", + "pattern": "^http[\\w/@:.]*$", + "example": "http://192.168.99.101", + "default": null + }, + "AVALON_TIMEOUT": { + "description": "Wherever there is a need for a timeout, this is the default value.", + "type": "string", + "pattern": "^[0-9]*$", + "default": "1000", + "example": "1000" + }, + "AVALON_UPLOAD": { + "description": "Boolean of whether to upload published material to central asset repository", + "type": "string", + "default": null, + "example": "True" + }, + "AVALON_USERNAME": { + "description": "Generic username", + "type": "string", + "pattern": "^\\w*$", + "default": "avalon", + "example": "myself" + }, + "AVALON_PASSWORD": { + "description": "Generic password", + "type": "string", + "pattern": "^\\w*$", + "default": "secret", + "example": "abc123" + }, + "AVALON_INSTANCE_ID": { + "description": "Unique identifier for instances in a working file", + "type": "string", + "pattern": "^[\\w.]*$", + "default": "avalon.instance", + "example": "avalon.instance" + }, + "AVALON_CONTAINER_ID": { + "description": "Unique identifier for a loaded representation in a working file", + "type": "string", + "pattern": "^[\\w.]*$", + "default": "avalon.container", + "example": "avalon.container" + }, + "AVALON_DEBUG": { + "description": "Enable debugging mode. Some applications may use this for e.g. 
extended verbosity or mock plug-ins.", + "type": "string", + "default": null, + "example": "True" + } + } +} \ No newline at end of file diff --git a/schema/session-2.0.json b/schema/session-2.0.json new file mode 100644 index 0000000000..006a9e2dbf --- /dev/null +++ b/schema/session-2.0.json @@ -0,0 +1,142 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:session-2.0", + "description": "The Avalon environment", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "AVALON_PROJECTS", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_CONFIG" + ], + + "properties": { + "AVALON_PROJECTS": { + "description": "Absolute path to root of project directories", + "type": "string", + "example": "/nas/projects" + }, + "AVALON_PROJECT": { + "description": "Name of project", + "type": "string", + "pattern": "^\\w*$", + "example": "Hulk" + }, + "AVALON_ASSET": { + "description": "Name of asset", + "type": "string", + "pattern": "^\\w*$", + "example": "Bruce" + }, + "AVALON_SILO": { + "description": "Name of asset group or container", + "type": "string", + "pattern": "^\\w*$", + "example": "assets" + }, + "AVALON_TASK": { + "description": "Name of task", + "type": "string", + "pattern": "^\\w*$", + "example": "modeling" + }, + "AVALON_CONFIG": { + "description": "Name of Avalon configuration", + "type": "string", + "pattern": "^\\w*$", + "example": "polly" + }, + "AVALON_APP": { + "description": "Name of application", + "type": "string", + "pattern": "^\\w*$", + "example": "maya2016" + }, + "AVALON_MONGO": { + "description": "Address to the asset database", + "type": "string", + "pattern": "^mongodb://[\\w/@:.]*$", + "example": "mongodb://localhost:27017", + "default": "mongodb://localhost:27017" + }, + "AVALON_DB": { + "description": "Name of database", + "type": "string", + "pattern": "^\\w*$", + "example": "avalon", + "default": "avalon" + }, + "AVALON_LABEL": { + "description": "Nice name of Avalon, used in e.g. 
graphical user interfaces", + "type": "string", + "example": "Mindbender", + "default": "Avalon" + }, + "AVALON_SENTRY": { + "description": "Address to Sentry", + "type": "string", + "pattern": "^http[\\w/@:.]*$", + "example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2", + "default": null + }, + "AVALON_DEADLINE": { + "description": "Address to Deadline", + "type": "string", + "pattern": "^http[\\w/@:.]*$", + "example": "http://192.168.99.101", + "default": null + }, + "AVALON_TIMEOUT": { + "description": "Wherever there is a need for a timeout, this is the default value.", + "type": "string", + "pattern": "^[0-9]*$", + "default": "1000", + "example": "1000" + }, + "AVALON_UPLOAD": { + "description": "Boolean of whether to upload published material to central asset repository", + "type": "string", + "default": null, + "example": "True" + }, + "AVALON_USERNAME": { + "description": "Generic username", + "type": "string", + "pattern": "^\\w*$", + "default": "avalon", + "example": "myself" + }, + "AVALON_PASSWORD": { + "description": "Generic password", + "type": "string", + "pattern": "^\\w*$", + "default": "secret", + "example": "abc123" + }, + "AVALON_INSTANCE_ID": { + "description": "Unique identifier for instances in a working file", + "type": "string", + "pattern": "^[\\w.]*$", + "default": "avalon.instance", + "example": "avalon.instance" + }, + "AVALON_CONTAINER_ID": { + "description": "Unique identifier for a loaded representation in a working file", + "type": "string", + "pattern": "^[\\w.]*$", + "default": "avalon.container", + "example": "avalon.container" + }, + "AVALON_DEBUG": { + "description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.", + "type": "string", + "default": null, + "example": "True" + } + } +} diff --git a/schema/shaders-1.0.json b/schema/shaders-1.0.json new file mode 100644 index 0000000000..e66cc735e8 --- /dev/null +++ b/schema/shaders-1.0.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:shaders-1.0", + "description": "Relationships between shaders and Avalon IDs", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "shader" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string" + }, + "shader": { + "description": "Name of directory", + "type": "array", + "items": { + "type": "string", + "description": "Avalon ID and optional face indexes, e.g.
'f9520572-ac1d-11e6-b39e-3085a99791c9.f[5002:5185]'" + } + } + }, + + "definitions": {} +} diff --git a/schema/subset-1.0.json b/schema/subset-1.0.json new file mode 100644 index 0000000000..90ae0349fa --- /dev/null +++ b/schema/subset-1.0.json @@ -0,0 +1,35 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:subset-1.0", + "description": "A container of instances", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "name", + "versions" + ], + + "properties": { + "schema": { + "description": "Schema identifier for payload", + "type": "string" + }, + "name": { + "description": "Name of directory", + "type": "string" + }, + "versions": { + "type": "array", + "items": { + "$ref": "version.json" + } + } + }, + + "definitions": {} +} \ No newline at end of file diff --git a/schema/subset-2.0.json b/schema/subset-2.0.json new file mode 100644 index 0000000000..98f39c4f3e --- /dev/null +++ b/schema/subset-2.0.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:subset-2.0", + "description": "A container of instances", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "parent", + "name", + "data" + ], + + "properties": { + "schema": { + "description": "The schema associated with this document", + "type": "string", + "enum": ["pype:subset-2.0"], + "example": "pype:subset-2.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["subset"], + "example": "subset" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Name of directory", + "type": "string", + "pattern": "^[a-zA-Z0-9_.]*$", + "example": "shot01" + }, + "data": { + "type": "object", + "description": "Document metadata", + "example": { + "frameStart": 1000, + "frameEnd": 1201 + } + } + } +} diff --git a/schema/subset-3.0.json b/schema/subset-3.0.json new file mode 100644 index 0000000000..a0af9d340f --- /dev/null +++ b/schema/subset-3.0.json @@ -0,0 +1,62 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:subset-3.0", + "description": "A container of instances", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "parent", + "name", + "data" + ], + + "properties": { + "schema": { + "description": "The schema associated with this document", + "type": "string", + "enum": ["avalon-core:subset-3.0", "pype:subset-3.0"], + "example": "pype:subset-3.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["subset"], + "example": "subset" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Name of directory", + "type": "string", + "pattern": "^[a-zA-Z0-9_.]*$", + "example": "shot01" + }, + "data": { + "description": "Document metadata", + "type": "object", + "required": ["families"], + "properties": { + "families": { + "type": "array", + "items": {"type": "string"}, + "description": "One or more families associated with this subset" + } + }, + "example": { + "families" : [ + "avalon.camera" + ], + "frameStart": 1000, + "frameEnd": 1201 + } + } + } +} diff --git a/schema/thumbnail-1.0.json b/schema/thumbnail-1.0.json new file mode 100644 index 0000000000..96b540ab7e --- /dev/null +++ b/schema/thumbnail-1.0.json @@ -0,0 +1,42 @@ +{ + "$schema": 
"http://json-schema.org/draft-04/schema#", + + "title": "pype:thumbnail-1.0", + "description": "Entity with thumbnail data", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "data" + ], + + "properties": { + "schema": { + "description": "The schema associated with this document", + "type": "string", + "enum": ["pype:thumbnail-1.0"], + "example": "pype:thumbnail-1.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["thumbnail"], + "example": "thumbnail" + }, + "data": { + "description": "Thumbnail data", + "type": "object", + "example": { + "binary_data": "Binary({byte data of image})", + "template": "{thumbnail_root}/{project[name]}/{_id}{ext}}", + "template_data": { + "ext": ".jpg" + } + } + } + } +} diff --git a/schema/version-1.0.json b/schema/version-1.0.json new file mode 100644 index 0000000000..c784a25175 --- /dev/null +++ b/schema/version-1.0.json @@ -0,0 +1,50 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:version-1.0", + "description": "An individual version", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "version", + "path", + "time", + "author", + "source", + "representations" + ], + + "properties": { + "schema": {"type": "string"}, + "representations": { + "type": "array", + "items": { + "$ref": "representation.json" + } + }, + "time": { + "description": "ISO formatted, file-system compatible time", + "type": "string" + }, + "author": { + "description": "User logged on to the machine at time of publish", + "type": "string" + }, + "version": { + "description": "Number of this version", + "type": "number" + }, + "path": { + "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001", + "type": "string" + }, + "source": { + "description": "Original file from which this version was made.", + "type": "string" + } + } +} diff --git a/schema/version-2.0.json b/schema/version-2.0.json new file mode 100644 index 0000000000..5bb4a56f96 --- /dev/null +++ b/schema/version-2.0.json @@ -0,0 +1,92 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:version-2.0", + "description": "An individual version", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "parent", + "name", + "data" + ], + + "properties": { + "schema": { + "description": "The schema associated with this document", + "type": "string", + "enum": ["pype:version-2.0"], + "example": "pype:version-2.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["version"], + "example": "version" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Number of version", + "type": "number", + "example": 12 + }, + "locations": { + "description": "Where on the planet this version can be found.", + "type": "array", + "items": {"type": "string"}, + "example": ["data.avalon.com"] + }, + "data": { + "description": "Document metadata", + "type": "object", + "required": ["families", "author", "source", "time"], + "properties": { + "time": { + "description": "ISO formatted, file-system compatible time", + "type": "string" + }, + "timeFormat": { + "description": "ISO format of time", + "type": "string" + }, + "author": { + "description": "User logged on to the machine at time of publish", + "type": "string" + }, + "version": { + "description": "Number of 
this version", + "type": "number" + }, + "path": { + "description": "Unformatted path, e.g. '{root}/assets/Bruce/publish/lookdevDefault/v001", + "type": "string" + }, + "source": { + "description": "Original file from which this version was made.", + "type": "string" + }, + "families": { + "type": "array", + "items": {"type": "string"}, + "description": "One or more families associated with this version" + } + }, + "example": { + "source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma", + "author" : "marcus", + "families" : [ + "avalon.model" + ], + "time" : "20170510T090203Z" + } + } + } +} diff --git a/schema/version-3.0.json b/schema/version-3.0.json new file mode 100644 index 0000000000..808650da0d --- /dev/null +++ b/schema/version-3.0.json @@ -0,0 +1,84 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + + "title": "pype:version-3.0", + "description": "An individual version", + + "type": "object", + + "additionalProperties": true, + + "required": [ + "schema", + "type", + "parent", + "name", + "data" + ], + + "properties": { + "schema": { + "description": "The schema associated with this document", + "type": "string", + "enum": ["avalon-core:version-3.0", "pype:version-3.0"], + "example": "pype:version-3.0" + }, + "type": { + "description": "The type of document", + "type": "string", + "enum": ["version"], + "example": "version" + }, + "parent": { + "description": "Unique identifier to parent document", + "example": "592c33475f8c1b064c4d1696" + }, + "name": { + "description": "Number of version", + "type": "number", + "example": 12 + }, + "locations": { + "description": "Where on the planet this version can be found.", + "type": "array", + "items": {"type": "string"}, + "example": ["data.avalon.com"] + }, + "data": { + "description": "Document metadata", + "type": "object", + "required": ["author", "source", "time"], + "properties": { + "time": { + "description": "ISO formatted, file-system compatible time", + "type": "string" + }, + "timeFormat": { + "description": "ISO format of time", + "type": "string" + }, + "author": { + "description": "User logged on to the machine at time of publish", + "type": "string" + }, + "version": { + "description": "Number of this version", + "type": "number" + }, + "path": { + "description": "Unformatted path, e.g. 
'{root}/assets/Bruce/publish/lookdevDefault/v001", + "type": "string" + }, + "source": { + "description": "Original file from which this version was made.", + "type": "string" + } + }, + "example": { + "source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma", + "author" : "marcus", + "time" : "20170510T090203Z" + } + } + } +} From ae59f724a1a429e72ee3c6a3238e6a320f780815 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 31 Jan 2020 20:07:40 +0100 Subject: [PATCH 216/393] clean(nk): removing unneeded files --- .../_load_unused/extract_write_next_render.py | 24 ---- pype/plugins/nuke/_load_unused/load_backdrop | 0 .../_publish_unused/collect_active_viewer.py | 14 --- .../nuke/_publish_unused/extract_frames.py | 22 ---- .../_publish_unused/extract_nuke_write.py | 116 ------------------ .../nuke/_publish_unused/extract_script.py | 40 ------ .../_publish_unused/integrate_staging_dir.py | 27 ---- .../publish_image_sequences.py | 98 --------------- .../_publish_unused/validate_active_viewer.py | 24 ---- .../_publish_unused/validate_version_match.py | 36 ------ .../validate_write_families.py | 59 --------- .../nukestudio/publish/validate_version.py | 79 ------------ 12 files changed, 539 deletions(-) delete mode 100644 pype/plugins/nuke/_load_unused/extract_write_next_render.py delete mode 100644 pype/plugins/nuke/_load_unused/load_backdrop delete mode 100644 pype/plugins/nuke/_publish_unused/collect_active_viewer.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_frames.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_nuke_write.py delete mode 100644 pype/plugins/nuke/_publish_unused/extract_script.py delete mode 100644 pype/plugins/nuke/_publish_unused/integrate_staging_dir.py delete mode 100644 pype/plugins/nuke/_publish_unused/publish_image_sequences.py delete mode 100644 pype/plugins/nuke/_publish_unused/validate_active_viewer.py delete mode 100644 pype/plugins/nuke/_publish_unused/validate_version_match.py delete mode 100644 pype/plugins/nuke/_publish_unused/validate_write_families.py delete mode 100644 pype/plugins/nukestudio/publish/validate_version.py diff --git a/pype/plugins/nuke/_load_unused/extract_write_next_render.py b/pype/plugins/nuke/_load_unused/extract_write_next_render.py deleted file mode 100644 index 40bfe59ec2..0000000000 --- a/pype/plugins/nuke/_load_unused/extract_write_next_render.py +++ /dev/null @@ -1,24 +0,0 @@ -import pyblish.api - - -class WriteToRender(pyblish.api.InstancePlugin): - """Swith Render knob on write instance to on, - so next time publish will be set to render - """ - - order = pyblish.api.ExtractorOrder + 0.1 - label = "Write to render next" - optional = True - hosts = ["nuke", "nukeassist"] - families = ["write"] - - def process(self, instance): - return - if [f for f in instance.data["families"] - if ".frames" in f]: - instance[0]["render"].setValue(True) - self.log.info("Swith write node render to `on`") - else: - # swith to - instance[0]["render"].setValue(False) - self.log.info("Swith write node render to `Off`") diff --git a/pype/plugins/nuke/_load_unused/load_backdrop b/pype/plugins/nuke/_load_unused/load_backdrop deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pype/plugins/nuke/_publish_unused/collect_active_viewer.py b/pype/plugins/nuke/_publish_unused/collect_active_viewer.py deleted file mode 100644 index 5a6cc02b88..0000000000 --- a/pype/plugins/nuke/_publish_unused/collect_active_viewer.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api -import nuke 
- - -class CollectActiveViewer(pyblish.api.ContextPlugin): - """Collect any active viewer from nodes - """ - - order = pyblish.api.CollectorOrder + 0.3 - label = "Collect Active Viewer" - hosts = ["nuke"] - - def process(self, context): - context.data["ActiveViewer"] = nuke.activeViewer() diff --git a/pype/plugins/nuke/_publish_unused/extract_frames.py b/pype/plugins/nuke/_publish_unused/extract_frames.py deleted file mode 100644 index b75f893802..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_frames.py +++ /dev/null @@ -1,22 +0,0 @@ -import pyblish - - -class ExtractFramesToIntegrate(pyblish.api.InstancePlugin): - """Extract rendered frames for integrator - """ - - order = pyblish.api.ExtractorOrder - label = "Extract rendered frames" - hosts = ["nuke"] - families = ["render"] - - def process(self, instance): - return - - # staging_dir = instance.data.get('stagingDir', None) - # output_dir = instance.data.get('outputDir', None) - # - # if not staging_dir: - # staging_dir = output_dir - # instance.data['stagingDir'] = staging_dir - # # instance.data['transfer'] = False diff --git a/pype/plugins/nuke/_publish_unused/extract_nuke_write.py b/pype/plugins/nuke/_publish_unused/extract_nuke_write.py deleted file mode 100644 index 155b5cf56d..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_nuke_write.py +++ /dev/null @@ -1,116 +0,0 @@ -import os - -import nuke -import pyblish.api - - -class Extract(pyblish.api.InstancePlugin): - """Super class for write and writegeo extractors.""" - - order = pyblish.api.ExtractorOrder - optional = True - label = "Extract Nuke [super]" - hosts = ["nuke"] - match = pyblish.api.Subset - - # targets = ["process.local"] - - def execute(self, instance): - # Get frame range - node = instance[0] - first_frame = nuke.root()["first_frame"].value() - last_frame = nuke.root()["last_frame"].value() - - if node["use_limit"].value(): - first_frame = node["first"].value() - last_frame = node["last"].value() - - # Render frames - nuke.execute(node.name(), int(first_frame), int(last_frame)) - - -class ExtractNukeWrite(Extract): - """ Extract output from write nodes.
""" - - families = ["write", "local"] - label = "Extract Write" - - def process(self, instance): - - self.execute(instance) - - # Validate output - for filename in list(instance.data["collection"]): - if not os.path.exists(filename): - instance.data["collection"].remove(filename) - self.log.warning("\"{0}\" didn't render.".format(filename)) - - -class ExtractNukeCache(Extract): - - label = "Cache" - families = ["cache", "local"] - - def process(self, instance): - - self.execute(instance) - - # Validate output - msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) - assert os.path.exists(instance.data["output_path"]), msg - - -class ExtractNukeCamera(Extract): - - label = "Camera" - families = ["camera", "local"] - - def process(self, instance): - - node = instance[0] - node["writeGeometries"].setValue(False) - node["writePointClouds"].setValue(False) - node["writeAxes"].setValue(False) - - file_path = node["file"].getValue() - node["file"].setValue(instance.data["output_path"]) - - self.execute(instance) - - node["writeGeometries"].setValue(True) - node["writePointClouds"].setValue(True) - node["writeAxes"].setValue(True) - - node["file"].setValue(file_path) - - # Validate output - msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) - assert os.path.exists(instance.data["output_path"]), msg - - -class ExtractNukeGeometry(Extract): - - label = "Geometry" - families = ["geometry", "local"] - - def process(self, instance): - - node = instance[0] - node["writeCameras"].setValue(False) - node["writePointClouds"].setValue(False) - node["writeAxes"].setValue(False) - - file_path = node["file"].getValue() - node["file"].setValue(instance.data["output_path"]) - - self.execute(instance) - - node["writeCameras"].setValue(True) - node["writePointClouds"].setValue(True) - node["writeAxes"].setValue(True) - - node["file"].setValue(file_path) - - # Validate output - msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) - assert os.path.exists(instance.data["output_path"]), msg diff --git a/pype/plugins/nuke/_publish_unused/extract_script.py b/pype/plugins/nuke/_publish_unused/extract_script.py deleted file mode 100644 index 7d55ea0da4..0000000000 --- a/pype/plugins/nuke/_publish_unused/extract_script.py +++ /dev/null @@ -1,40 +0,0 @@ - -import pyblish.api -import os -import pype -import shutil - - -class ExtractScript(pype.api.Extractor): - """Publish script - """ - label = 'Extract Script' - order = pyblish.api.ExtractorOrder - 0.05 - optional = True - hosts = ['nuke'] - families = ["workfile"] - - def process(self, instance): - self.log.debug("instance extracting: {}".format(instance.data)) - current_script = instance.context.data["currentFile"] - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = "{0}".format(instance.data["name"]) - path = os.path.join(stagingdir, filename) - - self.log.info("Performing extraction..") - shutil.copy(current_script, path) - - if "representations" not in instance.data: - instance.data["representations"] = list() - - representation = { - 'name': 'nk', - 'ext': '.nk', - 'files': filename, - "stagingDir": stagingdir, - } - instance.data["representations"].append(representation) - - self.log.info("Extracted instance '%s' to: %s" % (instance.name, path)) diff --git a/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py b/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py deleted file mode 100644 index e05c42ae50..0000000000 --- 
a/pype/plugins/nuke/_publish_unused/integrate_staging_dir.py +++ /dev/null @@ -1,27 +0,0 @@ -import pyblish.api -import shutil -import os - - -class CopyStagingDir(pyblish.api.InstancePlugin): - """Copy data rendered into temp local directory - """ - - order = pyblish.api.IntegratorOrder - 2 - label = "Copy data from temp dir" - hosts = ["nuke", "nukeassist"] - families = ["render.local"] - - def process(self, instance): - temp_dir = instance.data.get("stagingDir") - output_dir = instance.data.get("outputDir") - - # copy data to correct dir - if not os.path.exists(output_dir): - os.makedirs(output_dir) - self.log.info("output dir has been created") - - for f in os.listdir(temp_dir): - self.log.info("copy file to correct destination: {}".format(f)) - shutil.copy(os.path.join(temp_dir, os.path.basename(f)), - os.path.join(output_dir, os.path.basename(f))) diff --git a/pype/plugins/nuke/_publish_unused/publish_image_sequences.py b/pype/plugins/nuke/_publish_unused/publish_image_sequences.py deleted file mode 100644 index 34634dcc6b..0000000000 --- a/pype/plugins/nuke/_publish_unused/publish_image_sequences.py +++ /dev/null @@ -1,98 +0,0 @@ -import re -import os -import json -import subprocess - -import pyblish.api - -from pype.action import get_errored_plugins_from_data - - -def _get_script(): - """Get path to the image sequence script""" - - # todo: use a more elegant way to get the python script - - try: - from pype.fusion.scripts import publish_filesequence - except Exception: - raise RuntimeError("Expected module 'publish_imagesequence'" - "to be available") - - module_path = publish_filesequence.__file__ - if module_path.endswith(".pyc"): - module_path = module_path[:-len(".pyc")] + ".py" - - return module_path - - -class PublishImageSequence(pyblish.api.InstancePlugin): - """Publish the generated local image sequences.""" - - order = pyblish.api.IntegratorOrder - label = "Publish Rendered Image Sequence(s)" - hosts = ["fusion"] - families = ["saver.renderlocal"] - - def process(self, instance): - - # Skip this plug-in if the ExtractImageSequence failed - errored_plugins = get_errored_plugins_from_data(instance.context) - if any(plugin.__name__ == "FusionRenderLocal" for plugin in - errored_plugins): - raise RuntimeError("Fusion local render failed, " - "publishing images skipped.") - - subset = instance.data["subset"] - ext = instance.data["ext"] - - # Regex to match resulting renders - regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset), - ext=re.escape(ext)) - - # The instance has most of the information already stored - metadata = { - "regex": regex, - "frameStart": instance.context.data["frameStart"], - "frameEnd": instance.context.data["frameEnd"], - "families": ["imagesequence"], - } - - # Write metadata and store the path in the instance - output_directory = instance.data["outputDir"] - path = os.path.join(output_directory, - "{}_metadata.json".format(subset)) - with open(path, "w") as f: - json.dump(metadata, f) - - assert os.path.isfile(path), ("Stored path is not a file for %s" - % instance.data["name"]) - - # Suppress any subprocess console - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = subprocess.SW_HIDE - - process = subprocess.Popen(["python", _get_script(), - "--paths", path], - bufsize=1, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - startupinfo=startupinfo) - - while True: - output = process.stdout.readline() - # Break when there is no output or a return code has been 
given - if output == '' and process.poll() is not None: - process.stdout.close() - break - if output: - line = output.strip() - if line.startswith("ERROR"): - self.log.error(line) - else: - self.log.info(line) - - if process.returncode != 0: - raise RuntimeError("Process quit with non-zero " - "return code: {}".format(process.returncode)) diff --git a/pype/plugins/nuke/_publish_unused/validate_active_viewer.py b/pype/plugins/nuke/_publish_unused/validate_active_viewer.py deleted file mode 100644 index 618a7f1502..0000000000 --- a/pype/plugins/nuke/_publish_unused/validate_active_viewer.py +++ /dev/null @@ -1,24 +0,0 @@ -import pyblish.api -import nuke - - -class ValidateActiveViewer(pyblish.api.ContextPlugin): - """Validate presentse of the active viewer from nodes - """ - - order = pyblish.api.ValidatorOrder - label = "Validate Active Viewer" - hosts = ["nuke"] - - def process(self, context): - viewer_process_node = context.data.get("ViewerProcess") - - assert viewer_process_node, ( - "Missing active viewer process! Please click on output write node and push key number 1-9" - ) - active_viewer = context.data["ActiveViewer"] - active_input = active_viewer.activeInput() - - assert active_input is not None, ( - "Missing active viewer input! Please click on output write node and push key number 1-9" - ) diff --git a/pype/plugins/nuke/_publish_unused/validate_version_match.py b/pype/plugins/nuke/_publish_unused/validate_version_match.py deleted file mode 100644 index 1358d9a7b3..0000000000 --- a/pype/plugins/nuke/_publish_unused/validate_version_match.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import pyblish.api -import pype.utils - - - -@pyblish.api.log -class RepairNukeWriteNodeVersionAction(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - import pype.nuke.lib as nukelib - instances = pype.utils.filter_instances(context, plugin) - - for instance in instances: - node = instance[0] - render_path = nukelib.get_render_path(node) - self.log.info("render_path: {}".format(render_path)) - node['file'].setValue(render_path.replace("\\", "/")) - - -class ValidateVersionMatch(pyblish.api.InstancePlugin): - """Checks if write version matches workfile version""" - - label = "Validate Version Match" - order = pyblish.api.ValidatorOrder - actions = [RepairNukeWriteNodeVersionAction] - hosts = ["nuke"] - families = ['write'] - - def process(self, instance): - - assert instance.data['version'] == instance.context.data['version'], "\ - Version in write doesn't match version of the workfile" diff --git a/pype/plugins/nuke/_publish_unused/validate_write_families.py b/pype/plugins/nuke/_publish_unused/validate_write_families.py deleted file mode 100644 index 73f710867d..0000000000 --- a/pype/plugins/nuke/_publish_unused/validate_write_families.py +++ /dev/null @@ -1,59 +0,0 @@ - -import pyblish.api -import pype.api -import pype.nuke.actions - - -class RepairWriteFamiliesAction(pyblish.api.Action): - label = "Fix Write's render attributes" - on = "failed" - icon = "wrench" - - def process(self, instance, plugin): - self.log.info("instance {}".format(instance)) - instance["render"].setValue(True) - self.log.info("Rendering toggled ON") - - -@pyblish.api.log -class ValidateWriteFamilies(pyblish.api.InstancePlugin): - """ Validates write families. 
""" - - order = pyblish.api.ValidatorOrder - label = "Valitade writes families" - hosts = ["nuke"] - families = ["write"] - actions = [pype.nuke.actions.SelectInvalidAction, pype.api.RepairAction] - - @staticmethod - def get_invalid(self, instance): - if not [f for f in instance.data["families"] - if ".frames" in f]: - return - - if not instance.data.get('files'): - return (instance) - - def process(self, instance): - self.log.debug('instance.data["files"]: {}'.format(instance.data['files'])) - - invalid = self.get_invalid(self, instance) - - if invalid: - raise ValueError(str("`{}`: Switch `Render` on! " - "> {}".format(__name__, invalid))) - - # if any(".frames" in f for f in instance.data["families"]): - # if not instance.data["files"]: - # raise ValueError("instance {} is set to publish frames\ - # but no files were collected, render the frames first or\ - # check 'render' checkbox onthe no to 'ON'".format(instance))) - # - # - # self.log.info("Checked correct writes families") - - @classmethod - def repair(cls, instance): - cls.log.info("instance {}".format(instance)) - instance[0]["render"].setValue(True) - cls.log.info("Rendering toggled ON") diff --git a/pype/plugins/nukestudio/publish/validate_version.py b/pype/plugins/nukestudio/publish/validate_version.py deleted file mode 100644 index ebb8f357f8..0000000000 --- a/pype/plugins/nukestudio/publish/validate_version.py +++ /dev/null @@ -1,79 +0,0 @@ -import pyblish -from avalon import io -from pype.action import get_errored_instances_from_context -import pype.api as pype - - -@pyblish.api.log -class RepairNukestudioVersionUp(pyblish.api.Action): - label = "Version Up Workfile" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - - errored_instances = get_errored_instances_from_context(context) - - # Apply pyblish logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(errored_instances, plugin) - - if instances: - project = context.data["activeProject"] - path = context.data.get("currentFile") - - new_path = pype.version_up(path) - - if project: - project.saveAs(new_path) - - self.log.info("Project workfile version was fixed") - - -class ValidateVersion(pyblish.api.InstancePlugin): - """Validate clip's versions. - - """ - - order = pyblish.api.ValidatorOrder - families = ["plate"] - label = "Validate Version" - actions = [RepairNukestudioVersionUp] - hosts = ["nukestudio"] - - def process(self, instance): - version = int(instance.data.get("version", 0)) - asset_name = instance.data.get("asset", None) - subset_name = instance.data.get("subset", None) - - assert version, "The file is missing version string! example: filename_v001.hrox `{}`" - - self.log.debug("Collected version: `{0}`".format(version)) - - found_v = 0 - try: - io.install() - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": subset_name - }) - - version_db = io.find_one({ - 'type': 'version', - 'parent': subset["_id"], - 'name': version - }) or {} - found_v = version_db.get("name", 0) - self.log.debug("Found version: `{0}`".format(found_v)) - except Exception as e: - self.log.debug("Problem to get data from database for asset `{0}` subset `{1}`. 
Error: `{2}`".format(asset_name, subset_name, e)) - - assert (found_v != version), "Version must not be the same as in database `{0}`, Versions file: `{1}`, db: `{2}`".format(asset_name, version, found_v) From fb710bbef45ebebfcac50b9cf9dc69651e0ab9e1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 31 Jan 2020 18:58:51 +0100 Subject: [PATCH 217/393] fix(nuke): remove commented code --- pype/nuke/__init__.py | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index dfd61f4b39..f1f87e40c8 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -33,42 +33,6 @@ if os.getenv("PYBLISH_GUI", None): pyblish.register_gui(os.getenv("PYBLISH_GUI", None)) -# class NukeHandler(logging.Handler): -# ''' -# Nuke Handler - emits logs into nuke's script editor. -# warning will emit nuke.warning() -# critical and fatal would popup msg dialog to alert of the error. -# ''' -# -# def __init__(self): -# logging.Handler.__init__(self) -# self.set_name("Pype_Nuke_Handler") -# -# def emit(self, record): -# # Formated message: -# msg = self.format(record) -# -# if record.levelname.lower() in [ -# # "warning", -# "critical", -# "fatal", -# "error" -# ]: -# msg = self.format(record) -# nuke.message(msg) -# -# -# '''Adding Nuke Logging Handler''' -# log.info([handler.get_name() for handler in logging.root.handlers[:]]) -# nuke_handler = NukeHandler() -# if nuke_handler.get_name() \ -# not in [handler.get_name() -# for handler in logging.root.handlers[:]]: -# logging.getLogger().addHandler(nuke_handler) -# logging.getLogger().setLevel(logging.INFO) -# log.info([handler.get_name() for handler in logging.root.handlers[:]]) - - def reload_config(): """Attempt to reload pipeline at run-time. 
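The next patch swaps padding derived from the frameEnd string length for the explicit padding stored in the project anatomy templates. A minimal sketch of why the old derivation breaks, using a plain dict in place of the real Anatomy object (all names below are illustrative assumptions, not the actual pype API):

    # Sketch: frame padding should come from the project convention,
    # not from the number of digits in the last frame.
    repre = {"frameStart": 998, "frameEnd": 999}

    # Old behaviour: padding inferred from the end frame's string length.
    old_padding = len(str(repre["frameEnd"]))             # 3

    # New behaviour, mirroring anatomy.templates["render"]["padding"] in
    # the patch below; the dict stands in for the Anatomy object.
    anatomy_templates = {"render": {"padding": 4}}
    new_padding = anatomy_templates["render"]["padding"]  # 4

    print(str(repre["frameStart"]).zfill(old_padding))    # 998  - misses files written as 0998
    print(str(repre["frameStart"]).zfill(new_padding))    # 0998 - matches files on disk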
From 8e9b44817359f02f30eecbd775bc500e3f492cbe Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 21:06:16 +0100 Subject: [PATCH 218/393] get padding from anatomy --- pype/plugins/global/publish/integrate_new.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 2b8aa5b0fc..739cbc30ad 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -326,8 +326,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): index_frame_start = None if repre.get("frameStart"): - frame_start_padding = len(str( - repre.get("frameEnd"))) + frame_start_padding = anatomy.templates["render"]["padding"] index_frame_start = int(repre.get("frameStart")) # exception for slate workflow From 5bceb794413381d7c1a1378de87cb9efaade4041 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 31 Jan 2020 22:48:29 +0100 Subject: [PATCH 219/393] hotfix submit publish job --- pype/plugins/global/publish/submit_publish_job.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index faf4aaef93..a9fa8febd4 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -256,6 +256,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ # Get a submission job data = instance.data.copy() + if hasattr(instance, "_log"): + data['_log'] = instance._log render_job = data.pop("deadlineSubmissionJob", None) submission_type = "deadline" From dc497be92313401bc243127d6cf47c7c230b58a8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Feb 2020 00:16:20 +0100 Subject: [PATCH 220/393] feat(nuke): anatomy templates and version data family to render --- pype/nuke/lib.py | 2 +- pype/plugins/nuke/publish/collect_writes.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index db1a5919c3..3b3586cfe6 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1269,7 +1269,7 @@ class ExporterReview: 'ext': self.ext, 'files': self.file, "stagingDir": self.staging_dir, - "anatomy_template": "publish", + "anatomy_template": "render", "tags": [self.name.replace("_", "-")] + add_tags } diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 3eff527d47..8e86e12c2a 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -97,7 +97,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameEnd": last_frame - handle_end, "version": int(instance.data['version']), "colorspace": node["colorspace"].value(), - "families": [instance.data["family"]], + "families": ["render"], "subset": instance.data["subset"], "fps": instance.context.data["fps"] } From de93ef648ea3d99f0a390113da7f59ba317b73a9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Feb 2020 00:17:03 +0100 Subject: [PATCH 221/393] feat(nuke): adding render2d for review --- pype/plugins/nuke/publish/collect_review.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/collect_review.py b/pype/plugins/nuke/publish/collect_review.py index 7e7cbedd6c..e41b55bbbd 100644 --- a/pype/plugins/nuke/publish/collect_review.py +++ b/pype/plugins/nuke/publish/collect_review.py @@ -1,12 +1,12 @@ import pyblish.api import nuke + class 
CollectReview(pyblish.api.InstancePlugin):
    """Collect review instance from rendered frames
    """

    order = pyblish.api.CollectorOrder + 0.3
-    family = "review"
    label = "Collect Review"
    hosts = ["nuke"]
    families = ["render", "render.local", "render.farm"]
@@ -25,4 +25,7 @@ class CollectReview(pyblish.api.InstancePlugin):

        instance.data["families"].append("review")
        instance.data['families'].append('ftrack')
+        instance.data["families"].append("render2d")
+
        self.log.info("Review collected: `{}`".format(instance))
+        self.log.debug("__ instance.data: `{}`".format(instance.data))

From 3a09ff2059e5fbc08f1b04a3777badffcf2c2590 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 1 Feb 2020 00:17:38 +0100
Subject: [PATCH 222/393] feat(global): rename burnin plugin

---
 pype/plugins/global/publish/extract_burnin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index a3df47518c..f0e4b70d19 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -16,7 +16,7 @@ class ExtractBurnin(pype.api.Extractor):
        `tags` including `burnin`
    """

-    label = "Quicktime with burnins"
+    label = "Extract burnins"
    order = pyblish.api.ExtractorOrder + 0.03
    families = ["review", "burnin"]
    hosts = ["nuke", "maya", "shell"]

From c25a70d72ddcc785beaeb10de1ca0167193bd3a2 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 1 Feb 2020 00:18:13 +0100
Subject: [PATCH 223/393] fix(global): ftrack attributes validator failing

---
 .../plugins/global/publish/validate_custom_ftrack_attributes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/validate_custom_ftrack_attributes.py b/pype/plugins/global/publish/validate_custom_ftrack_attributes.py
index 2386b359e4..1e8b239b33 100644
--- a/pype/plugins/global/publish/validate_custom_ftrack_attributes.py
+++ b/pype/plugins/global/publish/validate_custom_ftrack_attributes.py
@@ -47,7 +47,7 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):

        host = pyblish.api.current_host()
        to_check = context.data["presets"].get(
-            host, {}).get("ftrack_attributes")
+            host, {}).get("ftrack_custom_attributes")
        if not to_check:
            self.log.warning("ftrack_attributes preset not found")
            return

From e0d288cdef324078efa8a19a221f664ffb61736b Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 1 Feb 2020 00:18:58 +0100
Subject: [PATCH 224/393] fix(nuke): some nodes are failing due to disable knob

---
 pype/plugins/nuke/publish/collect_instances.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index 5b123ed7b9..cbbef70e4a 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -28,12 +28,15 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
        self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
        for node in nuke.allNodes():
+            if node.Class() in ["Viewer", "Dot"]:
+                continue
+
            try:
                if node["disable"].value():
                    continue
            except Exception as E:
                self.log.warning(E)
-            
+
            # get data from avalon knob
            self.log.debug("node[name]: {}".format(node['name'].value()))

From f3bc7258df212d34d7882bdb6bf2ad662c87739d Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 1 Feb 2020 00:19:43 +0100
Subject: [PATCH 225/393] clean(nuke): commented code

---
 pype/plugins/nuke/publish/validate_script.py | 6 ------
 1 file changed, 6 
deletions(-) diff --git a/pype/plugins/nuke/publish/validate_script.py b/pype/plugins/nuke/publish/validate_script.py index 307e3ade59..f7dd84d714 100644 --- a/pype/plugins/nuke/publish/validate_script.py +++ b/pype/plugins/nuke/publish/validate_script.py @@ -15,12 +15,6 @@ class ValidateScript(pyblish.api.InstancePlugin): def process(self, instance): ctx_data = instance.context.data asset_name = ctx_data["asset"] - - # asset = io.find_one({ - # "type": "asset", - # "name": asset_name - # }) - asset = lib.get_asset(asset_name) asset_data = asset["data"] From f177185a73b0d260098932e0547843460bd6504f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Feb 2020 00:58:26 +0100 Subject: [PATCH 226/393] fix(nuke): moving `render2d` to more global level --- pype/plugins/nuke/publish/collect_review.py | 1 - pype/plugins/nuke/publish/collect_writes.py | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/collect_review.py b/pype/plugins/nuke/publish/collect_review.py index e41b55bbbd..c95c94541d 100644 --- a/pype/plugins/nuke/publish/collect_review.py +++ b/pype/plugins/nuke/publish/collect_review.py @@ -25,7 +25,6 @@ class CollectReview(pyblish.api.InstancePlugin): instance.data["families"].append("review") instance.data['families'].append('ftrack') - instance.data["families"].append("render2d") self.log.info("Review collected: `{}`".format(instance)) self.log.debug("__ instance.data: `{}`".format(instance.data)) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 8e86e12c2a..bf1c6a4b66 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -14,6 +14,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): families = ["write"] def process(self, instance): + # adding 2d focused rendering + instance.data["families"].append("render2d") node = None for x in instance: From ac53d4345f7ed5e7ef1aa26fd9910c6b4ffae901 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 1 Feb 2020 01:02:14 +0100 Subject: [PATCH 227/393] fix(nuke): loader failing --- pype/plugins/nuke/load/load_sequence.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 22caa9d6b0..9f3d09186c 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -109,12 +109,12 @@ class LoadSequence(api.Loader): file = self.fname.replace("\\", "/") + repr_cont = context["representation"]["context"] if "#" not in file: frame = repr_cont.get("frame") padding = len(frame) file = file.replace(frame, "#"*padding) - repr_cont = context["representation"]["context"] read_name = "Read_{0}_{1}_{2}".format( repr_cont["asset"], repr_cont["subset"], From 1f6d63d6a540409d57326bf09df7ed4ecef7c2d2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:07:08 +0100 Subject: [PATCH 228/393] added collect datetime plugin --- .../global/publish/collect_datetime_data.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 pype/plugins/global/publish/collect_datetime_data.py diff --git a/pype/plugins/global/publish/collect_datetime_data.py b/pype/plugins/global/publish/collect_datetime_data.py new file mode 100644 index 0000000000..f04f924e18 --- /dev/null +++ b/pype/plugins/global/publish/collect_datetime_data.py @@ -0,0 +1,18 @@ +"""These data *must* be collected only once during publishing process. 
+ +Provides: + context -> datetimeData +""" + +import pyblish.api +from pypeapp import config + + +class CollectDateTimeData(pyblish.api.ContextPlugin): + order = pyblish.api.CollectorOrder + label = "Collect DateTime data" + + def process(self, context): + key = "datetimeData" + if key not in context.data: + context.data[key] = config.get_datetime_data() From 7bdb43852253826dc6b8c52e1fae88321740b574 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:08:09 +0100 Subject: [PATCH 229/393] extract burnin uses datetime data from context --- pype/plugins/global/publish/extract_burnin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 4efe02ca3a..85757c101b 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -45,7 +45,8 @@ class ExtractBurnin(pype.api.Extractor): } # Add datetime data to preparation data - prep_data.update(config.get_datetime_data()) + datetime_data = isntance.context.data.get("datetimeData") or {} + prep_data.update(datetime_data) slate_frame_start = frame_start slate_frame_end = frame_end From eacc1ff0bee5d71d8c48c01eca917f5609f82a90 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:21:34 +0100 Subject: [PATCH 230/393] integrate new uses anatomy's used_values --- pype/plugins/global/publish/integrate_new.py | 37 ++++++++++---------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9729716a50..81b37d0555 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -80,6 +80,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "assembly" ] exclude_families = ["clip"] + repre_context_stable_keys = [ + "project", "asset", "task", "subset", "version", "representation", + "family", "hierarchy", "task", "username" + ] def process(self, instance): @@ -288,7 +292,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): anatomy.templates[template_name]["path"]) sequence_repre = isinstance(files, list) - + repre_context = None if sequence_repre: src_collections, remainder = clique.assemble(files) self.log.debug( @@ -311,10 +315,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_data["representation"] = repre['ext'] template_data["frame"] = src_padding_exp % i anatomy_filled = anatomy.format(template_data) + template_filled = anatomy_filled[template_name]["path"] + if repre_context is None: + repre_context = template_filled.used_values test_dest_files.append( - os.path.normpath( - anatomy_filled[template_name]["path"]) + os.path.normpath(template_filled) ) self.log.debug( @@ -394,14 +400,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) - dst = os.path.normpath( - anatomy_filled[template_name]["path"]).replace("..", ".") + template_filled = anatomy_filled[template_name]["path"] + repre_context = template_filled.used_values + dst = os.path.normpath(template_filled).replace("..", ".") instance.data["transfers"].append([src, dst]) repre['published_path'] = self.unc_convert(dst) self.log.debug("__ dst: {}".format(dst)) + for key in self.repre_context_stable_keys: + value = template_data.get(key) + if not value: + continue + repre_context[key] = template_data[key] + representation = { "_id": io.ObjectId(), 
"schema": "pype:representation-2.0", @@ -413,19 +426,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Imprint shortcut to context # for performance reasons. - "context": { - "root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - 'task': TASK, - "silo": asset.get('silo'), - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": repre['ext'] - } + "context": repre_context } if repre.get("outputName"): From ebb5b3b84974fc49e5400eff390321eede39dc9d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:23:39 +0100 Subject: [PATCH 231/393] datetime data are added to template data --- pype/plugins/global/publish/integrate_new.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 81b37d0555..c192804833 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -272,6 +272,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "version": int(version["name"]), "hierarchy": hierarchy} + + # Add datetime data to template data + datetime_data = context.data.get("datetimeData") or {} + template_data.update(datetime_data) + resolution_width = repre.get("resolutionWidth") resolution_height = repre.get("resolutionHeight") fps = instance.data.get("fps") From 13de5280887dbb07f5172c5b45c05e945473682f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:25:42 +0100 Subject: [PATCH 232/393] removed line --- pype/plugins/global/publish/integrate_new.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c192804833..bba93ed658 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -272,7 +272,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "version": int(version["name"]), "hierarchy": hierarchy} - # Add datetime data to template data datetime_data = context.data.get("datetimeData") or {} template_data.update(datetime_data) From 908f9887952c610a8707d91953f92b5ce849a6f6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:26:50 +0100 Subject: [PATCH 233/393] datetime data added to collect templates --- pype/plugins/global/publish/collect_templates.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 383944e293..0c272a6044 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -90,6 +90,10 @@ class CollectTemplates(pyblish.api.InstancePlugin): "hierarchy": hierarchy.replace("\\", "/"), "representation": "TEMP")} + # Add datetime data to template data + datetime_data = context.data.get("datetimeData") or {} + template_data.update(datetime_data) + resolution_width = instance.data.get("resolutionWidth") resolution_height = instance.data.get("resolutionHeight") fps = instance.data.get("fps") From 40aa0c2f5b97f32fdeed4837d4ca50c1e3ec59bd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 18:27:01 +0100 Subject: [PATCH 234/393] typo fix in collect templates --- pype/plugins/global/publish/collect_templates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_templates.py 
b/pype/plugins/global/publish/collect_templates.py
index 0c272a6044..46d2898875 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -88,7 +88,7 @@ class CollectTemplates(pyblish.api.InstancePlugin):
                         "subset": subset_name,
                         "version": version_number,
                         "hierarchy": hierarchy.replace("\\", "/"),
-                         "representation": "TEMP")}
+                         "representation": "TEMP"}

        # Add datetime data to template data
        datetime_data = context.data.get("datetimeData") or {}

From ed280250c4c39df742a52f29deaa0b237053b4ee Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 18:29:45 +0100
Subject: [PATCH 235/393] fixed variable typo

---
 pype/plugins/global/publish/extract_burnin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 85757c101b..e50ba891d2 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -45,7 +45,7 @@ class ExtractBurnin(pype.api.Extractor):
        }

        # Add datetime data to preparation data
-        datetime_data = isntance.context.data.get("datetimeData") or {}
+        datetime_data = instance.context.data.get("datetimeData") or {}
        prep_data.update(datetime_data)

        slate_frame_start = frame_start

From fd42c048b36e1c527dc5ccd9d55f16e80b21850e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 18:36:37 +0100
Subject: [PATCH 236/393] fix collect templates: context is not defined

---
 pype/plugins/global/publish/collect_templates.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index 46d2898875..f065b3c246 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -91,7 +91,7 @@ class CollectTemplates(pyblish.api.InstancePlugin):
                         "representation": "TEMP"}

        # Add datetime data to template data
-        datetime_data = context.data.get("datetimeData") or {}
+        datetime_data = instance.context.data.get("datetimeData") or {}
        template_data.update(datetime_data)

        resolution_width = instance.data.get("resolutionWidth")

From 75b6cdd1489c94ee41b2cf94d5aeb99dbe2eac9f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 18:58:29 +0100
Subject: [PATCH 237/393] renamed repre_context_stable_keys to db_representation_context_keys

---
 pype/plugins/global/publish/integrate_new.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index bba93ed658..7d95534897 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -80,7 +80,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        "assembly"
    ]
    exclude_families = ["clip"]
-    repre_context_stable_keys = [
+    db_representation_context_keys = [
        "project", "asset", "task", "subset", "version", "representation",
        "family", "hierarchy", "task", "username"
    ]
@@ -413,7 +413,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        repre['published_path'] = self.unc_convert(dst)
        self.log.debug("__ dst: {}".format(dst))

-        for key in self.repre_context_stable_keys:
+        for key in self.db_representation_context_keys:
            value = template_data.get(key)
            if not value:
                continue

From 30d598911d78c803b4d5a7316097f53cdadfce9f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 19:24:45 +0100
Subject: [PATCH 
238/393] removed old integrators --- pype/plugins/global/publish/integrate.py | 417 ----------------- .../publish/integrate_rendered_frames.py | 423 ------------------ 2 files changed, 840 deletions(-) delete mode 100644 pype/plugins/global/publish/integrate.py delete mode 100644 pype/plugins/global/publish/integrate_rendered_frames.py diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py deleted file mode 100644 index 87b9e1a9bd..0000000000 --- a/pype/plugins/global/publish/integrate.py +++ /dev/null @@ -1,417 +0,0 @@ -import os -import logging -import shutil - -import errno -import pyblish.api -from avalon import api, io -from avalon.vendor import filelink - - -log = logging.getLogger(__name__) - - -class IntegrateAsset(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. - """ - - label = "Integrate Asset" - order = pyblish.api.IntegratorOrder - families = [] - exclude_families = ["clip"] - - def process(self, instance): - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - self.register(instance) - - self.log.info("Integrating Asset in to the database ...") - if instance.data.get('transfer', True): - self.integrate(instance) - - def register(self, instance): - # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - LOCATION = api.Session["AVALON_LOCATION"] - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - # Ensure at least one file is set up for transfer in staging dir. 
- files = instance.data.get("files", []) - assert files, "Instance has no files to transfer" - assert isinstance(files, (list, tuple)), ( - "Instance 'files' must be a list, got: {0}".format(files) - ) - - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.info("Verifying version from assumed destination") - - assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] - if assumed_version != next_version: - raise AttributeError("Assumed version 'v{0:03d}' does not match" - "next version in database " - "('v{1:03d}')".format(assumed_version, - next_version)) - - self.log.debug("Next version: v{0:03d}".format(next_version)) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. - # \|________| - # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({ - "type": 'asset', - "name": ASSET - })['data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - - # template_publish = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] - if 'transfers' not in instance.data: - instance.data['transfers'] = [] - - for files in instance.data["files"]: - - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # - - if isinstance(files, list): - collection = files - # Assert that each member has identical suffix - _, ext = os.path.splitext(collection[0]) - assert all(ext == os.path.splitext(name)[1] - for name in collection), ( - "Files had varying suffixes, this is a bug" - ) - - assert not any(os.path.isabs(name) for name in collection) - - template_data["representation"] = ext[1:] - - for fname in collection: - - src = os.path.join(stagingdir, fname) - anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled["publish"]["path"] - - instance.data["transfers"].append([src, dst]) - template = anatomy.templates["publish"]["path"] - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - fname = files - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - _, ext = os.path.splitext(fname) - - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - anatomy_filled = 
anatomy.format(template_data) - dst = anatomy_filled["publish"]["path"] - - instance.data["transfers"].append([src, dst]) - template = anatomy.templates["publish"]["path"] - - representation = { - "schema": "pype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {'path': dst, 'template': template}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. - "context": { - "root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] - } - } - - destination_list.append(dst) - instance.data['destination_list'] = destination_list - representations.append(representation) - - self.log.info("Registering {} items".format(len(representations))) - - io.insert_many(representations) - - def integrate(self, instance): - """Move the files - - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - """ - - transfers = instance.data.get("transfers", list()) - - for src, dest in transfers: - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - # Produce hardlinked copies - # Note: hardlink can only be produced between two files on the same - # server/disk and editing one of the two will edit both files at once. - # As such it is recommended to only make hardlinks between static files - # to ensure publishes remain safe and non-edited. - hardlinks = instance.data.get("hardlinks", list()) - for src, dest in hardlinks: - self.log.info("Hardlinking file .. {} -> {}".format(src, dest)) - self.hardlink_file(src, dest) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) - - def hardlink_file(self, src, dst): - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - filelink.create(src, dst, filelink.HARDLINK) - - def get_subset(self, asset, instance): - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"] - }) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." 
% subset_name) - - _id = io.insert_one({ - "schema": "avalon-core:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - - Returns: - dict: collection of data to create a version - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "avalon-core:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for the version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families - - self.log.debug("Registered root: {}".format(api.registered_root())) - # create relative source path for DB - try: - source = instance.data['source'] - except KeyError: - source = context.data["currentFile"] - - relative_path = os.path.relpath(source, api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - self.log.debug("Source: {}".format(source)) - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment"), - "machine": context.data.get("machine"), - "fps": context.data.get("fps")} - - # Include optional data if present in - optionals = [ - "frameStart", "frameEnd", "step", "handles", "sourceHashes" - ] - for key in optionals: - if key in instance.data: - version_data[key] = instance.data[key] - - return version_data diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py deleted file mode 100644 index 5819051146..0000000000 --- a/pype/plugins/global/publish/integrate_rendered_frames.py +++ /dev/null @@ -1,423 +0,0 @@ -import os -import logging -import shutil -import clique - -import errno -import pyblish.api -from avalon import api, io - - -log = logging.getLogger(__name__) - - -class IntegrateFrames(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. 
- """ - - label = "Integrate Frames" - order = pyblish.api.IntegratorOrder - families = ["imagesequence"] - - family_targets = [".frames", ".local", ".review", "imagesequence", "render", "source"] - exclude_families = ["clip"] - - def process(self, instance): - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return - - families = [f for f in instance.data["families"] - for search in self.family_targets - if search in f] - - if not families: - return - - self.register(instance) - - # self.log.info("Integrating Asset in to the database ...") - # self.log.info("instance.data: {}".format(instance.data)) - if instance.data.get('transfer', True): - self.integrate(instance) - - def register(self, instance): - - # Required environment variables - PROJECT = api.Session["AVALON_PROJECT"] - ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] - LOCATION = api.Session["AVALON_LOCATION"] - - context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # - stagingdir = instance.data.get("stagingDir") - assert stagingdir, ("Incomplete instance \"%s\": " - "Missing reference to staging area." % instance) - - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) - - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) - - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += latest_version["name"] - - self.log.info("Verifying version from assumed destination") - - assumed_data = instance.data["assumedTemplateData"] - assumed_version = assumed_data["version"] - if assumed_version != next_version: - raise AttributeError("Assumed version 'v{0:03d}' does not match" - "next version in database " - "('v{1:03d}')".format(assumed_version, - next_version)) - - if instance.data.get('version'): - next_version = int(instance.data.get('version')) - - self.log.debug("Next version: v{0:03d}".format(next_version)) - - version_data = self.create_version_data(context, instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) - - self.log.debug("Creating version ...") - version_id = io.insert_one(version).inserted_id - - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. 
- # \|________| - # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({"type": 'asset', "name": ASSET})[ - 'data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset.get('silo'), - "task": api.Session["AVALON_TASK"], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - - # template_publish = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - # Find the representations to transfer amongst the files - # Each should be a single representation (as such, a single extension) - representations = [] - destination_list = [] - - if 'transfers' not in instance.data: - instance.data['transfers'] = [] - - for files in instance.data["files"]: - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # - if isinstance(files, list): - - src_collections, remainder = clique.assemble(files) - src_collection = src_collections[0] - # Assert that each member has identical suffix - src_head = src_collection.format("{head}") - src_tail = ext = src_collection.format("{tail}") - - test_dest_files = list() - for i in [1, 2]: - template_data["representation"] = src_tail[1:] - template_data["frame"] = src_collection.format( - "{padding}") % i - anatomy_filled = anatomy.format(template_data) - test_dest_files.append(anatomy_filled["render"]["path"]) - - dst_collections, remainder = clique.assemble(test_dest_files) - dst_collection = dst_collections[0] - dst_head = dst_collection.format("{head}") - dst_tail = dst_collection.format("{tail}") - - for i in src_collection.indexes: - src_padding = src_collection.format("{padding}") % i - src_file_name = "{0}{1}{2}".format( - src_head, src_padding, src_tail) - dst_padding = dst_collection.format("{padding}") % i - dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) - - src = os.path.join(stagingdir, src_file_name) - instance.data["transfers"].append([src, dst]) - - else: - # Single file - # _______ - # | |\ - # | | - # | | - # | | - # |_______| - # - - template_data.pop("frame", None) - - fname = files - - self.log.info("fname: {}".format(fname)) - - assert not os.path.isabs(fname), ( - "Given file name is a full path" - ) - _, ext = os.path.splitext(fname) - - template_data["representation"] = ext[1:] - - src = os.path.join(stagingdir, fname) - - anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled["render"]["path"] - - instance.data["transfers"].append([src, dst]) - - if ext[1:] not in ["jpeg", "jpg", "mov", "mp4", "wav"]: - template_data["frame"] = "#" * int(anatomy_filled["render"]["padding"]) - - anatomy_filled = anatomy.format(template_data) - path_to_save = anatomy_filled["render"]["path"] - template = anatomy.templates["render"]["path"] - - self.log.debug("path_to_save: {}".format(path_to_save)) - - representation = { - "schema": "pype:representation-2.0", - "type": "representation", - "parent": version_id, - "name": ext[1:], - "data": {'path': path_to_save, 'template': template}, - "dependencies": instance.data.get("dependencies", "").split(), - - # Imprint shortcut to context - # for performance reasons. 
- "context": { - "root": root, - "project": { - "name": PROJECT, - "code": project['data']['code'] - }, - "task": api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy, - "representation": ext[1:] - } - } - - destination_list.append(dst) - instance.data['destination_list'] = destination_list - representations.append(representation) - - self.log.info("Registering {} items".format(len(representations))) - io.insert_many(representations) - - def integrate(self, instance): - """Move the files - - Through `instance.data["transfers"]` - - Args: - instance: the instance to integrate - """ - - transfers = instance.data["transfers"] - - for src, dest in transfers: - src = os.path.normpath(src) - dest = os.path.normpath(dest) - if src in dest: - continue - - self.log.info("Copying file .. {} -> {}".format(src, dest)) - self.copy_file(src, dest) - - def copy_file(self, src, dst): - """ Copy given source to destination - - Arguments: - src (str): the source file which needs to be copied - dst (str): the destination of the sourc file - Returns: - None - """ - - dirname = os.path.dirname(dst) - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - self.log.critical("An unexpected error occurred.") - raise - - shutil.copy(src, dst) - - def get_subset(self, asset, instance): - - subset = io.find_one({ - "type": "subset", - "parent": asset["_id"], - "name": instance.data["subset"] - }) - - if subset is None: - subset_name = instance.data["subset"] - self.log.info("Subset '%s' not found, creating.." % subset_name) - - _id = io.insert_one({ - "schema": "pype:subset-2.0", - "type": "subset", - "name": subset_name, - "data": {}, - "parent": asset["_id"] - }).inserted_id - - subset = io.find_one({"_id": _id}) - - return subset - - def create_version(self, subset, version_number, locations, data=None): - """ Copy given source to destination - - Args: - subset (dict): the registered subset of the asset - version_number (int): the version number - locations (list): the currently registered locations - - Returns: - dict: collection of data to create a version - """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] - - return {"schema": "pype:version-2.0", - "type": "version", - "parent": subset["_id"], - "name": version_number, - "locations": version_locations, - "data": data} - - def create_version_data(self, context, instance): - """Create the data collection for the version - - Args: - context: the current context - instance: the current instance being published - - Returns: - dict: the required information with instance.data as key - """ - - families = [] - current_families = instance.data.get("families", list()) - instance_family = instance.data.get("family", None) - - if instance_family is not None: - families.append(instance_family) - families += current_families - - try: - source = instance.data['source'] - except KeyError: - source = context.data["currentFile"] - - relative_path = os.path.relpath(source, api.registered_root()) - source = os.path.join("{root}", relative_path).replace("\\", "/") - - version_data = {"families": families, - "time": context.data["time"], - "author": context.data["user"], - "source": source, - "comment": context.data.get("comment")} - - # Include optional data if present in - optionals = ["frameStart", "frameEnd", 
"step", - "handles", "colorspace", "fps", "outputDir"] - - for key in optionals: - if key in instance.data: - version_data[key] = instance.data.get(key, None) - - return version_data From 9bcdf7f72a96b9839aee1e9c49acb75475b55cc8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:25:33 +0100 Subject: [PATCH 239/393] added avalon entities collector --- .../global/publish/collect_avalon_entities.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 pype/plugins/global/publish/collect_avalon_entities.py diff --git a/pype/plugins/global/publish/collect_avalon_entities.py b/pype/plugins/global/publish/collect_avalon_entities.py new file mode 100644 index 0000000000..c256dffd52 --- /dev/null +++ b/pype/plugins/global/publish/collect_avalon_entities.py @@ -0,0 +1,46 @@ +"""Collect Anatomy and global anatomy data. + +Requires: + session -> AVALON_PROJECT, AVALON_ASSET + +Provides: + context -> projectEntity - project entity from database + context -> assetEntity - asset entity from database +""" + +from avalon import io, api +import pyblish.api + + +class CollectAvalonEntities(pyblish.api.ContextPlugin): + """Collect Anatomy into Context""" + + order = pyblish.api.CollectorOrder + label = "Collect Avalon Entities" + + def process(self, context): + project_name = api.Session["AVALON_PROJECT"] + asset_name = api.Session["AVALON_ASSET"] + + project_entity = io.find_one({ + "type": "project", + "name": project_name + }) + assert project_entity, ( + "Project '{0}' was not found." + ).format(project_name) + self.log.debug("Collected Project entity \"{}\"".format(project_entity)) + + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + assert asset_entity, ( + "No asset found by the name '{0}' in project '{1}'" + ).format(asset_name, project_name) + + self.log.debug("Collected Asset entity \"{}\"".format(asset_entity)) + + context.data["projectEntity"] = project_entity + context.data["assetEntity"] = asset_entity From a2d75afe7a8e78fd2481c18a095aa96b9382a9e7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:25:57 +0100 Subject: [PATCH 240/393] collect anatomy also collect global anatomy data --- .../plugins/global/publish/collect_anatomy.py | 67 +++++++++++++++++-- 1 file changed, 62 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/collect_anatomy.py b/pype/plugins/global/publish/collect_anatomy.py index 9412209850..0831c16d32 100644 --- a/pype/plugins/global/publish/collect_anatomy.py +++ b/pype/plugins/global/publish/collect_anatomy.py @@ -1,10 +1,24 @@ -""" +"""Collect Anatomy and global anatomy data. 
+ Requires: - None + session -> AVALON_TASK + projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder) + username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001) + datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder) + +Optional: + comment -> collect_comment *(pyblish.api.CollectorOrder) + intent -> collected in pyblish-lite + Provides: context -> anatomy (pypeapp.Anatomy) + context -> anatomyData """ +import os +import json + +from avalon import io, api, lib from pypeapp import Anatomy import pyblish.api @@ -12,9 +26,52 @@ import pyblish.api class CollectAnatomy(pyblish.api.ContextPlugin): """Collect Anatomy into Context""" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder + 0.002 label = "Collect Anatomy" def process(self, context): - context.data['anatomy'] = Anatomy() - self.log.info("Anatomy templates collected...") + root_path = api.registered_root() + task_name = api.Session["AVALON_TASK"] + + project_entity = context.data["projectEntity"] + asset_entity = context.data["assetEntity"] + + project_name = project_entity["name"] + + context.data["anatomy"] = Anatomy(project_name) + self.log.info( + "Anatomy object collected for project \"{}\".".format(project_name) + ) + + hierarchy_items = asset_entity["data"]["parents"] + hierarchy = "" + if hierarchy_items: + hierarchy = os.path.join(*hierarchy_items) + + context_data = { + "root": root_path, + "project": { + "name": project_name, + "code": project_entity["data"].get("code") + }, + "asset": asset_entity["name"], + "hierarchy": hierarchy.replace("\\", "/"), + "task": task_name, + + "username": context.data["user"] + } + + avalon_app_name = os.environ.get("AVALON_APP_NAME") + if avalon_app_name: + application_def = lib.get_application(avalon_app_name) + app_dir = application_def.get("application_dir") + if app_dir: + context_data["app"] = app_dir + + datetime_data = context.data.get("datetimeData") or {} + context_data.update(datetime_data) + + context.data["anatomyData"] = context_data + + self.log.info("Global anatomy Data collected") + self.log.debug(json.dumps(context_data, indent=4)) From 54f76e7f7f9ec884bdbbe915a5088e7aaf8e3e10 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:19 +0100 Subject: [PATCH 241/393] collect templates replaced with collect instance anatomy data --- .../publish/collect_instance_anatomy_data.py | 119 ++++++++++++++++++ .../global/publish/collect_templates.py | 117 ----------------- 2 files changed, 119 insertions(+), 117 deletions(-) create mode 100644 pype/plugins/global/publish/collect_instance_anatomy_data.py delete mode 100644 pype/plugins/global/publish/collect_templates.py diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py new file mode 100644 index 0000000000..a1a9278d2a --- /dev/null +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -0,0 +1,119 @@ +""" +Requires: + context -> anatomyData + context -> projectEntity + context -> assetEntity + instance -> asset + instance -> subset + instance -> family + +Optional: + instance -> resolutionWidth + instance -> resolutionHeight + instance -> fps + +Provides: + instance -> anatomyData +""" + +import copy +import json + +from avalon import io +import pyblish.api + + +class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): + """Fill templates with data needed for publish""" + + order = pyblish.api.CollectorOrder + 0.1 + label = "Collect instance 
anatomy data" + hosts = ["maya", "nuke", "standalonepublisher"] + + def process(self, instance): + # get all the stuff from the database + anatomy_data = copy.deepcopy(instance.context.data["anatomyData"]) + project_entity = instance.context.data["projectEntity"] + context_asset_entity = instance.context.data["assetEntity"] + + asset_name = instance.data["asset"] + # Check if asset name is the same as what is in context + # - they may be different, e.g. in NukeStudio + if context_asset_entity["name"] == asset_name: + asset_entity = context_asset_entity + + else: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) + + instance.context.data["assetEntity"] = asset_entity + instance.context.data["projectEntity"] = project_entity + + subset_name = instance.data["subset"] + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + version_number = instance.data.get("version") + if version_number is None: + version_number = instance.context.data.get("version") + + latest_version = None + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + if version_number is None: + # TODO we should be able to change this version by studio + # preferences (like start with version number `0`) + version_number = 1 + if latest_version is not None: + version_number += int(latest_version) + + # Version should not be collected since may be instance + anatomy_data.update({ + "asset": asset_entity["name"], + "family": instance.data["family"], + "subset": subset_name, + "version": version_number + }) + + resolution_width = instance.data.get("resolutionWidth") + if resolution_width: + anatomy_data["resolution_width"] = resolution_width + + resolution_height = instance.data.get("resolutionHeight") + if resolution_height: + anatomy_data["resolution_height"] = resolution_height + + fps = instance.data.get("fps") + if resolution_height: + anatomy_data["fps"] = fps + + instance.data["anatomyData"] = anatomy_data + instance.data["latestVersion"] = latest_version + # TODO check if template is used anywhere + # instance.data["template"] = template + + # TODO we should move this to any Validator + # # We take the parent folder of representation 'filepath' + # instance.data["assumedDestination"] = os.path.dirname( + # (anatomy.format(template_data))["publish"]["path"] + # ) + + self.log.info("Instance anatomy Data collected") + self.log.debug(json.dumps(anatomy_data, indent=4)) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py deleted file mode 100644 index f065b3c246..0000000000 --- a/pype/plugins/global/publish/collect_templates.py +++ /dev/null @@ -1,117 +0,0 @@ -""" -Requires: - session -> AVALON_PROJECT - context -> anatomy (pypeapp.Anatomy) - instance -> subset - instance -> asset - instance -> family - -Provides: - instance -> template - instance -> assumedTemplateData - instance -> assumedDestination -""" - -import os - -from avalon import io, api -import pyblish.api - - -class CollectTemplates(pyblish.api.InstancePlugin): - """Fill templates with data needed for publish""" - - order = pyblish.api.CollectorOrder + 0.1 - label = "Collect and fill Templates" - hosts = ["maya", "nuke", "standalonepublisher"] - - def process(self, 
instance): - # get all the stuff from the database - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += int(version["name"]) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "version": version_number, - "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} - - # Add datetime data to template data - datetime_data = instance.context.data.get("datetimeData") or {} - template_data.update(datetime_data) - - resolution_width = instance.data.get("resolutionWidth") - resolution_height = instance.data.get("resolutionHeight") - fps = instance.data.get("fps") - - if resolution_width: - template_data["resolution_width"] = resolution_width - if resolution_width: - template_data["resolution_height"] = resolution_height - if resolution_width: - template_data["fps"] = fps - - instance.data["template"] = template - instance.data["assumedTemplateData"] = template_data - - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - (anatomy.format(template_data))["publish"]["path"] - ) - self.log.info("Assumed Destination has been created...") - self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) - self.log.debug("__ template: `{}`".format(instance.data["template"])) From 1515f47f0fad2700efaa69022ac682456b7e4c50 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:26:33 +0100 Subject: [PATCH 242/393] extract burnin uses anatomyData --- pype/plugins/global/publish/extract_burnin.py | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index e50ba891d2..b95c15f340 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -32,21 +32,15 @@ class ExtractBurnin(pype.api.Extractor): frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 - prep_data = { - "username": instance.context.data['user'], - "asset": os.environ['AVALON_ASSET'], - "task": os.environ['AVALON_TASK'], + prep_data = copy.deepcopy(instance.data["anatomyData"]) + prep_data.update({ "frame_start": frame_start, "frame_end": frame_end, "duration": 
duration,
            "version": int(version),
            "comment": instance.context.data.get("comment", ""),
            "intent": instance.context.data.get("intent", "")
-        }
-
-        # Add datetime data to preparation data
-        datetime_data = instance.context.data.get("datetimeData") or {}
-        prep_data.update(datetime_data)
+        })

        slate_frame_start = frame_start
        slate_frame_end = frame_end
@@ -64,10 +58,6 @@ class ExtractBurnin(pype.api.Extractor):
            "slate_duration": slate_duration
        })

-        # Update data with template data
-        template_data = instance.data.get("assumedTemplateData") or {}
-        prep_data.update(template_data)
-
        # get anatomy project
        anatomy = instance.context.data['anatomy']

From e4b23553dffd42aad2ee07b5e207787b1b52c4f8 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 19:26:55 +0100
Subject: [PATCH 243/393] integrate assumed destination replaced with collect resources path

---
 .../global/publish/collect_resources_path.py | 132 ++++++++++++++++
 .../publish/integrate_assumed_destination.py | 147 ------------------
 2 files changed, 132 insertions(+), 147 deletions(-)
 create mode 100644 pype/plugins/global/publish/collect_resources_path.py
 delete mode 100644 pype/plugins/global/publish/integrate_assumed_destination.py

diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py
new file mode 100644
index 0000000000..52e926e09c
--- /dev/null
+++ b/pype/plugins/global/publish/collect_resources_path.py
@@ -0,0 +1,132 @@
+import os
+import copy
+
+import pyblish.api
+from avalon import io
+
+
+class IntegrateResourcesPath(pyblish.api.InstancePlugin):
+    """Generate the assumed destination path where the file will be stored"""
+
+    label = "Integrate Prepare Resource"
+    order = pyblish.api.IntegratorOrder - 0.05
+    families = ["clip", "projectfile", "plate"]
+
+    def process(self, instance):
+        project_entity = instance.context["projectEntity"]
+        asset_entity = instance.context["assetEntity"]
+
+        template_data = copy.deepcopy(instance.data["anatomyData"])
+
+        asset_name = instance.data["asset"]
+        if asset_name != asset_entity["name"]:
+            asset_entity = io.find_one({
+                "type": "asset",
+                "name": asset_name,
+                "parent": project_entity["_id"]
+            })
+            assert asset_entity, (
+                "No asset found by the name '{}' in project '{}'".format(
+                    asset_name, project_entity["name"]
+                )
+            )
+
+            instance.data["assetEntity"] = asset_entity
+
+        template_data["name"] = asset_entity["name"]
+        silo_name = asset_entity.get("silo")
+        if silo_name:
+            template_data["silo"] = silo_name
+
+        parents = asset_entity["data"].get("parents") or []
+        hierarchy = "/".join(parents)
+        template_data["hierarchy"] = hierarchy
+
+        subset_name = instance.data["subset"]
+        self.log.info(subset_name)
+
+        subset = io.find_one({
+            "type": "subset",
+            "name": subset_name,
+            "parent": asset_entity["_id"]
+        })
+
+        # assume there is no version yet, we start at `1`
+        version = None
+        version_number = 1
+        if subset is not None:
+            version = io.find_one(
+                {
+                    "type": "version",
+                    "parent": subset["_id"]
+                },
+                sort=[("name", -1)]
+            )
+
+            # if there is a subset there ought to be version
+            if version is not None:
+                version_number += version["name"]
+
+        if instance.data.get('version'):
+            version_number = int(instance.data.get('version'))
+
+        anatomy = instance.context.data["anatomy"]
+        padding = int(anatomy.templates['render']['padding'])
+
+        template_data.update({
+            "subset": subset_name,
+            "frame": ('#' * padding),
+            "version": version_number,
+            "representation": "TEMP"
+        })
+
+        anatomy_filled = 
anatomy.format(template_data) + + template_names = ["publish"] + for repre in instance.data["representations"]: + template_name = repre.get("anatomy_template") + if template_name and template_name not in template_names: + template_names.append(template_name) + + resources = instance.data.get("resources", list()) + transfers = instance.data.get("transfers", list()) + + for template_name in template_names: + mock_template = anatomy_filled[template_name]["path"] + + # For now assume resources end up in a "resources" folder in the + # published folder + mock_destination = os.path.join( + os.path.dirname(mock_template), "resources" + ) + + # Clean the path + mock_destination = os.path.abspath( + os.path.normpath(mock_destination) + ).replace("\\", "/") + + # Define resource destination and transfers + for resource in resources: + # Add destination to the resource + source_filename = os.path.basename( + resource["source"]).replace("\\", "/") + destination = os.path.join(mock_destination, source_filename) + + # Force forward slashes to fix issue with software unable + # to work correctly with backslashes in specific scenarios + # (e.g. escape characters in PLN-151 V-Ray UDIM) + destination = destination.replace("\\", "/") + + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. + files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join( + mock_destination, fname).replace("\\", "/") + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers diff --git a/pype/plugins/global/publish/integrate_assumed_destination.py b/pype/plugins/global/publish/integrate_assumed_destination.py deleted file mode 100644 index d090e2711a..0000000000 --- a/pype/plugins/global/publish/integrate_assumed_destination.py +++ /dev/null @@ -1,147 +0,0 @@ -import pyblish.api -import os - -from avalon import io, api - - -class IntegrateAssumedDestination(pyblish.api.InstancePlugin): - """Generate the assumed destination path where the file will be stored""" - - label = "Integrate Assumed Destination" - order = pyblish.api.IntegratorOrder - 0.05 - families = ["clip", "projectfile", "plate"] - - def process(self, instance): - - anatomy = instance.context.data['anatomy'] - - self.create_destination_template(instance, anatomy) - - template_data = instance.data["assumedTemplateData"] - # self.log.info(anatomy.templates) - anatomy_filled = anatomy.format(template_data) - - # self.log.info(anatomy_filled) - mock_template = anatomy_filled["publish"]["path"] - - # For now assume resources end up in a "resources" folder in the - # published folder - mock_destination = os.path.join(os.path.dirname(mock_template), - "resources") - - # Clean the path - mock_destination = os.path.abspath( - os.path.normpath(mock_destination)).replace("\\", "/") - - # Define resource destination and transfers - resources = instance.data.get("resources", list()) - transfers = instance.data.get("transfers", list()) - for resource in resources: - - # Add destination to the resource - source_filename = os.path.basename( - resource["source"]).replace("\\", "/") - destination = os.path.join(mock_destination, source_filename) - - # Force forward slashes to fix issue with software unable - # to work correctly with backslashes in specific scenarios - # (e.g. 
escape characters in PLN-151 V-Ray UDIM) - destination = destination.replace("\\", "/") - - resource['destination'] = destination - - # Collect transfers for the individual files of the resource - # e.g. all individual files of a cache or UDIM textures. - files = resource['files'] - for fsrc in files: - fname = os.path.basename(fsrc) - fdest = os.path.join( - mock_destination, fname).replace("\\", "/") - transfers.append([fsrc, fdest]) - - instance.data["resources"] = resources - instance.data["transfers"] = transfers - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - # get all the stuff from the database - subset_name = instance.data["subset"] - self.log.info(subset_name) - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates - - project = io.find_one( - {"type": "project", "name": project_name}, - projection={"config": True, "data": True} - ) - - template = a_template['publish']['path'] - # anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - - padding = int(a_template['render']['padding']) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template From f6992a3d44532fac0d11a87c5d7cdfe0a0db715d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:27:20 +0100 Subject: [PATCH 244/393] collector for project data was removed --- .../global/publish/collect_project_data.py | 24 ------------------- .../nukestudio/publish/collect_clips.py | 2 +- 2 files changed, 1 insertion(+), 25 deletions(-) delete mode 100644 pype/plugins/global/publish/collect_project_data.py diff --git a/pype/plugins/global/publish/collect_project_data.py b/pype/plugins/global/publish/collect_project_data.py deleted file mode 100644 index acdbc2c41f..0000000000 --- a/pype/plugins/global/publish/collect_project_data.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Requires: - None - -Provides: - context -> projectData -""" - -import pyblish.api -import pype.api as pype - - -class CollectProjectData(pyblish.api.ContextPlugin): - """Collecting project data from 
avalon db""" - - label = "Collect Project Data" - order = pyblish.api.CollectorOrder - 0.1 - hosts = ["nukestudio"] - - def process(self, context): - # get project data from avalon db - context.data["projectData"] = pype.get_project()["data"] - - return diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 3759d50f6a..82053b6811 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -17,7 +17,7 @@ class CollectClips(api.ContextPlugin): self.log.debug("Created `assetsShared` in context") context.data["assetsShared"] = dict() - projectdata = context.data["projectData"] + projectdata = context.data["projectEntity"]["data"] version = context.data.get("version", "001") sequence = context.data.get("activeSequence") selection = context.data.get("selection") From 5177b891ac5b1b1be0f19c621630be169b08741d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:27:42 +0100 Subject: [PATCH 245/393] extract yeti rig and extract look uses anatomyData --- pype/plugins/maya/publish/extract_look.py | 69 +++++-------------- pype/plugins/maya/publish/extract_yeti_rig.py | 10 +-- 2 files changed, 23 insertions(+), 56 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index fa6ecd72c3..4000011520 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -1,6 +1,7 @@ import os import sys import json +import copy import tempfile import contextlib import subprocess @@ -333,7 +334,7 @@ class ExtractLook(pype.api.Extractor): anatomy = instance.context.data["anatomy"] - self.create_destination_template(instance, anatomy) + destination_dir = self.create_destination_template(instance, anatomy) # Compute destination location basename, ext = os.path.splitext(os.path.basename(filepath)) @@ -343,7 +344,7 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - instance.data["assumedDestination"], "resources", basename + ext + destination_dir, "resources", basename + ext ) def _process_texture(self, filepath, do_maketx, staging, linearise, force): @@ -421,38 +422,17 @@ class ExtractLook(pype.api.Extractor): file path (str) """ - # get all the stuff from the database + asset_entity = instance.context["assetEntity"] + + template_data = copy.deepcopy(instance.data["anatomyData"]) + subset_name = instance.data["subset"] self.log.info(subset_name) - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = a_template["publish"]["path"] - # anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'").format(asset_name, project_name) - silo = asset.get("silo") subset = io.find_one({ "type": "subset", "name": subset_name, - "parent": asset["_id"] + "parent": asset_entity["_id"] }) # assume there is no version yet, we start at `1` @@ -471,33 +451,18 @@ class ExtractLook(pype.api.Extractor): if version is not None: version_number += version["name"] - if instance.data.get("version"): - version_number = int(instance.data.get("version")) + if instance.data.get('version'): + version_number = 
int(instance.data.get('version'))
 
-        padding = int(a_template["render"]["padding"])
+        anatomy = instance.context.data["anatomy"]
+        padding = int(anatomy.templates['render']['padding'])
 
-        hierarchy = asset["data"]["parents"]
-        if hierarchy:
-            # hierarchy = os.path.sep.join(hierarchy)
-            hierarchy = "/".join(hierarchy)
-
-        template_data = {
-            "root": api.Session["AVALON_PROJECTS"],
-            "project": {"name": project_name, "code": project["data"]["code"]},
-            "silo": silo,
-            "family": instance.data["family"],
-            "asset": asset_name,
+        template_data.update({
             "subset": subset_name,
             "frame": ("#" * padding),
             "version": version_number,
-            "hierarchy": hierarchy,
-            "representation": "TEMP",
-        }
+            "representation": "TEMP"
+        })
+        anatomy_filled = anatomy.format(template_data)
 
-        instance.data["assumedTemplateData"] = template_data
-        self.log.info(template_data)
-        instance.data["template"] = template
-        # We take the parent folder of representation 'filepath'
-        instance.data["assumedDestination"] = os.path.dirname(
-            anatomy.format(template_data)["publish"]["path"]
-        )
+        return os.path.dirname(anatomy_filled["publish"]["path"])
diff --git a/pype/plugins/maya/publish/extract_yeti_rig.py b/pype/plugins/maya/publish/extract_yeti_rig.py
index 892bc0bea6..d390a1365a 100644
--- a/pype/plugins/maya/publish/extract_yeti_rig.py
+++ b/pype/plugins/maya/publish/extract_yeti_rig.py
@@ -1,6 +1,7 @@
 import os
 import json
 import contextlib
+import copy
 
 from maya import cmds
 
@@ -111,11 +112,12 @@ class ExtractYetiRig(pype.api.Extractor):
         self.log.info("Writing metadata file")
 
         # Create assumed destination folder for imageSearchPath
-        assumed_temp_data = instance.data["assumedTemplateData"]
-        template = instance.data["template"]
-        template_formatted = template.format(**assumed_temp_data)
+        template_data = copy.deepcopy(instance.data["anatomyData"])
 
-        destination_folder = os.path.dirname(template_formatted)
+        anatomy = instance.context.data["anatomy"]
+        filled = anatomy.format(template_data)
+
+        destination_folder = os.path.dirname(filled["publish"]["path"])
         image_search_path = os.path.join(destination_folder, "resources")
         image_search_path = os.path.normpath(image_search_path)
 

From f1486a9cd42820684d03c317442a0919b597bfef Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 3 Feb 2020 19:27:59 +0100
Subject: [PATCH 246/393] integrate_new uses anatomyData
---
 pype/plugins/global/publish/integrate_new.py | 80 ++++++++------------
 1 file changed, 30 insertions(+), 50 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 7d95534897..c6bc1ffbab 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -2,6 +2,7 @@ import os
 from os.path import getsize
 import logging
 import sys
+import copy
 import clique
 import errno
 import pyblish.api
@@ -100,12 +101,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
     def register(self, instance):
         # Required environment variables
-        PROJECT = api.Session["AVALON_PROJECT"]
-        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
-        TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
-        LOCATION = api.Session["AVALON_LOCATION"]
+        anatomy_data = instance.data["anatomyData"]
+
+        avalon_location = api.Session["AVALON_LOCATION"]
+
+        io.install()
 
         context = instance.context
+
@@ -140,35 +143,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         #
         stagingdir = 
instance.data.get("stagingDir") if not stagingdir: - self.log.info('''{} is missing reference to staging - directory Will try to get it from - representation'''.format(instance)) + self.log.info(( + "{0} is missing reference to staging directory." + " Will try to get it from representation." + ).format(instance)) - # extra check if stagingDir actually exists and is available - - self.log.debug("Establishing staging directory @ %s" % stagingdir) + else: + self.log.debug( + "Establishing staging directory @ {0}".format(stagingdir) + ) # Ensure at least one file is set up for transfer in staging dir. - repres = instance.data.get("representations", None) + repres = instance.data.get("representations") assert repres, "Instance has no files to transfer" assert isinstance(repres, (list, tuple)), ( - "Instance 'files' must be a list, got: {0}".format(repres) + "Instance 'files' must be a list, got: {0} {1}".format( + str(type(repres)), str(repres) + ) ) - # FIXME: io is not initialized at this point for shell host - io.install() - project = io.find_one({"type": "project"}) - - asset = io.find_one({ - "type": "asset", - "name": ASSET, - "parent": project["_id"] - }) - - assert all([project, asset]), ("Could not find current project or " - "asset '%s'" % ASSET) - - subset = self.get_subset(asset, instance) + intent = context.data.get("intent") + subset = self.get_subset(asset_entity, instance) # get next version latest_version = io.find_one( @@ -229,16 +224,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # \ \________. # \|________| # - root = api.registered_root() - hierarchy = "" - parents = io.find_one({ - "type": 'asset', - "name": ASSET - })['data']['parents'] - if parents and len(parents) > 0: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*parents) - anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -261,20 +246,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # |_______| # # create template data for Anatomy - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset.get('silo'), - "task": TASK, - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} + template_data = copy.deepcopy(anatomy_data) + # TODO cleanup this code, should be already in anatomyData + template_data.update({ + "subset": subset["name"], + "version": int(version["name"]) + }) - # Add datetime data to template data - datetime_data = context.data.get("datetimeData") or {} - template_data.update(datetime_data) + if intent is not None: + template_data["intent"] = intent resolution_width = repre.get("resolutionWidth") resolution_height = repre.get("resolutionHeight") @@ -292,6 +272,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): stagingdir = repre['stagingDir'] if repre.get('anatomy_template'): template_name = repre['anatomy_template'] + template = os.path.normpath( anatomy.templates[template_name]["path"]) @@ -322,7 +303,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_filled = anatomy_filled[template_name]["path"] if repre_context is None: repre_context = template_filled.used_values - test_dest_files.append( os.path.normpath(template_filled) ) From 91d51f145844aed301c3f7a721e807e0dfb154a7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 3 Feb 2020 19:28:33 +0100 Subject: [PATCH 247/393] removed deprecated validate templates --- 
.../global/publish/validate_templates.py | 43 ------------------- 1 file changed, 43 deletions(-) delete mode 100644 pype/plugins/global/publish/validate_templates.py diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py deleted file mode 100644 index f24f6b1a2e..0000000000 --- a/pype/plugins/global/publish/validate_templates.py +++ /dev/null @@ -1,43 +0,0 @@ -import pyblish.api -import os - - -class ValidateTemplates(pyblish.api.ContextPlugin): - """Check if all templates were filled""" - - label = "Validate Templates" - order = pyblish.api.ValidatorOrder - 0.1 - hosts = ["maya", "houdini", "nuke"] - - def process(self, context): - - anatomy = context.data["anatomy"] - if not anatomy: - raise RuntimeError("Did not find anatomy") - else: - data = { - "root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], - "project": {"name": "D001_projectsx", - "code": "prjX"}, - "ext": "exr", - "version": 3, - "task": "animation", - "asset": "sh001", - "app": "maya", - "hierarchy": "ep101/sq01/sh010"} - - anatomy_filled = anatomy.format(data) - self.log.info(anatomy_filled) - - data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"], - "project": {"name": "D001_projectsy", - "code": "prjY"}, - "ext": "abc", - "version": 1, - "task": "lookdev", - "asset": "bob", - "app": "maya", - "hierarchy": "ep101/sq01/bob"} - - anatomy_filled = context.data["anatomy"].format(data) - self.log.info(anatomy_filled["work"]["folder"]) From 25a6cd2c13ea0da2af2698c6e4b0442fc438e6e9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 00:12:09 +0100 Subject: [PATCH 248/393] fixed avalon entity check for nukestudio publish --- pype/ftrack/events/event_sync_to_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 23284a2ae6..1b245efaa8 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1437,7 +1437,7 @@ class SyncToAvalonEvent(BaseEvent): .get("name", {}) .get("new") ) - avalon_ent_by_name = self.avalon_ents_by_name.get(name) + avalon_ent_by_name = self.avalon_ents_by_name.get(name) or {} avalon_ent_by_name_ftrack_id = ( avalon_ent_by_name .get("data", {}) From 670f660a9724a83691913ffbfece7b9ae22cd414 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 10:59:30 +0100 Subject: [PATCH 249/393] changed collect instance anatomy data order to 0.49 --- pype/plugins/global/publish/collect_instance_anatomy_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index a1a9278d2a..76ab8dc3f6 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -26,7 +26,7 @@ import pyblish.api class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): """Fill templates with data needed for publish""" - order = pyblish.api.CollectorOrder + 0.1 + order = pyblish.api.CollectorOrder + 0.49 label = "Collect instance anatomy data" hosts = ["maya", "nuke", "standalonepublisher"] From 1a04dca10a7c2af07d779bc1339c941eb5ebf44d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 11:00:38 +0100 Subject: [PATCH 250/393] asset entity may not exist so collecting instance anatomy data was changed to not crash --- .../publish/collect_instance_anatomy_data.py | 46 ++++++++++--------- 
1 file changed, 24 insertions(+), 22 deletions(-)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index 76ab8dc3f6..ecef9d10f2 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -53,40 +53,42 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
             "parent": project_entity["_id"]
         })
 
-        instance.context.data["assetEntity"] = asset_entity
-        instance.context.data["projectEntity"] = project_entity
-
         subset_name = instance.data["subset"]
-        subset_entity = io.find_one({
-            "type": "subset",
-            "name": subset_name,
-            "parent": asset_entity["_id"]
-        })
-
         version_number = instance.data.get("version")
-        if version_number is None:
-            version_number = instance.context.data.get("version")
-
         latest_version = None
 
-        if subset_entity is None:
-            self.log.debug("Subset entity does not exist yet.")
-        else:
-            version_entity = io.find_one(
-                {
-                    "type": "version",
-                    "parent": subset_entity["_id"]
-                },
-                sort=[("name", -1)]
-            )
-            if version_entity:
-                latest_version = version_entity["name"]
+        if asset_entity:
+            subset_entity = io.find_one({
+                "type": "subset",
+                "name": subset_name,
+                "parent": asset_entity["_id"]
+            })
+
+
+            if subset_entity is None:
+                self.log.debug("Subset entity does not exist yet.")
+            else:
+                version_entity = io.find_one(
+                    {
+                        "type": "version",
+                        "parent": subset_entity["_id"]
+                    },
+                    sort=[("name", -1)]
+                )
+                if version_entity:
+                    latest_version = version_entity["name"]
+
+
+        # If version is not specified for instance or context
         if version_number is None:
-            # TODO we should be able to change this version by studio
+            # TODO we should be able to change default version by studio
             # preferences (like start with version number `0`)
             version_number = 1
+            # use latest version (+1) if already any exist
             if latest_version is not None:
                 version_number += int(latest_version)
 
         # Version should not be collected since may be instance
         anatomy_data.update({
-            "asset": asset_entity["name"],
+            "asset": asset_name,
             "family": instance.data["family"],
             "subset": subset_name,
             "version": version_number

From a14b05ccd1b5f75d4ccde0349de1096ec0425592 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 11:00:48 +0100
Subject: [PATCH 251/393] removed comments
---
 .../global/publish/collect_instance_anatomy_data.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index ecef9d10f2..838fb1a113 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -108,14 +108,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
         instance.data["anatomyData"] = anatomy_data
         instance.data["latestVersion"] = latest_version
 
-        # TODO check if template is used anywhere
-        # instance.data["template"] = template
-
-        # TODO we should move this to any Validator
-        # # We take the parent folder of representation 'filepath'
-        # instance.data["assumedDestination"] = os.path.dirname(
-        #     (anatomy.format(template_data))["publish"]["path"]
-        # )
 
         self.log.info("Instance anatomy Data collected")
         self.log.debug(json.dumps(anatomy_data, indent=4))

From f70f307cc40d95bfa5181e27ef7384108e9fb10b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:16:35 +0100
Subject: [PATCH 252/393] removed misleading comments
---
 pype/plugins/global/publish/integrate_new.py | 52 --------------------
 1 file changed, 52 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py 
b/pype/plugins/global/publish/integrate_new.py index c6bc1ffbab..c3a03324aa 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -109,38 +109,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): context = instance.context - # Atomicity - # - # Guarantee atomic publishes - each asset contains - # an identical set of members. - # __ - # / o - # / \ - # | o | - # \ / - # o __/ - # - # for result in context.data["results"]: - # if not result["success"]: - # self.log.debug(result) - # exc_type, exc_value, exc_traceback = result["error_info"] - # extracted_traceback = traceback.extract_tb(exc_traceback)[-1] - # self.log.debug( - # "Error at line {}: \"{}\"".format( - # extracted_traceback[1], result["error"] - # ) - # ) - # assert all(result["success"] for result in context.data["results"]),( - # "Atomicity not held, aborting.") - - # Assemble - # - # | - # v - # ---> <---- - # ^ - # | - # stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( @@ -214,16 +182,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_id = existing_version['_id'] instance.data['version'] = version['name'] - # Write to disk - # _ - # | | - # _| |_ - # ____\ / - # |\ \ / \ - # \ \ v \ - # \ \________. - # \|________| - # anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -235,16 +193,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): instance.data['transfers'] = [] for idx, repre in enumerate(instance.data["representations"]): - - # Collection - # _______ - # |______|\ - # | |\| - # | || - # | || - # | || - # |_______| - # # create template data for Anatomy template_data = copy.deepcopy(anatomy_data) # TODO cleanup this code, should be already in anatomyData From 6ef1a7e17605233bc8ea0dc25e3912d0d0a9dc9d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:17:15 +0100 Subject: [PATCH 253/393] formatting --- pype/plugins/global/publish/integrate_new.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c3a03324aa..b71b5fb298 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -159,10 +159,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) - version = self.create_version(subset=subset, - version_number=next_version, - locations=[LOCATION], - data=version_data) + version = self.create_version( + subset=subset, + version_number=next_version, + locations=[avalon_location], + data=version_data + ) self.log.debug("Creating version ...") existing_version = io.find_one({ From 1dcdac7ae051cb359fb481a812027a553e4c79e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 15:18:11 +0100 Subject: [PATCH 254/393] asset_entity check moved back since nukestudio instances may have not set value --- pype/plugins/global/publish/integrate_new.py | 26 +++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index b71b5fb298..774a54ea7c 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -102,13 +102,37 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required environment variables anatomy_data = 
instance.data["anatomyData"]
-        asset_entity = instance.data["assetEntity"]
 
         avalon_location = api.Session["AVALON_LOCATION"]
 
         io.install()
 
         context = instance.context
 
+        project_entity = instance.data["projectEntity"]
+
+        asset_name = instance.data["asset"]
+        asset_entity = instance.data.get("assetEntity")
+        if not asset_entity:
+            asset_entity = io.find_one({
+                "type": "asset",
+                "name": asset_name,
+                "parent": project_entity["_id"]
+            })
+
+        assert asset_entity, (
+            "No asset found by the name \"{0}\" in project \"{1}\""
+        ).format(asset_name, project_entity["name"])
+
+        instance.data["assetEntity"] = asset_entity
+
+        # update anatomy data with asset specific keys
+        # - name should already be set
+        hierarchy = ""
+        parents = asset_entity["data"]["parents"]
+        if parents:
+            hierarchy = "/".join(parents)
+        anatomy_data["hierarchy"] = hierarchy
+
         stagingdir = instance.data.get("stagingDir")
         if not stagingdir:
             self.log.info((

From e23cc33de7ad1e9156f1a659a0619e2fc2609f68 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:18:59 +0100
Subject: [PATCH 255/393] latest version is not queried before checking if
 instance already has version set in data
---
 pype/plugins/global/publish/integrate_new.py | 33 +++++++++++---------
 1 file changed, 19 insertions(+), 14 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 774a54ea7c..24162c4cf1 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -157,22 +157,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         intent = context.data.get("intent")
         subset = self.get_subset(asset_entity, instance)
 
-        # get next version
-        latest_version = io.find_one(
-            {
-                "type": "version",
-                "parent": subset["_id"]
-            },
-            {"name": True},
-            sort=[("name", -1)]
-        )
+        # TODO iLLiCiT use "latestVersion" from `instance.data`
+        # and store version in anatomyData instance collector
+        # instead of query again
+        instance_version = instance.data.get('version')
+        if instance_version is not None:
+            next_version = int(instance_version)
 
-        next_version = 1
-        if latest_version is not None:
-            next_version += latest_version["name"]
+        else:
+            # get next version
+            latest_version = io.find_one(
+                {
+                    "type": "version",
+                    "parent": subset["_id"]
+                },
+                {"name": True},
+                sort=[("name", -1)]
+            )
 
-        if instance.data.get('version'):
-            next_version = int(instance.data.get('version'))
+            next_version = 1
+            if latest_version is not None:
+                next_version += int(latest_version["name"])
 
         self.log.debug("Next version: v{0:03d}".format(next_version))

From 29c6768da935380dd499834857b896c64d2b05f5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:19:19 +0100
Subject: [PATCH 256/393] intent added to anatomy data
---
 pype/plugins/global/publish/integrate_new.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 24162c4cf1..093a9e354c 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -154,7 +154,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             )
         )
 
-        intent = context.data.get("intent")
         subset = self.get_subset(asset_entity, instance)

@@ -213,6 +212,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         version_id = existing_version['_id']
         instance.data['version'] = version['name']

+        intent = context.data.get("intent") 
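
(Aside: the intent handling this hunk introduces, shown standalone. A minimal sketch with an illustrative template and values; the real plugin formats through avalon's Anatomy rather than plain str.format:)

    # Optional "intent" key flowing into template data (illustrative names).
    template_data = {
        "project": {"code": "dm"},
        "asset": "sh010",
        "subset": "renderMain",
        "version": 3,
    }
    intent = "WIP"  # stands in for context.data.get("intent")
    if intent is not None:
        template_data["intent"] = intent

    template = "{project[code]}_{asset}_{subset}_v{version:0>3}"
    if "intent" in template_data:
        template += "_{intent}"
    print(template.format(**template_data))  # dm_sh010_renderMain_v003_WIP

(The patch hunk resumes below.)
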
+        if intent is not None:
+            anatomy_data["intent"] = intent
+
         anatomy = instance.context.data['anatomy']
 
         # Find the representations to transfer amongst the files

From 3a5ab92687bbf7cc89ade7ec453997d5189e0f64 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:19:44 +0100
Subject: [PATCH 257/393] removed subset and version anatomy update since they
 are already set for whole instance
---
 pype/plugins/global/publish/integrate_new.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 093a9e354c..fc7cbf4afa 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -229,12 +229,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         for idx, repre in enumerate(instance.data["representations"]):
             # create template data for Anatomy
             template_data = copy.deepcopy(anatomy_data)
-            # TODO cleanup this code, should be already in anatomyData
-            template_data.update({
-                "subset": subset["name"],
-                "version": int(version["name"])
-            })
-
             if intent is not None:
                 template_data["intent"] = intent

From 9113fb1c7f72b1e1ad7a0e32ac16fcb26cd67139 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:20:18 +0100
Subject: [PATCH 258/393] added check if index_frame_start exists
---
 pype/plugins/global/publish/integrate_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index fc7cbf4afa..6d85e29732 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -298,7 +298,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             index_frame_start = int(repre.get("frameStart"))
 
             # exception for slate workflow
-            if "slate" in instance.data["families"]:
+            if index_frame_start and "slate" in instance.data["families"]:
                 index_frame_start -= 1

From fde457d445c18d2f87591017df23e3915b8e55b4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:21:27 +0100
Subject: [PATCH 259/393] intent added to version data
---
 pype/plugins/global/publish/integrate_new.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 6d85e29732..5dba744346 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -601,6 +601,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "fps": context.data.get(
                     "fps", instance.data.get("fps"))}
 
+        intent = context.data.get("intent")
+        if intent is not None:
+            version_data["intent"] = intent
+
         # Include optional data if present in
         optionals = [
             "frameStart", "frameEnd", "step", "handles",

From df512a5a4a17d9d8b14ceba2bc60a908eccbfe5e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:21:52 +0100
Subject: [PATCH 260/393] formatting changes
---
 pype/plugins/global/publish/integrate_new.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 5dba744346..1ff1dfe520 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -331,7 +331,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             if not dst_start_frame:
                 dst_start_frame = dst_padding
 
-
             dst = "{0}{1}{2}".format(
                 dst_head,
                 dst_start_frame,
@@ 
-503,14 +502,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             filelink.create(src, dst, filelink.HARDLINK)
 
     def get_subset(self, asset, instance):
+        subset_name = instance.data["subset"]
         subset = io.find_one({
             "type": "subset",
             "parent": asset["_id"],
-            "name": instance.data["subset"]
+            "name": subset_name
         })
 
         if subset is None:
-            subset_name = instance.data["subset"]
             self.log.info("Subset '%s' not found, creating.." % subset_name)
             self.log.debug("families. %s" % instance.data.get('families'))
             self.log.debug(

From 6bd8706579b5b1c19ae0ea0c3859e53fdda02013 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:22:06 +0100
Subject: [PATCH 261/393] added few TODOs
---
 pype/plugins/global/publish/integrate_new.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 1ff1dfe520..15165f4217 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -203,6 +203,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if existing_version is None:
             version_id = io.insert_one(version).inserted_id
         else:
+            # TODO query by _id and
+            # remove old version and representations but keep their ids
             io.update_many({
                 'type': 'version',
                 'parent': subset["_id"],
@@ -304,6 +306,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             dst_padding_exp = src_padding_exp
             dst_start_frame = None
             for i in src_collection.indexes:
+                # TODO 1.) do not count padding in each index iteration
+                # 2.) do not count dst_padding from src_padding before
+                # index_frame_start check
                 src_padding = src_padding_exp % i
 
                 src_file_name = "{0}{1}{2}".format(

From 178fed2ae22893670dcfff056c13f44ed64c925b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:32:12 +0100
Subject: [PATCH 262/393] updated instance input/output docstring
---
 pype/plugins/global/publish/collect_instance_anatomy_data.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index 838fb1a113..404480b30b 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -8,12 +8,17 @@ Requires:
     instance -> family
 
 Optional:
+    instance -> version
     instance -> resolutionWidth
     instance -> resolutionHeight
     instance -> fps
 
 Provides:
+    instance -> projectEntity
+    instance -> assetEntity
     instance -> anatomyData
+    instance -> version
+    instance -> latestVersion
 """
 
 import copy

From ed8b56b6de17330054b2c9469ea63133a1ed5a36 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 4 Feb 2020 15:32:54 +0100
Subject: [PATCH 263/393] fixed per key instance.data value assignment
---
 .../global/publish/collect_instance_anatomy_data.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py
index 404480b30b..8a98b6cbb2 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py
@@ -54,9 +54,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
             "parent": project_entity["_id"]
         })
 
-        instance.context.data["assetEntity"] = asset_entity
-        instance.context.data["projectEntity"] = project_entity
-
         subset_name = instance.data["subset"]
         version_number = instance.data.get("version") 
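
(Aside: the version resolution rule this collector and integrate_new now share, reduced to plain Python. A sketch with dummy values in place of the real avalon query:)

    def resolve_version(explicit_version, latest_version):
        """Explicit version wins; otherwise bump the latest, or start at 1."""
        if explicit_version is not None:
            return int(explicit_version)
        if latest_version is None:
            return 1
        return int(latest_version) + 1

    assert resolve_version(None, None) == 1  # first publish of a subset
    assert resolve_version(None, 4) == 5     # continue from latest version
    assert resolve_version("7", 4) == 7      # instance/context override wins

(The patch hunk resumes below.)
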
latest_version = None @@ -68,7 +65,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): "parent": asset_entity["_id"] }) - if subset_entity is None: self.log.debug("Subset entity does not exist yet.") else: @@ -84,7 +80,7 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): # If version is not specified for instance or context if version_number is None: - # TODO we should be able to change this version by studio + # TODO we should be able to change default version by studio # preferences (like start with version number `0`) version_number = 1 # use latest version (+1) if already any exist @@ -111,8 +107,12 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): if resolution_height: anatomy_data["fps"] = fps + instance.data["projectEntity"] = project_entity + instance.data["assetEntity"] = asset_entity instance.data["anatomyData"] = anatomy_data instance.data["latestVersion"] = latest_version + # TODO should be version number set here? + instance.data["version"] = version_number self.log.info("Instance anatomy Data collected") self.log.debug(json.dumps(anatomy_data, indent=4)) From 876ff064b6b6c1a941888e43758196525b49872c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:11:29 +0100 Subject: [PATCH 264/393] reduced collect resources path because of already collected data in instance anatomy data --- .../global/publish/collect_resources_path.py | 62 +------------------ 1 file changed, 3 insertions(+), 59 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index 52e926e09c..de78874cd6 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -13,70 +13,14 @@ class IntegrateResourcesPath(pyblish.api.InstancePlugin): families = ["clip", "projectfile", "plate"] def process(self, instance): - project_entity = instance.context["projectEntity"] - asset_entity = instance.context["assetEntity"] - template_data = copy.deepcopy(instance.data["anatomyData"]) - asset_name = instance.data["asset"] - if asset_name != asset_entity["name"]: - asset_entity = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project_entity["_id"] - }) - assert asset_entity, ( - "No asset found by the name '{}' in project '{}'".format( - asset_name, project_entity["name"] - ) - ) - - instance.data["assetEntity"] = asset_entity - - template_data["name"] = asset_entity["name"] - silo_name = asset_entity.get("silo") - if silo_name: - template_data["silo"] = silo_name - - parents = asset_entity["data"].get("parents") or [] - hierarchy = "/".join(parents) - template_data["hierarchy"] = hierarchy - - subset_name = instance.data["subset"] - self.log.info(subset_name) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates['render']['padding']) + padding = int(anatomy.templates["render"]["padding"]) + # add possible representation specific key to anatomy data 
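
(Aside: what the "frame" key prepared below expands to — a padded placeholder rather than a real frame number. Illustrative values only:)

    padding = 4                        # e.g. anatomy.templates["render"]["padding"]
    frame_placeholder = "#" * padding  # -> "####"
    # A filled publish path then reads like ".../renderMain_v003.####.exr",
    # a sequence pattern instead of one specific frame.

(The patch hunk resumes below.)
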
template_data.update({ - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, + "frame": ("#" * padding), "representation": "TEMP" }) From 3fdfcec29bf6b62023fe34a8b1d1b01fe2198edf Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:17:18 +0100 Subject: [PATCH 265/393] version_number is used from instance.data in integrate_new --- pype/plugins/global/publish/integrate_new.py | 32 +++----------------- 1 file changed, 5 insertions(+), 27 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 15165f4217..aff92ea308 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -156,40 +156,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset_entity, instance) - # TODO iLLiCiT use "latestVersion" from `instance.data` - # and store version in anatomyData instance collector - # instead of query again - instance_version = instance.data.get('version') - if instance_version is not None: - next_version = int(instance_version) - - else: - # get next version - latest_version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - {"name": True}, - sort=[("name", -1)] - ) - - next_version = 1 - if latest_version is not None: - next_version += int(latest_version["name"]) - - self.log.debug("Next version: v{0:03d}".format(next_version)) + version_number = instance.data["version"] + self.log.debug("Next version: v{0:03d}".format(version_number)) version_data = self.create_version_data(context, instance) version_data_instance = instance.data.get('versionData') - if version_data_instance: version_data.update(version_data_instance) version = self.create_version( subset=subset, - version_number=next_version, + version_number=version_number, locations=[avalon_location], data=version_data ) @@ -198,7 +176,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): existing_version = io.find_one({ 'type': 'version', 'parent': subset["_id"], - 'name': next_version + 'name': version_number }) if existing_version is None: version_id = io.insert_one(version).inserted_id @@ -208,7 +186,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): io.update_many({ 'type': 'version', 'parent': subset["_id"], - 'name': next_version + 'name': version_number }, {'$set': version} ) version_id = existing_version['_id'] From ebdc7c3700f17f636573fa45e4ad500f261200f9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:23:03 +0100 Subject: [PATCH 266/393] added few todos --- pype/plugins/global/publish/collect_resources_path.py | 5 +++-- pype/plugins/global/publish/integrate_new.py | 5 ++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index de78874cd6..fe152584b6 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -16,11 +16,12 @@ class IntegrateResourcesPath(pyblish.api.InstancePlugin): template_data = copy.deepcopy(instance.data["anatomyData"]) anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates["render"]["padding"]) + frame_padding = int(anatomy.templates["render"]["padding"]) # add possible representation specific key to anatomy data + # TODO ability to set host specific "frame" value template_data.update({ - "frame": ("#" * padding), + "frame": ("#" * frame_padding), 
"representation": "TEMP" }) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index aff92ea308..570a093cdc 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -102,7 +102,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def register(self, instance): # Required environment variables anatomy_data = instance.data["anatomyData"] - avalon_location = api.Session["AVALON_LOCATION"] io.install() @@ -165,6 +164,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) + # TODO remove avalon_location (shall we?) + avalon_location = api.Session["AVALON_LOCATION"] + # TODO rename method from `create_version` to + # `prepare_version` or similar... version = self.create_version( subset=subset, version_number=version_number, From f44011268cca59f593402b13b37e5c3a0cefe4db Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:25:45 +0100 Subject: [PATCH 267/393] delete action tries to find entities by name and parents if ftrackId is not set in data --- pype/ftrack/actions/action_delete_asset.py | 37 ++++++++++++++++++++-- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_delete_asset.py b/pype/ftrack/actions/action_delete_asset.py index 7eb9126fca..5d177748cd 100644 --- a/pype/ftrack/actions/action_delete_asset.py +++ b/pype/ftrack/actions/action_delete_asset.py @@ -99,6 +99,7 @@ class DeleteAssetSubset(BaseAction): # Filter event even more (skip task entities) # - task entities are not relevant for avalon + entity_mapping = {} for entity in entities: ftrack_id = entity["id"] if ftrack_id not in ftrack_ids: @@ -107,6 +108,8 @@ class DeleteAssetSubset(BaseAction): if entity.entity_type.lower() == "task": ftrack_ids.remove(ftrack_id) + entity_mapping[ftrack_id] = entity + if not ftrack_ids: # It is bug if this happens! return { @@ -122,11 +125,39 @@ class DeleteAssetSubset(BaseAction): project_name = project["full_name"] self.dbcon.Session["AVALON_PROJECT"] = project_name - selected_av_entities = self.dbcon.find({ + selected_av_entities = list(self.dbcon.find({ "type": "asset", "data.ftrackId": {"$in": ftrack_ids} - }) - selected_av_entities = [ent for ent in selected_av_entities] + })) + if len(selected_av_entities) != len(ftrack_ids): + found_ftrack_ids = [ + ent["data"]["ftrackId"] for ent in selected_av_entities + ] + for ftrack_id, entity in entity_mapping.items(): + if ftrack_id in found_ftrack_ids: + continue + + av_ents_by_name = list(self.dbcon.find({ + "type": "asset", + "name": entity["name"] + })) + if not av_ents_by_name: + continue + + ent_path_items = [ent["name"] for ent in entity["link"]] + parents = ent_path_items[1:len(ent_path_items)-1:] + # TODO we should say to user that + # few of them are missing in avalon + for av_ent in av_ents_by_name: + if av_ent["data"]["parents"] != parents: + continue + + # TODO we should say to user that found entity + # with same name does not match same ftrack id? 
+ if "ftrackId" not in av_ent["data"]: + selected_av_entities.append(av_ent) + break + if not selected_av_entities: return { "success": False, From d57586c76b22e9d57b56e9abf831cb49e4e570db Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 16:47:26 +0100 Subject: [PATCH 268/393] added mapping for avalon -> ftrack id if ftrackId is not in entity's data --- pype/ftrack/actions/action_delete_asset.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_delete_asset.py b/pype/ftrack/actions/action_delete_asset.py index 5d177748cd..fc9e66e4f8 100644 --- a/pype/ftrack/actions/action_delete_asset.py +++ b/pype/ftrack/actions/action_delete_asset.py @@ -129,6 +129,7 @@ class DeleteAssetSubset(BaseAction): "type": "asset", "data.ftrackId": {"$in": ftrack_ids} })) + found_without_ftrack_id = {} if len(selected_av_entities) != len(ftrack_ids): found_ftrack_ids = [ ent["data"]["ftrackId"] for ent in selected_av_entities @@ -156,6 +157,7 @@ class DeleteAssetSubset(BaseAction): # with same name does not match same ftrack id? if "ftrackId" not in av_ent["data"]: selected_av_entities.append(av_ent) + found_without_ftrack_id[str(av_ent["_id"])] = ftrack_id break if not selected_av_entities: @@ -186,7 +188,8 @@ class DeleteAssetSubset(BaseAction): "created_at": datetime.now(), "project_name": project_name, "subset_ids_by_name": {}, - "subset_ids_by_parent": {} + "subset_ids_by_parent": {}, + "without_ftrack_id": found_without_ftrack_id } id_item = { @@ -444,14 +447,21 @@ class DeleteAssetSubset(BaseAction): asset_ids_to_archive = [] ftrack_ids_to_delete = [] if len(assets_to_delete) > 0: + map_av_ftrack_id = spec_data["without_ftrack_id"] # Prepare data when deleting whole avalon asset avalon_assets = self.dbcon.find({"type": "asset"}) avalon_assets_by_parent = collections.defaultdict(list) for asset in avalon_assets: + asset_id = asset["_id"] parent_id = asset["data"]["visualParent"] avalon_assets_by_parent[parent_id].append(asset) - if asset["_id"] in assets_to_delete: - ftrack_id = asset["data"]["ftrackId"] + if asset_id in assets_to_delete: + ftrack_id = map_av_ftrack_id.get(str(asset_id)) + if not ftrack_id: + ftrack_id = asset["data"].get("ftrackId") + + if not ftrack_id: + continue ftrack_ids_to_delete.append(ftrack_id) children_queue = Queue() From 569bd6165859670365dce17fbe891af9dc56711f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 4 Feb 2020 17:36:46 +0100 Subject: [PATCH 269/393] fix(nk, nks): some fixes for loading sequence and mov --- .../global/publish/collect_templates.py | 4 ++- pype/plugins/nuke/load/load_mov.py | 34 +++++++++++++++++-- pype/plugins/nuke/load/load_sequence.py | 21 ++++++++++-- 3 files changed, 53 insertions(+), 6 deletions(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 383944e293..5ad7aa7320 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -78,6 +78,8 @@ class CollectTemplates(pyblish.api.InstancePlugin): if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = os.path.join(*hierarchy) + else: + hierarchy = "" template_data = {"root": api.Session["AVALON_PROJECTS"], "project": {"name": project_name, @@ -88,7 +90,7 @@ class CollectTemplates(pyblish.api.InstancePlugin): "subset": subset_name, "version": version_number, "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP")} + "representation": "TEMP"} resolution_width = 
instance.data.get("resolutionWidth") resolution_height = instance.data.get("resolutionHeight") diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py index 655937b9a8..fccba4c573 100644 --- a/pype/plugins/nuke/load/load_mov.py +++ b/pype/plugins/nuke/load/load_mov.py @@ -111,8 +111,15 @@ class LoadMov(api.Loader): if namespace is None: namespace = context['asset']['name'] - file = self.fname.replace("\\", "/") - log.info("file: {}\n".format(self.fname)) + file = self.fname + + if not file: + repr_id = context["representation"]["_id"] + log.warning( + "Representation id `{}` is failing to load".format(repr_id)) + return + + file = file.replace("\\", "/") read_name = "Read_{0}_{1}_{2}".format( repr_cont["asset"], @@ -200,7 +207,15 @@ class LoadMov(api.Loader): assert node.Class() == "Read", "Must be Read" - file = self.fname.replace("\\", "/") + file = self.fname + + if not file: + repr_id = representation["_id"] + log.warning( + "Representation id `{}` is failing to load".format(repr_id)) + return + + file = file.replace("\\", "/") # Get start frame from version data version = io.find_one({ @@ -263,6 +278,19 @@ class LoadMov(api.Loader): if colorspace: node["colorspace"].setValue(str(colorspace)) + # load nuke presets for Read's colorspace + read_clrs_presets = presets.get_colorspace_preset().get( + "nuke", {}).get("read", {}) + + # check if any colorspace presets for read is mathing + preset_clrsp = next((read_clrs_presets[k] + for k in read_clrs_presets + if bool(re.search(k, file))), + None) + if preset_clrsp is not None: + node["colorspace"].setValue(str(preset_clrsp)) + + updated_dict = {} updated_dict.update({ "representation": str(representation["_id"]), diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 9f3d09186c..76ff7d2cb6 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -107,7 +107,15 @@ class LoadSequence(api.Loader): first -= self.handle_start last += self.handle_end - file = self.fname.replace("\\", "/") + file = self.fname + + if not file: + repr_id = context["representation"]["_id"] + log.warning( + "Representation id `{}` is failing to load".format(repr_id)) + return + + file = file.replace("\\", "/") repr_cont = context["representation"]["context"] if "#" not in file: @@ -229,7 +237,16 @@ class LoadSequence(api.Loader): assert node.Class() == "Read", "Must be Read" repr_cont = representation["context"] - file = self.fname.replace("\\", "/") + + file = self.fname + + if not file: + repr_id = representation["_id"] + log.warning( + "Representation id `{}` is failing to load".format(repr_id)) + return + + file = file.replace("\\", "/") if "#" not in file: frame = repr_cont.get("frame") From 66466bc24bf3ba98f1a715dfbd0fe9352ba6a65c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:07:17 +0100 Subject: [PATCH 270/393] collect resources path uses anatomy publish.folder key with ability of backwards compatibility --- .../global/publish/collect_resources_path.py | 95 ++++++++----------- 1 file changed, 39 insertions(+), 56 deletions(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index fe152584b6..9fc8c576f5 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -1,77 +1,60 @@ +""" +Requires: + context -> anatomy + context -> anatomyData + +Provides: + instance -> publishDir + 
instance -> resourcesDir +""" + import os import copy import pyblish.api -from avalon import io +from avalon import api -class IntegrateResourcesPath(pyblish.api.InstancePlugin): - """Generate the assumed destination path where the file will be stored""" +class CollectResourcesPath(pyblish.api.InstancePlugin): + """Generate directory path where the files and resources will be stored""" - label = "Integrate Prepare Resource" - order = pyblish.api.IntegratorOrder - 0.05 - families = ["clip", "projectfile", "plate"] + label = "Collect Resources Path" + order = pyblish.api.CollectorOrder + 0.995 def process(self, instance): + anatomy = instance.context.data["anatomy"] + template_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy = instance.context.data["anatomy"] - frame_padding = int(anatomy.templates["render"]["padding"]) - - # add possible representation specific key to anatomy data - # TODO ability to set host specific "frame" value + # This is for cases of Deprecated anatomy without `folder` + # TODO remove when all clients have solved this issue template_data.update({ - "frame": ("#" * frame_padding), + "frame": "FRAME_TEMP", "representation": "TEMP" }) anatomy_filled = anatomy.format(template_data) - template_names = ["publish"] - for repre in instance.data["representations"]: - template_name = repre.get("anatomy_template") - if template_name and template_name not in template_names: - template_names.append(template_name) + if "folder" in anatomy.templates["publish"]: + publish_folder = anatomy_filled["publish"]["folder"] + else: + # solve deprecated situation when `folder` key is not underneath + # `publish` anatomy + project_name = api.Session["AVALON_PROJECT"] + self.log.warning(( + "Deprecation warning: Anatomy does not have set `folder`" + " key underneath `publish` (in global of for project `{}`)." + ).format(project_name)) - resources = instance.data.get("resources", list()) - transfers = instance.data.get("transfers", list()) + file_path = anatomy_filled["publish"]["path"] + # Directory + publish_folder = os.path.dirname(file_path) - for template_name in template_names: - mock_template = anatomy_filled[template_name]["path"] + publish_folder = os.path.normpath(publish_folder) + resources_folder = os.path.join(publish_folder, "resources") - # For now assume resources end up in a "resources" folder in the - # published folder - mock_destination = os.path.join( - os.path.dirname(mock_template), "resources" - ) + instance.data["publishDir"] = publish_folder + instance.data["resourcesDir"] = resources_folder - # Clean the path - mock_destination = os.path.abspath( - os.path.normpath(mock_destination) - ).replace("\\", "/") - - # Define resource destination and transfers - for resource in resources: - # Add destination to the resource - source_filename = os.path.basename( - resource["source"]).replace("\\", "/") - destination = os.path.join(mock_destination, source_filename) - - # Force forward slashes to fix issue with software unable - # to work correctly with backslashes in specific scenarios - # (e.g. escape characters in PLN-151 V-Ray UDIM) - destination = destination.replace("\\", "/") - - resource['destination'] = destination - - # Collect transfers for the individual files of the resource - # e.g. all individual files of a cache or UDIM textures. 
- files = resource['files'] - for fsrc in files: - fname = os.path.basename(fsrc) - fdest = os.path.join( - mock_destination, fname).replace("\\", "/") - transfers.append([fsrc, fdest]) - - instance.data["resources"] = resources - instance.data["transfers"] = transfers + self.log.debug("publishDir: \"{}\"".format(publish_folder)) + self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) From 6f26d0160ce62817843d935b10ba2a937e715a38 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:11:32 +0100 Subject: [PATCH 271/393] integrated assued destination was moved back with name integrate resources path --- .../publish/integrate_resources_path.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 pype/plugins/global/publish/integrate_resources_path.py diff --git a/pype/plugins/global/publish/integrate_resources_path.py b/pype/plugins/global/publish/integrate_resources_path.py new file mode 100644 index 0000000000..56dc0e5ef7 --- /dev/null +++ b/pype/plugins/global/publish/integrate_resources_path.py @@ -0,0 +1,49 @@ +import os +import pyblish.api + + +class IntegrateResourcesPath(pyblish.api.InstancePlugin): + """Generate directory path where the files and resources will be stored""" + + label = "Integrate Resources Path" + order = pyblish.api.IntegratorOrder - 0.05 + families = ["clip", "projectfile", "plate"] + + def process(self, instance): + resources = instance.data.get("resources") or [] + transfers = instance.data.get("transfers") or [] + + if not resources and not transfers: + self.log.debug( + "Instance does not have `resources` and `transfers`" + ) + return + + resources_folder = instance.data["resourcesDir"] + + # Define resource destination and transfers + for resource in resources: + # Add destination to the resource + source_filename = os.path.basename( + resource["source"]).replace("\\", "/") + destination = os.path.join(resources_folder, source_filename) + + # Force forward slashes to fix issue with software unable + # to work correctly with backslashes in specific scenarios + # (e.g. escape characters in PLN-151 V-Ray UDIM) + destination = destination.replace("\\", "/") + + resource['destination'] = destination + + # Collect transfers for the individual files of the resource + # e.g. all individual files of a cache or UDIM textures. 
+ files = resource['files'] + for fsrc in files: + fname = os.path.basename(fsrc) + fdest = os.path.join( + resources_folder, fname + ).replace("\\", "/") + transfers.append([fsrc, fdest]) + + instance.data["resources"] = resources + instance.data["transfers"] = transfers From fcffa08177efd97ffc08bbf3520eb2be6a8d02f6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:27:03 +0100 Subject: [PATCH 272/393] extract look uses `resourcesDir` instead of computing itself --- pype/plugins/maya/publish/extract_look.py | 63 +---------------------- 1 file changed, 2 insertions(+), 61 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index 4000011520..58196433aa 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -331,10 +331,9 @@ class ExtractLook(pype.api.Extractor): maya_path)) def resource_destination(self, instance, filepath, do_maketx): - anatomy = instance.context.data["anatomy"] - destination_dir = self.create_destination_template(instance, anatomy) + resources_dir = instance.data["resourcesDir"] # Compute destination location basename, ext = os.path.splitext(os.path.basename(filepath)) @@ -344,7 +343,7 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - destination_dir, "resources", basename + ext + resources_dir, basename + ext ) def _process_texture(self, filepath, do_maketx, staging, linearise, force): @@ -408,61 +407,3 @@ class ExtractLook(pype.api.Extractor): return converted, COPY, texture_hash return filepath, COPY, texture_hash - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - asset_entity = instance.context["assetEntity"] - - template_data = copy.deepcopy(instance.data["anatomyData"]) - - subset_name = instance.data["subset"] - self.log.info(subset_name) - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset_entity["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] - - if instance.data.get('version'): - version_number = int(instance.data.get('version')) - - anatomy = instance.context.data["anatomy"] - padding = int(anatomy.templates['render']['padding']) - - template_data.update({ - "subset": subset_name, - "frame": ("#" * padding), - "version": version_number, - "representation": "TEMP" - }) - anatomy_filled = anatomy.format(template_data) - - return os.path.dirname(anatomy_filled["publish"]["path"]) From e92537d34a9c63b7cf09f1b4a46f11c30d76e90d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:27:34 +0100 Subject: [PATCH 273/393] extract effects can compute resources dir with anatomyData (need changes) --- .../nukestudio/publish/extract_effects.py | 182 +++++++++--------- 1 file changed, 96 insertions(+), 86 deletions(-) diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index 15d2a80a55..9e43bee1c8 100644 --- 
a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -2,10 +2,12 @@ import os import json import re +import copy import pyblish.api import tempfile from avalon import io, api + class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): """Collect video tracks effects into context.""" @@ -71,9 +73,11 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): ) data["source"] = data["sourcePath"] + # WARNING instance should not be created in Extractor! # create new instance instance = instance.context.create_instance(**data) - + # TODO replace line below with `instance.data["resourcesDir"]` + # when instance is created during collection part dst_dir = self.resource_destination_dir(instance) # change paths in effects to files @@ -141,103 +145,109 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): return (v, dst) def resource_destination_dir(self, instance): - anatomy = instance.context.data['anatomy'] - self.create_destination_template(instance, anatomy) + # WARNING this is from `collect_instance_anatomy_data.py` + anatomy_data = copy.deepcopy(instance.context.data["anatomyData"]) + project_entity = instance.context.data["projectEntity"] + context_asset_entity = instance.context.data["assetEntity"] - return os.path.join( - instance.data["assumedDestination"], - "resources" - ) - - def create_destination_template(self, instance, anatomy): - """Create a filepath based on the current data available - - Example template: - {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/ - {subset}.{representation} - Args: - instance: the instance to publish - - Returns: - file path (str) - """ - - # get all the stuff from the database - subset_name = instance.data["subset"] - self.log.info(subset_name) asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - a_template = anatomy.templates + if context_asset_entity["name"] == asset_name: + asset_entity = context_asset_entity - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) + else: + asset_entity = io.find_one({ + "type": "asset", + "name": asset_name, + "parent": project_entity["_id"] + }) - template = a_template['publish']['path'] - # anatomy = instance.context.data['anatomy'] + subset_name = instance.data["subset"] + version_number = instance.data.get("version") + latest_version = None - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] + if asset_entity: + subset_entity = io.find_one({ + "type": "subset", + "name": subset_name, + "parent": asset_entity["_id"] + }) + + if subset_entity is None: + self.log.debug("Subset entity does not exist yet.") + else: + version_entity = io.find_one( + { + "type": "version", + "parent": subset_entity["_id"] + }, + sort=[("name", -1)] + ) + if version_entity: + latest_version = version_entity["name"] + + if version_number is None: + version_number = 1 + if latest_version is not None: + version_number += int(latest_version) + + anatomy_data.update({ + "asset": asset_name, + "family": instance.data["family"], + "subset": subset_name, + "version": version_number }) - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') + resolution_width = instance.data.get("resolutionWidth") + if resolution_width: + anatomy_data["resolution_width"] = resolution_width - subset = io.find_one({ - "type": "subset", - "name": subset_name, - 
"parent": asset["_id"] + resolution_height = instance.data.get("resolutionHeight") + if resolution_height: + anatomy_data["resolution_height"] = resolution_height + + fps = instance.data.get("fps") + if resolution_height: + anatomy_data["fps"] = fps + + instance.data["projectEntity"] = project_entity + instance.data["assetEntity"] = asset_entity + instance.data["anatomyData"] = anatomy_data + instance.data["latestVersion"] = latest_version + instance.data["version"] = version_number + + # WARNING this is from `collect_resources_path.py` + anatomy = instance.context.data["anatomy"] + + template_data = copy.deepcopy(instance.data["anatomyData"]) + + # This is for cases of Deprecated anatomy without `folder` + # TODO remove when all clients have solved this issue + template_data.update({ + "frame": "FRAME_TEMP", + "representation": "TEMP" }) - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) + anatomy_filled = anatomy.format(template_data) - # if there is a subset there ought to be version - if version is not None: - version_number += version["name"] + if "folder" in anatomy.templates["publish"]: + publish_folder = anatomy_filled["publish"]["folder"] + else: + # solve deprecated situation when `folder` key is not underneath + # `publish` anatomy + project_name = api.Session["AVALON_PROJECT"] + self.log.warning(( + "Deprecation warning: Anatomy does not have set `folder`" + " key underneath `publish` (in global of for project `{}`)." + ).format(project_name)) - if instance.data.get('version'): - version_number = int(instance.data.get('version')) + file_path = anatomy_filled["publish"]["path"] + # Directory + publish_folder = os.path.dirname(file_path) - padding = int(a_template['render']['padding']) + publish_folder = os.path.normpath(publish_folder) + resources_folder = os.path.join(publish_folder, "resources") - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = "/".join(hierarchy) + instance.data["publishDir"] = publish_folder + instance.data["resourcesDir"] = resources_folder - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} - - instance.data["assumedTemplateData"] = template_data - self.log.info(template_data) - instance.data["template"] = template - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - anatomy.format(template_data)["publish"]["path"] - ) + return resources_folder From f6e6220869a53f7411b55e03468761a9f5f7c323 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 4 Feb 2020 18:54:58 +0100 Subject: [PATCH 274/393] extract yeti rig uses resourcesDir --- pype/plugins/maya/publish/extract_yeti_rig.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/pype/plugins/maya/publish/extract_yeti_rig.py b/pype/plugins/maya/publish/extract_yeti_rig.py index d390a1365a..70a509564f 100644 --- a/pype/plugins/maya/publish/extract_yeti_rig.py +++ b/pype/plugins/maya/publish/extract_yeti_rig.py @@ -1,7 +1,6 @@ import os import json import contextlib -import copy from maya import cmds @@ 
-111,16 +110,7 @@ class ExtractYetiRig(pype.api.Extractor): self.log.info("Writing metadata file") - # Create assumed destination folder for imageSearchPath - template_data = copy.deepcopy(instance.data["anatomyData"]) - - anatomy = instance.context["anatomy"] - filled = anatomy.format(template_data) - - destination_folder = os.path.dir(filled["publish"]["path"]) - - image_search_path = os.path.join(destination_folder, "resources") - image_search_path = os.path.normpath(image_search_path) + image_search_path = resources_dir = instance.data["resourcesDir"] settings = instance.data.get("rigsettings", None) if settings: From f6ae5b2213b6ee21f2e27f8a2a347a669259cc12 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 5 Feb 2020 09:59:25 +0100 Subject: [PATCH 275/393] task name is also checked --- .../global/publish/collect_instance_anatomy_data.py | 12 +++++++++--- pype/plugins/global/publish/integrate_new.py | 4 ++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 8a98b6cbb2..9c6a8b08f2 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -87,13 +87,19 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): if latest_version is not None: version_number += int(latest_version) - # Version should not be collected since may be instance - anatomy_data.update({ + anatomy_updates = { "asset": asset_name, "family": instance.data["family"], "subset": subset_name, "version": version_number - }) + } + + task_name = instance.data.get("task") + if task_name: + anatomy_updates["task"] = task_name + + # Version should not be collected since may be instance + anatomy_data.update(anatomy_updates) resolution_width = instance.data.get("resolutionWidth") if resolution_width: diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 570a093cdc..d27582bb71 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -132,6 +132,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): hierarchy = "/".join(parents) anatomy_data["hierarchy"] = hierarchy + task_name = instance.data.get("task") + if task_name: + anatomy_data["task"] = task_name + stagingdir = instance.data.get("stagingDir") if not stagingdir: self.log.info(( From 38b563495506e028f92b6078e1988235bca30c7c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 5 Feb 2020 14:07:29 +0100 Subject: [PATCH 276/393] update nukestudio init to new install way --- pype/nukestudio/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/nukestudio/__init__.py b/pype/nukestudio/__init__.py index 097f077e15..75825d188a 100644 --- a/pype/nukestudio/__init__.py +++ b/pype/nukestudio/__init__.py @@ -51,7 +51,7 @@ if os.getenv("PYBLISH_GUI", None): pyblish.register_gui(os.getenv("PYBLISH_GUI", None)) -def install(config): +def install(): """ Installing Nukestudio integration for avalon From 75b603d845fe44d6ba5f39268137ca0f6128763e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:46:03 +0100 Subject: [PATCH 277/393] removed add datetime and add frame numbers --- pype/scripts/otio_burnin.py | 38 ------------------------------------- 1 file changed, 38 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index f128352974..aca848dcfa 100644 --- 
a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -132,44 +132,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): options = ffmpeg_burnins.TextOptions(**self.options_init) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_datetime(self, date_format, align, options=None): - """ - Adding date text to a filter. Using pythons datetime module. - - :param str date_format: format of date (e.g. `%d.%m.%Y`) - :param enum align: alignment, must use provided enum flags - :param dict options: recommended to use TextOptions - """ - if not options: - options = ffmpeg_burnins.TextOptions(**self.options_init) - today = datetime.datetime.today() - text = today.strftime(date_format) - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - - def add_frame_numbers( - self, align, options=None, start_frame=None, text=None - ): - """ - Convenience method to create the frame number expression. - - :param enum align: alignment, must use provided enum flags - :param dict options: recommended to use FrameNumberOptions - """ - if not options: - options = ffmpeg_burnins.FrameNumberOptions(**self.options_init) - if start_frame: - options['frame_offset'] = start_frame - - expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] - _text = str(int(self.end_frame + options['frame_offset'])) - if text and isinstance(text, str): - text = r"{}".format(text) - expr = text.replace("{current_frame}", expr) - text = text.replace("{current_frame}", _text) - - options['expression'] = expr - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_timecode(self, align, options=None, start_frame=None): """ Convenience method to create the frame number expression. From 8c75c74cdec4c08225723f9f8328046999588735 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:49:59 +0100 Subject: [PATCH 278/393] print command before run --- pype/scripts/otio_burnin.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index aca848dcfa..b9d10ca23a 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -226,9 +226,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): is_sequence = "%" in output - command = self.command(output=output, - args=args, - overwrite=overwrite) + command = self.command( + output=output, + args=args, + overwrite=overwrite + ) + print(command) + proc = Popen(command, shell=True) proc.communicate() if proc.returncode != 0: From 8e86f6e37a0b6fb4ce794e67372b65d49f85b813 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:50:18 +0100 Subject: [PATCH 279/393] implemented custom drawtext and timecode constants --- pype/scripts/otio_burnin.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index b9d10ca23a..00d63939e7 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -27,6 +27,15 @@ FFPROBE = ( '{} -v quiet -print_format json -show_format -show_streams %(source)s' ).format(os.path.normpath(ffmpeg_path + "ffprobe")) +DRAWTEXT = ( + "drawtext=text=\\'%(text)s\\':x=%(x)s:y=%(y)s:fontcolor=" + "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" +) +TIMECODE = ( + "drawtext=text=\\'%(text)s\\':timecode=\\'%(timecode)s\\'" + ":timecode_rate=%(fps).2f:x=%(x)s:y=%(y)s:fontcolor=" + "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" +) def _streams(source): """Reimplemented from otio burnins to be able use full path to ffprobe 
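
The DRAWTEXT and TIMECODE strings added above are plain %-style format
templates that expand into complete ffmpeg drawtext filters. A minimal sketch
of how DRAWTEXT is meant to be filled; every value below is an illustrative
assumption, not a pype default:

    # Sketch: expanding the DRAWTEXT template with %-formatting.
    # All values are assumed for illustration only.
    DRAWTEXT = (
        "drawtext=text=\\'%(text)s\\':x=%(x)s:y=%(y)s:fontcolor="
        "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
    )
    data = {
        "text": "sh0010",
        "x": 10,
        "y": 10,
        "color": "white",
        "opacity": 1.0,
        "size": 42,
        "font": "arial.ttf",  # assumed font file path
    }
    print(DRAWTEXT % data)
    # drawtext=text=\'sh0010\':x=10:y=10:fontcolor=white@1.0:fontsize=42:fontfile='arial.ttf'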
From 4f862acfb8af3b12315008bd1e95b773fd58fc56 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:50:51 +0100
Subject: [PATCH 280/393] added constants for easier handling of entered keys

---
 pype/scripts/otio_burnin.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 00d63939e7..e0df769db4 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -37,6 +37,11 @@ TIMECODE = (
     "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'"
 )

+MISSING_KEY_VALUE = "N/A"
+CURRENT_FRAME_KEY = "{current_frame}"
+TIME_CODE_KEY = "{timecode}"
+
+
 def _streams(source):
     """Reimplemented from otio burnins to be able use full path to ffprobe
     :param str source: source media file
From b73fcc6730e7b3367988a7aa636f363b60d82204 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 10:51:09 +0100
Subject: [PATCH 281/393] modified docstring

---
 pype/scripts/otio_burnin.py | 28 ++++++++--------------------
 1 file changed, 8 insertions(+), 20 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index e0df769db4..79565af22a 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -307,34 +307,22 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
     - each key of "burnins" represents Alignment, there are 6 possibilities:
         TOP_LEFT        TOP_CENTERED        TOP_RIGHT
         BOTTOM_LEFT     BOTTOM_CENTERED     BOTTOM_RIGHT
-    - value for each key is dict which should contain "function" which says
-        what kind of burnin is that:
-        "text", "timecode" or "frame_numbers"
-    - "text" key with content is also required when "text" function is used
+    - value must be a string with the text you want to burn in
+    - text may contain specific formatting keys (explained below)

     Requirement of *data* keys is based on presets.
- - "start_frame" - is required when "timecode" or "frame_numbers" function is used - - "start_frame_tc" - when "timecode" should start with different frame + - "frame_start" - is required when "timecode" or "current_frame" ins keys + - "frame_start_tc" - when "timecode" should start with different frame - *keys for static text* EXAMPLE: preset = { "options": {*OPTIONS FOR LOOK*}, "burnins": { - "TOP_LEFT": { - "function": "text", - "text": "static_text" - }, - "TOP_RIGHT": { - "function": "text", - "text": "{shot}" - }, - "BOTTOM_LEFT": { - "function": "timecode" - }, - "BOTTOM_RIGHT": { - "function": "frame_numbers" - } + "TOP_LEFT": "static_text", + "TOP_RIGHT": "{shot}", + "BOTTOM_LEFT": "TC: {timecode}", + "BOTTOM_RIGHT": "{frame_start}{current_frame}" } } From d263cc3bfd0029b788d8ce4ff1bea405765bf3ef Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:51:37 +0100 Subject: [PATCH 282/393] data variable in __main___ was renamed to in_data to not be overriden during processing --- pype/scripts/otio_burnin.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 79565af22a..9564982980 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -439,10 +439,10 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) if __name__ == '__main__': import sys import json - data = json.loads(sys.argv[-1]) + in_data = json.loads(sys.argv[-1]) burnins_from_data( - data['input'], - data['codec'], - data['output'], - data['burnin_data'] + in_data['input'], + in_data['codec'], + in_data['output'], + in_data['burnin_data'] ) From be088579be01d7d5db473133d7c49f245aeec10c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:52:26 +0100 Subject: [PATCH 283/393] alignment checks lowered string (it is available to use `top_left` in presets --- pype/scripts/otio_burnin.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 9564982980..85e72245cd 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -359,17 +359,18 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) for align_text, preset in presets.get('burnins', {}).items(): align = None - if align_text == 'TOP_LEFT': + align_text = align_text.strip().lower() + if align_text == "top_left": align = ModifiedBurnins.TOP_LEFT - elif align_text == 'TOP_CENTERED': + elif align_text == "top_centered": align = ModifiedBurnins.TOP_CENTERED - elif align_text == 'TOP_RIGHT': + elif align_text == "top_right": align = ModifiedBurnins.TOP_RIGHT - elif align_text == 'BOTTOM_LEFT': + elif align_text == "bottom_left": align = ModifiedBurnins.BOTTOM_LEFT - elif align_text == 'BOTTOM_CENTERED': + elif align_text == "bottom_centered": align = ModifiedBurnins.BOTTOM_CENTERED - elif align_text == 'BOTTOM_RIGHT': + elif align_text == "bottom_right": align = ModifiedBurnins.BOTTOM_RIGHT bi_func = preset.get('function') From ca19b5d6798ef3535544b36340cea82a26ba7ff5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:56:51 +0100 Subject: [PATCH 284/393] add_text can accept frame_start argument --- pype/scripts/otio_burnin.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 85e72245cd..d913baa5e2 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -134,17 
+134,21 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if options_init: self.options_init.update(options_init) - def add_text(self, text, align, options=None): + def add_text(self, text, align, frame_start=None, options=None): """ Adding static text to a filter. :param str text: text to apply to the drawtext :param enum align: alignment, must use provided enum flags + :param int frame_start: starting frame for burnins :param dict options: recommended to use TextOptions """ if not options: options = ffmpeg_burnins.TextOptions(**self.options_init) - self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) + + options = options.copy() + if frame_start: + options["frame_offset"] = frame_start def add_timecode(self, align, options=None, start_frame=None): """ From 5d5d3eec92d892ddae1845cbabada0847c739471 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 10:57:11 +0100 Subject: [PATCH 285/393] add_text use custom DRAWTEXT ffmpeg string --- pype/scripts/otio_burnin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index d913baa5e2..be4ec3e57d 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -150,6 +150,8 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if frame_start: options["frame_offset"] = frame_start + self._add_burnin(text, align, options, DRAWTEXT) + def add_timecode(self, align, options=None, start_frame=None): """ Convenience method to create the frame number expression. From 96d3e51d9200cf04e4b63705a727d381c48a286e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:00:29 +0100 Subject: [PATCH 286/393] add timecode allows to add text and use custom TIMECODE ffmpeg string --- pype/scripts/otio_burnin.py | 39 +++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index be4ec3e57d..67b85f9ba4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -140,7 +140,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): :param str text: text to apply to the drawtext :param enum align: alignment, must use provided enum flags - :param int frame_start: starting frame for burnins + :param int frame_start: starting frame for burnins current frame :param dict options: recommended to use TextOptions """ if not options: @@ -152,32 +152,41 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): self._add_burnin(text, align, options, DRAWTEXT) - def add_timecode(self, align, options=None, start_frame=None): + def add_timecode( + self, align, frame_start=None, frame_start_tc=None, text=None, + options=None + ): """ Convenience method to create the frame number expression. 
:param enum align: alignment, must use provided enum flags + :param int frame_start: starting frame for burnins current frame + :param int frame_start_tc: starting frame for burnins timecode + :param str text: text that will be before timecode :param dict options: recommended to use TimeCodeOptions """ if not options: options = ffmpeg_burnins.TimeCodeOptions(**self.options_init) - if start_frame: - options['frame_offset'] = start_frame - timecode = ffmpeg_burnins._frames_to_timecode( - options['frame_offset'], + options = options.copy() + if frame_start: + options["frame_offset"] = frame_start + + if not frame_start_tc: + frame_start_tc = options["frame_offset"] + + if not text: + text = "" + + if not options.get("fps"): + options["fps"] = self.frame_rate + + options["timecode"] = ffmpeg_burnins._frames_to_timecode( + frame_start_tc, self.frame_rate ) - options = options.copy() - if not options.get('fps'): - options['fps'] = self.frame_rate - self._add_burnin( - timecode.replace(':', r'\:'), - align, - options, - ffmpeg_burnins.TIMECODE - ) + self._add_burnin(text, align, options, TIMECODE) def _add_burnin(self, text, align, options, draw): """ From defe60e5566ec8a251802636430843650a9115d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:01:37 +0100 Subject: [PATCH 287/393] add burnin do not use expression but only text --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 67b85f9ba4..39bf963342 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -197,7 +197,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): """ resolution = self.resolution data = { - 'text': options.get('expression') or text, + 'text': text, 'color': options['font_color'], 'size': options['font_size'] } From ca2279e710dcf15e8545c3a904027508d9989435 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:01 +0100 Subject: [PATCH 288/393] _drawtext must count text sizes with timecode text --- pype/scripts/otio_burnin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 39bf963342..4c1301becf 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -201,8 +201,12 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): 'color': options['font_color'], 'size': options['font_size'] } + timecode_text = options.get("timecode") or "" + text_for_size = text + timecode_text data.update(options) - data.update(ffmpeg_burnins._drawtext(align, resolution, text, options)) + data.update( + ffmpeg_burnins._drawtext(align, resolution, text_for_size, options) + ) if 'font' in data and ffmpeg_burnins._is_windows(): data['font'] = data['font'].replace(os.sep, r'\\' + os.sep) data['font'] = data['font'].replace(':', r'\:') From 39e785aefb6e4a48b5a8ea215a06070c11c2f425 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:14 +0100 Subject: [PATCH 289/393] doctstring changes --- pype/scripts/otio_burnin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 4c1301becf..73de2f2827 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -347,14 +347,14 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) For this preset we'll need at least this data: data = { - "start_frame": 1001, + "frame_start": 1001, "shot": "sh0010" } When Timecode 
should start from 1 then data need: data = { - "start_frame": 1001, - "start_frame_tc": 1, + "frame_start": 1001, + "frame_start_tc": 1, "shot": "sh0010" } ''' From 9a8c3b56a22cf333909e8dad8fc064a9164c0d1e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:02:59 +0100 Subject: [PATCH 290/393] changed data processing to not use functions but only text --- pype/scripts/otio_burnin.py | 99 +++++++++++++++++-------------------- 1 file changed, 45 insertions(+), 54 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 73de2f2827..e7464cdc7c 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -368,15 +368,27 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) stream = burnin._streams[0] if "resolution_width" not in data: - data["resolution_width"] = stream.get("width", "Unknown") + data["resolution_width"] = stream.get("width", MISSING_KEY_VALUE) if "resolution_height" not in data: - data["resolution_height"] = stream.get("height", "Unknown") + data["resolution_height"] = stream.get("height", MISSING_KEY_VALUE) if "fps" not in data: data["fps"] = get_fps(stream.get("r_frame_rate", "0/0")) - for align_text, preset in presets.get('burnins', {}).items(): + # Check frame start and add expression if is available + if frame_start is not None: + data[CURRENT_FRAME_KEY] = r'%%{eif\:n+%d\:d}' % frame_start + + if frame_start_tc is not None: + data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY + + for align_text, value in presets.get('burnins', {}).items(): + if not value: + continue + + has_timecode = TIME_CODE_KEY in value + align = None align_text = align_text.strip().lower() if align_text == "top_left": @@ -392,65 +404,44 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) elif align_text == "bottom_right": align = ModifiedBurnins.BOTTOM_RIGHT - bi_func = preset.get('function') - if not bi_func: - log.error( - 'Missing function for burnin!' - 'Burnins are not created!' + # Replace with missing key value if frame_start_tc is not set + if frame_start_tc is None and has_timecode: + has_timecode = False + log.warning( + "`frame_start` and `frame_start_tc`" + " are not set in entered data." ) - return + value = value.replace(TIME_CODE_KEY, MISSING_KEY_VALUE) - if ( - bi_func in ['frame_numbers', 'timecode'] and - frame_start is None - ): - log.error( - 'start_frame is not set in entered data!' - 'Burnins are not created!' - ) - return + key_pattern = re.compile(r"(\{.*?[^{0]*\})") - if bi_func == 'frame_numbers': - current_frame_identifier = "{current_frame}" - text = preset.get('text') or current_frame_identifier + missing_keys = [] + for group in key_pattern.findall(value): + try: + group.format(**data) + except (TypeError, KeyError): + missing_keys.append(group) - if current_frame_identifier not in text: - log.warning(( - 'Text for Frame numbers don\'t have ' - '`{current_frame}` key in text!' 
-                ))
-
-            text_items = []
-            split_items = text.split(current_frame_identifier)
-            for item in split_items:
-                text_items.append(item.format(**data))
+        missing_keys = []
+        for group in key_pattern.findall(value):
+            try:
+                group.format(**data)
+            except (TypeError, KeyError):
+                missing_keys.append(group)

-            text = "{current_frame}".join(text_items)
+        missing_keys = list(set(missing_keys))
+        for key in missing_keys:
+            value = value.replace(key, MISSING_KEY_VALUE)

-            burnin.add_frame_numbers(align, start_frame=frame_start, text=text)
+        # Handle timecode differently
+        if has_timecode:
+            args = [align, frame_start, frame_start_tc]
+            if not value.startswith(TIME_CODE_KEY):
+                value_items = value.split(TIME_CODE_KEY)
+                text = value_items[0].format(**data)
+                args.append(text)

-        elif bi_func == 'timecode':
-            burnin.add_timecode(align, start_frame=frame_start_tc)
+            burnin.add_timecode(*args)
+            continue

-        elif bi_func == 'text':
-            if not preset.get('text'):
-                log.error('Text is not set for text function burnin!')
-                return
-            text = preset['text'].format(**data)
-            burnin.add_text(text, align)
+        text = value.format(**data)
+        burnin.add_text(text, align, frame_start)

-        elif bi_func == "datetime":
-            date_format = preset["format"]
-            burnin.add_datetime(date_format, align)
-
-        else:
-            log.error(
-                'Unknown function for burnins {}'.format(bi_func)
-            )
-            return
-
-    codec_args = ''
-    if codec_data is not []:
+    codec_args = ""
+    if codec_data:
         codec_args = " ".join(codec_data)

     burnin.render(output_path, args=codec_args, overwrite=overwrite, **data)
From 1033f779d1a72d33365ec197b398a6f41cf478f9 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 6 Feb 2020 11:03:58 +0100
Subject: [PATCH 291/393] codec moved to optional args because it is optional

---
 pype/scripts/otio_burnin.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index e7464cdc7c..bc45e45f82 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -302,7 +302,9 @@ def example(input_path, output_path):
     burnin.render(output_path, overwrite=True)


-def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True):
+def burnins_from_data(
+    input_path, output_path, data, codec_data=None, overwrite=True
+):
     '''
     This method adds burnins to video/image file based on presets setting.
     Extension of output MUST be same as input. (mov -> mov, avi -> avi,...)
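
Under the reworked signature the codec arguments come last and may be
omitted. A hypothetical invocation; paths, formatting data and ffmpeg flags
below are illustrative, not taken from the patch:

    # Import path assumed; the module lives at pype/scripts/otio_burnin.py.
    from pype.scripts.otio_burnin import burnins_from_data

    burnins_from_data(
        "/path/to/input.mov",    # assumed input path
        "/path/to/output.mov",   # assumed output path
        {"shot": "sh0010", "frame_start": 1001, "frame_start_tc": 1},
        codec_data=["-codec:v", "mjpeg", "-qscale:v", "2"],  # optional, assumed flags
    )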
@@ -453,7 +455,7 @@ if __name__ == '__main__': in_data = json.loads(sys.argv[-1]) burnins_from_data( in_data['input'], - in_data['codec'], in_data['output'], - in_data['burnin_data'] + in_data['burnin_data'], + in_data['codec'] ) From f19235f91e4492331f04df281049d8984716fcdd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:04:08 +0100 Subject: [PATCH 292/393] added forgotten import --- pype/scripts/otio_burnin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index bc45e45f82..8a95542c04 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -1,4 +1,5 @@ import os +import re import datetime import subprocess import json From 6be774b1f8716471e28beb2659f3d27750df6f4e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:04:43 +0100 Subject: [PATCH 293/393] removed imports from __main__ --- pype/scripts/otio_burnin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 8a95542c04..6c1e19690b 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -1,4 +1,5 @@ import os +import sys import re import datetime import subprocess @@ -451,8 +452,6 @@ def burnins_from_data( if __name__ == '__main__': - import sys - import json in_data = json.loads(sys.argv[-1]) burnins_from_data( in_data['input'], From a2d07a89a9fa19b007c0565459df4973bbf1710d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 6 Feb 2020 11:06:31 +0100 Subject: [PATCH 294/393] removed deprecated method usage in example --- pype/scripts/otio_burnin.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 6c1e19690b..590939df56 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -296,10 +296,6 @@ def example(input_path, output_path): burnin.add_text('My Text', ModifiedBurnins.TOP_CENTERED) # Datetime burnin.add_text('%d-%m-%y', ModifiedBurnins.TOP_RIGHT) - # Frame number - burnin.add_frame_numbers(ModifiedBurnins.TOP_RIGHT, start_frame=start_frame) - # Timecode - burnin.add_timecode(ModifiedBurnins.TOP_LEFT, start_frame=start_frame) # Start render (overwrite output file if exist) burnin.render(output_path, overwrite=True) From b68cbf4be0c41152a4008d2312626a0e6075cf4b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 16:59:22 +0100 Subject: [PATCH 295/393] fix(nuke): didnt create write node --- pype/nuke/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 2ed9f75513..6d6e7de1b2 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -374,7 +374,7 @@ def create_write_node(name, data, input=None, prenodes=None): now_node.setInput(0, prev_node) # imprinting group node - avalon.nuke.imprint(GN, data["avalon"], tab="Pype") + avalon.nuke.imprint(GN, data["avalon"]) divider = nuke.Text_Knob('') GN.addKnob(divider) From 979cad41dceeb2a9dffd62d82b55ccd5edef06d0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 16:59:59 +0100 Subject: [PATCH 296/393] fix(nks):didnt publish plates --- pype/plugins/nukestudio/publish/collect_plates.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index be448931c8..70f0f7407e 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -156,8 
+156,9 @@ class CollectPlatesData(api.InstancePlugin): ext=ext ) - start_frame = source_first_frame + instance.data["sourceInH"] - duration = instance.data["sourceOutH"] - instance.data["sourceInH"] + start_frame = int(source_first_frame + instance.data["sourceInH"]) + duration = int( + instance.data["sourceOutH"] - instance.data["sourceInH"]) end_frame = start_frame + duration self.log.debug("start_frame: `{}`".format(start_frame)) self.log.debug("end_frame: `{}`".format(end_frame)) From d0f0129c2c585b4f493e8b0bbd68f24f317849eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 17:40:07 +0100 Subject: [PATCH 297/393] fix(nk): loaders use self log --- pype/plugins/nuke/load/load_sequence.py | 31 +++++++++++-------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 76ff7d2cb6..db77c53aff 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -5,10 +5,6 @@ import contextlib from avalon import api, io from pype.nuke import presets -from pype.api import Logger - -log = Logger().get_logger(__name__, "nuke") - @contextlib.contextmanager def preserve_trim(node): @@ -35,14 +31,14 @@ def preserve_trim(node): if start_at_frame: node['frame_mode'].setValue("start at") node['frame'].setValue(str(script_start)) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) if offset_frame: node['frame_mode'].setValue("offset") node['frame'].setValue(str((script_start + offset_frame))) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) def loader_shift(node, frame, relative=True): @@ -74,7 +70,7 @@ def loader_shift(node, frame, relative=True): class LoadSequence(api.Loader): """Load image sequence into Nuke""" - families = ["write", "source", "plate", "render"] + families = ["render2d", "source", "plate", "render"] representations = ["exr", "dpx", "jpg", "jpeg", "png"] label = "Load sequence" @@ -91,7 +87,7 @@ class LoadSequence(api.Loader): version = context['version'] version_data = version.get("data", {}) - log.info("version_data: {}\n".format(version_data)) + self.log.info("version_data: {}\n".format(version_data)) self.first_frame = int(nuke.root()["first_frame"].getValue()) self.handle_start = version_data.get("handleStart", 0) @@ -111,7 +107,7 @@ class LoadSequence(api.Loader): if not file: repr_id = context["representation"]["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -242,7 +238,7 @@ class LoadSequence(api.Loader): if not file: repr_id = representation["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -277,9 +273,10 @@ class LoadSequence(api.Loader): last = version_data.get("frameEnd") if first is None: - log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(node['name'].value(), representation)) + self.log.warning("Missing start frame for updated version" + "assuming starts at frame 0 for: " + "{} ({})".format( + node['name'].value(), representation)) first = 0 first -= self.handle_start @@ -288,7 +285,7 @@ class LoadSequence(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): node["file"].setValue(file) - log.info("__ node['file']: 
{}".format(node["file"].value())) + self.log.info("__ node['file']: {}".format(node["file"].value())) # Set the global in to the start frame of the sequence loader_shift(node, first, relative=True) @@ -328,7 +325,7 @@ class LoadSequence(api.Loader): node, updated_dict ) - log.info("udated to version: {}".format(version.get("name"))) + self.log.info("udated to version: {}".format(version.get("name"))) def remove(self, container): From 1b1770dd638cd684a1f69c76c5fb06eef9739ad8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 6 Feb 2020 17:40:37 +0100 Subject: [PATCH 298/393] fix(nuke): loader mov reads revew presets for family and representation --- pype/plugins/nuke/load/load_mov.py | 58 +++++++++++++++++++++--------- 1 file changed, 41 insertions(+), 17 deletions(-) diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py index fccba4c573..77346a82a4 100644 --- a/pype/plugins/nuke/load/load_mov.py +++ b/pype/plugins/nuke/load/load_mov.py @@ -4,9 +4,7 @@ import contextlib from avalon import api, io from pype.nuke import presets - -from pype.api import Logger -log = Logger().get_logger(__name__, "nuke") +from pypeapp import config @contextlib.contextmanager @@ -34,14 +32,14 @@ def preserve_trim(node): if start_at_frame: node['frame_mode'].setValue("start at") node['frame'].setValue(str(script_start)) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) if offset_frame: node['frame_mode'].setValue("offset") node['frame'].setValue(str((script_start + offset_frame))) - log.info("start frame of Read was set to" - "{}".format(script_start)) + print("start frame of Read was set to" + "{}".format(script_start)) def loader_shift(node, frame, relative=True): @@ -70,11 +68,37 @@ def loader_shift(node, frame, relative=True): return int(script_start) +def add_review_presets_config(): + returning = { + "families": list(), + "representations": list() + } + review_presets = config.get_presets()["plugins"]["global"]["publish"].get( + "ExtractReview", {}) + + outputs = review_presets.get("outputs", {}) + # + for output, properities in outputs.items(): + returning["representations"].append(output) + returning["families"] += properities.get("families", []) + + return returning + + class LoadMov(api.Loader): """Load mov file into Nuke""" + presets = add_review_presets_config() + families = [ + "source", + "plate", + "render", + "review"] + presets["families"] - families = ["write", "source", "plate", "render", "review"] - representations = ["wipmov", "h264", "mov", "preview", "review", "mp4"] + representations = [ + "mov", + "preview", + "review", + "mp4"] + presets["representations"] label = "Load mov" order = -10 @@ -115,7 +139,7 @@ class LoadMov(api.Loader): if not file: repr_id = context["representation"]["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -211,7 +235,7 @@ class LoadMov(api.Loader): if not file: repr_id = representation["_id"] - log.warning( + self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -246,9 +270,10 @@ class LoadMov(api.Loader): colorspace = version_data.get("colorspace") if first is None: - log.warning("Missing start frame for updated version" - "assuming starts at frame 0 for: " - "{} ({})".format(node['name'].value(), representation)) + self.log.warning("Missing start frame for updated version" + "assuming starts at frame 0 for: " + "{} ({})".format( + 
node['name'].value(), representation)) first = 0 # fix handle start and end if none are available @@ -264,7 +289,7 @@ class LoadMov(api.Loader): # Update the loader's path whilst preserving some values with preserve_trim(node): node["file"].setValue(file) - log.info("__ node['file']: {}".format(node["file"].value())) + self.log.info("__ node['file']: {}".format(node["file"].value())) # Set the global in to the start frame of the sequence loader_shift(node, first, relative=True) @@ -290,7 +315,6 @@ class LoadMov(api.Loader): if preset_clrsp is not None: node["colorspace"].setValue(str(preset_clrsp)) - updated_dict = {} updated_dict.update({ "representation": str(representation["_id"]), @@ -316,7 +340,7 @@ class LoadMov(api.Loader): update_container( node, updated_dict ) - log.info("udated to version: {}".format(version.get("name"))) + self.log.info("udated to version: {}".format(version.get("name"))) def remove(self, container): From ae387d09778607ec56b12c2d9d75a9e74740786a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 11:39:22 +0100 Subject: [PATCH 299/393] added subproces for status --- pype/ftrack/ftrack_server/sub_event_info.py | 411 ++++++++++++++++++++ 1 file changed, 411 insertions(+) create mode 100644 pype/ftrack/ftrack_server/sub_event_info.py diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py new file mode 100644 index 0000000000..d63b6acadd --- /dev/null +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -0,0 +1,411 @@ +import os +import sys +import copy +import signal +import socket +import uuid +from datetime import datetime + +import ftrack_api +from ftrack_server import FtrackServer +from pype.ftrack.ftrack_server.lib import ( + SocketSession, SocketBaseEventHub, + TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT +) +from pypeapp import Logger + +log = Logger().get_logger("Event storer") +log.info(os.environ.get("FTRACK_EVENT_SUB_ID")) + + +class ObjectFactory: + session = None + sock = None + subprocess_id = os.environ["FTRACK_EVENT_SUB_ID"] + status_factory = None + + +def trigger_status_info(status_id=None, status=None): + if not status and not status_id: + log.warning( + "`status_id` or `status` must be specified to trigger action." + ) + return + + if not status: + status = ObjectFactory.status_factory[status_id] + + if not status: + return + + new_event_data = copy.deepcopy(action_data) + new_event_data.update({ + "selection": [] + }) + new_event_data["subprocess_id"] = ObjectFactory.subprocess_id + new_event_data["status_id"] = status.id + + new_event = ftrack_api.event.base.Event( + topic="ftrack.action.launch", + data=new_event_data, + source=status.source + ) + ObjectFactory.session.event_hub.publish(new_event) + + +action_identifier = ( + "event.server.status" + ObjectFactory.subprocess_id +) + +# TODO add IP adress to label +# TODO add icon +action_data = { + "label": "Pype Admin", + "variant": "Event server Status", + "description": "Get Infromation about event server", + "actionIdentifier": action_identifier, + "icon": None +} + + +class Status: + default_item = { + "type": "label", + "value": "Information not allowed." + } + note_item = { + "type": "label", + "value": "Hit `submit` to refresh data." 
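+        # NOTE (assumed context): these label dicts follow the ftrack
+        # action-interface "items" schema; handle_filled_event below sends
+        # them to the user inside a "widget" interface event.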
+ } + splitter_item = { + "type": "label", + "value": "---" + } + + def __init__(self, source_info, parent): + self.id = str(uuid.uuid1()) + self.created = datetime.now() + self.parent = parent + + self.source = source_info + + self.main_process = None + self.storer = None + self.processor = None + + def add_result(self, source, data): + if source.lower() == "storer": + self.storer = data + + elif source.lower() == "processor": + self.processor = data + + else: + self.main_process = data + + def filled(self): + # WARNING DEBUG PART!!!! + return True + return ( + self.main_process is not None and + self.storer is not None and + self.processor is not None + ) + + def get_items_from_dict(self, in_dict): + items = [] + for key, value in in_dict.items(): + items.append({ + "type": "label", + "value": "##{}".format(key) + }) + items.append({ + "type": "label", + "value": value + }) + return items + + def bool_items(self): + items = [] + name_labels = { + "shutdown_main": "Shutdown main process", + "reset_storer": "Reset storer", + "reset_processor": "Reset processor" + } + for name, label in name_labels.items(): + items.append({ + "type": "boolean", + "value": False, + "label": label, + "name": name + }) + return items + + def items(self): + items = [] + items.append(self.note_item) + + items.append({"type": "label", "value": "Main process"}) + if not self.main_process: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.main_process) + ) + + items.append(self.splitter_item) + items.append({"type": "label", "value": "Storer process"}) + if not self.storer: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.storer) + ) + + items.append(self.splitter_item) + items.append({"type": "label", "value": "Processor process"}) + if not self.processor: + items.append(self.default_item) + else: + items.extend( + self.get_items_from_dict(self.processor) + ) + + items.append(self.splitter_item) + items.extend(self.bool_items()) + + return items + + @property + def is_overtime(self): + time_delta = (datetime.now() - self.created).total_seconds() + return time_delta >= self.parent.max_delta_seconds + + +class StatusFactory: + max_delta_seconds = 30 + + def __init__(self): + self.statuses = {} + + def __getitem__(self, key): + return self.statuses.get(key) + + def create_status(self, source_info): + new_status = Status(source_info, self) + self.statuses[new_status.id] = new_status + return new_status + + def process_result(self, event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != ObjectFactory.subprocess_id: + return + + status_id = event["data"].get("status_id") + status = self.statuses[status_id] + if not status: + return + + source = event["data"]["source"] + data = event["data"]["status_info"] + + status.add_result(source, data) + if status.filled(): + trigger_status_info(status=status) + + +def server_activity_validate_user(event): + """Validate user permissions to show server info.""" + session = ObjectFactory.session + + username = event["source"].get("user", {}).get("username") + if not username: + return False + + user_ent = session.query( + "User where username = \"{}\"".format(username) + ).first() + if not user_ent: + return False + + role_list = ["Pypeclub", "Administrator"] + for role in user_ent["user_security_roles"]: + if role["security_role"]["name"] in role_list: + return True + return False + + +def server_activity_discover(event): + """Discover action in actions menu conditions.""" + 
session = ObjectFactory.session + if session is None: + return + + if not server_activity_validate_user(event): + return + + return {"items": [action_data]} + + +def handle_filled_event(event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != ObjectFactory.subprocess_id: + return None + + status_id = event["data"].get("status_id") + status = ObjectFactory.status_factory[status_id] + if not status: + return None + + values = event.get("values") + if values: + log.info(values) + + title = "Event server - Status" + + event_data = copy.deepcopy(event["data"]) + event_data.update({ + "type": "widget", + "items": status.items(), + "title": title + }) + + ObjectFactory.session.event_hub.publish( + ftrack_api.event.base.Event( + topic="ftrack.action.trigger-user-interface", + data=event_data + ), + on_error='ignore' + ) + + +def server_activity(event): + session = ObjectFactory.session + if session is None: + msg = "Session is not set. Can't trigger Reset action." + log.warning(msg) + return { + "success": False, + "message": msg + } + + valid = server_activity_validate_user(event) + if not valid: + return { + "success": False, + "message": "You don't have permissions to see Event server status!" + } + + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id is not None: + return handle_filled_event(event) + + status = ObjectFactory.status_factory.create_status(event["source"]) + + event_data = { + "status_id": status.id, + "subprocess_id": ObjectFactory.subprocess_id + } + session.event_hub.publish( + ftrack_api.event.base.Event( + topic=TOPIC_STATUS_SERVER, + data=event_data + ), + on_error="ignore" + ) + + return { + "success": True, + "message": "Collecting information (this may take > 20s)" + } + + +def register(session): + '''Registers the event, subscribing the discover and launch topics.''' + session.event_hub.subscribe( + "topic=ftrack.action.discover", + server_activity_discover + ) + + status_launch_subscription = ( + "topic=ftrack.action.launch and data.actionIdentifier={}" + ).format(action_identifier) + + session.event_hub.subscribe( + status_launch_subscription, + server_activity + ) + + session.event_hub.subscribe( + "topic={}".format(TOPIC_STATUS_SERVER_RESULT), + ObjectFactory.status_factory.process_result + ) + + +def main(args): + port = int(args[-1]) + + # Create a TCP/IP socket + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + # Connect the socket to the port where the server is listening + server_address = ("localhost", port) + log.debug("Storer connected to {} port {}".format(*server_address)) + sock.connect(server_address) + sock.sendall(b"CreatedStatus") + # store socket connection object + ObjectFactory.sock = sock + ObjectFactory.status_factory = StatusFactory() + + _returncode = 0 + try: + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub + ) + ObjectFactory.session = session + register(session) + server = FtrackServer("event") + log.debug("Launched Ftrack Event storer") + server.run_server(session, load_files=False) + + except Exception: + _returncode = 1 + log.error("ServerInfo subprocess crashed", exc_info=True) + + finally: + log.debug("Ending. Closing socket.") + sock.close() + return _returncode + + +if __name__ == "__main__": + # Register interupt signal + def signal_handler(sig, frame): + print("You pressed Ctrl+C. 
Process ended.") + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + sys.exit(main(sys.argv)) + + +example_action_event = { + 'data': { + 'selection': [], + 'description': 'Test action2', + 'variant': None, + 'label': 'Test action2', + 'actionIdentifier': 'test.action2.3ceffe5e9acf40f8aa80603adebd0d06', + 'values': {}, + 'icon': None, + }, + 'topic': 'ftrack.action.launch', + 'sent': None, + 'source': { + 'id': 'eb67d186301c4cbbab73c1aee9b7c55d', + 'user': {'username': 'jakub.trllo', 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121'} + }, + 'target': '', + 'in_reply_to_event': None +} From c937964dc8c80b54b95d5059670f845a83f4ca82 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 12:13:22 +0100 Subject: [PATCH 300/393] added subprocess to event server cli --- pype/ftrack/ftrack_server/event_server_cli.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..b2c540e993 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -7,6 +7,7 @@ import socket import argparse import atexit import time +import uuid import ftrack_api from pype.ftrack.lib import credentials @@ -175,6 +176,7 @@ def main_loop(ftrack_url): otherwise thread will be killed. """ + os.environ["FTRACK_EVENT_SUB_ID"] = str(uuid.uuid1()) # Get mongo hostname and port for testing mongo connection mongo_list = ftrack_events_mongo_settings() mongo_hostname = mongo_list[0] @@ -202,6 +204,13 @@ def main_loop(ftrack_url): processor_last_failed = datetime.datetime.now() processor_failed_count = 0 + statuser_name = "StorerThread" + statuser_port = 10021 + statuser_path = "{}/sub_event_info.py".format(file_path) + statuser_thread = None + statuser_last_failed = datetime.datetime.now() + statuser_failed_count = 0 + ftrack_accessible = False mongo_accessible = False @@ -336,6 +345,43 @@ def main_loop(ftrack_url): processor_failed_count = 0 processor_last_failed = _processor_last_failed + if statuser_thread is None: + if statuser_failed_count < max_fail_count: + statuser_thread = socket_thread.SocketThread( + statuser_name, statuser_port, statuser_path + ) + statuser_thread.start() + + elif statuser_failed_count == max_fail_count: + print(( + "Statuser failed {}times in row" + " I'll try to run again {}s later" + ).format(str(max_fail_count), str(wait_time_after_max_fail))) + statuser_failed_count += 1 + + elif (( + datetime.datetime.now() - statuser_last_failed + ).seconds > wait_time_after_max_fail): + statuser_failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not statuser_thread.isAlive(): + statuser_thread.join() + statuser_thread = None + ftrack_accessible = False + mongo_accessible = False + + _processor_last_failed = datetime.datetime.now() + delta_time = ( + _processor_last_failed - statuser_last_failed + ).seconds + + if delta_time < min_fail_seconds: + statuser_failed_count += 1 + else: + statuser_failed_count = 0 + statuser_last_failed = _processor_last_failed + time.sleep(1) From fa60c87c3e0f9e9261dd9b9e5c8b4188c50e0b4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:28:29 +0100 Subject: [PATCH 301/393] created base EventHub that can set callbacks on heartbeat and set message for sockets on heartbeat --- pype/ftrack/ftrack_server/lib.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git 
From fa60c87c3e0f9e9261dd9b9e5c8b4188c50e0b4f Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:28:29 +0100
Subject: [PATCH 301/393] created base EventHub that can set callbacks on
 heartbeat and set message for sockets on heartbeat

---
 pype/ftrack/ftrack_server/lib.py | 20 +++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)

diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index fefba580e0..2617b63614 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -123,20 +123,30 @@ def check_ftrack_url(url, log_errors=True):
     return url


-class StorerEventHub(ftrack_api.event.hub.EventHub):
+class SocketBaseEventHub(ftrack_api.event.hub.EventHub):
+
+    heartbeat_msg = b"heartbeat"
+    heartbeat_callbacks = []
+
     def __init__(self, *args, **kwargs):
         self.sock = kwargs.pop("sock")
-        super(StorerEventHub, self).__init__(*args, **kwargs)
+        super(SocketBaseEventHub, self).__init__(*args, **kwargs)

     def _handle_packet(self, code, packet_identifier, path, data):
         """Override `_handle_packet` which extend heartbeat"""
         code_name = self._code_name_mapping[code]
         if code_name == "heartbeat":
             # Reply with heartbeat.
-            self.sock.sendall(b"storer")
-            return self._send_packet(self._code_name_mapping['heartbeat'])
+            for callback in self.heartbeat_callbacks:
+                callback()
+
+            self.sock.sendall(self.heartbeat_msg)
+            return self._send_packet(self._code_name_mapping["heartbeat"])
+
+        return super(SocketBaseEventHub, self)._handle_packet(
+            code, packet_identifier, path, data
+        )

-        elif code_name == "connect":
             event = ftrack_api.event.base.Event(
                 topic="pype.storer.started",
                 data={},
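The `heartbeat_callbacks` hook added by the patch above is the extension point the later status subprocess relies on: the hub runs every registered callback each time the ftrack server heartbeats, then echoes its identifying message to the parent process over the socket. A reduced, runnable model of that branch (FakeSocket and MiniHub are illustrative stand-ins, not the real ftrack_api classes):

class FakeSocket:
    """Stand-in for the socket SocketThread hands to the hub."""
    def sendall(self, data):
        print("to parent:", data)


class MiniHub:
    """Reduced model of SocketBaseEventHub's heartbeat handling."""
    heartbeat_msg = b"heartbeat"
    heartbeat_callbacks = []

    def __init__(self, sock):
        self.sock = sock

    def on_heartbeat(self):
        # Mirrors the "heartbeat" branch of _handle_packet: run the
        # hooks first, then confirm liveness to the parent process.
        for callback in self.heartbeat_callbacks:
            callback()
        self.sock.sendall(self.heartbeat_msg)


hub = MiniHub(FakeSocket())
hub.heartbeat_callbacks.append(lambda: print("refresh cached status"))
hub.on_heartbeat()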
From 24022c583651f16d70b210e340472be523c447d8 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:28:44 +0100
Subject: [PATCH 302/393] Status event hub implemented

---
 pype/ftrack/ftrack_server/lib.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index 2617b63614..71ce6861a4 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -147,6 +147,25 @@ class SocketBaseEventHub(ftrack_api.event.hub.EventHub):
             code, packet_identifier, path, data
         )

+
+class StatusEventHub(SocketBaseEventHub):
+    def _handle_packet(self, code, packet_identifier, path, data):
+        """Override `_handle_packet` which extends heartbeat"""
+        code_name = self._code_name_mapping[code]
+        if code_name == "connect":
+            event = ftrack_api.event.base.Event(
+                topic="pype.status.started",
+                data={},
+                source={
+                    "id": self.id,
+                    "user": {"username": self._api_user}
+                }
+            )
+            self._event_queue.put(event)
+
+        return super(StatusEventHub, self)._handle_packet(
+            code, packet_identifier, path, data
+        )
     event = ftrack_api.event.base.Event(
         topic="pype.storer.started",
         data={},

From a97c73258e349291ae8f0899f37ac7ec9a8c13b5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:29:01 +0100
Subject: [PATCH 303/393] removed user event hub

---
 pype/ftrack/ftrack_server/lib.py             | 29 --------------------
 pype/ftrack/ftrack_server/sub_user_server.py |  4 +--
 2 files changed, 2 insertions(+), 31 deletions(-)

diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index 71ce6861a4..57c5b7d5dc 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -296,35 +296,6 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub):
         return super()._handle_packet(code, packet_identifier, path, data)


-class UserEventHub(ftrack_api.event.hub.EventHub):
-    def __init__(self, *args, **kwargs):
-        self.sock = kwargs.pop("sock")
-        super(UserEventHub, self).__init__(*args, **kwargs)
-
-    def _handle_packet(self, code, packet_identifier, path, data):
-        """Override `_handle_packet` which extend heartbeat"""
-        code_name = self._code_name_mapping[code]
-        if code_name == "heartbeat":
-            # Reply with heartbeat.
-            self.sock.sendall(b"hearbeat")
-            return self._send_packet(self._code_name_mapping['heartbeat'])
-
-        elif code_name == "connect":
-            event = ftrack_api.event.base.Event(
-                topic="pype.storer.started",
-                data={},
-                source={
-                    "id": self.id,
-                    "user": {"username": self._api_user}
-                }
-            )
-            self._event_queue.put(event)
-
-        return super(UserEventHub, self)._handle_packet(
-            code, packet_identifier, path, data
-        )
-
-
 class SocketSession(ftrack_api.session.Session):
     '''An isolated session for interaction with an ftrack server.'''
     def __init__(
diff --git a/pype/ftrack/ftrack_server/sub_user_server.py b/pype/ftrack/ftrack_server/sub_user_server.py
index f0d39447a8..8c1497a562 100644
--- a/pype/ftrack/ftrack_server/sub_user_server.py
+++ b/pype/ftrack/ftrack_server/sub_user_server.py
@@ -5,7 +5,7 @@ import socket
 import traceback

 from ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub
+from pype.ftrack.ftrack_server.lib import SocketSession, SocketBaseEventHub
 from pypeapp import Logger


@@ -28,7 +28,7 @@ def main(args):

     try:
         session = SocketSession(
-            auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub
+            auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub
         )
         server = FtrackServer("action")
         log.debug("Launched User Ftrack Server")

From 526f9282d1e4136b44eab6e5505b1adf23e4af5b Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:29:24 +0100
Subject: [PATCH 304/393] storer and processor eventhubs are modified

---
 pype/ftrack/ftrack_server/lib.py | 18 +++++++++++++-----
 1 file changed, 13 insertions(+), 5 deletions(-)

diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index 57c5b7d5dc..478bede6ef 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -166,6 +166,16 @@ class StatusEventHub(SocketBaseEventHub):
         return super(StatusEventHub, self)._handle_packet(
             code, packet_identifier, path, data
         )
+
+
+class StorerEventHub(SocketBaseEventHub):
+
+    heartbeat_msg = b"storer"
+
+    def _handle_packet(self, code, packet_identifier, path, data):
+        """Override `_handle_packet` which extends heartbeat"""
+        code_name = self._code_name_mapping[code]
+        if code_name == "connect":
         event = ftrack_api.event.base.Event(
             topic="pype.storer.started",
             data={},
@@ -181,7 +191,9 @@ class StorerEventHub(SocketBaseEventHub):
         )


-class ProcessEventHub(ftrack_api.event.hub.EventHub):
+class ProcessEventHub(SocketBaseEventHub):
+
+    heartbeat_msg = b"processor"

     url, database, table_name = get_ftrack_event_mongo_info()
     is_table_created = False
@@ -193,7 +205,6 @@ class ProcessEventHub(SocketBaseEventHub):
             database_name=self.database,
             table_name=self.table_name
         )
-        self.sock = kwargs.pop("sock")
         super(ProcessEventHub, self).__init__(*args, **kwargs)

     def prepare_dbcon(self):
@@ -289,9 +300,6 @@ class ProcessEventHub(SocketBaseEventHub):
         code_name = self._code_name_mapping[code]
         if code_name == "event":
             return
-        if code_name == "heartbeat":
-            self.sock.sendall(b"processor")
-            return self._send_packet(self._code_name_mapping["heartbeat"])

         return super()._handle_packet(code, packet_identifier, path, data)
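Because each hub subclass announces itself with different bytes on every heartbeat, the parent process can use the socket traffic itself as a liveness signal. A minimal sketch of the parent side (the echo mirrors what SocketThread._handle_data does; the one-second timeout is an illustrative value, the real thread uses MAX_TIMEOUT = 35):

import socket


def watch_once(conn, timeout=1.0):
    """Read one heartbeat from a child and echo it back; a timeout
    means the child stopped heartbeating and should be restarted."""
    conn.settimeout(timeout)
    try:
        data = conn.recv(16)
    except socket.timeout:
        return None
    if data:
        conn.sendall(data)  # echo confirms receipt, as _handle_data does
    return data


parent, child = socket.socketpair()
child.sendall(b"storer")   # what StorerEventHub sends on heartbeat
print(watch_once(parent))  # b'storer'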
From 4fd403bf54a167ea6d0621554b0a9b6768ca2bfb Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:29:38 +0100
Subject: [PATCH 305/393] added constants with topics to lib

---
 pype/ftrack/ftrack_server/lib.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index 478bede6ef..e623cab8fb 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -28,6 +28,10 @@ from pypeapp import Logger

 from pype.ftrack.lib.custom_db_connector import DbConnector

+TOPIC_STATUS_SERVER = "pype.event.server.status"
+TOPIC_STATUS_SERVER_RESULT = "pype.event.server.status.result"
+
+
 def ftrack_events_mongo_settings():
     host = None
     port = None

From 37de60577809c2ace929f7dab880a95ddc0ed0c2 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:30:07 +0100
Subject: [PATCH 306/393] socket thread can pass additional arguments to the
 executed script; the unused -port arg was removed

---
 pype/ftrack/ftrack_server/socket_thread.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py
index 8e217870ba..cb073d83a0 100644
--- a/pype/ftrack/ftrack_server/socket_thread.py
+++ b/pype/ftrack/ftrack_server/socket_thread.py
@@ -12,13 +12,14 @@ class SocketThread(threading.Thread):

     MAX_TIMEOUT = 35

-    def __init__(self, name, port, filepath):
+    def __init__(self, name, port, filepath, additional_args=[]):
         super(SocketThread, self).__init__()
         self.log = Logger().get_logger("SocketThread", "Event Thread")
         self.setName(name)
         self.name = name
         self.port = port
         self.filepath = filepath
+        self.additional_args = additional_args
         self.sock = None
         self.subproc = None
         self.connection = None
@@ -53,7 +54,12 @@ class SocketThread(threading.Thread):
         )

         self.subproc = subprocess.Popen(
-            [sys.executable, self.filepath, "-port", str(self.port)]
+            [
+                sys.executable,
+                self.filepath,
+                *self.additional_args,
+                str(self.port)
+            ]
         )

         # Listen for incoming connections

From 05929f2b02929b9652411e4f0b53d324f3a67b76 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:31:24 +0100
Subject: [PATCH 307/393] status gets subprocess data only if it is missing
 (no need to collect the same data for each action launch)

---
 pype/ftrack/ftrack_server/sub_event_info.py | 426 +++++++++-----------
 1 file changed, 197 insertions(+), 229 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py
index d63b6acadd..5a38c992f5 100644
--- a/pype/ftrack/ftrack_server/sub_event_info.py
+++ b/pype/ftrack/ftrack_server/sub_event_info.py
@@ -1,137 +1,189 @@
 import os
 import sys
-import copy
+import json
 import signal
 import socket
-import uuid
-from datetime import datetime
+import datetime

 import ftrack_api
 from ftrack_server import FtrackServer
 from pype.ftrack.ftrack_server.lib import (
-    SocketSession, SocketBaseEventHub,
+    SocketSession, StatusEventHub,
+    TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
 from pypeapp import Logger

 log = Logger().get_logger("Event storer")
-log.info(os.environ.get("FTRACK_EVENT_SUB_ID"))
-
-
-class ObjectFactory:
-    session = None
-    sock = None
-    subprocess_id = os.environ["FTRACK_EVENT_SUB_ID"]
-    status_factory = None
-
-
-def trigger_status_info(status_id=None, status=None):
-    if not status and not status_id:
-        log.warning(
-            "`status_id` or `status` must be specified to trigger action."
- ) - return - - if not status: - status = ObjectFactory.status_factory[status_id] - - if not status: - return - - new_event_data = copy.deepcopy(action_data) - new_event_data.update({ - "selection": [] - }) - new_event_data["subprocess_id"] = ObjectFactory.subprocess_id - new_event_data["status_id"] = status.id - - new_event = ftrack_api.event.base.Event( - topic="ftrack.action.launch", - data=new_event_data, - source=status.source - ) - ObjectFactory.session.event_hub.publish(new_event) - - action_identifier = ( - "event.server.status" + ObjectFactory.subprocess_id + "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"] ) - -# TODO add IP adress to label -# TODO add icon action_data = { "label": "Pype Admin", - "variant": "Event server Status", + "variant": "- Event server Status", "description": "Get Infromation about event server", "actionIdentifier": action_identifier, "icon": None } +class ObjectFactory: + session = None + status_factory = None + + class Status: default_item = { "type": "label", - "value": "Information not allowed." + "value": "Process info is not available at this moment." } + + def __init__(self, name, label, parent): + self.name = name + self.label = label or name + self.parent = parent + + self.info = None + self.last_update = None + + def update(self, info): + self.last_update = datetime.datetime.now() + self.info = info + + def get_delta_string(self, delta): + days, hours, minutes = ( + delta.days, delta.seconds // 3600, delta.seconds // 60 % 60 + ) + delta_items = [ + "{}d".format(days), + "{}h".format(hours), + "{}m".format(minutes) + ] + if not days: + delta_items.pop(0) + if not hours: + delta_items.pop(0) + delta_items.append("{}s".format(delta.seconds % 60)) + if not minutes: + delta_items.pop(0) + + return " ".join(delta_items) + + def get_items(self): + items = [] + last_update = "N/A" + if self.last_update: + delta = datetime.datetime.now() - self.last_update + last_update = "{} ago".format( + self.get_delta_string(delta) + ) + + last_update = "Updated: {}".format(last_update) + items.append({ + "type": "label", + "value": "#{}".format(self.label) + }) + items.append({ + "type": "label", + "value": "##{}".format(last_update) + }) + + if not self.info: + if self.info is None: + trigger_info_get() + items.append(self.default_item) + return items + + info = {} + for key, value in self.info.items(): + if key not in ["created_at:", "created_at"]: + info[key] = value + continue + + datetime_value = datetime.datetime.strptime( + value, "%Y.%m.%d %H:%M:%S" + ) + delta = datetime.datetime.now() - datetime_value + + running_for = self.get_delta_string(delta) + info["Started at"] = "{} [running: {}]".format(value, running_for) + + for key, value in info.items(): + items.append({ + "type": "label", + "value": "{}: {}".format(key, value) + }) + + return items + + +class StatusFactory: + note_item = { "type": "label", - "value": "Hit `submit` to refresh data." + "value": ( + "NOTE: Hit `submit` and uncheck all" + " checkers to refresh data." 
+ ) } splitter_item = { "type": "label", "value": "---" } - def __init__(self, source_info, parent): - self.id = str(uuid.uuid1()) - self.created = datetime.now() - self.parent = parent + def __init__(self, statuses={}): + self.statuses = [] + for status in statuses.items(): + self.create_status(*status) - self.source = source_info + def __getitem__(self, key): + return self.get(key) - self.main_process = None - self.storer = None - self.processor = None + def get(self, key, default=None): + for status in self.statuses: + if status.name == key: + return status + return default - def add_result(self, source, data): - if source.lower() == "storer": - self.storer = data - - elif source.lower() == "processor": - self.processor = data - - else: - self.main_process = data - - def filled(self): - # WARNING DEBUG PART!!!! + def is_filled(self): + for status in self.statuses: + if status.info is None: + return False return True - return ( - self.main_process is not None and - self.storer is not None and - self.processor is not None - ) - def get_items_from_dict(self, in_dict): - items = [] - for key, value in in_dict.items(): - items.append({ - "type": "label", - "value": "##{}".format(key) - }) - items.append({ - "type": "label", - "value": value - }) - return items + def create_status(self, name, label): + new_status = Status(name, label, self) + self.statuses.append(new_status) + + def process_event_result(self, event): + subprocess_id = event["data"].get("subprocess_id") + if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]: + return + + source = event["data"]["source"] + data = event["data"]["status_info"] + for status in self.statuses: + if status.name == source: + status.update(data) + break def bool_items(self): items = [] - name_labels = { - "shutdown_main": "Shutdown main process", - "reset_storer": "Reset storer", - "reset_processor": "Reset processor" - } + items.append({ + "type": "label", + "value": "#Restart process" + }) + items.append({ + "type": "label", + "value": ( + "WARNING: Main process may not restart" + " if does not run as a service!" 
+ ) + }) + + name_labels = {} + for status in self.statuses: + name_labels[status.name] = status.label + for name, label in name_labels.items(): items.append({ "type": "boolean", @@ -144,75 +196,14 @@ class Status: def items(self): items = [] items.append(self.note_item) - - items.append({"type": "label", "value": "Main process"}) - if not self.main_process: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.main_process) - ) - - items.append(self.splitter_item) - items.append({"type": "label", "value": "Storer process"}) - if not self.storer: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.storer) - ) - - items.append(self.splitter_item) - items.append({"type": "label", "value": "Processor process"}) - if not self.processor: - items.append(self.default_item) - else: - items.extend( - self.get_items_from_dict(self.processor) - ) - - items.append(self.splitter_item) items.extend(self.bool_items()) + for status in self.statuses: + items.append(self.splitter_item) + items.extend(status.get_items()) + return items - @property - def is_overtime(self): - time_delta = (datetime.now() - self.created).total_seconds() - return time_delta >= self.parent.max_delta_seconds - - -class StatusFactory: - max_delta_seconds = 30 - - def __init__(self): - self.statuses = {} - - def __getitem__(self, key): - return self.statuses.get(key) - - def create_status(self, source_info): - new_status = Status(source_info, self) - self.statuses[new_status.id] = new_status - return new_status - - def process_result(self, event): - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id != ObjectFactory.subprocess_id: - return - - status_id = event["data"].get("status_id") - status = self.statuses[status_id] - if not status: - return - - source = event["data"]["source"] - data = event["data"]["status_info"] - - status.add_result(source, data) - if status.filled(): - trigger_status_info(status=status) - def server_activity_validate_user(event): """Validate user permissions to show server info.""" @@ -247,38 +238,6 @@ def server_activity_discover(event): return {"items": [action_data]} -def handle_filled_event(event): - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id != ObjectFactory.subprocess_id: - return None - - status_id = event["data"].get("status_id") - status = ObjectFactory.status_factory[status_id] - if not status: - return None - - values = event.get("values") - if values: - log.info(values) - - title = "Event server - Status" - - event_data = copy.deepcopy(event["data"]) - event_data.update({ - "type": "widget", - "items": status.items(), - "title": title - }) - - ObjectFactory.session.event_hub.publish( - ftrack_api.event.base.Event( - topic="ftrack.action.trigger-user-interface", - data=event_data - ), - on_error='ignore' - ) - - def server_activity(event): session = ObjectFactory.session if session is None: @@ -289,35 +248,47 @@ def server_activity(event): "message": msg } - valid = server_activity_validate_user(event) - if not valid: + if not server_activity_validate_user(event): return { "success": False, "message": "You don't have permissions to see Event server status!" 
} - subprocess_id = event["data"].get("subprocess_id") - if subprocess_id is not None: - return handle_filled_event(event) + values = event["data"].get("values") or {} + is_checked = False + for value in values.values(): + if value: + is_checked = True + break - status = ObjectFactory.status_factory.create_status(event["source"]) + if not is_checked: + return { + "items": ObjectFactory.status_factory.items(), + "title": "Server current status" + } - event_data = { - "status_id": status.id, - "subprocess_id": ObjectFactory.subprocess_id - } + +def trigger_info_get(): + session = ObjectFactory.session session.event_hub.publish( ftrack_api.event.base.Event( topic=TOPIC_STATUS_SERVER, - data=event_data + data={"subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"]} ), on_error="ignore" ) - return { - "success": True, - "message": "Collecting information (this may take > 20s)" - } + +def on_start(event): + session = ObjectFactory.session + source_id = event.get("source", {}).get("id") + if not source_id or source_id != session.event_hub.id: + return + + if session is None: + log.warning("Session is not set. Can't trigger Sync to avalon action.") + return True + trigger_info_get() def register(session): @@ -326,6 +297,7 @@ def register(session): "topic=ftrack.action.discover", server_activity_discover ) + session.event_hub.subscribe("topic=pype.status.started", on_start) status_launch_subscription = ( "topic=ftrack.action.launch and data.actionIdentifier={}" @@ -338,34 +310,51 @@ def register(session): session.event_hub.subscribe( "topic={}".format(TOPIC_STATUS_SERVER_RESULT), - ObjectFactory.status_factory.process_result + ObjectFactory.status_factory.process_event_result ) +def heartbeat(): + if ObjectFactory.status_factory.is_filled(): + return + + trigger_info_get() + + def main(args): port = int(args[-1]) + server_info = json.loads(args[-2]) # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Connect the socket to the port where the server is listening server_address = ("localhost", port) - log.debug("Storer connected to {} port {}".format(*server_address)) + log.debug("Statuser connected to {} port {}".format(*server_address)) sock.connect(server_address) sock.sendall(b"CreatedStatus") # store socket connection object ObjectFactory.sock = sock - ObjectFactory.status_factory = StatusFactory() + statuse_names = { + "main": "Main process", + "storer": "Storer", + "processor": "Processor" + } + + ObjectFactory.status_factory = StatusFactory(statuse_names) + ObjectFactory.status_factory["main"].update(server_info) _returncode = 0 try: session = SocketSession( - auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub + auto_connect_event_hub=True, sock=sock, Eventhub=StatusEventHub ) ObjectFactory.session = session + session.event_hub.heartbeat_callbacks.append(heartbeat) register(session) server = FtrackServer("event") - log.debug("Launched Ftrack Event storer") + log.debug("Launched Ftrack Event statuser") + server.run_server(session, load_files=False) except Exception: @@ -388,24 +377,3 @@ if __name__ == "__main__": signal.signal(signal.SIGTERM, signal_handler) sys.exit(main(sys.argv)) - - -example_action_event = { - 'data': { - 'selection': [], - 'description': 'Test action2', - 'variant': None, - 'label': 'Test action2', - 'actionIdentifier': 'test.action2.3ceffe5e9acf40f8aa80603adebd0d06', - 'values': {}, - 'icon': None, - }, - 'topic': 'ftrack.action.launch', - 'sent': None, - 'source': { - 'id': 'eb67d186301c4cbbab73c1aee9b7c55d', - 'user': 
{'username': 'jakub.trllo', 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121'} - }, - 'target': '', - 'in_reply_to_event': None -} From 1b1a78cb6ed79be18fcf89bd340c4e09528fda56 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:31:47 +0100 Subject: [PATCH 308/393] processor suprocess can send status information on ask event --- .../ftrack_server/sub_event_processor.py | 51 ++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/sub_event_processor.py b/pype/ftrack/ftrack_server/sub_event_processor.py index 9c971ca916..2a3ad3e76d 100644 --- a/pype/ftrack/ftrack_server/sub_event_processor.py +++ b/pype/ftrack/ftrack_server/sub_event_processor.py @@ -1,13 +1,59 @@ +import os import sys import signal import socket +import datetime from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub +from pype.ftrack.ftrack_server.lib import ( + SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER +) +import ftrack_api from pypeapp import Logger log = Logger().get_logger("Event processor") +subprocess_started = datetime.datetime.now() + + +class SessionFactory: + session = None + + +def send_status(event): + subprocess_id = event["data"].get("subprocess_id") + if not subprocess_id: + return + + if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]: + return + + session = SessionFactory.session + if not session: + return + + new_event_data = { + "subprocess_id": subprocess_id, + "source": "processor", + "status_info": { + "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S") + } + } + + new_event = ftrack_api.event.base.Event( + topic="pype.event.server.status.result", + data=new_event_data + ) + + session.event_hub.publish(new_event) + + +def register(session): + '''Registers the event, subscribing the discover and launch topics.''' + session.event_hub.subscribe( + "topic={}".format(TOPIC_STATUS_SERVER), send_status + ) + def main(args): port = int(args[-1]) @@ -24,6 +70,9 @@ def main(args): session = SocketSession( auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub ) + register(session) + SessionFactory.session = session + server = FtrackServer("event") log.debug("Launched Ftrack Event processor") server.run_server(session) From 2ff7b87956651c3343d195b56f0f871aaa4afee1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 7 Feb 2020 18:32:02 +0100 Subject: [PATCH 309/393] storer can send status information on ask --- pype/ftrack/ftrack_server/sub_event_storer.py | 36 +++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py index dfe8e21654..b4b9b8a7ab 100644 --- a/pype/ftrack/ftrack_server/sub_event_storer.py +++ b/pype/ftrack/ftrack_server/sub_event_storer.py @@ -8,14 +8,15 @@ import pymongo import ftrack_api from ftrack_server import FtrackServer from pype.ftrack.ftrack_server.lib import ( + SocketSession, StorerEventHub, get_ftrack_event_mongo_info, - SocketSession, - StorerEventHub + TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) from pype.ftrack.lib.custom_db_connector import DbConnector from pypeapp import Logger log = Logger().get_logger("Event storer") +subprocess_started = datetime.datetime.now() class SessionFactory: @@ -138,11 +139,42 @@ def trigger_sync(event): ) +def send_status(event): + session = SessionFactory.session + if not session: + return + + subprocess_id = event["data"].get("subprocess_id") + if not subprocess_id: + return + + 
if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]:
+        return
+
+    new_event_data = {
+        "subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"],
+        "source": "storer",
+        "status_info": {
+            "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
+        }
+    }
+
+    new_event = ftrack_api.event.base.Event(
+        topic=TOPIC_STATUS_SERVER_RESULT,
+        data=new_event_data
+    )
+
+    session.event_hub.publish(new_event)
+
+
 def register(session):
     '''Registers the event, subscribing the discover and launch topics.'''
     install_db()
     session.event_hub.subscribe("topic=*", launch)
     session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)
+    session.event_hub.subscribe(
+        "topic={}".format(TOPIC_STATUS_SERVER), send_status
+    )


 def main(args):

From 5433daf7b065eb7c16720009170b3400a5ee0fd5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 18:32:40 +0100
Subject: [PATCH 310/393] event server cli sends its information on status
 subprocess startup

---
 pype/ftrack/ftrack_server/event_server_cli.py | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index b2c540e993..2dadb5da25 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -4,7 +4,10 @@ import signal
 import datetime
 import subprocess
 import socket
+import json
+import platform
 import argparse
+import getpass
 import atexit
 import time
 import uuid
@@ -233,6 +236,16 @@ def main_loop(ftrack_url):
     atexit.register(
         on_exit, processor_thread=processor_thread, storer_thread=storer_thread
     )
+
+    system_name, pc_name = platform.uname()[:2]
+    host_name = socket.gethostname()
+    main_info = {
+        "created_at": datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S"),
+        "Username": getpass.getuser(),
+        "Host Name": host_name,
+        "Host IP": socket.gethostbyname(host_name)
+    }
+    main_info_str = json.dumps(main_info)
     # Main loop
     while True:
         # Check if accessible Ftrack and Mongo url

From 2f85cdf0be4ed0b54481013ebc57c201dad9f444 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 7 Feb 2020 19:53:16 +0100
Subject: [PATCH 311/393] restarting is working, need to add communication
 between main process and status process

---
 pype/ftrack/ftrack_server/event_server_cli.py | 99 +++++++++++--------
 pype/ftrack/ftrack_server/socket_thread.py    | 56 ++++++++++-
 pype/ftrack/ftrack_server/sub_event_info.py   | 35 +++++++
 3 files changed, 148 insertions(+), 42 deletions(-)

diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index
2dadb5da25..19e889f77d 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -222,7 +222,7 @@ def main_loop(ftrack_url): # stop threads on exit # TODO check if works and args have thread objects! - def on_exit(processor_thread, storer_thread): + def on_exit(processor_thread, storer_thread, statuser_thread): if processor_thread is not None: processor_thread.stop() processor_thread.join() @@ -233,8 +233,16 @@ def main_loop(ftrack_url): storer_thread.join() storer_thread = None + if statuser_thread is not None: + statuser_thread.stop() + statuser_thread.join() + statuser_thread = None + atexit.register( - on_exit, processor_thread=processor_thread, storer_thread=storer_thread + on_exit, + processor_thread=processor_thread, + storer_thread=storer_thread, + statuser_thread=statuser_thread ) system_name, pc_name = platform.uname()[:2] @@ -283,6 +291,51 @@ def main_loop(ftrack_url): printed_ftrack_error = False printed_mongo_error = False + # ====== STATUSER ======= + if statuser_thread is None: + if statuser_failed_count < max_fail_count: + statuser_thread = socket_thread.StatusSocketThread( + statuser_name, statuser_port, statuser_path, + [main_info_str] + ) + statuser_thread.start() + + elif statuser_failed_count == max_fail_count: + print(( + "Statuser failed {}times in row" + " I'll try to run again {}s later" + ).format(str(max_fail_count), str(wait_time_after_max_fail))) + statuser_failed_count += 1 + + elif (( + datetime.datetime.now() - statuser_last_failed + ).seconds > wait_time_after_max_fail): + statuser_failed_count = 0 + + # If thread failed test Ftrack and Mongo connection + elif not statuser_thread.isAlive(): + statuser_thread.join() + statuser_thread = None + ftrack_accessible = False + mongo_accessible = False + + _processor_last_failed = datetime.datetime.now() + delta_time = ( + _processor_last_failed - statuser_last_failed + ).seconds + + if delta_time < min_fail_seconds: + statuser_failed_count += 1 + else: + statuser_failed_count = 0 + statuser_last_failed = _processor_last_failed + + elif statuser_thread.stop_subprocess: + print("Main process was stopped by action") + on_exit(processor_thread, storer_thread, statuser_thread) + os.kill(os.getpid(), signal.SIGTERM) + return 1 + # ====== STORER ======= # Run backup thread which does not requeire mongo to work if storer_thread is None: @@ -291,6 +344,7 @@ def main_loop(ftrack_url): storer_name, storer_port, storer_path ) storer_thread.start() + elif storer_failed_count == max_fail_count: print(( "Storer failed {}times I'll try to run again {}s later" @@ -360,44 +414,9 @@ def main_loop(ftrack_url): processor_failed_count = 0 processor_last_failed = _processor_last_failed - # ====== STATUSER ======= - if statuser_thread is None: - if statuser_failed_count < max_fail_count: - statuser_thread = socket_thread.SocketThread( - statuser_name, statuser_port, statuser_path, - [main_info_str] - ) - statuser_thread.start() - - elif statuser_failed_count == max_fail_count: - print(( - "Statuser failed {}times in row" - " I'll try to run again {}s later" - ).format(str(max_fail_count), str(wait_time_after_max_fail))) - statuser_failed_count += 1 - - elif (( - datetime.datetime.now() - statuser_last_failed - ).seconds > wait_time_after_max_fail): - statuser_failed_count = 0 - - # If thread failed test Ftrack and Mongo connection - elif not statuser_thread.isAlive(): - statuser_thread.join() - statuser_thread = None - ftrack_accessible = False - mongo_accessible = False - - 
_processor_last_failed = datetime.datetime.now() - delta_time = ( - _processor_last_failed - statuser_last_failed - ).seconds - - if delta_time < min_fail_seconds: - statuser_failed_count += 1 - else: - statuser_failed_count = 0 - statuser_last_failed = _processor_last_failed + if statuser_thread is not None: + statuser_thread.set_process("storer", storer_thread) + statuser_thread.set_process("processor", processor_thread) time.sleep(1) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index cb073d83a0..cbe4f9dd8b 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -3,6 +3,7 @@ import sys import time import socket import threading +import traceback import subprocess from pypeapp import Logger @@ -14,12 +15,13 @@ class SocketThread(threading.Thread): def __init__(self, name, port, filepath, additional_args=[]): super(SocketThread, self).__init__() - self.log = Logger().get_logger("SocketThread", "Event Thread") + self.log = Logger().get_logger(self.__class__.__name__) self.setName(name) self.name = name self.port = port self.filepath = filepath self.additional_args = additional_args + self.sock = None self.subproc = None self.connection = None @@ -59,7 +61,8 @@ class SocketThread(threading.Thread): self.filepath, *self.additional_args, str(self.port) - ] + ], + stdin=subprocess.PIPE ) # Listen for incoming connections @@ -133,3 +136,52 @@ class SocketThread(threading.Thread): if data == b"MongoError": self.mongo_error = True connection.sendall(data) + + +class StatusSocketThread(SocketThread): + process_name_mapping = { + b"RestartS": "storer", + b"RestartP": "processor", + b"RestartM": "main" + } + + def __init__(self, *args, **kwargs): + self.process_threads = {} + self.stop_subprocess = False + super(StatusSocketThread, self).__init__(*args, **kwargs) + + def set_process(self, process_name, thread): + try: + if not self.subproc: + self.process_threads[process_name] = None + return + + if ( + process_name in self.process_threads and + self.process_threads[process_name] == thread + ): + return + + self.process_threads[process_name] = thread + self.subproc.stdin.write( + str.encode("reset:{}".format(process_name)) + ) + self.subproc.stdin.flush() + + except Exception: + print("Could not set thread in StatusSocketThread") + traceback.print_exception(*sys.exc_info()) + + def _handle_data(self, connection, data): + if not data: + return + + process_name = self.process_name_mapping.get(data) + if process_name: + if process_name == "main": + self.stop_subprocess = True + else: + subp = self.process_threads.get(process_name) + if subp: + subp.stop() + connection.sendall(data) diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py index 5a38c992f5..a0c2564e10 100644 --- a/pype/ftrack/ftrack_server/sub_event_info.py +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -1,6 +1,8 @@ import os import sys import json +import time +import threading import signal import socket import datetime @@ -29,6 +31,7 @@ action_data = { class ObjectFactory: session = None status_factory = None + checker_thread = None class Status: @@ -267,6 +270,17 @@ def server_activity(event): "title": "Server current status" } + session = ObjectFactory.session + if values["main"]: + session.event_hub.sock.sendall(b"RestartM") + return + + if values["storer"]: + session.event_hub.sock.sendall(b"RestartS") + + if values["processor"]: + session.event_hub.sock.sendall(b"RestartP") 
+ def trigger_info_get(): session = ObjectFactory.session @@ -367,13 +381,34 @@ def main(args): return _returncode +class OutputChecker(threading.Thread): + read_input = True + + def run(self): + while self.read_input: + line = sys.stdin.readlines() + log.info(str(line)) + # for line in sys.stdin.readlines(): + # log.info(str(line)) + log.info("alive-end") + time.sleep(0.5) + + def stop(self): + self.read_input = False + + if __name__ == "__main__": # Register interupt signal def signal_handler(sig, frame): print("You pressed Ctrl+C. Process ended.") + ObjectFactory.checker_thread.stop() sys.exit(0) signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) + checker_thread = OutputChecker() + ObjectFactory.checker_thread = checker_thread + checker_thread.start() + sys.exit(main(sys.argv)) From 3e6ce6c1644fbdd63deece2bb756b4705ab39f58 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Feb 2020 20:48:38 +0100 Subject: [PATCH 312/393] feat(nuke): setting colorspace to write and Reads from presets --- pype/nuke/lib.py | 101 +++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 98 insertions(+), 3 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 6d6e7de1b2..a7f1b64eec 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -645,15 +645,105 @@ class WorkfileSettings(object): write_dict (dict): nuke write node as dictionary ''' - # TODO: complete this function so any write node in # scene will have fixed colorspace following presets for the project if not isinstance(write_dict, dict): msg = "set_root_colorspace(): argument should be dictionary" - nuke.message(msg) log.error(msg) return - log.debug("__ set_writes_colorspace(): {}".format(write_dict)) + from avalon.nuke import get_avalon_knob_data + + for node in nuke.allNodes(): + + if node.Class() in ["Viewer", "Dot"]: + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"]) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + # establish families + families = [avalon_knob_data["family"]] + if avalon_knob_data.get("families"): + families.append(avalon_knob_data.get("families")) + + # except disabled nodes but exclude backdrops in test + for fmly, knob in write_dict.items(): + write = None + if (fmly in families): + # Add all nodes in group instances. 
+ if node.Class() == "Group": + node.begin() + for x in nuke.allNodes(): + if x.Class() == "Write": + write = x + node.end() + elif node.Class() == "Write": + write = node + else: + log.warning("Wrong write node Class") + + write["colorspace"].setValue(str(knob["colorspace"])) + log.info( + "Setting `{0}` to `{1}`".format( + write.name(), + knob["colorspace"])) + + def set_reads_colorspace(self, reads): + """ Setting colorspace to Read nodes + + Looping trought all read nodes and tries to set colorspace based on regex rules in presets + """ + changes = dict() + for n in nuke.allNodes(): + file = nuke.filename(n) + if not n.Class() == "Read": + continue + + # load nuke presets for Read's colorspace + read_clrs_presets = get_colorspace_preset().get( + "nuke", {}).get("read", {}) + + # check if any colorspace presets for read is mathing + preset_clrsp = next((read_clrs_presets[k] + for k in read_clrs_presets + if bool(re.search(k, file))), + None) + log.debug(preset_clrsp) + if preset_clrsp is not None: + current = n["colorspace"].value() + future = str(preset_clrsp) + if current != future: + changes.update({ + n.name(): { + "from": current, + "to": future + } + }) + log.debug(changes) + if changes: + msg = "Read nodes are not set to correct colospace:\n\n" + for nname, knobs in changes.items(): + msg += str(" - node: '{0}' is now '{1}' " + "but should be '{2}'\n").format( + nname, knobs["from"], knobs["to"] + ) + + msg += "\nWould you like to change it?" + + if nuke.ask(msg): + for nname, knobs in changes.items(): + n = nuke.toNode(nname) + n["colorspace"].setValue(knobs["to"]) + log.info( + "Setting `{0}` to `{1}`".format( + nname, + knobs["to"])) def set_colorspace(self): ''' Setting colorpace following presets @@ -671,6 +761,7 @@ class WorkfileSettings(object): msg = "set_colorspace(): missing `viewer` settings in template" nuke.message(msg) log.error(msg) + try: self.set_writes_colorspace(nuke_colorspace["write"]) except AttributeError: @@ -678,6 +769,10 @@ class WorkfileSettings(object): nuke.message(msg) log.error(msg) + reads = nuke_colorspace.get("read") + if reads: + self.set_reads_colorspace(reads) + try: for key in nuke_colorspace: log.debug("Preset's colorspace key: {}".format(key)) From 1a84b605a162ab381a459e9421b1d0e3e32677ab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 7 Feb 2020 20:49:07 +0100 Subject: [PATCH 313/393] fix(nuke, nks): removing `handles` obsolete --- pype/plugins/nukestudio/publish/collect_clips.py | 1 - pype/plugins/nukestudio/publish/collect_effects.py | 12 +++++++++--- pype/plugins/nukestudio/publish/collect_handles.py | 4 ---- .../nukestudio/publish/collect_hierarchy_context.py | 1 - pype/plugins/nukestudio/publish/collect_plates.py | 1 - pype/plugins/nukestudio/publish/collect_reviews.py | 8 +++++--- .../nukestudio/publish/collect_tag_handles.py | 12 +++++++----- pype/plugins/nukestudio/publish/extract_effects.py | 11 +++++++---- 8 files changed, 28 insertions(+), 22 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 3759d50f6a..4525b4947f 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -105,7 +105,6 @@ class CollectClips(api.ContextPlugin): "asset": asset, "family": "clip", "families": [], - "handles": 0, "handleStart": projectdata.get("handleStart", 0), "handleEnd": projectdata.get("handleEnd", 0), "version": int(version)}) diff --git a/pype/plugins/nukestudio/publish/collect_effects.py 
b/pype/plugins/nukestudio/publish/collect_effects.py index 0aee0adf2e..55ff849c88 100644 --- a/pype/plugins/nukestudio/publish/collect_effects.py +++ b/pype/plugins/nukestudio/publish/collect_effects.py @@ -11,7 +11,9 @@ class CollectVideoTracksLuts(pyblish.api.InstancePlugin): def process(self, instance): - self.log.debug("Finding soft effect for subset: `{}`".format(instance.data.get("subset"))) + self.log.debug( + "Finding soft effect for subset: `{}`".format( + instance.data.get("subset"))) # taking active sequence subset = instance.data.get("subset") @@ -41,8 +43,12 @@ class CollectVideoTracksLuts(pyblish.api.InstancePlugin): if len(instance.data.get("effectTrackItems", {}).keys()) > 0: instance.data["families"] += ["lut"] - self.log.debug("effects.keys: {}".format(instance.data.get("effectTrackItems", {}).keys())) - self.log.debug("effects: {}".format(instance.data.get("effectTrackItems", {}))) + self.log.debug( + "effects.keys: {}".format( + instance.data.get("effectTrackItems", {}).keys())) + self.log.debug( + "effects: {}".format( + instance.data.get("effectTrackItems", {}))) def add_effect(self, instance, track_index, item): track = item.parentTrack().name() diff --git a/pype/plugins/nukestudio/publish/collect_handles.py b/pype/plugins/nukestudio/publish/collect_handles.py index 8da83e715b..28f502d846 100644 --- a/pype/plugins/nukestudio/publish/collect_handles.py +++ b/pype/plugins/nukestudio/publish/collect_handles.py @@ -24,7 +24,6 @@ class CollectClipHandles(api.ContextPlugin): continue # get handles - handles = int(instance.data["handles"]) handle_start = int(instance.data["handleStart"]) handle_end = int(instance.data["handleEnd"]) @@ -38,19 +37,16 @@ class CollectClipHandles(api.ContextPlugin): self.log.debug("Adding to shared assets: `{}`".format( instance.data["name"])) asset_shared.update({ - "handles": handles, "handleStart": handle_start, "handleEnd": handle_end }) - for instance in filtered_instances: if not instance.data.get("main") and not instance.data.get("handleTag"): self.log.debug("Synchronize handles on: `{}`".format( instance.data["name"])) name = instance.data["asset"] s_asset_data = assets_shared.get(name) - instance.data["handles"] = s_asset_data.get("handles", 0) instance.data["handleStart"] = s_asset_data.get( "handleStart", 0 ) diff --git a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py index 5f29837d80..5085b9719e 100644 --- a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py +++ b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py @@ -263,7 +263,6 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin): # get custom attributes of the shot if instance.data.get("main"): in_info['custom_attributes'] = { - 'handles': int(instance.data.get('handles', 0)), "handleStart": handle_start, "handleEnd": handle_end, "frameStart": instance.data["frameStart"], diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index 70f0f7407e..b98eccce7f 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -134,7 +134,6 @@ class CollectPlatesData(api.InstancePlugin): # add to data of representation version_data.update({ - "handles": version_data['handleStart'], "colorspace": item.sourceMediaColourTransform(), "colorspaceScript": instance.context.data["colorspace"], "families": [f for f in families if 'ftrack' not in f], diff --git 
a/pype/plugins/nukestudio/publish/collect_reviews.py b/pype/plugins/nukestudio/publish/collect_reviews.py index f9032b2ca4..f223e5ca65 100644 --- a/pype/plugins/nukestudio/publish/collect_reviews.py +++ b/pype/plugins/nukestudio/publish/collect_reviews.py @@ -125,7 +125,7 @@ class CollectReviews(api.InstancePlugin): thumb_path, format='png' ) - + self.log.debug("__ sourceIn: `{}`".format(instance.data["sourceIn"])) self.log.debug("__ thumbnail: `{}`, frame: `{}`".format(thumbnail, thumb_frame)) @@ -145,7 +145,10 @@ class CollectReviews(api.InstancePlugin): item = instance.data["item"] transfer_data = [ - "handleStart", "handleEnd", "sourceIn", "sourceOut", "frameStart", "frameEnd", "sourceInH", "sourceOutH", "clipIn", "clipOut", "clipInH", "clipOutH", "asset", "track", "version" + "handleStart", "handleEnd", "sourceIn", "sourceOut", + "frameStart", "frameEnd", "sourceInH", "sourceOutH", + "clipIn", "clipOut", "clipInH", "clipOutH", "asset", + "track", "version" ] version_data = dict() @@ -154,7 +157,6 @@ class CollectReviews(api.InstancePlugin): # add to data of representation version_data.update({ - "handles": version_data['handleStart'], "colorspace": item.sourceMediaColourTransform(), "families": instance.data["families"], "subset": instance.data["subset"], diff --git a/pype/plugins/nukestudio/publish/collect_tag_handles.py b/pype/plugins/nukestudio/publish/collect_tag_handles.py index 929f5e3b68..a6a63faea9 100644 --- a/pype/plugins/nukestudio/publish/collect_tag_handles.py +++ b/pype/plugins/nukestudio/publish/collect_tag_handles.py @@ -38,7 +38,9 @@ class CollectClipTagHandles(api.ContextPlugin): # gets arguments if there are any t_args = t_metadata.get("tag.args", "") - assert t_args, self.log.error("Tag with Handles is missing Args. Use only handle start/end") + assert t_args, self.log.error( + "Tag with Handles is missing Args. 
" + "Use only handle start/end") t_args = json.loads(t_args.replace("'", "\"")) # add in start @@ -55,8 +57,8 @@ class CollectClipTagHandles(api.ContextPlugin): # adding handles to asset_shared on context if instance.data.get("handleEnd"): - assets_shared_a["handleEnd"] = instance.data["handleEnd"] + assets_shared_a[ + "handleEnd"] = instance.data["handleEnd"] if instance.data.get("handleStart"): - assets_shared_a["handleStart"] = instance.data["handleStart"] - if instance.data.get("handles"): - assets_shared_a["handles"] = instance.data["handles"] + assets_shared_a[ + "handleStart"] = instance.data["handleStart"] diff --git a/pype/plugins/nukestudio/publish/extract_effects.py b/pype/plugins/nukestudio/publish/extract_effects.py index 15d2a80a55..5e2721aa8e 100644 --- a/pype/plugins/nukestudio/publish/extract_effects.py +++ b/pype/plugins/nukestudio/publish/extract_effects.py @@ -6,6 +6,7 @@ import pyblish.api import tempfile from avalon import io, api + class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): """Collect video tracks effects into context.""" @@ -17,9 +18,12 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): item = instance.data["item"] effects = instance.data.get("effectTrackItems") - instance.data["families"] = [f for f in instance.data.get("families", []) if f not in ["lut"]] + instance.data["families"] = [f for f in instance.data.get( + "families", []) if f not in ["lut"]] - self.log.debug("___ instance.data[families]: `{}`".format(instance.data["families"])) + self.log.debug( + "__ instance.data[families]: `{}`".format( + instance.data["families"])) # skip any without effects if not effects: @@ -102,7 +106,6 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): # add to data of representation version_data.update({ - "handles": version_data['handleStart'], "colorspace": item.sourceMediaColourTransform(), "colorspaceScript": instance.context.data["colorspace"], "families": ["plate", "lut"], @@ -132,7 +135,7 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin): def copy_linked_files(self, effect, dst_dir): for k, v in effect["node"].items(): - if k in "file" and v is not '': + if k in "file" and v != '': base_name = os.path.basename(v) dst = os.path.join(dst_dir, base_name).replace("\\", "/") From 10853e1ade753801109009d0497b389533419316 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 8 Feb 2020 11:26:43 +0100 Subject: [PATCH 314/393] process information are refreshed by main process now --- pype/ftrack/ftrack_server/socket_thread.py | 2 +- pype/ftrack/ftrack_server/sub_event_info.py | 39 ++++++++++++++------- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py index cbe4f9dd8b..942965f9e2 100644 --- a/pype/ftrack/ftrack_server/socket_thread.py +++ b/pype/ftrack/ftrack_server/socket_thread.py @@ -164,7 +164,7 @@ class StatusSocketThread(SocketThread): self.process_threads[process_name] = thread self.subproc.stdin.write( - str.encode("reset:{}".format(process_name)) + str.encode("reset:{}\r\n".format(process_name)) ) self.subproc.stdin.flush() diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_info.py index a0c2564e10..4c94513eae 100644 --- a/pype/ftrack/ftrack_server/sub_event_info.py +++ b/pype/ftrack/ftrack_server/sub_event_info.py @@ -32,6 +32,7 @@ class ObjectFactory: session = None status_factory = None checker_thread = None + last_trigger = None class Status: @@ -124,8 +125,8 @@ class 
StatusFactory: note_item = { "type": "label", "value": ( - "NOTE: Hit `submit` and uncheck all" - " checkers to refresh data." + "HINT: To refresh data uncheck" + " all checkboxes and hit `Submit` button." ) } splitter_item = { @@ -164,9 +165,13 @@ class StatusFactory: source = event["data"]["source"] data = event["data"]["status_info"] + + self.update_status_info(source, data) + + def update_status_info(self, process_name, info): for status in self.statuses: - if status.name == source: - status.update(data) + if status.name == process_name: + status.update(info) break def bool_items(self): @@ -178,7 +183,7 @@ class StatusFactory: items.append({ "type": "label", "value": ( - "WARNING: Main process may not restart" + "WARNING: Main process may shut down when checked" " if does not run as a service!" ) }) @@ -283,6 +288,11 @@ def server_activity(event): def trigger_info_get(): + if ObjectFactory.last_trigger: + delta = datetime.datetime.now() - ObjectFactory.last_trigger + if delta.seconds() < 5: + return + session = ObjectFactory.session session.event_hub.publish( ftrack_api.event.base.Event( @@ -352,8 +362,8 @@ def main(args): statuse_names = { "main": "Main process", - "storer": "Storer", - "processor": "Processor" + "storer": "Event Storer", + "processor": "Event Processor" } ObjectFactory.status_factory = StatusFactory(statuse_names) @@ -386,12 +396,15 @@ class OutputChecker(threading.Thread): def run(self): while self.read_input: - line = sys.stdin.readlines() - log.info(str(line)) - # for line in sys.stdin.readlines(): - # log.info(str(line)) - log.info("alive-end") - time.sleep(0.5) + for line in sys.stdin: + line = line.rstrip().lower() + if not line.startswith("reset:"): + continue + process_name = line.replace("reset:", "") + + ObjectFactory.status_factory.update_status_info( + process_name, None + ) def stop(self): self.read_input = False From 49f9dbf4183f057ab2f0ad16fe4b0909de55eef1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 8 Feb 2020 11:28:33 +0100 Subject: [PATCH 315/393] renamed sub_event_info to sub_event_status --- pype/ftrack/ftrack_server/event_server_cli.py | 2 +- .../ftrack_server/{sub_event_info.py => sub_event_status.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename pype/ftrack/ftrack_server/{sub_event_info.py => sub_event_status.py} (100%) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index 19e889f77d..90c7c566fc 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -209,7 +209,7 @@ def main_loop(ftrack_url): statuser_name = "StorerThread" statuser_port = 10021 - statuser_path = "{}/sub_event_info.py".format(file_path) + statuser_path = "{}/sub_event_status.py".format(file_path) statuser_thread = None statuser_last_failed = datetime.datetime.now() statuser_failed_count = 0 diff --git a/pype/ftrack/ftrack_server/sub_event_info.py b/pype/ftrack/ftrack_server/sub_event_status.py similarity index 100% rename from pype/ftrack/ftrack_server/sub_event_info.py rename to pype/ftrack/ftrack_server/sub_event_status.py From e9c4ec7fee46b87a067efc9a7566a09f071a4ea3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sat, 8 Feb 2020 11:30:38 +0100 Subject: [PATCH 316/393] label has IP adress of server --- pype/ftrack/ftrack_server/sub_event_status.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py index 4c94513eae..8dc176a091 
From e9c4ec7fee46b87a067efc9a7566a09f071a4ea3 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 11:30:38 +0100
Subject: [PATCH 316/393] label has IP address of server

---
 pype/ftrack/ftrack_server/sub_event_status.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py
index 4c94513eae..8dc176a091 100644
--- a/pype/ftrack/ftrack_server/sub_event_status.py
+++ b/pype/ftrack/ftrack_server/sub_event_status.py
@@ -1,7 +1,6 @@
 import os
 import sys
 import json
-import time
 import threading
 import signal
 import socket
@@ -19,9 +18,10 @@
 log = Logger().get_logger("Event storer")
 action_identifier = (
     "event.server.status" + os.environ["FTRACK_EVENT_SUB_ID"]
 )
+host_ip = socket.gethostbyname(socket.gethostname())
 action_data = {
     "label": "Pype Admin",
-    "variant": "- Event server Status",
+    "variant": "- Event server Status ({})".format(host_ip),
     "description": "Get Infromation about event server",
     "actionIdentifier": action_identifier,
     "icon": None

From 4e85279771711e794330d414537381be9025a4b6 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 8 Feb 2020 12:01:04 +0100
Subject: [PATCH 317/393] added icon to status action

---
 pype/ftrack/ftrack_server/sub_event_status.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/ftrack_server/sub_event_status.py b/pype/ftrack/ftrack_server/sub_event_status.py
index 8dc176a091..1a15a1f28d 100644
--- a/pype/ftrack/ftrack_server/sub_event_status.py
+++ b/pype/ftrack/ftrack_server/sub_event_status.py
@@ -12,7 +12,7 @@ from pype.ftrack.ftrack_server.lib import (
     SocketSession, StatusEventHub,
     TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
-from pypeapp import Logger
+from pypeapp import Logger, config

 log = Logger().get_logger("Event storer")
 action_identifier = (
@@ -24,7 +24,16 @@ action_data = {
     "variant": "- Event server Status ({})".format(host_ip),
     "description": "Get Infromation about event server",
     "actionIdentifier": action_identifier,
-    "icon": None
+    "icon": "{}/ftrack/action_icons/PypeAdmin.svg".format(
+        os.environ.get(
+            "PYPE_STATICS_SERVER",
+            "http://localhost:{}".format(
+                config.get_presets().get("services", {}).get(
+                    "rest_api", {}
+                ).get("default_port", 8021)
+            )
+        )
+    )
 }

From 350d1ca7d038c8cc75619915262fb2a7fdff4be0 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 10 Feb 2020 16:41:58 +0100
Subject: [PATCH 318/393] implemented action that removes files from disk for
 particular asset versions except the latest `x` versions

---
 .../actions/action_delete_old_versions.py | 481 ++++++++++++++++++
 1 file changed, 481 insertions(+)
 create mode 100644 pype/ftrack/actions/action_delete_old_versions.py

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
new file mode 100644
index 0000000000..126c9a5e24
--- /dev/null
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -0,0 +1,481 @@
+import os
+import collections
+import uuid
+
+import clique
+from pymongo import UpdateOne
+
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+
+import avalon.pipeline
+
+
+class DeleteOldVersions(BaseAction):
+
+    identifier = "delete.old.versions"
+    label = "Pype Admin"
+    variant = "- Delete old versions"
+    description = (
+        "Delete files from older publishes so the project can be"
+        " archived with only the latest versions."
+ ) + + dbcon = DbConnector() + + interface_title = "Choose your preferences" + splitter_item = {"type": "label", "value": "---"} + sequence_splitter = "__sequence_splitter__" + + def discover(self, session, entities, event): + ''' Validation ''' + selection = event["data"].get("selection") or [] + for entity in selection: + entity_type = (entity.get("entityType") or "").lower() + if entity_type == "assetversion": + return True + return False + + def interface(self, session, entities, event): + items = [] + root = os.environ.get("AVALON_PROJECTS") + if not root: + msg = "Root path to projects is not set." + items.append({ + "type": "label", + "value": "ERROR: {}".format(msg) + }) + self.show_interface( + items=items, title=self.interface_title, event=event + ) + return { + "success": False, + "message": msg + } + + elif not os.path.exists(root): + msg = "Root path does not exist \"{}\".".format(str(root)) + items.append({ + "type": "label", + "value": "ERROR: {}".format(msg) + }) + self.show_interface( + items=items, title=self.interface_title, event=event + ) + return { + "success": False, + "message": msg + } + + values = event["data"].get("values") + if values: + versions_count = int(values["last_versions_count"]) + if versions_count >= 1: + return + items.append({ + "type": "label", + "value": ( + "# You have to keep at least 1 version!" + ) + }) + + items.append({ + "type": "label", + "value": ( + "WARNING: This will remove published files of older" + " versions from disk so we don't recommend using" + " this action on a \"live\" project." + ) + }) + + items.append(self.splitter_item) + + # How many versions to keep + items.append({ + "type": "label", + "value": "## Choose how many versions you want to keep:" + }) + items.append({ + "type": "label", + "value": ( + "NOTE: We recommend keeping 2" + " versions (even if the default is 1)." + ) + }) + items.append({ + "type": "number", + "name": "last_versions_count", + "label": "Versions", + "value": 1 + }) + + items.append(self.splitter_item) + + items.append({ + "type": "label", + "value": ( + "## Remove the publish folder even if it contains" + " files other than the published ones:" + ) + }) + items.append({ + "type": "label", + "value": ( + "WARNING: This may remove more than you want." + ) + }) + items.append({ + "type": "boolean", + "name": "force_delete_publish_folder", + "label": "Are you sure?", + "value": True + }) + + return { + "items": items, + "title": self.interface_title + } + + def launch(self, session, entities, event): + values = event["data"].get("values") + if not values: + return + + versions_count = int(values["last_versions_count"]) + force_to_remove = values["force_delete_publish_folder"] + + _val1 = "OFF" + if force_to_remove: + _val1 = "ON" + + _val3 = "s" + if versions_count == 1: + _val3 = "" + + self.log.debug(( + "Process started. Force to delete publish folder is set to [{0}]" + " and will keep {1} latest version{2}."
+ ).format(_val1, versions_count, _val3)) + + self.dbcon.install() + + project = None + avalon_asset_names = [] + asset_versions_by_parent_id = collections.defaultdict(list) + subset_names_by_asset_name = collections.defaultdict(list) + + for entity in entities: + parent_ent = entity["asset"]["parent"] + parent_ftrack_id = parent_ent["id"] + parent_name = parent_ent["name"] + + if parent_name not in avalon_asset_names: + avalon_asset_names.append(parent_name) + + # Group asset versions by parent entity + asset_versions_by_parent_id[parent_ftrack_id].append(entity) + + # Get project + if project is None: + project = parent_ent["project"] + + # Collect subset names per asset + subset_name = entity["asset"]["name"] + subset_names_by_asset_name[parent_name].append(subset_name) + + # Set Mongo collection + project_name = project["full_name"] + self.dbcon.Session["AVALON_PROJECT"] = project_name + self.log.debug("Project is set to {}".format(project_name)) + + # Get Assets from avalon database + assets = list(self.dbcon.find({ + "type": "asset", + "name": {"$in": avalon_asset_names} + })) + asset_id_to_name_map = { + asset["_id"]: asset["name"] for asset in assets + } + asset_ids = list(asset_id_to_name_map.keys()) + + self.log.debug("Collected assets ({})".format(len(asset_ids))) + + # Get Subsets + subsets = list(self.dbcon.find({ + "type": "subset", + "parent": {"$in": asset_ids} + })) + subsets_by_id = {} + subset_ids = [] + for subset in subsets: + asset_id = subset["parent"] + asset_name = asset_id_to_name_map[asset_id] + available_subsets = subset_names_by_asset_name[asset_name] + + if subset["name"] not in available_subsets: + continue + + subset_ids.append(subset["_id"]) + subsets_by_id[subset["_id"]] = subset + + self.log.debug("Collected subsets ({})".format(len(subset_ids))) + + # Get Versions + versions = list(self.dbcon.find({ + "type": "version", + "parent": {"$in": subset_ids} + })) + + versions_by_parent = collections.defaultdict(list) + for ent in versions: + versions_by_parent[ent["parent"]].append(ent) + + def sort_func(ent): + return int(ent["name"]) + + last_versions_by_parent = collections.defaultdict(list) + all_last_versions = [] + for parent_id, _versions in versions_by_parent.items(): + for idx, version in enumerate( + sorted(_versions, key=sort_func, reverse=True) + ): + if idx >= versions_count: + break + last_versions_by_parent[parent_id].append(version) + all_last_versions.append(version) + + self.log.debug("Collected versions ({})".format(len(versions))) + + # Filter latest versions + for version in all_last_versions: + versions.remove(version) + + # Filter already deleted versions + versions_to_pop = [] + for version in versions: + version_tags = version["data"].get("tags") + if version_tags and "deleted" in version_tags: + versions_to_pop.append(version) + + for version in versions_to_pop: + subset = subsets_by_id[version["parent"]] + asset_id = subset["parent"] + asset_name = asset_id_to_name_map[asset_id] + msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format( + asset_name, subset["name"], version["name"] + ) + self.log.warning(( + "Skipping version. Already tagged as `deleted`. < {} >" + ).format(msg)) + versions.remove(version) + + version_ids = [ent["_id"] for ent in versions] + + self.log.debug( + "Filtered versions to delete ({})".format(len(version_ids)) + ) + + if not version_ids: + msg = "Skipping processing. Nothing to delete." 
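+            # NOTE (assumption): returning {"success": True, "message": msg} here should surface this note to the user in the ftrack web UI, based on how BaseAction results are usually displayed.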
+ self.log.debug(msg) + return { + "success": True, + "message": msg + } + + repres = list(self.dbcon.find({ + "type": "representation", + "parent": {"$in": version_ids} + })) + + self.log.debug( + "Collected representations to remove ({})".format(len(repres)) + ) + + dir_paths = {} + file_paths_by_dir = collections.defaultdict(list) + for repre in repres: + file_path, seq_path = self.path_from_representation(repre) + if file_path is None: + self.log.warning(( + "Could not format path for representation \"{}\"" + ).format(str(repre))) + continue + + dir_path = os.path.dirname(file_path) + dir_id = None + for _dir_id, _dir_path in dir_paths.items(): + if _dir_path == dir_path: + dir_id = _dir_id + break + + if dir_id is None: + dir_id = uuid.uuid4() + dir_paths[dir_id] = dir_path + + file_paths_by_dir[dir_id].append([file_path, seq_path]) + + dir_ids_to_pop = [] + for dir_id, dir_path in dir_paths.items(): + if os.path.exists(dir_path): + continue + + dir_ids_to_pop.append(dir_id) + + # Pop dirs from both dictionaries + for dir_id in dir_ids_to_pop: + dir_paths.pop(dir_id) + paths = file_paths_by_dir.pop(dir_id) + # TODO report of missing directories? + paths_msg = ", ".join([ + "'{}'".format(path[0].replace("\\", "/")) for path in paths + ]) + self.log.warning(( + "Folder does not exist. Deleting its files skipped: {}" + ).format(paths_msg)) + + if force_to_remove: + self.delete_whole_dir_paths(dir_paths.values()) + else: + self.delete_only_repre_files(dir_paths, file_paths_by_dir) + + mongo_changes_bulk = [] + for version in versions: + orig_version_tags = version["data"].get("tags") or [] + version_tags = [tag for tag in orig_version_tags] + if "deleted" not in version_tags: + version_tags.append("deleted") + + if version_tags == orig_version_tags: + continue + + filter = {"_id": version["_id"]} + update_data = {"$set": {"data.tags": version_tags}} + mongo_changes_bulk.append(UpdateOne(filter, update_data)) + + if mongo_changes_bulk: + self.dbcon.bulk_write(mongo_changes_bulk) + + self.dbcon.uninstall() + + return True + + def delete_whole_dir_paths(self, dir_paths): + for dir_path in dir_paths: + # Delete all files and folders in dir path + for root, dirs, files in os.walk(dir_path, topdown=False): + for name in files: + os.remove(os.path.join(root, name)) + + for name in dirs: + os.rmdir(os.path.join(root, name)) + + # Delete even the folder and its parent folders if they are empty + while True: + if not os.path.exists(dir_path): + dir_path = os.path.dirname(dir_path) + continue + + if len(os.listdir(dir_path)) != 0: + break + + os.rmdir(os.path.join(dir_path)) + + def delete_only_repre_files(self, dir_paths, file_paths): + for dir_id, dir_path in dir_paths.items(): + dir_files = os.listdir(dir_path) + collections, remainders = clique.assemble(dir_files) + for file_path, seq_path in file_paths[dir_id]: + file_path_base = os.path.split(file_path)[1] + # Just remove file if `frame` key was not in context or + # filled path is in remainders (single file sequence) + if not seq_path or file_path_base in remainders: + if not os.path.exists(file_path): + self.log.warning( + "File was not found: {}".format(file_path) + ) + continue + os.remove(file_path) + self.log.debug("Removed file: {}".format(file_path)) + remainders.remove(file_path_base) + continue + + seq_path_base = os.path.split(seq_path)[1] + head, tail = seq_path_base.split(self.sequence_splitter) + + final_col = None + for collection in collections: + if head != collection.head or tail != collection.tail: + continue + final_col =
collection break + + if final_col is not None: + # Fill full path to head + final_col.head = os.path.join(dir_path, final_col.head) + for _file_path in final_col: + if os.path.exists(_file_path): + os.remove(_file_path) + _seq_path = final_col.format("{head}{padding}{tail}") + self.log.debug("Removed files: {}".format(_seq_path)) + collections.remove(final_col) + + elif os.path.exists(file_path): + os.remove(file_path) + self.log.debug("Removed file: {}".format(file_path)) + + else: + self.log.warning( + "File was not found: {}".format(file_path) + ) + + # Delete as many parent folders as possible + for dir_path in dir_paths.values(): + while True: + if not os.path.exists(dir_path): + dir_path = os.path.dirname(dir_path) + continue + + if len(os.listdir(dir_path)) != 0: + break + + self.log.debug("Removed folder: {}".format(dir_path)) + os.rmdir(dir_path) + + def path_from_representation(self, representation): + try: + template = representation["data"]["template"] + + except KeyError: + return (None, None) + + root = os.environ["AVALON_PROJECTS"] + if not root: + return (None, None) + + sequence_path = None + try: + context = representation["context"] + context["root"] = root + path = avalon.pipeline.format_template_with_optional_keys( + context, template + ) + if "frame" in context: + context["frame"] = self.sequence_splitter + sequence_path = os.path.normpath( + avalon.pipeline.format_template_with_optional_keys( + context, template + ) + ) + + except KeyError: + # Template references unavailable data + return (None, None) + + return (os.path.normpath(path), sequence_path) + + +def register(session, plugins_presets={}): + '''Register plugin. Called when used as a plugin.''' + + DeleteOldVersions(session, plugins_presets).register() From 765ec59d7b47238bfc0579c3d2baaf14880f8a7e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 10 Feb 2020 17:07:56 +0100 Subject: [PATCH 319/393] added roles and icon to action --- pype/ftrack/actions/action_delete_old_versions.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py index 126c9a5e24..46f3e60d77 100644 --- a/pype/ftrack/actions/action_delete_old_versions.py +++ b/pype/ftrack/actions/action_delete_old_versions.py @@ -20,6 +20,10 @@ class DeleteOldVersions(BaseAction): "Delete files from older publishes so project can be" " archived with only latest versions."
) + role_list = ["Pypeclub", "Project Manager", "Administrator"] + icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( + os.environ.get('PYPE_STATICS_SERVER', '') + ) dbcon = DbConnector() From 2da3de670eda9c2cf9e6fc679ca19c291d1e8128 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 10 Feb 2020 22:44:29 +0100 Subject: [PATCH 320/393] add version --- pype/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/__init__.py b/pype/__init__.py index 91b72d7de5..89c653bf6f 100644 --- a/pype/__init__.py +++ b/pype/__init__.py @@ -9,7 +9,7 @@ from pypeapp import config import logging log = logging.getLogger(__name__) -__version__ = "2.3.0" +__version__ = "2.5.0" PACKAGE_DIR = os.path.dirname(__file__) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") From 52b41212064898fac697cf60dbf6a4f957f60072 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Feb 2020 14:20:31 +0100 Subject: [PATCH 321/393] fix(nk): was causing troubles with linux workstations --- pype/nuke/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index a7f1b64eec..c64dc0b828 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -196,7 +196,7 @@ def format_anatomy(data): "root": api.Session["AVALON_PROJECTS"], "subset": data["avalon"]["subset"], "asset": data["avalon"]["asset"], - "task": api.Session["AVALON_TASK"].lower(), + "task": api.Session["AVALON_TASK"], "family": data["avalon"]["family"], "project": {"name": project_document["name"], "code": project_document["data"].get("code", '')}, @@ -1070,7 +1070,7 @@ class BuildWorkfile(WorkfileSettings): "project": {"name": self._project["name"], "code": self._project["data"].get("code", '')}, "asset": self._asset or os.environ["AVALON_ASSET"], - "task": kwargs.get("task") or api.Session["AVALON_TASK"].lower(), + "task": kwargs.get("task") or api.Session["AVALON_TASK"], "hierarchy": kwargs.get("hierarchy") or pype.get_hierarchy(), "version": kwargs.get("version", {}).get("name", 1), "user": getpass.getuser(), From c0584eded70c2d63ab4be82484089e263bf15988 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 11 Feb 2020 18:49:40 +0100 Subject: [PATCH 322/393] integrate new will remove old representations if republishing version and set new repres IDs to those previous --- pype/plugins/global/publish/integrate_new.py | 24 +++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 7d95534897..4499445e6e 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -207,6 +207,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): 'parent': subset["_id"], 'name': next_version }) + existing_repres = None if existing_version is None: version_id = io.insert_one(version).inserted_id else: @@ -217,6 +218,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): }, {'$set': version} ) version_id = existing_version['_id'] + existing_repres = {repre["name"]: repre for repre in io.find({ + "type": "representation", + "parent": version_id + })} + instance.data['version'] = version['name'] # Write to disk @@ -249,6 +255,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if 'transfers' not in instance.data: instance.data['transfers'] = [] + new_repre_names = [] for idx, repre in enumerate(instance.data["representations"]): # Collection @@ -419,8 +426,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): continue 
repre_context[key] = template_data[key] + repre_name = repre['name'] + new_repre_names.append(repre_name) + # Use previous + if existing_repres and repre_name in existing_repres: + repre_id = existing_repres[repre_name]["_id"] + else: + repre_id = io.ObjectId() + representation = { - "_id": io.ObjectId(), + "_id": repre_id, "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, @@ -446,6 +461,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): representations.append(representation) self.log.debug("__ representations: {}".format(representations)) + # Remove old representations if there are any (before insertion of new) + if existing_repres: + repre_ids_to_remove = [] + for repre in existing_repres.values(): + repre_ids_to_remove.append(repre["_id"]) + io.delete_many({"_id": {"$in": repre_ids_to_remove}}) + self.log.debug("__ representations: {}".format(representations)) for rep in instance.data["representations"]: self.log.debug("__ represNAME: {}".format(rep['name'])) From 85ba7f17f494a4324c0be113fff563a9edf9d597 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Feb 2020 11:17:08 +0100 Subject: [PATCH 323/393] representations are not deleted but their type changes to archived_representations and their id is changed --- pype/plugins/global/publish/integrate_new.py | 39 +++++++++++++++++--- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 4499445e6e..c8e6a0188e 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -4,6 +4,8 @@ import logging import sys import clique import errno + +from pymongo import DeleteOne, InsertOne import pyblish.api from avalon import api, io from avalon.vendor import filelink @@ -207,21 +209,48 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): 'parent': subset["_id"], 'name': next_version }) - existing_repres = None + if existing_version is None: version_id = io.insert_one(version).inserted_id else: + # Update version data io.update_many({ 'type': 'version', 'parent': subset["_id"], 'name': next_version - }, {'$set': version} - ) + }, { + '$set': version + }) version_id = existing_version['_id'] - existing_repres = {repre["name"]: repre for repre in io.find({ + + # Find representations of existing version and archive them + current_repres = list(io.find({ "type": "representation", "parent": version_id - })} + })) + bulk_writes = [] + for repre in current_repres: + # Representation must change type, + # `_id` must be stored to other key and replaced with new + # - that is because new representations should have same ID + repre_id = repre["_id"] + bulk_writes.append(DeleteOne({"_id": repre_id})) + + repre["orig_id"] = repre_id + repre["_id"] = io.ObjectId() + repre["type"] = "archived_representation" + bulk_writes.append(InsertOne(repre)) + + # bulk updates + if bulk_writes: + io._database[io.Session["AVALON_PROJECT"]].bulk_write( + bulk_writes + ) + + existing_repres = list(io.find({ + "parent": version_id, + "type": "archived_representation" + })) instance.data['version'] = version['name'] From 7f49ed9fb3e353ce0be37c41d70a3da45d368ebb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Feb 2020 11:17:27 +0100 Subject: [PATCH 324/393] check of existing representations was updated --- pype/plugins/global/publish/integrate_new.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git 
a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c8e6a0188e..b5b6b10aa2 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -411,7 +411,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if not dst_start_frame: dst_start_frame = dst_padding - dst = "{0}{1}{2}".format( dst_head, dst_start_frame, @@ -457,10 +456,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_name = repre['name'] new_repre_names.append(repre_name) - # Use previous - if existing_repres and repre_name in existing_repres: - repre_id = existing_repres[repre_name]["_id"] - else: + + # Use previous representation's id if there are any + repre_id = None + for _repre in existing_repres: + # NOTE should we check lowered names? + if repre_name == _repre["name"]: + repre_id = _repre["orig_id"] + break + + # Create new id if existing representations do not match + if repre_id is None: + repre_id = io.ObjectId() representation = { From 0b1451db3f770a245c9606770e9465ab184909ed Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Feb 2020 13:49:34 +0100 Subject: [PATCH 325/393] fixed variable naming --- pype/ftrack/events/event_sync_to_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index eef24a186d..49ac50c1db 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1643,7 +1643,7 @@ class SyncToAvalonEvent(BaseEvent): new_name, "task", schema_patterns=self.regex_schemas ) if not passed_regex: - self.regex_failed.append(ent_infos["entityId"]) + self.regex_failed.append(ent_info["entityId"]) continue if new_name not in self.task_changes_by_avalon_id[mongo_id]: From e6ba0dea0884e5477aca517a30b259a992ef44ee Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:35:05 +0100 Subject: [PATCH 326/393] fix(nk): didn't create backdrop - string problem --- pype/plugins/nuke/create/create_backdrop.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py index 2016c66095..8609117a0d 100644 --- a/pype/plugins/nuke/create/create_backdrop.py +++ b/pype/plugins/nuke/create/create_backdrop.py @@ -2,6 +2,7 @@ from avalon.nuke.pipeline import Creator from avalon.nuke import lib as anlib import nuke + class CreateBackdrop(Creator): """Add Publishable Backdrop""" @@ -35,8 +36,8 @@ class CreateBackdrop(Creator): return instance else: - msg = "Please select nodes you " - "wish to add to a container" + msg = str("Please select nodes you " + "wish to add to a container") self.log.error(msg) nuke.message(msg) return From 91aaa4058335d0bbb7d21f6a202f8c36f287dc79 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:35:42 +0100 Subject: [PATCH 327/393] fix(nk): unrelated code in script --- pype/plugins/nuke/load/load_backdrop.py | 71 ------------------------- 1 file changed, 71 deletions(-) diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py index 07a6724771..04cff311d1 100644 --- a/pype/plugins/nuke/load/load_backdrop.py +++ b/pype/plugins/nuke/load/load_backdrop.py @@ -240,77 +240,6 @@ class LoadBackdropNodes(api.Loader): return update_container(GN, data_imprint) - def connect_active_viewer(self, group_node): - """ - Finds Active viewer and - place the node under it, also adds - name of
group into Input Process of the viewer - - Arguments: - group_node (nuke node): nuke group node object - - """ - group_node_name = group_node["name"].value() - - viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] - if len(viewer) > 0: - viewer = viewer[0] - else: - if not (len(nodes) < 2): - msg = "Please create Viewer node before you " - "run this action again" - self.log.error(msg) - nuke.message(msg) - return None - - # get coordinates of Viewer1 - xpos = viewer["xpos"].value() - ypos = viewer["ypos"].value() - - ypos += 150 - - viewer["ypos"].setValue(ypos) - - # set coordinates to group node - group_node["xpos"].setValue(xpos) - group_node["ypos"].setValue(ypos + 50) - - # add group node name to Viewer Input Process - viewer["input_process_node"].setValue(group_node_name) - - # put backdrop under - pnlib.create_backdrop(label="Input Process", layer=2, - nodes=[viewer, group_node], color="0x7c7faaff") - - return True - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes trought all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.iteritems()} - elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, unicode): - return input.encode('utf-8') - else: - return input def switch(self, container, representation): self.update(container, representation) From 4ace0b2d7ccccb71c0bd2a500f944e4849435028 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:36:14 +0100 Subject: [PATCH 328/393] fix(nk): version check --- pype/plugins/nuke/publish/collect_backdrop.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/collect_backdrop.py b/pype/plugins/nuke/publish/collect_backdrop.py index d98a20aee0..10729b217b 100644 --- a/pype/plugins/nuke/publish/collect_backdrop.py +++ b/pype/plugins/nuke/publish/collect_backdrop.py @@ -58,7 +58,11 @@ class CollectBackdrops(pyblish.api.InstancePlugin): last_frame = int(nuke.root()["last_frame"].getValue()) # get version - version = pype.get_version_from_path(nuke.root().name()) + version = instance.context.data.get('version') + + if not version: + raise RuntimeError("Script name has no version in the name.") + instance.data['version'] = version # Add version data to instance From 26f53789f3f7cbdfdac3f1f09ddf0a2d6f7566dc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:42:51 +0100 Subject: [PATCH 329/393] fix(nks): filter out audio trackitems on effect collect --- pype/plugins/nukestudio/publish/collect_clips.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 4525b4947f..48e0cb66db 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -1,7 +1,7 @@ import os from pyblish import api - +import hiero import nuke class CollectClips(api.ContextPlugin): @@ -48,7 +48,9 @@ class CollectClips(api.ContextPlugin): track = item.parent() source = item.source().mediaSource() source_path = source.firstpath() - effects = [f for f in item.linkedItems() if 
f.isEnabled()] + effects = [f for f in item.linkedItems() + if f.isEnabled() + if isinstance(f, hiero.core.EffectTrackItem)] # If source is *.nk its a comp effect and we need to fetch the # write node output. This should be improved by parsing the script From 0cd57430f946badfdb1e06cc9580be7d81f0f6b6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:43:31 +0100 Subject: [PATCH 330/393] fix(nks): removing optionals --- pype/plugins/nukestudio/publish/extract_audio.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pype/plugins/nukestudio/publish/extract_audio.py b/pype/plugins/nukestudio/publish/extract_audio.py index 315ba6784d..2c4afc8412 100644 --- a/pype/plugins/nukestudio/publish/extract_audio.py +++ b/pype/plugins/nukestudio/publish/extract_audio.py @@ -10,8 +10,6 @@ class ExtractAudioFile(pype.api.Extractor): hosts = ["nukestudio"] families = ["clip", "audio"] match = api.Intersection - optional = True - active = False def process(self, instance): import os From f46ca740f53b4472f46c1f955389d5d2d3aaff32 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 15:43:59 +0100 Subject: [PATCH 331/393] feat(nks): adding debug log --- pype/plugins/nukestudio/publish/collect_plates.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index b98eccce7f..75eb5bb043 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -146,6 +146,7 @@ class CollectPlatesData(api.InstancePlugin): head, padding = os.path.splitext(basename) ext = ext[1:] padding = padding[1:] + self.log.debug("_ padding: `{}`".format(padding)) # head, padding, ext = source_file.split('.') source_first_frame = int(padding) padding = len(padding) From a3af0be8cf319448efbaac2df98ec2ce26f86cd5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Feb 2020 16:39:45 +0100 Subject: [PATCH 332/393] ftrack lib has lib file with get_project_from_entity and get_avalon_entities_for_assetversion --- pype/ftrack/lib/__init__.py | 5 ++ pype/ftrack/lib/lib.py | 135 ++++++++++++++++++++++++++++++++++++ 2 files changed, 140 insertions(+) create mode 100644 pype/ftrack/lib/lib.py diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py index 9af9ded943..eabfdf0d7d 100644 --- a/pype/ftrack/lib/__init__.py +++ b/pype/ftrack/lib/__init__.py @@ -4,3 +4,8 @@ from .ftrack_app_handler import * from .ftrack_event_handler import * from .ftrack_action_handler import * from .ftrack_base_handler import * + +from .lib import ( + get_project_from_entity, + get_avalon_entities_for_assetversion +) diff --git a/pype/ftrack/lib/lib.py b/pype/ftrack/lib/lib.py new file mode 100644 index 0000000000..aee297fc7e --- /dev/null +++ b/pype/ftrack/lib/lib.py @@ -0,0 +1,135 @@ +from bson.objectid import ObjectId + +from .avalon_sync import CustAttrIdKey +import avalon.io + + +def get_project_from_entity(entity): + # TODO add more entities + ent_type_lowered = entity.entity_type.lower() + if ent_type_lowered == "project": + return entity + + elif ent_type_lowered == "assetversion": + return entity["asset"]["parent"]["project"] + + elif "project" in entity: + return entity["project"] + + return None + + +def get_avalon_entities_for_assetversion(asset_version, db_con=None): + output = { + "success": True, + "message": None, + "project": None, + "project_name": None, + "asset": None, + "asset_name": None, + "asset_path": None, + "subset": None, + "subset_name": None, 
+ "version": None, + "version_name": None, + "representations": None + } + + if db_con is None: + db_con = avalon.io + db_con.install() + + ft_asset = asset_version["asset"] + subset_name = ft_asset["name"] + version = asset_version["version"] + parent = ft_asset["parent"] + ent_path = "/".join( + [ent["name"] for ent in parent["link"]] + ) + project = get_project_from_entity(asset_version) + project_name = project["full_name"] + + output["project_name"] = project_name + output["asset_name"] = parent["name"] + output["asset_path"] = ent_path + output["subset_name"] = subset_name + output["version_name"] = version + + db_con.Session["AVALON_PROJECT"] = project_name + + avalon_project = db_con.find_one({"type": "project"}) + output["project"] = avalon_project + + if not avalon_project: + output["success"] = False + output["message"] = "Project not synchronized to avalon `{}`".format( + project_name + ) + return output + + asset_ent = None + asset_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) + if asset_mongo_id: + try: + asset_mongo_id = ObjectId(asset_mongo_id) + asset_ent = db_con.find_one({ + "type": "asset", + "_id": asset_mongo_id + }) + except Exception: + pass + + if not asset_ent: + asset_ent = db_con.find_one({ + "type": "asset", + "data.ftrackId": parent["id"] + }) + + output["asset"] = asset_ent + + if not asset_ent: + output["success"] = False + output["message"] = "Not synchronized entity to avalon `{}`".format( + ent_path + ) + return output + + asset_mongo_id = asset_ent["_id"] + + subset_ent = db_con.find_one({ + "type": "subset", + "parent": asset_mongo_id, + "name": subset_name + }) + + output["subset"] = subset_ent + + if not subset_ent: + output["success"] = False + output["message"] = ( + "Subset `{}` does not exist under Asset `{}`" + ).format(subset_name, ent_path) + return output + + version_ent = db_con.find_one({ + "type": "version", + "name": version, + "parent": subset_ent["_id"] + }) + + output["version"] = version_ent + + if not version_ent: + output["success"] = False + output["message"] = ( + "Version `{}` does not exist under Subset `{}` | Asset `{}`" + ).format(version, subset_name, ent_path) + return output + + repre_ents = list(db_con.find({ + "type": "representation", + "parent": version_ent["_id"] + })) + + output["representations"] = repre_ents + return output From b69fd842b118107c87bf6f08f3b7eb17510dafa1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Feb 2020 16:40:07 +0100 Subject: [PATCH 333/393] added action for storing thumbnails to avalon entities --- .../action_store_thumbnails_to_avalon.py | 308 ++++++++++++++++++ 1 file changed, 308 insertions(+) create mode 100644 pype/ftrack/actions/action_store_thumbnails_to_avalon.py diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py new file mode 100644 index 0000000000..ff97534656 --- /dev/null +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -0,0 +1,308 @@ +import os +import requests +import errno + +from bson.objectid import ObjectId +from pype.ftrack import BaseAction +from pype.ftrack.lib import ( + get_project_from_entity, + get_avalon_entities_for_assetversion +) +from pypeapp import Anatomy +from pype.ftrack.lib.io_nonsingleton import DbConnector + + +class StoreThumbnailsToAvalon(BaseAction): + # Action identifier + identifier = "store.thubmnail.to.avalon" + # Action label + label = "Pype Admin" + # Action variant + variant = "- Store Thumbnails to avalon" + # Action description + 
description = "Store AssetVersion thumbnails to avalon" + # roles that are allowed to register this action + role_list = ["Pypeclub", "Administrator", "Project Manager"] + + icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( + os.environ.get('PYPE_STATICS_SERVER', '') + ) + + thumbnail_key = "AVALON_THUMBNAIL_ROOT" + db_con = DbConnector() + + def discover(self, session, entities, event): + for entity in entities: + if entity.entity_type.lower() == "assetversion": + return True + return False + + def launch(self, session, entities, event): + # DEBUG LINE + # root_path = r"C:\Users\jakub.trllo\Desktop\Tests\ftrack_thumbnails" + + thumbnail_roots = os.environ.get(self.thumbnail_key) + if not thumbnail_roots: + return { + "success": False, + "message": "`{}` environment is not set".format( + self.thumbnail_key + ) + } + + existing_thumbnail_root = None + for path in thumbnail_roots.split(os.pathsep): + if os.path.exists(path): + existing_thumbnail_root = path + break + + if existing_thumbnail_root is None: + return { + "success": False, + "message": ( + "Can't access any path set in `{}` ({})" + ).format(self.thumbnail_key, thumbnail_roots) + } + + project = get_project_from_entity(entities[0]) + project_name = project["full_name"] + anatomy = Anatomy(project_name) + + if "publish" not in anatomy.templates: + msg = "Anatomy does not have the publish key set!" + + self.log.warning(msg) + + return { + "success": False, + "message": msg + } + + if "thumbnail" not in anatomy.templates["publish"]: + msg = ( + "There is no \"thumbnail\" template" + " set in Anatomy for project \"{}\"" + ).format(project_name) + + self.log.warning(msg) + + return { + "success": False, + "message": msg + } + + example_template_data = { + "_id": "ID", + "thumbnail_root": "THUMBNAIL_ROOT", + "thumbnail_type": "THUMBNAIL_TYPE", + "ext": ".EXT", + "project": { + "name": "PROJECT_NAME", + "code": "PROJECT_CODE" + }, + "asset": "ASSET_NAME", + "subset": "SUBSET_NAME", + "version": "VERSION_NAME", + "hierarchy": "HIERARCHY" + } + tmp_filled = anatomy.format_all(example_template_data) + thumbnail_result = tmp_filled["publish"]["thumbnail"] + if not thumbnail_result.solved: + missing_keys = thumbnail_result.missing_keys + invalid_types = thumbnail_result.invalid_types + submsg = "" + if missing_keys: + submsg += "Missing keys: {}".format(", ".join( + ["\"{}\"".format(key) for key in missing_keys] + )) + + if invalid_types: + items = [] + for key, value in invalid_types.items(): + items.append("{} {}".format(str(key), str(value))) + submsg += "Invalid types: {}".format(", ".join(items)) + + msg = ( + "Thumbnail Anatomy template expects more keys than action" + " can offer. {}" + ).format(submsg) + + self.log.warning(msg) + + return { + "success": False, + "message": msg + } + + thumbnail_template = anatomy.templates["publish"]["thumbnail"] + + self.db_con.install() + + for entity in entities: + # Skip if entity is not AssetVersion (should never happen, but..) + if entity.entity_type.lower() != "assetversion": + continue + + # Skip if AssetVersion doesn't have a thumbnail + thumbnail_ent = entity["thumbnail"] + if thumbnail_ent is None: + self.log.debug(( + "Skipping. AssetVersion doesn't " + "have a thumbnail set. {}" + ).format(entity["id"])) + continue + + avalon_ents_result = get_avalon_entities_for_assetversion( + entity, self.db_con + ) + version_full_path = ( + "Asset: \"{project_name}/{asset_path}\"" + " | Subset: \"{subset_name}\"" + " | Version: \"{version_name}\"" + ).format(**avalon_ents_result) + + version = avalon_ents_result["version"] + if not version: + self.log.warning(( + "AssetVersion does not have version in avalon. {}" + ).format(version_full_path)) + continue + + thumbnail_id = version["data"].get("thumbnail_id") + if thumbnail_id: + self.log.info(( + "AssetVersion skipped, already has thumbnail set. {}" + ).format(version_full_path)) + continue + + # Get thumbnail extension + file_ext = thumbnail_ent["file_type"] + if not file_ext.startswith("."): + file_ext = ".{}".format(file_ext) + + avalon_project = avalon_ents_result["project"] + avalon_asset = avalon_ents_result["asset"] + hierarchy = "" + parents = avalon_asset["data"].get("parents") or [] + if parents: + hierarchy = "/".join(parents) + + # Prepare anatomy template fill data + # 1. Create new id for thumbnail entity + thumbnail_id = ObjectId() + + template_data = { + "_id": str(thumbnail_id), + "thumbnail_root": existing_thumbnail_root, + "thumbnail_type": "thumbnail", + "ext": file_ext, + "project": { + "name": avalon_project["name"], + "code": avalon_project["data"].get("code") + }, + "asset": avalon_ents_result["asset_name"], + "subset": avalon_ents_result["subset_name"], + "version": avalon_ents_result["version_name"], + "hierarchy": hierarchy + } + + anatomy_filled = anatomy.format(template_data) + thumbnail_path = anatomy_filled["publish"]["thumbnail"] + thumbnail_path = thumbnail_path.replace("..", ".") + thumbnail_path = os.path.normpath(thumbnail_path) + + downloaded = False + for loc in (thumbnail_ent.get("component_locations") or []): + res_id = loc.get("resource_identifier") + if not res_id: + continue + + thumbnail_url = self.get_thumbnail_url(res_id) + if self.download_file(thumbnail_url, thumbnail_path): + downloaded = True + break + + if not downloaded: + self.log.warning( + "Could not download thumbnail for {}".format( + version_full_path + ) + ) + continue + + # Clean template data from keys that are dynamic + template_data.pop("_id") + template_data.pop("thumbnail_root") + + thumbnail_entity = { + "_id": thumbnail_id, + "type": "thumbnail", + "schema": "pype:thumbnail-1.0", + "data": { + "template": thumbnail_template, + "template_data": template_data + } + } + + # Create thumbnail entity + self.db_con.insert_one(thumbnail_entity) + self.log.debug( + "Creating entity in database {}".format(str(thumbnail_entity)) + ) + + # Set thumbnail id for version + self.db_con.update_one( + {"_id": version["_id"]}, + {"$set": {"data.thumbnail_id": thumbnail_id}} + ) + + return True + + def get_thumbnail_url(self, resource_identifier, size=None): + # TODO use ftrack_api method rather (find way how to use it) + url_string = ( + u'{url}/component/thumbnail?id={id}&username={username}' + u'&apiKey={apiKey}' + ) + url = url_string.format( + url=self.session.server_url, + id=resource_identifier, + username=self.session.api_user, + apiKey=self.session.api_key + ) + if size: + url += u'&size={0}'.format(size) + + return url + + def download_file(self, source_url, dst_file_path): + dir_path = os.path.dirname(dst_file_path) + try: + os.makedirs(dir_path) + except OSError as exc: + if exc.errno != errno.EEXIST: + self.log.warning( + "Could not create folder: \"{}\"".format(dir_path) + ) + return False + + self.log.debug(
"Downloading file \"{}\" -> \"{}\"".format( + source_url, dst_file_path + ) + ) + file_open = open(dst_file_path, "wb") + try: + file_open.write(requests.get(source_url).content) + except Exception: + self.log.warning( + "Download of image `{}` failed.".format(source_url) + ) + return False + finally: + file_open.close() + return True + + +def register(session, plugins_presets={}): + StoreThumbnailsToAvalon(session, plugins_presets).register() From 256cc85d86f819ebb05c2d7e949ae11ff2d44944 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 16:41:31 +0100 Subject: [PATCH 334/393] fix(nk): removing deprecating code --- pype/lib.py | 67 ++--------------------------------------------------- 1 file changed, 2 insertions(+), 65 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index f26395d930..2235efa2f4 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -361,23 +361,7 @@ def _get_host_name(): def get_asset(asset_name=None): - entity_data_keys_from_project_when_miss = [ - "frameStart", "frameEnd", "handleStart", "handleEnd", "fps", - "resolutionWidth", "resolutionHeight" - ] - - entity_keys_from_project_when_miss = [] - - alternatives = { - "handleStart": "handles", - "handleEnd": "handles" - } - - defaults = { - "handleStart": 0, - "handleEnd": 0 - } - + """ Returning asset document from database """ if not asset_name: asset_name = avalon.api.Session["AVALON_ASSET"] @@ -385,57 +369,10 @@ def get_asset(asset_name=None): "name": asset_name, "type": "asset" }) + if not asset_document: raise TypeError("Entity \"{}\" was not found in DB".format(asset_name)) - project_document = io.find_one({"type": "project"}) - - for key in entity_data_keys_from_project_when_miss: - if asset_document["data"].get(key): - continue - - value = project_document["data"].get(key) - if value is not None or key not in alternatives: - asset_document["data"][key] = value - continue - - alt_key = alternatives[key] - value = asset_document["data"].get(alt_key) - if value is not None: - asset_document["data"][key] = value - continue - - value = project_document["data"].get(alt_key) - if value: - asset_document["data"][key] = value - continue - - if key in defaults: - asset_document["data"][key] = defaults[key] - - for key in entity_keys_from_project_when_miss: - if asset_document.get(key): - continue - - value = project_document.get(key) - if value is not None or key not in alternatives: - asset_document[key] = value - continue - - alt_key = alternatives[key] - value = asset_document.get(alt_key) - if value: - asset_document[key] = value - continue - - value = project_document.get(alt_key) - if value: - asset_document[key] = value - continue - - if key in defaults: - asset_document[key] = defaults[key] - return asset_document From 9e13ac98ec34ac376a8f2e81f10722255b4597ab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 16:42:13 +0100 Subject: [PATCH 335/393] fix(nuke): cleanup and adding debug log --- pype/nuke/lib.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index c64dc0b828..6eb4da951c 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -519,11 +519,6 @@ class WorkfileSettings(object): self.data = kwargs def get_nodes(self, nodes=None, nodes_filter=None): - # filter out only dictionaries for node creation - # - # print("\n\n") - # pprint(self._nodes) - # if not isinstance(nodes, list) and not isinstance(nodes_filter, list): return [n for n in nuke.allNodes()] @@ -791,6 +786,8 @@ class WorkfileSettings(object): return data = 
self._asset_entity["data"] + log.debug("__ asset data: `{}`".format(data)) + missing_cols = [] check_cols = ["fps", "frameStart", "frameEnd", "handleStart", "handleEnd"] From e19f04ec8590f9aca24700b7a455a681e680ff8e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 17:14:34 +0100 Subject: [PATCH 336/393] fix(nk): multi line string needed to be added to str() --- pype/plugins/nuke/load/load_gizmo_ip.py | 4 ++-- pype/plugins/nuke/load/load_luts_ip.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/plugins/nuke/load/load_gizmo_ip.py b/pype/plugins/nuke/load/load_gizmo_ip.py index 23d7ef2f4a..5fecbc4c5c 100644 --- a/pype/plugins/nuke/load/load_gizmo_ip.py +++ b/pype/plugins/nuke/load/load_gizmo_ip.py @@ -176,8 +176,8 @@ class LoadGizmoInputProcess(api.Loader): if len(viewer) > 0: viewer = viewer[0] else: - msg = "Please create Viewer node before you " - "run this action again" + msg = str("Please create Viewer node before you " + "run this action again") self.log.error(msg) nuke.message(msg) return None diff --git a/pype/plugins/nuke/load/load_luts_ip.py b/pype/plugins/nuke/load/load_luts_ip.py index 2b38a9ff08..41cc6c1a43 100644 --- a/pype/plugins/nuke/load/load_luts_ip.py +++ b/pype/plugins/nuke/load/load_luts_ip.py @@ -276,8 +276,8 @@ class LoadLutsInputProcess(api.Loader): if len(viewer) > 0: viewer = viewer[0] else: - msg = "Please create Viewer node before you " - "run this action again" + msg = str("Please create Viewer node before you " + "run this action again") self.log.error(msg) nuke.message(msg) return None From bf35ee99f6e4c623d131aa7cc729e2f67f76e109 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 12 Feb 2020 17:45:54 +0100 Subject: [PATCH 337/393] add thumbnail to asset as well --- pype/ftrack/actions/action_store_thumbnails_to_avalon.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index ff97534656..d63d3a6ae3 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -256,6 +256,11 @@ class StoreThumbnailsToAvalon(BaseAction): {"$set": {"data.thumbnail_id": thumbnail_id}} ) + self.db_con.update_one( + {"_id": avalon_asset["_id"]}, + {"$set": {"data.thumbnail_id": thumbnail_id}} + ) + return True def get_thumbnail_url(self, resource_identifier, size=None): From 5d8e2dc37fc618304268f49291a38b69740dec82 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Feb 2020 18:00:59 +0100 Subject: [PATCH 338/393] fix(nk): swap `write` family for `render2d` --- pype/plugins/global/load/open_file.py | 2 +- .../global/publish/collect_filesequences.py | 12 +-- pype/plugins/global/publish/extract_jpeg.py | 93 ++++++++++--------- 3 files changed, 53 insertions(+), 54 deletions(-) diff --git a/pype/plugins/global/load/open_file.py b/pype/plugins/global/load/open_file.py index 9425eaab04..b496311e0c 100644 --- a/pype/plugins/global/load/open_file.py +++ b/pype/plugins/global/load/open_file.py @@ -18,7 +18,7 @@ def open(filepath): class Openfile(api.Loader): """Open Image Sequence with system default""" - families = ["write"] + families = ["render2d"] representations = ["*"] label = "Open" diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 6c06229304..8b42606e4a 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ 
b/pype/plugins/global/publish/collect_filesequences.py @@ -211,12 +211,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # Get family from the data families = data.get("families", ["render"]) - if "render" not in families: - families.append("render") if "ftrack" not in families: families.append("ftrack") - if "write" in instance_family: - families.append("write") + if families_data and "render2d" in families_data: + families.append("render2d") if families_data and "slate" in families_data: families.append("slate") @@ -334,7 +332,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "stagingDir": root, "anatomy_template": "render", "fps": fps, - "tags": ["review"] if not baked_mov_path else [], + "tags": ["review"] if not baked_mov_path else ["thumb-nuke"], } instance.data["representations"].append( representation) @@ -388,8 +386,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): # If no start or end frame provided, get it from collection indices = list(collection.indexes) - start = data.get("frameStart", indices[0]) - end = data.get("frameEnd", indices[-1]) + start = int(data.get("frameStart", indices[0])) + end = int(data.get("frameEnd", indices[-1])) ext = list(collection)[0].split(".")[-1] diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py index 4978649ba2..7c0820ea28 100644 --- a/pype/plugins/global/publish/extract_jpeg.py +++ b/pype/plugins/global/publish/extract_jpeg.py @@ -19,7 +19,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): label = "Extract Jpeg EXR" hosts = ["shell"] order = pyblish.api.ExtractorOrder - families = ["imagesequence", "render", "write", "source"] + families = ["imagesequence", "render", "render2d", "source"] enabled = False def process(self, instance): @@ -41,62 +41,63 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): for repre in representations: self.log.debug(repre) - if 'review' not in repre['tags']: - return + if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']: + if not isinstance(repre['files'], list): + return - input_file = repre['files'][0] + input_file = repre['files'][0] - # input_file = ( - # collections[0].format('{head}{padding}{tail}') % start - # ) - full_input_path = os.path.join(stagingdir, input_file) - self.log.info("input {}".format(full_input_path)) + # input_file = ( + # collections[0].format('{head}{padding}{tail}') % start + # ) + full_input_path = os.path.join(stagingdir, input_file) + self.log.info("input {}".format(full_input_path)) - filename = os.path.splitext(input_file)[0] - if not filename.endswith('.'): - filename += "." - jpeg_file = filename + "jpg" - full_output_path = os.path.join(stagingdir, jpeg_file) + filename = os.path.splitext(input_file)[0] + if not filename.endswith('.'): + filename += "." 
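+                # note: the thumbnail is written next to the source frame in the staging dir as "<frame name>.jpg"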
+ jpeg_file = filename + "jpg" + full_output_path = os.path.join(stagingdir, jpeg_file) - self.log.info("output {}".format(full_output_path)) + self.log.info("output {}".format(full_output_path)) - config_data = instance.context.data['output_repre_config'] + config_data = instance.context.data['output_repre_config'] - proj_name = os.environ.get('AVALON_PROJECT', '__default__') - profile = config_data.get(proj_name, config_data['__default__']) + proj_name = os.environ.get('AVALON_PROJECT', '__default__') + profile = config_data.get(proj_name, config_data['__default__']) - jpeg_items = [] - jpeg_items.append( - os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) - # override file if already exists - jpeg_items.append("-y") - # use same input args like with mov - jpeg_items.extend(profile.get('input', [])) - # input file - jpeg_items.append("-i {}".format(full_input_path)) - # output file - jpeg_items.append(full_output_path) + jpeg_items = [] + jpeg_items.append( + os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) + # override file if already exists + jpeg_items.append("-y") + # use same input args like with mov + jpeg_items.extend(profile.get('input', [])) + # input file + jpeg_items.append("-i {}".format(full_input_path)) + # output file + jpeg_items.append(full_output_path) - subprocess_jpeg = " ".join(jpeg_items) + subprocess_jpeg = " ".join(jpeg_items) - # run subprocess - self.log.debug("{}".format(subprocess_jpeg)) - pype.api.subprocess(subprocess_jpeg) + # run subprocess + self.log.debug("{}".format(subprocess_jpeg)) + pype.api.subprocess(subprocess_jpeg) - if "representations" not in instance.data: - instance.data["representations"] = [] + if "representations" not in instance.data: + instance.data["representations"] = [] - representation = { - 'name': 'thumbnail', - 'ext': 'jpg', - 'files': jpeg_file, - "stagingDir": stagingdir, - "thumbnail": True, - "tags": ['thumbnail'] - } + representation = { + 'name': 'thumbnail', + 'ext': 'jpg', + 'files': jpeg_file, + "stagingDir": stagingdir, + "thumbnail": True, + "tags": ['thumbnail'] + } - # adding representation - self.log.debug("Adding: {}".format(representation)) - representations_new.append(representation) + # adding representation + self.log.debug("Adding: {}".format(representation)) + representations_new.append(representation) instance.data["representations"] = representations_new From 00e77d690d6e8c42999f0ec154c71f84b9dbe52b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:36:55 +0100 Subject: [PATCH 339/393] added notelabellink to ignored entity types --- pype/ftrack/events/event_sync_to_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 49ac50c1db..708ae707e9 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -28,7 +28,7 @@ class SyncToAvalonEvent(BaseEvent): ignore_entTypes = [ "socialfeed", "socialnotification", "note", "assetversion", "job", "user", "reviewsessionobject", "timer", - "timelog", "auth_userrole", "appointment" + "timelog", "auth_userrole", "appointment", "notelabellink" ] ignore_ent_types = ["Milestone"] ignore_keys = ["statusid", "thumbid"] From 2ff72b5aeea0ce4c83e27b84f7da017733f7b489 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:37:13 +0100 Subject: [PATCH 340/393] small cleanup in code --- pype/ftrack/events/event_sync_to_avalon.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) 
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 708ae707e9..643a3d793e 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -573,8 +573,7 @@ class SyncToAvalonEvent(BaseEvent): if auto_sync is not True: return True - debug_msg = "" - debug_msg += "Updated: {}".format(len(updated)) + debug_msg = "Updated: {}".format(len(updated)) debug_action_map = { "add": "Created", "remove": "Removed", From 5b1f33350b2d2b6d6d02d19919435aa73ef35c9a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:37:43 +0100 Subject: [PATCH 341/393] added another bug report message when configuration id is not for specific entity --- pype/ftrack/events/event_sync_to_avalon.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 643a3d793e..c646756788 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1544,6 +1544,14 @@ class SyncToAvalonEvent(BaseEvent): entity_type_conf_ids[entity_type] = configuration_id break + if not configuration_id: + self.log.warning( + "BUG REPORT: Missing configuration for `{} < {} >`".format( + entity_type, ent_info["entityType"] + ) + ) + continue + _entity_key = collections.OrderedDict({ "configuration_id": configuration_id, "entity_id": ftrack_id From bbe3ce3781cd4ff4fc5181e9237cc984c29c6836 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 11:38:44 +0100 Subject: [PATCH 342/393] added debug logs for specific reason at this moment, to avoid ignoring entity types but to find out which we are using --- pype/ftrack/events/event_sync_to_avalon.py | 33 ++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index c646756788..345bc5b925 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -3,6 +3,7 @@ import collections import copy import queue import time +import datetime import atexit import traceback @@ -51,9 +52,36 @@ class SyncToAvalonEvent(BaseEvent): def __init__(self, session, plugins_presets={}): '''Expects a ftrack_api.Session instance''' + # Debug settings + # - time expiration in seconds + self.debug_print_time_expiration = 5 * 60 + # - store current time + self.debug_print_time = datetime.datetime.now() + # - store synchronize entity types to be able to use + # only entityTypes in interest instead of filtering by ignored + self.debug_sync_types = collections.defaultdict(list) + + # Set processing session to not use global self.set_process_session(session) super().__init__(session, plugins_presets) + def debug_logs(self): + """This is debug method for printing small debugs messages. 
""" + now_datetime = datetime.datetime.now() + delta = now_datetime - self.debug_print_time + if delta.total_seconds() < self.debug_print_time_expiration: + return + + self.debug_print_time = now_datetime + known_types_items = [] + for entityType, entity_type in self.debug_sync_types.items(): + known_types_items.append("{} <{}>".format(entity_type, entityType)) + + known_entityTypes = ", ".join(known_types_items) + self.log.debug( + "DEBUG MESSAGE: Known entityTypes {}".format(known_entityTypes) + ) + @property def cur_project(self): if self._cur_project is None: @@ -484,6 +512,9 @@ class SyncToAvalonEvent(BaseEvent): if not entity_type or entity_type in self.ignore_ent_types: continue + if entity_type not in self.debug_sync_types[entityType]: + self.debug_sync_types[entityType].append(entity_type) + action = ent_info["action"] ftrack_id = ent_info["entityId"] if isinstance(ftrack_id, list): @@ -633,6 +664,8 @@ class SyncToAvalonEvent(BaseEvent): self.ftrack_added = entities_by_action["add"] self.ftrack_updated = updated + self.debug_logs() + self.log.debug("Synchronization begins") try: time_1 = time.time() From 9fec5fa0e3f997e85d16ae5b83f3771c828a2de8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 12:08:06 +0100 Subject: [PATCH 343/393] fixed messages --- pype/ftrack/events/event_sync_to_avalon.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 345bc5b925..53de588bcc 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -75,11 +75,14 @@ class SyncToAvalonEvent(BaseEvent): self.debug_print_time = now_datetime known_types_items = [] for entityType, entity_type in self.debug_sync_types.items(): - known_types_items.append("{} <{}>".format(entity_type, entityType)) + ent_types_msg = ", ".join(entity_type) + known_types_items.append( + "<{}> ({})".format(entityType, ent_types_msg) + ) known_entityTypes = ", ".join(known_types_items) self.log.debug( - "DEBUG MESSAGE: Known entityTypes {}".format(known_entityTypes) + "DEBUG MESSAGE: Known types {}".format(known_entityTypes) ) @property @@ -1603,7 +1606,7 @@ class SyncToAvalonEvent(BaseEvent): try: # Commit changes of mongo_id to empty string self.process_session.commit() - self.log.debug("Commititng unsetting") + self.log.debug("Committing unsetting") except Exception: self.process_session.rollback() # TODO logging From f12bb0f8597bcbad0862b5eac50963d225e1284b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 12:08:26 +0100 Subject: [PATCH 344/393] fixed prints when credentials to event server are not valid --- pype/ftrack/ftrack_server/event_server_cli.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..cae037f2d9 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -63,10 +63,19 @@ def validate_credentials(url, user, api): ) session.close() except Exception as e: - print( - 'ERROR: Can\'t log into Ftrack with used credentials:' - ' Ftrack server: "{}" // Username: {} // API key: {}' - ).format(url, user, api) + print("Can't log into Ftrack with used credentials:") + ftrack_cred = { + "Ftrack server": str(url), + "Username": str(user), + "API key": str(api) + } + item_lens = [len(key) + 1 for key in ftrack_cred.keys()] + justify_len = 
max(*item_lens) + for key, value in ftrack_cred.items(): + print("{} {}".format( + (key + ":").ljust(justify_len, " "), + value + )) return False print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format( From aea05e2fe912c1a46d60625dc57d0f73ec009165 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 13 Feb 2020 12:09:12 +0100 Subject: [PATCH 345/393] fixed error message formatting --- pype/ftrack/lib/ftrack_base_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py index 8329505ffb..f11cb020e9 100644 --- a/pype/ftrack/lib/ftrack_base_handler.py +++ b/pype/ftrack/lib/ftrack_base_handler.py @@ -49,7 +49,7 @@ class BaseHandler(object): ).format( str(type(session)), str(ftrack_api.session.Session), - str(session_processor.ProcessSession) + str(SocketSession) )) self._session = session From 75bff66ce21e88cd43f165f2355286882b0f4bf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 14 Feb 2020 10:40:22 +0000 Subject: [PATCH 346/393] submit_publish_job.py edited online with Bitbucket --- pype/plugins/global/publish/submit_publish_job.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index a9fa8febd4..792fc05a38 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -166,6 +166,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "PYPE_STUDIO_PROJECTS_PATH", "PYPE_STUDIO_PROJECTS_MOUNT" ] + + deadline_pool = "" def _submit_deadline_post_job(self, instance, job): """ @@ -201,7 +203,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "JobDependency0": job["_id"], "UserName": job["Props"]["User"], "Comment": instance.context.data.get("comment", ""), - "Priority": job["Props"]["Pri"] + "Priority": job["Props"]["Pri"], + "Pool": self.deadline_pool }, "PluginInfo": { "Version": "3.6", From f6b91ed589f94da3c9e3989d1ce04b2aaa405122 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:06:50 +0100 Subject: [PATCH 347/393] credentials functions are not private and are ready to store credentials by host and user --- pype/ftrack/lib/credentials.py | 163 ++++++++++++++++++++++----------- 1 file changed, 109 insertions(+), 54 deletions(-) diff --git a/pype/ftrack/lib/credentials.py b/pype/ftrack/lib/credentials.py index 7e305942f2..16b1fb25fb 100644 --- a/pype/ftrack/lib/credentials.py +++ b/pype/ftrack/lib/credentials.py @@ -2,85 +2,140 @@ import os import json import ftrack_api import appdirs +import getpass +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse -config_path = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype')) -action_file_name = 'ftrack_cred.json' -event_file_name = 'ftrack_event_cred.json' -action_fpath = os.path.join(config_path, action_file_name) -event_fpath = os.path.join(config_path, event_file_name) -folders = set([os.path.dirname(action_fpath), os.path.dirname(event_fpath)]) +CONFIG_PATH = os.path.normpath(appdirs.user_data_dir("pype-app", "pype")) +CREDENTIALS_FILE_NAME = "ftrack_cred.json" +CREDENTIALS_PATH = os.path.join(CONFIG_PATH, CREDENTIALS_FILE_NAME) +CREDENTIALS_FOLDER = os.path.dirname(CREDENTIALS_PATH) -for folder in folders: - if not os.path.isdir(folder): - os.makedirs(folder) +if not os.path.isdir(CREDENTIALS_FOLDER): + 
os.makedirs(CREDENTIALS_FOLDER) + +USER_GETTER = None -def _get_credentials(event=False): - if event: - fpath = event_fpath - else: - fpath = action_fpath +def get_ftrack_hostname(ftrack_server=None): + if not ftrack_server: + ftrack_server = os.environ["FTRACK_SERVER"] + if "//" not in ftrack_server: + ftrack_server = "//" + ftrack_server + + return urlparse(ftrack_server).hostname + + +def get_user(): + if USER_GETTER: + return USER_GETTER() + return getpass.getuser() + + +def get_credentials(ftrack_server=None, user=None): credentials = {} - try: - file = open(fpath, 'r') - credentials = json.load(file) - except Exception: - file = open(fpath, 'w') + if not os.path.exists(CREDENTIALS_PATH): + with open(CREDENTIALS_PATH, "w") as file: + file.write(json.dumps(credentials)) + file.close() + return credentials - file.close() + with open(CREDENTIALS_PATH, "r") as file: + content = file.read() + + hostname = get_ftrack_hostname(ftrack_server) + if not user: + user = get_user() + + content_json = json.loads(content or "{}") + credentials = content_json.get(hostname, {}).get(user) or {} return credentials -def _save_credentials(username, apiKey, event=False, auto_connect=None): - data = { - 'username': username, - 'apiKey': apiKey +def save_credentials(ft_user, ft_api_key, ftrack_server=None, user=None): + hostname = get_ftrack_hostname(ftrack_server) + if not user: + user = get_user() + + with open(CREDENTIALS_PATH, "r") as file: + content = file.read() + + content_json = json.loads(content or "{}") + if hostname not in content_json: + content_json[hostname] = {} + + content_json[hostname][user] = { + "username": ft_user, + "api_key": ft_api_key } - if event: - fpath = event_fpath - if auto_connect is None: - cred = _get_credentials(True) - auto_connect = cred.get('auto_connect', False) - data['auto_connect'] = auto_connect - else: - fpath = action_fpath + # Deprecated keys + if "username" in content_json: + content_json.pop("username") + if "apiKey" in content_json: + content_json.pop("apiKey") - file = open(fpath, 'w') - file.write(json.dumps(data)) - file.close() + with open(CREDENTIALS_PATH, "w") as file: + file.write(json.dumps(content_json, indent=4)) -def _clear_credentials(event=False): - if event: - fpath = event_fpath - else: - fpath = action_fpath - open(fpath, 'w').close() - _set_env(None, None) +def clear_credentials(ft_user=None, ftrack_server=None, user=None): + if not ft_user: + ft_user = os.environ.get("FTRACK_API_USER") + + if not ft_user: + return + + hostname = get_ftrack_hostname(ftrack_server) + if not user: + user = get_user() + + with open(CREDENTIALS_PATH, "r") as file: + content = file.read() + + content_json = json.loads(content or "{}") + if hostname not in content_json: + content_json[hostname] = {} + + content_json[hostname].pop(user, None) + + with open(CREDENTIALS_PATH, "w") as file: + file.write(json.dumps(content_json)) -def _set_env(username, apiKey): - if not username: - username = '' - if not apiKey: - apiKey = '' - os.environ['FTRACK_API_USER'] = username - os.environ['FTRACK_API_KEY'] = apiKey +def set_env(ft_user=None, ft_api_key=None): + os.environ["FTRACK_API_USER"] = ft_user or "" + os.environ["FTRACK_API_KEY"] = ft_api_key or "" -def _check_credentials(username=None, apiKey=None): +def get_env_credentials(): + return ( + os.environ.get("FTRACK_API_USER"), + os.environ.get("FTRACK_API_KEY") + ) - if username and apiKey: - _set_env(username, apiKey) + +def check_credentials(ft_user, ft_api_key, ftrack_server=None): + if not ftrack_server: + 
ftrack_server = os.environ["FTRACK_SERVER"] + + if not ft_user or not ft_api_key: + return False try: - session = ftrack_api.Session() + session = ftrack_api.Session( + server_url=ftrack_server, + api_key=ft_api_key, + api_user=ft_user + ) session.close() - except Exception as e: + + except Exception: return False return True From 0272d38c7eb98bb68341b1762b93f5da4571b695 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:09:07 +0100 Subject: [PATCH 348/393] lib init do not import all credentials functions but only credentials module --- pype/ftrack/lib/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py index eabfdf0d7d..9da3b819b3 100644 --- a/pype/ftrack/lib/__init__.py +++ b/pype/ftrack/lib/__init__.py @@ -1,5 +1,5 @@ from . import avalon_sync -from .credentials import * +from . import credentials from .ftrack_app_handler import * from .ftrack_event_handler import * from .ftrack_action_handler import * From 79245bcd00283fb8e424ce438e836af0b17eba70 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:10:50 +0100 Subject: [PATCH 349/393] user module can execute callbacks on username change --- pype/user/user_module.py | 16 +++++++++++++++- pype/user/widget_user.py | 2 +- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/pype/user/user_module.py b/pype/user/user_module.py index d70885b211..a43866f471 100644 --- a/pype/user/user_module.py +++ b/pype/user/user_module.py @@ -19,8 +19,8 @@ class UserModule: log = pype.Logger().get_logger("UserModule", "user") def __init__(self, main_parent=None, parent=None): + self._callbacks_on_user_change = [] self.cred = {} - self.cred_path = os.path.normpath(os.path.join( self.cred_folder_path, self.cred_filename )) @@ -28,6 +28,9 @@ class UserModule: self.load_credentials() + def register_callback_on_user_change(self, callback): + self._callbacks_on_user_change.append(callback) + def tray_start(self): """Store credentials to env and preset them to widget""" username = "" @@ -95,6 +98,17 @@ class UserModule: )) return self.save_credentials(getpass.getuser()) + def change_credentials(self, username): + self.save_credentials(username) + for callback in self._callbacks_on_user_change: + try: + callback() + except Exception: + self.log.warning( + "Failed to execute callback \"{}\".".format(str(callback)), + exc_info=True + ) + def save_credentials(self, username): """Save credentials to JSON file, env and widget""" if username is None: diff --git a/pype/user/widget_user.py b/pype/user/widget_user.py index 7ca12ec4d4..27faa857f5 100644 --- a/pype/user/widget_user.py +++ b/pype/user/widget_user.py @@ -77,7 +77,7 @@ class UserWidget(QtWidgets.QWidget): def click_save(self): # all what should happen - validations and saving into appsdir username = self.input_username.text() - self.module.save_credentials(username) + self.module.change_credentials(username) self._close_widget() def closeEvent(self, event): From ce5ad584dd405597272c0b592998cce7e9953ef8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:11:06 +0100 Subject: [PATCH 350/393] user module has get_user method to get currently set user --- pype/user/user_module.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/user/user_module.py b/pype/user/user_module.py index a43866f471..46ceb0031f 100644 --- a/pype/user/user_module.py +++ b/pype/user/user_module.py @@ -40,6 +40,9 @@ class UserModule: os.environ[self.env_name] = username 
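+        # A minimal usage sketch of the callback API added above; the wiring
+        # below is hypothetical (assumed names, not part of the patch):
+        #     user_module = UserModule()
+        #     user_module.register_callback_on_user_change(
+        #         lambda: print(user_module.get_user())
+        #     )
+        #     user_module.change_credentials("jane.doe")  # fires the callback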
self.widget_login.set_user(username) + def get_user(self): + return self.cred.get("username") or getpass.getuser() + def process_modules(self, modules): """ Gives ability to connect with imported modules from TrayManager. From 908a89f4ca2a0af681021f9fb8c86c7fd4723a93 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:11:44 +0100 Subject: [PATCH 351/393] ftrack module uses new credentials functions and has ability to change user on User module's user change --- pype/ftrack/tray/ftrack_module.py | 57 ++++++++++++++++++------------- pype/ftrack/tray/login_dialog.py | 12 +++---- 2 files changed, 40 insertions(+), 29 deletions(-) diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index 250872f239..5811209a02 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -34,29 +34,28 @@ class FtrackModule: def validate(self): validation = False - cred = credentials._get_credentials() - try: - if 'username' in cred and 'apiKey' in cred: - validation = credentials._check_credentials( - cred['username'], - cred['apiKey'] - ) - if validation is False: - self.show_login_widget() - else: - self.show_login_widget() - - except Exception as e: - log.error("We are unable to connect to Ftrack: {0}".format(e)) - - validation = credentials._check_credentials() - if validation is True: + cred = credentials.get_credentials() + ft_user = cred.get("username") + ft_api_key = cred.get("api_key") + validation = credentials.check_credentials(ft_user, ft_api_key) + if validation: + credentials.set_env(ft_user, ft_api_key) log.info("Connected to Ftrack successfully") self.loginChange() - else: - log.warning("Please sign in to Ftrack") - self.bool_logged = False - self.set_menu_visibility() + + return validation + + if not validation and ft_user and ft_api_key: + log.warning( + "Current Ftrack credentials are not valid. 
{}: {} - {}".format( + str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key + ) + ) + + log.info("Please sign in to Ftrack") + self.bool_logged = False + self.show_login_widget() + self.set_menu_visibility() return validation @@ -67,7 +66,7 @@ class FtrackModule: self.start_action_server() def logout(self): - credentials._clear_credentials() + credentials.clear_credentials() self.stop_action_server() log.info("Logged out of Ftrack") @@ -307,11 +306,23 @@ class FtrackModule: except Exception as e: log.error("During Killing Timer event server: {0}".format(e)) + def changed_user(self): + self.stop_action_server() + credentials.set_env() + self.validate() + def process_modules(self, modules): if 'TimersManager' in modules: self.timer_manager = modules['TimersManager'] self.timer_manager.add_module(self) + if "UserModule" in modules: + credentials.USER_GETTER = modules["UserModule"].get_user + modules["UserModule"].register_callback_on_user_change( + self.changed_user + ) + + def start_timer_manager(self, data): if self.thread_timer is not None: self.thread_timer.ftrack_start_timer(data) @@ -336,7 +347,7 @@ class FtrackEventsThread(QtCore.QThread): def __init__(self, parent): super(FtrackEventsThread, self).__init__() - cred = credentials._get_credentials() + cred = credentials.get_credentials() self.username = cred['username'] self.user = None self.last_task = None diff --git a/pype/ftrack/tray/login_dialog.py b/pype/ftrack/tray/login_dialog.py index 4dcbec5ab3..5f3777f93e 100644 --- a/pype/ftrack/tray/login_dialog.py +++ b/pype/ftrack/tray/login_dialog.py @@ -204,11 +204,11 @@ class Login_Dialog_ui(QtWidgets.QWidget): self.setError("{0} {1}".format(msg, " and ".join(missing))) return - verification = credentials._check_credentials(username, apiKey) + verification = credentials.check_credentials(username, apiKey) if verification: - credentials._save_credentials(username, apiKey, self.is_event) - credentials._set_env(username, apiKey) + credentials.save_credentials(username, apiKey, self.is_event) + credentials.set_env(username, apiKey) if self.parent is not None: self.parent.loginChange() self._close_widget() @@ -304,11 +304,11 @@ class Login_Dialog_ui(QtWidgets.QWidget): self._login_server_thread.start(url) return - verification = credentials._check_credentials(username, apiKey) + verification = credentials.check_credentials(username, apiKey) if verification is True: - credentials._save_credentials(username, apiKey, self.is_event) - credentials._set_env(username, apiKey) + credentials.save_credentials(username, apiKey, self.is_event) + credentials.set_env(username, apiKey) if self.parent is not None: self.parent.loginChange() self._close_widget() From a7c4dffb42c78a096655efa50e6164e579584636 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:12:05 +0100 Subject: [PATCH 352/393] event server cli also uses new credentials functions --- pype/ftrack/ftrack_server/event_server_cli.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index b09b0bc84e..d889b6be23 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -446,9 +446,9 @@ def main(argv): event_paths = kwargs.ftrackeventpaths if not kwargs.noloadcred: - cred = credentials._get_credentials(True) + cred = credentials.get_credentials(ftrack_url) username = cred.get('username') - api_key = cred.get('apiKey') + api_key = cred.get('api_key') if 
kwargs.ftrackuser: username = kwargs.ftrackuser @@ -482,7 +482,7 @@ def main(argv): return 1 if kwargs.storecred: - credentials._save_credentials(username, api_key, True) + credentials.save_credentials(username, api_key, ftrack_url) # Set Ftrack environments os.environ["FTRACK_SERVER"] = ftrack_url From 4de7478d9dcf176eb349a52a20eddac76e0424e8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:19:13 +0100 Subject: [PATCH 353/393] again remove collect templates --- .../global/publish/collect_templates.py | 119 ------------------ 1 file changed, 119 deletions(-) delete mode 100644 pype/plugins/global/publish/collect_templates.py diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py deleted file mode 100644 index 3104b5b705..0000000000 --- a/pype/plugins/global/publish/collect_templates.py +++ /dev/null @@ -1,119 +0,0 @@ -""" -Requires: - session -> AVALON_PROJECT - context -> anatomy (pypeapp.Anatomy) - instance -> subset - instance -> asset - instance -> family - -Provides: - instance -> template - instance -> assumedTemplateData - instance -> assumedDestination -""" - -import os - -from avalon import io, api -import pyblish.api - - -class CollectTemplates(pyblish.api.InstancePlugin): - """Fill templates with data needed for publish""" - - order = pyblish.api.CollectorOrder + 0.1 - label = "Collect and fill Templates" - hosts = ["maya", "nuke", "standalonepublisher"] - - def process(self, instance): - # get all the stuff from the database - subset_name = instance.data["subset"] - asset_name = instance.data["asset"] - project_name = api.Session["AVALON_PROJECT"] - - project = io.find_one( - { - "type": "project", - "name": project_name - }, - projection={"config": True, "data": True} - ) - - template = project["config"]["template"]["publish"] - anatomy = instance.context.data['anatomy'] - - asset = io.find_one({ - "type": "asset", - "name": asset_name, - "parent": project["_id"] - }) - - assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') - - subset = io.find_one({ - "type": "subset", - "name": subset_name, - "parent": asset["_id"] - }) - - # assume there is no version yet, we start at `1` - version = None - version_number = 1 - if subset is not None: - version = io.find_one( - { - "type": "version", - "parent": subset["_id"] - }, - sort=[("name", -1)] - ) - - # if there is a subset there ought to be version - if version is not None: - version_number += int(version["name"]) - - hierarchy = asset['data']['parents'] - if hierarchy: - # hierarchy = os.path.sep.join(hierarchy) - hierarchy = os.path.join(*hierarchy) - else: - hierarchy = "" - - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "version": version_number, - "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} - - # Add datetime data to template data - datetime_data = instance.context.data.get("datetimeData") or {} - template_data.update(datetime_data) - - resolution_width = instance.data.get("resolutionWidth") - resolution_height = instance.data.get("resolutionHeight") - fps = instance.data.get("fps") - - if resolution_width: - template_data["resolution_width"] = resolution_width - if resolution_width: - template_data["resolution_height"] = resolution_height - if resolution_width: - 
template_data["fps"] = fps - - instance.data["template"] = template - instance.data["assumedTemplateData"] = template_data - - # We take the parent folder of representation 'filepath' - instance.data["assumedDestination"] = os.path.dirname( - (anatomy.format(template_data))["publish"]["path"] - ) - self.log.info("Assumed Destination has been created...") - self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) - self.log.debug("__ template: `{}`".format(instance.data["template"])) From 0618b7a85ff9767ac6f5d4eaf3f58bd72f2b433c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:20:04 +0100 Subject: [PATCH 354/393] fix order --- pype/plugins/global/publish/collect_resources_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/collect_resources_path.py b/pype/plugins/global/publish/collect_resources_path.py index 9fc8c576f5..734d1f84e4 100644 --- a/pype/plugins/global/publish/collect_resources_path.py +++ b/pype/plugins/global/publish/collect_resources_path.py @@ -19,7 +19,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): """Generate directory path where the files and resources will be stored""" label = "Collect Resources Path" - order = pyblish.api.CollectorOrder + 0.995 + order = pyblish.api.CollectorOrder + 0.495 def process(self, instance): anatomy = instance.context.data["anatomy"] From a7ca458e4ee1550859fee03f84592aea9615947e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:24:56 +0100 Subject: [PATCH 355/393] collect scene has publish set to True by default --- pype/plugins/maya/publish/collect_scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/collect_scene.py b/pype/plugins/maya/publish/collect_scene.py index f2fbb4d623..089019f2d3 100644 --- a/pype/plugins/maya/publish/collect_scene.py +++ b/pype/plugins/maya/publish/collect_scene.py @@ -35,7 +35,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin): "subset": subset, "asset": os.getenv("AVALON_ASSET", None), "label": subset, - "publish": False, + "publish": True, "family": 'workfile', "families": ['workfile'], "setMembers": [current_file] From 377513f01f77c49d656f152157a1245e63e3bab6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:25:25 +0100 Subject: [PATCH 356/393] removed locations from version --- pype/plugins/global/publish/integrate_new.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d27582bb71..8735f8fed7 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -168,14 +168,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if version_data_instance: version_data.update(version_data_instance) - # TODO remove avalon_location (shall we?) - avalon_location = api.Session["AVALON_LOCATION"] # TODO rename method from `create_version` to # `prepare_version` or similar... 
version = self.create_version( subset=subset, version_number=version_number, - locations=[avalon_location], data=version_data ) @@ -528,26 +525,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): return subset - def create_version(self, subset, version_number, locations, data=None): + def create_version(self, subset, version_number, data=None): """ Copy given source to destination Args: subset (dict): the registered subset of the asset version_number (int): the version number - locations (list): the currently registered locations Returns: dict: collection of data to create a version """ - # Imprint currently registered location - version_locations = [location for location in locations if - location is not None] return {"schema": "pype:version-3.0", "type": "version", "parent": subset["_id"], "name": version_number, - "locations": version_locations, "data": data} def create_version_data(self, context, instance): From 3d1e231a0db9f075eb7b6157cb99665f285e34e1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 17:41:24 +0100 Subject: [PATCH 357/393] added job to store thumbnails to avalon action --- .../action_store_thumbnails_to_avalon.py | 52 +++++++++++++++---- 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index d63d3a6ae3..ce0dfeb244 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -1,6 +1,7 @@ import os import requests import errno +import json from bson.objectid import ObjectId from pype.ftrack import BaseAction @@ -41,13 +42,30 @@ class StoreThumbnailsToAvalon(BaseAction): # DEBUG LINE # root_path = r"C:\Users\jakub.trllo\Desktop\Tests\ftrack_thumbnails" + user = session.query( + "User where username is '{0}'".format(session.api_user) + ).one() + action_job = session.create("Job", { + "user": user, + "status": "running", + "data": json.dumps({ + "description": "Storing thumbnails to avalon." + }) + }) + session.commit() + thumbnail_roots = os.environ.get(self.thumbnail_key) if not thumbnail_roots: + msg = "`{}` environment is not set".format(self.thumbnail_key) + + action_job["status"] = "failed" + session.commit() + + self.log.warning(msg) + return { "success": False, - "message": "`{}` environment is not set".format( - self.thumbnail_key - ) + "message": msg } existing_thumbnail_root = None @@ -57,11 +75,18 @@ class StoreThumbnailsToAvalon(BaseAction): break if existing_thumbnail_root is None: + msg = ( + "Can't access paths, set in `{}` ({})" + ).format(self.thumbnail_key, thumbnail_roots) + + action_job["status"] = "failed" + session.commit() + + self.log.warning(msg) + return { "success": False, - "message": ( - "Can't access paths, set in `{}` ({})" - ).format(self.thumbnail_key, thumbnail_roots) + "message": msg } project = get_project_from_entity(entities[0]) @@ -71,6 +96,9 @@ class StoreThumbnailsToAvalon(BaseAction): if "publish" not in anatomy.templates: msg = "Anatomy does not have set publish key!" + action_job["status"] = "failed" + session.commit() + self.log.warning(msg) return { @@ -84,6 +112,9 @@ class StoreThumbnailsToAvalon(BaseAction): " template in Antomy for project \"{}\"" ).format(project_name) + action_job["status"] = "failed" + session.commit() + self.log.warning(msg) return { @@ -127,6 +158,9 @@ class StoreThumbnailsToAvalon(BaseAction): " can offer. 
{}" ).format(submsg) + action_job["status"] = "failed" + session.commit() + self.log.warning(msg) return { @@ -256,10 +290,8 @@ class StoreThumbnailsToAvalon(BaseAction): {"$set": {"data.thumbnail_id": thumbnail_id}} ) - self.db_con.update_one( - {"_id": avalon_asset["_id"]}, - {"$set": {"data.thumbnail_id": thumbnail_id}} - ) + action_job["status"] = "done" + session.commit() return True From 5290f6dd58de1abf78be75ab54c949c84972ae83 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 18:09:03 +0100 Subject: [PATCH 358/393] fix arguments appending --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 590939df56..fd3c51816a 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -432,7 +432,7 @@ def burnins_from_data( if not value.startswith(TIME_CODE_KEY): value_items = value.split(TIME_CODE_KEY) text = value_items[0].format(**data) - args.append(value_items[0]) + args.append(text) burnin.add_timecode(*args) continue From feb2037c0259dba1fa5b130dd66da8655571ec6d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 18:09:42 +0100 Subject: [PATCH 359/393] excahnge timecode and text keys in arguments --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index fd3c51816a..1d00a08521 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -34,7 +34,7 @@ DRAWTEXT = ( "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" ) TIMECODE = ( - "drawtext=text=\\'%(text)s\\':timecode=\\'%(timecode)s\\'" + "drawtext=timecode=\\'%(timecode)s\\':text=\\'%(text)s\\'" ":timecode_rate=%(fps).2f:x=%(x)s:y=%(y)s:fontcolor=" "%(color)s@%(opacity).1f:fontsize=%(size)d:fontfile='%(font)s'" ) From 773fbf106a89f6d901addf543dda849d86f8ae1d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 14 Feb 2020 18:23:37 +0100 Subject: [PATCH 360/393] ftrack server won't raise exception if there are any event handlers to register --- pype/ftrack/ftrack_server/ftrack_server.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py index eebc3f6ec4..8464203c1d 100644 --- a/pype/ftrack/ftrack_server/ftrack_server.py +++ b/pype/ftrack/ftrack_server/ftrack_server.py @@ -100,9 +100,9 @@ class FtrackServer: log.warning(msg, exc_info=e) if len(register_functions_dict) < 1: - raise Exception(( - "There are no events with register function." 
-                " Registered paths: \"{}\""
+            log.warning((
+                "There are no events with `register` function"
+                " in registered paths: \"{}\""
             ).format("| ".join(paths)))
 
         # Load presets for setting plugins
@@ -122,7 +122,7 @@ class FtrackServer:
                 else:
                     register(self.session, plugins_presets=plugins_presets)
 
-                if function_counter%7 == 0:
+                if function_counter % 7 == 0:
                     time.sleep(0.1)
                 function_counter += 1
         except Exception as exc:

From d1372fa25fb2c3fd5c2ccdbc101db73aaf8c74bf Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Sat, 15 Feb 2020 11:36:27 +0100
Subject: [PATCH 361/393] reversed logic of extract review conditions which
 don't have an else statement, reducing the deep indentation

---
 pype/plugins/global/publish/extract_review.py | 632 +++++++++---------
 1 file changed, 320 insertions(+), 312 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 2e79d86c38..4d63e2c641 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -43,320 +43,328 @@ class ExtractReview(pyblish.api.InstancePlugin):
         # filter out mov and img sequences
         representations_new = representations[:]
         for repre in representations:
-            if repre['ext'] in self.ext_filter:
-                tags = repre.get("tags", [])
-
-                if "thumbnail" in tags:
-                    continue
-
-                self.log.info("Try repre: {}".format(repre))
-
-                if "review" in tags:
-                    staging_dir = repre["stagingDir"]
-
-                    # iterating preset output profiles
-                    for name, profile in output_profiles.items():
-                        repre_new = repre.copy()
-                        ext = profile.get("ext", None)
-                        p_tags = profile.get('tags', [])
-                        self.log.info("p_tags: `{}`".format(p_tags))
-
-                        # adding control for presets to be sequence
-                        # or single file
-                        is_sequence = ("sequence" in p_tags) and (ext in (
-                            "png", "jpg", "jpeg"))
-
-                        self.log.debug("Profile name: {}".format(name))
-
-                        if not ext:
-                            ext = "mov"
-                            self.log.warning(
-                                str("`ext` attribute not in output "
-                                    "profile. 
Setting to default ext: `mov`")) - - self.log.debug( - "instance.families: {}".format( - instance.data['families'])) - self.log.debug( - "profile.families: {}".format(profile['families'])) - - if any(item in instance.data['families'] for item in profile['families']): - if isinstance(repre["files"], list): - collections, remainder = clique.assemble( - repre["files"]) - - full_input_path = os.path.join( - staging_dir, collections[0].format( - '{head}{padding}{tail}') - ) - - filename = collections[0].format('{head}') - if filename.endswith('.'): - filename = filename[:-1] - else: - full_input_path = os.path.join( - staging_dir, repre["files"]) - filename = repre["files"].split(".")[0] - - repr_file = filename + "_{0}.{1}".format(name, ext) - full_output_path = os.path.join( - staging_dir, repr_file) - - if is_sequence: - filename_base = filename + "_{0}".format(name) - repr_file = filename_base + ".%08d.{0}".format( - ext) - repre_new["sequence_file"] = repr_file - full_output_path = os.path.join( - staging_dir, filename_base, repr_file) - - self.log.info("input {}".format(full_input_path)) - self.log.info("output {}".format(full_output_path)) - - new_tags = [x for x in tags if x != "delete"] - - # add families - [instance.data["families"].append(t) - for t in p_tags - if t not in instance.data["families"]] - - # add to - [new_tags.append(t) for t in p_tags - if t not in new_tags] - - self.log.info("new_tags: `{}`".format(new_tags)) - - input_args = [] - - # overrides output file - input_args.append("-y") - - # preset's input data - input_args.extend(profile.get('input', [])) - - # necessary input data - # adds start arg only if image sequence - if isinstance(repre["files"], list): - input_args.append( - "-start_number {0} -framerate {1}".format( - start_frame, fps)) - - input_args.append("-i {}".format(full_input_path)) - - for audio in instance.data.get("audio", []): - offset_frames = ( - instance.data.get("startFrameReview") - - audio["offset"] - ) - offset_seconds = offset_frames / fps - - if offset_seconds > 0: - input_args.append("-ss") - else: - input_args.append("-itsoffset") - - input_args.append(str(abs(offset_seconds))) - - input_args.extend( - ["-i", audio["filename"]] - ) - - # Need to merge audio if there are more - # than 1 input. 
- if len(instance.data["audio"]) > 1: - input_args.extend( - [ - "-filter_complex", - "amerge", - "-ac", - "2" - ] - ) - - output_args = [] - codec_args = profile.get('codec', []) - output_args.extend(codec_args) - # preset's output data - output_args.extend(profile.get('output', [])) - - # defining image ratios - resolution_ratio = float(resolution_width / ( - resolution_height * pixel_aspect)) - delivery_ratio = float(to_width) / float(to_height) - self.log.debug(resolution_ratio) - self.log.debug(delivery_ratio) - - # get scale factor - scale_factor = to_height / ( - resolution_height * pixel_aspect) - self.log.debug(scale_factor) - - # letter_box - lb = profile.get('letter_box', 0) - if lb != 0: - ffmpet_width = to_width - ffmpet_height = to_height - if "reformat" not in p_tags: - lb /= pixel_aspect - if resolution_ratio != delivery_ratio: - ffmpet_width = resolution_width - ffmpet_height = int( - resolution_height * pixel_aspect) - else: - if resolution_ratio != delivery_ratio: - lb /= scale_factor - else: - lb /= pixel_aspect - - output_args.append(str( - "-filter:v scale={0}x{1}:flags=lanczos," - "setsar=1,drawbox=0:0:iw:" - "round((ih-(iw*(1/{2})))/2):t=fill:" - "c=black,drawbox=0:ih-round((ih-(iw*(" - "1/{2})))/2):iw:round((ih-(iw*(1/{2})))" - "/2):t=fill:c=black").format( - ffmpet_width, ffmpet_height, lb)) - - # In case audio is longer than video. - output_args.append("-shortest") - - # output filename - output_args.append(full_output_path) - - self.log.debug( - "__ pixel_aspect: `{}`".format(pixel_aspect)) - self.log.debug( - "__ resolution_width: `{}`".format( - resolution_width)) - self.log.debug( - "__ resolution_height: `{}`".format( - resolution_height)) - - # scaling none square pixels and 1920 width - if "reformat" in p_tags: - if resolution_ratio < delivery_ratio: - self.log.debug("lower then delivery") - width_scale = int(to_width * scale_factor) - width_half_pad = int(( - to_width - width_scale)/2) - height_scale = to_height - height_half_pad = 0 - else: - self.log.debug("heigher then delivery") - width_scale = to_width - width_half_pad = 0 - scale_factor = float(to_width) / float( - resolution_width) - self.log.debug(scale_factor) - height_scale = int( - resolution_height * scale_factor) - height_half_pad = int( - (to_height - height_scale)/2) - - self.log.debug( - "__ width_scale: `{}`".format(width_scale)) - self.log.debug( - "__ width_half_pad: `{}`".format( - width_half_pad)) - self.log.debug( - "__ height_scale: `{}`".format( - height_scale)) - self.log.debug( - "__ height_half_pad: `{}`".format( - height_half_pad)) - - scaling_arg = str( - "scale={0}x{1}:flags=lanczos," - "pad={2}:{3}:{4}:{5}:black,setsar=1" - ).format(width_scale, height_scale, - to_width, to_height, - width_half_pad, - height_half_pad - ) - - vf_back = self.add_video_filter_args( - output_args, scaling_arg) - # add it to output_args - output_args.insert(0, vf_back) - - # baking lut file application - lut_path = instance.data.get("lutPath") - if lut_path and ("bake-lut" in p_tags): - # removing Gama info as it is all baked in lut - gamma = next((g for g in input_args - if "-gamma" in g), None) - if gamma: - input_args.remove(gamma) - - # create lut argument - lut_arg = "lut3d=file='{}'".format( - lut_path.replace( - "\\", "/").replace(":/", "\\:/") - ) - lut_arg += ",colormatrix=bt601:bt709" - - vf_back = self.add_video_filter_args( - output_args, lut_arg) - # add it to output_args - output_args.insert(0, vf_back) - self.log.info("Added Lut to ffmpeg command") - self.log.debug( - "_ 
output_args: `{}`".format(output_args)) - - if is_sequence: - stg_dir = os.path.dirname(full_output_path) - - if not os.path.exists(stg_dir): - self.log.debug( - "creating dir: {}".format(stg_dir)) - os.mkdir(stg_dir) - - mov_args = [ - os.path.join( - os.environ.get( - "FFMPEG_PATH", - ""), "ffmpeg"), - " ".join(input_args), - " ".join(output_args) - ] - subprcs_cmd = " ".join(mov_args) - - # run subprocess - self.log.debug("Executing: {}".format(subprcs_cmd)) - output = pype.api.subprocess(subprcs_cmd) - self.log.debug("Output: {}".format(output)) - - # create representation data - repre_new.update({ - 'name': name, - 'ext': ext, - 'files': repr_file, - "tags": new_tags, - "outputName": name, - "codec": codec_args, - "_profile": profile, - "resolutionHeight": resolution_height, - "resolutionWidth": resolution_width, - }) - if is_sequence: - repre_new.update({ - "stagingDir": stg_dir, - "files": os.listdir(stg_dir) - }) - - if repre_new.get('preview'): - repre_new.pop("preview") - if repre_new.get('thumbnail'): - repre_new.pop("thumbnail") - - # adding representation - self.log.debug("Adding: {}".format(repre_new)) - representations_new.append(repre_new) - else: - continue - else: + if repre['ext'] not in self.ext_filter: continue + tags = repre.get("tags", []) + + if "thumbnail" in tags: + continue + + self.log.info("Try repre: {}".format(repre)) + + if "review" not in tags: + continue + + staging_dir = repre["stagingDir"] + + # iterating preset output profiles + for name, profile in output_profiles.items(): + repre_new = repre.copy() + ext = profile.get("ext", None) + p_tags = profile.get('tags', []) + self.log.info("p_tags: `{}`".format(p_tags)) + + # adding control for presets to be sequence + # or single file + is_sequence = ("sequence" in p_tags) and (ext in ( + "png", "jpg", "jpeg")) + + self.log.debug("Profile name: {}".format(name)) + + if not ext: + ext = "mov" + self.log.warning( + str("`ext` attribute not in output " + "profile. 
Setting to default ext: `mov`")) + + self.log.debug( + "instance.families: {}".format( + instance.data['families'])) + self.log.debug( + "profile.families: {}".format(profile['families'])) + + profile_family_check = False + for _family in profile['families']: + if _family in instance.data['families']: + profile_family_check = True + break + + if not profile_family_check: + continue + + if isinstance(repre["files"], list): + collections, remainder = clique.assemble( + repre["files"]) + + full_input_path = os.path.join( + staging_dir, collections[0].format( + '{head}{padding}{tail}') + ) + + filename = collections[0].format('{head}') + if filename.endswith('.'): + filename = filename[:-1] + else: + full_input_path = os.path.join( + staging_dir, repre["files"]) + filename = repre["files"].split(".")[0] + + repr_file = filename + "_{0}.{1}".format(name, ext) + full_output_path = os.path.join( + staging_dir, repr_file) + + if is_sequence: + filename_base = filename + "_{0}".format(name) + repr_file = filename_base + ".%08d.{0}".format( + ext) + repre_new["sequence_file"] = repr_file + full_output_path = os.path.join( + staging_dir, filename_base, repr_file) + + self.log.info("input {}".format(full_input_path)) + self.log.info("output {}".format(full_output_path)) + + new_tags = [x for x in tags if x != "delete"] + + # add families + [instance.data["families"].append(t) + for t in p_tags + if t not in instance.data["families"]] + + # add to + [new_tags.append(t) for t in p_tags + if t not in new_tags] + + self.log.info("new_tags: `{}`".format(new_tags)) + + input_args = [] + + # overrides output file + input_args.append("-y") + + # preset's input data + input_args.extend(profile.get('input', [])) + + # necessary input data + # adds start arg only if image sequence + if isinstance(repre["files"], list): + input_args.append( + "-start_number {0} -framerate {1}".format( + start_frame, fps)) + + input_args.append("-i {}".format(full_input_path)) + + for audio in instance.data.get("audio", []): + offset_frames = ( + instance.data.get("startFrameReview") - + audio["offset"] + ) + offset_seconds = offset_frames / fps + + if offset_seconds > 0: + input_args.append("-ss") + else: + input_args.append("-itsoffset") + + input_args.append(str(abs(offset_seconds))) + + input_args.extend( + ["-i", audio["filename"]] + ) + + # Need to merge audio if there are more + # than 1 input. 
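+            # (ffmpeg's "amerge" filter below joins every audio input into
+            # one stream and "-ac 2" forces a stereo output)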
+ if len(instance.data["audio"]) > 1: + input_args.extend( + [ + "-filter_complex", + "amerge", + "-ac", + "2" + ] + ) + + output_args = [] + codec_args = profile.get('codec', []) + output_args.extend(codec_args) + # preset's output data + output_args.extend(profile.get('output', [])) + + # defining image ratios + resolution_ratio = float(resolution_width / ( + resolution_height * pixel_aspect)) + delivery_ratio = float(to_width) / float(to_height) + self.log.debug(resolution_ratio) + self.log.debug(delivery_ratio) + + # get scale factor + scale_factor = to_height / ( + resolution_height * pixel_aspect) + self.log.debug(scale_factor) + + # letter_box + lb = profile.get('letter_box', 0) + if lb != 0: + ffmpet_width = to_width + ffmpet_height = to_height + if "reformat" not in p_tags: + lb /= pixel_aspect + if resolution_ratio != delivery_ratio: + ffmpet_width = resolution_width + ffmpet_height = int( + resolution_height * pixel_aspect) + else: + if resolution_ratio != delivery_ratio: + lb /= scale_factor + else: + lb /= pixel_aspect + + output_args.append(str( + "-filter:v scale={0}x{1}:flags=lanczos," + "setsar=1,drawbox=0:0:iw:" + "round((ih-(iw*(1/{2})))/2):t=fill:" + "c=black,drawbox=0:ih-round((ih-(iw*(" + "1/{2})))/2):iw:round((ih-(iw*(1/{2})))" + "/2):t=fill:c=black").format( + ffmpet_width, ffmpet_height, lb)) + + # In case audio is longer than video. + output_args.append("-shortest") + + # output filename + output_args.append(full_output_path) + + self.log.debug( + "__ pixel_aspect: `{}`".format(pixel_aspect)) + self.log.debug( + "__ resolution_width: `{}`".format( + resolution_width)) + self.log.debug( + "__ resolution_height: `{}`".format( + resolution_height)) + + # scaling none square pixels and 1920 width + if "reformat" in p_tags: + if resolution_ratio < delivery_ratio: + self.log.debug("lower then delivery") + width_scale = int(to_width * scale_factor) + width_half_pad = int(( + to_width - width_scale)/2) + height_scale = to_height + height_half_pad = 0 + else: + self.log.debug("heigher then delivery") + width_scale = to_width + width_half_pad = 0 + scale_factor = float(to_width) / float( + resolution_width) + self.log.debug(scale_factor) + height_scale = int( + resolution_height * scale_factor) + height_half_pad = int( + (to_height - height_scale)/2) + + self.log.debug( + "__ width_scale: `{}`".format(width_scale)) + self.log.debug( + "__ width_half_pad: `{}`".format( + width_half_pad)) + self.log.debug( + "__ height_scale: `{}`".format( + height_scale)) + self.log.debug( + "__ height_half_pad: `{}`".format( + height_half_pad)) + + scaling_arg = str( + "scale={0}x{1}:flags=lanczos," + "pad={2}:{3}:{4}:{5}:black,setsar=1" + ).format(width_scale, height_scale, + to_width, to_height, + width_half_pad, + height_half_pad + ) + + vf_back = self.add_video_filter_args( + output_args, scaling_arg) + # add it to output_args + output_args.insert(0, vf_back) + + # baking lut file application + lut_path = instance.data.get("lutPath") + if lut_path and ("bake-lut" in p_tags): + # removing Gama info as it is all baked in lut + gamma = next((g for g in input_args + if "-gamma" in g), None) + if gamma: + input_args.remove(gamma) + + # create lut argument + lut_arg = "lut3d=file='{}'".format( + lut_path.replace( + "\\", "/").replace(":/", "\\:/") + ) + lut_arg += ",colormatrix=bt601:bt709" + + vf_back = self.add_video_filter_args( + output_args, lut_arg) + # add it to output_args + output_args.insert(0, vf_back) + self.log.info("Added Lut to ffmpeg command") + self.log.debug( + "_ 
output_args: `{}`".format(output_args)) + + if is_sequence: + stg_dir = os.path.dirname(full_output_path) + + if not os.path.exists(stg_dir): + self.log.debug( + "creating dir: {}".format(stg_dir)) + os.mkdir(stg_dir) + + mov_args = [ + os.path.join( + os.environ.get( + "FFMPEG_PATH", + ""), "ffmpeg"), + " ".join(input_args), + " ".join(output_args) + ] + subprcs_cmd = " ".join(mov_args) + + # run subprocess + self.log.debug("Executing: {}".format(subprcs_cmd)) + output = pype.api.subprocess(subprcs_cmd) + self.log.debug("Output: {}".format(output)) + + # create representation data + repre_new.update({ + 'name': name, + 'ext': ext, + 'files': repr_file, + "tags": new_tags, + "outputName": name, + "codec": codec_args, + "_profile": profile, + "resolutionHeight": resolution_height, + "resolutionWidth": resolution_width, + }) + if is_sequence: + repre_new.update({ + "stagingDir": stg_dir, + "files": os.listdir(stg_dir) + }) + + if repre_new.get('preview'): + repre_new.pop("preview") + if repre_new.get('thumbnail'): + repre_new.pop("thumbnail") + + # adding representation + self.log.debug("Adding: {}".format(repre_new)) + representations_new.append(repre_new) + for repre in representations_new: if "delete" in repre.get("tags", []): representations_new.remove(repre) From 5f5a80818c20e26deeded4f616d477a479999ee8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 16 Feb 2020 20:07:42 +0100 Subject: [PATCH 362/393] fix(global): fixing version collection --- pype/plugins/nuke/publish/collect_writes.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index bf1c6a4b66..c29f676ef7 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -52,9 +52,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) - # get version to instance for integration - instance.data['version'] = instance.context.data.get( - "version", pype.get_version_from_path(nuke.root().name())) + # # get version to instance for integration + # instance.data['version'] = instance.context.data.get( + # "version", pype.get_version_from_path(nuke.root().name())) self.log.debug('Write Version: %s' % instance.data('version')) @@ -92,16 +92,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): # Add version data to instance version_data = { - "handles": handle_start, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "version": int(instance.data['version']), "colorspace": node["colorspace"].value(), - "families": ["render"], - "subset": instance.data["subset"], - "fps": instance.context.data["fps"] } instance.data["family"] = "write" From e8499b43ff4cf6a0b9a15b502fbf164474ca0e49 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sun, 16 Feb 2020 20:08:18 +0100 Subject: [PATCH 363/393] fix(global): wrong version format print --- pype/plugins/global/publish/integrate_new.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index d27582bb71..bb65a02bce 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -160,7 +160,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset = self.get_subset(asset_entity, instance) 
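+        # presumably the collected version is not guaranteed to be an int
+        # here (see the collector change above), hence the plain "v{}"
+        # format in the line below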
version_number = instance.data["version"] - self.log.debug("Next version: v{0:03d}".format(version_number)) + self.log.debug("Next version: v{}".format(version_number)) version_data = self.create_version_data(context, instance) From 957ca8ecd2f03097e4c1d48dff955d49b4150825 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 14:02:59 +0100 Subject: [PATCH 364/393] fix current frame key --- pype/scripts/otio_burnin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 1d00a08521..e34f7235e4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -378,7 +378,7 @@ def burnins_from_data( # Check frame start and add expression if is available if frame_start is not None: - data[CURRENT_FRAME_KEY] = r'%%{eif\:n+%d\:d}' % frame_start + data[CURRENT_FRAME_KEY[1:-1]] = r'%%{eif\:n+%d\:d}' % frame_start if frame_start_tc is not None: data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY From b657af153f7d9af72ea73327ebbef4a5e8a333eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 15:25:33 +0100 Subject: [PATCH 365/393] fix(global): removing unnecessary host argument --- pype/plugins/global/publish/collect_anatomy.py | 2 +- pype/plugins/global/publish/collect_instance_anatomy_data.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/collect_anatomy.py b/pype/plugins/global/publish/collect_anatomy.py index 0831c16d32..ae83e39513 100644 --- a/pype/plugins/global/publish/collect_anatomy.py +++ b/pype/plugins/global/publish/collect_anatomy.py @@ -18,7 +18,7 @@ Provides: import os import json -from avalon import io, api, lib +from avalon import api, lib from pypeapp import Anatomy import pyblish.api diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_instance_anatomy_data.py index 9c6a8b08f2..825c48dcf4 100644 --- a/pype/plugins/global/publish/collect_instance_anatomy_data.py +++ b/pype/plugins/global/publish/collect_instance_anatomy_data.py @@ -33,7 +33,6 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.49 label = "Collect instance anatomy data" - hosts = ["maya", "nuke", "standalonepublisher"] def process(self, instance): # get all the stuff from the database From 3922529058d43a631a7269ba4006707edd68c150 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 15:54:44 +0100 Subject: [PATCH 366/393] escape colon and comma in texts --- pype/scripts/otio_burnin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index e34f7235e4..c61ea66d2d 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -199,7 +199,11 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): """ resolution = self.resolution data = { - 'text': text, + 'text': ( + text + .replace(",", r"\,") + .replace(':', r'\:') + ), 'color': options['font_color'], 'size': options['font_size'] } From d9ffc411a4d65559e436e7d220b8023c8eba5dc6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 16:48:36 +0100 Subject: [PATCH 367/393] integrate new's version override is ready to handle "append" method per instance --- pype/plugins/global/publish/integrate_new.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index b5b6b10aa2..2e2094dfc8 100644 
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -204,6 +204,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             data=version_data)
 
         self.log.debug("Creating version ...")
+
+        new_repre_names_low = [_repre["name"].lower() for _repre in repres]
+
         existing_version = io.find_one({
             'type': 'version',
             'parent': subset["_id"],
@@ -213,6 +216,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if existing_version is None:
             version_id = io.insert_one(version).inserted_id
         else:
+            # Check if instance have set `append` mode which cause that
+            # only replicated representations are set to archive
+            append_repres = instance.data.get("append", False)
+
             # Update version data
             io.update_many({
                 'type': 'version',
@@ -230,6 +237,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             }))
             bulk_writes = []
             for repre in current_repres:
+                if append_repres:
+                    # archive only duplicated representations
+                    if repre["name"].lower() not in new_repre_names_low:
+                        continue
                 # Representation must change type,
                 # `_id` must be stored to other key and replaced with new
                 # - that is because new representations should have same ID
@@ -284,7 +295,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         if 'transfers' not in instance.data:
             instance.data['transfers'] = []
 
-        new_repre_names = []
         for idx, repre in enumerate(instance.data["representations"]):
 
             # Collection
@@ -454,9 +464,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 continue
             repre_context[key] = template_data[key]
 
-        repre_name = repre['name']
-        new_repre_names.append(repre_name)
-
         # Use previous representation's id if there are any
         repre_id = None
        for _repre in existing_repres:

From 06f9187119fb15074717c83c224b96e287e7e7 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 17 Feb 2020 16:52:20 +0100
Subject: [PATCH 368/393] thumbnail is also stored to asset in store
 thumbnails action

---
 pype/ftrack/actions/action_store_thumbnails_to_avalon.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
index ce0dfeb244..21ac6666d5 100644
--- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
+++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
@@ -290,6 +290,11 @@ class StoreThumbnailsToAvalon(BaseAction):
             {"$set": {"data.thumbnail_id": thumbnail_id}}
         )
 
+        self.db_con.update_one(
+            {"_id": avalon_asset["_id"]},
+            {"$set": {"data.thumbnail_id": thumbnail_id}}
+        )
+
         action_job["status"] = "done"
         session.commit()
 
         return True

From 4256eccc2b797d1e8af4d800e11a14c78222c669 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 17 Feb 2020 17:05:08 +0100
Subject: [PATCH 369/393] fixed a few merge issues

---
 pype/plugins/global/publish/integrate_new.py | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 9de29cd387..8d41aa7907 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -196,6 +196,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             append_repres = instance.data.get("append", False)
 
             # Update version data
+            # TODO query by _id and
             io.update_many({
                 'type': 'version',
                 'parent': subset["_id"],
@@ -322,7 +323,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
             index_frame_start = None
             if repre.get("frameStart"):
-                frame_start_padding = anatomy.templates["render"]["padding"]
+                frame_start_padding = (
+                    anatomy.templates["render"]["padding"]
+                )
                 index_frame_start = int(repre.get("frameStart"))
 
             # exception for slate workflow
@@ -407,9 +410,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         # Use previous representation's id if there are any
         repre_id = None
+        repre_name_low = repre["name"].lower()
         for _repre in existing_repres:
             # NOTE should we check lowered names?
-            if repre_name == _repre["name"]:
+            if repre_name_low == _repre["name"]:
                 repre_id = _repre["orig_id"]
                 break
@@ -435,7 +439,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             representation["context"]["output"] = repre['outputName']
 
         if sequence_repre and repre.get("frameStart"):
-            representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart"))
+            representation['context']['frame'] = (
+                src_padding_exp % int(repre.get("frameStart"))
+            )
 
         self.log.debug("__ representation: {}".format(representation))
         destination_list.append(dst)

From d3823aecd1c36fa876142d7775fe5f47cbf913eb Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 17 Feb 2020 17:34:59 +0100
Subject: [PATCH 370/393] fixed skipping jpeg extraction

---
 pype/plugins/global/publish/extract_jpeg.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 7c0820ea28..28d16198cd 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -43,7 +43,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
             self.log.debug(repre)
             if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
                 if not isinstance(repre['files'], list):
-                    return
+                    continue
 
                 input_file = repre['files'][0]

From 1a1e73649866a77e932075b901fe8edccf2e29ca Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 17 Feb 2020 17:36:31 +0100
Subject: [PATCH 371/393] reduced indentation by changing validation condition
 logic

---
 pype/plugins/global/publish/extract_jpeg.py | 95 +++++++++++----------
 1 file changed, 49 insertions(+), 46 deletions(-)

diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
index 28d16198cd..abd20bb9ea 100644
--- a/pype/plugins/global/publish/extract_jpeg.py
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -41,63 +41,66 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
 
         for repre in representations:
             self.log.debug(repre)
-            if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
-                if not isinstance(repre['files'], list):
-                    continue
+            valid = 'review' in repre['tags'] or "thumb-nuke" in repre['tags']
+            if not valid:
+                continue
 
-                input_file = repre['files'][0]
+            if not isinstance(repre['files'], list):
+                continue
 
-                # input_file = (
-                #     collections[0].format('{head}{padding}{tail}') % start
-                # )
-                full_input_path = os.path.join(stagingdir, input_file)
-                self.log.info("input {}".format(full_input_path))
+            input_file = repre['files'][0]
 
-                filename = os.path.splitext(input_file)[0]
-                if not filename.endswith('.'):
-                    filename += "."
-                jpeg_file = filename + "jpg"
-                full_output_path = os.path.join(stagingdir, jpeg_file)
+            # input_file = (
+            #     collections[0].format('{head}{padding}{tail}') % start
+            # )
+            full_input_path = os.path.join(stagingdir, input_file)
+            self.log.info("input {}".format(full_input_path))
 
-                self.log.info("output {}".format(full_output_path))
+            filename = os.path.splitext(input_file)[0]
+            if not filename.endswith('.'):
+                filename += "."
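+            # the thumbnail is written next to the input frame inside the
+            # staging directory as "<base>.jpg"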
+ jpeg_file = filename + "jpg" + full_output_path = os.path.join(stagingdir, jpeg_file) - config_data = instance.context.data['output_repre_config'] + self.log.info("output {}".format(full_output_path)) - proj_name = os.environ.get('AVALON_PROJECT', '__default__') - profile = config_data.get(proj_name, config_data['__default__']) + config_data = instance.context.data['output_repre_config'] - jpeg_items = [] - jpeg_items.append( - os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) - # override file if already exists - jpeg_items.append("-y") - # use same input args like with mov - jpeg_items.extend(profile.get('input', [])) - # input file - jpeg_items.append("-i {}".format(full_input_path)) - # output file - jpeg_items.append(full_output_path) + proj_name = os.environ.get('AVALON_PROJECT', '__default__') + profile = config_data.get(proj_name, config_data['__default__']) - subprocess_jpeg = " ".join(jpeg_items) + jpeg_items = [] + jpeg_items.append( + os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg")) + # override file if already exists + jpeg_items.append("-y") + # use same input args like with mov + jpeg_items.extend(profile.get('input', [])) + # input file + jpeg_items.append("-i {}".format(full_input_path)) + # output file + jpeg_items.append(full_output_path) - # run subprocess - self.log.debug("{}".format(subprocess_jpeg)) - pype.api.subprocess(subprocess_jpeg) + subprocess_jpeg = " ".join(jpeg_items) - if "representations" not in instance.data: - instance.data["representations"] = [] + # run subprocess + self.log.debug("{}".format(subprocess_jpeg)) + pype.api.subprocess(subprocess_jpeg) - representation = { - 'name': 'thumbnail', - 'ext': 'jpg', - 'files': jpeg_file, - "stagingDir": stagingdir, - "thumbnail": True, - "tags": ['thumbnail'] - } + if "representations" not in instance.data: + instance.data["representations"] = [] - # adding representation - self.log.debug("Adding: {}".format(representation)) - representations_new.append(representation) + representation = { + 'name': 'thumbnail', + 'ext': 'jpg', + 'files': jpeg_file, + "stagingDir": stagingdir, + "thumbnail": True, + "tags": ['thumbnail'] + } + + # adding representation + self.log.debug("Adding: {}".format(representation)) + representations_new.append(representation) instance.data["representations"] = representations_new From 3fe6a13f0c2c97adf34bf6aea042970fdf90e572 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 17 Feb 2020 17:40:49 +0100 Subject: [PATCH 372/393] sync actions ignore milestones --- pype/ftrack/lib/avalon_sync.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index f08dc73c19..f5b4c4b8c3 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -236,6 +236,7 @@ class SyncEntitiesFactory: " from TypedContext where project_id is \"{}\"" ) ignore_custom_attr_key = "avalon_ignore_sync" + ignore_entity_types = ["milestone"] report_splitter = {"type": "label", "value": "---"} @@ -366,7 +367,10 @@ class SyncEntitiesFactory: parent_id = entity["parent_id"] entity_type = entity.entity_type entity_type_low = entity_type.lower() - if entity_type_low == "task": + if entity_type_low in self.ignore_entity_types: + continue + + elif entity_type_low == "task": entities_dict[parent_id]["tasks"].append(entity["name"]) continue From 871870c603119c4920dd15f4995e27425898ceb8 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 17 Feb 2020 18:34:01 +0100 Subject: [PATCH 373/393] fix remaining dashes --- 
pype/ftrack/actions/action_store_thumbnails_to_avalon.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py index 21ac6666d5..7adc36f4b5 100644 --- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py +++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py @@ -291,9 +291,9 @@ class StoreThumbnailsToAvalon(BaseAction): ) self.db_con.update_one( -- {"_id": avalon_asset["_id"]}, -- {"$set": {"data.thumbnail_id": thumbnail_id}} -- ) + {"_id": avalon_asset["_id"]}, + {"$set": {"data.thumbnail_id": thumbnail_id}} + ) action_job["status"] = "done" session.commit() From 07dbb2533d3a091c27a4a40a237b46377509f9b4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 21:07:26 +0100 Subject: [PATCH 374/393] fix(nks): missing family if `review` applied --- pype/plugins/nukestudio/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nukestudio/publish/collect_audio.py b/pype/plugins/nukestudio/publish/collect_audio.py index 61419b1ad9..e141f50488 100644 --- a/pype/plugins/nukestudio/publish/collect_audio.py +++ b/pype/plugins/nukestudio/publish/collect_audio.py @@ -15,7 +15,7 @@ class CollectAudio(api.InstancePlugin): order = api.CollectorOrder + 0.1025 label = "Collect Audio" hosts = ["nukestudio"] - families = ["clip"] + families = ["clip", "plate"] def process(self, instance): # Exclude non-tagged instances. From bc4447a971e48ee375801146ff391c66c1e1c681 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Feb 2020 21:45:15 +0100 Subject: [PATCH 375/393] fix(nks): tags collection for plates and audio wrong subset name --- .../nukestudio/publish/collect_audio.py | 20 ++++++++++--------- .../nukestudio/publish/collect_plates.py | 11 ++++------ 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_audio.py b/pype/plugins/nukestudio/publish/collect_audio.py index e141f50488..727d7da795 100644 --- a/pype/plugins/nukestudio/publish/collect_audio.py +++ b/pype/plugins/nukestudio/publish/collect_audio.py @@ -1,5 +1,5 @@ from pyblish import api - +import os class CollectAudio(api.InstancePlugin): """Collect audio from tags. @@ -12,17 +12,19 @@ class CollectAudio(api.InstancePlugin): """ # Run just before CollectSubsets - order = api.CollectorOrder + 0.1025 + order = api.CollectorOrder + 0.1021 label = "Collect Audio" hosts = ["nukestudio"] - families = ["clip", "plate"] + families = ["clip"] def process(self, instance): # Exclude non-tagged instances. 
         tagged = False
         for tag in instance.data["tags"]:
-            family = dict(tag["metadata"]).get("tag.family", "")
+            tag_data = dict(tag["metadata"])
+            family = tag_data.get("tag.family", "")
             if family.lower() == "audio":
+                subset = tag_data.get("tag.subset", "Main")
                 tagged = True

         if not tagged:
@@ -40,14 +42,14 @@ class CollectAudio(api.InstancePlugin):
         data["family"] = "audio"
         data["families"] = ["ftrack"]

-        subset = ""
-        for tag in instance.data["tags"]:
-            tag_data = dict(tag["metadata"])
-            if "tag.subset" in tag_data:
-                subset = tag_data["tag.subset"]
         data["subset"] = "audio" + subset.title()

         data["source"] = data["sourcePath"]

+        data["label"] = "{} - {} - ({})".format(
+            data['asset'], data["subset"], os.path.splitext(data["sourcePath"])[
+                1]
+        )
+
         self.log.debug("Creating instance with data: {}".format(data))

         instance.context.create_instance(**data)

diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py
index 75eb5bb043..e0ecbaf302 100644
--- a/pype/plugins/nukestudio/publish/collect_plates.py
+++ b/pype/plugins/nukestudio/publish/collect_plates.py
@@ -23,8 +23,10 @@ class CollectPlates(api.InstancePlugin):
         # Exclude non-tagged instances.
         tagged = False
         for tag in instance.data["tags"]:
-            family = dict(tag["metadata"]).get("tag.family", "")
+            tag_data = dict(tag["metadata"])
+            family = tag_data.get("tag.family", "")
             if family.lower() == "plate":
+                subset = tag_data.get("tag.subset", "Main")
                 tagged = True
                 break

@@ -43,12 +45,7 @@ class CollectPlates(api.InstancePlugin):
         data["family"] = family.lower()
         data["families"] = ["ftrack"] + instance.data["families"][1:]
         data["source"] = data["sourcePath"]
-
-        subset = ""
-        for tag in instance.data["tags"]:
-            tag_data = dict(tag["metadata"])
-            if "tag.subset" in tag_data:
-                subset = tag_data["tag.subset"]
+
         data["subset"] = data["family"] + subset.title()

         data["name"] = data["subset"] + "_" + data["asset"]

From f3fc92881ad360017a03d905a55d1485256ce9e5 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 17 Feb 2020 21:47:43 +0100
Subject: [PATCH 376/393] fix(nks): incorrect way of collecting frame start - should not be offset by handle start

---
 pype/plugins/nukestudio/publish/collect_tag_framestart.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/nukestudio/publish/collect_tag_framestart.py b/pype/plugins/nukestudio/publish/collect_tag_framestart.py
index 1342d996ab..993aa99a3e 100644
--- a/pype/plugins/nukestudio/publish/collect_tag_framestart.py
+++ b/pype/plugins/nukestudio/publish/collect_tag_framestart.py
@@ -30,9 +30,12 @@ class CollectClipTagFrameStart(api.InstancePlugin):
             except ValueError:
                 if "source" in t_value:
                     source_first = instance.data["sourceFirst"]
+                    if source_first == 0:
+                        source_first = 1
+                    self.log.info("Start frame on `{0}`".format(source_first))
                     source_in = instance.data["sourceIn"]
-                    handle_start = instance.data["handleStart"]
-                    start_frame = (source_first + source_in) - handle_start
+                    self.log.info("Start frame on `{0}`".format(source_in))
+                    start_frame = source_first + source_in

             instance.data["startingFrame"] = start_frame
             self.log.info("Start frame on `{0}` set to `{1}`".format(

From 188881a0a5873ee7984b610f64599eebb57d9ac0 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 18 Feb 2020 00:02:50 +0100
Subject: [PATCH 377/393] fix(nks): review family to `plate` and plates cleanup

---
 .../nukestudio/publish/collect_plates.py | 17 ++++++++++-------
 .../nukestudio/publish/collect_reviews.py | 2 +-
 2 files changed, 11
insertions(+), 8 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index e0ecbaf302..acdc5193ae 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -14,7 +14,7 @@ class CollectPlates(api.InstancePlugin): """ # Run just before CollectSubsets - order = api.CollectorOrder + 0.1025 + order = api.CollectorOrder + 0.1021 label = "Collect Plates" hosts = ["nukestudio"] families = ["clip"] @@ -36,24 +36,27 @@ class CollectPlates(api.InstancePlugin): "\"plate\"".format(instance) ) return + self.log.debug("__ subset: `{}`".format(instance.data["subset"])) + # if "audio" in instance.data["subset"]: + # return # Collect data. data = {} for key, value in instance.data.iteritems(): data[key] = value + self.log.debug("__ family: `{}`".format(family)) + self.log.debug("__ subset: `{}`".format(subset)) + data["family"] = family.lower() data["families"] = ["ftrack"] + instance.data["families"][1:] data["source"] = data["sourcePath"] - - data["subset"] = data["family"] + subset.title() - + data["subset"] = family + subset.title() data["name"] = data["subset"] + "_" + data["asset"] data["label"] = "{} - {} - ({})".format( - data['asset'], data["subset"], os.path.splitext(data["sourcePath"])[ - 1] - ) + data['asset'], data["subset"], os.path.splitext( + data["sourcePath"])[1]) if "review" in instance.data["families"]: data["label"] += " - review" diff --git a/pype/plugins/nukestudio/publish/collect_reviews.py b/pype/plugins/nukestudio/publish/collect_reviews.py index f223e5ca65..af8fd4a0e7 100644 --- a/pype/plugins/nukestudio/publish/collect_reviews.py +++ b/pype/plugins/nukestudio/publish/collect_reviews.py @@ -16,7 +16,7 @@ class CollectReviews(api.InstancePlugin): order = api.CollectorOrder + 0.1022 label = "Collect Reviews" hosts = ["nukestudio"] - families = ["clip"] + families = ["plate"] def process(self, instance): # Exclude non-tagged instances. 
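
For reference, the plate and audio collectors changed in the patches above converge on one tag-driven naming scheme: read `tag.subset` from the clip's tag metadata (falling back to "Main") and append its title-cased value to the family. A minimal, self-contained sketch of that logic; the metadata dicts are illustrative stand-ins for the `tag["metadata"]` mappings NukeStudio provides:

    def resolve_subset(tag_metadata_items, family):
        # Mirror of the collectors' naming: family + title-cased subset tag.
        subset = "Main"
        for metadata in tag_metadata_items:
            if metadata.get("tag.family", "").lower() == family:
                subset = metadata.get("tag.subset", "Main")
                break
        return family + subset.title()

    # resolve_subset([{"tag.family": "plate"}], "plate") -> "plateMain"
    # resolve_subset([{"tag.family": "audio", "tag.subset": "dialogue"}],
    #                "audio") -> "audioDialogue"
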
From 9526e0b5a3c8f4a386333ab62cd60384570f4a83 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 13:08:16 +0100
Subject: [PATCH 378/393] implemented event that can change task status on first asset version creation

---
 .../events/event_first_version_status.py | 175 ++++++++++++++++++
 1 file changed, 175 insertions(+)
 create mode 100644 pype/ftrack/events/event_first_version_status.py

diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py
new file mode 100644
index 0000000000..ac0e94c3ae
--- /dev/null
+++ b/pype/ftrack/events/event_first_version_status.py
@@ -0,0 +1,175 @@
+from pype.ftrack import BaseEvent
+
+
+class FirstVersionStatus(BaseEvent):
+
+    # WARNING Priority MUST be higher
+    # than handler in `event_version_to_task_statuses.py`
+    priority = 200
+
+    first_run = True
+    keys_enum = ["task", "task_type"]
+    # This should be set with presets
+    task_status_map = []
+
+    # EXAMPLE of `task_status_map`
+    __example_status_map__ = [{
+        # `key` specifies where to look for the name (an enumerator of `keys_enum`)
+        "key": "task",
+        # specification of name
+        "name": "compositing",
+        # Status to set to the task
+        "status": "Blocking"
+    }]
+
+    def launch(self, session, event):
+        """Set task's status for first created Asset Version."""
+
+        if not self.task_status_map:
+            return
+
+        if self.first_run:
+            self.first_run = False
+            valid_task_status_map = []
+            for item in self.task_status_map:
+                key = (item.get("key") or "").lower()
+                name = (item.get("name") or "").lower()
+                status = (item.get("status") or "").lower()
+                if not (key and name and status):
+                    self.log.warning((
+                        "Invalid item in Task -> Status mapping. {}"
+                    ).format(str(item)))
+                    continue
+
+                if key not in self.keys_enum:
+                    expected_msg = ""
+                    last_key_idx = len(self.keys_enum) - 1
+                    for idx, key in enumerate(self.keys_enum):
+                        if idx == 0:
+                            joining_part = "`{}`"
+                        elif idx == last_key_idx:
+                            joining_part = "or `{}`"
+                        else:
+                            joining_part = ", `{}`"
+                        expected_msg += joining_part.format(key)
+
+                    self.log.warning((
+                        "Invalid key `{}`. Expected: {}."
+                    ).format(key, expected_msg))
+                    continue
+
+                valid_task_status_map.append({
+                    "key": key,
+                    "name": name,
+                    "status": status
+                })
+            self.task_status_map = valid_task_status_map
+
+        entities_info = self.filter_event_ents(event)
+        if not entities_info:
+            return
+
+        entity_ids = []
+        for entity_info in entities_info:
+            entity_ids.append(entity_info["entityId"])
+
+        joined_entity_ids = ",".join(
+            ["\"{}\"".format(entity_id) for entity_id in entity_ids]
+        )
+        asset_verisons = session.query(
+            "AssetVersion where id in ({})".format(joined_entity_ids)
+        ).all()
+
+        statuses_per_type_id = {}
+
+        project_schema = None
+        for asset_verison in asset_verisons:
+            task_entity = asset_verison["task"]
+            found_item = None
+            for item in self.task_status_map:
+                if (
+                    item["key"] == "task" and
+                    task_entity["name"].lower() != item["name"]
+                ):
+                    continue
+
+                elif (
+                    item["key"] == "task_type" and
+                    task_entity["type"]["name"].lower() != item["name"]
+                ):
+                    continue
+
+                found_item = item
+                break
+
+            if not found_item:
+                continue
+
+            if project_schema is None:
+                project_schema = task_entity["project"]["project_schema"]
+
+            # Get all available statuses for Task
+            type_id = task_entity["type_id"]
+            if type_id not in statuses_per_type_id:
+                statuses = project_schema.get_statuses(
+                    "Task", task_entity["type_id"]
+                )
+
+                # map lowered status name with its object
+                statuses_per_type_id[type_id] = {
+                    status["name"].lower(): status for status in statuses
+                }
+
+            statuses_by_low_name = statuses_per_type_id[type_id]
+            new_status = statuses_by_low_name.get(found_item["status"])
+            if not new_status:
+                continue
+
+            ent_path = "/".join([ent["name"] for ent in task_entity["link"]])
+
+            try:
+                task_entity["status"] = new_status
+                session.commit()
+                self.log.debug("[ {} ] Status updated to [ {} ]".format(
+                    ent_path, new_status['name']
+                ))
+
+            except Exception:
+                session.rollback()
+                self.log.warning(
+                    "[ {} ] Status couldn't be set.".format(ent_path),
+                    exc_info=True
+                )
+
+    def filter_event_ents(self, event):
+        filtered_ents = []
+        for entity in event["data"].get("entities", []):
+            # Care only about add actions
+            if entity["action"] != "add":
+                continue
+
+            # Filter AssetVersions
+            if entity["entityType"] != "assetversion":
+                continue
+
+            entity_changes = entity.get("changes") or {}
+
+            # Check if version of Asset Version is `1`
+            version_num = entity_changes.get("version", {}).get("new")
+            if version_num != 1:
+                continue
+
+            # Skip if the Asset Version doesn't have a task
+            task_id = entity_changes.get("taskid", {}).get("new")
+            if not task_id:
+                continue
+
+            filtered_ents.append(entity)
+
+        return filtered_ents
+
+
+def register(session, plugins_presets):
+    '''Register plugin.
+    Called when used as a plugin.'''
+
+    FirstVersionStatus(session, plugins_presets).register()

From e57fecdeb34ac6dfb41d7cc75e019a32b40c5981 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 18 Feb 2020 13:08:25 +0100
Subject: [PATCH 379/393] making sure that options from capture.json are applied correctly

this will need some refactoring though
---
 pype/maya/lib.py | 25 ++++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/pype/maya/lib.py b/pype/maya/lib.py
index ec39b3556e..dafc281903 100644
--- a/pype/maya/lib.py
+++ b/pype/maya/lib.py
@@ -2176,18 +2176,29 @@ def load_capture_preset(path=None, data=None):
                4: 'nolights'}
     for key in preset[id]:
         if key == 'high_quality':
-            temp_options2['multiSampleEnable'] = True
-            temp_options2['multiSampleCount'] = 8
-            temp_options2['textureMaxResolution'] = 1024
-            temp_options2['enableTextureMaxRes'] = True
+            if preset[id][key] == True:
+                temp_options2['multiSampleEnable'] = True
+                temp_options2['multiSampleCount'] = 4
+                temp_options2['textureMaxResolution'] = 1024
+                temp_options2['enableTextureMaxRes'] = True
+                temp_options2['textureMaxResMode'] = 1
+            else:
+                temp_options2['multiSampleEnable'] = False
+                temp_options2['multiSampleCount'] = 4
+                temp_options2['textureMaxResolution'] = 512
+                temp_options2['enableTextureMaxRes'] = True
+                temp_options2['textureMaxResMode'] = 0
+
+        if key == 'ssaoEnable':
+            if preset[id][key] == True:
+                temp_options2['ssaoEnable'] = True
+            else:
+                temp_options2['ssaoEnable'] = False

         if key == 'alphaCut':
             temp_options2['transparencyAlgorithm'] = 5
             temp_options2['transparencyQuality'] = 1

-        if key == 'ssaoEnable':
-            temp_options2['ssaoEnable'] = True
-
         if key == 'headsUpDisplay':
             temp_options['headsUpDisplay'] = True

From 5a38ba950c728fc6718ae4e9ad39712b7a329119 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 13:14:22 +0100
Subject: [PATCH 380/393] validation of presets happens after registration

---
 .../events/event_first_version_status.py | 78 ++++++++++---------
 1 file changed, 40 insertions(+), 38 deletions(-)

diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py
index ac0e94c3ae..59956697b6 100644
--- a/pype/ftrack/events/event_first_version_status.py
+++ b/pype/ftrack/events/event_first_version_status.py
@@ -7,7 +7,6 @@ class FirstVersionStatus(BaseEvent):
     # than handler in `event_version_to_task_statuses.py`
     priority = 200

-    first_run = True
     keys_enum = ["task", "task_type"]
     # This should be set with presets
     task_status_map = []
@@ -22,49 +21,52 @@ class FirstVersionStatus(BaseEvent):
         "status": "Blocking"
     }]

+    def register(self, *args, **kwargs):
+        result = super(FirstVersionStatus, self).register(*args, **kwargs)
+
+        valid_task_status_map = []
+        for item in self.task_status_map:
+            key = (item.get("key") or "").lower()
+            name = (item.get("name") or "").lower()
+            status = (item.get("status") or "").lower()
+            if not (key and name and status):
+                self.log.warning((
+                    "Invalid item in Task -> Status mapping. {}"
+                ).format(str(item)))
+                continue
+
+            if key not in self.keys_enum:
+                expected_msg = ""
+                last_key_idx = len(self.keys_enum) - 1
+                for idx, key in enumerate(self.keys_enum):
+                    if idx == 0:
+                        joining_part = "`{}`"
+                    elif idx == last_key_idx:
+                        joining_part = "or `{}`"
+                    else:
+                        joining_part = ", `{}`"
+                    expected_msg += joining_part.format(key)
+
+                self.log.warning((
+                    "Invalid key `{}`. Expected: {}."
+                ).format(key, expected_msg))
+                continue
+
+            valid_task_status_map.append({
+                "key": key,
+                "name": name,
+                "status": status
+            })
+        self.task_status_map = valid_task_status_map
+
+        return result
+
     def launch(self, session, event):
         """Set task's status for first created Asset Version."""

         if not self.task_status_map:
             return

-        if self.first_run:
-            self.first_run = False
-            valid_task_status_map = []
-            for item in self.task_status_map:
-                key = (item.get("key") or "").lower()
-                name = (item.get("name") or "").lower()
-                status = (item.get("status") or "").lower()
-                if not (key and name and status):
-                    self.log.warning((
-                        "Invalid item in Task -> Status mapping. {}"
-                    ).format(str(item)))
-                    continue
-
-                if key not in self.keys_enum:
-                    expected_msg = ""
-                    last_key_idx = len(self.keys_enum) - 1
-                    for idx, key in enumerate(self.keys_enum):
-                        if idx == 0:
-                            joining_part = "`{}`"
-                        elif idx == last_key_idx:
-                            joining_part = "or `{}`"
-                        else:
-                            joining_part = ", `{}`"
-                        expected_msg += joining_part.format(key)
-
-                    self.log.warning((
-                        "Invalid key `{}`. Expected: {}."
-                    ).format(key, expected_msg))
-                    continue
-
-                valid_task_status_map.append({
-                    "key": key,
-                    "name": name,
-                    "status": status
-                })
-            self.task_status_map = valid_task_status_map
-
         entities_info = self.filter_event_ents(event)
         if not entities_info:
             return

From 96dce267e8c21bbbde068f50f657a881fbcd88bc Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 13:17:37 +0100
Subject: [PATCH 381/393] default value of `key` in status mapping is `task` to look up the task's name

---
 pype/ftrack/events/event_first_version_status.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py
index 59956697b6..c147692dc4 100644
--- a/pype/ftrack/events/event_first_version_status.py
+++ b/pype/ftrack/events/event_first_version_status.py
@@ -14,6 +14,7 @@ class FirstVersionStatus(BaseEvent):
     # EXAMPLE of `task_status_map`
     __example_status_map__ = [{
         # `key` specifies where to look for the name (an enumerator of `keys_enum`)
+        # By default is set to "task"
         "key": "task",
         # specification of name
@@ -26,7 +27,7 @@ class FirstVersionStatus(BaseEvent):

         valid_task_status_map = []
         for item in self.task_status_map:
-            key = (item.get("key") or "").lower()
+            key = (item.get("key") or "task").lower()
             name = (item.get("name") or "").lower()
             status = (item.get("status") or "").lower()
             if not (key and name and status):

From cc6d70f8498a364c5ce643efd683e1427ca46179 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 13:56:48 +0100
Subject: [PATCH 382/393] integrate ftrack note adds intent at the beginning of comment (if set)

---
 pype/plugins/ftrack/publish/integrate_ftrack_note.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index f7fb5addbb..87016684ed 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -18,7 +18,17 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             self.log.info("Comment is not set.")
             return

-        self.log.debug("Comment is set to {}".format(comment))
+        self.log.debug("Comment is set to `{}`".format(comment))
+
+        intent = instance.context.data.get("intent")
+        if intent:
+            msg = "Intent is set to `{}` and was added to comment.".format(
+                intent
+            )
+            comment = "{}: {}".format(intent, comment)
+        else:
+            msg = "Intent is not set."
+        self.log.debug(msg)

         asset_versions_key = "ftrackIntegratedAssetVersions"
         asset_versions = instance.data.get(asset_versions_key)

From a3ad40e34a729f79902cf653beece81d0617b270 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 14:05:02 +0100
Subject: [PATCH 383/393] added possibility to set note with intent template through presets

---
 pype/plugins/ftrack/publish/integrate_ftrack_note.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index 87016684ed..bab7d1ecf5 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -10,6 +10,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder + 0.4999
     label = "Integrate Ftrack note"
     families = ["ftrack"]
+    # Can be set in presets (Allows only `intent` and `comment` keys)
+    note_with_intent_template = "{intent}: {comment}"
     optional = True

     def process(self, instance):
@@ -25,7 +27,10 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             msg = "Intent is set to `{}` and was added to comment.".format(
                 intent
             )
-            comment = "{}: {}".format(intent, comment)
+            comment = note_with_intent_template.format(**{
+                "intent": intent,
+                "comment": comment
+            })
         else:
             msg = "Intent is not set."
             self.log.debug(msg)

From 7296e86475077d014085be296e7747b7e01fbb06 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 18 Feb 2020 14:49:11 +0100
Subject: [PATCH 384/393] fix forgotten .values() call

---
 pype/plugins/global/publish/integrate_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 8d41aa7907..a2343ce8a9 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -453,7 +453,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         # Remove old representations if there are any (before insertion of new)
         if existing_repres:
             repre_ids_to_remove = []
-            for repre in existing_repres.values():
+            for repre in existing_repres:
                 repre_ids_to_remove.append(repre["_id"])
             io.delete_many({"_id": {"$in": repre_ids_to_remove}})

From 79ad22b5fbbd49afaab132a16b270ab35bc0748e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 15:15:23 +0100
Subject: [PATCH 385/393] added a few logs to the event

---
 pype/ftrack/events/event_first_version_status.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py
index c147692dc4..2447a20c3e 100644
--- a/pype/ftrack/events/event_first_version_status.py
+++ b/pype/ftrack/events/event_first_version_status.py
@@ -58,7 +58,12 @@ class FirstVersionStatus(BaseEvent):
                 "name": name,
                 "status": status
             })
+
         self.task_status_map = valid_task_status_map
+        if not self.task_status_map:
+            self.log.warning((
+                "Event handler `{}` doesn't have presets set."
+            ).format(self.__class__.__name__))

         return result

@@ -123,13 +128,16 @@ class FirstVersionStatus(BaseEvent):
                     status["name"].lower(): status for status in statuses
                 }

+            ent_path = "/".join([ent["name"] for ent in task_entity["link"]])
+
             statuses_by_low_name = statuses_per_type_id[type_id]
             new_status = statuses_by_low_name.get(found_item["status"])
             if not new_status:
+                self.log.warning("Status `{}` was not found for `{}`.".format(
+                    found_item["status"], ent_path
+                ))
                 continue

-            ent_path = "/".join([ent["name"] for ent in task_entity["link"]])
-
             try:

From 79db4af6254ddf606fbb429106568bbe6f01d097 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 15:52:45 +0100
Subject: [PATCH 386/393] status is not changed on the task but on the asset version

---
 .../events/event_first_version_status.py | 38 ++++++++++---------
 1 file changed, 20 insertions(+), 18 deletions(-)

diff --git a/pype/ftrack/events/event_first_version_status.py b/pype/ftrack/events/event_first_version_status.py
index 2447a20c3e..2e2b98ad5f 100644
--- a/pype/ftrack/events/event_first_version_status.py
+++ b/pype/ftrack/events/event_first_version_status.py
@@ -18,7 +18,7 @@ class FirstVersionStatus(BaseEvent):
         "key": "task",
         # specification of name
         "name": "compositing",
-        # Status to set to the task
+        # Status to set to the asset version
         "status": "Blocking"
     }]
@@ -84,15 +84,15 @@ class FirstVersionStatus(BaseEvent):
         joined_entity_ids = ",".join(
             ["\"{}\"".format(entity_id) for entity_id in entity_ids]
         )
-        asset_verisons = session.query(
+        asset_versions = session.query(
             "AssetVersion where id in ({})".format(joined_entity_ids)
         ).all()

-        statuses_per_type_id = {}
+        asset_version_statuses = None

         project_schema = None
-        for asset_verison in asset_verisons:
-            task_entity = asset_verison["task"]
+        for asset_version in asset_versions:
+            task_entity = asset_version["task"]
             found_item = None
             for item in self.task_status_map:
                 if (
@@ -117,29 +117,31 @@ class FirstVersionStatus(BaseEvent):
                 project_schema = task_entity["project"]["project_schema"]

             # Get all available statuses for Task
-            type_id = task_entity["type_id"]
-            if type_id not in statuses_per_type_id:
-                statuses = project_schema.get_statuses(
-                    "Task", task_entity["type_id"]
-                )
+            if asset_version_statuses is None:
+                statuses = project_schema.get_statuses("AssetVersion")

                 # map lowered status name with its object
-                statuses_per_type_id[type_id] = {
+                asset_version_statuses = {
                     status["name"].lower(): status for status in statuses
                 }

-            ent_path = "/".join([ent["name"] for ent in task_entity["link"]])
+            ent_path = "/".join(
+                [ent["name"] for ent in task_entity["link"]] +
+                [
+                    str(asset_version["asset"]["name"]),
+                    str(asset_version["version"])
+                ]
+            )

-            statuses_by_low_name = statuses_per_type_id[type_id]
-            new_status = statuses_by_low_name.get(found_item["status"])
+            new_status = asset_version_statuses.get(found_item["status"])
             if not new_status:
-                self.log.warning("Status `{}` was not found for `{}`.".format(
-                    found_item["status"], ent_path
-                ))
+                self.log.warning(
+                    "AssetVersion doesn't have status `{}`."
+                    .format(found_item["status"])
+                )
                 continue

             try:
-                task_entity["status"] = new_status
+                asset_version["status"] = new_status
                 session.commit()
                 self.log.debug("[ {} ] Status updated to [ {} ]".format(
                     ent_path, new_status['name']
                 ))

From d98cb1c2c49a62dfa82350f26168a6f887617454 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 16:20:11 +0100
Subject: [PATCH 387/393] fix template access

---
 pype/plugins/ftrack/publish/integrate_ftrack_note.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index bab7d1ecf5..38f7486322 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -27,7 +27,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             msg = "Intent is set to `{}` and was added to comment.".format(
                 intent
             )
-            comment = note_with_intent_template.format(**{
+            comment = self.note_with_intent_template.format(**{
                 "intent": intent,
                 "comment": comment
             })

From f59f8b142696f32748296953887af683e2d9aaad Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 17:15:14 +0100
Subject: [PATCH 388/393] added possibility to add labels to notes

---
 .../ftrack/publish/integrate_ftrack_note.py | 24 ++++++++++++++++---
 1 file changed, 21 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_note.py b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
index 38f7486322..2621ca96ab 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_note.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_note.py
@@ -10,10 +10,14 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
     order = pyblish.api.IntegratorOrder + 0.4999
     label = "Integrate Ftrack note"
     families = ["ftrack"]
-    # Can be set in presets (Allows only `intent` and `comment` keys)
-    note_with_intent_template = "{intent}: {comment}"
     optional = True

+    # Can be set in presets:
+    # - Allows only `intent` and `comment` keys
+    note_with_intent_template = "{intent}: {comment}"
+    # - note label must exist in Ftrack
+    note_labels = []
+
     def process(self, instance):
         comment = (instance.context.data.get("comment") or "").strip()
         if not comment:
@@ -52,8 +56,22 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
             )
         )

+        labels = []
+        if self.note_labels:
+            all_labels = session.query("NoteLabel").all()
+            labels_by_low_name = {lab["name"].lower(): lab for lab in all_labels}
+            for _label in self.note_labels:
+                label = labels_by_low_name.get(_label.lower())
+                if not label:
+                    self.log.warning(
+                        "Note Label `{}` was not found.".format(_label)
+                    )
+                    continue
+
+                labels.append(label)
+
         for asset_version in asset_versions:
-            asset_version.create_note(comment, author=user)
+            asset_version.create_note(comment, author=user, labels=labels)

         try:
             session.commit()

From 60de315ddfb269385fccc48f590828fe3027c2b4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:43:28 +0100
Subject: [PATCH 389/393] fixed class name

---
 pype/ftrack/actions/action_delete_old_versions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index 46f3e60d77..e418a21e53 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -482,4 +482,4 @@ class DeleteOldVersions(BaseAction):
 def register(session, plugins_presets={}):
     '''Register plugin.
     Called when used as a plugin.'''

-    PrepareForArchivation(session, plugins_presets).register()
+    DeleteOldVersions(session, plugins_presets).register()

From d12fe99d1827591747b5f58a279b073e74fe82b3 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:43:41 +0100
Subject: [PATCH 390/393] default version number is 2

---
 pype/ftrack/actions/action_delete_old_versions.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index e418a21e53..a546f380a4 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -102,15 +102,14 @@ class DeleteOldVersions(BaseAction):
         items.append({
             "type": "label",
             "value": (
-                "NOTE: We do recommend to keep 2"
-                " versions (even if default is 1)."
+                "NOTE: We recommend keeping 2 versions."
             )
         })
         items.append({
             "type": "number",
             "name": "last_versions_count",
             "label": "Versions",
-            "value": 1
+            "value": 2
         })

         items.append(self.splitter_item)

From 7692de229b5d0b337eeb93ac269531785f0ad4dd Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:44:06 +0100
Subject: [PATCH 391/393] remove publish folder is set to False by default and changed `you` to `You`

---
 pype/ftrack/actions/action_delete_old_versions.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index a546f380a4..bbc5dc4b73 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -130,8 +130,8 @@ class DeleteOldVersions(BaseAction):
         items.append({
             "type": "boolean",
             "name": "force_delete_publish_folder",
-            "label": "Are you sure?",
-            "value": True
+            "label": "Are You sure?",
+            "value": False
         })

         return {

From dd83b585335557f854fea94a0b7a4eea451c7702 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:52:37 +0100
Subject: [PATCH 392/393] changed filter variable to update_query

---
 pype/ftrack/actions/action_delete_old_versions.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index bbc5dc4b73..c566198522 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -352,9 +352,9 @@ class DeleteOldVersions(BaseAction):
             if version_tags == orig_version_tags:
                 continue

-            filter = {"_id": version["_id"]}
+            update_query = {"_id": version["_id"]}
             update_data = {"$set": {"data.tags": version_tags}}
-            mongo_changes_bulk.append(UpdateOne(filter, update_data))
+            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

         if mongo_changes_bulk:
             self.dbcon.bulk_write(mongo_changes_bulk)

From da4c9b5a4f7338cc2bc5a322b73377cc44a21a28 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 18 Feb 2020 18:55:43 +0100
Subject: [PATCH 393/393] changed elif to if condition

---
 pype/ftrack/actions/action_delete_old_versions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index c566198522..bec21dae96 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -57,7 +57,7 @@ class DeleteOldVersions(BaseAction):
                 "message": msg
             }

-        elif not os.path.exists(root):
+        if not os.path.exists(root):
             msg = "Root path does not exist \"{}\".".format(str(root))
             items.append({
                 "type": "label",
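
For context, the `update_query` rename in PATCH 392 above follows the standard pymongo bulk-write pattern (and stops shadowing Python's built-in `filter`). A minimal, self-contained sketch of that pattern, assuming a pymongo collection of version documents; the connection string, database, collection, and tag value are illustrative:

    from pymongo import MongoClient, UpdateOne

    dbcon = MongoClient("mongodb://localhost:27017")["avalon"]["project"]

    mongo_changes_bulk = []
    for version in dbcon.find({"type": "version"}):
        # Append an illustrative tag; the real action builds `version_tags`
        # first and skips documents whose tags did not change.
        version_tags = version.get("data", {}).get("tags", []) + ["deleted"]
        update_query = {"_id": version["_id"]}
        update_data = {"$set": {"data.tags": version_tags}}
        mongo_changes_bulk.append(UpdateOne(update_query, update_data))

    if mongo_changes_bulk:
        dbcon.bulk_write(mongo_changes_bulk)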