From ccbef046058b20ada3a609c794705e3cce7da0b5 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 19 Nov 2019 22:41:27 +0100
Subject: [PATCH 001/195] preliminary copy of blender integration from sonar

---
 pype/blender/__init__.py                      |  34 +++
 pype/blender/action.py                        |  43 +++
 pype/blender/plugin.py                        | 136 +++++++++
 .../plugins/blender/create/submarine_model.py |  35 +++
 pype/plugins/blender/load/submarine_model.py  | 265 ++++++++++++++++++
 .../blender/publish/collect_current_file.py   |  16 ++
 pype/plugins/blender/publish/collect_model.py |  52 ++++
 pype/plugins/blender/publish/extract_model.py |  34 +++
 .../blender/publish/validate_mesh_has_uv.py   |  47 ++++
 .../validate_mesh_no_negative_scale.py        |  31 ++
 res/app_icons/blender.png                     | Bin 0 -> 51122 bytes
 11 files changed, 693 insertions(+)
 create mode 100644 pype/blender/__init__.py
 create mode 100644 pype/blender/action.py
 create mode 100644 pype/blender/plugin.py
 create mode 100644 pype/plugins/blender/create/submarine_model.py
 create mode 100644 pype/plugins/blender/load/submarine_model.py
 create mode 100644 pype/plugins/blender/publish/collect_current_file.py
 create mode 100644 pype/plugins/blender/publish/collect_model.py
 create mode 100644 pype/plugins/blender/publish/extract_model.py
 create mode 100644 pype/plugins/blender/publish/validate_mesh_has_uv.py
 create mode 100644 pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
 create mode 100644 res/app_icons/blender.png

diff --git a/pype/blender/__init__.py b/pype/blender/__init__.py
new file mode 100644
index 0000000000..8a29917e40
--- /dev/null
+++ b/pype/blender/__init__.py
@@ -0,0 +1,34 @@
+import logging
+from pathlib import Path
+import os
+
+import bpy
+
+from avalon import api as avalon
+from pyblish import api as pyblish
+
+from .plugin import AssetLoader
+
+logger = logging.getLogger("pype.blender")
+
+PARENT_DIR = os.path.dirname(__file__)
+PACKAGE_DIR = os.path.dirname(PARENT_DIR)
+PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "blender", "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "blender", "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "blender", "create")
+
+
+def install():
+    """Install Blender configuration for Avalon."""
+    pyblish.register_plugin_path(str(PUBLISH_PATH))
+    avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
+    avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))
+
+
+def uninstall():
+    """Uninstall Blender configuration for Avalon."""
+    pyblish.deregister_plugin_path(str(PUBLISH_PATH))
+    avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
+    avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))
diff --git a/pype/blender/action.py b/pype/blender/action.py
new file mode 100644
index 0000000000..948123c3c5
--- /dev/null
+++ b/pype/blender/action.py
@@ -0,0 +1,43 @@
+import bpy
+
+import pyblish.api
+
+from ..action import get_errored_instances_from_context
+
+
+class SelectInvalidAction(pyblish.api.Action):
+    """Select invalid objects in Blender when a publish plug-in failed."""
+    label = "Select Invalid"
+    on = "failed"
+    icon = "search"
+
+    def process(self, context, plugin):
+        errored_instances = get_errored_instances_from_context(context)
+        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
+
+        # Get the invalid nodes for the plug-ins
+        self.log.info("Finding invalid nodes...")
+        invalid = list()
+        for instance in instances:
+            invalid_nodes = plugin.get_invalid(instance)
+            if invalid_nodes:
+                if isinstance(invalid_nodes, (list, tuple)):
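+                    # Gather every invalid object the plug-in reported.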
+                    invalid.extend(invalid_nodes)
+                else:
+                    self.log.warning("Failed plug-in doesn't have any selectable objects.")
+
+        # Make sure every node is only processed once
+        invalid = list(set(invalid))
+
+        bpy.ops.object.select_all(action='DESELECT')
+        if invalid:
+            invalid_names = [obj.name for obj in invalid]
+            self.log.info("Selecting invalid objects: %s", ", ".join(invalid_names))
+            # Select the objects and also make the last one the active object.
+            for obj in invalid:
+                obj.select_set(True)
+            bpy.context.view_layer.objects.active = invalid[-1]
+
+        else:
+            self.log.info("No invalid nodes found.")
diff --git a/pype/blender/plugin.py b/pype/blender/plugin.py
new file mode 100644
index 0000000000..ad5a259785
--- /dev/null
+++ b/pype/blender/plugin.py
@@ -0,0 +1,136 @@
+"""Shared functionality for pipeline plugins for Blender."""
+
+from pathlib import Path
+from typing import Dict, List, Optional
+
+import bpy
+
+from avalon import api
+
+VALID_EXTENSIONS = [".blend"]
+
+
+def model_name(asset: str, subset: str, namespace: Optional[str] = None) -> str:
+    """Return a consistent name for a model asset."""
+    name = f"{asset}_{subset}"
+    if namespace:
+        name = f"{namespace}:{name}"
+    return name
+
+
+class AssetLoader(api.Loader):
+    """A basic AssetLoader for Blender
+
+    This will implement the basic logic for linking/appending assets
+    into another Blender scene.
+
+    The `update` method should be implemented by a sub-class, because
+    it's different for different types (e.g. model, rig, animation,
+    etc.).
+    """
+
+    @staticmethod
+    def _get_instance_empty(instance_name: str, nodes: List) -> Optional[bpy.types.Object]:
+        """Get the 'instance empty' that holds the collection instance."""
+        for node in nodes:
+            if not isinstance(node, bpy.types.Object):
+                continue
+            if (node.type == 'EMPTY' and node.instance_type == 'COLLECTION'
+                    and node.instance_collection and node.name == instance_name):
+                return node
+        return None
+
+    @staticmethod
+    def _get_instance_collection(instance_name: str, nodes: List) -> Optional[bpy.types.Collection]:
+        """Get the 'instance collection' (container) for this asset."""
+        for node in nodes:
+            if not isinstance(node, bpy.types.Collection):
+                continue
+            if node.name == instance_name:
+                return node
+        return None
+
+    @staticmethod
+    def _get_library_from_container(container: bpy.types.Collection) -> bpy.types.Library:
+        """Find the library file from the container.
+
+        It traverses the objects in this collection, checks that they all
+        come from a single library, and returns that library.
+
+        Warning:
+            No nested collections are supported at the moment!
+        """
+        assert not container.children, "Nested collections are not supported."
+        assert container.objects, "The collection doesn't contain any objects."
+        libraries = set()
+        for obj in container.objects:
+            assert obj.library, f"'{obj.name}' is not linked."
+            libraries.add(obj.library)
+
+        assert len(libraries) == 1, f"'{container.name}' contains objects from more than 1 library."
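+        # The assertions above guarantee exactly one shared library remains.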
+
+        return list(libraries)[0]
+
+    def process_asset(self,
+                      context: dict,
+                      name: str,
+                      namespace: Optional[str] = None,
+                      options: Optional[Dict] = None):
+        """Must be implemented by a sub-class"""
+        raise NotImplementedError("Must be implemented by a sub-class")
+
+    def load(self,
+             context: dict,
+             name: Optional[str] = None,
+             namespace: Optional[str] = None,
+             options: Optional[Dict] = None) -> Optional[bpy.types.Collection]:
+        """Load asset via database
+
+        Arguments:
+            context: Full parenthood of representation to load
+            name: Use pre-defined name
+            namespace: Use pre-defined namespace
+            options: Additional settings dictionary
+        """
+        # TODO (jasper): make it possible to add the asset several times by
+        # just re-using the collection
+        assert Path(self.fname).exists(), f"{self.fname} doesn't exist."
+
+        self.process_asset(
+            context=context,
+            name=name,
+            namespace=namespace,
+            options=options,
+        )
+
+        # Only containerise if anything was loaded by the Loader.
+        nodes = self[:]
+        if not nodes:
+            return None
+
+        # Only containerise if it's not already a collection from a .blend file.
+        representation = context["representation"]["name"]
+        if representation != "blend":
+            from avalon.blender.pipeline import containerise
+            return containerise(
+                name=name,
+                namespace=namespace,
+                nodes=nodes,
+                context=context,
+                loader=self.__class__.__name__,
+            )
+
+        asset = context["asset"]["name"]
+        subset = context["subset"]["name"]
+        instance_name = model_name(asset, subset, namespace)
+
+        return self._get_instance_collection(instance_name, nodes)
+
+    def update(self, container: Dict, representation: Dict):
+        """Must be implemented by a sub-class"""
+        raise NotImplementedError("Must be implemented by a sub-class")
+
+    def remove(self, container: Dict) -> bool:
+        """Must be implemented by a sub-class"""
+        raise NotImplementedError("Must be implemented by a sub-class")
diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py
new file mode 100644
index 0000000000..29fcae8fbf
--- /dev/null
+++ b/pype/plugins/blender/create/submarine_model.py
@@ -0,0 +1,35 @@
+"""Create a model asset."""
+
+import bpy
+
+import sonar.blender
+from avalon import api
+from avalon.blender import Creator, lib
+
+
+class CreateModel(Creator):
+    """Polygonal static geometry"""
+
+    name = "model_default"
+    label = "Model"
+    family = "model"
+    icon = "cube"
+
+    def process(self):
+
+        asset = self.data["asset"]
+        subset = self.data["subset"]
+        name = sonar.blender.plugin.model_name(asset, subset)
+        collection = bpy.data.collections.new(name=name)
+        bpy.context.scene.collection.children.link(collection)
+        self.data['task'] = api.Session.get('AVALON_TASK')
+        lib.imprint(collection, self.data)
+
+        if (self.options or {}).get("useSelection"):
+            for obj in bpy.context.selected_objects:
+                collection.objects.link(obj)
+
+        if bpy.data.workspaces.get('Modeling'):
+            bpy.context.window.workspace = bpy.data.workspaces['Modeling']
+
+        return collection
diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py
new file mode 100644
index 0000000000..4535b29065
--- /dev/null
+++ b/pype/plugins/blender/load/submarine_model.py
@@ -0,0 +1,265 @@
+"""Load a model asset in Blender."""
+
+import logging
+from pathlib import Path
+from pprint import pformat
+from typing import Dict, List, Optional
+
+import avalon.blender.pipeline
+import bpy
+import pype.blender
+from avalon import api
+
+logger = logging.getLogger("pype").getChild("blender").getChild("load_model")
+
+
+class BlendModelLoader(pype.blender.AssetLoader):
+    """Load models from a .blend file.
+
+    Because they come from a .blend file we can simply link the collection that
+    contains the model. There is no further need to 'containerise' it.
+
+    Warning:
+        Loading the same asset more than once is not properly supported at the
+        moment.
+    """
+
+    families = ["model"]
+    representations = ["blend"]
+
+    label = "Link Model"
+    icon = "code-fork"
+    color = "orange"
+
+    @staticmethod
+    def _get_lib_collection(name: str, libpath: Path) -> Optional[bpy.types.Collection]:
+        """Find the collection(s) with name, loaded from libpath.
+
+        Note:
+            It is assumed that only 1 matching collection is found.
+        """
+        for collection in bpy.data.collections:
+            if collection.name != name:
+                continue
+            if collection.library is None:
+                continue
+            if not collection.library.filepath:
+                continue
+            collection_lib_path = str(Path(bpy.path.abspath(collection.library.filepath)).resolve())
+            normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve())
+            if collection_lib_path == normalized_libpath:
+                return collection
+        return None
+
+    @staticmethod
+    def _collection_contains_object(collection: bpy.types.Collection, object: bpy.types.Object) -> bool:
+        """Check if the collection contains the object."""
+        for obj in collection.objects:
+            if obj == object:
+                return True
+        return False
+
+    def process_asset(self,
+                      context: dict,
+                      name: str,
+                      namespace: Optional[str] = None,
+                      options: Optional[Dict] = None) -> Optional[List]:
+        """
+        Arguments:
+            name: Use pre-defined name
+            namespace: Use pre-defined namespace
+            context: Full parenthood of representation to load
+            options: Additional settings dictionary
+        """
+
+        libpath = self.fname
+        asset = context["asset"]["name"]
+        subset = context["subset"]["name"]
+        lib_container = pype.blender.plugin.model_name(asset, subset)
+        container_name = pype.blender.plugin.model_name(asset, subset, namespace)
+        relative = bpy.context.preferences.filepaths.use_relative_paths
+
+        with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to):
+            data_to.collections = [lib_container]
+
+        scene = bpy.context.scene
+        instance_empty = bpy.data.objects.new(container_name, None)
+        if not instance_empty.get("avalon"):
+            instance_empty["avalon"] = dict()
+        avalon_info = instance_empty["avalon"]
+        avalon_info.update({"container_name": container_name})
+        scene.collection.objects.link(instance_empty)
+        instance_empty.instance_type = 'COLLECTION'
+        container = bpy.data.collections[lib_container]
+        container.name = container_name
+        instance_empty.instance_collection = container
+        container.make_local()
+        avalon.blender.pipeline.containerise_existing(
+            container,
+            name,
+            namespace,
+            context,
+            self.__class__.__name__,
+        )
+
+        nodes = list(container.objects)
+        nodes.append(container)
+        nodes.append(instance_empty)
+        self[:] = nodes
+        return nodes
+
+    def update(self, container: Dict, representation: Dict):
+        """Update the loaded asset.
+
+        This will remove all objects of the current collection, load the new
+        ones and add them to the collection.
+        If the objects of the collection are used in another collection they
+        will not be removed, only unlinked. Normally this should not be the
+        case though.
+
+        Warning:
+            No nested collections are supported at the moment!
+        """
+        collection = bpy.data.collections.get(container["objectName"])
+        libpath = Path(api.get_representation_path(representation))
+        extension = libpath.suffix.lower()
+
+        logger.debug(
+            "Container: %s\nRepresentation: %s",
+            pformat(container, indent=2),
+            pformat(representation, indent=2),
+        )
+
+        assert collection, f"The asset is not loaded: {container['objectName']}"
+        assert not (collection.children), "Nested collections are not supported."
+        assert libpath, (f"No existing library file found for {container['objectName']}")
+        assert libpath.is_file(), f"The file doesn't exist: {libpath}"
+        assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}"
+        collection_libpath = self._get_library_from_container(collection).filepath
+        normalized_collection_libpath = str(Path(bpy.path.abspath(collection_libpath)).resolve())
+        normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve())
+        logger.debug(
+            "normalized_collection_libpath:\n  %s\nnormalized_libpath:\n  %s",
+            normalized_collection_libpath,
+            normalized_libpath,
+        )
+        if normalized_collection_libpath == normalized_libpath:
+            logger.info("Library already loaded, not updating...")
+            return
+        # Let Blender's garbage collection take care of removing the library
+        # itself after removing the objects.
+        objects_to_remove = set()
+        collection_objects = list()
+        collection_objects[:] = collection.objects
+        for obj in collection_objects:
+            # Unlink every object
+            collection.objects.unlink(obj)
+            remove_obj = True
+            for coll in [coll for coll in bpy.data.collections if coll != collection]:
+                if coll.objects and self._collection_contains_object(coll, obj):
+                    remove_obj = False
+            if remove_obj:
+                objects_to_remove.add(obj)
+        for obj in objects_to_remove:
+            # Only delete objects that are not used elsewhere
+            bpy.data.objects.remove(obj)
+
+        instance_empties = [obj for obj in collection.users_dupli_group if obj.name in collection.name]
+        if instance_empties:
+            instance_empty = instance_empties[0]
+            container_name = instance_empty["avalon"]["container_name"]
+            relative = bpy.context.preferences.filepaths.use_relative_paths
+            with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to):
+                data_to.collections = [container_name]
+            new_collection = self._get_lib_collection(container_name, libpath)
+            if new_collection is None:
+                raise ValueError(f"A matching collection '{container_name}' "
+                                 f"should have been found in: {libpath}")
+            for obj in new_collection.objects:
+                collection.objects.link(obj)
+            bpy.data.collections.remove(new_collection)
+        # Update the representation on the collection
+        avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY]
+        avalon_prop["representation"] = str(representation["_id"])
+
+    def remove(self, container: Dict) -> bool:
+        """Remove an existing container from a Blender scene.
+
+        Arguments:
+            container (avalon-core:container-1.0): Container to remove,
+                from `host.ls()`.
+
+        Returns:
+            bool: Whether the container was deleted.
+
+        Warning:
+            No nested collections are supported at the moment!
+        """
+        collection = bpy.data.collections.get(container["objectName"])
+        if not collection:
+            return False
+        assert not (collection.children), "Nested collections are not supported."
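+        # Remove the instancer empties together with the objects the
+        # collection contains, then the collection itself.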
+        instance_parents = list(collection.users_dupli_group)
+        instance_objects = list(collection.objects)
+        for obj in instance_objects + instance_parents:
+            bpy.data.objects.remove(obj)
+        bpy.data.collections.remove(collection)
+
+        return True
+
+
+class CacheModelLoader(pype.blender.AssetLoader):
+    """Load cache models.
+
+    Stores the imported asset in a collection named after the asset.
+
+    Note:
+        At least for now it only supports Alembic files.
+    """
+
+    families = ["model"]
+    representations = ["abc"]
+
+    label = "Link Model"
+    icon = "code-fork"
+    color = "orange"
+
+    def process_asset(self,
+                      context: dict,
+                      name: str,
+                      namespace: Optional[str] = None,
+                      options: Optional[Dict] = None) -> Optional[List]:
+        """
+        Arguments:
+            name: Use pre-defined name
+            namespace: Use pre-defined namespace
+            context: Full parenthood of representation to load
+            options: Additional settings dictionary
+        """
+        raise NotImplementedError("Loading of Alembic files is not yet implemented.")
+        # TODO (jasper): implement Alembic import.
+
+        libpath = self.fname
+        asset = context["asset"]["name"]
+        subset = context["subset"]["name"]
+        # TODO (jasper): evaluate use of namespace which is 'alien' to Blender.
+        lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace)
+        relative = bpy.context.preferences.filepaths.use_relative_paths
+
+        with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to):
+            data_to.collections = [lib_container]
+
+        scene = bpy.context.scene
+        instance_empty = bpy.data.objects.new(container_name, None)
+        scene.collection.objects.link(instance_empty)
+        instance_empty.instance_type = 'COLLECTION'
+        collection = bpy.data.collections[lib_container]
+        collection.name = container_name
+        instance_empty.instance_collection = collection
+
+        nodes = list(collection.objects)
+        nodes.append(collection)
+        nodes.append(instance_empty)
+        self[:] = nodes
+        return nodes
diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py
new file mode 100644
index 0000000000..a097c72047
--- /dev/null
+++ b/pype/plugins/blender/publish/collect_current_file.py
@@ -0,0 +1,16 @@
+import bpy
+
+import pyblish.api
+
+
+class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
+    """Inject the current working file into context"""
+
+    order = pyblish.api.CollectorOrder - 0.5
+    label = "Blender Current File"
+    hosts = ['blender']
+
+    def process(self, context):
+        """Inject the current working file"""
+        current_file = bpy.data.filepath
+        context.data['currentFile'] = current_file
diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py
new file mode 100644
index 0000000000..c60402f9ca
--- /dev/null
+++ b/pype/plugins/blender/publish/collect_model.py
@@ -0,0 +1,52 @@
+import typing
+from typing import Generator
+
+import bpy
+
+import avalon.api
+import pyblish.api
+from avalon.blender.pipeline import AVALON_PROPERTY
+
+
+class CollectModel(pyblish.api.ContextPlugin):
+    """Collect the data of a model."""
+
+    hosts = ["blender"]
+    label = "Collect Model"
+    order = pyblish.api.CollectorOrder
+
+    @staticmethod
+    def get_model_collections() -> Generator:
+        """Return all 'model' collections.
+
+        Check if the family is 'model' and if it doesn't have the
+        representation set. If the representation is set, it is a loaded model
+        and we don't want to publish it.
+        """
+        for collection in bpy.data.collections:
+            avalon_prop = collection.get(AVALON_PROPERTY) or dict()
+            if (avalon_prop.get('family') == 'model'
+                    and not avalon_prop.get('representation')):
+                yield collection
+
+    def process(self, context):
+        """Collect the models from the current Blender scene."""
+        collections = self.get_model_collections()
+        for collection in collections:
+            avalon_prop = collection[AVALON_PROPERTY]
+            asset = avalon_prop['asset']
+            family = avalon_prop['family']
+            subset = avalon_prop['subset']
+            task = avalon_prop['task']
+            name = f"{asset}_{subset}"
+            instance = context.create_instance(
+                name=name,
+                family=family,
+                subset=subset,
+                asset=asset,
+                task=task,
+            )
+            members = list(collection.objects)
+            members.append(collection)
+            instance[:] = members
+            self.log.debug(instance.data)
diff --git a/pype/plugins/blender/publish/extract_model.py b/pype/plugins/blender/publish/extract_model.py
new file mode 100644
index 0000000000..75ec33fb27
--- /dev/null
+++ b/pype/plugins/blender/publish/extract_model.py
@@ -0,0 +1,34 @@
+from pathlib import Path
+import avalon.blender.workio
+
+import sonar.api
+
+
+class ExtractModel(sonar.api.Extractor):
+    """Extract as model."""
+
+    label = "Model"
+    hosts = ["blender"]
+    families = ["model"]
+    optional = True
+
+    def process(self, instance):
+        # Define extract output file path
+        stagingdir = Path(self.staging_dir(instance))
+        filename = f"{instance.name}.blend"
+        filepath = str(stagingdir / filename)
+
+        # Perform extraction
+        self.log.info("Performing extraction...")
+
+        # Just save the file to a temporary location. At least for now it's no
+        # problem to have (possibly) extra stuff in the file.
+        avalon.blender.workio.save_file(filepath, copy=True)
+
+        # Store reference for integration
+        if "files" not in instance.data:
+            instance.data["files"] = list()
+
+        instance.data["files"].append(filename)
+
+        self.log.info("Extracted instance '%s' to: %s", instance.name, filepath)
diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py
new file mode 100644
index 0000000000..79a42a11d5
--- /dev/null
+++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py
@@ -0,0 +1,47 @@
+from typing import List
+
+import bpy
+
+import pyblish.api
+import sonar.blender.action
+
+
+class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
+    """Validate that the current mesh has UVs."""
+
+    order = pyblish.api.ValidatorOrder
+    hosts = ["blender"]
+    families = ["model"]
+    category = "geometry"
+    label = "Mesh Has UVs"
+    actions = [sonar.blender.action.SelectInvalidAction]
+    optional = True
+
+    @staticmethod
+    def has_uvs(obj: bpy.types.Object) -> bool:
+        """Check if an object has UVs."""
+        if not obj.data.uv_layers:
+            return False
+        for uv_layer in obj.data.uv_layers:
+            for polygon in obj.data.polygons:
+                for loop_index in polygon.loop_indices:
+                    if not uv_layer.data[loop_index].uv:
+                        return False
+
+        return True
+
+    @classmethod
+    def get_invalid(cls, instance) -> List:
+        invalid = []
+        # TODO (jasper): only check objects in the collection that will be published?
+        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
+            # Make sure we are in object mode.
+ bpy.ops.object.mode_set(mode='OBJECT') + if not cls.has_uvs(obj): + invalid.append(obj) + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}") diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py new file mode 100644 index 0000000000..b2a927a2ed --- /dev/null +++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py @@ -0,0 +1,31 @@ +from typing import List + +import bpy + +import pyblish.api +import sonar.blender.action + + +class ValidateMeshNoNegativeScale(pyblish.api.Validator): + """Ensure that meshes don't have a negative scale.""" + + order = pyblish.api.ValidatorOrder + hosts = ["blender"] + families = ["model"] + label = "Mesh No Negative Scale" + actions = [sonar.blender.action.SelectInvalidAction] + + @staticmethod + def get_invalid(instance) -> List: + invalid = [] + # TODO (jasper): only check objects in the collection that will be published? + for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + if any(v < 0 for v in obj.scale): + invalid.append(obj) + + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}") diff --git a/res/app_icons/blender.png b/res/app_icons/blender.png new file mode 100644 index 0000000000000000000000000000000000000000..6070a51fae3da0655d1bc14156e8359428d4f6f9 GIT binary patch literal 51122 zcmd?R2U}EG(>A)g$sphmBnKr)kc^UqHj$MqQ9uC^0m+Dx8WoY8gGf+8P$VN!ghq*i zARswPl$>+uvo_BAJ@0eA^AFBkbIlCWd#_p*?y9=0dV;iWC{vO%lLG)KRaLI)06+-; zlMs*+!+-g)2|fjY6R2Ljr01EuFzn|vwCS_F-)rM%Yf#e@T`*~Wxj|g<4oi!4VngPo zqH|2{t*k7}Pc(UdDb#=Il9YLh>JnXi_kOWrrZQ}@GRt8sb^3hflIoQp z6Q4G7pJ05V8Z)NX(9jcM80}!xsWxypqSzze@TbgUyLj|afkWaY*+BldflU%_yJWe2 zp9=*g`~S~hiTsvw@V1ea+ZTS_*wm1JBh?_i&g<0>HQn5i^o!>eLAs^V+~+-I`UtjN zT@z;jR%8)lPQNdEq}%k}hL6LVNN4L~^)l%Dfr*ttGe znxJ>yuOf2B{v?KW-OW%4hk^|~LqkPMA)1s8s_T3vW;b~+Ec8Qh3R$ARAT2?I^8j0n z^v%`27RNy(ID5~z7xy610_D$WYSpVC3+8@a=^NddM@8vtyeTd93c)WKj(@2XfQwP(9> z0LJ3iry{+Rpb8P(#n7gufw__F7f-_0>R0M9=J0T)iBEf>wi5%FK$8#Da-^l~R>~AOtGsp!iM|Bh%OQxy0SWSG!j`sX47#*a%MXfVc(NKubS0d=GLnQ@UMGVUw^z)8V-bGCSIbUJSJwV{E;xU@sG z?Yk2IyKoi0$mliVigbMwD(=zo#^D6ORhbF?X&@9uXx3pwk6xns;+-lEOx{Ey09His z4o$C-JA7*>Yl-UTsh%YnyB6Lx9pfr0df!+*P1izW7q*C3 zsH|-7Z#(4&R6csh$l}hSf#?OOA<-*;T`)_v;a5LYN9u%hu7XVNl$ZJA9uGR;!~!Kx zByQUx!1UNAq!YZ+p5g2zL*q7}zvH<3CaHn&m~?JK-75ihuU&^hZNT{o5k&e}?Uqz8 z9QRSUi_oa;w#F-eq8RLzQCNFK%?X?59kZFwSKkV66l zOFkbWmwxNcmdf3TMInIIQ#0c^8^%872>?8LrQjF?CKPe!{lE%nv|ZoV=sHF(D02by zQ0$D`LjGvxGu!A6=0PI>n5GdcPi+BU29L+lB2lAE(6;q#RVrxx5F(I|k}NJ2{uYKT zXV$SA2}>HI;OVK*P}l{_jHjphM)Zod1Nj_mP*}IO(6?fuSq=}chbJ#q?GS*>Zz?24 zjHH|i@cK>s9A9?q5}?>1;m&PNEbBG+Qa-n3$M<1;$-L+H2!IYmnaM7Hq6qd!wNPRe z#Eg^M}JY2 zx0NFUj))4KQ_fM2PQv$&UB8hQMMbFW_Ie|TYsMi9HFgdrUZ3r5bK|lnsv2*txcp55s)ycqT?>5iISSFoX z@LA1w;6p{4z(~rt`WO0GCzBNc;2-~!=zXK>iE>1hg&a9{2qq|t8;@UI(5GbaqwU>8 zfk@u)uK27v;cz05D44bg)6d8Bir>EzSJ@_}H~>~sJX84Y!D7m}O-*PG+%Nvbbn?;h~2A(ufb#h-pW0zJe^PUX0Z- zpn_@KvBE!iE=-jsZhofqRW|yz?k!~IZ$k|VuLWUpjm69 zF(8^7QPsjMG=-=1dk?cC6lk(lp9mk2HJM8*Vk5-f(g1+HuIvZ^&r@A6T|uv9kCC6x ze;WPL26=x049BNjZR{QdUuFIOeP9zs)PcQiN~1! 
zI*Gfu(c=^VV`>@%K<2xR9KimXweL5SU=?+!Iv@a88N<1=PC3Qzj`*Q9GfvS0O!%K; zKO?c9K?+D_t$6?Xw){0EYtTk=00xR_o$>hg%O)$vp(&IdHnF(s5h`D zL5aEwp$JXy+IMh>;GsV}dFQO`AVf+y6hsDa*P=s?l6jS&#%T^*eQt6G6iatKmvtl(Kym z5S4+d3vvI8PESi+;cqpXk8fNb1vtH+!i`>>LnKK_R!Eyz;2+)ZEG~@!J21>STmHuK zs6e_@X)%1T^?6j6-fCZzVo3@Ct`T{ZyJE`xuNo62!U>OabrU)i(fZKj22=g~-_)cE zR1^kLAYa{V)n>qR7y5ZcW2-BjbPwYBP^B>Pa@5RLXB=bxMiS_3K6athe?w_SfENID z*(4Tr4?5|F7U%^lXypMZXyBA{EOz!bsH$Kx!1+yj89GzdkJ*@xyuav9gR_IL7Jm=R z3d)&7sMWHUk6bhay-?UzOEhST3_si)oczPd1+NcO7l?p(2>erQ-t-SS+J=WeJ*c4V zawh&Hilu)p7dWY+2sILRe1<67jWq45a2lW@;sK+kbFk_|@H%u_&~1Voy_ zAhG@V-&Pr_Qy~##_gYo#Z$nLXIP=V37>u* z7}IietgS?;bg6Hst{&ud9(0@uq9~A|Vx#`;f(&xwNVOYj?ao`V$9ScRHHA3Xs@-0_ z_s0qMsrg>mvCGIbccc<|nIG5Z83o4afmTPFVGOK`qQgr>1GSf5UT z-uIxE24g{Px(+=haxs~Jf?aOy#UdT16iUupi`XA4=X9M@R0=+hKK|60czAe#Qi^OI z8DRrFGxRAUC`foq2zcvgFk{RJ5Pd00ZYElD3g4YU;nw0yLz33K(V%I9Mo!OZQ3 z&HY7oS5PX30_{ka0i0#^Y$Zte*~*bvun+fna0jBL(L=fZRCL}rcONTW+K3BqubgB<7lVMpKXfTt>2p%UM zJ#VvjMASDv}>!t3QxKbo?hklNcRn zA`?OkhDrJqsFW-f3z~>nQP}y_HRvcx{O_CR z8FQJa63-8cVQ254fl&tFmU zZfo2^oZq1l@LoY-bLx%{9cqa3NO5=|eKy>fOC472f|Mk`zW@6h#>Inf{Fhb7Yh+lV ztMG}g{?3)_Ou9BmZ2&}BcHiJQstzX|eN~}O#19m?i`0SA#U z)I3lLd{TvT95G_^Ijp!Kt5A?aG@?X7^zpd%WgNXUC<83=yzA+MbB4~u7`z@km?KML zQ8)}ApOKT?7ztt0AOQtGoRin{kKXQ&kR0s~I*dyLp*H|41vT45v|Bd(@t|b;JI5{2 ziBY!4c&V`D$I4Ha+2UzG$3AxhSo?A5wp~TL!}@_N{5A<*SPd9L0}INUobQn)XGJ8u zD_HgRQouY%a(4*61fJ?vxX4t0gZ26)aez6`5+t{L)QjH?afk&4{%oMBe6ppc=DPT8 zs1l3Jwd@P;C#&)5QTh*C7=$&N{W3*N->;l=56-!#b*3kOC*TIZLUc6V9$-9H_ceA? zF2p&oGqWBZv2Y|W7pIVnd?f_pbJ+>_M=&SiGAra7UyhTq#TV0DIb`-;E+8K9 zHVdLivBj@LBBGar#pl5;$Cm?o)tNeOAP#|rsq4SuxAm&oCNcf!mkZXTJMDF2tKXRA zeGR#?Mw@riC}E0$hGSL+k^ex=IelCSCjRkxCc+whTF}q8pvpKZ&V-7jgNIl~~z82_*81E-N#g}$}@ z^R%GNk&WffufaLL!;@ZUfU9S1l=0j7($>Cs5SYeJ3lFD-ho3*b^q(F@odWNQMaHh_ z2w^R&sd&L&(jq=s5H~W?<_QQI4i3w0zNT1=qD@W@B3-->pNFRSWvl}Lz#1CW9 z-2!_H4+h2V)-wp}dG4lsuURyEiBValNMVJL!ho2$fqs3y(kt+&=3$$uTb(n=U?c)M zd9ZT0etCL7nrslZQ4;!(@x7)Jdy4` zFr+7oule%J_iFRD6fY`L9QK$W5NIqPeW)S64yzUHOY;1CT2|e|rIs0b(1yEG+fHGe zpj_qbn91f(8^I9#(o{@-5Dnm;EM@~0)(}P^u8-Y()N-LOJo5aT@66X&IC|Zw4=vYT zV9fj%RRC5sb~4a+S=sy+{*mf#^xM*P&I_U_Y^zw@}-n_^0?W2CM2mcUWO!j28H zu0y%U$);d~8oRGA|G3ZNSxyw=TrbX~-EQp1vWq+bMOXdoH!7z5HrBY**omb1)Icj* z9fVXD!*D_dn2Ya*z4HSu0X3g=V076xHmTLF-KWp_qWOmB<5Zn6|A^Oj4{orM0|D}R z`|wTYIhOnsUrn8>osry)E<)v`MBwf_q}x!2O)e{lB>z1}nQ9EAS&sgo0~|l-T3vfe zQ|MbS;-=;tuT6l-yyY%|%jXH;rW|M^+5Elc0($0TGo_AxD&U0!d};uUk)-< z>?K8Gp6~c%R&B)>m5_uV>_3e4&Ppf5ebNGdOSuUJzmK6tNQam~LxdY5Z24HBbP|XQ zjcw~IZ1jI0ukqTodKJCwj^cTU2DH$epDym1bhks>VDj#ejScNmsKqoA+>~s3Mj}x}T*)fs4*Ffd8dASzi)i+1e zF}ClNDm4pWaCch$izTCuJ|91tG^Yb+)%1AF(=cUPs3CQ~m3J;|XE2rA!fx44hfhl_ zCs}DpeHNH&A%bra0t~Zh37se}f6gQwX2?az>NGjf$cCMnTHnkHQ1CXr9yBW#Qr}faazr*oxr`4869psVJ zH9|X?B77mOIEe#v4Gq;38!jhs(H4(N*6fzF?=|079cR7-5}yWiR5c*Mm#~f6;UR*B zM9W#QLWU4RN>&5X+tj#_uPw`;*lLTR{04dd#AbAi=q``}ER%S{Gdo9q6R)wR8o!yf z2(()DcD+!;<@l9;D(nJnOpQJSFXZ5m6zS|bt(rx~)ES7z9StJ0m4Q?rc9y=Z(Hr!^ zCNM9WVZ(7}R`=mWkM4*y5j+KZBOy@8On-NjZ`JAnRfh}Sc_AsAQ^nE)3u&JN=33l= zwVLVN6e51+|BfmJ-(BU~Jo&_|C5#mD+F&dgY1p`3kxrt(He+Hm*5J#fqbE9p+D>u* z7nF3J{6U>({w3pe?q+V@60~E`JnAB>LZhQ+jwZxMXNDAk3YU#vLl`jnZmyNRvL+Yj zH~2-|KT_%QvHS5_gpwJ=(dR5Ku|591rh~~N*^nlOTnc-CQPuSZA=m+fqS`)pH|V!5 zs-jUaBaIY3wIf7X(Dkj9^kih_Nj&OO)p-dqK|MWryusj&r?zVW6!d1kS^r4}lXb%V&;@TYS$<73X1(RttpNBR zN#I^YUjy=3H0~R_t?8Bn(>fCp0fVsM>c-Wtlb_p*J*br*bQZl1z4_knEWLs)h20Rr z*cY3wFt2H{*l=LEr&O7vCk=xGPP_#x?~&%#x|JRA1*uBkJZfJTL;tn=*VGsWZaigz zq9Kpn8}b24&GRTD@;@j#!2Sz1ze)|Pliw9V;!Ctd##TE1t%Rc2m8i}DI+U=aN03whz?R|&eqD`WdfV22`PD7A+T?yd|K-u2d(Cfz}N_sk9ZiX!NW!FEn zFJU8d74yP$`w2}Whl3j^*ME} 
zLYWNAx$dD)!mRXHJ`SvQ?e4}35w+fi+FezdC|YhwG(iH~)YXAZbb9=y z`CO}Jbyi+l^%-+f&H50>qS%wgumZa-CI8 z|B`hq2O|`Mzk<$5rTGhJLJ6T-o!Nd+Iy%E9)T*r{1jQ%e*L^EHd-zS~lwDUc`D`@f zoWVk!cHTdPk+gTXR@3!04^~`z52R0jP>;4eO`{VEyV+x2DzyJLG>vDoUJNi``6K&5 zLUA`$;b3)}?gd>TypRT`+tmLc*E>=$W{j%IU!!;^q=3g)?bW_AIP6pT9wTYNe@f>C znp63hu%E=2Lhv8ECc%%&m|Vp!Jv8rbHmJ_6`y=XwHWko%{N;>b+1aAEmteMr6>+(a zHg^G1OCRUI{cMg@6nX~o3CFtt#dM;|T43_^-@0;?*ESW>JY)J2=ir;AoK7(@Gk4oB5q^5YRd-CC75HsR&q#&nvh0afO?iyyOYGGqU|Om5sNR}iEl6WEfFkbUH(6MkaG75UQu4Z? z4I@YvLcHIf;<$QRR-zZ`%s;#ZT8W4-;pOssqX>&{ap!{os=-u#T|Gr%uj=uL=15PS z)Dzf+oentVt43tE9P@ulvL*rKp6(YC*baLl+LNoN(<7tH&>7Q?_nSz`v8+E~pUAxZ z;cw>(V$|KqIC-`S-D>u;5l`e+C8F0~Aai;HYXHP`@*h7blIvfV3QsAEZ(x`7nSN_HPzIs=I_lD4%!jmFr^b&-V!m=lDIxvrTHT2zfp!3hGG+B1(wJReIhhCGhi)=DD)r1 zOl89SJHD_#8t(O`(^^vk#?3+vRDfX)84Xb-Sw_bXw4qyL^UE(EjR!Lwwl&Q@?2ILD z5`Ni7CYrkYaY;vik!p*ueCTh3pNRiK80o>~JTv@VQNGCfNyKNKu)E^anGtAQfUNNx zUQOH!f^jAd?5^JMNRh`H2#VGi*RIWdI*{SI#Wz{E$9GhD>C7<^YL}X1hw>J^3ERS)Y+Mtr{iX zU7sL+()>arw2BU=>HF*fw#w=)R5(ew;emG`@u-(JfunE21PXMHlQhaPdmMe!jEGq8 z;8A4|Pa)Awhljx$pv>x1g9p`^F|3GUYqk`w-Mk@5`P_|JVRI`ZO;d_UY{30;kZR`H z#3xPigFLvzYxAYqmCj{n-cdLx*9!4G-=mjvj)nO?!_=PJ_tRVuhwp+9C3##WMe%A>c725Uw!N%Gy&yB9XtnoaQoA8c}{KXTFwxWlQ^ylw7g5~kZi_=GP z&ivD=I;4%GGgF^1rj7^LWb?o1;LkkU$@5AL%%)#NO_hz*L#zu)Th_wQ2f~)yq74!r z_?@!R{V>1B6RnBf0r}q-9HBuH?v9aE}A{90L5;h0^vx)O`byhWg znT{$9?PbytprQ}s7JW$<~02Xe$%yU-J6zv`*^L`WZx`nh(tdu8q% zj0e{I{PUA9i~CgWVD`Knht}%&t4ylk94S~S5sw_?@kI=R#LpK7_I>wLYj&IG5e;qF zsji+-{wjSI6qz2+AbzsOeEQ{aiW7ptIVgLOD-o8q63f-jZ!f$crTcHzuIIt2s zg+I+gI4z>veE0*_&T#wz3>&1Lz&K?I;>dkG9+>uO%rYEp*g%MhKOSZ`98@o|uAGM) zY3Kf{DG{L(HX;y7gnrj6SmY7{naCz4w!61Ev6`eiK5-^?-$#A6{>;)&c(-Z;*tQjE zK^5K`%D>tC%)!RLEN*DqoLG0xx;RHAuOO1_)vf-UR?g%(;cB8^r< z)H2fHsWyL165xje!k0F%c1alh&@|^2TmCD(W62ygteli{MR(Oy`%HH$71Z)yPO|RB`Q?n}GvB5!1wE4U;0{^kKeO#y_xC!vtUb*YXcxhze`0 zywGgE2)@CSnc!P!G^MYXN&o}P1GqBd0DWD+YlSIp2rso8EQl!j%=U2gPJ5ORW10-z^Q+Z`HQdvk zEe=@ymyq|2WX-e7*1_KgtUWoR6j3~S`8mroGyuzg6%~0JY4^##_4~m=ncg~V?j5{M zJMb%Xa8ILUNY6~VI}GToodr6u=enZwWPt1CBS@jqGR?}~hE@WuG>{9(^ZFCH=J>f6 zB5l{?_RH_bKAGhyB*Y&ZBwrDZ<|IJcX*OF(ZsM$x4Qm!j;q>|?9HG1oGuN4ANQ=)e z{|d|QF0DtmYV|$??mQw5GIH#Ru|dD`T9pfsH@Mc&XgE_Y z#YuCSFn;XojMJ(?C+fdLU(fhw^yr&IBGlhxC9E;@x9YRv489<{Wxa{D@E_kIR^qOg zpcAx#!to-)F)tp)DOHwJLYJb1U4$-F_D#f}w1Mf}(4Yr3KC5ndL{{V$r_O@Tx+u61 z1X>$#uSQGQ;O0XpS-io=$)me}{dKJ9YZZ>byvnCTPh`iGLYjgVrScTndtjRwgk7&F zOZsrl9VSkI(}g84ywj9O-p`fUvUfW#7FDn_!#C-}j>!z@6=!WZX&<=|!3##hgdnOaSqiW?dgGItJGJd2&VWk5G@F^nIiWxUmxY^H~LGh7?=D{k?ivJf8s zl7&pMkcfv{2u)}GE`2y7SbEm^gEADPh>c`L$Wp^INf(?}eHxv7qR!Mpa*J+2e z7{n(#h!fvGj=X-8R#wOW3M`0q0YtNLV;p}W0cQZsTH!8-jP;){3`pq%vjZ@sg}n-s z0TH^#6uH(9<$jBcl<-O_+egohl^=hFHA^?7ZUkV}b3pVC5z8U~nKJ%xhifK6q7Ba! 
zADYyEaIEs!+o6jPClWn=R{pnSe;de9U&ar8e6wv{4AUO4`wN^%eul+{eAZg!lg7;5 zdy=`ATmUWLk385Kg%6=Ju6tiCOshHL=N#~4s=Krco_8HimSD=096BO~3oj>DnP`Xq zhg(dX)8^p=N)jhq;u3+1zvtKA3ySi;zfvc?gkL@IG=rU>Kt+uaOuo<>?S?nw@m^it zv9~~s1q3=|!LB|GNe@_!phGy+Z2r*%ABwAatrtaa0#Bno9)_V@%y7#eb3Rur{3-MG zp58dVB^fW6zSp<&3L>5OV&{C|ul1BKY9}@a;aUM!3g&mUCC8g^GzqX~TUgc+y zh)#spbp6{0zjA~GQ(t8H&kz1-@>#z-o}JORgNP1rvqF*jI+>((pk&G z`K;}QPWQ_UzAI8;9{BHYlW3-nw(UcG1jlZet%9#M&8v?h=^!!Scr7aNCbY1Q z%kSub;8@}0jhB!@<2$Unw;D#Sp(B1(E$c`Eu5yGy#%1#`pH~fVQVx9H+xDIoF~he> zvR_x3(^-?o&S-iarXrQe=WJlNQgWM_-}m9ixN;A8qu^&rNb z%M%Gvq2a-hZ=@z3B-M}5&bDiPy0XqyVLqFhh zh>G!M03Ut+NKUG{>2<1%ujygkX=&5xmbxHS2_;GZSGnQiWp4>r;@@7oFt;V*cdP8; zM#4}L{3gk3LZFj%CTZJC!m~=$;gacBM>(v?anbW~6j5dIJzuSF>132)-1f?sk|3kD z_v_HSJGk91j=>KkUWJ}Ccd!zBCuLcXW}KthzaMgaheHnZS=Flpv$H}38>+#Ak63Xr zFkbQqC-4{nQL@O-c{TGWpDoQ)8@L~-0f+gR`$9rN8rzK6$yH16E3E4$T$vMi;Ek*f zx)8;Hig&OB@9~+MH!d%|bYzhVQ~H>I2He4BSlJg~pd(I|J1BPTG9qS-l+2mr#?FYM zL@C84CIk|o`YMh!c=k5;I{H)CPPH~1Z6Y!$26`%Keh^Sk~oZFYs(05XAl@fpo z$VH`~e_z%g{bKU)uz6jerlbd!Dotdu05=qS>iJ!l6P%jOs-XRL+dcAIKVChQZ7BN& zoz}pa6?Y41f=g=rFugwazre0F(p8!Yw5GNv{^?0tdzg0d@m`&zU%g83I|<$MYgY4O zv}Acl1f5HJMU%quBmVy&Got9Z5@^j_7)21uvB{-#6q<9xj;a^=%yeuoHF>b&^t08@ zP^;|KN^-O({1S;+M87spCTziDTW`~F%7xvUf7tRR!l{h|h-V!17=OV}But*z5+{Qj zcA{e@3Qw)Emhw_WI4Z1a!2xxXkO$f-Vea!o*IW)@=(2B6omcI|BZtB=CH)qM;~bbv zASStdwkH8q&uMVgke}+bN%J=A4*Lwjq91qA+Rg*+G zKNUsA<53>kXsa8LFnU$C+;PiqOt}^`*43F+Y%=X@ z331ZT!yFS9Pk`t|LX`-p`MuT=zhBuR^38C!+{qu>pkz%^;e+nQE6~llR^oo>OT0vr z3doQ}uFJ3bO=`%5;t7nVp!M`_F5_NIaW63l8@-=v7`ERc0Sw08jW;Pble>dByst#- z8J{dlCT%hcX(9Je-)lSJ`wr8&D_P&n+)8~b9d4oIaycJzL|RKziC%yco(Fvga!4pk zInD~edJ+`tReI+_%6aY^t>T0<5GAW=j~%Vd7~9Wvba&TWH+T{)jChmF!7=oq zi=aOP*-3`$+ZL%FS#~<@*oaN2Mko3?8{P6@arq+(B-|)>_2VrlaKVfmkJO@ zE(C-YsHn<)P4sS z+3+)p3Ea?Md!1Nk4!(X-R*?E;K7Vsf8q;)t$n3syY}ZH%-&yQ}R(7r&Vcs;DOl;m!e$ervc(-oHA&2%zDz?>~HB>71$S00(4} z9uhdHr(YjZPpb}5fzS^^L6T~FwI_Uegl3;`^IZ&D_)Ut<_)oUN4S7yU2@)Q3}XHPi5+Ix%d77EqxX#QP^lh45ftSjFL2Rde^h_g3^k%H;!e zbNBVFi0Z+&gdkd&gCH^JwaZFV)u;y;M0V*^s`(%QN6K~_yne1s%gn6W7cOqo{cyQw zF2yhJk-7o=S_C8`;eX72g;XG>S5>t$pjJBotrQhGgYV#AKLvNq+&4}f9?IDWH{4yZ zQ|i=6ZGW7a2An2Gt!2yR920_0c+5}n~GpXcu%!7D;SLXs~p zVc=V19;Gt*-F#+dre#?({2Bv2{jEp5ii(One(V-q0n~nPIHd4xe7L#2wx3|rA=Re{P`-^Q9w-yYIL~ov;qo?Pm2|9anWkFG`);gGBG-H1G zF$PZ0h8lf+vu#wO;nFR?dB17*j;3SsF2qD@d!{jqq%+o`f%v@o9mO&I^J-Wm<(%7A z`1(ZA^E-o59yLPC=pg&vM&-@|3ipPgG642I}PVaVPw>DexJ)GEId3rKVS3d)2DOu z^Ia>VEp>Swo1bvaF?{so0eUxYW*!`P|9Eww-9^^Z2r}t<`k$}x`L2X%dL#a2)fF79 zl;)C=j~LTeza z4*GfM8&PW6y--Wr?_op3uSV3jJ#UUra@_3_q6xY;QuRncK%lb7>ZD2;Y)~E_+EIsl z(P7{wZ`JMiM$o3}vN(G3;suoG8MvTKfL)MCI7ZoFmu2?2QR7#0hFJPSLycRvZpl7e zO14Ybs%Rb?dreG2;=Z+D6ma=;TtdRfoSaY=;>ep(os)5&6df;$F3o75aTLDm4Mmhm z!{QinP^MCF$blogYmEMP)C+U)Y$%acUEe)?UxRO-n7n*WSC;1d#%!0t7IADsf-W)V zIRvZcv+@yP-x6Y$1S6iaNUfteg=W2($E}`ag@eD7Osc~A4()=-Qus_);h&n7KulAVaIJUftR3}Lp5Wnvjmn$6zC2DQ%>-NDbdOhS|>Gc`i zJ(5WNeIhXVG{4T7YihBHk(pfT@vMuyVGArKvwLKl*O-}9n1yoOI(8!Z`iRcw4xK9R zW4Z8^DA?|u$EgtkOx!gShVX8Rt7HWNRu`_HV=AQW*Rl@@KBs2CaaHGS9p8@^-vXK1 z4zFb1jOTG?Z%WBy}iHs(=qGvo7Wv3+Aw0wK_*8`@dTXj9}moN#BJPssdzK%W7o#D8QKa#QzC4zIaz-;3D7qb zLK#=@1RAg7I93`sdcM!B_nzOJs8wVv>d%3HD=8`c>gaegJ$*M5trfS4U#*!<_+0Hg zsCb18%c;h5zf5l@-Mz?(33G4SQy6~f^Hs(lKdBS~?8W;Aj=!>wR)|U-l$&=9Je!rq z^snt!wWTTMj~;E6ReJA=zuG6#dZYF8*RNP!US4ihcX#*f4)4aORg2= zt&krT^$*lZQaCz7D3x7QT>Ncm>3Me#kEC!yicR^wjOSF#y{&ak4tUlWK$=7zL6Fnk zO@qt!xF}EWELPO*MhK2hsY<@T-Qtn!xyHkObo(8A=kMRQA%w55aOebFz6Aa4@%HdT z_upTMx|}WEP(rQ5ZOpWVx8FN}i;gExaIldWlFch!y&{GZEu4SY+ot3eG0O@LJ}%`2 z#*Xa8-k;OSOb7H8PQRZ(I5nD}Ry7F0y3VxOy1D7WK)?Us!Fe{a_| 
z)Q^mKQ77q|JwJ8&fH~`aajX_>lS9hz-~mKOrE~jpuJ<2rFd?#P@TR9LN6ZaXxXK?) z&`wWJOG-<>A3d19@ZCy0a{@<`*1zRYuXd2q#0{Qs(gaB!=Hu5Y)YP!`)}V*9o)9$R zQRUj#EkEm+lnPcH-R{!H2Di4gA;FfElqAkZbgF5}g_)62%g*kjtgLLTis6M4a%vtE z*-u;k`F1Pa7kLgJK~V|qOrp254<-N)KR^Gx6Kn6YbTGD~2zqV%if(PD4L7j(8QOVz z8gg-!jb>1kzbq^iyME(Fc6$1i#l=M^X5(XaADwh%Y5@uFw;76>WMEM0LLEoc#uswm zU)g9}wsU}*(*XzeT2QK2$-*jL+5O8lKg|RM`9e&ym87UB&csjeouSa&+EhzYax&B@ zTNfyQ_k8OCWzCnGNyYYm{0oerUwJ@I&2faItu=l(T`Kx6_qpvSl2z>`^N(sBq6P^+qM32}|Tc=L~Z5*g)ndfE$F z!TI2xt4T+5Y53ev!y=A5+Nv_D*8;g@gnz1o>G6U4vIM}fJ8$HDJfDZ);kzvO4f&p= z)YKjl4sQf(y0lM;81MEAKvUA|ktNHETzoW?gKDBjl3X|HSaC?57o@auHr4)*;upyQAX4PY16=`2PlI_H|87I|~bJl%F9YF|28lQc|SY zh@OAic#wCW9AVzX zlWu0-@iO1HSa2ayG>e@ajF*h0MbxwK@UYM~XI)v#w9+kT^N92rbWDGL)U8T4=(mhN zJ3kuC(>h<3SQ?W{9)YPjHtqd;?UF6{net3c+X;R$%p*OTjy z4hL%vU%{-Vp^P$(YnhU~crmx3L8Zp1^TV-+H13snJ1{ivopxH=m6to@(#y=ugkatz=6`y;E(r8mYJ!~~ z7ivCbcJgCom7hwVbt1xKy!X}J67aK4I zruU{pZ42c`IKE#|wwvsl+VU3!!S;Z3$N24x**X05s9=e;oZaC+D#gXcdt*VngBhx+P2P`VG=_*OwJr z3Vlg)`umM2Pn@{-AY3Gmw5*~+2L7Ohe)gj{0aRVkvJ! z8ux6nrg#>u59m%A;;DC3hi_5a?p`q0`8M4MeaE!^o1&9zEcjhG?VtbI<(3*q^mu6F zTGEKxQ+rJgK9qO6p@Bifrro1EWl8Kl&)ttm<prFt3# zQb444IW?w$9INsSe(n7A{{?a7;emdyCEN+_ zv>?a!Cozv45fF1fUeceZ{R|EM#`R#wnf=IL9NLOa(Rax0M0v*fmp5l-hTw|8S(HUX zZ&^Glg00=2ZNl4rkc^7Hg6+RKt#)WPmn>Nt)pN?`K2 zmaYw}x|iYTK%_v(u_@*yjws)gv1zp7Ry{{qZE`0J+*i5!@g-R4Ht)pIfnfNR?DAX) zZohEv6)!J*4WM9VW0PVdAbkciPyK{*Nm&_8pn>kbM`FDcNI;ILHl%m65*em{Y#*-f zBa%9)4*nLQD#f#`7%c?WrbZ9E-qmc-QR7K-o10btVh5*ITZ&v3{IN@HtUy1_PeUxC zTBLv#n7z1q=K5#$Ws_U=JlDy-GUSS)@?eiBe{z$UM@nqdd>D<`( z1odN>74olwb{<|l=if&vdXfltnH&Be8>H{Sluxhu{}A=%fl$5Q|IduEE0R#wELjqf zt%=Yk`l3t=l(F*FU`+Efcb7@VP=!% zn)+50{R4+cioX7uK7XvW(2pUJX_6x=k!;=1@x>qxd099q{rYea)h~-3=@h%4mo1Lo z<=R%7frZ?*0fNZ_QYMd2$S3TBox5ctpA#!(@}ju-n-7orhumiLs-d3fWuS4=qL~|w z|K=gim`P1Yp?nX-l5T$g$ju!hJx}6a{eg#;wA^CnQ&v$qk7weNwXm=#1Goxrd?z6p zV;7@`N_GNz)SQ_*Adp)Us<4PDs6x`mo_^2Xhu*mY$ZM)ze(+#IL{*4!`XP8!zcpr# z{r-Oild(+4`@VDXF9V6WWN6~dNy~kVj&mOD2oLjUOH{Q%T*<7Fd!C9bTTC8LsMPCIt{(w?+28xHHZ}6ms3s*E#=j^Re~o zef)xg9sPp#{FvDPFz|t?igq8K6VESOru~lhy?DX#!^hD#QYfY4-kp=>oZ5LDQw$F4 zIHH`q$8Kg$E!)W0Ei6{#OCwZ=ss4I(^3C4H@6~mW>@2H ztizgui2DPvX3*?wC;h)2K22>-=qOme+-`>KlL1D3ypyDM0(p3xI~Tf+u<7ky7*R7U zX?s!I$J??BIvCTaLmIVx1Trs#PbZqTmUjZAcsa)$Pbwpjs=GN!*TDT|9hllHbdwuJ zQ|xIY-e+D=W)jc&VamTVlCZ-xQWWgLE1?OizAh*HW`|o43w5nC98^(LYw$5fl(Td+ zpuMo;_ft?C>FtlP1!d?CII)sl4=$R%kN@Ku;;7cJf;s>3oYFtlBVD>19W5LCDKYxG zY$AU1^aW{HBM7Q%0!BiR)qH0zJJGSQ`x2gk%nVq(bLE3a^a4ZUhcPV&&UkEhvqBT% zHN&QxZ%)BE%u5{1Wk*NF&KT(;!-zb}+xU=${VFu-n+VlBWg+H*mQwH06Vg0}4aoEc zUD zRwo{7{n_N-b8X?TY9G84RBsV?o?t zJVp+7r*y-c_-!W^Mxneth&Q4x4}{w8v__W#;9=`g&J*FbG)&NCS>pFKbw)V8bPc!D z0xxWtOec=tW3xVkc(N|EZ$}Cp%I|W=n+AuhwMaX`jZUs`T6;i%2Y=0WO2_X0TQ%qt zdIp~NTu_~rWQ}p~VXl3_;~OKEeHi@(0X$K?d^zi#yP6Oq3(qmybq94Y$bT#5jN27W zv+FHGUYzNAlEg|dzMLYIc3+}MnP2mT(7{oX9@J0c_B;(T)J6T~Ab;QBUCAk6>CV47 z1@EdZn%wMq<;qtdjuYw44d8xbk@X`6Da`TPpuok*G*l9?q|`aB)BT^*Itm+^yPB#Q z)aAp0Hsga9^KyOtLzz6ohR*f}CrsSTHTURcahq8kWN#4=60$sVCfQMOcXiio^Giy3 z=QWd66azl^#7UZA$tMruS&5K5t2>_A=fjkKx+que#rS=pyDJRB{M)yiJu%%Y#87-c zbxhgozP;3Kr*~;%-`Zwdd6YyRG2hutp z?x)2mTNzw`xhPaFtyku66szi+=`+=v?PkD27KyY#;YVtHc;Y_BpGT3>7%y>VW9+?r z58T-J%8}jdNDq+Rgx5E(gndqWzKiebt1ss z&+nv@lT)^nww2ms(4P+_*McXXz8aXfa=eUpSbBAPep+ZxVDLIyHa6J7-Q7?~NXQUl zoe9EpdvL%Lf9#Yrrr={&EP8q7iz+pNQ+3!7=EdMVL6&=%kC9yT z51=PSZ*d*n=N%l7ppwbt?Vxl_%|WBh5$hs;FmQo5>~_*B&6+socDEwb%>X)67! 
zG)c$o%;(Y9W!;32K6hW+{ujN>4F+|8uPhrPnOxHhEQ~PVk-WNcw62Vmcu3<)%#FG4 z7>s3mcYa6w9|-LDKLFFJ#rfCU)r7+*qn53lE{BuoTjovMu?x#_mW?F8C;ktEm*y^v zs9xSC-zZDF+PUP1A&_fpc4XG3@PlEcl6ndIUcX`dL+!k0cRG7xwO`~#X+LTl6}Wk8YTJ5!8QA-=6x2ui=4U(OS&xz>tRFBz(9gh zgAox+&Ow>Ek|fGDlO?a4F&)RmjW*lVwp!%rNN#-Bfi((fs4-7kRI4QKhJkE zDS9g#2ow*BF9^XpY?0JfKk6pd`4=Neb%gpCWHtmO0XtkQ{wOQsNArC)Yqm zD^^7)%N1by)|gIpMGSfTf@0SnjOg<|9~5du7e9yxzS;*ygb_$N{51c;An{Wn6H*OV zDX&NmOKJc11z&{RvhYyk<%sof*8cK1;uEGiWU@jMd31 z>=SpX3D+S7L zxsLbrF*b@hm~pXPzx^_b8-oLt<>?L|aFJ@b@^Z<4`zp31$5fjB z&vZ!}Z#iJ7>s6OEnbKl9&3O8-ZJs3~(J}||NWx5;N5`M)jOUsB%a5QF#0*-B{}s6$ zA)3zvajJ&b`TTTS&y@QTnqM32_8CKdV^|rnXId@D>)`Lsp|rc zin`I*`N{O4TsE}LH^!fKk!u&2g6>OT?t0{g$Blms4RYwCQE;!#EiJP5k0(ao=sW(Z z>~LV858IL{7H1W5sUtiCX_o8sSR%BU4%CfeCLKCN3Ir#DQy+pS=}B9U4_4$Zfc>sl( zUVt5y^Pu?eZ2H@|t57lhO^GhWKG1US5-Mr?Fs~7W#Tjax9R7eK{%q4je%nw)0W*5> zI^gACgp|E`g+5Aw$KzWxp>zS(hYaC=YBjjFjfgR|39;*pZt ze(_oS#$>x8!l!sI9ti)nzf1;Jcou;`{Osv}gcY^l`bRYtjx(dO zrF%VhXC>ir6wf-)Z1(r>UVF6xLmofHu|&7NbiMLJnpDE@&H8}~BQnjW2jY-U+hgBX zSE9vbdY5)!_!VJ)<~L(6rb_!w(WCRJY3l%KCJH%7wBLu6F$ECcegm3AAbZ`9lkM9b zVjz1$N3#EdTEwUL^-Lvr%%Z98qwR_Xp-ZjK3*V{*m4x00_U~KUV+ex1=W9KR>Pi_Lj~~SA$)0wb0dH%JX%tem?!_ zoRT2(nA6!aSTD_JmbYgvbpY-Ze-N?Rb|U*_61pe${Wgq=R-+&%h3V&rMT-%2Vo{rI8y94d8{2;feN755zUxG> zdKV|l&SO_|=lY-QE4w$M^HtUAm$b)=y2|&1wPhS&X^E-7-7<{jpy%P);cumqZfrol zhiue;W}9g*)xLBCKx`if?RE9i$f?%4GT_7Ec6n;Lc&`G3^$}Js*L*&-IEmqE)65np ziO^%2zzK^NmnIa27>PQ;;jca~e@^kEMKu3(U;6s0bz-2%==b?bubvWr${D#&_nWzc zu-&_O^De_ahO-hUjy^Jy+Q`MbuFU#(%of>dtTF6NXdT^8eRZfD4tBS9ii`-Zd~@M- zC&??NDNtqtVZAl~;$Zuh;G23V=!lybOe3@q3%~8wu+q6O;EdmlL_R{ML)<#NjplGx zMth_rjAGRdx&m%ytbjFL!f0~kdr|g2(%mDil@JpZLp3BfJNo`!WTjYprfSk%M`8U@ zSY7(^OkreWi&C4J{NJ6_qnK$JDCO6^5HRSd1(3LxL3?{U zJa)99LH_jw0of+)F335|2v0wg7=x|YrjKZ=pI_|CiN}%-T zD2C|p0$gL@L~(bI$m1PaSC2F-Y0|5hG3$c~3m&R*EF(WC)w2aN_5o`pG%-f)?XW;A zxle<`Szu)!W=C<28mOGo!&pNe(P1hrN} zdXTWY9fIyFd}X7zsyVz z{iW~~_BWXq>8S_E#e}N)jhr2&O)uAt%V?^cF_j(q&Ha^>`1tV!5hU_7PJvUBs0EU1 zd%z&?&Vl?6qmKkr&IWXn5e@lvzT7#K$R-ms# zN-FK}SfhWC|4f(2k3mZe;XVUFW+>=$Mr0bT`z#U0(--8ag1M4}ojF4@c+`k6`)_$X!jawt|02AeHm5KwbR;^u<5NW#JwhU|Uh zMaZVryw}oD^a-dQp!`+U6`@L2o$D5(=TjVR+3%&s#O!4?-|=@3%j@v*tEt({v}2#>=K+AL6)fuv75?fzv#a2KXx zF3Hl!_Cd>jFPU$T+hvex;YzE=@Q_G;V-)?q$FE}+cJpmTeq%94o%IvaYqa2RKt?IHk)tGcLY7#@1)kpry9i4vr14^GsH0Z`jBh%M_7d8?t445k` zSL=5l+bbMg5#=9 z$)B@K!ADyCvL3ABfy3OVPn?GNg?eON{cGO&v zLTb9KT=G>4DB|e%_dC(Atl}#_MP${|n#MlYR6Z7(V1{O|uV9AFZ5|9~gurzYD#@F< z{2(*&dPYQ66+{b8ik@XUZ-~ez6A>}b(O1S!FyIb%df=w;vB382@s5Wv=>2as&wWA| z-ZBm4^x;*zX{gGLTlyzP6S=(%`HG5xOokufVRJvnMAG#3_U1t(U1?0P`0x6oGRW)9rv*-k z(;_*9yJ4gXi7Ot~NGZpp+rW-$W6+f3RNzM+oVCN+&n*kp6|nKpzK79a2-GwiuFH*J{JPxp|p`176;?$JY;Hm3H`PIloOx@+$sMQw>P^hDsrV7-JzF?xlYb> z3K;1UIX4Lnyn-`xPSt|@kDFro&4%lbuG=G0V!*du?OVv=pkVM)qYt+qJiGDX35{c{ z1#hk2W;glVrXOy$>($kmnE%|b3PkcSCsHU574zkVn*wkh=dZB=gZWSCg50VadZ&OsLot+Z~2QYL*rW zNNUC;Yc2gfT&4Qn0NlVGsRB z9CjN^zTPTHtcG?qOnu=sgQn5$wJ*HSPw4l^(W*RhG`D@b^)e%fvF1t&)HF{-M8#lo z%&_Y9Gh5`@Q1GG4@e1AVlEE?mb6wT2J`I>JCIG*-_ZBy43(PCEr~ar;iYCf}^ko1_ z0WA>?h0NJJ^(wWmf%fkC&Eh`v85Sx63l*(55jLYW9XK@_Ljnq-Gb@R^`;U;?kEst| zfrV2s%}{(C;U-+Geb}e0Xd+K3`eD${dkj1bu@--C$I*rPT40t!GP|)7L7;CwKv)siNjl0srFdpbit&`1%K!n-FXy0UcC*n z_5OWzlhR=+LtX9g2cO8ndS31C{ULjT-mA_oR!j6-tnE%4E;r#X5F`bG6^y^YEQ1XH zPp`d|jiD4WxQOh#DX4bzH0VfyYNW1-=KFJ0OQ4 zT^fn5ynsI>opO*8L3jGX)J;j`muKAyPFamxGjZ9il`9A>@z9_FGUzi zL&sRj!fYr`VztWv!Whr7X)1(NHTLSRALR`(aUSRKEevPxxZ5Os-4#vSy?u$BG$@Vf zuaQC0pVms3RJ>>TUQATB`tVH9T_|j;dRJ@1Z0X%`7HI}Izb5)h10jwX=bt92sqULG z+x%COs?zeSg2Ix+px$~L>+sbF+4wGZ+D5z@$b5t!kdJdbn9w#8qHvM&XFWT^fi-Dp 
zYqoegM@7-@*6e5=UF>b-No#7T#1$||65j+@<3zdJ$^n+W_%XTdyxrg>{OxUYkRveo zz5cxx8C&%>Ax7}7?SR~$ieWF|RgujnauL}3@e4h;PBG+;!U_Ih08Q=O89 zr^NlWeIJf=>iTH2mpNzD7Jt<=cbj5?95bl2;W(xHI&#b3lHUJmpdZs>B)!y5zVCy- zgumhYc%@brzN)FdIuRu6foBr@$`$ilAP&E6Nce{*53**H)ETdqZ=irFBdU z9@x@yKdcaFO&qCv0r1VWUqNAV>LEIVCg>$)m~ikSo}qCcm>k=cA-@e0xo z`l0B;!>iNhs{&2dTVxmB;(kS%JE(lf=v@e^yi2tT54ue&l;V7G{t7qwVe##h0S1SG zp^4>cL`;o%W90*CiC=G|jjReRQ$C5~=f7XVQ)Bt8M@6n&qm@UPMASJ7j$3A(`0@MZ z`NJv923o^I)q$VRnHTcEe|4?ltATpW3D#-ODGvL^wZl#Dg&V8RP1SkSsSqQ8%>({! zWpotrWXhxc0BzXiT8}n2BQfvv87L(e2m^ljL;0~|fyp_glmG8Fs#=SLu;7Qs0IOk| zd7g`r>|9ICLcM?S@>3Rmh$}s-DQ~0KFG_JzDT(1aRRP_1?%O%fzuUF{bW50D@a3qD z>X$fIi4{fUs7A^$)@ePz&pO*8U+w1Zam6#q7H6wG-{-U+FqjpD7@ZNDmfSi=MJ#~R z5HPzuS97A4ezC?~;YSX^o0bYzlXHuni)RVh+hjog84zn(*{~KT zt>yTxSbczEP7bX50JB`T5@2X);T!N-U=VcF#Spcv*J3=t%R}ri?nv8N_uKsU+M1Iq zu!GN6CJPa!>A8~kVM}fxtX)eP5=l?C!vAx}Gc|T}b!7vqKRJ2}OC)Cs#LD#I-eW4Z zK^dQwm)Goriz7-RF9T%4{0zo4Yxst+is~&VO%^>T9tBF?tN4Lt*YZ^iP@+=D@@fK*W3=*6_ZXbCvdw8X#^;+(3Zy-e<8ht)^Dyu>5c8r_A+y+yX&}trkdXO)HO$e#km=lQrhx7+ zacjcG9BVnCyzMeFGD?c(=BJwn5X9aY6Db*t#)n6QQ56-H!qsZVE5qW!2{$MD_=lAEIN7Au76^jy-+S-VM(*t;l1%c)BXM^r=~Np?9w%D{0I1 zzb%M~W%pQm$l0r#p+Tmo5RG zD+oLjyAm08Fe_||tkWqI$w{l+Ie4$psDJBZ&6Hgt`5fzB4HfUt_t{H7L&m8_mlbK9 zF_XF2rYMwDq z8ga?Tq3N5f1IMiMGBV!#gqj|gd`i#21uoW@Jl8xrer0!L`5IxW&@Rt#uJsdpKf`40 zlnv!5e>_9C1>)&p*=q7n%}WzjW6buYv9?sr z4{e0SAr?D|o}?rx_(}JaxnM(&1$y?;jJWeKD~h#_xu_LQn>{JJrIYSo=x=bz=F^>3 zC<*_eHl_xyu;&I$Oes?PirK3@hjd5c5j#73g6-X5>D{WBH5Nu~mO@2Q!zBKn?BRcU zdwZ4f_!ozsIN5S7VE5T#Yq|QHL>{b!~uKl4VH^3=>=i3cQJk|dB}qWaY%K(hqt-Ug;t zMM=pKf9%=!;&*yWd6&LI6_j2EhN_{O&?hr`@fz}S1#hZN-oAslo2aCCnYr5g#~p30 z!ffD$&F2F@gamD=0hj23e&XiqFC@5GeSLgPu@n-WFX5Z{_R>0A<<&ra_{CzstJ*=b z`C(HR+bZ3XI|S}2gz9{OE)+UaXKEz%5xO0pbCbW$r>G>O(-!E+wxo00q<}#uIiQa_ z?B)iarlB17bThBa(+hE$(_jBF-%<35xZ9gbU1|rY18}-*DI=VTVxYWZ!Yxw zhhaick^z;twz9S)tR%#6P=^)0ve5{H@kB^{3yH>?z;fIi=pQvRDFXtZepkk@VTz@2 zJARlgyp?!0GUlQu`;c6Bb5r;7pq*iF%`vsz80s`Z*w<~VcEc$PWMi2U2lO)wk_c=F z#GNd+>$YMNu?2@^x2#+5ET9u*UVAfciXO>d_mm7g9jjlL;c8M>q$D)@r)mU#)3{TKMq?K1DarC7M+TxIyGkyb@fkhZjA*BTBlCLYbh{##gS$D8 z;P>y}fx_s;MKkJ)9L!_60h!QEb}4=nv-$4WiGc9enzEniU96x+%#YE1mF%_rH`ybT~{rW1Yh<2Te8fM|NZ-`TPuEIIy~ND_pMuMM$Xa+z(P@W!K2ATz*j7AF`NG2sOZPdEZ5fASvOWaBG0+$ z;)NS7cw9s6p_$hP2`%M5wyVHvJ~H*F1+BtbSOI*v987@Z?q)X+kCV{+GtQ4ZGKz{U zo12><($}BNxtac{nJ>E3$K{(U@7sJN%`0vQ`D8dXtQ+4=mxbS*a*QZgRyf8D{t7aM zS=)T{%aKf(_U(=5Z82PVXR+vE22Q&OB4k78chuC>1R`=t_~XaI$7l>t-Nv;|pJ>Tr zCGJnicRP#OYn6$#ErE-;DINnQ_7F{hyO@LRU?Sky;Pg;EZOf4ydE5PN_`{_yccatP zHRrCVQzQcYj$`~cbKktNp!-G`YyduxPCjcO9;SQ`7hR!Nr)Z9T&H9c%{iO=4{ous8 z?dSz?aP$i~M7zt%%eO@xGipGG_UwpyL2nS?P!ENbnO!^kWU#rZ-1jVu{oiJiWK48O z$7xGK*~#1^ML|jD;cuu&x+I|xzUK@z6|ojFF94yTGNbc5_2Cs2AQKcj(zZRTUN;=m- zLy(NL77Nq%p24ATBjhRn&Qu+Cy%xy2#qX7f)8i8p88{>xXSl2L+c#%ClZ|rV{Jq(I zbYkYWk^n4>ceky0SIJ0vEu^K@tOp-ml|&xrFF1y(3GLh(StpGlvdJA>^X|7VNF7SF z!C61Ce!KXm`rKuVXm~6l@7G8DjV@jI+r}Hm6!Wz0*|;=u8~g!#9YEnMnTgfyUgNxe z(%P#CCHl|wK5$n+>a2*#6idDU#Qd?@FGR?^Sc~2NQsz4W50gBSVU~BM*m#3lSTd>X zN!#LObz#mUnpR$hNk12aF}<4t+2$dT+vKwi!(I@=K%Nv{&|@d4$E#N{VKlCfQ88l> zv%m{l8U37Hm$!S2CHp;9F@!I@!gA@7+B~WR=Sc*;U07IEX}gnQOtge>(tKak$?7@_rfT-oy0o3sv+V5fVg~M5 z>l%IIm*^?&ud~9~%j{*`)!J)Kt9EqOH zf&#$V$Q%ZVSUt!Tnn=d#`K@w3dGdZ$WuKxWG!~Uj+SD{-9lGk=?43d|rkMz)PJo5E zuFy7KvzKLJz26|O^*_}-FmkZ40O}_8 z;Myco&x%Woxw3v~Z_nuJItfp;Dth;qR};N!#iBsNIu`Ti6|qt*|0< z#D?_D@1IB0BKl}AclZQ!chD_6-PD|}3@oA(T^ooyAzO6O@!o5JA#x29j4a8DI5H1f zLu!VWpFSY1Wus|D=<=l759J-uruIxxW)L%U<(+9zyk2lpL--rWX-9uQG@fw$_@h-@Ke7qU$=j_uVa-s% z7SXk6ZqbTFEU8&ec38s0@w_VW)?t!N>l~LOc)k&j7Vn+s4FrO`^JC&cC<&7)3AJd 
zdy(Yzp&b#ILZjef`I6vuH>}0Rk3%WT7?=#6cx>>?($Z5QAtCy^VquR(Vhwi{d3?ub zwY#*Nk+(vx>+GV)lVRAgNeob!j{o!pf~Pq&9+^Oy5t1JOKMtO>w|`asw`{{q!Ib2D z7{=@S&GzL|Lql-M6l41q)LL4BNtb9Y|M&P0ylgqp4b})tIC$6aDGvZG=qfae)Lm1Y zjS-+%#FD~+Lb!34gPnO>-2}l9x{7iIFwpuVsJ$OH9jwoh9L_3`5VpJ@SayxUa;M=9&_Ear6ClLA@lWqnY76F4;S^%oEaP z1PhC{|8W7r>s*M?Ki+QUPzry*Z%EKfB-PC_sw_E&3Ii5bX;&6gnnZ?swh+xpboD5t zl#R1!tH9=+g{<@rtxx+KrR~M;n_5{VtgNhn?T0SP^ME?wKRN(?ogu_!(neANp4NXJ zjYptaMBQCs5aH)vRRag>#7^FINSi#N`J^Y(uZFz-CZ+8i|CcW18eXsW+# zs&R5Og8|^~XW=@HuA$#XdJd&`(F`zh$W_c8cL(qYOBE>lCZP*2xjV-vCp&I$60qil z;w1R-u`#;A?-&^Hz;BkD8>-@i65ZV$6#*lA60~aD7ii{)oj4|_ixE?o#@v{K8tHys zF3ZR|M;M&(6@I3pqvITYbMRBZf)ibN7S7I6!&K=$dckis>`TyM4m8kVrhV3{c}Upw zkye6g)>CN=7k?b8iBJ3VTiw%|eD8?ZlYs#{a5d3lq8}jAEIRw1$rsQGolMEVPR|xo zWRp9^`hT&pd&JNT43WA-?J*(?uRhVi>9DqXUR9M{Wo0FJ3k%tYNxnJ-^9Rm!K4OUrrWhfC8B#K6i2@aYHyAE)SZ49^J;`fY$nh^yOaV+`Z zKH>zN-%FNA-9P>e?682UyQ!{j7(G97T-pK-8W7ONw)pwYPIKzQC&0WO>Q2x`H7IvY zSbl;lIGXt_@+(0J2GKFZqMEq0PcJ~IR{9B4H|W^*f>aO?w0{Fy1P0cw*tYKO#9zPe z2J_W$Klqdhxd(t^ujt76GM6bm)&KcPC7ARoa?J`M97{TN|3~da-SOs)UuT!p-V3Co zwl72Lrx%tRps(S$oIK|kNYu}tnesLz>F%m-cwQghOSB(1waBWlr&Kl0E}Fn9R-_x7 zezNYHvr!nbIR^E8I=$;%`IOZqrAv6WL)l;(0r*1WawoKdKsV1uClK4i0F7&xE)nE- zyI;9X!xt;A#{@0hG-$A)vN+ifh-V+{W*_y}oOe4)pRYjdYq>ijjlo{1yS=*5<4LWC zcaYmMjl`*Y#{t`1E1eLaztw$v>oLM)b@h5dlgK_vm)?=qlIIRqP4|CHn1^D~^#%#; zX`4x{%(e{XPQN9F>V5|T%@j^pOBT+WXTn7B?>=(}AXvG!IU_+05u5M5h zGCQiwe@cbOh{y+;l0!d$AGFG(EHoWer<4v!H0aTfjg6&u{L3d~a9JNxN;&~YOUJ8p zFSc29{P!4vPAcIizDh|=JV>r-_^C;`o6I)PO{BMLyX31X0h}gF#xvm94KNOob>l>6 zrd{0prL$-aHV+(2#YmU}tf{|WTw(16SmVTp%715_`_0p(Hew(P`fkauB2hySx_Bt~ zsc%Ckty4DN%LKR!3=l|<{_PdU>S&!x2X&qaumLUkbnd~XHL}}WDMtp4Wb}C(Z~wr$ zy%J^7o-F`?X9(ib2VK4rJUBD4D=;*IKMr)ypym zTxbR8E+k~Vfl6a=uNi}uTmY4JFKKFl*0yAJ;zfzsSS+kC6t+-YE4H&-Yt7=))nAg( zIQim(V_}RDo7`zRCYW)Mo%6~gBIVOL2|Xw=zj+{g$zxZ4>d}XSz@96@?70-4m<9;f1S@xu ztsqS~5cpT%Q4&97>ZenmMp(pDc?G04iNq%Ow1L#*kxbxARuO@=TSDp}bjR(&+;D;c z7^g2Ut-IQEI@8->f4qjMcARikijzj#^SMOrJ} z8;MbbnmE8UZ&YJZ@Wib9&go&A`EfR4D4UjIDrRpKwo7EB^28%Lxr0~VP^_!B5OjjgygX~MXJ_BjkBDLG9#%`i`rw7^&=B>(;gTqG3!8%DSjX@ z>W?#hQK@^IGhkZL5C!CagrbM0K3cT}wWH(c281Xa6V&TpfAaCJa8?k;UxR;*1P%Mh zM%=Loohd*)q4V+10+)xRLa&Ma6_IYE_)tYODg5>o>j{TWqK+5fhB=5LX4vkjRKBze za48W=Gacky!`qmwz|rD_3VzyB9f0C<|3GmbP$yCurp5J9IRwvvqjfnDaUy=7s~3sB zH0|wdbw7HhXiYud!ofUS5z~6!I3n7^kMG$Rd_&?Iz5?q_@JZD$2z64$T zGNItc9}vF#Fp&VlPO1W93SbvRy@=!l^u%-DwM?_A_uZd?tKKPxdTBrH2V;F2eKv1g z?Ki1=pofPG9Xbb`AK067cqWI3bjZ<@wpnnFoDuwnYR`5S3tSRR$=u~5dF=T!U&C@5Q5y>3iFAUj2T)*<_+dFi`S;S zXg8IT#;ISY*S;SKzb-D!EOQlJYwA9lB@D zgiZkQ&~9D9TSF9eyN^v@gAU^tJilfLGCvtn6CyUk3qWhnNPspf=m1d`8e9TeIB`Kk zkhXLl!=)%hdJDZ@UWX!-lVKe4*k^0wt?i&j>4wnZFkebr`=d!~D#UrvRp%=s=fNEJCy)Ov2bksunho!ceFr86RQ@0uLk1B3PET{H zOH2;apkV1LwsHmED_z_GB4daQZdjl!Mp)JV>3K9b6~S zuUXVu{s|Lm)JT1e$8O^l5=v0+hVUvC#BGr@suaH^Pah!1jOY@9S=(}iE` z%$w=9%1UIkZS=$~Ecuv<90P*U9$BSAXr!HGs>zOB$2SoG%Cjm-Tq2Xke& zv}4x(^Gfu#aUI_aaygI#r}z0j)_r`UHNymY;L*udufe&=Mb3cqXFobJ6a?)?+Zv1v z9?g!uc~J|rW4b}ozNZKetFc-ddmqvYi9OWyd&Bh*b+34{+vTk%$U)324Yb3jwS$n? 
z#R);D2k=62sR^gvu%QvS7*79!lO<{Q)F#_S*CoQI2Q!3lNmT4}?c3q%lSh^kTQE4G zCd_2Va%sccAuXyJxL48=2ssW1)LJ_r9MXR`lNH|3)eyEJinlw7 zaT>+`laGOGGcNR-zroFP<${wD7ARMusR>_8QK2gfX;0nkmS_f?27Uja^bojOm!p!_ zMAyiwH!#&?gQm%apITewo}hVN$C7#JIbMq`Ap06{^%`bUG2Q^h?BZluGWy#*gX-#vJ zBAg8#g-<+3HL6EvuUVc-O1wKdXX1D*ZFF^&`shGVf3WL&D|?z`Jlgy$>x$0J%w)q+ zR2y1Hq@GZAz5ACkXQk}1A_vBYbf@e60iWs7-Jni~nnp$l4?4tlA#6Qw6T&1YvXV{4 zj}$Lf&&QqSglPhd$j1Ll!x=#;<~@G+d6YagWs2@Ph?s|&e>Oq$``IJ^ZrDr>&NBWR z&;;yLFQf0t3S(n^!TYd=eIY+IhG`7xh#xF3##9c+)bUDglV-u2;z zj*2N;KyhCTF1)mrm#4jvsIy(kOL>a;egzz2$qeY*{)alV z)IHA`@Zov%hd@3;OTxbwKi%|*k(sCM5-7{rI!A5Vjst&qzt4=mdRF585;&muwx;I9 zxX=ttzz0sVFf`G6B^F93LHpn1NTS<5ITi1w>&?sV1_tOo8f2Qo%08=58V#SJ8#e8uwN8a>(`b0kf&%FziG^?M(9;f zK~DG#O$S~iGs3y}NS%^4NgWz_^rgzQ>^Fb3L+IKAp=B+rVwgUG62c|-b*hB5_?J;) z`E1wnQZf+#4fA9-MC5}?=$W}H{~}sejx+{Q4)x8P{zDj-iC@PEFAR;h$np{7L&iA= z;W0PDcMoWNiX{@&;2wb>xVYw%kf#f+Np6*)LN=+;FK@u-+}ydB@jxaolh zM3lsvQrLCRuX+9xT?E=UjcqI>^~V&3wHi`8fmQF1Jk`z)28nYpzANhWVM3LOxan$d z-vdRf4THmAcQcGTPRx7|A#at}T9;Z5UM27{lJ-tZh?0TCPffU}m7(cY)nIWQ+3*xab}yBF5C(N`&OF@`Zg7^^0P*|#w&HKDBCl}BOOL)u z)EOsNEi+)Z5p^P@@^&PoQQ1EX*~31)`w`cr#E1eoA!dD>_N)TND=1~F#2*V5kLGd*P` z+jgxUPV#WvXX|n*MQW9|loIQrH)^w1S>cFv!0AtZb_FLtAEQSkUmRG<_d7@vWyfNE z9`S1v0UMMSUF>IBwg!h!-MBs!zIaj*rAKrqU9OVynX8oo7f(Js^KKuWvP3hpZB7G= z=a9$|yq=KBtM{&{I!VnC-L-yno=6N+WaW~RQnhkfiV<;ALqr_|5X-08`XUw6qVS;6 z7XO8k^?7?k41Nt#CO?lfJqjw8;ZE{|d0)GoxJ3gzs-{@K@0~gzD=z?hhhkh0*yUza zuN>G+)kf#s%g1B7fK%C|6{KVSb!71K?p^3gdV%O``Wai%W8{=>o2+#O%{mW+*70T* z@OQ1cArY24Z`wlp{Am_!-9z?{SW)NGvV6+N%Rc#R-neV;cdsS?`G?F2NzoV*L-#Z% zE@sbxGOs@agg>*be#axq*OIR`;IyN8y1VF`$Z;1oX2spPh`x#C7~;L$pgBeu$%N_H zVqjUi78{=#+EhKx21~mmgGgHRM2d_5z^lP)UNUesmy2e_Vcly*@i(=&wBJpR^)6|x zUub?Zmnu(nH#G048}v51KaOd@9l z)TmV>7+I=IKNNV;Um;U4T2j;m58tOu;G{k1_%l0IdKcj@jTnGX$O};(j)0eiM z*C?A!zp?==rh)F@FO>_TA2~wSr|x(B`Y}}+A%ynV%OjF~H!GJrob%XCKH(E!QsKNs zb$g?~xsJ|pUV(1U^aJ+?E|Mo`M-vFGmT-<33q+*tW#tOa*kwF&8j+put}+ZcHbqn0 zhxV~|UAlxH`O%s!nd&{WWMg#^ug$eit{QwKdh{2dT!-oU82OV*mt^k0hP=(=%&p(? zR>c;V8r&>TDn4`rzJKNqTi5=T+rI~bb=q2g!%LGNL|gwJ5m_5;MF96#S80~#P{5)e~0 zwOGHgWGy)#J{y+t=HXKQ>usklf#?rC!Lg)0HFKh?n$K{BE=*`UBst)B%-6FjeQ|3M zB& zNZKGv#S*E$%W6?i!4evb++b2+OCR$; zQm(#$4qkuNduVBBdAY~By%4ot1mzN0Z>{c}@wYd7x_%cL$TjjMA6Z+Lme%~q{_{dwMBz&>kJu1z07Cii-ZTBxB69N<^M8) z@sVVUPj`zZVhFE2?_W=YFG~^hWR-+O-PzdRDm%HAj|-wJ!~e_zvVUOglY4cr+!gG4 zy>a|^sJr+7Gj!Kl8|LYD0dV^l8hpeWy&^|nfYEX-S_+b{`>{s9zFV@ zdA-i-oa4f=+zu4E))Uy7yQHwtmQl zDsLF-c)pn}H*@yu-`(qY8KJ;b?}ICksGoVXb&|Jf+fiTd(fPFC;_q)~_wf=Gh6^x< z_kf#*%)*QK6`*_YGi!0tBDc?19hKgAp|js$qD+mNd z05VkUquif17o;@I16e*VBhRsJ?3P=)u!+poxrcP~MoniTtybj5tV8%RSi}VfY2u0u zHmY3jZs#*oQ#RgxBRI8yo3J1G69Je~?%nLT1M7uS6GlA7dcUk!(sOxg6M8=N0=*V3 zIxn&*d44ab7F6u;;&JN zw1-nnF|h>{SjQ-)NRqiV=}q2nW2CZua^ce+Y`I=_n_X$ppV8)lR1e0kB?U9BmX%#y z^3hd#^ryHBn6(FlJ@|w&CuK5+W6`v>*+2bRY_^-wt1QMP)A^_&e|R=#7W7V&jv9`! 
[GIT binary patch for res/app_icons/blender.png omitted: 51122 bytes of base85-encoded literal data]

From 444071e2d05f36dcc467cc76b7cad4172017e3a4 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 10:22:54 +0100
Subject: [PATCH 002/195] fix(nuke): not necessary to do it on all write nodes

---
 pype/nuke/lib.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 157af9019d..0079a23266 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -105,6 +105,10 @@ def writes_version_sync():
 
     for each in nuke.allNodes():
         if each.Class() == 'Write':
+            # check if the node is avalon tracked
+            if "AvalonTab" not in each.knobs():
+                continue
+
             avalon_knob_data = avalon.nuke.get_avalon_knob_data(
                 each, ['avalon:', 'ak:'])
 

From 74b1be0da35fc160094eda38600ca9e5f1ae6b50 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 13:17:48 +0100
Subject: [PATCH 003/195] fix(nuke): code is redundant

---
 pype/plugins/nuke/create/create_write.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py
index a8c9c932da..c885631794 100644
--- a/pype/plugins/nuke/create/create_write.py
+++ b/pype/plugins/nuke/create/create_write.py
@@ -24,8 +24,6 @@ class CreateWriteRender(plugin.PypeCreator):
     def __init__(self, *args, **kwargs):
         super(CreateWriteRender, self).__init__(*args, **kwargs)
 
-        self.name = self.data["subset"]
-
         data = OrderedDict()
 
         data["family"] = self.family

From 3fbe8b6f455009177dc38d141fa11240e1708a16 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 18:09:22 +0100
Subject: [PATCH 004/195] feat(nuke): update head info in create backdrop

---
 pype/plugins/nuke/create/create_backdrop.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py
index b5600e8b37..237f9ac8f0 100644
--- a/pype/plugins/nuke/create/create_backdrop.py
+++ b/pype/plugins/nuke/create/create_backdrop.py
@@ -4,13 +4,12 @@ from avalon.nuke.pipeline import Creator
 
 class CreateBackdrop(Creator):
     """Add Publishable Backdrop"""
 
-    name = "backdrop"
-    label = "Backdrop"
-    family = "group"
-    icon = "cube"
+    name = "nukenodes"
+    label = "Create Backdrop"
+    family = "nukenodes"
+    icon = "file-archive-o"
     defaults = ["Main"]
 
     def __init__(self, *args, **kwargs):
        super(CreateBackdrop, self).__init__(*args, **kwargs)
-        return

From df41c92f44cf6b5f434c9d8fb7c0378f4b43d4ae Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 19:06:00 +0100
Subject: [PATCH 005/195] feat(nuke): include only nodes with subset knob

---
 pype/plugins/nuke/publish/collect_instances.py | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index 2500f3fca5..f83f4f5e9a 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -23,15 +23,13 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
 
         instances = []
 
-        # creating instances per write node
-        self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
-        for node in nuke.allNodes():
-            try:
-                if node["disable"].value():
-                    continue
-            except Exception as E:
-                self.log.warning(E)
-                continue
+        # gets only nodes with subset knob
+        nodes = [n for n in nuke.allNodes()
+                 if get_avalon_knob_data(n,
+                                         ["avalon:", "ak:"]).get("subset")]
 
+        # creating instances per write node
+        for node in nodes:
             # get data from avalon knob
             self.log.debug("node[name]: {}".format(node['name'].value()))
             avalon_knob_data = get_avalon_knob_data(node)

From 2bf235ece19f98bcbe8b932946cc6ea58f91214b Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 19:07:04 +0100
Subject: [PATCH 006/195] clean(nuke): remove unused lines

---
 pype/plugins/nuke/publish/collect_instances.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index f83f4f5e9a..c901d2c161 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -18,10 +18,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
 
         asset_data = io.find_one({"type": "asset",
                                   "name": api.Session["AVALON_ASSET"]})
-        self.log.debug("asset_data: {}".format(asset_data["data"]))
 
         instances = []
-        # creating instances per write node
 
         # gets only nodes with subset knob
         nodes = [n for n in nuke.allNodes()

From a87d7cf054a088df41d907e5ce8462802d3a0b74 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 19:08:34 +0100
Subject: [PATCH 007/195] feat(nuke): fixing family/class detection

- render families only for render group node
- any group will be accepted too
- disabled nodes are not included

---
 .../plugins/nuke/publish/collect_instances.py | 40 ++++++++++++-------
 1 file changed, 25 insertions(+), 15 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index c901d2c161..ea8c885d9a 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -40,6 +40,14 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             if avalon_knob_data["id"] != "pyblish.avalon.instance":
                 continue
 
+            # establish families
+            family = avalon_knob_data["family"]
+            families = list()
+
+            # skip disabled nodes, except backdrops ("nukenodes" family)
+            if ("nukenodes" not in family) and (node["disable"].value()):
+                continue
+
             subset = avalon_knob_data.get(
                 "subset", None) or node["name"].value()
 
             # Add all nodes in group instances.
             if node.Class() == "Group":
+                # only alter families for render family
+                if ("render" in family):
+                    # add families from the avalon knob
+                    families.append(avalon_knob_data["families"])
+                    if node["render"].value():
+                        self.log.info("flagged for render")
+                        add_family = "render.local"
+                        # dealing with local/farm rendering
+                        if node["render_farm"].value():
+                            self.log.info("adding render farm family")
+                            add_family = "render.farm"
+                            instance.data["transfer"] = False
+                        families.append(add_family)
+                else:
+                    # add family into families
+                    families.insert(0, family)
+
                 node.begin()
                 for i in nuke.allNodes():
                     instance.append(i)
                 node.end()
 
-            family = avalon_knob_data["family"]
-            families = [avalon_knob_data["families"]]
-            if node["render"].value():
-                self.log.info("flagged for render")
-                add_family = "render.local"
-                # dealing with local/farm rendering
-                if node["render_farm"].value():
-                    self.log.info("adding render farm family")
-                    add_family = "render.farm"
-                    instance.data["transfer"] = False
-                families.append(add_family)
-            else:
-                # add family into families
-                families.insert(0, family)
-
             instance.data.update({
                 "subset": subset,
                 "asset": os.environ["AVALON_ASSET"],

From c4292d3add70c3ea1353a78afea4ca82b6e4e925 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 21 Nov 2019 19:56:12 +0100
Subject: [PATCH 008/195] feat(nuke): adding collect backdrop plugin

---
 .../plugins/nuke/publish/collect_nukenodes.py | 34 +++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 pype/plugins/nuke/publish/collect_nukenodes.py

diff --git a/pype/plugins/nuke/publish/collect_nukenodes.py b/pype/plugins/nuke/publish/collect_nukenodes.py
new file mode 100644
index 0000000000..ba5a28c44a
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_nukenodes.py
@@ -0,0 +1,34 @@
+import pyblish.api
+import nuke
+
+
+class CollectBackdrops(pyblish.api.InstancePlugin):
+    """Collect Backdrop instance from rendered frames
+    """
+
+    order = pyblish.api.CollectorOrder + 0.3
+    label = "Collect Backdrop"
+    hosts = ["nuke"]
+    families = ["nukenodes"]
+
+    def process(self, instance):
+
+        bckn = instance[0]
+
+        left = bckn.xpos()
+        top = bckn.ypos()
+        right = left + bckn['bdwidth'].value()
+        bottom = top + bckn['bdheight'].value()
+
+        inNodes = []
+        for node in nuke.allNodes():
+            if node.Class() == "Viewer":
+                continue
+
+            if (node.xpos() > left) \
+                    and (node.xpos() + node.screenWidth() < right) \
+                    and (node.ypos() > top) \
+                    and (node.ypos() + node.screenHeight() < bottom):
+                inNodes.append(node)
+
+        self.log.info("Backdrop content collected: `{}`".format(inNodes))
+        self.log.info("Backdrop instance collected: `{}`".format(instance))

From a2f81fbdb02593bd216c05e531952a5c3238e6a2 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 22 Nov 2019 11:50:31 +0100
Subject: [PATCH 009/195] feat(nuke): adding label to collect backdrop

---
 ...llect_nukenodes.py => collect_backdrop.py} | 21 ++++++++++++++-----
 1 file changed, 16 insertions(+), 5 deletions(-)
 rename pype/plugins/nuke/publish/{collect_nukenodes.py => collect_backdrop.py} (64%)

diff --git a/pype/plugins/nuke/publish/collect_nukenodes.py b/pype/plugins/nuke/publish/collect_backdrop.py
similarity index 64%
rename from pype/plugins/nuke/publish/collect_nukenodes.py
rename to pype/plugins/nuke/publish/collect_backdrop.py
index ba5a28c44a..d88a702a84 100644
--- a/pype/plugins/nuke/publish/collect_nukenodes.py
+++ b/pype/plugins/nuke/publish/collect_backdrop.py
@@ -1,11 +1,13 @@
 import pyblish.api
 import nuke
 
+
+@pyblish.api.log
 class CollectBackdrops(pyblish.api.InstancePlugin):
"""Collect Backdrop instance from rendered frames + """Collect Backdrop node instance and its content """ - order = pyblish.api.CollectorOrder + 0.3 + order = pyblish.api.CollectorOrder + 0.22 label = "Collect Backdrop" hosts = ["nuke"] families = ["nukenodes"] @@ -14,21 +16,30 @@ class CollectBackdrops(pyblish.api.InstancePlugin): bckn = instance[0] + # define size of the backdrop left = bckn.xpos() top = bckn.ypos() right = left + bckn['bdwidth'].value() bottom = top + bckn['bdheight'].value() - inNodes = [] + # iterate all nodes for node in nuke.allNodes(): + + # exclude viewer if node.Class() == "Viewer": continue + # find all related nodes if (node.xpos() > left) \ and (node.xpos() + node.screenWidth() < right) \ and (node.ypos() > top) \ and (node.ypos() + node.screenHeight() < bottom): - inNodes.append(node) - self.log.info("Backdrop content collected: `{}`".format(inNodes)) + # add contained nodes to instance's node list + instance.append(node) + + instance.data["label"] = "{0} ({1} nodes)".format( + bckn.name(), len(instance)-1) + + self.log.info("Backdrop content collected: `{}`".format(instance[:])) self.log.info("Backdrop instance collected: `{}`".format(instance)) From 351c7ed95f4ef71d1d434df04126baab26c2411f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 22 Nov 2019 11:51:12 +0100 Subject: [PATCH 010/195] feat(nuke): adding validate backdrop checking if there are nodes above backdrop --- .../plugins/nuke/publish/validate_backdrop.py | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 pype/plugins/nuke/publish/validate_backdrop.py diff --git a/pype/plugins/nuke/publish/validate_backdrop.py b/pype/plugins/nuke/publish/validate_backdrop.py new file mode 100644 index 0000000000..59c4e96b37 --- /dev/null +++ b/pype/plugins/nuke/publish/validate_backdrop.py @@ -0,0 +1,64 @@ +import pyblish +from avalon.nuke import lib as anlib +import nuke + + +class SelectCenterInNodeGraph(pyblish.api.Action): + """ + Centering failed instance node in node grap + """ + + label = "Center node in node graph" + icon = "wrench" + on = "failed" + + def process(self, context, plugin): + + # Get the errored instances + failed = [] + for result in context.data["results"]: + if (result["error"] is not None and result["instance"] is not None + and result["instance"] not in failed): + failed.append(result["instance"]) + + # Apply pyblish.logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(failed, plugin) + + all_xC = list() + all_yC = list() + + # maintain selection + with anlib.maintained_selection(): + # collect all failed nodes xpos and ypos + for instance in instances: + bdn = instance[0] + xC = bdn.xpos() + bdn.screenWidth()/2 + yC = bdn.ypos() + bdn.screenHeight()/2 + + all_xC.append(xC) + all_yC.append(yC) + + self.log.info("all_xC: `{}`".format(all_xC)) + self.log.info("all_yC: `{}`".format(all_yC)) + + # zoom to nodes in node graph + nuke.zoom(2, [min(all_xC), min(all_yC)]) + + +@pyblish.api.log +class ValidateBackdrop(pyblish.api.InstancePlugin): + """Validate amount of nodes on backdrop node in case user + forgoten to add nodes above the publishing backdrop node""" + + order = pyblish.api.ValidatorOrder + optional = True + families = ["nukenodes"] + label = "Validate Backdrop" + hosts = ["nuke"] + actions = [SelectCenterInNodeGraph] + + def process(self, instance): + + msg = "No content on backdrop node: \"{}\"".format( + instance.data["name"]) + assert len(instance) > 1, msg From 443f47d240637322a9ea80c8077c78ffce9af2a6 Mon Sep 
From 443f47d240637322a9ea80c8077c78ffce9af2a6 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 23 Nov 2019 23:43:48 +0100
Subject: [PATCH 011/195] feat(nuke): adding `nukenodes` family

---
 pype/nuke/__init__.py                        | 3 ++-
 pype/plugins/global/publish/integrate_new.py | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py
index b7dbf69510..a7e63bf06d 100644
--- a/pype/nuke/__init__.py
+++ b/pype/nuke/__init__.py
@@ -112,7 +112,8 @@ def install():
     # Disable all families except for the ones we explicitly want to see
     family_states = [
         "write",
-        "review"
+        "review",
+        "nukenodes"
     ]
 
     avalon.data["familiesStateDefault"] = False

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 64f6dd5015..10df19b953 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -70,7 +70,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "lut",
                 "audio",
                 "yetiRig",
-                "yeticache"
+                "yeticache",
+                "nukenodes"
                 ]
     exclude_families = ["clip"]

From d21e98aea977303b25342105b45476003a936647 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 23 Nov 2019 23:44:43 +0100
Subject: [PATCH 012/195] feat(nuke): adding method for getting dependency for
 list of nodes

---
 pype/nuke/lib.py | 38 +++++++++++++++++++++++++++++++++++++-
 1 file changed, 37 insertions(+), 1 deletion(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 0079a23266..816a7d5116 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -108,7 +108,7 @@ def writes_version_sync():
             # check if the node is avalon tracked
             if "AvalonTab" not in each.knobs():
                 continue
-
+
             avalon_knob_data = avalon.nuke.get_avalon_knob_data(
                 each, ['avalon:', 'ak:'])
 
@@ -1194,3 +1194,39 @@ class BuildWorkfile(WorkfileSettings):
 
     def position_up(self, multiply=1):
         self.ypos -= (self.ypos_size * multiply) + self.ypos_gap
+
+
+def get_dependent_nodes(nodes):
+    """Get all dependent nodes connected to the list of nodes.
+
+    Looks for connections pointing outside of the given list of nodes.
+
+    Arguments:
+        nodes (list): list of nuke.Node objects
+
+    Returns:
+        connections_in: dictionary of nodes and their outside dependencies
+        connections_out: dictionary of nodes and their single outside dependent
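+
+    Example (hypothetical node names; `merge` is in `nodes`, `read_out`
+    feeds its input 0 from outside and `grade_out` reads its output):
+
+        >>> ins, outs = get_dependent_nodes([merge])
+        >>> ins    # {merge: [(0, read_out)]}
+        >>> outs   # {merge: grade_out}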
+    """
+
+    connections_in = dict()
+    connections_out = dict()
+    node_names = [n.name() for n in nodes]
+    for node in nodes:
+        inputs = node.dependencies()
+        outputs = node.dependent()
+        # collect all inputs outside
+        test_in = [(i, n) for i, n in enumerate(inputs)
+                   if n.name() not in node_names]
+        if test_in:
+            connections_in.update({
+                node: test_in
+            })
+        # collect all outputs outside
+        test_out = [i for i in outputs if i.name() not in node_names]
+        if test_out:
+            # only one dependent node is allowed
+            connections_out.update({
+                node: test_out[-1]
+            })
+
+    return connections_in, connections_out

From 641406c08846603cc829fa0669c00f35f8a79b81 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 23 Nov 2019 23:45:37 +0100
Subject: [PATCH 013/195] feat(nuke): adding `nukenodes` family for loading
 precomps

---
 pype/plugins/nuke/load/load_script_precomp.py | 21 +++++++++----------
 1 file changed, 10 insertions(+), 11 deletions(-)

diff --git a/pype/plugins/nuke/load/load_script_precomp.py b/pype/plugins/nuke/load/load_script_precomp.py
index e84e23a890..310157f099 100644
--- a/pype/plugins/nuke/load/load_script_precomp.py
+++ b/pype/plugins/nuke/load/load_script_precomp.py
@@ -7,7 +7,7 @@ class LinkAsGroup(api.Loader):
     """Copy the published file to be pasted at the desired location"""
 
     representations = ["nk"]
-    families = ["workfile"]
+    families = ["workfile", "nukenodes"]
 
     label = "Load Precomp"
     order = 0
@@ -63,8 +63,6 @@ class LinkAsGroup(api.Loader):
         colorspace = context["version"]["data"].get("colorspace", None)
         self.log.info("colorspace: {}\n".format(colorspace))
 
-        # ['version', 'file', 'reading', 'output', 'useOutput']
-
         P["name"].setValue("{}_{}".format(name, namespace))
         P["useOutput"].setValue(True)
 
@@ -74,14 +72,15 @@ class LinkAsGroup(api.Loader):
                   if n.Class() == "Group"
                   if get_avalon_knob_data(n)]
 
-        # create panel for selecting output
-        panel_choices = " ".join(writes)
-        panel_label = "Select write node for output"
-        p = nuke.Panel("Select Write Node")
-        p.addEnumerationPulldown(
-            panel_label, panel_choices)
-        p.show()
-        P["output"].setValue(p.value(panel_label))
+        if writes:
+            # create panel for selecting output
+            panel_choices = " ".join(writes)
+            panel_label = "Select write node for output"
+            p = nuke.Panel("Select Write Node")
+            p.addEnumerationPulldown(
+                panel_label, panel_choices)
+            p.show()
+            P["output"].setValue(p.value(panel_label))
 
         P["tile_color"].setValue(0xff0ff0ff)

From c8a1bd151f9d23c3a432d86dd58b54db97af0f77 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sat, 23 Nov 2019 23:46:29 +0100
Subject: [PATCH 014/195] feat(nuke): collecting | validating | extracting
 backdrop

---
 pype/plugins/nuke/publish/collect_backdrop.py |  41 ++++++-
 pype/plugins/nuke/publish/extract_backdrop.py | 103 ++++++++++++++++++
 .../plugins/nuke/publish/validate_backdrop.py |   9 +-
 3 files changed, 150 insertions(+), 3 deletions(-)
 create mode 100644 pype/plugins/nuke/publish/extract_backdrop.py

diff --git a/pype/plugins/nuke/publish/collect_backdrop.py b/pype/plugins/nuke/publish/collect_backdrop.py
index d88a702a84..39402f5352 100644
--- a/pype/plugins/nuke/publish/collect_backdrop.py
+++ b/pype/plugins/nuke/publish/collect_backdrop.py
@@ -1,7 +1,8 @@
 import pyblish.api
+import pype.api as pype
+from pype.nuke import lib as pnlib
 import nuke
 
-
"""Collect Backdrop node instance and its content @@ -38,8 +39,46 @@ class CollectBackdrops(pyblish.api.InstancePlugin): # add contained nodes to instance's node list instance.append(node) + # get all connections from outside of backdrop + nodes = instance[1:] + connections_in, connections_out = pnlib.get_dependent_nodes(nodes) + instance.data["connections_in"] = connections_in + instance.data["connections_out"] = connections_out + + # make label nicer instance.data["label"] = "{0} ({1} nodes)".format( bckn.name(), len(instance)-1) + instance.data["families"].append(instance.data["family"]) + + # Get frame range + handle_start = instance.context.data["handleStart"] + handle_end = instance.context.data["handleEnd"] + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) + + # get version + version = pype.get_version_from_path(nuke.root().name()) + instance.data['version'] = version + + # Add version data to instance + version_data = { + "handles": handle_start, + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "version": int(version), + "families": [instance.data["family"]] + instance.data["families"], + "subset": instance.data["subset"], + "fps": instance.context.data["fps"] + } + + instance.data.update({ + "versionData": version_data, + "frameStart": first_frame, + "frameEnd": last_frame, + "subsetGroup": "backdrops" + }) self.log.info("Backdrop content collected: `{}`".format(instance[:])) self.log.info("Backdrop instance collected: `{}`".format(instance)) diff --git a/pype/plugins/nuke/publish/extract_backdrop.py b/pype/plugins/nuke/publish/extract_backdrop.py new file mode 100644 index 0000000000..7b01b5deac --- /dev/null +++ b/pype/plugins/nuke/publish/extract_backdrop.py @@ -0,0 +1,103 @@ +import pyblish.api +from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib +import nuke +import os +import pype +reload(pnlib) + +class ExtractBackdropNode(pype.api.Extractor): + """Extracting content of backdrop nodes + + Will create nuke script only with containing nodes. + Also it will solve Input and Output nodes. 
+
+    """
+
+    order = pyblish.api.ExtractorOrder
+    label = "Extract Backdrop"
+    hosts = ["nuke"]
+    families = ["nukenodes"]
+
+    def process(self, instance):
+        tmp_nodes = list()
+        nodes = instance[1:]
+        # Define extract output file path
+        stagingdir = self.staging_dir(instance)
+        filename = "{0}.nk".format(instance.name)
+        path = os.path.join(stagingdir, filename)
+
+        # maintain selection
+        with anlib.maintained_selection():
+            # all connections outside of backdrop
+            connections_in = instance.data["connections_in"]
+            connections_out = instance.data["connections_out"]
+            self.log.debug("_ connections_in: `{}`".format(connections_in))
+            self.log.debug("_ connections_out: `{}`".format(connections_out))
+
+            # create Input nodes named after the nodes they replace (*_INP)
+            for n, inputs in connections_in.items():
+                for i, input in inputs:
+                    inpn = nuke.createNode("Input")
+                    inpn["name"].setValue("{}_{}_INP".format(n.name(), i))
+                    n.setInput(i, inpn)
+                    inpn.setXYpos(input.xpos(), input.ypos())
+                    nodes.append(inpn)
+                    tmp_nodes.append(inpn)
+
+            anlib.reset_selection()
+
+            # connect output node
+            for n, output in connections_out.items():
+                opn = nuke.createNode("Output")
+                self.log.info(n.name())
+                self.log.info(output.name())
+                output.setInput(
+                    next((i for i, d in enumerate(output.dependencies())
+                          if d.name() in n.name()), 0), opn)
+                opn.setInput(0, n)
+                opn.autoplace()
+                nodes.append(opn)
+                tmp_nodes.append(opn)
+                anlib.reset_selection()
+
+            # select nodes to copy
+            anlib.reset_selection()
+            anlib.select_nodes(nodes)
+            # create tmp nk file
+            # save file to the path
+            nuke.nodeCopy(path)
+
+            # Clean up
+            for tn in tmp_nodes:
+                nuke.delete(tn)
+
+            # restore original connections
+            # reconnect input node
+            for n, inputs in connections_in.items():
+                for i, input in inputs:
+                    n.setInput(i, input)
+
+            # reconnect output node
+            for n, output in connections_out.items():
+                output.setInput(
+                    next((i for i, d in enumerate(output.dependencies())
+                          if d.name() in n.name()), 0), n)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        # create representation
+        representation = {
+            'name': 'nk',
+            'ext': 'nk',
+            'files': filename,
+            "stagingDir": stagingdir
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.info("Extracted instance '{}' to: {}".format(
+            instance.name, path))
+
+        self.log.info("Data {}".format(
+            instance.data))

diff --git a/pype/plugins/nuke/publish/validate_backdrop.py b/pype/plugins/nuke/publish/validate_backdrop.py
index 59c4e96b37..cf2d56087d 100644
--- a/pype/plugins/nuke/publish/validate_backdrop.py
+++ b/pype/plugins/nuke/publish/validate_backdrop.py
@@ -58,7 +58,12 @@ class ValidateBackdrop(pyblish.api.InstancePlugin):
     actions = [SelectCenterInNodeGraph]
 
     def process(self, instance):
+        connections_out = instance.data["connections_out"]
 
-        msg = "No content on backdrop node: \"{}\"".format(
+        msg_multiple_outputs = "Only one outgoing connection from \"{}\" is allowed".format(
             instance.data["name"])
-        assert len(instance) > 1, msg
+        assert len(connections_out.keys()) <= 1, msg_multiple_outputs
+
+        msg_no_content = "No content on backdrop node: \"{}\"".format(
+            instance.data["name"])
+        assert len(instance) > 1, msg_no_content

From fc6733f582c427a4d197a518a683f7a02c6811ff Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sun, 24 Nov 2019 20:35:55 +0100
Subject: [PATCH 015/195] feat(nuke): adding utils.py for nuke specific
 operations

- bake_gizmos_recursively
- get_node_outputs
- is_node_gizmo
- gizmo_is_nuke_default

---
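Note: a minimal usage sketch for the recursive gizmo baking added below,
assuming it is run inside Nuke, where `nuke.Root()` is available:

    from pype.nuke.utils import bake_gizmos_recursively

    # convert every non-default gizmo in the script into a plain Group,
    # starting at the root node graph and recursing into nested groups
    bake_gizmos_recursively()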
 pype/nuke/utils.py | 64 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 64 insertions(+)
 create mode 100644 pype/nuke/utils.py

diff --git a/pype/nuke/utils.py b/pype/nuke/utils.py
new file mode 100644
index 0000000000..5250e80f25
--- /dev/null
+++ b/pype/nuke/utils.py
@@ -0,0 +1,64 @@
+import os
+import nuke
+from avalon.nuke import lib as anlib
+
+
+def get_node_outputs(node):
+    '''
+    Return a dictionary of the nodes and pipes that are connected to node
+    '''
+    depDict = {}
+    dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS)
+    for d in dependencies:
+        depDict[d] = []
+        for i in range(d.inputs()):
+            if d.input(i) == node:
+                depDict[d].append(i)
+    return depDict
+
+
+def is_node_gizmo(node):
+    '''
+    return True if node is gizmo
+    '''
+    return 'gizmo_file' in node.knobs()
+
+
+def gizmo_is_nuke_default(gizmo):
+    '''Check if gizmo is in default install path'''
+    plugDir = os.path.join(os.path.dirname(
+        nuke.env['ExecutablePath']), 'plugins')
+    return gizmo.filename().startswith(plugDir)
+
+
+def bake_gizmos_recursively(in_group=nuke.Root()):
+    """Convert gizmos to groups recursively
+
+    Arguments:
+        in_group (nuke.Node)[optional]: group node or all nodes
+    """
+    # preserve selection after all is done
+    with anlib.maintained_selection():
+        # jump to the group
+        with in_group:
+            for node in nuke.allNodes():
+                if is_node_gizmo(node) and not gizmo_is_nuke_default(node):
+                    with node:
+                        outputs = get_node_outputs(node)
+                        group = node.makeGroup()
+                        # Reconnect inputs and outputs if any
+                        if outputs:
+                            for n, pipes in outputs.items():
+                                for i in pipes:
+                                    n.setInput(i, group)
+                        for i in range(node.inputs()):
+                            group.setInput(i, node.input(i))
+                        # set node position and name
+                        group.setXYpos(node.xpos(), node.ypos())
+                        name = node.name()
+                        nuke.delete(node)
+                        group.setName(name)
+                        node = group
+
+                if node.Class() == "Group":
+                    bake_gizmos_recursively(node)

From fe7e3580d6b8d01b65b8d922b09c1e5d1205b864 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Sun, 24 Nov 2019 20:41:21 +0100
Subject: [PATCH 016/195] fix(nuke): converting camelcase to snakecase

---
 pype/nuke/utils.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/pype/nuke/utils.py b/pype/nuke/utils.py
index 5250e80f25..7583221696 100644
--- a/pype/nuke/utils.py
+++ b/pype/nuke/utils.py
@@ -7,14 +7,14 @@ def get_node_outputs(node):
     '''
     Return a dictionary of the nodes and pipes that are connected to node
     '''
-    depDict = {}
+    dep_dict = {}
     dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS)
     for d in dependencies:
-        depDict[d] = []
+        dep_dict[d] = []
         for i in range(d.inputs()):
             if d.input(i) == node:
-                depDict[d].append(i)
-    return depDict
+                dep_dict[d].append(i)
+    return dep_dict
 
 
 def is_node_gizmo(node):
@@ -26,9 +26,9 @@ def is_node_gizmo(node):
 
 def gizmo_is_nuke_default(gizmo):
     '''Check if gizmo is in default install path'''
-    plugDir = os.path.join(os.path.dirname(
+    plug_dir = os.path.join(os.path.dirname(
         nuke.env['ExecutablePath']), 'plugins')
-    return gizmo.filename().startswith(plugDir)
+    return gizmo.filename().startswith(plug_dir)

From 498ba601b689a775de521c0d72a605b434583446 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 25 Nov 2019 00:53:06 +0100
Subject: [PATCH 017/195] feat(nuke): adding CreateGizmo plugin

---
 pype/plugins/nuke/create/create_gizmo.py | 79 ++++++++++++++++++++++++
 1 file changed, 79 insertions(+)
 create mode 100644 pype/plugins/nuke/create/create_gizmo.py

diff --git a/pype/plugins/nuke/create/create_gizmo.py b/pype/plugins/nuke/create/create_gizmo.py
new file mode 100644
index 0000000000..41229862e3
--- /dev/null
+++ b/pype/plugins/nuke/create/create_gizmo.py
@@ -0,0 +1,79 @@
+from avalon.nuke.pipeline import Creator
+from avalon.nuke import lib as anlib
+import nuke
+import nukescripts
+
+class CreateGizmo(Creator):
+    """Add Publishable "gizmo" group
+
+    The name "gizmo" is used symbolically, as it is
+    a term familiar to nuke users for a group of nodes
+    distributed downstream in the workflow
+    """
+
+    name = "gizmo"
+    label = "Gizmo"
+    family = "gizmo"
+    icon = "file-archive-o"
+    defaults = ["ViewerInput", "Lut", "Effect"]
+
+    def __init__(self, *args, **kwargs):
+        super(CreateGizmo, self).__init__(*args, **kwargs)
+        self.nodes = nuke.selectedNodes()
+        self.node_color = "0x7533c1ff"
+        return
+
+    def process(self):
+        if (self.options or {}).get("useSelection"):
+            nodes = self.nodes
+            self.log.info(len(nodes))
+            if len(nodes) == 1:
+                anlib.select_nodes(nodes)
+                node = nodes[-1]
+                # check if Group node
+                if node.Class() in "Group":
+                    node["name"].setValue("{}_GZM".format(self.name))
+                    node["tile_color"].setValue(int(self.node_color, 16))
+                    return anlib.imprint(node, self.data)
+                else:
+                    nuke.message("Please select a group node "
+                                 "you wish to publish as the gizmo")
+
+            if len(nodes) >= 2:
+                anlib.select_nodes(nodes)
+                nuke.makeGroup()
+                gizmo_node = nuke.selectedNode()
+                gizmo_node["name"].setValue("{}_GZM".format(self.name))
+                gizmo_node["tile_color"].setValue(int(self.node_color, 16))
+
+                # add sticky note with guide
+                with gizmo_node:
+                    sticky = nuke.createNode("StickyNote")
+                    sticky["label"].setValue(
+                        "Add following:\n- set Input"
+                        " nodes\n- set one Output1\n"
+                        "- create User knobs on the group")
+
+                # add avalon knobs
+                return anlib.imprint(gizmo_node, self.data)
+
+            else:
+                nuke.message("Please select nodes you "
+                             "wish to add to the gizmo")
+                return
+        else:
+            with anlib.maintained_selection():
+                gizmo_node = nuke.createNode("Group")
+                gizmo_node["name"].setValue("{}_GZM".format(self.name))
+                gizmo_node["tile_color"].setValue(int(self.node_color, 16))
+
+                # add sticky note with guide
+                with gizmo_node:
+                    sticky = nuke.createNode("StickyNote")
+                    sticky["label"].setValue(
+                        "Add following:\n- add Input"
+                        " nodes\n- add one Output1\n"
+                        "- create User knobs on the group")
+
+                # add avalon knobs
+                return anlib.imprint(gizmo_node, self.data)

From e07a95480ba9fb7f0f90a2810dbf16fbf39054ff Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 25 Nov 2019 00:55:32 +0100
Subject: [PATCH 018/195] feat(nuke): adding `gizmo` family

---
 pype/nuke/__init__.py                        | 3 ++-
 pype/plugins/global/publish/integrate_new.py | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py
index b7dbf69510..aa746ea872 100644
--- a/pype/nuke/__init__.py
+++ b/pype/nuke/__init__.py
@@ -112,7 +112,8 @@ def install():
     # Disable all families except for the ones we explicitly want to see
     family_states = [
         "write",
-        "review"
+        "review",
+        "gizmo"
     ]
 
     avalon.data["familiesStateDefault"] = False

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 64f6dd5015..f1b3a69d1e 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -70,7 +70,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "lut",
                 "audio",
                 "yetiRig",
-                "yeticache"
+                "yeticache",
+                "gizmo"
                 ]
     exclude_families = ["clip"]
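
Note: the CreateGizmo creator above relies on `anlib.imprint(node, data)` to
mark the group as a publishable instance. A minimal sketch of the round trip,
assuming the avalon knob helpers behave as they are used elsewhere in this
series:

    from avalon.nuke import lib as anlib

    # writes the creator data onto the node as avalon knobs
    anlib.imprint(node, {"id": "pyblish.avalon.instance", "subset": "gizmoMain"})
    # reads it back later, e.g. in a collector
    data = anlib.get_avalon_knob_data(node)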
From 1077a02e9296635646dd3e3890561a156b8e4cab Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 25 Nov 2019 15:09:52 +0100
Subject: [PATCH 019/195] feat(nuke): adding process() to create backdrop

---
 pype/plugins/nuke/create/create_backdrop.py | 37 ++++++++++++++++++++-
 1 file changed, 36 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/nuke/create/create_backdrop.py b/pype/plugins/nuke/create/create_backdrop.py
index 237f9ac8f0..767e92b592 100644
--- a/pype/plugins/nuke/create/create_backdrop.py
+++ b/pype/plugins/nuke/create/create_backdrop.py
@@ -1,5 +1,6 @@
 from avalon.nuke.pipeline import Creator
-
+from avalon.nuke import lib as anlib
+import nuke
 
 class CreateBackdrop(Creator):
     """Add Publishable Backdrop"""
@@ -12,4 +13,38 @@ class CreateBackdrop(Creator):
 
     def __init__(self, *args, **kwargs):
         super(CreateBackdrop, self).__init__(*args, **kwargs)
+        self.nodes = nuke.selectedNodes()
+        self.node_color = "0xdfea5dff"
         return
+
+    def process(self):
+        from nukescripts import autoBackdrop
+        nodes = list()
+        if (self.options or {}).get("useSelection"):
+            nodes = self.nodes
+
+            if len(nodes) >= 1:
+                anlib.select_nodes(nodes)
+                bckd_node = autoBackdrop()
+                bckd_node["name"].setValue("{}_BDN".format(self.name))
+                bckd_node["tile_color"].setValue(int(self.node_color, 16))
+                bckd_node["note_font_size"].setValue(24)
+                bckd_node["label"].setValue("[{}]".format(self.name))
+                # add avalon knobs
+                instance = anlib.imprint(bckd_node, self.data)
+
+                return instance
+            else:
+                nuke.message("Please select nodes you "
+                             "wish to add to a container")
+                return
+        else:
+            bckd_node = autoBackdrop()
+            bckd_node["name"].setValue("{}_BDN".format(self.name))
+            bckd_node["tile_color"].setValue(int(self.node_color, 16))
+            bckd_node["note_font_size"].setValue(24)
+            bckd_node["label"].setValue("[{}]".format(self.name))
+            # add avalon knobs
+            instance = anlib.imprint(bckd_node, self.data)
+
+            return instance

From 3ed91886b49749a453951ef6795d3c6396c2581a Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 25 Nov 2019 20:53:49 +0100
Subject: [PATCH 020/195] fix(global): integrate_new

- families missing comma
- subsetGroup was not integrating properly

---
 pype/plugins/global/publish/integrate_new.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 63c22a1f46..80e11c0624 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -62,7 +62,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "render",
                 "imagesequence",
                 "review",
-                "render",
                 "rendersetup",
                 "rig",
                 "plate",
@@ -71,7 +70,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "audio",
                 "yetiRig",
                 "yeticache",
-                "nukenodes"
+                "nukenodes",
                 "gizmo"
                 ]
     exclude_families = ["clip"]
@@ -528,10 +527,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             subset["data"].update(
                 {"subsetGroup": instance.data.get("subsetGroup")}
             )
+            self.log.info("__ subset.data: {}".format(subset["data"]))
             io.update_many({
                 'type': 'subset',
                 '_id': io.ObjectId(subset["_id"])
-            }, {'$set': subset["data"]}
+            }, {'$set': {'data': subset["data"]}}
             )
 
         return subset
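
Note on the `$set` payloads above (refined once more in patch 023): in
MongoDB, `{'$set': {'data': {...}}}` replaces the entire embedded "data"
document, while a dotted path updates a single nested field, e.g.:

    # replaces the whole "data" sub-document
    {'$set': {'data': subset["data"]}}
    # touches only data.subsetGroup and leaves the rest of "data" intact
    {'$set': {'data.subsetGroup': instance.data.get('subsetGroup')}}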
From 3e2635f3b9d7abcaec7f56ea03a1d9837f5f8fc4 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 25 Nov 2019 20:54:30 +0100
Subject: [PATCH 021/195] fix(nuke): collect legacy write was overwriting
 family on all groups

---
 pype/plugins/nuke/publish/collect_legacy_write.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_legacy_write.py b/pype/plugins/nuke/publish/collect_legacy_write.py
index 74280b743a..cfb0798434 100644
--- a/pype/plugins/nuke/publish/collect_legacy_write.py
+++ b/pype/plugins/nuke/publish/collect_legacy_write.py
@@ -24,7 +24,8 @@ class CollectWriteLegacy(pyblish.api.InstancePlugin):
             self.log.info("render")
             return
 
-        instance.data.update(
-            {"family": "write.legacy",
-             "families": []}
-        )
+        if "render" in node.knobs():
+            instance.data.update(
+                {"family": "write.legacy",
+                 "families": []}
+            )

From 28a048b57d9ddf4352d85e6d322cd24b4e80f2d5 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 25 Nov 2019 21:04:27 +0100
Subject: [PATCH 022/195] feat(nuke): adding Collect | Validate | Extract
 Gizmo family

---
 pype/plugins/nuke/publish/collect_gizmo.py  | 56 ++++++++++++
 pype/plugins/nuke/publish/extract_gizmo.py  | 95 +++++++++++++++++++
 pype/plugins/nuke/publish/validate_gizmo.py | 58 +++++++++++++
 3 files changed, 209 insertions(+)
 create mode 100644 pype/plugins/nuke/publish/collect_gizmo.py
 create mode 100644 pype/plugins/nuke/publish/extract_gizmo.py
 create mode 100644 pype/plugins/nuke/publish/validate_gizmo.py

diff --git a/pype/plugins/nuke/publish/collect_gizmo.py b/pype/plugins/nuke/publish/collect_gizmo.py
new file mode 100644
index 0000000000..ada3400dfc
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_gizmo.py
@@ -0,0 +1,56 @@
+import pyblish.api
+import pype.api as pype
+import nuke
+
+
+@pyblish.api.log
+class CollectGizmo(pyblish.api.InstancePlugin):
+    """Collect Gizmo (group) node instance and its content
+    """
+
+    order = pyblish.api.CollectorOrder + 0.22
+    label = "Collect Gizmo (Group)"
+    hosts = ["nuke"]
+    families = ["gizmo"]
+
+    def process(self, instance):
+
+        grpn = instance[0]
+
+        # add family to families
+        instance.data["families"].insert(0, instance.data["family"])
+        # make label nicer
+        instance.data["label"] = "{0} ({1} nodes)".format(
+            grpn.name(), len(instance) - 1)
+
+        # Get frame range
+        handle_start = instance.context.data["handleStart"]
+        handle_end = instance.context.data["handleEnd"]
+        first_frame = int(nuke.root()["first_frame"].getValue())
+        last_frame = int(nuke.root()["last_frame"].getValue())
+
+        # get version
+        version = pype.get_version_from_path(nuke.root().name())
+        instance.data['version'] = version
+
+        # Add version data to instance
+        version_data = {
+            "handles": handle_start,
+            "handleStart": handle_start,
+            "handleEnd": handle_end,
+            "frameStart": first_frame + handle_start,
+            "frameEnd": last_frame - handle_end,
+            "version": int(version),
+            "families": [instance.data["family"]] + instance.data["families"],
+            "subset": instance.data["subset"],
+            "fps": instance.context.data["fps"]
+        }
+
+        instance.data.update({
+            "versionData": version_data,
+            "frameStart": first_frame,
+            "frameEnd": last_frame,
+            "subsetGroup": "gizmos"
+        })
+        self.log.info("Gizmo content collected: `{}`".format(instance[:]))
+        self.log.info("Gizmo instance collected: `{}`".format(instance))

diff --git a/pype/plugins/nuke/publish/extract_gizmo.py b/pype/plugins/nuke/publish/extract_gizmo.py
new file mode 100644
index 0000000000..36ef1d464c
--- /dev/null
+++ b/pype/plugins/nuke/publish/extract_gizmo.py
@@ -0,0 +1,95 @@
+import pyblish.api
+from avalon.nuke import lib as anlib
+from pype.nuke import lib as pnlib
+from pype.nuke import utils as pnutils
+import nuke
+import os
+import pype
+
+
+class ExtractGizmo(pype.api.Extractor):
+    """Extracting Gizmo (Group) node
+
+    Will create nuke script only with the Gizmo node.
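+
+    Rough flow of the extraction below: copy the group via the clipboard,
+    bake nested gizmos into plain groups, strip the avalon knobs from the
+    copy, swap names with the original and write the copy out via
+    nuke.nodeCopy(path).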
+
+    """
+
+    order = pyblish.api.ExtractorOrder
+    label = "Extract Gizmo (Group)"
+    hosts = ["nuke"]
+    families = ["gizmo"]
+
+    def process(self, instance):
+        tmp_nodes = list()
+        orig_grpn = instance[0]
+        # Define extract output file path
+        stagingdir = self.staging_dir(instance)
+        filename = "{0}.nk".format(instance.name)
+        path = os.path.join(stagingdir, filename)
+
+        # maintain selection
+        with anlib.maintained_selection():
+            orig_grpn_name = orig_grpn.name()
+            tmp_grpn_name = orig_grpn_name + "_tmp"
+            # select original group node
+            anlib.select_nodes([orig_grpn])
+
+            # copy to clipboard
+            nuke.nodeCopy("%clipboard%")
+
+            # reset selection to none
+            anlib.reset_selection()
+
+            # paste clipboard
+            nuke.nodePaste("%clipboard%")
+
+            # assign pasted node
+            copy_grpn = nuke.selectedNode()
+            copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos())
+
+            # convert gizmos to groups
+            pnutils.bake_gizmos_recursively(copy_grpn)
+
+            # remove avalon knobs
+            knobs = copy_grpn.knobs()
+            avalon_knobs = [k for k in knobs.keys()
+                            for ak in ["avalon:", "ak:"]
+                            if ak in k]
+            avalon_knobs.append("publish")
+            for ak in avalon_knobs:
+                copy_grpn.removeKnob(knobs[ak])
+
+            # add to temporary nodes
+            tmp_nodes.append(copy_grpn)
+
+            # swap names
+            orig_grpn.setName(tmp_grpn_name)
+            copy_grpn.setName(orig_grpn_name)
+
+            # create tmp nk file
+            # save file to the path
+            nuke.nodeCopy(path)
+
+            # Clean up
+            for tn in tmp_nodes:
+                nuke.delete(tn)
+
+            # rename back to original
+            orig_grpn.setName(orig_grpn_name)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        # create representation
+        representation = {
+            'name': 'gizmo',
+            'ext': 'nk',
+            'files': filename,
+            "stagingDir": stagingdir
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.info("Extracted instance '{}' to: {}".format(
+            instance.name, path))
+
+        self.log.info("Data {}".format(
+            instance.data))

diff --git a/pype/plugins/nuke/publish/validate_gizmo.py b/pype/plugins/nuke/publish/validate_gizmo.py
new file mode 100644
index 0000000000..9c94ea88ef
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_gizmo.py
@@ -0,0 +1,58 @@
+import pyblish
+from avalon.nuke import lib as anlib
+import nuke
+
+
+class OpenFailedGroupNode(pyblish.api.Action):
+    """
+    Open the failed gizmo (group) node in the node graph
+    """
+
+    label = "Open Gizmo in Node Graph"
+    icon = "wrench"
+    on = "failed"
+
+    def process(self, context, plugin):
+
+        # Get the errored instances
+        failed = []
+        for result in context.data["results"]:
+            if (result["error"] is not None and result["instance"] is not None
+                    and result["instance"] not in failed):
+                failed.append(result["instance"])
+
+        # Apply pyblish.logic to get the instances for the plug-in
+        instances = pyblish.api.instances_by_plugin(failed, plugin)
+
+        # maintain selection
+        with anlib.maintained_selection():
+            # show each failed group node in the node graph
+            for instance in instances:
+                grpn = instance[0]
+                nuke.showDag(grpn)
+
+
+@pyblish.api.log
+class ValidateGizmo(pyblish.api.InstancePlugin):
+    """Validate amount of output nodes in gizmo (group) node"""
+
+    order = pyblish.api.ValidatorOrder
+    optional = True
+    families = ["gizmo"]
+    label = "Validate Gizmo (Group)"
+    hosts = ["nuke"]
+    actions = [OpenFailedGroupNode]
+
+    def process(self, instance):
+        grpn = instance[0]
+
+        with grpn:
+            connections_out = nuke.allNodes('Output')
+            msg_multiple_outputs = ("Only one outgoing connection from "
+                                    "\"{}\" is allowed".format(
+                                        instance.data["name"]))
+            assert len(connections_out) <= 1, msg_multiple_outputs
+
+            connections_in = nuke.allNodes('Input')
+            msg_missing_inputs = ("At least one Input node has to be used in: "
+                                  "\"{}\"".format(instance.data["name"]))
+            assert len(connections_in) >= 1, msg_missing_inputs

From 7b9530613ddefaf14f193ea5bae6721a650dcf89 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 26 Nov 2019 00:19:41 +0100
Subject: [PATCH 023/195] fix(global): integrate_new `subsetGroup` improvement

---
 pype/plugins/global/publish/integrate_new.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 80e11c0624..f82df891f2 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -524,14 +524,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         # add group if available
         if instance.data.get("subsetGroup"):
-            subset["data"].update(
-                {"subsetGroup": instance.data.get("subsetGroup")}
-            )
-            self.log.info("__ subset.data: {}".format(subset["data"]))
             io.update_many({
                 'type': 'subset',
                 '_id': io.ObjectId(subset["_id"])
-            }, {'$set': {'data': subset["data"]}}
+            }, {'$set': {'data.subsetGroup':
+                         instance.data.get('subsetGroup')}}
             )
 
         return subset

From 5fd691efe3607dec70247041a3c112893100b325 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 26 Nov 2019 00:20:04 +0100
Subject: [PATCH 024/195] feat(nuke): adding load gizmo as Input Process

---
 pype/plugins/nuke/load/load_gizmo_ip.py | 239 ++++++++++++++++++++++++
 1 file changed, 239 insertions(+)
 create mode 100644 pype/plugins/nuke/load/load_gizmo_ip.py

diff --git a/pype/plugins/nuke/load/load_gizmo_ip.py b/pype/plugins/nuke/load/load_gizmo_ip.py
new file mode 100644
index 0000000000..0d78c14214
--- /dev/null
+++ b/pype/plugins/nuke/load/load_gizmo_ip.py
@@ -0,0 +1,239 @@
+from avalon import api, style, io
+import nuke
+from pype.nuke import lib as pnlib
+from avalon.nuke import lib as anlib
+from avalon.nuke import containerise, update_container
+
+
+class LoadGizmoInputProcess(api.Loader):
+    """Load gizmo group node and set it as Input Process on the viewer"""
+
+    representations = ["gizmo"]
+    families = ["gizmo"]
+
+    label = "Load Gizmo - Input Process"
+    order = 0
+    icon = "eye"
+    color = style.colors.alert
+    node_color = "0x7533c1ff"
+
+    def load(self, context, name, namespace, data):
+        """
+        Loading function to get Gizmo as Input Process on viewer
+
+        Arguments:
+            context (dict): context of version
+            name (str): name of the version
+            namespace (str): asset name
+            data (dict): compulsory attribute > not used
+
+        Returns:
+            nuke node: containerised nuke node object
+        """
+
+        # get main variables
+        version = context['version']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        namespace = namespace or context['asset']['name']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        # prepare data for imprinting
+        # add additional metadata from the version to imprint to Avalon knob
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # getting file path
+        file = self.fname.replace("\\", "/")
+
+        # adding nodes to node graph
+        # just in case we are in group, let's jump out of it
+        nuke.endGroup()
+
+        with anlib.maintained_selection():
+            # add group from nk
+            nuke.nodePaste(file)
+
+            GN = nuke.selectedNode()
+
+            GN["name"].setValue(object_name)
+
+            # try to place it under Viewer1
+            if not self.connect_active_viewer(GN):
+                nuke.delete(GN)
+                return
+
+            return containerise(
+                node=GN,
+                name=name,
+                namespace=namespace,
+                context=context,
+                loader=self.__class__.__name__,
+                data=data_imprint)
+
+    def update(self, container, representation):
+        """Update the Loader's path
+
+        Nuke automatically tries to reset some variables when changing
+        the loader's path to a new file. These automatic changes are
+        applied to its inputs.
+        """
+
+        # get main variables
+        # Get version from io
+        version = io.find_one({
+            "type": "version",
+            "_id": representation["parent"]
+        })
+        # get corresponding node
+        GN = nuke.toNode(container['objectName'])
+
+        file = api.get_representation_path(representation).replace("\\", "/")
+        context = representation["context"]
+        name = container['name']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        namespace = container['namespace']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"representation": str(representation["_id"]),
+                        "frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # adding nodes to node graph
+        # just in case we are in group, let's jump out of it
+        nuke.endGroup()
+
+        with anlib.maintained_selection():
+            xpos = GN.xpos()
+            ypos = GN.ypos()
+            avalon_data = anlib.get_avalon_knob_data(GN)
+            nuke.delete(GN)
+            # add group from nk
+            nuke.nodePaste(file)
+
+            GN = nuke.selectedNode()
+            anlib.set_avalon_knob_data(GN, avalon_data)
+            GN.setXYpos(xpos, ypos)
+            GN["name"].setValue(object_name)
+
+        # get all versions in list
+        versions = io.find({
+            "type": "version",
+            "parent": version["parent"]
+        }).distinct('name')
+
+        max_version = max(versions)
+
+        # change color of node
+        if version.get("name") not in [max_version]:
+            GN["tile_color"].setValue(int("0xd88467ff", 16))
+        else:
+            GN["tile_color"].setValue(int(self.node_color, 16))
+
+        self.log.info("updated to version: {}".format(version.get("name")))
+
+        return update_container(GN, data_imprint)
+
+    def connect_active_viewer(self, group_node):
+        """
+        Find the active viewer, place the node under it, and add
+        the group's name into the viewer's Input Process
+
+        Arguments:
+            group_node (nuke node): nuke group node object
+
+        """
+        group_node_name = group_node["name"].value()
+
+        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
+        if len(viewer) > 0:
+            viewer = viewer[0]
+        else:
+            self.log.error("Please create Viewer node before you "
+                           "run this action again")
+            return None
+
+        # get coordinates of Viewer1
+        xpos = viewer["xpos"].value()
+        ypos = viewer["ypos"].value()
+
+        ypos += 150
+
+        viewer["ypos"].setValue(ypos)
+
+        # set coordinates to group node
+        group_node["xpos"].setValue(xpos)
+        group_node["ypos"].setValue(ypos + 50)
+
+        # add group node name to Viewer Input Process
+        viewer["input_process_node"].setValue(group_node_name)
+
+        # put backdrop under
+        pnlib.create_backdrop(label="Input Process", layer=2,
+                              nodes=[viewer, group_node], color="0x7c7faaff")
+
+        return True
+
+    def get_item(self, data, trackIndex, subTrackIndex):
+        return {key: val for key, val in data.items()
+                if subTrackIndex == val["subTrackIndex"]
+                if trackIndex == val["trackIndex"]}
+
+    def byteify(self, input):
+        """
+        Converts unicode strings to str
+        It goes through the whole dictionary
+
+        Arguments:
+            input (dict/str): input
+
+        Returns:
+            dict: with fixed values and keys
+
+        """
+
+        if isinstance(input, dict):
+            return {self.byteify(key): self.byteify(value)
+                    for key, value in input.iteritems()}
+        elif isinstance(input, list):
+            return [self.byteify(element) for element in input]
+        elif isinstance(input, unicode):
+            return input.encode('utf-8')
+        else:
+            return input
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        from avalon.nuke import viewer_update_and_undo_stop
+        node = nuke.toNode(container['objectName'])
+        with viewer_update_and_undo_stop():
+            nuke.delete(node)

From b5abaecdfe4e69e748b065a52eba308af393dd85 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 26 Nov 2019 00:20:21 +0100
Subject: [PATCH 025/195] feat(nuke): adding colorspace to version data

---
 pype/plugins/nuke/publish/collect_gizmo.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pype/plugins/nuke/publish/collect_gizmo.py b/pype/plugins/nuke/publish/collect_gizmo.py
index ada3400dfc..87f4cf8264 100644
--- a/pype/plugins/nuke/publish/collect_gizmo.py
+++ b/pype/plugins/nuke/publish/collect_gizmo.py
@@ -40,6 +40,7 @@ class CollectGizmo(pyblish.api.InstancePlugin):
             "handleEnd": handle_end,
             "frameStart": first_frame + handle_start,
             "frameEnd": last_frame - handle_end,
+            "colorspace": nuke.root().knob('workingSpaceLUT').value(),
             "version": int(version),
             "families": [instance.data["family"]] + instance.data["families"],
             "subset": instance.data["subset"],

From fde5bdf438b64c325a563053eeb4472e1928d893 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 28 Nov 2019 16:22:14 +0100
Subject: [PATCH 026/195] add ability to publish and load arnold standin
 sequence

---
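Note: the integrate_new change below formats the frame token with the
source padding rather than passing the raw integer. A minimal illustration,
assuming `src_padding_exp` holds a printf-style pattern such as "%04d":

    src_padding_exp = "%04d"
    frame_start = 1001
    src_padding_exp % frame_start   # -> "1001"; with "%05d" -> "01001"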
 pype/plugins/global/publish/integrate_new.py  |  2 +-
 pype/plugins/maya/create/create_ass.py        | 18 +++++-
 pype/plugins/maya/load/load_ass.py            | 55 ++++++++++++++--
 pype/plugins/maya/publish/extract_ass.py      | 57 ++++++++++++++---
 pype/plugins/maya/publish/extract_assproxy.py |  9 ++-
 5 files changed, 122 insertions(+), 19 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index f0619eb776..52dc3a6361 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -409,7 +409,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             }
 
             if sequence_repre and repre.get("frameStart"):
-                representation['context']['frame'] = repre.get("frameStart")
+                representation['context']['frame'] = src_padding_exp % repre.get("frameStart")
 
         self.log.debug("__ representation: {}".format(representation))
         destination_list.append(dst)

diff --git a/pype/plugins/maya/create/create_ass.py b/pype/plugins/maya/create/create_ass.py
index 84b42e9b20..6d8eda1a40 100644
--- a/pype/plugins/maya/create/create_ass.py
+++ b/pype/plugins/maya/create/create_ass.py
@@ -1,6 +1,7 @@
 from collections import OrderedDict
 
 import avalon.maya
+from pype.maya import lib
 
 from maya import cmds
 
@@ -14,10 +15,21 @@ class CreateAss(avalon.maya.Creator):
     icon = "cube"
     defaults = ['Main']
 
+    def __init__(self, *args, **kwargs):
+        super(CreateAss, self).__init__(*args, **kwargs)
+
+        # Add animation data
+        self.data.update(lib.collect_animation_data())
+
+        # Export the standin as a frame sequence
+        self.data["exportSequence"] = False
+
     def process(self):
         instance = super(CreateAss, self).process()
 
-        data = OrderedDict(**self.data)
+        # data = OrderedDict(**self.data)
+
+
         nodes = list()
@@ -30,4 +42,6 @@ class CreateAss(avalon.maya.Creator):
         assProxy = cmds.sets(name="proxy_SET", empty=True)
         cmds.sets([assContent, assProxy], forceElement=instance)
 
-        self.data = data
+        # self.log.info(data)
+        #
+        # self.data = data

diff --git a/pype/plugins/maya/load/load_ass.py b/pype/plugins/maya/load/load_ass.py
index 2960e4403e..83dd80bd4e 100644
--- a/pype/plugins/maya/load/load_ass.py
+++ b/pype/plugins/maya/load/load_ass.py
@@ -2,6 +2,7 @@ from avalon import api
 import pype.maya.plugin
 import os
 from pypeapp import config
+import clique
 
 
 class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
@@ -21,6 +22,13 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
         from avalon import maya
         import pymel.core as pm
 
+        version = context['version']
+        version_data = version.get("data", {})
+
+        self.log.info("version_data: {}\n".format(version_data))
+
+        frameStart = version_data.get("frameStart", None)
+
         try:
             family = context["representation"]["context"]["family"]
         except ValueError:
@@ -30,7 +38,24 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
         groupName = "{}:{}".format(namespace, name)
 
         path = self.fname
-        proxyPath = os.path.splitext(path)[0] + ".ma"
+        proxyPath_base = os.path.splitext(path)[0]
+
+        if frameStart is not None:
+            proxyPath_base = os.path.splitext(proxyPath_base)[0]
+
+            publish_folder = os.path.split(path)[0]
+            files_in_folder = os.listdir(publish_folder)
+            collections, remainder = clique.assemble(files_in_folder)
+
+            if collections:
+                hashes = collections[0].padding * '#'
+                coll = collections[0].format('{head}[index]{tail}')
+                filename = coll.replace('[index]', hashes)
+
+                path = os.path.join(publish_folder, filename)
+
+        proxyPath = proxyPath_base + ".ma"
+        self.log.info(proxyPath)
 
         nodes = cmds.file(proxyPath,
                           namespace=namespace,
@@ -147,6 +172,13 @@ class AssStandinLoader(api.Loader):
         import mtoa.ui.arnoldmenu
         import pymel.core as pm
 
+        version = context['version']
+        version_data = version.get("data", {})
+
+        self.log.info("version_data: {}\n".format(version_data))
+
+        frameStart = version_data.get("frameStart", None)
+
         asset = context['asset']['name']
         namespace = namespace or lib.unique_namespace(
             asset + "_",
@@ -182,6 +214,8 @@ class AssStandinLoader(api.Loader):
 
         # Set the standin filepath
         standinShape.dso.set(self.fname)
+        if frameStart is not None:
+            standinShape.useFrameExtension.set(1)
 
         nodes = [root, standin]
         self[:] = nodes
@@ -199,14 +233,23 @@ class AssStandinLoader(api.Loader):
 
         path = api.get_representation_path(representation)
 
-        # Update the standin
-        members = pm.sets(container['objectName'], query=True)
-        standins = pm.ls(members, type="AiStandIn", long=True)
+        files_in_path = os.listdir(os.path.split(path)[0])
+        sequence = 0
+        collections, remainder = clique.assemble(files_in_path)
+        if collections:
+            sequence = 1
 
-        assert len(caches) == 1, "This is a bug"
+        # Update the standin
+        standins = list()
+        members = pm.sets(container['objectName'], query=True)
+        for member in members:
+            shape = member.getShape()
+            if (shape and shape.type() == "aiStandIn"):
+                standins.append(shape)
 
         for standin in standins:
-            standin.cacheFileName.set(path)
+            standin.dso.set(path)
+            standin.useFrameExtension.set(sequence)
 
         container =
pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py index 1fed6c8dd7..71f3e0d84c 100644 --- a/pype/plugins/maya/publish/extract_ass.py +++ b/pype/plugins/maya/publish/extract_ass.py @@ -20,8 +20,11 @@ class ExtractAssStandin(pype.api.Extractor): def process(self, instance): + sequence = instance.data.get("exportSequence", False) + staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) + filenames = list() file_path = os.path.join(staging_dir, filename) # Write out .ass file @@ -29,13 +32,47 @@ class ExtractAssStandin(pype.api.Extractor): with avalon.maya.maintained_selection(): self.log.info("Writing: {}".format(instance.data["setMembers"])) cmds.select(instance.data["setMembers"], noExpand=True) - cmds.arnoldExportAss( filename=file_path, - selected=True, - asciiAss=True, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) + + if sequence: + self.log.info("Extracting ass sequence") + + # Collect the start and end including handles + start = instance.data.get("frameStart", 1) + end = instance.data.get("frameEnd", 1) + handles = instance.data.get("handles", 0) + step = instance.data.get("step", 0) + if handles: + start -= handles + end += handles + + exported_files = cmds.arnoldExportAss(filename=file_path, + selected=True, + asciiAss=True, + shadowLinks=True, + lightLinks=True, + boundingBox=True, + startFrame=start, + endFrame=end, + frameStep=step + ) + for file in exported_files: + filenames.append(os.path.split(file)[1]) + self.log.info("Exported: {}".format(filenames)) + else: + cmds.arnoldExportAss(filename=file_path, + selected=True, + asciiAss=True, + shadowLinks=True, + lightLinks=True, + boundingBox=True + ) + filenames = filename + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart" + ] + for key in optionals: + instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -43,9 +80,13 @@ class ExtractAssStandin(pype.api.Extractor): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filename, + 'files': filenames, "stagingDir": staging_dir } + + if sequence: + representation['frameStart'] = start + instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/pype/plugins/maya/publish/extract_assproxy.py b/pype/plugins/maya/publish/extract_assproxy.py index 34c3113e11..59684febe1 100644 --- a/pype/plugins/maya/publish/extract_assproxy.py +++ b/pype/plugins/maya/publish/extract_assproxy.py @@ -43,8 +43,13 @@ class ExtractAssProxy(pype.api.Extractor): # Get only the shape contents we need in such a way that we avoid # taking along intermediateObjects - members = instance.data['proxy'] - members = cmds.ls(members, + proxy = instance.data.get('proxy', None) + + if not proxy: + self.log.info("no proxy mesh") + return + + members = cmds.ls(proxy, dag=True, transforms=True, noIntermediate=True) From 99c89f38118f9d870a5df8549604a3cf97ff5416 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 3 Dec 2019 18:43:11 +0100 Subject: [PATCH 027/195] add option for start and end frame to burnins --- pype/plugins/global/publish/extract_burnin.py | 15 +++++-- pype/scripts/otio_burnin.py | 40 +++++++++++++++---- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 
b1569aaa45..457f4801cf 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -29,11 +29,16 @@ class ExtractBurnin(pype.api.Extractor): if instance.context.data.get('version'): version = "v" + str(instance.context.data['version']) + frame_start = int(instance.data.get("frameStart") or 0) + frame_end = int(instance.data.get("frameEnd") or 1) + duration = frame_end - frame_start + 1 prep_data = { "username": instance.context.data['user'], "asset": os.environ['AVALON_ASSET'], "task": os.environ['AVALON_TASK'], - "start_frame": int(instance.data["frameStart"]), + "frame_start": frame_start, + "frame_end": frame_end, + "duration": duration, "version": version } self.log.debug("__ prep_data: {}".format(prep_data)) @@ -49,8 +54,12 @@ class ExtractBurnin(pype.api.Extractor): name = "_burnin" movieFileBurnin = filename.replace(".mov", "") + name + ".mov" - full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"]) - full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin) + full_movie_path = os.path.join( + os.path.normpath(stagingdir), repre["files"] + ) + full_burnin_path = os.path.join( + os.path.normpath(stagingdir), movieFileBurnin + ) self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) burnin_data = { diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ad2e59fc96..a7ae73f6c3 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -91,7 +91,9 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): text = today.strftime(date_format) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) - def add_frame_numbers(self, align, options=None, start_frame=None): + def add_frame_numbers( + self, align, options=None, start_frame=None, text=None + ): """ Convenience method to create the frame number expression. @@ -103,7 +105,12 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if start_frame: options['frame_offset'] = start_frame - options['expression'] = r'%%{eif\:n+%d\:d}' % options['frame_offset'] + expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] + if text and isinstance(text, str): + text = r"{}".format(text) + expr = text.replace("{current_frame}", expr) + + options['expression'] = expr text = str(int(self.end_frame + options['frame_offset'])) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) @@ -121,7 +128,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): timecode = ffmpeg_burnins._frames_to_timecode( options['frame_offset'], - self.frame_rate + self.frame_rate ) options = options.copy() if not options.get('fps'): @@ -284,8 +291,8 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): burnin = ModifiedBurnins(input_path, options_init=options_init) - start_frame = data.get("start_frame") - start_frame_tc = data.get('start_frame_tc', start_frame) + frame_start = data.get("frame_start") + frame_start_tc = data.get('frame_start_tc', frame_start) for align_text, preset in presets.get('burnins', {}).items(): align = None if align_text == 'TOP_LEFT': @@ -311,7 +318,7 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): if ( bi_func in ['frame_numbers', 'timecode'] and - start_frame is None + frame_start is None ): log.error( 'start_frame is not set in entered data!' 
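
For reference, the expression assembled in add_frame_numbers above relies on
ffmpeg's drawtext text expansion: "%{eif:expr:d}" is re-evaluated for every
rendered frame, "n" is the index of the frame being drawn, and the colons are
escaped as "\:" because the string lives inside a filter-graph description.
Adding the shot's frame offset to "n" therefore yields a running frame
counter. A minimal sketch of how the pieces combine; the offset value and the
template text are illustrative only, not values from the patch:

    offset = 1001                               # e.g. options['frame_offset']
    expr = r'%%{eif\:n+%d\:d}' % offset         # -> %{eif\:n+1001\:d}
    text = "frame {current_frame}".replace("{current_frame}", expr)
    # roughly what ffmpeg ends up evaluating per frame:
    #   ffmpeg -i in.mov -vf "drawtext=text='frame %{eif\:n+1001\:d}'" out.mov
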
@@ -320,9 +327,26 @@ def burnins_from_data(input_path, output_path, data, overwrite=True): return if bi_func == 'frame_numbers': - burnin.add_frame_numbers(align, start_frame=start_frame) + current_frame_identifier = "{current_frame}" + text = preset.get('text') or current_frame_identifier + + if current_frame_identifier not in text: + log.warning(( + 'Text for Frame numbers don\'t have ' + '`{current_frame}` key in text!' + )) + + text_items = [] + split_items = text.split(current_frame_identifier) + for item in split_items: + text_items.append(item.format(**data)) + + text = "{current_frame}".join(text_items) + + burnin.add_frame_numbers(align, start_frame=frame_start, text=text) + elif bi_func == 'timecode': - burnin.add_timecode(align, start_frame=start_frame_tc) + burnin.add_timecode(align, start_frame=frame_start_tc) elif bi_func == 'text': if not preset.get('text'): log.error('Text is not set for text function burnin!') From 8ed99e57594f837c5abbbe7c858fca929c87fec5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 3 Dec 2019 19:02:16 +0100 Subject: [PATCH 028/195] fixed environment passing to subprocess --- pype/lib.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pype/lib.py b/pype/lib.py index e41f9eb8bc..f4820a1962 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -19,12 +19,15 @@ log = logging.getLogger(__name__) def _subprocess(args): """Convenience method for getting output errors for subprocess.""" + # make sure environment contains only strings + env = {k: str(v) for k, v in os.environ.items()} + proc = subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, - env=os.environ + env=env ) output = proc.communicate()[0] From bcf32d80469cf849e0c1d4a38d086a8cebf08d2c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 3 Dec 2019 22:00:25 +0100 Subject: [PATCH 029/195] fixed pype root handling during publshing of image sequences --- pype/scripts/publish_filesequence.py | 37 +++++++++++++++++++--------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py index 25ed4135c3..7ad7318831 100644 --- a/pype/scripts/publish_filesequence.py +++ b/pype/scripts/publish_filesequence.py @@ -4,6 +4,7 @@ import os import logging import subprocess import platform +from shutil import which handler = logging.basicConfig() log = logging.getLogger("Publish Image Sequences") @@ -35,22 +36,32 @@ def __main__(): auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..") auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root - if kwargs.pype: - pype_root = kwargs.pype - else: - # if pype argument not specified, lets assume it is set in PATH - pype_root = "" - - print("Set pype root to: {}".format(pype_root)) - print("Paths: {}".format(kwargs.paths or [os.getcwd()])) - - paths = kwargs.paths or [os.getcwd()] pype_command = "pype.ps1" if platform.system().lower() == "linux": pype_command = "pype" elif platform.system().lower() == "windows": pype_command = "pype.bat" + if kwargs.pype: + pype_root = kwargs.pype + else: + # test if pype.bat / pype is in the PATH + # if it is, which() will return its path and we use that. + # if not, we use auto_pype_root path. Caveat of that one is + # that it can be UNC path and that will not work on windows. 
+ + pype_path = which(pype_command) + + if pype_path: + pype_root = os.path.dirname(pype_path) + else: + pype_root = auto_pype_root + + print("Set pype root to: {}".format(pype_root)) + print("Paths: {}".format(kwargs.paths or [os.getcwd()])) + + paths = kwargs.paths or [os.getcwd()] + args = [ os.path.join(pype_root, pype_command), "publish", @@ -60,9 +71,11 @@ def __main__(): print("Pype command: {}".format(" ".join(args))) # Forcing forwaring the environment because environment inheritance does # not always work. - exit_code = subprocess.call(args, env=os.environ) + # Cast all values in environment to str to be safe + env = {k: str(v) for k, v in os.environ.items()} + exit_code = subprocess.call(args, env=env) if exit_code != 0: - raise ValueError("Publishing failed.") + raise RuntimeError("Publishing failed.") if __name__ == '__main__': From 5e1eb0f42a8707878c142f247be8a3244c5b0547 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Dec 2019 14:41:11 +0100 Subject: [PATCH 030/195] fix(nks): collect plates didn't collect `frame` to representation --- pype/plugins/nukestudio/publish/collect_plates.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/nukestudio/publish/collect_plates.py b/pype/plugins/nukestudio/publish/collect_plates.py index f9eb126772..be448931c8 100644 --- a/pype/plugins/nukestudio/publish/collect_plates.py +++ b/pype/plugins/nukestudio/publish/collect_plates.py @@ -234,8 +234,9 @@ class CollectPlatesData(api.InstancePlugin): 'stagingDir': staging_dir, 'name': ext, 'ext': ext, - "frameStart": frame_start, "frameEnd": frame_end, + "frameStart": "%0{}d".format( + len(str(frame_end))) % frame_start } instance.data["representations"].append(plates_representation) From 55d255a20a0a189ea2357cf3c9d3d07fc4804951 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Dec 2019 14:53:06 +0100 Subject: [PATCH 031/195] feat(nks): adding timecode values to instance --- pype/plugins/nukestudio/publish/collect_clips.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/pype/plugins/nukestudio/publish/collect_clips.py b/pype/plugins/nukestudio/publish/collect_clips.py index 7a400909fd..0729f20957 100644 --- a/pype/plugins/nukestudio/publish/collect_clips.py +++ b/pype/plugins/nukestudio/publish/collect_clips.py @@ -4,7 +4,6 @@ from pyblish import api import nuke - class CollectClips(api.ContextPlugin): """Collect all Track items selection.""" @@ -31,6 +30,7 @@ class CollectClips(api.ContextPlugin): sub_items = video_track.subTrackItems() for item in items: + data = dict() # compare with selection or if disabled if item not in selection or not item.isEnabled(): continue @@ -83,9 +83,12 @@ class CollectClips(api.ContextPlugin): except Exception: source_first_frame = 0 - data = {"name": "{0}_{1}".format(track.name(), item.name()), + data.update({ + "name": "{0}_{1}".format(track.name(), item.name()), "item": item, "source": source, + "timecodeStart": str(source.timecodeStart()), + "timelineTimecodeStart": str(sequence.timecodeStart()), "sourcePath": source_path, "track": track.name(), "trackIndex": track_index, @@ -93,19 +96,24 @@ class CollectClips(api.ContextPlugin): "effects": effects, "sourceIn": int(item.sourceIn()), "sourceOut": int(item.sourceOut()), + "mediaDuration": (int(item.sourceOut()) - + int(item.sourceIn())) + 1, "clipIn": int(item.timelineIn()), "clipOut": int(item.timelineOut()), + "clipDuration": (int(item.timelineOut()) - + int(item.timelineIn())) + 1, "asset": asset, "family": "clip", "families": [], 
"handles": 0, "handleStart": projectdata.get("handles", 0), "handleEnd": projectdata.get("handles", 0), - "version": int(version)} + "version": int(version)}) instance = context.create_instance(**data) self.log.info("Created instance: {}".format(instance)) + self.log.info("Created instance.data: {}".format(instance.data)) self.log.debug(">> effects: {}".format(instance.data["effects"])) context.data["assetsShared"][asset] = dict() From f350d7694cf5c4d7c2598aa540db219cbf216403 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Dec 2019 17:26:23 +0100 Subject: [PATCH 032/195] feat(global): print ffmpeg path --- pype/plugins/global/publish/validate_ffmpeg_installed.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/plugins/global/publish/validate_ffmpeg_installed.py b/pype/plugins/global/publish/validate_ffmpeg_installed.py index 6d5ffba1e1..df7c330e95 100644 --- a/pype/plugins/global/publish/validate_ffmpeg_installed.py +++ b/pype/plugins/global/publish/validate_ffmpeg_installed.py @@ -27,6 +27,8 @@ class ValidateFfmpegInstallef(pyblish.api.Validator): return True def process(self, instance): + self.log.info("ffmpeg path: `{}`".format( + os.environ.get("FFMPEG_PATH", ""))) if self.is_tool( os.path.join( os.environ.get("FFMPEG_PATH", ""), "ffmpeg")) is False: From 346e3e27bce2bc0dd71829a3a686857cdd1268d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 4 Dec 2019 17:27:39 +0100 Subject: [PATCH 033/195] feat(nuke): extract review will generate lut file this will remove the nuke rendering of `*.baked..mov` --- .../nuke/publish/extract_review_data_lut.py | 185 ++++++++++++++++++ 1 file changed, 185 insertions(+) create mode 100644 pype/plugins/nuke/publish/extract_review_data_lut.py diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py new file mode 100644 index 0000000000..bba9544b13 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -0,0 +1,185 @@ +import os +import nuke +import pyblish.api +from avalon.nuke import lib as anlib +import pype + + +class ExtractReviewData(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Extract Review Data Lut" + + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + + self.log.debug("creating staging dir:") + self.staging_dir(instance) + + with anlib.maintained_selection(): + if "still" not in instance.data["families"]: + self.render_review_representation(instance, + representation="mov") + self.render_review_representation(instance, + representation="jpeg") + else: + self.render_review_representation(instance, representation="jpeg") + + def render_review_representation(self, + instance, + representation="mov"): + + assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" + + temporary_nodes = [] + stagingDir = instance.data[ + 'representations'][0]["stagingDir"].replace("\\", "/") + self.log.debug("StagingDir `{0}`...".format(stagingDir)) + + collection = instance.data.get("collection", None) + + if collection: + # get path + fname = os.path.basename(collection.format( + "{head}{padding}{tail}")) + fhead = collection.format("{head}") + + # get first and last frame + first_frame = min(collection.indexes) + last_frame = max(collection.indexes) + else: + fname = os.path.basename(instance.data.get("path", None)) + fhead = os.path.splitext(fname)[0] + "." 
+ first_frame = instance.data.get("frameStart", None) + last_frame = instance.data.get("frameEnd", None) + + rnode = nuke.createNode("Read") + + rnode["file"].setValue( + os.path.join(stagingDir, fname).replace("\\", "/")) + + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + + # get input process and connect it to baking + ipn = self.get_view_process_node() + if ipn is not None: + ipn.setInput(0, previous_node) + previous_node = ipn + temporary_nodes.append(ipn) + + reformat_node = nuke.createNode("Reformat") + + ref_node = self.nodes.get("Reformat", None) + if ref_node: + for k, v in ref_node: + self.log.debug("k,v: {0}:{1}".format(k,v)) + if isinstance(v, unicode): + v = str(v) + reformat_node[k].setValue(v) + + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + + # create write node + write_node = nuke.createNode("Write") + + if representation in "mov": + file = fhead + "baked.mov" + name = "baked" + path = os.path.join(stagingDir, file).replace("\\", "/") + self.log.debug("Path: {}".format(path)) + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["review", "delete"] + + elif representation in "jpeg": + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(stagingDir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] + + # retime for + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 + + repre = { + 'name': name, + 'ext': representation, + 'files': file, + "stagingDir": stagingDir, + "frameStart": first_frame, + "frameEnd": last_frame, + "anatomy_template": "render", + "tags": tags + } + instance.data["representations"].append(repre) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + self.log.debug("representations: {}".format(instance.data["representations"])) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + def get_view_process_node(self): + """ + Will get any active view process. 
+ + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + + with anlib.maintained_selection(): + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn From 48e5c4fd26143d78903564679f6dce15f5239da3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:36:08 +0100 Subject: [PATCH 034/195] feat(nuke): Lut exporter added to nuke.lib --- pype/nuke/lib.py | 169 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 169 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 157af9019d..960b65f769 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -6,6 +6,7 @@ from collections import OrderedDict from avalon import api, io, lib import avalon.nuke +from avalon.nuke import lib as anlib import pype.api as pype import nuke @@ -1190,3 +1191,171 @@ class BuildWorkfile(WorkfileSettings): def position_up(self, multiply=1): self.ypos -= (self.ypos_size * multiply) + self.ypos_gap + + +class Exporter_review_lut: + """ + Generator object for review lut from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + + + """ + _temp_nodes = [] + data = dict({ + "representations": list() + }) + + def __init__(self, + klass, + instance, + name=None, + ext=None, + lut_size=None, + lut_style=None): + + self.log = klass.log + self.instance = instance + + self.name = name or "baked_lut" + self.ext = ext or "cube" + self.lut_size = lut_size or 1024 + self.lut_style = lut_style or "linear" + + self.stagingDir = self.instance.data["stagingDir"] + self.collection = self.instance.data.get("collection", None) + + # set frame start / end and file name to self + self.get_file_info() + + self.log.info("File info was set...") + + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/") + + def generate_lut(self): + # ---------- start nodes creation + + # CMSTestPattern + cms_node = nuke.createNode("CMSTestPattern") + cms_node["cube_size"].setValue(96) + # connect + self._temp_nodes.append(cms_node) + self.previous_node = cms_node + self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) + + # Node View Process + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) + + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... 
`{}`".format(self._temp_nodes)) + + # GenerateLUT + gen_lut_node = nuke.createNode("GenerateLUT") + gen_lut_node["file"].setValue(self.path) + gen_lut_node["file_type"].setValue(".{}".format(self.ext)) + gen_lut_node["lut1d"].setValue(self.lut_size) + gen_lut_node["style1d"].setValue(self.lut_style) + # connect + gen_lut_node.setInput(0, self.previous_node) + self._temp_nodes.append(gen_lut_node) + self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) + + # ---------- end nodes creation + + # Export lut file + nuke.execute( + gen_lut_node.name(), + int(self.first_frame), + int(self.first_frame)) + + self.log.info("Exported...") + + # ---------- generate representation data + self.get_representation_data() + + self.log.debug("Representation... `{}`".format(self.data)) + + # ---------- Clean up + for node in self._temp_nodes: + nuke.delete(node) + self.log.info("Deleted nodes...") + + return self.data + + def get_file_info(self): + if self.collection: + self.log.debug("Collection: `{}`".format(self.collection)) + # get path + self.fname = os.path.basename(self.collection.format( + "{head}{padding}{tail}")) + self.fhead = self.collection.format("{head}") + + # get first and last frame + self.first_frame = min(self.collection.indexes) + self.last_frame = max(self.collection.indexes) + else: + self.fname = os.path.basename(self.instance.data.get("path", None)) + self.fhead = os.path.splitext(self.fname)[0] + "." + self.first_frame = self.instance.data.get("frameStart", None) + self.last_frame = self.instance.data.get("frameEnd", None) + + if "#" in self.fhead: + self.fhead = self.fhead.replace("#", "")[:-1] + + def get_representation_data(self): + + repre = { + 'name': self.name, + 'ext': self.ext, + 'files': self.file, + "stagingDir": self.stagingDir, + "anatomy_template": "publish", + "tags": [self.name.replace("_", "-")] + } + + self.data["representations"].append(repre) + + def get_view_process_node(self): + """ + Will get any active view process. 
+ + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + anlib.reset_selection() + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn From 49eadd8a2d8c28e73b9ef7ffc63fb12d3c87d1f6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:36:53 +0100 Subject: [PATCH 035/195] feat(global): added lut filter to ffmpeg --- pype/plugins/global/publish/extract_review.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index de167710a5..3ff3241812 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -162,6 +162,13 @@ class ExtractReview(pyblish.api.InstancePlugin): # output filename output_args.append(full_output_path) + + lut_path = instance.data.get("lutPath") + if lut_path: + lut_arg = "-vf \"lut3d=file='{}'\"".format( + lut_path) + output_args.insert(0, lut_arg) + mov_args = [ os.path.join( os.environ.get( From bd5805301b3c234f50fe8901f5a3f6a59dd09dac Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:37:32 +0100 Subject: [PATCH 036/195] feat(nuke): lut extractor added to nuke plugins --- .../nuke/publish/extract_review_data_lut.py | 182 +++--------------- 1 file changed, 26 insertions(+), 156 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index bba9544b13..54013af11a 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -1,185 +1,55 @@ import os -import nuke import pyblish.api from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib import pype +reload(pnlib) -class ExtractReviewData(pype.api.Extractor): +class ExtractReviewLutData(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py """ - order = pyblish.api.ExtractorOrder + 0.01 + order = pyblish.api.ExtractorOrder + 0.005 label = "Extract Review Data Lut" families = ["review"] hosts = ["nuke"] def process(self, instance): + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) - self.log.debug("creating staging dir:") - self.staging_dir(instance) + self.log.info("Creating staging dir...") - with anlib.maintained_selection(): - if "still" not in instance.data["families"]: - self.render_review_representation(instance, - representation="mov") - self.render_review_representation(instance, - representation="jpeg") - else: - self.render_review_representation(instance, representation="jpeg") - - def render_review_representation(self, - instance, - representation="mov"): - - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" 
- - temporary_nodes = [] stagingDir = instance.data[ 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) + instance.data["stagingDir"] = stagingDir - collection = instance.data.get("collection", None) + instance.data['representations'][0]["tags"] = ["review"] - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) - # get first and last frame - first_frame = min(collection.indexes) - last_frame = max(collection.indexes) - else: - fname = os.path.basename(instance.data.get("path", None)) - fhead = os.path.splitext(fname)[0] + "." - first_frame = instance.data.get("frameStart", None) - last_frame = instance.data.get("frameEnd", None) - - rnode = nuke.createNode("Read") - - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) - - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode - - # get input process and connect it to baking - ipn = self.get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - reformat_node = nuke.createNode("Reformat") - - ref_node = self.nodes.get("Reformat", None) - if ref_node: - for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) - if isinstance(v, unicode): - v = str(v) - reformat_node[k].setValue(v) - - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - # create write node - write_node = nuke.createNode("Write") - - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 - - repre = { - 'name': name, - 'ext': representation, - 'files': file, - "stagingDir": stagingDir, - "frameStart": first_frame, - "frameEnd": last_frame, - "anatomy_template": "render", - "tags": tags - } - instance.data["representations"].append(repre) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - self.log.debug("representations: {}".format(instance.data["representations"])) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def get_view_process_node(self): - """ - Will get any active view process. 
- - Arguments: - self (class): in object definition - - Returns: - nuke.Node: copy node of Input Process node - """ + if "representations" not in instance.data: + instance.data["representations"] = [] with anlib.maintained_selection(): - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) + exporter = pnlib.Exporter_review_lut( + self, instance + ) + data = exporter.generate_lut() - if ipn_orig: - # copy selected to clipboard - nuke.nodeCopy('%clipboard%') - # reset selection - anlib.reset_selection() - # paste node and selection is on it only - nuke.nodePaste('%clipboard%') - # assign to variable - ipn = nuke.selectedNode() + # assign to representations + instance.data["lutPath"] = os.path.join( + exporter.stagingDir, exporter.file).replace("\\", "/").replace( + "C:/", "C\\:/") + instance.data["representations"] += data["representations"] - return ipn + self.log.debug( + "_ lutPath: {}".format(instance.data["lutPath"])) + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) From bf8a829eee011b9d5fecb8bb4781a2a44395f1bd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:38:06 +0100 Subject: [PATCH 037/195] fix(nuke): review extractor fixed maintained selection --- .../nuke/publish/extract_review_data.py | 34 +++++++++---------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py index 791b9d7969..9bb4f93582 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -1,5 +1,6 @@ import os import nuke +from avalon.nuke import lib as anlib import pyblish.api import pype @@ -18,28 +19,22 @@ class ExtractReviewData(pype.api.Extractor): def process(self, instance): - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("creating staging dir:") - self.staging_dir(instance) + with anlib.maintained_selection(): + self.log.debug("creating staging dir:") + self.staging_dir(instance) - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) + self.log.debug("instance: {}".format(instance)) + self.log.debug("instance.data[families]: {}".format( + instance.data["families"])) - if "still" not in instance.data["families"]: - self.render_review_representation(instance, - representation="mov") - self.render_review_representation(instance, - representation="jpeg") - else: + # if "still" not in instance.data["families"]: + # self.render_review_representation(instance, + # representation="mov") + # self.render_review_representation(instance, + # representation="jpeg") + # else: self.render_review_representation(instance, representation="jpeg") - # Restore selection - [i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] def render_review_representation(self, instance, @@ -69,6 +64,9 @@ class ExtractReviewData(pype.api.Extractor): first_frame = instance.data.get("frameStart", None) last_frame = instance.data.get("frameEnd", None) + if "#" in fhead: + fhead = fhead.replace("#", "")[:-1] + rnode = 
nuke.createNode("Read") rnode["file"].setValue( From d3e36f13bc01b7234b492748653572bc5d9d3cb9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 10:38:42 +0100 Subject: [PATCH 038/195] feat(global): adding debug prints to assumed destination --- pype/plugins/global/publish/collect_templates.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index b80ca4ae1b..9b0c03fdee 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -85,3 +85,6 @@ class CollectTemplates(pyblish.api.InstancePlugin): instance.data["assumedDestination"] = os.path.dirname( (anatomy.format(template_data))["publish"]["path"] ) + self.log.info("Assumed Destination has been created...") + self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"])) + self.log.debug("__ template: `{}`".format(instance.data["template"])) From ec39b9f1d17b4717c49d1bf6e6d80782bf061a51 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 11:59:23 +0100 Subject: [PATCH 039/195] cleanup or imports --- pype/ftrack/actions/action_update_from_v2-2-0.py | 15 ++------------- pype/lib.py | 4 +--- pype/plugins/aport/publish/collect_context.py | 5 +---- pype/plugins/launcher/actions/Aport.py | 3 --- .../maya/publish/validate_node_ids_in_database.py | 2 +- .../maya/publish/validate_node_ids_related.py | 2 +- .../_publish_unused/validate_nuke_settings.py | 2 +- pype/plugins/nuke/load/load_mov.py | 7 ++----- pype/plugins/nuke/load/load_sequence.py | 5 +---- pype/setdress_api.py | 3 +-- .../widgets/widget_component_item.py | 2 +- pype/tools/assetcreator/widget.py | 5 ++--- 12 files changed, 14 insertions(+), 41 deletions(-) diff --git a/pype/ftrack/actions/action_update_from_v2-2-0.py b/pype/ftrack/actions/action_update_from_v2-2-0.py index 80b920207a..dd0f1e6ea2 100644 --- a/pype/ftrack/actions/action_update_from_v2-2-0.py +++ b/pype/ftrack/actions/action_update_from_v2-2-0.py @@ -1,14 +1,6 @@ import os -import sys -import argparse -import logging -import collections -import json -import re -import ftrack_api from pype.ftrack import BaseAction -from avalon import io, inventory, schema from pype.ftrack.lib.io_nonsingleton import DbConnector @@ -134,7 +126,6 @@ class PypeUpdateFromV2_2_0(BaseAction): "title": title } - def launch(self, session, entities, event): if 'values' not in event['data']: return @@ -182,7 +173,7 @@ class PypeUpdateFromV2_2_0(BaseAction): {"type": "asset"}, {"$unset": {"silo": ""}} ) - + self.log.debug("- setting schema of assets to v.3") self.db_con.update_many( {"type": "asset"}, @@ -191,10 +182,8 @@ class PypeUpdateFromV2_2_0(BaseAction): return True + def register(session, plugins_presets={}): """Register plugin. 
Called when used as an plugin.""" - if not isinstance(session, ftrack_api.session.Session): - return - PypeUpdateFromV2_2_0(session, plugins_presets).register() diff --git a/pype/lib.py b/pype/lib.py index e41f9eb8bc..279d836b88 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -1,14 +1,12 @@ import os import re import logging -import importlib import itertools import contextlib import subprocess import inspect - -import avalon.io as io +from avalon import io import avalon.api import avalon diff --git a/pype/plugins/aport/publish/collect_context.py b/pype/plugins/aport/publish/collect_context.py index 2aaa89fd05..35811d6378 100644 --- a/pype/plugins/aport/publish/collect_context.py +++ b/pype/plugins/aport/publish/collect_context.py @@ -1,9 +1,6 @@ import os import pyblish.api -from avalon import ( - io, - api as avalon -) +from avalon import api as avalon from pype import api as pype import json from pathlib import Path diff --git a/pype/plugins/launcher/actions/Aport.py b/pype/plugins/launcher/actions/Aport.py index 94f14cd0d3..0ecd07c49a 100644 --- a/pype/plugins/launcher/actions/Aport.py +++ b/pype/plugins/launcher/actions/Aport.py @@ -1,7 +1,4 @@ import os -import sys -from avalon import io -from pprint import pprint import acre from avalon import api, lib diff --git a/pype/plugins/maya/publish/validate_node_ids_in_database.py b/pype/plugins/maya/publish/validate_node_ids_in_database.py index 7347ce2ab2..fdcf0b20b0 100644 --- a/pype/plugins/maya/publish/validate_node_ids_in_database.py +++ b/pype/plugins/maya/publish/validate_node_ids_in_database.py @@ -1,6 +1,6 @@ import pyblish.api -import avalon.io as io +from avalon import io import pype.api import pype.maya.action diff --git a/pype/plugins/maya/publish/validate_node_ids_related.py b/pype/plugins/maya/publish/validate_node_ids_related.py index 4a154d0b71..4872f438d4 100644 --- a/pype/plugins/maya/publish/validate_node_ids_related.py +++ b/pype/plugins/maya/publish/validate_node_ids_related.py @@ -1,7 +1,7 @@ import pyblish.api import pype.api -import avalon.io as io +from avalon import io import pype.maya.action from pype.maya import lib diff --git a/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py b/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py index dd66b4fb3a..441658297d 100644 --- a/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py +++ b/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py @@ -1,7 +1,7 @@ import nuke import os import pyblish.api -import avalon.io as io +from avalon import io # TODO: add repair function diff --git a/pype/plugins/nuke/load/load_mov.py b/pype/plugins/nuke/load/load_mov.py index e6daaaff8a..e598839405 100644 --- a/pype/plugins/nuke/load/load_mov.py +++ b/pype/plugins/nuke/load/load_mov.py @@ -1,9 +1,6 @@ -import os import contextlib -from avalon import api -import avalon.io as io - +from avalon import api, io import nuke @@ -102,7 +99,7 @@ class LoadMov(api.Loader): handle_start = version_data.get("handleStart", None) handle_end = version_data.get("handleEnd", None) repr_cont = context["representation"]["context"] - + # fix handle start and end if none are available if not handle_start and not handle_end: handle_start = handles diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index e1c75584d7..8f01d4511b 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -1,9 +1,6 @@ -import os import contextlib -from avalon import api -import avalon.io as io - +from avalon import 
api, io import nuke diff --git a/pype/setdress_api.py b/pype/setdress_api.py index c6de0a4f74..55a6b4a2fb 100644 --- a/pype/setdress_api.py +++ b/pype/setdress_api.py @@ -7,8 +7,7 @@ import copy from maya import cmds -from avalon import api -import avalon.io as io +from avalon import api, io from avalon.maya.lib import unique_namespace from pype.maya.lib import matrix_equals diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py index 9631fed258..cdca479f06 100644 --- a/pype/standalonepublish/widgets/widget_component_item.py +++ b/pype/standalonepublish/widgets/widget_component_item.py @@ -2,7 +2,7 @@ import os from . import QtCore, QtGui, QtWidgets from . import SvgButton from . import get_resource -from avalon import style +from pypeapp import style class ComponentItem(QtWidgets.QFrame): diff --git a/pype/tools/assetcreator/widget.py b/pype/tools/assetcreator/widget.py index 75e793479a..1e9e4ab624 100644 --- a/pype/tools/assetcreator/widget.py +++ b/pype/tools/assetcreator/widget.py @@ -3,9 +3,8 @@ import contextlib import collections from avalon.vendor import qtawesome -from avalon.vendor.Qt import QtWidgets, QtCore, QtGui -from avalon import io -from avalon import style +from Qt import QtWidgets, QtCore, QtGui +from avalon import style, io from .model import ( TreeModel, From f9c7f1eb7660b8bd47a0a2888a42146b680714fb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:24:44 +0100 Subject: [PATCH 040/195] one forgotten import fix --- pype/plugins/launcher/actions/unused/PremierePro.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/launcher/actions/unused/PremierePro.py b/pype/plugins/launcher/actions/unused/PremierePro.py index 97d693ffbb..57aa4eb2cb 100644 --- a/pype/plugins/launcher/actions/unused/PremierePro.py +++ b/pype/plugins/launcher/actions/unused/PremierePro.py @@ -1,10 +1,9 @@ import os -import sys -from pprint import pprint import acre from avalon import api, lib, io import pype.api as pype +from pypeapp import Anatomy class PremierePro(api.Action): From 81e1885c3c8a77308ebf741a005b9b621dfa056a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:29:59 +0100 Subject: [PATCH 041/195] replaced storer_thread with processor_thread --- pype/ftrack/ftrack_server/event_server_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index 56a301e8f2..c829a3e65d 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -319,7 +319,7 @@ def main_loop(ftrack_url): # If thread failed test Ftrack and Mongo connection elif not processor_thread.isAlive(): - if storer_thread.mongo_error: + if processor_thread.mongo_error: raise Exception( "Exiting because have issue with acces to MongoDB" ) From 40c1e9e11f9730e4acfed88c4fb8b2665f7845e0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:30:09 +0100 Subject: [PATCH 042/195] removed unused imports --- pype/ftrack/ftrack_server/event_server_cli.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index c829a3e65d..6b1ac8ca9b 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -7,11 +7,9 @@ import socket import argparse import atexit import time -from urllib.parse import urlparse 
import ftrack_api from pype.ftrack.lib import credentials -from pype.ftrack.ftrack_server import FtrackServer from pype.ftrack.ftrack_server.lib import ( ftrack_events_mongo_settings, check_ftrack_url ) From 4f16e93d9dfba257fb27afa20fbae7f5ae215cf9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 12:30:17 +0100 Subject: [PATCH 043/195] fixed indentation --- pype/ftrack/ftrack_server/event_server_cli.py | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py index 6b1ac8ca9b..b09b0bc84e 100644 --- a/pype/ftrack/ftrack_server/event_server_cli.py +++ b/pype/ftrack/ftrack_server/event_server_cli.py @@ -65,9 +65,8 @@ def validate_credentials(url, user, api): except Exception as e: print( 'ERROR: Can\'t log into Ftrack with used credentials:' - ' Ftrack server: "{}" // Username: {} // API key: {}'.format( - url, user, api - )) + ' Ftrack server: "{}" // Username: {} // API key: {}' + ).format(url, user, api) return False print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format( @@ -145,9 +144,9 @@ def legacy_server(ftrack_url): ).format(str(max_fail_count), str(wait_time_after_max_fail))) subproc_failed_count += 1 elif (( - datetime.datetime.now() - subproc_last_failed - ).seconds > wait_time_after_max_fail): - subproc_failed_count = 0 + datetime.datetime.now() - subproc_last_failed + ).seconds > wait_time_after_max_fail): + subproc_failed_count = 0 # If thread failed test Ftrack and Mongo connection elif subproc.poll() is not None: @@ -275,9 +274,9 @@ def main_loop(ftrack_url): ).format(str(max_fail_count), str(wait_time_after_max_fail))) storer_failed_count += 1 elif (( - datetime.datetime.now() - storer_last_failed - ).seconds > wait_time_after_max_fail): - storer_failed_count = 0 + datetime.datetime.now() - storer_last_failed + ).seconds > wait_time_after_max_fail): + storer_failed_count = 0 # If thread failed test Ftrack and Mongo connection elif not storer_thread.isAlive(): @@ -311,9 +310,9 @@ def main_loop(ftrack_url): processor_failed_count += 1 elif (( - datetime.datetime.now() - processor_last_failed - ).seconds > wait_time_after_max_fail): - processor_failed_count = 0 + datetime.datetime.now() - processor_last_failed + ).seconds > wait_time_after_max_fail): + processor_failed_count = 0 # If thread failed test Ftrack and Mongo connection elif not processor_thread.isAlive(): From 2db7beb946691f65b53c24b258e994a249bca3aa Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:06:26 +0100 Subject: [PATCH 044/195] created lighting button that should replace thumbnail and preview svgs --- .../widgets/widget_component_item.py | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py index 9631fed258..70fd01bf7d 100644 --- a/pype/standalonepublish/widgets/widget_component_item.py +++ b/pype/standalonepublish/widgets/widget_component_item.py @@ -309,3 +309,62 @@ class ComponentItem(QtWidgets.QFrame): data['fps'] = self.in_data['fps'] return data + + +class LightingButton(QtWidgets.QPushButton): + lightingbtnstyle = """ + QPushButton { + text-align: center; + color: #777777; + background-color: transparent; + border-width: 1px; + border-color: #777777; + border-style: solid; + padding-top: 2px; + padding-bottom: 2px; + padding-left: 3px; + padding-right: 3px; + border-radius: 3px; + } + + 
QPushButton:hover { + border-color: #cccccc; + color: #cccccc; + } + + QPushButton:pressed { + border-color: #ffffff; + color: #ffffff; + } + + QPushButton:disabled { + border-color: #3A3939; + color: #3A3939; + } + + QPushButton:checked { + border-color: #4BB543; + color: #4BB543; + } + + QPushButton:checked:hover { + border-color: #4Bd543; + color: #4Bd543; + } + + QPushButton:checked:pressed { + border-color: #4BF543; + color: #4BF543; + } + """ + def __init__(self, text, *args, **kwargs): + super().__init__(text, *args, **kwargs) + self.setStyleSheet(self.lightingbtnstyle) + + self.setCheckable(True) + + preview_font_metrics = self.fontMetrics().boundingRect(text) + width = preview_font_metrics.width() + 16 + height = preview_font_metrics.height() + 5 + self.setMaximumWidth(width) + self.setMaximumHeight(height) From 0882d92a877a89f847e73593a1d05e189f18b294 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:07:21 +0100 Subject: [PATCH 045/195] replaced preview and thumbnail SVG buttons with lighting buttons --- .../widgets/widget_component_item.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py index 70fd01bf7d..3676100f78 100644 --- a/pype/standalonepublish/widgets/widget_component_item.py +++ b/pype/standalonepublish/widgets/widget_component_item.py @@ -16,6 +16,9 @@ class ComponentItem(QtWidgets.QFrame): signal_preview = QtCore.Signal(object) signal_repre_change = QtCore.Signal(object, object) + preview_text = "PREVIEW" + thumbnail_text = "THUMBNAIL" + def __init__(self, parent, main_parent): super().__init__() self.has_valid_repre = True @@ -124,17 +127,8 @@ class ComponentItem(QtWidgets.QFrame): frame_icons = QtWidgets.QFrame(frame_repre_icons) - self.preview = SvgButton( - get_resource('preview.svg'), 64, 18, - [self.C_NORMAL, self.C_HOVER, self.C_ACTIVE, self.C_ACTIVE_HOVER], - frame_icons - ) - - self.thumbnail = SvgButton( - get_resource('thumbnail.svg'), 84, 18, - [self.C_NORMAL, self.C_HOVER, self.C_ACTIVE, self.C_ACTIVE_HOVER], - frame_icons - ) + self.preview = LightingButton(self.preview_text) + self.thumbnail = LightingButton(self.thumbnail_text) layout = QtWidgets.QHBoxLayout(frame_icons) layout.setSpacing(6) From 93f97fecda4bc4a42b3cc1838652ae7058b6422a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:08:02 +0100 Subject: [PATCH 046/195] changed logic of collecting and setting check of thumbnail and preview buttons --- .../widgets/widget_component_item.py | 8 ++++---- .../widgets/widget_drop_frame.py | 20 +++++++++++-------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py index 3676100f78..f735140b23 100644 --- a/pype/standalonepublish/widgets/widget_component_item.py +++ b/pype/standalonepublish/widgets/widget_component_item.py @@ -266,16 +266,16 @@ class ComponentItem(QtWidgets.QFrame): self.signal_repre_change.emit(self, repre_name) def is_thumbnail(self): - return self.thumbnail.checked + return self.thumbnail.isChecked() def change_thumbnail(self, hover=True): - self.thumbnail.change_checked(hover) + self.thumbnail.setChecked(hover) def is_preview(self): - return self.preview.checked + return self.preview.isChecked() def change_preview(self, hover=True): - self.preview.change_checked(hover) + self.preview.setChecked(hover) def collect_data(self): in_files = 
self.in_data['files'] diff --git a/pype/standalonepublish/widgets/widget_drop_frame.py b/pype/standalonepublish/widgets/widget_drop_frame.py index ba8ab44cf8..73b9f0e179 100644 --- a/pype/standalonepublish/widgets/widget_drop_frame.py +++ b/pype/standalonepublish/widgets/widget_drop_frame.py @@ -92,28 +92,32 @@ class DropDataFrame(QtWidgets.QFrame): self._refresh_view() def _set_thumbnail(self, in_item): + current_state = in_item.is_thumbnail() + in_item.change_thumbnail(not current_state) + checked_item = None for item in self.components_list.widgets(): if item.is_thumbnail(): checked_item = item break - if checked_item is None or checked_item == in_item: - in_item.change_thumbnail() - else: + if checked_item is not None and checked_item != in_item: checked_item.change_thumbnail(False) - in_item.change_thumbnail() + + in_item.change_thumbnail(current_state) def _set_preview(self, in_item): + current_state = in_item.is_preview() + in_item.change_preview(not current_state) + checked_item = None for item in self.components_list.widgets(): if item.is_preview(): checked_item = item break - if checked_item is None or checked_item == in_item: - in_item.change_preview() - else: + if checked_item is not None and checked_item != in_item: checked_item.change_preview(False) - in_item.change_preview() + + in_item.change_preview(current_state) def _remove_item(self, in_item): valid_repre = in_item.has_valid_repre is True From bd4ce7c177b519d1c27368f4ca498a978e71a180 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:09:10 +0100 Subject: [PATCH 047/195] removed svg images of thumbnail and preview --- pype/standalonepublish/resources/preview.svg | 19 ------------------- .../standalonepublish/resources/thumbnail.svg | 19 ------------------- 2 files changed, 38 deletions(-) delete mode 100644 pype/standalonepublish/resources/preview.svg delete mode 100644 pype/standalonepublish/resources/thumbnail.svg diff --git a/pype/standalonepublish/resources/preview.svg b/pype/standalonepublish/resources/preview.svg deleted file mode 100644 index 4a9810c1d5..0000000000 --- a/pype/standalonepublish/resources/preview.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - PREVIEW - - diff --git a/pype/standalonepublish/resources/thumbnail.svg b/pype/standalonepublish/resources/thumbnail.svg deleted file mode 100644 index dbc228f8c8..0000000000 --- a/pype/standalonepublish/resources/thumbnail.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - THUMBNAIL - - From 58430956f68b98d4e4d23de7f6d1b38c1bb053c9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:15:04 +0100 Subject: [PATCH 048/195] check if index is valid on selection change --- pype/logging/gui/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/logging/gui/app.py b/pype/logging/gui/app.py index 7cee280158..9767077f80 100644 --- a/pype/logging/gui/app.py +++ b/pype/logging/gui/app.py @@ -33,5 +33,7 @@ class LogsWindow(QtWidgets.QWidget): def on_selection_changed(self): index = self.logs_widget.selected_log() + if not index or not index.isValid(): + return node = index.data(self.logs_widget.model.NodeRole) self.log_detail.set_detail(node) From b8031e565a96b26a8f822ca5c178fcebf0c092de Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:26:42 +0100 Subject: [PATCH 049/195] added filter for logs --- pype/logging/gui/widgets.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/pype/logging/gui/widgets.py b/pype/logging/gui/widgets.py index 66692c2c65..7cfb341366 100644 --- a/pype/logging/gui/widgets.py +++ 
b/pype/logging/gui/widgets.py @@ -190,6 +190,27 @@ class CheckableComboBox(QtWidgets.QComboBox): self.model.appendRow([text_item, checked_item]) +class FilterLogModel(QtCore.QSortFilterProxyModel): + sub_dict = ["$gt", "$lt", "$not"] + def __init__(self, key_values, parent=None): + super(FilterLogModel, self).__init__(parent) + self.allowed_key_values = key_values + + def filterAcceptsRow(self, row, parent): + """ + Reimplemented from base class. + """ + model = self.sourceModel() + for key, values in self.allowed_key_values.items(): + col_indx = model.COLUMNS.index(key) + value = model.index(row, col_indx, parent).data( + QtCore.Qt.DisplayRole + ) + if value not in values: + return False + return True + + class LogsWidget(QtWidgets.QWidget): """A widget that lists the published subsets for an asset""" From 33e4e5cd4050c8a85fdf55ad1a8f1ab667d184f8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 5 Dec 2019 14:27:21 +0100 Subject: [PATCH 050/195] filtering by comboboxes now works --- pype/logging/gui/widgets.py | 88 ++++++++++++++++++++++++++----------- 1 file changed, 63 insertions(+), 25 deletions(-) diff --git a/pype/logging/gui/widgets.py b/pype/logging/gui/widgets.py index 7cfb341366..e0d3c17080 100644 --- a/pype/logging/gui/widgets.py +++ b/pype/logging/gui/widgets.py @@ -1,11 +1,7 @@ -import datetime -import inspect +import getpass from Qt import QtCore, QtWidgets, QtGui -from PyQt5.QtCore import QVariant from .models import LogModel -from .lib import preserve_states - class SearchComboBox(QtWidgets.QComboBox): """Searchable ComboBox with empty placeholder value as first value""" @@ -53,6 +49,7 @@ class SearchComboBox(QtWidgets.QComboBox): return text + class CheckableComboBox2(QtWidgets.QComboBox): def __init__(self, parent=None): super(CheckableComboBox, self).__init__(parent) @@ -96,9 +93,11 @@ class SelectableMenu(QtWidgets.QMenu): else: super(SelectableMenu, self).mouseReleaseEvent(event) + class CustomCombo(QtWidgets.QWidget): selection_changed = QtCore.Signal() + checked_changed = QtCore.Signal(bool) def __init__(self, title, parent=None): super(CustomCombo, self).__init__(parent) @@ -127,12 +126,27 @@ class CustomCombo(QtWidgets.QWidget): self.toolmenu.clear() self.addItems(items) + def select_items(self, items, ignore_input=False): + if not isinstance(items, list): + items = [items] + + for action in self.toolmenu.actions(): + check = True + if ( + action.text() in items and ignore_input or + action.text() not in items and not ignore_input + ): + check = False + + action.setChecked(check) + def addItems(self, items): for item in items: action = self.toolmenu.addAction(item) action.setCheckable(True) - action.setChecked(True) self.toolmenu.addAction(action) + action.setChecked(True) + action.triggered.connect(self.checked_changed) def items(self): for action in self.toolmenu.actions(): @@ -186,7 +200,9 @@ class CheckableComboBox(QtWidgets.QComboBox): for text, checked in items: text_item = QtGui.QStandardItem(text) checked_item = QtGui.QStandardItem() - checked_item.setData(QVariant(checked), QtCore.Qt.CheckStateRole) + checked_item.setData( + QtCore.QVariant(checked), QtCore.Qt.CheckStateRole + ) self.model.appendRow([text_item, checked_item]) @@ -216,6 +232,10 @@ class LogsWidget(QtWidgets.QWidget): active_changed = QtCore.Signal() + _level_order = [ + "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" + ] + def __init__(self, parent=None): super(LogsWidget, self).__init__(parent=parent) @@ -223,16 +243,20 @@ class LogsWidget(QtWidgets.QWidget): filter_layout = 
QtWidgets.QHBoxLayout() - # user_filter = SearchComboBox(self, "Users") user_filter = CustomCombo("Users", self) users = model.dbcon.distinct("user") user_filter.populate(users) - user_filter.selection_changed.connect(self.user_changed) + user_filter.checked_changed.connect(self.user_changed) + user_filter.select_items(getpass.getuser()) level_filter = CustomCombo("Levels", self) - # levels = [(level, True) for level in model.dbcon.distinct("level")] levels = model.dbcon.distinct("level") - level_filter.addItems(levels) + _levels = [] + for level in self._level_order: + if level in levels: + _levels.append(level) + level_filter.populate(_levels) + level_filter.checked_changed.connect(self.level_changed) date_from_label = QtWidgets.QLabel("From:") date_filter_from = QtWidgets.QDateTimeEdit() @@ -241,8 +265,6 @@ class LogsWidget(QtWidgets.QWidget): date_from_layout.addWidget(date_from_label) date_from_layout.addWidget(date_filter_from) - # now = datetime.datetime.now() - # QtCore.QDateTime(now.year, now.month, now.day, now.hour, now.minute, second = 0, msec = 0, timeSpec = 0) date_to_label = QtWidgets.QLabel("To:") date_filter_to = QtWidgets.QDateTimeEdit() @@ -252,6 +274,7 @@ class LogsWidget(QtWidgets.QWidget): filter_layout.addWidget(user_filter) filter_layout.addWidget(level_filter) + filter_layout.setAlignment(QtCore.Qt.AlignLeft) filter_layout.addLayout(date_from_layout) filter_layout.addLayout(date_to_layout) @@ -259,11 +282,6 @@ class LogsWidget(QtWidgets.QWidget): view = QtWidgets.QTreeView(self) view.setAllColumnsShowFocus(True) - # # Set view delegates - # time_delegate = PrettyTimeDelegate() - # column = model.COLUMNS.index("time") - # view.setItemDelegateForColumn(column, time_delegate) - layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addLayout(filter_layout) @@ -276,34 +294,54 @@ class LogsWidget(QtWidgets.QWidget): QtCore.Qt.AscendingOrder ) - view.setModel(model) + key_val = { + "user": users, + "level": levels + } + proxy_model = FilterLogModel(key_val, view) + proxy_model.setSourceModel(model) + view.setModel(proxy_model) view.customContextMenuRequested.connect(self.on_context_menu) view.selectionModel().selectionChanged.connect(self.active_changed) - # user_filter.connect() - # TODO remove if nothing will affect... - # header = self.view.header() + # WARNING this is cool but slows down widget a lot + # header = view.header() # # Enforce the columns to fit the data (purely cosmetic) # if Qt.__binding__ in ("PySide2", "PyQt5"): # header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents) # else: # header.setResizeMode(QtWidgets.QHeaderView.ResizeToContents) - # Set signals - # prepare model.refresh() # Store to memory self.model = model + self.proxy_model = proxy_model self.view = view self.user_filter = user_filter + self.level_filter = level_filter def user_changed(self): + valid_actions = [] for action in self.user_filter.items(): - print(action) + if action.isChecked(): + valid_actions.append(action.text()) + + self.proxy_model.allowed_key_values["user"] = valid_actions + self.proxy_model.invalidate() + + def level_changed(self): + valid_actions = [] + for action in self.level_filter.items(): + if action.isChecked(): + valid_actions.append(action.text()) + + self.proxy_model.allowed_key_values["level"] = valid_actions + self.proxy_model.invalidate() + def on_context_menu(self, point): # TODO will be any actions? 
it's ready
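A note on the mechanism behind the two log-filter patches above: it is a stock Qt pattern, a QSortFilterProxyModel whose filterAcceptsRow() compares each row's column values against a dict of allowed values and is re-run via invalidate(). A minimal standalone sketch of the same idea follows; the column list and values are invented for illustration and are not part of these patches:

    from Qt import QtCore

    class AllowedValuesProxy(QtCore.QSortFilterProxyModel):
        """Accept a row only when every watched column holds an allowed value."""

        def __init__(self, allowed_key_values, columns, parent=None):
            super(AllowedValuesProxy, self).__init__(parent)
            # e.g. {"user": ["jakub"], "level": ["ERROR", "CRITICAL"]}
            self.allowed_key_values = allowed_key_values
            self.columns = columns  # column order of the source model

        def filterAcceptsRow(self, row, parent):
            model = self.sourceModel()
            for key, values in self.allowed_key_values.items():
                index = model.index(row, self.columns.index(key), parent)
                if index.data(QtCore.Qt.DisplayRole) not in values:
                    return False
            return True

    # After a checkbox toggles, rebuild the allowed values and re-filter:
    #     proxy.allowed_key_values["level"] = ["ERROR", "CRITICAL"]
    #     proxy.invalidate()  # forces filterAcceptsRow() to run again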
"frameStart": first_frame, - "frameEnd": last_frame, - "subsetGroup": "backdrops" + "frameEnd": last_frame }) self.log.info("Backdrop content collected: `{}`".format(instance[:])) self.log.info("Backdrop instance collected: `{}`".format(instance)) From 90f6d2e2daddaf6150e1d5b2a3927099d776d224 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 17:24:45 +0100 Subject: [PATCH 054/195] feat(nuke): gizmo remove subsetgroup --- pype/plugins/nuke/publish/collect_gizmo.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_gizmo.py b/pype/plugins/nuke/publish/collect_gizmo.py index 87f4cf8264..11e8c17a3f 100644 --- a/pype/plugins/nuke/publish/collect_gizmo.py +++ b/pype/plugins/nuke/publish/collect_gizmo.py @@ -50,8 +50,7 @@ class CollectGizmo(pyblish.api.InstancePlugin): instance.data.update({ "versionData": version_data, "frameStart": first_frame, - "frameEnd": last_frame, - "subsetGroup": "gizmos" + "frameEnd": last_frame }) self.log.info("Gizmo content collected: `{}`".format(instance[:])) self.log.info("Gizmo instance collected: `{}`".format(instance)) From d9b64f760a4a669eaad0a4f7a4029fd7e2906ea1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 5 Dec 2019 17:42:47 +0100 Subject: [PATCH 055/195] fix(nuke): burnin were converting better codec to h264 --- pype/plugins/global/publish/extract_burnin.py | 1 + pype/plugins/global/publish/extract_review.py | 4 +++- pype/scripts/otio_burnin.py | 14 +++++++++++--- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index b1569aaa45..3917f6e92c 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -55,6 +55,7 @@ class ExtractBurnin(pype.api.Extractor): burnin_data = { "input": full_movie_path.replace("\\", "/"), + "codec": repre.data.get("codec", []), "output": full_burnin_path.replace("\\", "/"), "burnin_data": prep_data } diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 1a7dcced78..7e06f0e158 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -147,6 +147,7 @@ class ExtractReview(pyblish.api.InstancePlugin): ) output_args = [] + output_args.extend(profile.get('codec', [])) # preset's output data output_args.extend(profile.get('output', [])) @@ -183,7 +184,8 @@ class ExtractReview(pyblish.api.InstancePlugin): 'ext': ext, 'files': repr_file, "tags": new_tags, - "outputName": name + "outputName": name, + "codec": profile.get('codec', []) }) if repre_new.get('preview'): repre_new.pop("preview") diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ad2e59fc96..01fa4c520d 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -213,13 +213,15 @@ def example(input_path, output_path): burnin.render(output_path, overwrite=True) -def burnins_from_data(input_path, output_path, data, overwrite=True): +def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True): ''' This method adds burnins to video/image file based on presets setting. Extension of output MUST be same as input. (mov -> mov, avi -> avi,...) 
From 0f967eebf799606893681e94fe6485586f9fe0ba Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 5 Dec 2019 18:01:30 +0100
Subject: [PATCH 056/195] fix(nuke): improving the fix

---
 pype/plugins/global/publish/extract_burnin.py | 2 +-
 pype/scripts/otio_burnin.py | 5 ++++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 3917f6e92c..50198529fd 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -55,7 +55,7 @@ class ExtractBurnin(pype.api.Extractor):

         burnin_data = {
             "input": full_movie_path.replace("\\", "/"),
-            "codec": repre.data.get("codec", []),
+            "codec": repre.get("codec", []),
             "output": full_burnin_path.replace("\\", "/"),
             "burnin_data": prep_data
         }
diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 01fa4c520d..6acc625a9f 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -341,7 +341,10 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
        )
        return

-    codec_args = " ".join(codec_data)
+    codec_args = ''
+    if codec_data:
+        codec_args = " ".join(codec_data)
+
    burnin.render(output_path, args=codec_args, overwrite=overwrite)
From 4a95119c65141142dcff0132dd0457d600be8358 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Thu, 5 Dec 2019 23:20:17 +0100
Subject: [PATCH 057/195] (maya) fixed extract look, added a few debug messages

---
 pype/plugins/maya/publish/collect_look.py | 19 ++++++++++++++-----
 pype/plugins/maya/publish/extract_look.py | 2 +-
 2 files changed, 15 insertions(+), 6 deletions(-)

diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py
index 618f2749a4..17f8180fdf 100644
--- a/pype/plugins/maya/publish/collect_look.py
+++ b/pype/plugins/maya/publish/collect_look.py
@@ -297,9 +297,11 @@ class CollectLook(pyblish.api.InstancePlugin):
         self.log.info("Collected file nodes:\n{}".format(files))
         # Collect textures if any file nodes are found
-        instance.data["resources"] = [self.collect_resource(n)
-                                      for n in files]
-        self.log.info("Collected resources:\n{}".format(instance.data["resources"]))
+        instance.data["resources"] = []
+        for n in files:
+            instance.data["resources"].append(self.collect_resource(n))
+
+        self.log.info("Collected resources: {}".format(instance.data["resources"]))

        # Log a warning when no relevant sets were retrieved for the look.
if not instance.data["lookData"]["relationships"]: @@ -423,7 +425,7 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.debug("processing: {}".format(node)) if cmds.nodeType(node) == 'file': - self.log.debug("file node") + self.log.debug(" - file node") attribute = "{}.fileTextureName".format(node) computed_attribute = "{}.computedFileTextureNamePattern".format(node) elif cmds.nodeType(node) == 'aiImage': @@ -431,7 +433,7 @@ class CollectLook(pyblish.api.InstancePlugin): attribute = "{}.filename".format(node) computed_attribute = attribute source = cmds.getAttr(attribute) - + self.log.info(" - file source: {}".format(source)) color_space_attr = "{}.colorSpace".format(node) color_space = cmds.getAttr(color_space_attr) # Compare with the computed file path, e.g. the one with the @@ -455,6 +457,13 @@ class CollectLook(pyblish.api.InstancePlugin): if len(files) == 0: self.log.error("No valid files found from node `%s`" % node) + self.log.info("collection of resource done:") + self.log.info(" - node: {}".format(node)) + self.log.info(" - attribute: {}".format(attribute)) + self.log.info(" - source: {}".format(source)) + self.log.info(" - file: {}".format(files)) + self.log.info(" - color space: {}".format(color_space)) + # Define the resource return {"node": node, "attribute": attribute, diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index c7b8058852..b8cdaa6727 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -88,7 +88,7 @@ def maketx(source, destination, *args): ) if sys.platform == "win32": - kwargs["creationflags"] = CREATE_NO_WIDOW + kwargs["creationflags"] = CREATE_NO_WINDOW try: out = subprocess.check_output(**kwargs) except subprocess.CalledProcessError as exc: From 670b2eb0bd1abe595f7b1e030ef17734ba27ee0b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 5 Dec 2019 23:29:42 +0100 Subject: [PATCH 058/195] (maya) fixing colorspace when texture is linearized in extract_look, some PEP8 changes --- pype/plugins/maya/publish/extract_look.py | 167 +++++++++++----------- 1 file changed, 83 insertions(+), 84 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index b8cdaa6727..5f3c1b33f3 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -38,11 +38,7 @@ def source_hash(filepath, *args): file_name = os.path.basename(filepath) time = str(os.path.getmtime(filepath)) size = str(os.path.getsize(filepath)) - return "|".join([ - file_name, - time, - size - ] + list(args)).replace(".", ",") + return "|".join([file_name, time, size] + list(args)).replace(".", ",") def find_paths_by_hash(texture_hash): @@ -64,28 +60,22 @@ def maketx(source, destination, *args): """ cmd = [ - "maketx", - "-v", # verbose - "-u", # update mode - # unpremultiply before conversion (recommended when alpha present) - "--unpremult", - "--checknan", - # use oiio-optimized settings for tile-size, planarconfig, metadata - "--oiio", - "--filter lanczos3" - ] + "maketx", + "-v", # verbose + "-u", # update mode + # unpremultiply before conversion (recommended when alpha present) + "--unpremult", + "--checknan", + # use oiio-optimized settings for tile-size, planarconfig, metadata + "--oiio", + "--filter lanczos3", + ] cmd.extend(args) - cmd.extend([ - "-o", destination, - source - ]) + cmd.extend(["-o", destination, source]) CREATE_NO_WINDOW = 0x08000000 - kwargs = dict( - args=cmd, - 
stderr=subprocess.STDOUT - ) + kwargs = dict(args=cmd, stderr=subprocess.STDOUT) if sys.platform == "win32": kwargs["creationflags"] = CREATE_NO_WINDOW @@ -94,6 +84,7 @@ def maketx(source, destination, *args): except subprocess.CalledProcessError as exc: print(exc) import traceback + traceback.print_exc() raise @@ -180,11 +171,12 @@ class ExtractLook(pype.api.Extractor): # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to # ensure after context it's still the original value. - color_space = resource.get('color_space') + color_space = resource.get("color_space") for f in resource["files"]: - files_metadata[os.path.normpath(f)] = {'color_space': color_space} + files_metadata[os.path.normpath(f)] = { + "color_space": color_space} # files.update(os.path.normpath(f)) # Process the resource files @@ -195,17 +187,19 @@ class ExtractLook(pype.api.Extractor): self.log.info(files) for filepath in files_metadata: - cspace = files_metadata[filepath]['color_space'] + cspace = files_metadata[filepath]["color_space"] linearise = False - if cspace == 'sRGB': + if cspace == "sRGB": linearise = True + # set its file node to 'raw' as tx will be linearized + files_metadata[filepath]["color_space"] = "raw" source, mode, hash = self._process_texture( filepath, do_maketx, staging=dir_path, linearise=linearise ) - destination = self.resource_destination( - instance, source, do_maketx - ) + destination = self.resource_destination(instance, + source, + do_maketx) # Force copy is specified. if instance.data.get("forceCopy", False): @@ -235,11 +229,11 @@ class ExtractLook(pype.api.Extractor): # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to # ensure after context it's still the original value. 
- color_space_attr = resource['node'] + ".colorSpace" + color_space_attr = resource["node"] + ".colorSpace" color_space = cmds.getAttr(color_space_attr) # Remap file node filename to destination - attr = resource['attribute'] + attr = resource["attribute"] remap[attr] = destinations[source] remap[color_space_attr] = color_space @@ -268,13 +262,15 @@ class ExtractLook(pype.api.Extractor): channels=True, constraints=True, expressions=True, - constructionHistory=True + constructionHistory=True, ) # Write the JSON data self.log.info("Extract json..") - data = {"attributes": lookdata["attributes"], - "relationships": relationships} + data = { + "attributes": lookdata["attributes"], + "relationships": relationships + } with open(json_path, "w") as f: json.dump(data, f) @@ -293,7 +289,7 @@ class ExtractLook(pype.api.Extractor): instance.data["representations"].append( { "name": "ma", - "ext": 'ma', + "ext": "ma", "files": os.path.basename(maya_fname), "stagingDir": os.path.dirname(maya_fname), } @@ -301,7 +297,7 @@ class ExtractLook(pype.api.Extractor): instance.data["representations"].append( { "name": "json", - "ext": 'json', + "ext": "json", "files": os.path.basename(json_fname), "stagingDir": os.path.dirname(json_fname), } @@ -314,13 +310,12 @@ class ExtractLook(pype.api.Extractor): # Source hash for the textures instance.data["sourceHashes"] = hashes - self.log.info("Extracted instance '%s' to: %s" % ( - instance.name, maya_path) - ) + self.log.info("Extracted instance '%s' to: %s" % (instance.name, + maya_path)) def resource_destination(self, instance, filepath, do_maketx): - anatomy = instance.context.data['anatomy'] + anatomy = instance.context.data["anatomy"] self.create_destination_template(instance, anatomy) @@ -332,9 +327,7 @@ class ExtractLook(pype.api.Extractor): ext = ".tx" return os.path.join( - instance.data["assumedDestination"], - "resources", - basename + ext + instance.data["assumedDestination"], "resources", basename + ext ) def _process_texture(self, filepath, do_maketx, staging, linearise): @@ -366,17 +359,13 @@ class ExtractLook(pype.api.Extractor): return source, HARDLINK, texture_hash else: self.log.warning( - "Paths not found on disk, " - "skipping hardlink: %s" % (existing,) + ("Paths not found on disk, " + "skipping hardlink: %s") % (existing,) ) if do_maketx and ext != ".tx": # Produce .tx file in staging if source file is not .tx - converted = os.path.join( - staging, - "resources", - fname + ".tx" - ) + converted = os.path.join(staging, "resources", fname + ".tx") if linearise: self.log.info("tx: converting sRGB -> linear") @@ -389,9 +378,15 @@ class ExtractLook(pype.api.Extractor): os.makedirs(os.path.dirname(converted)) self.log.info("Generating .tx file for %s .." 
% filepath) - maketx(filepath, converted, - # Include `source-hash` as string metadata - "-sattrib", "sourceHash", texture_hash, colorconvert) + maketx( + filepath, + converted, + # Include `source-hash` as string metadata + "-sattrib", + "sourceHash", + texture_hash, + colorconvert, + ) return converted, COPY, texture_hash @@ -417,58 +412,62 @@ class ExtractLook(pype.api.Extractor): project_name = api.Session["AVALON_PROJECT"] a_template = anatomy.templates - project = io.find_one({"type": "project", - "name": project_name}, - projection={"config": True, "data": True}) + project = io.find_one( + {"type": "project", "name": project_name}, + projection={"config": True, "data": True}, + ) - template = a_template['publish']['path'] + template = a_template["publish"]["path"] # anatomy = instance.context.data['anatomy'] - asset = io.find_one({"type": "asset", - "name": asset_name, - "parent": project["_id"]}) + asset = io.find_one( + {"type": "asset", "name": asset_name, "parent": project["_id"]} + ) assert asset, ("No asset found by the name '{}' " - "in project '{}'".format(asset_name, project_name)) - silo = asset.get('silo') + "in project '{}'").format(asset_name, project_name) + silo = asset.get("silo") - subset = io.find_one({"type": "subset", - "name": subset_name, - "parent": asset["_id"]}) + subset = io.find_one( + {"type": "subset", "name": subset_name, "parent": asset["_id"]} + ) # assume there is no version yet, we start at `1` version = None version_number = 1 if subset is not None: - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = io.find_one( + {"type": "version", + "parent": subset["_id"] + }, sort=[("name", -1)] + ) # if there is a subset there ought to be version if version is not None: version_number += version["name"] - if instance.data.get('version'): - version_number = int(instance.data.get('version')) + if instance.data.get("version"): + version_number = int(instance.data.get("version")) - padding = int(a_template['render']['padding']) + padding = int(a_template["render"]["padding"]) - hierarchy = asset['data']['parents'] + hierarchy = asset["data"]["parents"] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = "/".join(hierarchy) - template_data = {"root": api.Session["AVALON_PROJECTS"], - "project": {"name": project_name, - "code": project['data']['code']}, - "silo": silo, - "family": instance.data['family'], - "asset": asset_name, - "subset": subset_name, - "frame": ('#' * padding), - "version": version_number, - "hierarchy": hierarchy, - "representation": "TEMP"} + template_data = { + "root": api.Session["AVALON_PROJECTS"], + "project": {"name": project_name, "code": project["data"]["code"]}, + "silo": silo, + "family": instance.data["family"], + "asset": asset_name, + "subset": subset_name, + "frame": ("#" * padding), + "version": version_number, + "hierarchy": hierarchy, + "representation": "TEMP", + } instance.data["assumedTemplateData"] = template_data self.log.info(template_data) From 1361d12452223977a14cb623b1765457ca2ea54b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 07:59:19 +0100 Subject: [PATCH 059/195] feat(nuke): polishing the Lut Exporter --- pype/nuke/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 960b65f769..6349f35bea 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1212,6 +1212,7 @@ class Exporter_review_lut: instance, name=None, ext=None, + cube_size=None, lut_size=None, 
lut_style=None):

@@ -1220,6 +1221,7 @@ class Exporter_review_lut:

         self.name = name or "baked_lut"
         self.ext = ext or "cube"
+        self.cube_size = cube_size or 32
         self.lut_size = lut_size or 1024
         self.lut_style = lut_style or "linear"
@@ -1239,7 +1241,7 @@

         # CMSTestPattern
         cms_node = nuke.createNode("CMSTestPattern")
-        cms_node["cube_size"].setValue(96)
+        cms_node["cube_size"].setValue(self.cube_size)
         # connect
         self._temp_nodes.append(cms_node)
         self.previous_node = cms_node
From 43ed9443941a136227aa694e1fd7cc84985b7ce6 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 08:00:15 +0100
Subject: [PATCH 060/195] feat(global): implementing Lut integration into Extract Review

---
 pype/plugins/global/publish/extract_review.py | 30 +++++++++++++++++--
 1 file changed, 28 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 3ff3241812..59ef308f9a 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -165,9 +165,35 @@ class ExtractReview(pyblish.api.InstancePlugin):

             lut_path = instance.data.get("lutPath")
             if lut_path:
-                lut_arg = "-vf \"lut3d=file='{}'\"".format(
+                # removing Gamma info as it is all baked in lut
+                gamma = next((g for g in input_args
+                              if "-gamma" in g), None)
+                if gamma:
+                    input_args.remove(gamma)
+
+                # find all video format settings
+                vf_settings = [p for p in output_args
+                               for v in ["-filter:v", "-vf"]
+                               if v in p]
+                self.log.debug("_ vf_settings: `{}`".format(vf_settings))
+                # remove them from output args list
+                for p in vf_settings:
+                    self.log.debug("_ remove p: `{}`".format(p))
+                    output_args.remove(p)
+                self.log.debug("_ output_args: `{}`".format(output_args))
+                # strip them from all flags
+                vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "") for p in vf_settings]
+                # create lut argument
+                lut_arg = "lut3d=file='{}',colormatrix=bt601:bt709".format(
                     lut_path)
-                output_args.insert(0, lut_arg)
+                vf_fixed.insert(0, lut_arg)
+                # create new video filter setting
+                vf_back = "-vf " + ",".join(vf_fixed)
+                # add it to output_args
+                output_args.insert(0, vf_back)
+                self.log.info("Added Lut to ffmpeg command")
+                self.log.debug("_ output_args: `{}`".format(output_args))
+
             mov_args = [
                 os.path.join(
                     os.environ.get(
From ec9f5b5e0405e0ba53b26d1d14adbcb78d680c20 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 08:00:35 +0100
Subject: [PATCH 061/195] feat(nuke): adding format data to instance

---
 pype/plugins/nuke/publish/collect_instances.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index 483f260295..cb98b8244d 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -23,6 +23,8 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
         instances = []

         # creating instances per write node
+        root = nuke.root()
+
         self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
         for node in nuke.allNodes():

@@ -61,7 +63,13 @@

             family = avalon_knob_data["family"]
             families = [avalon_knob_data["families"]]
-
+
+            # Get format
+            format = root['format'].value()
+            resolution_width = format.width()
+            resolution_height = format.height()
+            pixel_aspect = format.pixelAspect()
+
             if node.Class() not in "Read":
                 if node["render"].value():
                     self.log.info("flagged for render")
@@ -87,7
+95,10 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                 "avalonKnob": avalon_knob_data,
                 "publish": node.knob('publish').value(),
                 "step": 1,
-                "fps": nuke.root()['fps'].value()
+                "fps": nuke.root()['fps'].value(),
+                "resolutionWidth": resolution_width,
+                "resolutionHeight": resolution_height,
+                "pixelAspect": pixel_aspect,
             })

@@ -95,5 +106,4 @@
             instances.append(instance)

         context.data["instances"] = instances
-
         self.log.debug("context: {}".format(context))
From 3c90a7984023889b30c2fc7b0233b02c8a6a3bb3 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 13:58:31 +0100
Subject: [PATCH 062/195] feat(global): extr review pixel aspect rescale

---
 pype/plugins/global/publish/extract_review.py | 76 ++++++++++++++-----
 1 file changed, 58 insertions(+), 18 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 59ef308f9a..fa02826527 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -151,7 +151,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
             output_args.extend(profile.get('output', []))

             # letter_box
-            # TODO: add to documentation
             lb = profile.get('letter_box', None)
             if lb:
                 output_args.append(
@@ -163,6 +162,18 @@
             # output filename
             output_args.append(full_output_path)

+            # scaling none square pixels and 1920 width
+            # scale=320:-2 # to auto count height with output to be multiple of 2
+            if profile.get('reformat', False):
+                pixel_aspect = instance.data["pixelAspect"]
+                scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format(
+                    pixel_aspect)
+                vf_back = self.add_video_filter_args(
+                    output_args, scaling_arg)
+                # add it to output_args
+                output_args.insert(0, vf_back)
+
+            # baking lut file application
             lut_path = instance.data.get("lutPath")
             if lut_path:
                 # removing Gamma info as it is all baked in lut
                 gamma = next((g for g in input_args
                               if "-gamma" in g), None)
                 if gamma:
                     input_args.remove(gamma)

-                # find all video format settings
-                vf_settings = [p for p in output_args
-                               for v in ["-filter:v", "-vf"]
-                               if v in p]
-                self.log.debug("_ vf_settings: `{}`".format(vf_settings))
-                # remove them from output args list
-                for p in vf_settings:
-                    self.log.debug("_ remove p: `{}`".format(p))
-                    output_args.remove(p)
-                self.log.debug("_ output_args: `{}`".format(output_args))
-                # strip them from all flags
-                vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "") for p in vf_settings]
                 # create lut argument
-                lut_arg = "lut3d=file='{}',colormatrix=bt601:bt709".format(
-                    lut_path)
-                vf_fixed.insert(0, lut_arg)
-                # create new video filter setting
-                vf_back = "-vf " + ",".join(vf_fixed)
+                lut_arg = "lut3d=file='{}'".format(
+                    lut_path.replace(
+                        "\\", "/").replace(":/", "\\:/")
+                )
+                lut_arg += ",colormatrix=bt601:bt709"
+
+                vf_back = self.add_video_filter_args(
+                    output_args, lut_arg)
                 # add it to output_args
                 output_args.insert(0, vf_back)
                 self.log.info("Added Lut to ffmpeg command")
                 self.log.debug("_ output_args: `{}`".format(output_args))
@@ -240,3 +242,41 @@
         instance.data["representations"] = representations_new

         self.log.debug("Families Out: `{}`".format(instance.data["families"]))
+
+
+    def add_video_filter_args(self, args, inserting_arg):
+        """
+        Fixing video filter arguments to be one long string
+
+        Args:
+            args (list): list of string arguments
+            inserting_arg (str): string argument we want to add
+                (without flag `-vf`)
+
+        Returns:
+            str: long joined argument to be added back to list of arguments
+
+        """
+        # find all video format settings
+        vf_settings = [p for p in args
+                       for v in ["-filter:v", "-vf"]
+                       if v in p]
+        self.log.debug("_ vf_settings: `{}`".format(vf_settings))

+        # remove them from output args list
+        for p in vf_settings:
+            self.log.debug("_ remove p: `{}`".format(p))
+            args.remove(p)
+        self.log.debug("_ args: `{}`".format(args))

+        # strip them from all flags
+        vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "")
+                    for p in vf_settings]

+        self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
+        vf_fixed.insert(0, inserting_arg)
+        self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
+        # create new video filter setting
+        vf_back = "-vf " + ",".join(vf_fixed)

+        return vf_back
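The scale expression this patch bakes into ffmpeg is easier to sanity-check outside of it. A small sketch of the same arithmetic follows; the pixel aspect values are hypothetical:

    import math

    def reformat_scale_arg(pixel_aspect, width=1920):
        # Mirrors scale=1920:'ceil((1920/PA)/2)*2':flags=lanczos,setsar=1
        # above: height is forced to the nearest even number and the sample
        # aspect ratio is reset to square pixels.
        height = int(math.ceil((width / float(pixel_aspect)) / 2) * 2)
        return "scale={}:{}:flags=lanczos,setsar=1".format(width, height)

    print(reformat_scale_arg(2.0))  # scale=1920:960:flags=lanczos,setsar=1
    print(reformat_scale_arg(1.5))  # scale=1920:1280:flags=lanczos,setsar=1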
From b79e22464b02c0b3c4ecf7224bac69ae84270ac6 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 13:59:35 +0100
Subject: [PATCH 063/195] fix(nuke): when writes are collected, family should be `write`, otherwise validate frames picks it up

---
 pype/plugins/nuke/publish/collect_writes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py
index ba8a0534b1..5484d971bf 100644
--- a/pype/plugins/nuke/publish/collect_writes.py
+++ b/pype/plugins/nuke/publish/collect_writes.py
@@ -99,7 +99,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "subset": instance.data["subset"],
             "fps": instance.context.data["fps"]
         }
-
+        instance.data["family"] = "write"
         group_node = [x for x in instance if x.Class() == "Group"][0]
         deadlineChunkSize = 1
         if "deadlineChunkSize" in group_node.knobs():
From 78f2da25cf81c494ee6f7d1979ddf98ab67784d6 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 14:01:42 +0100
Subject: [PATCH 064/195] feat(nuke): extract thumbnail from before Review Data (mov, jpg)

---
 ...ct_review_data.py => extract_thumbnail.py} | 69 ++++++-------------
 1 file changed, 22 insertions(+), 47 deletions(-)
 rename pype/plugins/nuke/publish/{extract_review_data.py => extract_thumbnail.py} (67%)

diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_thumbnail.py
similarity index 67%
rename from pype/plugins/nuke/publish/extract_review_data.py
rename to pype/plugins/nuke/publish/extract_thumbnail.py
index 9bb4f93582..5740a90924 100644
--- a/pype/plugins/nuke/publish/extract_review_data.py
+++ b/pype/plugins/nuke/publish/extract_thumbnail.py
@@ -4,7 +4,7 @@ from avalon.nuke import lib as anlib
 import pyblish.api
 import pype

-class ExtractReviewData(pype.api.Extractor):
+class ExtractThumbnail(pype.api.Extractor):
     """Extracts movie and thumbnail with baked in luts

     must be run after extract_render_local.py

     """

@@ -12,7 +12,7 @@
     order = pyblish.api.ExtractorOrder + 0.01
-    label = "Extract Review Data"
+    label = "Extract Thumbnail"
     families = ["review"]
     hosts = ["nuke"]

     def process(self, instance):
         with anlib.maintained_selection():
-            self.log.debug("creating staging dir:")
-            self.staging_dir(instance)
             self.log.debug("instance: {}".format(instance))
             self.log.debug("instance.data[families]: {}".format(
                 instance.data["families"]))

-            # if "still" not in instance.data["families"]:
-            #     self.render_review_representation(instance,
-            #                                       representation="mov")
-            #     self.render_review_representation(instance,
-            #                                       representation="jpeg")
-            # else:
self.render_review_representation(instance, representation="jpeg") + self.render_thumbnail(instance) - def render_review_representation(self, - instance, - representation="mov"): + def render_thumbnail(self, instance): assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" temporary_nodes = [] + self.log.info("Getting staging dir...") stagingDir = instance.data[ 'representations'][0]["stagingDir"].replace("\\", "/") self.log.debug("StagingDir `{0}`...".format(stagingDir)) @@ -107,39 +97,24 @@ class ExtractReviewData(pype.api.Extractor): # create write node write_node = nuke.createNode("Write") + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(stagingDir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 + # retime for + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 repre = { 'name': name, - 'ext': representation, + 'ext': "jpeg", 'files': file, "stagingDir": stagingDir, "frameStart": first_frame, @@ -154,9 +129,9 @@ class ExtractReviewData(pype.api.Extractor): self.log.debug("representations: {}".format(instance.data["representations"])) - # Clean up - for node in temporary_nodes: - nuke.delete(node) + # # Clean up + # for node in temporary_nodes: + # nuke.delete(node) def get_view_process_node(self): From 5754450b88e7cb5b894137d5daac0608df77ccfb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 14:02:37 +0100 Subject: [PATCH 065/195] fix(nuke): the path was only working with C: Also the replace was moved to Extract Review --- pype/plugins/nuke/publish/extract_review_data_lut.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index 54013af11a..910b6ee19e 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -45,8 +45,7 @@ class ExtractReviewLutData(pype.api.Extractor): # assign to representations instance.data["lutPath"] = os.path.join( - exporter.stagingDir, exporter.file).replace("\\", "/").replace( - "C:/", "C\\:/") + exporter.stagingDir, exporter.file).replace("\\", "/") instance.data["representations"] += data["representations"] self.log.debug( From f4a0b341ac5036945a9f47f0ebfe50cb030d564b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 14:03:16 +0100 Subject: 
[PATCH 066/195] fix(nuke): validator had wrong family

---
 pype/plugins/nuke/publish/validate_rendered_frames.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py
index e244a9b4b6..3887b5d5b7 100644
--- a/pype/plugins/nuke/publish/validate_rendered_frames.py
+++ b/pype/plugins/nuke/publish/validate_rendered_frames.py
@@ -28,7 +28,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
     """ Validates file output. """

     order = pyblish.api.ValidatorOrder + 0.1
-    families = ["render.no"]
+    families = ["render"]

     label = "Validate rendered frame"
     hosts = ["nuke", "nukestudio"]
From 350dbda38ccf8d3ebf7c9322d5fa2e4a6b9cb000 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 14:41:50 +0100
Subject: [PATCH 067/195] feat(global): attaching conditions to preset tags instead of arguments; that is what they are for, isn't it? ;)

---
 pype/plugins/nuke/publish/extract_thumbnail.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py
index 5740a90924..126203603e 100644
--- a/pype/plugins/nuke/publish/extract_thumbnail.py
+++ b/pype/plugins/nuke/publish/extract_thumbnail.py
@@ -4,6 +4,7 @@ from avalon.nuke import lib as anlib
 import pyblish.api
 import pype

+
 class ExtractThumbnail(pype.api.Extractor):
     """Extracts movie and thumbnail with baked in luts

@@ -26,9 +27,7 @@ class ExtractThumbnail(pype.api.Extractor):

             self.render_thumbnail(instance)

     def render_thumbnail(self, instance):
-
-        assert instance.data['representations'][0]['files'], "Instance data files should't be empty!"
         temporary_nodes = []
@@ -129,9 +128,9 @@
         self.log.debug(
             "representations: {}".format(instance.data["representations"]))

-        # # Clean up
-        # for node in temporary_nodes:
-        #     nuke.delete(node)
+        # Clean up
+        for node in temporary_nodes:
+            nuke.delete(node)
From 7f171bd11d16a806987f2a355808f2d1a59aa3c2 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 14:49:45 +0100
Subject: [PATCH 068/195] feat(global): hook the conditions to preset tags; after all, that is what they are for ;)

---
 pype/plugins/global/publish/extract_review.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 418ac39186..a002e1140d 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -165,7 +165,7 @@ class ExtractReview(pyblish.api.InstancePlugin):

         # scaling none square pixels and 1920 width
         # scale=320:-2 # to auto count height with output to be multiple of 2
-        if profile.get('reformat', False):
+        if "reformat" in tags:
             pixel_aspect = instance.data["pixelAspect"]
             scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format(
                 pixel_aspect)
@@ -176,7 +176,7 @@

         # baking lut file application
         lut_path = instance.data.get("lutPath")
-        if lut_path:
+        if lut_path and ("bake-lut" in tags):
             # removing Gamma info as it is all baked in lut
             gamma = next((g for g in input_args
From b69e9629a74e462c4be36aedc42152ece9a9c0f5 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 14:50:08 +0100
Subject: [PATCH 069/195] clean(nuke): make it nicer

---
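For context on the two tag-driven patches above: output profiles come from presets, and ExtractReview now keys its behaviour off each profile's tag list. A hypothetical profile entry, with the shape following the code but all names and flags invented:

    # One entry of the ExtractReview "outputs" preset as the plugin reads it:
    # profile.get('codec'), profile.get('output') and the tags tested above.
    outputs = {
        "h264": {
            "ext": "mov",
            "codec": ["-codec:v", "libx264"],
            "output": ["-pix_fmt", "yuv420p", "-crf", "18"],
            "tags": ["review", "reformat", "bake-lut"],
        }
    }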
pype/plugins/nuke/publish/extract_thumbnail.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index 126203603e..a58dad02f5 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -80,7 +80,7 @@ class ExtractThumbnail(pype.api.Extractor): ref_node = self.nodes.get("Reformat", None) if ref_node: for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) + self.log.debug("k, v: {0}:{1}".format(k, v)) if isinstance(v, unicode): v = str(v) reformat_node[k].setValue(v) @@ -126,7 +126,8 @@ class ExtractThumbnail(pype.api.Extractor): # Render frames nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - self.log.debug("representations: {}".format(instance.data["representations"])) + self.log.debug( + "representations: {}".format(instance.data["representations"])) # Clean up for node in temporary_nodes: From f53710dc455a91e64e3c6b03b480bfff8ec6a7fc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 6 Dec 2019 15:57:57 +0100 Subject: [PATCH 070/195] added mapping based on presets to version to task status changer --- .../events/event_version_to_task_statuses.py | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index 81398373bb..2cdeed7fce 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -1,9 +1,14 @@ -import ftrack_api from pype.ftrack import BaseEvent +from pypeapp import config class VersionToTaskStatus(BaseEvent): + default_status_mapping = { + 'reviewed': 'Change requested', + 'approved': 'Complete' + } + def launch(self, session, event): '''Propagates status from version to task when changed''' @@ -27,13 +32,15 @@ class VersionToTaskStatus(BaseEvent): self.log.info('>>> version status: [ {} ]'.format( version_status['name'])) - status_to_set = None - # Filter to versions with status change to "render complete" - if version_status['name'].lower() == 'reviewed': - status_to_set = 'Change requested' + version_name_low = version_status['name'].lower() - if version_status['name'].lower() == 'approved': - status_to_set = 'Complete' + status_mapping = ( + config.get_presets() + .get("ftrack", {}) + .get("ftrack_config", {}) + .get("status_version_to_task") + ) or self.default_status_mapping + status_to_set = status_mapping.get(version_name_low) self.log.info( '>>> status to set: [ {} ]'.format(status_to_set)) @@ -46,7 +53,8 @@ class VersionToTaskStatus(BaseEvent): self.log.info( '!!! 
status was not found in Ftrack [ {} ]'.format(
                    status_to_set
-                ))
+                )
+            )
                continue

        # Proceed if the task status was set
From 305bd6a02181744668e0360f25ba70fb801b0f27 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 16:10:23 +0100
Subject: [PATCH 071/195] fix(global): not using correct preset tags; fixing pixelAspect to be applied to letter box too

---
 pype/plugins/global/publish/extract_review.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index a002e1140d..9de4a966f3 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -31,7 +31,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
         inst_data = instance.data
         fps = inst_data.get("fps")
         start_frame = inst_data.get("frameStart")
-
+        pixel_aspect = instance.data["pixelAspect"]
         self.log.debug("Families In: `{}`".format(instance.data["families"]))

         # get representation and loop them
@@ -147,13 +147,16 @@
             )

             output_args = []
-            output_args.extend(profile.get('codec', []))
+            codec_args = profile.get('codec', [])
+            output_args.extend(codec_args)

             # preset's output data
             output_args.extend(profile.get('output', []))

             # letter_box
             lb = profile.get('letter_box', None)
             if lb:
+                if "reformat" not in p_tags:
+                    lb /= pixel_aspect
                 output_args.append(
                     "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
@@ -165,8 +168,7 @@

             # scaling none square pixels and 1920 width
             # scale=320:-2 # to auto count height with output to be multiple of 2
-            if "reformat" in tags:
+            if "reformat" in p_tags:
                 scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format(
                     pixel_aspect)
                 vf_back = self.add_video_filter_args(
                     output_args, scaling_arg)
                 # add it to output_args
                 output_args.insert(0, vf_back)

             # baking lut file application
             lut_path = instance.data.get("lutPath")
-            if lut_path and ("bake-lut" in tags):
+            if lut_path and ("bake-lut" in p_tags):
                 # removing Gamma info as it is all baked in lut
                 gamma = next((g for g in input_args
                               if "-gamma" in g), None)
                 if gamma:
                     input_args.remove(gamma)
@@ -220,7 +222,7 @@
                 'files': repr_file,
                 "tags": new_tags,
                 "outputName": name,
-                "codec": profile.get('codec', [])
+                "codec": codec_args
             })
             if repre_new.get('preview'):
                 repre_new.pop("preview")
From 9727a70722f11a41394f1dbdbedda6245623f23d Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 19:00:42 +0100
Subject: [PATCH 072/195] feat(global): making collection of filesequences work for nuke

---
 .../global/publish/collect_filesequences.py | 20 ++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 39481e216b..d0ff5722a3 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -100,6 +100,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
     label = "RenderedFrames"

     def process(self, context):
+        pixel_aspect = 1
+        lut_path = None
         if os.environ.get("PYPE_PUBLISH_PATHS"):
             paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
             self.log.info("Collecting paths: {}".format(paths))
@@ -144,6 +146,12 @@ class 
CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.info("setting session using metadata") api.Session.update(session) os.environ.update(session) + instance = metadata.get("instance") + if instance: + instance_family = instance.get("family") + pixel_aspect = instance.get("pixelAspect", 1) + lut_path = instance.get("lutPath", None) + else: # Search in directory @@ -181,6 +189,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): families.append("ftrack") if "review" not in families: families.append("review") + if "write" in instance_family: + families.append("write") for collection in collections: instance = context.create_instance(str(collection)) @@ -197,6 +207,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): start = data.get("frameStart", indices[0]) end = data.get("frameEnd", indices[-1]) + self.log.debug("Collected pixel_aspect:\n" + "{}".format(pixel_aspect)) + self.log.debug("type pixel_aspect:\n" + "{}".format(type(pixel_aspect))) + # root = os.path.normpath(root) # self.log.info("Source: {}}".format(data.get("source", ""))) @@ -212,8 +227,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "frameStart": start, "frameEnd": end, "fps": fps, - "source": data.get('source', '') + "source": data.get('source', ''), + "pixelAspect": pixel_aspect, }) + if lut_path: + instance.data.update({"lutPath": lut_path}) instance.append(collection) instance.context.data['fps'] = fps From 83f506d3eb94f830eea6a79be379a4f0d958d449 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 19:01:15 +0100 Subject: [PATCH 073/195] feat(nuke): removing families which are not needed anymore --- .../nuke/publish/extract_review_data_lut.py | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index 910b6ee19e..dfc10952cd 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -20,23 +20,23 @@ class ExtractReviewLutData(pype.api.Extractor): hosts = ["nuke"] def process(self, instance): - self.log.debug( - "_ representations: {}".format(instance.data["representations"])) - + families = instance.data["families"] self.log.info("Creating staging dir...") - - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - instance.data["stagingDir"] = stagingDir - - instance.data['representations'][0]["tags"] = ["review"] + if "representations" in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = ["review"] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) - if "representations" not in instance.data: - instance.data["representations"] = [] - with anlib.maintained_selection(): exporter = pnlib.Exporter_review_lut( self, instance @@ -48,6 +48,10 @@ class ExtractReviewLutData(pype.api.Extractor): exporter.stagingDir, exporter.file).replace("\\", "/") instance.data["representations"] += data["representations"] + if "render.farm" in families: + instance.data["families"].remove("review") + instance.data["families"].remove("ftrack") + self.log.debug( "_ lutPath: 
{}".format(instance.data["lutPath"])) self.log.debug( From 75145a4095b1ecaaa593488e6f82ecb1374cad19 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 6 Dec 2019 19:01:43 +0100 Subject: [PATCH 074/195] feat(nuke): make thumbnail exporter available for render farm --- .../plugins/nuke/publish/extract_thumbnail.py | 50 ++++++++++++------- 1 file changed, 32 insertions(+), 18 deletions(-) diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index a58dad02f5..3886fda569 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -15,7 +15,7 @@ class ExtractThumbnail(pype.api.Extractor): order = pyblish.api.ExtractorOrder + 0.01 label = "Extract Thumbnail" - families = ["review"] + families = ["review", "render.farm"] hosts = ["nuke"] def process(self, instance): @@ -28,14 +28,24 @@ class ExtractThumbnail(pype.api.Extractor): self.render_thumbnail(instance) def render_thumbnail(self, instance): - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" + node = instance[0] # group node + self.log.info("Creating staging dir...") + if "representations" not in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = ["review"] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) temporary_nodes = [] - self.log.info("Getting staging dir...") - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) - collection = instance.data.get("collection", None) if collection: @@ -56,17 +66,21 @@ class ExtractThumbnail(pype.api.Extractor): if "#" in fhead: fhead = fhead.replace("#", "")[:-1] - rnode = nuke.createNode("Read") + path_render = os.path.join(staging_dir, fname).replace("\\", "/") + # check if file exist otherwise connect to write node + if os.path.isfile(path_render): + rnode = nuke.createNode("Read") - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) + rnode["file"].setValue(path_render) - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + else: + previous_node = node # get input process and connect it to baking ipn = self.get_view_process_node() @@ -98,7 +112,7 @@ class ExtractThumbnail(pype.api.Extractor): write_node = nuke.createNode("Write") file = fhead + "jpeg" name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") + path = os.path.join(staging_dir, file).replace("\\", "/") instance.data["thumbnail"] = path write_node["file"].setValue(path) write_node["file_type"].setValue("jpeg") @@ -115,7 +129,7 @@ class ExtractThumbnail(pype.api.Extractor): 'name': name, 'ext': "jpeg", 'files': file, - "stagingDir": stagingDir, + "stagingDir": staging_dir, "frameStart": 
            "frameEnd": last_frame,
            "anatomy_template": "render",
From 532e485e5d6588db1d4a0bfa9464da21fdc09cbc Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 6 Dec 2019 19:01:59 +0100
Subject: [PATCH 075/195] feat(nuke): fixing deadline submitter

---
 .../nuke/publish/submit_nuke_deadline.py | 50 ++++++++----------
 1 file changed, 22 insertions(+), 28 deletions(-)

diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py
index 4044026b5e..d9207d2bfc 100644
--- a/pype/plugins/nuke/publish/submit_nuke_deadline.py
+++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py
@@ -1,9 +1,7 @@
 import os
 import json
 import getpass
-
-import nuke
-
+
 from avalon import api
 from avalon.vendor import requests
 import re
@@ -27,40 +25,36 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

     def process(self, instance):

-        node = None
-        for x in instance:
-            if x.Class() == "Write":
-                node = x
-
-        if node is None:
-            return
+        node = instance[0]
+        # for x in instance:
+        #     if x.Class() == "Write":
+        #         node = x
+        #
+        # if node is None:
+        #     return

         DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                            "http://localhost:8082")
         assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

         context = instance.context
-        workspace = os.path.dirname(context.data["currentFile"])
-        filepath = None

-        # get path
-        path = nuke.filename(node)
-        output_dir = instance.data['outputDir']
+        # get output path
+        render_path = instance.data['path']
+        render_dir = os.path.normpath(os.path.dirname(render_path))

-        filepath = context.data["currentFile"]
+        script_path = context.data["currentFile"]

-        self.log.debug(filepath)
-
-        filename = os.path.basename(filepath)
+        script_name = os.path.basename(script_path)

         comment = context.data.get("comment", "")
-        dirname = os.path.join(workspace, "renders")
+
         deadline_user = context.data.get("deadlineUser", getpass.getuser())
-        jobname = "%s - %s" % (filename, instance.name)
+        jobname = "%s - %s" % (script_name, instance.name)
         ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))

         try:
             # Ensure render folder exists
-            os.makedirs(dirname)
+            os.makedirs(render_dir)
         except OSError:
             pass

@@ -71,7 +65,7 @@
         payload = {
             "JobInfo": {
                 # Top-level group name
-                "BatchName": filename,
+                "BatchName": script_name,

                 # Job name, as seen in Monitor
                 "Name": jobname,
@@ -95,20 +89,20 @@
             },
             "PluginInfo": {
                 # Input
-                "SceneFile": filepath,
+                "SceneFile": script_path,

                 # Output directory and filename
-                "OutputFilePath": dirname.replace("\\", "/"),
+                "OutputFilePath": render_dir.replace("\\", "/"),
                 # "OutputFilePrefix": render_variables["filename_prefix"],

                 # Mandatory for Deadline
                 "Version": ver.group(),

                 # Resolve relative references
-                "ProjectPath": workspace,
-
+                "ProjectPath": script_path,
+                "AWSAssetFile0": render_path,
                 # Only the specific write node is rendered.
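                # (Aside, not part of this patch: once assembled, this payload
                # is submitted to the Deadline Web Service, roughly
                #     requests.post(DEADLINE_REST_URL + "/api/jobs", json=payload)
                # "/api/jobs" is Deadline's documented job submission endpoint;
                # the exact call presumably lives further down in this plugin.)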
- "WriteNode": instance[0].name() + "WriteNode": node.name() }, # Mandatory for Deadline, may be empty From a940ef7508ed77d2d7699f336df43df18e2dc300 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 6 Dec 2019 19:45:00 +0100 Subject: [PATCH 076/195] remove default mapping --- pype/ftrack/events/event_version_to_task_statuses.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index 2cdeed7fce..cd83b819bc 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -4,10 +4,7 @@ from pypeapp import config class VersionToTaskStatus(BaseEvent): - default_status_mapping = { - 'reviewed': 'Change requested', - 'approved': 'Complete' - } + default_status_mapping = {} def launch(self, session, event): '''Propagates status from version to task when changed''' @@ -40,6 +37,7 @@ class VersionToTaskStatus(BaseEvent): .get("ftrack_config", {}) .get("status_version_to_task") ) or self.default_status_mapping + status_to_set = status_mapping.get(version_name_low) self.log.info( From 1ae54c6a03c99b06bf15736f7087993e0b16d6f6 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 6 Dec 2019 21:14:26 +0100 Subject: [PATCH 077/195] minor hotfix on reference loader --- pype/plugins/maya/load/load_reference.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py index 55db019cf4..376fcc2c01 100644 --- a/pype/plugins/maya/load/load_reference.py +++ b/pype/plugins/maya/load/load_reference.py @@ -1,9 +1,7 @@ import pype.maya.plugin import os from pypeapp import config -reload(config) -import pype.maya.plugin -reload(pype.maya.plugin) + class ReferenceLoader(pype.maya.plugin.ReferenceLoader): """Load the model""" @@ -22,7 +20,6 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader): from avalon import maya import pymel.core as pm - try: family = context["representation"]["context"]["family"] except ValueError: From 360c11a58db6836cc6336a3c3086f57091eb054c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 6 Dec 2019 21:14:36 +0100 Subject: [PATCH 078/195] add comment to ftrack publish --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 383ed0098b..5e680a172a 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -116,6 +116,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): }, "assetversion_data": { "version": version_number, + "comment": instance.context.data.get("comment", "") }, "component_data": component_data, "component_path": comp['published_path'], From ac875a9306257eb078f5f2d971ce53dbd1f1f834 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 6 Dec 2019 22:44:50 +0100 Subject: [PATCH 079/195] use automatic preset loading --- pype/plugins/global/publish/extract_review.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 9de4a966f3..96c01fffb2 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -22,16 +22,17 @@ class 
ExtractReview(pyblish.api.InstancePlugin): families = ["review"] hosts = ["nuke", "maya", "shell"] + outputs = {} + ext_filter = [] + def process(self, instance): - # adding plugin attributes from presets - publish_presets = config.get_presets()["plugins"]["global"]["publish"] - plugin_attrs = publish_presets[self.__class__.__name__] - output_profiles = plugin_attrs.get("outputs", {}) + + output_profiles = self.outputs or {} inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - pixel_aspect = instance.data["pixelAspect"] + pixel_aspect = instance.data.get("pixelAspect") or 1 self.log.debug("Families In: `{}`".format(instance.data["families"])) # get representation and loop them @@ -40,7 +41,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # filter out mov and img sequences representations_new = representations[:] for repre in representations: - if repre['ext'] in plugin_attrs["ext_filter"]: + if repre['ext'] in self.ext_filter: tags = repre.get("tags", []) self.log.info("Try repre: {}".format(repre)) @@ -92,8 +93,9 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.info("p_tags: `{}`".format(p_tags)) # add families [instance.data["families"].append(t) - for t in p_tags - if t not in instance.data["families"]] + for t in p_tags + if t not in instance.data["families"]] + # add to [new_tags.append(t) for t in p_tags if t not in new_tags] @@ -199,7 +201,6 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.info("Added Lut to ffmpeg command") self.log.debug("_ output_args: `{}`".format(output_args)) - mov_args = [ os.path.join( os.environ.get( @@ -247,7 +248,6 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("Families Out: `{}`".format(instance.data["families"])) - def add_video_filter_args(self, args, inserting_arg): """ Fixing video filter argumets to be one long string From 0f50ead66153922a856c1ebf1623d88db97464a9 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 6 Dec 2019 22:47:54 +0100 Subject: [PATCH 080/195] remove unnecesary import --- pype/plugins/global/publish/extract_review.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 96c01fffb2..7554c080a0 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -3,7 +3,6 @@ import os import pyblish.api import clique import pype.api -from pypeapp import config class ExtractReview(pyblish.api.InstancePlugin): From c069f36d954a93f5786503aa8e3efcb43e8e3df8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 7 Dec 2019 18:06:54 +0100 Subject: [PATCH 081/195] fix(global): reformat was not counting with ratio of format --- pype/plugins/global/publish/extract_review.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 9de4a966f3..936ae74c6f 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -32,6 +32,8 @@ class ExtractReview(pyblish.api.InstancePlugin): fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") pixel_aspect = instance.data["pixelAspect"] + resolution_width = instance.data["resolutionWidth"] + resolution_height = instance.data["resolutionHeight"] self.log.debug("Families In: `{}`".format(instance.data["families"])) # get representation and loop them @@ -167,10 +169,9 @@ class 
ExtractReview(pyblish.api.InstancePlugin): output_args.append(full_output_path) # scaling none square pixels and 1920 width - # scale=320:-2 # to auto count height with output to be multiple of 2 if "reformat" in p_tags: scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format( - pixel_aspect) + (lb/pixel_aspect * (resolution_width / resolution_height))) vf_back = self.add_video_filter_args( output_args, scaling_arg) # add it to output_args From 8bf0874a4ad9368a21dde16be9759c906afde9ef Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 12:12:27 +0100 Subject: [PATCH 082/195] fix(global): LetterBox can be 0 rather then None --- pype/plugins/global/publish/extract_review.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 77b00d52ec..a3707f4e59 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -156,8 +156,8 @@ class ExtractReview(pyblish.api.InstancePlugin): output_args.extend(profile.get('output', [])) # letter_box - lb = profile.get('letter_box', None) - if lb: + lb = profile.get('letter_box', 0) + if lb is not 0: if "reformat" not in p_tags: lb /= pixel_aspect output_args.append( From 101c3bc190d2785e9d337b4f4e1d4b0b7945d000 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 13:25:34 +0100 Subject: [PATCH 083/195] fix(nuke): backward compatibility if avalon knob `ak:` --- pype/plugins/nuke/publish/collect_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index fbff28b282..3f3042ec46 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -34,7 +34,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): # get data from avalon knob self.log.debug("node[name]: {}".format(node['name'].value())) - avalon_knob_data = get_avalon_knob_data(node) + avalon_knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"]) self.log.debug("avalon_knob_data: {}".format(avalon_knob_data)) From 1e76ec6fd48d0e0b303140eb5d335921014a6f91 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:57:10 +0100 Subject: [PATCH 084/195] fix(global): letter box was not applying on 1929x1080 --- pype/plugins/global/publish/extract_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index bf4682b26e..11b7b6ee8a 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -156,7 +156,7 @@ class ExtractReview(pyblish.api.InstancePlugin): lb = profile.get('letter_box', None) if lb: output_args.append( - "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) + "-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) # In case audio is longer than video. 
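To make the drawbox expression above easier to audit: `lb` is the target aspect ratio of the visible picture (width over height), and `round((ih-(iw*(1/lb)))/2)` is the pixel height of each black bar. The same arithmetic in plain Python, as a worked sketch:

def letterbox_bar_height(width, height, target_ratio):
    # height the picture may occupy at this width and target ratio
    visible_height = width / target_ratio
    # whatever is left over is split into equal top and bottom bars
    return round((height - visible_height) / 2)

print(letterbox_bar_height(1920, 1080, 2.35))  # -> 131 (px per bar)

Separately, the `if lb is not 0:` guard introduced in the LetterBox patch above tests identity rather than equality; `if lb != 0:` (or simply `if lb:`) expresses the intent and does not rely on CPython's small-integer caching.
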
output_args.append("-shortest") From 2dd940078ebd23631fcf1eb662754a394f230c7d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:57:34 +0100 Subject: [PATCH 085/195] fix(global): missing coma --- pype/plugins/global/publish/integrate_new.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9021a3f997..faade613f2 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -71,7 +71,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "yetiRig", "yeticache", "nukenodes", - "gizmo" + "gizmo", "source", "matchmove", "image" @@ -414,7 +414,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): } if sequence_repre and repre.get("frameStart"): - representation['context']['frame'] = src_padding_exp % repre.get("frameStart") + representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart")) self.log.debug("__ representation: {}".format(representation)) destination_list.append(dst) From 399d9e8b7531b3202dab5c884af7118d57ce94b3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:58:14 +0100 Subject: [PATCH 086/195] fix(nuke): instance didnt detect other then `write` family --- pype/plugins/nuke/publish/collect_instances.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index fbff28b282..74cb0a8226 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -30,7 +30,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): continue except Exception as E: self.log.warning(E) - continue + # get data from avalon knob self.log.debug("node[name]: {}".format(node['name'].value())) @@ -84,10 +84,16 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): node.end() family = avalon_knob_data["family"] - families = [avalon_knob_data["families"]] + families = avalon_knob_data.get("families") + if families: + families = [families] + else: + families = [family] if node.Class() not in "Read": - if node["render"].value(): + if "render" not in node.knobs().keys(): + families.insert(0, family) + elif node["render"].value(): self.log.info("flagged for render") add_family = "render.local" # dealing with local/farm rendering From 5b89eca421f543e04bd9fcb518327fbe7251fe39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:58:38 +0100 Subject: [PATCH 087/195] fix(nuke): families mishmash --- pype/plugins/nuke/publish/collect_writes.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index ba8a0534b1..cd3dd67bef 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.1 label = "Collect Writes" hosts = ["nuke", "nukeassist"] - families = ["render", "render.local", "render.farm"] + families = ["write"] def process(self, instance): @@ -95,7 +95,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameEnd": last_frame - handle_end, "version": int(version), "colorspace": node["colorspace"].value(), - "families": [instance.data["family"]] + instance.data["families"], + "families": [instance.data["family"]], "subset": 
instance.data["subset"], "fps": instance.context.data["fps"] } @@ -109,6 +109,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): if "deadlinePriority" in group_node.knobs(): deadlinePriority = group_node["deadlinePriority"].value() + families = [f for f in instance.data["families"] if "write" not in f] instance.data.update({ "versionData": version_data, "path": path, @@ -119,10 +120,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameStart": first_frame, "frameEnd": last_frame, "outputType": output_type, + "family": "write", + "families": families, "colorspace": node["colorspace"].value(), "deadlineChunkSize": deadlineChunkSize, "deadlinePriority": deadlinePriority, "subsetGroup": "renders" }) + self.log.debug("instance.data: {}".format(instance.data)) From ac1fab9bdd42242dc77374fc82ca61b20088b01a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:59:00 +0100 Subject: [PATCH 088/195] fix(nuke): path with hashes error --- pype/plugins/nuke/publish/extract_review_data.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py index 791b9d7969..f63ca4d426 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -69,6 +69,9 @@ class ExtractReviewData(pype.api.Extractor): first_frame = instance.data.get("frameStart", None) last_frame = instance.data.get("frameEnd", None) + if "#" in fhead: + fhead = fhead.replace("#", "")[:-1] + rnode = nuke.createNode("Read") rnode["file"].setValue( From a60e02e7c0dea9372acd7d0032af051f41587681 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 9 Dec 2019 14:59:22 +0100 Subject: [PATCH 089/195] fix(nuke): family mishmash --- pype/plugins/nuke/publish/validate_rendered_frames.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py index e244a9b4b6..3887b5d5b7 100644 --- a/pype/plugins/nuke/publish/validate_rendered_frames.py +++ b/pype/plugins/nuke/publish/validate_rendered_frames.py @@ -28,7 +28,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): """ Validates file output. """ order = pyblish.api.ValidatorOrder + 0.1 - families = ["render.no"] + families = ["render"] label = "Validate rendered frame" hosts = ["nuke", "nukestudio"] From 482addd6439265ed71e7c3efb4e8d46470af77e3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 9 Dec 2019 16:52:15 +0100 Subject: [PATCH 090/195] (maya) fixed correct colorspace for linearized textures during look extraction --- pype/plugins/maya/publish/extract_look.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index 5f3c1b33f3..ad43e02d21 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -231,11 +231,12 @@ class ExtractLook(pype.api.Extractor): # ensure after context it's still the original value. 
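For readers less familiar with the Maya side of this hunk: `remap` pairs plug names with the string values they should carry after publish, and applying it is one `cmds.setAttr(..., type="string")` per entry, exactly as the commented-out restore block further down demonstrates. A minimal standalone sketch with illustrative node names and paths:

from maya import cmds

remap = {
    "file1.fileTextureName": "/publish/textures/diffuse.tx",  # illustrative
    "file1.colorSpace": "Raw",  # linearized textures get tagged Raw
}
for attr, value in remap.items():
    cmds.setAttr(attr, value, type="string")
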
color_space_attr = resource["node"] + ".colorSpace" color_space = cmds.getAttr(color_space_attr) - + if files_metadata[source]["color_space"] == "raw": + # set colorpsace to raw if we linearized it + color_space = "Raw" # Remap file node filename to destination attr = resource["attribute"] remap[attr] = destinations[source] - remap[color_space_attr] = color_space self.log.info("Finished remapping destinations ...") @@ -310,6 +311,12 @@ class ExtractLook(pype.api.Extractor): # Source hash for the textures instance.data["sourceHashes"] = hashes + """ + self.log.info("Returning colorspaces to their original values ...") + for attr, value in remap.items(): + self.log.info(" - {}: {}".format(attr, value)) + cmds.setAttr(attr, value, type="string") + """ self.log.info("Extracted instance '%s' to: %s" % (instance.name, maya_path)) From 3469becf6e298744a4b035022699cc81a7a34da8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Dec 2019 17:46:32 +0100 Subject: [PATCH 091/195] remove `delete_asset_by_name` action which may cause issues on duplicated names or not syncrhonized entities --- .../actions/action_delete_asset_byname.py | 175 ------------------ 1 file changed, 175 deletions(-) delete mode 100644 pype/ftrack/actions/action_delete_asset_byname.py diff --git a/pype/ftrack/actions/action_delete_asset_byname.py b/pype/ftrack/actions/action_delete_asset_byname.py deleted file mode 100644 index c05c135991..0000000000 --- a/pype/ftrack/actions/action_delete_asset_byname.py +++ /dev/null @@ -1,175 +0,0 @@ -import os -import sys -import logging -import argparse -import ftrack_api -from pype.ftrack import BaseAction -from pype.ftrack.lib.io_nonsingleton import DbConnector - - -class AssetsRemover(BaseAction): - '''Edit meta data action.''' - - #: Action identifier. - identifier = 'remove.assets' - #: Action label. - label = "Pype Admin" - variant = '- Delete Assets by Name' - #: Action description. - description = 'Removes assets from Ftrack and Avalon db with all childs' - #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] - icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') - ) - #: Db - db = DbConnector() - - def discover(self, session, entities, event): - ''' Validation ''' - if len(entities) != 1: - return False - - valid = ["show", "task"] - entityType = event["data"]["selection"][0].get("entityType", "") - if entityType.lower() not in valid: - return False - - return True - - def interface(self, session, entities, event): - if not event['data'].get('values', {}): - title = 'Enter Asset names to delete' - - items = [] - for i in range(15): - - item = { - 'label': 'Asset {}'.format(i+1), - 'name': 'asset_{}'.format(i+1), - 'type': 'text', - 'value': '' - } - items.append(item) - - return { - 'items': items, - 'title': title - } - - def launch(self, session, entities, event): - entity = entities[0] - if entity.entity_type.lower() != 'Project': - project = entity['project'] - else: - project = entity - - if 'values' not in event['data']: - return - - values = event['data']['values'] - if len(values) <= 0: - return { - 'success': True, - 'message': 'No Assets to delete!' 
- } - - asset_names = [] - - for k, v in values.items(): - if v.replace(' ', '') != '': - asset_names.append(v) - - self.db.install() - self.db.Session['AVALON_PROJECT'] = project["full_name"] - - assets = self.find_assets(asset_names) - - all_ids = [] - for asset in assets: - all_ids.append(asset['_id']) - all_ids.extend(self.find_child(asset)) - - if len(all_ids) == 0: - self.db.uninstall() - return { - 'success': True, - 'message': 'None of assets' - } - - delete_query = {'_id': {'$in': all_ids}} - self.db.delete_many(delete_query) - - self.db.uninstall() - return { - 'success': True, - 'message': 'All assets were deleted!' - } - - def find_child(self, entity): - output = [] - id = entity['_id'] - visuals = [x for x in self.db.find({'data.visualParent': id})] - assert len(visuals) == 0, 'This asset has another asset as child' - childs = self.db.find({'parent': id}) - for child in childs: - output.append(child['_id']) - output.extend(self.find_child(child)) - return output - - def find_assets(self, asset_names): - assets = [] - for name in asset_names: - entity = self.db.find_one({ - 'type': 'asset', - 'name': name - }) - if entity is not None and entity not in assets: - assets.append(entity) - return assets - - -def register(session, plugins_presets={}): - '''Register plugin. Called when used as an plugin.''' - - AssetsRemover(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) From 5df3afd0bc7ec0b1b25d440e552bd8a95afdd64b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Dec 2019 17:48:57 +0100 Subject: [PATCH 092/195] delete asset is totally rewritten --- pype/ftrack/actions/action_delete_asset.py | 794 ++++++++++++++------- 1 file changed, 523 insertions(+), 271 deletions(-) diff --git a/pype/ftrack/actions/action_delete_asset.py b/pype/ftrack/actions/action_delete_asset.py index df760f7c21..7eb9126fca 100644 --- a/pype/ftrack/actions/action_delete_asset.py +++ b/pype/ftrack/actions/action_delete_asset.py @@ -1,354 +1,606 @@ import os -import sys -import logging +import collections +import uuid +from datetime import datetime +from queue import Queue + from bson.objectid import ObjectId -import argparse -import ftrack_api from pype.ftrack import BaseAction from pype.ftrack.lib.io_nonsingleton import DbConnector -class DeleteAsset(BaseAction): +class DeleteAssetSubset(BaseAction): '''Edit meta data action.''' #: Action identifier. - identifier = 'delete.asset' + identifier = "delete.asset.subset" #: Action label. - label = 'Delete Asset/Subsets' + label = "Delete Asset/Subsets" #: Action description. 
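A structural note on the rewrite that follows: nothing is hard-deleted on the Avalon side. Documents are archived by flipping their `type` field (`asset` becomes `archived_asset`, `subset` becomes `archived_subset`), which keeps the records recoverable. The pattern, as it appears twice in the body below, shown standalone with an illustrative id list:

from bson.objectid import ObjectId

asset_ids_to_archive = [ObjectId()]  # illustrative; collected from selection
dbcon.update_many(
    {"_id": {"$in": asset_ids_to_archive}, "type": "asset"},
    {"$set": {"type": "archived_asset"}}
)
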
- description = 'Removes from Avalon with all childs and asset from Ftrack' - icon = '{}/ftrack/action_icons/DeleteAsset.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') + description = "Removes from Avalon with all childs and asset from Ftrack" + icon = "{}/ftrack/action_icons/DeleteAsset.svg".format( + os.environ.get("PYPE_STATICS_SERVER", "") ) #: roles that are allowed to register this action - role_list = ['Pypeclub', 'Administrator'] - #: Db - db = DbConnector() + role_list = ["Pypeclub", "Administrator", "Project Manager"] + #: Db connection + dbcon = DbConnector() - value = None + splitter = {"type": "label", "value": "---"} + action_data_by_id = {} + asset_prefix = "asset:" + subset_prefix = "subset:" def discover(self, session, entities, event): - ''' Validation ''' - if len(entities) != 1: - return False + """ Validation """ + task_ids = [] + for ent_info in event["data"]["selection"]: + entType = ent_info.get("entityType", "") + if entType == "task": + task_ids.append(ent_info["entityId"]) - valid = ["task"] - entityType = event["data"]["selection"][0].get("entityType", "") - if entityType.lower() not in valid: - return False - - return True + for entity in entities: + ftrack_id = entity["id"] + if ftrack_id not in task_ids: + continue + if entity.entity_type.lower() != "task": + return True + return False def _launch(self, event): - self.reset_session() try: - self.db.install() args = self._translate_event( self.session, event ) + if "values" not in event["data"]: + self.dbcon.install() + return self._interface(self.session, *args) - interface = self._interface( - self.session, *args - ) - - confirmation = self.confirm_delete( - True, *args - ) - - if interface: - return interface - + confirmation = self.confirm_delete(*args) if confirmation: return confirmation + self.dbcon.install() response = self.launch( self.session, *args ) finally: - self.db.uninstall() + self.dbcon.uninstall() return self._handle_result( self.session, response, *args ) def interface(self, session, entities, event): - if not event['data'].get('values', {}): - self.attempt = 1 - items = [] - entity = entities[0] - title = 'Choose items to delete from "{}"'.format(entity['name']) - project = entity['project'] + self.show_message(event, "Preparing data...", True) + items = [] + title = "Choose items to delete" - self.db.Session['AVALON_PROJECT'] = project["full_name"] + # Filter selection and get ftrack ids + selection = event["data"].get("selection") or [] + ftrack_ids = [] + project_in_selection = False + for entity in selection: + entity_type = (entity.get("entityType") or "").lower() + if entity_type != "task": + if entity_type == "show": + project_in_selection = True + continue - av_entity = self.db.find_one({ - 'type': 'asset', - 'name': entity['name'] + ftrack_id = entity.get("entityId") + if not ftrack_id: + continue + + ftrack_ids.append(ftrack_id) + + if project_in_selection: + msg = "It is not possible to use this action on project entity." + self.show_message(event, msg, True) + + # Filter event even more (skip task entities) + # - task entities are not relevant for avalon + for entity in entities: + ftrack_id = entity["id"] + if ftrack_id not in ftrack_ids: + continue + + if entity.entity_type.lower() == "task": + ftrack_ids.remove(ftrack_id) + + if not ftrack_ids: + # It is bug if this happens! 
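One detail in the cache cleanup above deserves a second look: `(created_at - cur_time)` is negative for any entry created in the past, so `existing_in_sec > 60 * 2` can never be true and stale entries are never evicted. The operands almost certainly want to be swapped; a corrected sketch of the intended test:

from datetime import datetime

def is_stale(created_at, max_age_seconds=120):
    # age is "now minus creation time"; the reversed subtraction is
    # always negative and would never exceed a positive threshold
    return (datetime.now() - created_at).total_seconds() > max_age_seconds
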
+ return { + "success": False, + "message": "Invalid selection for this action (Bug)" + } + + if entities[0].entity_type.lower() == "project": + project = entities[0] + else: + project = entities[0]["project"] + + project_name = project["full_name"] + self.dbcon.Session["AVALON_PROJECT"] = project_name + + selected_av_entities = self.dbcon.find({ + "type": "asset", + "data.ftrackId": {"$in": ftrack_ids} + }) + selected_av_entities = [ent for ent in selected_av_entities] + if not selected_av_entities: + return { + "success": False, + "message": "Didn't found entities in avalon" + } + + # Remove cached action older than 2 minutes + old_action_ids = [] + for id, data in self.action_data_by_id.items(): + created_at = data.get("created_at") + if not created_at: + old_action_ids.append(id) + continue + cur_time = datetime.now() + existing_in_sec = (created_at - cur_time).total_seconds() + if existing_in_sec > 60 * 2: + old_action_ids.append(id) + + for id in old_action_ids: + self.action_data_by_id.pop(id, None) + + # Store data for action id + action_id = str(uuid.uuid1()) + self.action_data_by_id[action_id] = { + "attempt": 1, + "created_at": datetime.now(), + "project_name": project_name, + "subset_ids_by_name": {}, + "subset_ids_by_parent": {} + } + + id_item = { + "type": "hidden", + "name": "action_id", + "value": action_id + } + + items.append(id_item) + asset_ids = [ent["_id"] for ent in selected_av_entities] + subsets_for_selection = self.dbcon.find({ + "type": "subset", + "parent": {"$in": asset_ids} + }) + + asset_ending = "" + if len(selected_av_entities) > 1: + asset_ending = "s" + + asset_title = { + "type": "label", + "value": "# Delete asset{}:".format(asset_ending) + } + asset_note = { + "type": "label", + "value": ( + "
NOTE: Action will delete checked entities"
+                    " in Ftrack and Avalon with all children entities and"
+                    " published content.
" + ) + } + + items.append(asset_title) + items.append(asset_note) + + asset_items = collections.defaultdict(list) + for asset in selected_av_entities: + ent_path_items = [project_name] + ent_path_items.extend(asset.get("data", {}).get("parents") or []) + ent_path_to_parent = "/".join(ent_path_items) + "/" + asset_items[ent_path_to_parent].append(asset) + + for asset_parent_path, assets in sorted(asset_items.items()): + items.append({ + "type": "label", + "value": "## - {}".format(asset_parent_path) }) - - if av_entity is None: - return { - 'success': False, - 'message': 'Didn\'t found assets in avalon' - } - - asset_label = { - 'type': 'label', - 'value': '## Delete whole asset: ##' - } - asset_item = { - 'label': av_entity['name'], - 'name': 'whole_asset', - 'type': 'boolean', - 'value': False - } - splitter = { - 'type': 'label', - 'value': '{}'.format(200*"-") - } - subset_label = { - 'type': 'label', - 'value': '## Subsets: ##' - } - if av_entity is not None: - items.append(asset_label) - items.append(asset_item) - items.append(splitter) - - all_subsets = self.db.find({ - 'type': 'subset', - 'parent': av_entity['_id'] + for asset in assets: + items.append({ + "label": asset["name"], + "name": "{}{}".format( + self.asset_prefix, str(asset["_id"]) + ), + "type": 'boolean', + "value": False }) - subset_items = [] - for subset in all_subsets: - item = { - 'label': subset['name'], - 'name': str(subset['_id']), - 'type': 'boolean', - 'value': False - } - subset_items.append(item) - if len(subset_items) > 0: - items.append(subset_label) - items.extend(subset_items) - else: - return { - 'success': False, - 'message': 'Didn\'t found assets in avalon' - } + subset_ids_by_name = collections.defaultdict(list) + subset_ids_by_parent = collections.defaultdict(list) + for subset in subsets_for_selection: + subset_id = subset["_id"] + name = subset["name"] + parent_id = subset["parent"] + subset_ids_by_name[name].append(subset_id) + subset_ids_by_parent[parent_id].append(subset_id) + if not subset_ids_by_name: return { - 'items': items, - 'title': title + "items": items, + "title": title } - def confirm_delete(self, first_attempt, entities, event): - if first_attempt is True: - if 'values' not in event['data']: - return + subset_ending = "" + if len(subset_ids_by_name.keys()) > 1: + subset_ending = "s" - values = event['data']['values'] + subset_title = { + "type": "label", + "value": "# Subset{} to delete:".format(subset_ending) + } + subset_note = { + "type": "label", + "value": ( + "
WARNING: Subset{} will be removed"
+                    " for all selected entities.
" + ).format(subset_ending) + } - if len(values) <= 0: - return - if 'whole_asset' not in values: - return - else: - values = self.values + items.append(self.splitter) + items.append(subset_title) + items.append(subset_note) - title = 'Confirmation of deleting {}' - if values['whole_asset'] is True: - title = title.format( - 'whole asset {}'.format( - entities[0]['name'] - ) - ) - else: - subsets = [] - for key, value in values.items(): - if value is True: - subsets.append(key) - len_subsets = len(subsets) - if len_subsets == 0: + for name in subset_ids_by_name: + items.append({ + "label": "{}".format(name), + "name": "{}{}".format(self.subset_prefix, name), + "type": "boolean", + "value": False + }) + + self.action_data_by_id[action_id]["subset_ids_by_parent"] = ( + subset_ids_by_parent + ) + self.action_data_by_id[action_id]["subset_ids_by_name"] = ( + subset_ids_by_name + ) + + return { + "items": items, + "title": title + } + + def confirm_delete(self, entities, event): + values = event["data"]["values"] + action_id = values.get("action_id") + spec_data = self.action_data_by_id.get(action_id) + if not spec_data: + # it is a bug if this happens! + return { + "success": False, + "message": "Something bad has happened. Please try again." + } + + # Process Delete confirmation + delete_key = values.get("delete_key") + if delete_key: + delete_key = delete_key.lower().strip() + # Go to launch part if user entered `delete` + if delete_key == "delete": + return + # Skip whole process if user didn't enter any text + elif delete_key == "": + self.action_data_by_id.pop(action_id, None) return { - 'success': True, - 'message': 'Nothing was selected to delete' + "success": True, + "message": "Deleting cancelled (delete entry was empty)" } - elif len_subsets == 1: - title = title.format( - '{} subset'.format(len_subsets) - ) - else: - title = title.format( - '{} subsets'.format(len_subsets) - ) + # Get data to show again + to_delete = spec_data["to_delete"] + + else: + to_delete = collections.defaultdict(list) + for key, value in values.items(): + if not value: + continue + if key.startswith(self.asset_prefix): + _key = key.replace(self.asset_prefix, "") + to_delete["assets"].append(_key) + + elif key.startswith(self.subset_prefix): + _key = key.replace(self.subset_prefix, "") + to_delete["subsets"].append(_key) + + self.action_data_by_id[action_id]["to_delete"] = to_delete + + asset_to_delete = len(to_delete.get("assets") or []) > 0 + subset_to_delete = len(to_delete.get("subsets") or []) > 0 + + if not asset_to_delete and not subset_to_delete: + self.action_data_by_id.pop(action_id, None) + return { + "success": True, + "message": "Nothing was selected to delete" + } + + attempt = spec_data["attempt"] + if attempt > 3: + self.action_data_by_id.pop(action_id, None) + return { + "success": False, + "message": "You didn't enter \"DELETE\" properly 3 times!" 
+ } + + self.action_data_by_id[action_id]["attempt"] += 1 + + title = "Confirmation of deleting" + + if asset_to_delete: + asset_len = len(to_delete["assets"]) + asset_ending = "" + if asset_len > 1: + asset_ending = "s" + title += " {} Asset{}".format(asset_len, asset_ending) + if subset_to_delete: + title += " and" + + if subset_to_delete: + sub_len = len(to_delete["subsets"]) + type_ending = "" + sub_ending = "" + if sub_len == 1: + subset_ids_by_name = spec_data["subset_ids_by_name"] + if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1: + sub_ending = "s" + + elif sub_len > 1: + type_ending = "s" + sub_ending = "s" + + title += " {} type{} of subset{}".format( + sub_len, type_ending, sub_ending + ) - self.values = values items = [] + id_item = {"type": "hidden", "name": "action_id", "value": action_id} delete_label = { 'type': 'label', 'value': '# Please enter "DELETE" to confirm #' } - delete_item = { - 'name': 'delete_key', - 'type': 'text', - 'value': '', - 'empty_text': 'Type Delete here...' + "name": "delete_key", + "type": "text", + "value": "", + "empty_text": "Type Delete here..." } + + items.append(id_item) items.append(delete_label) items.append(delete_item) return { - 'items': items, - 'title': title + "items": items, + "title": title } def launch(self, session, entities, event): - if 'values' not in event['data']: - return - - values = event['data']['values'] - if len(values) <= 0: - return - if 'delete_key' not in values: - return - - if values['delete_key'].lower() != 'delete': - if values['delete_key'].lower() == '': - return { - 'success': False, - 'message': 'Deleting cancelled' - } - if self.attempt < 3: - self.attempt += 1 - return_dict = self.confirm_delete(False, entities, event) - return_dict['title'] = '{} ({} attempt)'.format( - return_dict['title'], self.attempt - ) - return return_dict + self.show_message(event, "Processing...", True) + values = event["data"]["values"] + action_id = values.get("action_id") + spec_data = self.action_data_by_id.get(action_id) + if not spec_data: + # it is a bug if this happens! return { - 'success': False, - 'message': 'You didn\'t enter "DELETE" properly 3 times!' + "success": False, + "message": "Something bad has happened. Please try again." 
} - entity = entities[0] - project = entity['project'] + report_messages = collections.defaultdict(list) - self.db.Session['AVALON_PROJECT'] = project["full_name"] + project_name = spec_data["project_name"] + to_delete = spec_data["to_delete"] + self.dbcon.Session["AVALON_PROJECT"] = project_name - all_ids = [] - if self.values.get('whole_asset', False) is True: - av_entity = self.db.find_one({ - 'type': 'asset', - 'name': entity['name'] + assets_to_delete = to_delete.get("assets") or [] + subsets_to_delete = to_delete.get("subsets") or [] + + # Convert asset ids to ObjectId obj + assets_to_delete = [ObjectId(id) for id in assets_to_delete if id] + + subset_ids_by_parent = spec_data["subset_ids_by_parent"] + subset_ids_by_name = spec_data["subset_ids_by_name"] + + subset_ids_to_archive = [] + asset_ids_to_archive = [] + ftrack_ids_to_delete = [] + if len(assets_to_delete) > 0: + # Prepare data when deleting whole avalon asset + avalon_assets = self.dbcon.find({"type": "asset"}) + avalon_assets_by_parent = collections.defaultdict(list) + for asset in avalon_assets: + parent_id = asset["data"]["visualParent"] + avalon_assets_by_parent[parent_id].append(asset) + if asset["_id"] in assets_to_delete: + ftrack_id = asset["data"]["ftrackId"] + ftrack_ids_to_delete.append(ftrack_id) + + children_queue = Queue() + for mongo_id in assets_to_delete: + children_queue.put(mongo_id) + + while not children_queue.empty(): + mongo_id = children_queue.get() + if mongo_id in asset_ids_to_archive: + continue + + asset_ids_to_archive.append(mongo_id) + for subset_id in subset_ids_by_parent.get(mongo_id, []): + if subset_id not in subset_ids_to_archive: + subset_ids_to_archive.append(subset_id) + + children = avalon_assets_by_parent.get(mongo_id) + if not children: + continue + + for child in children: + child_id = child["_id"] + if child_id not in asset_ids_to_archive: + children_queue.put(child_id) + + # Prepare names of assets in ftrack and ids of subsets in mongo + asset_names_to_delete = [] + if len(subsets_to_delete) > 0: + for name in subsets_to_delete: + asset_names_to_delete.append(name) + for subset_id in subset_ids_by_name[name]: + if subset_id in subset_ids_to_archive: + continue + subset_ids_to_archive.append(subset_id) + + # Get ftrack ids of entities where will be delete only asset + not_deleted_entities_id = [] + ftrack_id_name_map = {} + if asset_names_to_delete: + for entity in entities: + ftrack_id = entity["id"] + ftrack_id_name_map[ftrack_id] = entity["name"] + if ftrack_id in ftrack_ids_to_delete: + continue + not_deleted_entities_id.append(ftrack_id) + + mongo_proc_txt = "MongoProcessing: " + ftrack_proc_txt = "Ftrack processing: " + if asset_ids_to_archive: + self.log.debug("{}Archivation of assets <{}>".format( + mongo_proc_txt, + ", ".join([str(id) for id in asset_ids_to_archive]) + )) + self.dbcon.update_many( + { + "_id": {"$in": asset_ids_to_archive}, + "type": "asset" + }, + {"$set": {"type": "archived_asset"}} + ) + + if subset_ids_to_archive: + self.log.debug("{}Archivation of subsets <{}>".format( + mongo_proc_txt, + ", ".join([str(id) for id in subset_ids_to_archive]) + )) + self.dbcon.update_many( + { + "_id": {"$in": subset_ids_to_archive}, + "type": "subset" + }, + {"$set": {"type": "archived_subset"}} + ) + + if ftrack_ids_to_delete: + self.log.debug("{}Deleting Ftrack Entities <{}>".format( + ftrack_proc_txt, ", ".join(ftrack_ids_to_delete) + )) + + joined_ids_to_delete = ", ".join( + ["\"{}\"".format(id) for id in ftrack_ids_to_delete] + ) + ftrack_ents_to_delete = 
self.session.query( + "select id, link from TypedContext where id in ({})".format( + joined_ids_to_delete + ) + ).all() + for entity in ftrack_ents_to_delete: + self.session.delete(entity) + try: + self.session.commit() + except Exception: + ent_path = "/".join( + [ent["name"] for ent in entity["link"]] + ) + msg = "Failed to delete entity" + report_messages[msg].append(ent_path) + self.session.rollback() + self.log.warning( + "{} <{}>".format(msg, ent_path), + exc_info=True + ) + + if not_deleted_entities_id: + joined_not_deleted = ", ".join([ + "\"{}\"".format(ftrack_id) + for ftrack_id in not_deleted_entities_id + ]) + joined_asset_names = ", ".join([ + "\"{}\"".format(name) + for name in asset_names_to_delete + ]) + # Find assets of selected entities with names of checked subsets + assets = self.session.query(( + "select id from Asset where" + " context_id in ({}) and name in ({})" + ).format(joined_not_deleted, joined_asset_names)).all() + + self.log.debug("{}Deleting Ftrack Assets <{}>".format( + ftrack_proc_txt, + ", ".join([asset["id"] for asset in assets]) + )) + for asset in assets: + self.session.delete(asset) + try: + self.session.commit() + except Exception: + self.session.rollback() + msg = "Failed to delete asset" + report_messages[msg].append(asset["id"]) + self.log.warning( + "{} <{}>".format(asset["id"]), + exc_info=True + ) + + return self.report_handle(report_messages, project_name, event) + + def report_handle(self, report_messages, project_name, event): + if not report_messages: + return { + "success": True, + "message": "Deletion was successful!" + } + + title = "Delete report ({}):".format(project_name) + items = [] + items.append({ + "type": "label", + "value": "# Deleting was not completely successful" + }) + items.append({ + "type": "label", + "value": "
Check logs for more information
" + }) + for msg, _items in report_messages.items(): + if not _items or not msg: + continue + + items.append({ + "type": "label", + "value": "# {}".format(msg) }) - if av_entity is not None: - all_ids.append(av_entity['_id']) - all_ids.extend(self.find_child(av_entity)) + if isinstance(_items, str): + _items = [_items] + items.append({ + "type": "label", + "value": '
{}
'.format("
".join(_items)) + }) + items.append(self.splitter) - session.delete(entity) - session.commit() - else: - subset_names = [] - for key, value in self.values.items(): - if key == 'delete_key' or value is False: - continue - - entity_id = ObjectId(key) - av_entity = self.db.find_one({'_id': entity_id}) - subset_names.append(av_entity['name']) - if av_entity is None: - continue - all_ids.append(entity_id) - all_ids.extend(self.find_child(av_entity)) - - for ft_asset in entity['assets']: - if ft_asset['name'] in subset_names: - session.delete(ft_asset) - session.commit() - - if len(all_ids) == 0: - return { - 'success': True, - 'message': 'No entities to delete in avalon' - } - - delete_query = {'_id': {'$in': all_ids}} - self.db.delete_many(delete_query) + self.show_interface(items, title, event) return { - 'success': True, - 'message': 'All assets were deleted!' + "success": False, + "message": "Deleting finished. Read report messages." } - def find_child(self, entity): - output = [] - id = entity['_id'] - visuals = [x for x in self.db.find({'data.visualParent': id})] - assert len(visuals) == 0, 'This asset has another asset as child' - childs = self.db.find({'parent': id}) - for child in childs: - output.append(child['_id']) - output.extend(self.find_child(child)) - return output - - def find_assets(self, asset_names): - assets = [] - for name in asset_names: - entity = self.db.find_one({ - 'type': 'asset', - 'name': name - }) - if entity is not None and entity not in assets: - assets.append(entity) - return assets - def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' - DeleteAsset(session, plugins_presets).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' 
- ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) + DeleteAssetSubset(session, plugins_presets).register() From b2e7c60c28cb4917c7aca4f36a35f50b46a5ffdf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 02:02:57 +0100 Subject: [PATCH 093/195] fix(global): reformat now works on width and height --- pype/plugins/global/publish/extract_review.py | 36 ++++++++++++++++--- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index a3707f4e59..786df95fc1 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -1,5 +1,5 @@ import os - +import math import pyblish.api import clique import pype.api @@ -31,8 +31,8 @@ class ExtractReview(pyblish.api.InstancePlugin): inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - resolution_height = instance.dataget("resolutionHeight", 1080) - resolution_width = instance.dataget("resolutionWidth", 1920) + resolution_height = instance.data.get("resolutionHeight", 1080) + resolution_width = instance.data.get("resolutionWidth", 1920) pixel_aspect = instance.data.get("pixelAspect", 1) self.log.debug("Families In: `{}`".format(instance.data["families"])) @@ -169,10 +169,36 @@ class ExtractReview(pyblish.api.InstancePlugin): # output filename output_args.append(full_output_path) + self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect)) + self.log.debug("__ resolution_width: `{}`".format(resolution_width)) + self.log.debug("__ resolution_height: `{}`".format(resolution_height)) # scaling none square pixels and 1920 width if "reformat" in p_tags: - scaling_arg = "scale=1920:'ceil((1920/{})/2)*2':flags=lanczos,setsar=1".format( - (lb/pixel_aspect * (resolution_width / resolution_height))) + width_scale = 1920 + width_half_pad = 0 + res_w = int(float(resolution_width) * pixel_aspect) + height_half_pad = int(( + (res_w - 1920) / ( + res_w * .01) * ( + 1080 * .01)) / 2 + ) + height_scale = 1080 - (height_half_pad * 2) + if height_scale > 1080: + height_half_pad = 0 + height_scale = 1080 + width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height))) ) / 2 + width_scale = int(1920 - (width_half_pad * 2)) + + self.log.debug("__ width_scale: `{}`".format(width_scale)) + self.log.debug("__ width_half_pad: `{}`".format(width_half_pad)) + self.log.debug("__ height_scale: `{}`".format(height_scale)) + self.log.debug("__ height_half_pad: `{}`".format(height_half_pad)) + + + scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format( + width_scale, height_scale, width_half_pad, height_half_pad + ) + vf_back = self.add_video_filter_args( output_args, scaling_arg) # add it to output_args From 3faef65e44338e9380a208d8b1eabbf6c2d38afd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 02:04:05 +0100 Subject: [PATCH 094/195] fix(nuke): little bits --- pype/plugins/global/publish/integrate_new.py | 2 +- pype/plugins/nuke/publish/collect_writes.py | 3 ++- pype/plugins/nuke/publish/extract_thumbnail.py | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9021a3f997..c723631679 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -414,7 +414,7 @@ class 
IntegrateAssetNew(pyblish.api.InstancePlugin): } if sequence_repre and repre.get("frameStart"): - representation['context']['frame'] = src_padding_exp % repre.get("frameStart") + representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart")) self.log.debug("__ representation: {}".format(representation)) destination_list.append(dst) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 5484d971bf..c9c516c888 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -76,7 +76,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): } try: - collected_frames = os.listdir(output_dir) + collected_frames = [f for f in os.listdir(output_dir) + if ext in f] if collected_frames: representation['frameStart'] = "%0{}d".format( len(str(last_frame))) % first_frame diff --git a/pype/plugins/nuke/publish/extract_thumbnail.py b/pype/plugins/nuke/publish/extract_thumbnail.py index 3886fda569..450bb39928 100644 --- a/pype/plugins/nuke/publish/extract_thumbnail.py +++ b/pype/plugins/nuke/publish/extract_thumbnail.py @@ -30,7 +30,7 @@ class ExtractThumbnail(pype.api.Extractor): def render_thumbnail(self, instance): node = instance[0] # group node self.log.info("Creating staging dir...") - if "representations" not in instance.data: + if "representations" in instance.data: staging_dir = instance.data[ "representations"][0]["stagingDir"].replace("\\", "/") instance.data["stagingDir"] = staging_dir From 37792d0829c9bf340ee5fa872a1da3809d115443 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 13:22:26 +0100 Subject: [PATCH 095/195] implemented tray_exit method to stop threads and clear signals on tray exit in idle manager service --- pype/services/idle_manager/idle_manager.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pype/services/idle_manager/idle_manager.py b/pype/services/idle_manager/idle_manager.py index 64cafcd193..86caa9ddd2 100644 --- a/pype/services/idle_manager/idle_manager.py +++ b/pype/services/idle_manager/idle_manager.py @@ -29,6 +29,13 @@ class IdleManager(QtCore.QThread): def tray_start(self): self.start() + def tray_exit(self): + self.stop() + try: + self.time_signals = {} + except Exception: + pass + def add_time_signal(self, emit_time, signal): """ If any module want to use IdleManager, need to use add_time_signal :param emit_time: time when signal will be emitted From 5a0eed0d76ab6554beeca6698b87ab6c7576e173 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 13:22:43 +0100 Subject: [PATCH 096/195] import cleanup in idle manager service --- pype/services/idle_manager/idle_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/services/idle_manager/idle_manager.py b/pype/services/idle_manager/idle_manager.py index 86caa9ddd2..0897245049 100644 --- a/pype/services/idle_manager/idle_manager.py +++ b/pype/services/idle_manager/idle_manager.py @@ -1,6 +1,6 @@ import time import collections -from Qt import QtCore, QtGui, QtWidgets +from Qt import QtCore from pynput import mouse, keyboard from pypeapp import Logger From 8159b029d573ffdc22e556caafae6bc96843986e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 10 Dec 2019 12:25:24 +0000 Subject: [PATCH 097/195] (genera) fixing subprocess function to allow pass through Popen.subprocess arguments. 
Also adding better output for running subprocess --- pype/lib.py | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index c8fade7f4a..8772608b38 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -14,24 +14,35 @@ log = logging.getLogger(__name__) # Special naming case for subprocess since its a built-in method. -def _subprocess(args): +def _subprocess(*args, **kwargs): """Convenience method for getting output errors for subprocess.""" # make sure environment contains only strings - env = {k: str(v) for k, v in os.environ.items()} + filtered_env = {k: str(v) for k, v in os.environ.items()} - proc = subprocess.Popen( - args, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - stdin=subprocess.PIPE, - env=env - ) + # set overrides + kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE) + kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT) + kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE) + kwargs['env'] = kwargs.get('env',filtered_env) - output = proc.communicate()[0] + proc = subprocess.Popen(*args, **kwargs) + + output, error = proc.communicate() + + if output: + output = output.decode("utf-8") + output += "\n" + for line in output.strip().split("\n"): + log.info(line) + + if error: + error = error.decode("utf-8") + error += "\n" + for line in error.strip().split("\n"): + log.error(line) if proc.returncode != 0: - log.error(output) raise ValueError("\"{}\" was not successful: {}".format(args, output)) return output From a4ae644e35aec8fdadd361401c96f321e4fd9eb9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 14:05:05 +0100 Subject: [PATCH 098/195] feat(nuke): Loader plugin for nukenodes --- pype/nuke/lib.py | 67 +++++ pype/plugins/nuke/load/load_backdrop.py | 319 ++++++++++++++++++++++++ 2 files changed, 386 insertions(+) create mode 100644 pype/plugins/nuke/load/load_backdrop.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 816a7d5116..202798893a 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1230,3 +1230,70 @@ def get_dependent_nodes(nodes): }) return connections_in, connections_out + + +def find_free_space_to_paste_nodes( + nodes, + group=nuke.root(), + direction="right", + offset=300): + """ + For getting coordinates in DAG (node graph) for placing new nodes + + Arguments: + nodes (list): list of nuke.Node objects + group (nuke.Node) [optional]: object in which context it is + direction (str) [optional]: where we want it to be placed + [left, right, top, bottom] + offset (int) [optional]: what offset it is from rest of nodes + + Returns: + xpos (int): x coordinace in DAG + ypos (int): y coordinace in DAG + """ + if len(nodes) == 0: + return 0, 0 + + group_xpos = list() + group_ypos = list() + + # get local coordinates of all nodes + nodes_xpos = [n.xpos() for n in nodes] + \ + [n.xpos() + n.screenWidth() for n in nodes] + + nodes_ypos = [n.ypos() for n in nodes] + \ + [n.ypos() + n.screenHeight() for n in nodes] + + # get complete screen size of all nodes to be placed in + nodes_screen_width = max(nodes_xpos) - min(nodes_xpos) + nodes_screen_heigth = max(nodes_ypos) - min(nodes_ypos) + + # get screen size (r,l,t,b) of all nodes in `group` + with group: + group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \ + [n.xpos() + n.screenWidth() for n in nuke.allNodes() + if n not in nodes] + group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \ + [n.ypos() + n.screenHeight() for n in nuke.allNodes() + if n not in nodes] + + # calc 
output left + if direction in "left": + xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset) + ypos = min(group_ypos) + return xpos, ypos + # calc output right + if direction in "right": + xpos = max(group_xpos) + abs(offset) + ypos = min(group_ypos) + return xpos, ypos + # calc output top + if direction in "top": + xpos = min(group_xpos) + ypos = min(group_ypos) - abs(nodes_screen_heigth) - abs(offset) + return xpos, ypos + # calc output bottom + if direction in "bottom": + xpos = min(group_xpos) + ypos = max(group_ypos) + abs(offset) + return xpos, ypos diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py new file mode 100644 index 0000000000..7f58d4e9ec --- /dev/null +++ b/pype/plugins/nuke/load/load_backdrop.py @@ -0,0 +1,319 @@ +from avalon import api, style, io +import nuke +import nukescripts +from pype.nuke import lib as pnlib +from avalon.nuke import lib as anlib +from avalon.nuke import containerise, update_container +reload(pnlib) + +class LoadBackdropNodes(api.Loader): + """Loading Published Backdrop nodes (workfile, nukenodes)""" + + representations = ["nk"] + families = ["workfile", "nukenodes"] + + label = "Iport Nuke Nodes" + order = 0 + icon = "eye" + color = style.colors.light + node_color = "0x7533c1ff" + + def load(self, context, name, namespace, data): + """ + Loading function to import .nk file into script and wrap + it on backdrop + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + namespace = namespace or context['asset']['name'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + # prepare data for imprinting + # add additional metadata from the version to imprint to Avalon knob + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # getting file path + file = self.fname.replace("\\", "/") + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + # Get mouse position + n = nuke.createNode("NoOp") + xcursor, ycursor = (n.xpos(), n.ypos()) + anlib.reset_selection() + nuke.delete(n) + + bdn_frame = 50 + + with anlib.maintained_selection(): + + # add group from nk + nuke.nodePaste(file) + + # get all pasted nodes + new_nodes = list() + nodes = nuke.selectedNodes() + + # get pointer position in DAG + xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(nodes, direction="right", offset=200+bdn_frame) + + # reset position to all nodes and replace inputs and output + for n in nodes: + anlib.reset_selection() + xpos = (n.xpos() - xcursor) + xpointer + ypos = (n.ypos() - ycursor) + ypointer + n.setXYpos(xpos, ypos) + + # replace Input nodes for dots + if n.Class() in "Input": + dot = nuke.createNode("Dot") + new_name = n.name().replace("INP", "DOT") + dot.setName(new_name) + dot["label"].setValue(new_name) + dot.setXYpos(xpos, ypos) + new_nodes.append(dot) + + # rewire + dep 
diff --git a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py
new file mode 100644
index 0000000000..7f58d4e9ec
--- /dev/null
+++ b/pype/plugins/nuke/load/load_backdrop.py
@@ -0,0 +1,319 @@
+from avalon import api, style, io
+import nuke
+import nukescripts
+from pype.nuke import lib as pnlib
+from avalon.nuke import lib as anlib
+from avalon.nuke import containerise, update_container
+reload(pnlib)
+
+class LoadBackdropNodes(api.Loader):
+    """Loading Published Backdrop nodes (workfile, nukenodes)"""
+
+    representations = ["nk"]
+    families = ["workfile", "nukenodes"]
+
+    label = "Import Nuke Nodes"
+    order = 0
+    icon = "eye"
+    color = style.colors.light
+    node_color = "0x7533c1ff"
+
+    def load(self, context, name, namespace, data):
+        """
+        Loading function to import a .nk file into the script and wrap
+        it in a backdrop
+
+        Arguments:
+            context (dict): context of version
+            name (str): name of the version
+            namespace (str): asset name
+            data (dict): compulsory attribute > not used
+
+        Returns:
+            nuke node: containerised nuke node object
+        """
+
+        # get main variables
+        version = context['version']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        namespace = namespace or context['asset']['name']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        # prepare data for imprinting
+        # add additional metadata from the version to imprint to Avalon knob
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # getting file path
+        file = self.fname.replace("\\", "/")
+
+        # adding nodes to node graph
+        # just in case we are in a group, let's jump out of it
+        nuke.endGroup()
+
+        # Get mouse position
+        n = nuke.createNode("NoOp")
+        xcursor, ycursor = (n.xpos(), n.ypos())
+        anlib.reset_selection()
+        nuke.delete(n)
+
+        bdn_frame = 50
+
+        with anlib.maintained_selection():
+
+            # add group from nk
+            nuke.nodePaste(file)
+
+            # get all pasted nodes
+            new_nodes = list()
+            nodes = nuke.selectedNodes()
+
+            # get pointer position in DAG
+            xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(
+                nodes, direction="right", offset=200 + bdn_frame
+            )
+
+            # reset position of all nodes and replace inputs and output
+            for n in nodes:
+                anlib.reset_selection()
+                xpos = (n.xpos() - xcursor) + xpointer
+                ypos = (n.ypos() - ycursor) + ypointer
+                n.setXYpos(xpos, ypos)
+
+                # replace Input nodes with dots
+                if n.Class() == "Input":
+                    dot = nuke.createNode("Dot")
+                    new_name = n.name().replace("INP", "DOT")
+                    dot.setName(new_name)
+                    dot["label"].setValue(new_name)
+                    dot.setXYpos(xpos, ypos)
+                    new_nodes.append(dot)
+
+                    # rewire
+                    dep = n.dependent()
+                    for d in dep:
+                        index = next((i for i, dpcy in enumerate(
+                                      d.dependencies())
+                                      if n is dpcy), 0)
+                        d.setInput(index, dot)
+
+                    # remove Input node
+                    anlib.reset_selection()
+                    nuke.delete(n)
+                    continue
+
+                # replace Output nodes with dots
+                elif n.Class() == "Output":
+                    dot = nuke.createNode("Dot")
+                    new_name = n.name() + "_DOT"
+                    dot.setName(new_name)
+                    dot["label"].setValue(new_name)
+                    dot.setXYpos(xpos, ypos)
+                    new_nodes.append(dot)
+
+                    # rewire
+                    dep = next((d for d in n.dependencies()), None)
+                    if dep:
+                        dot.setInput(0, dep)
+
+                    # remove Output node
+                    anlib.reset_selection()
+                    nuke.delete(n)
+                    continue
+                else:
+                    new_nodes.append(n)
+
+            # reselect nodes with new Dots instead of Inputs and Output
+            anlib.reset_selection()
+            anlib.select_nodes(new_nodes)
+            # place on backdrop
+            bdn = nukescripts.autoBackdrop()
+
+            # add frame offset
+            xpos = bdn.xpos() - bdn_frame
+            ypos = bdn.ypos() - bdn_frame
+            bdwidth = bdn["bdwidth"].value() + (bdn_frame * 2)
+            bdheight = bdn["bdheight"].value() + (bdn_frame * 2)
+
+            bdn["xpos"].setValue(xpos)
+            bdn["ypos"].setValue(ypos)
+            bdn["bdwidth"].setValue(bdwidth)
+            bdn["bdheight"].setValue(bdheight)
+
+            bdn["name"].setValue(object_name)
+            bdn["label"].setValue(
+                "Version tracked frame: \n`{}`\n\n"
+                "PLEASE DO NOT REMOVE OR MOVE \n"
+                "ANYTHING FROM THIS FRAME!".format(object_name)
+            )
+            bdn["note_font_size"].setValue(20)
+
+            return containerise(
+                node=bdn,
+                name=name,
+                namespace=namespace,
+                context=context,
+                loader=self.__class__.__name__,
+                data=data_imprint)
+
+    def update(self, container, representation):
+        """Update the Loader's path
+
+        Nuke automatically tries to reset some variables when changing
+        the loader's path to a new file. These automatic changes are
+        applied to its inputs.
+        """
+
+        # get main variables
+        # Get version from io
+        version = io.find_one({
+            "type": "version",
+            "_id": representation["parent"]
+        })
+        # get corresponding node
+        GN = nuke.toNode(container['objectName'])
+
+        file = api.get_representation_path(representation).replace("\\", "/")
+        context = representation["context"]
+        name = container['name']
+        version_data = version.get("data", {})
+        vname = version.get("name", None)
+        first = version_data.get("frameStart", None)
+        last = version_data.get("frameEnd", None)
+        namespace = container['namespace']
+        colorspace = version_data.get("colorspace", None)
+        object_name = "{}_{}".format(name, namespace)
+
+        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
+                    "source", "author", "fps"]
+
+        data_imprint = {"representation": str(representation["_id"]),
+                        "frameStart": first,
+                        "frameEnd": last,
+                        "version": vname,
+                        "colorspaceInput": colorspace,
+                        "objectName": object_name}
+
+        for k in add_keys:
+            data_imprint.update({k: version_data[k]})
+
+        # adding nodes to node graph
+        # just in case we are in a group, let's jump out of it
+        nuke.endGroup()
+
+        with anlib.maintained_selection():
+            xpos = GN.xpos()
+            ypos = GN.ypos()
+            avalon_data = anlib.get_avalon_knob_data(GN)
+            nuke.delete(GN)
+            # add group from nk
+            nuke.nodePaste(file)
+
+            GN = nuke.selectedNode()
+            anlib.set_avalon_knob_data(GN, avalon_data)
+            GN.setXYpos(xpos, ypos)
+            GN["name"].setValue(object_name)
+
+        # get all versions in list
+        versions = io.find({
+            "type": "version",
+            "parent": version["parent"]
+        }).distinct('name')
+
+        max_version = max(versions)
+
+        # change color of node
+        if version.get("name") not in [max_version]:
+            GN["tile_color"].setValue(int("0xd88467ff", 16))
+        else:
+            GN["tile_color"].setValue(int(self.node_color, 16))
+
+        self.log.info(
+            "updated to version: {}".format(version.get("name"))
+        )
+
+        return update_container(GN, data_imprint)
+
+    def connect_active_viewer(self, group_node):
+        """
+        Finds the active viewer, places the node under it and adds
+        the group's name to the viewer's Input Process.
+
+        Arguments:
+            group_node (nuke node): nuke group node object
+        """
+        group_node_name = group_node["name"].value()
+
+        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
+        if len(viewer) > 0:
+            viewer = viewer[0]
+        else:
+            self.log.error("Please create Viewer node before you "
+                           "run this action again")
+            return None
+
+        # get coordinates of Viewer1
+        xpos = viewer["xpos"].value()
+        ypos = viewer["ypos"].value()
+
+        ypos += 150
+
+        viewer["ypos"].setValue(ypos)
+
+        # set coordinates to group node
+        group_node["xpos"].setValue(xpos)
+        group_node["ypos"].setValue(ypos + 50)
+
+        # add group node name to Viewer Input Process
+        viewer["input_process_node"].setValue(group_node_name)
+
+        # put backdrop under
+        pnlib.create_backdrop(label="Input Process", layer=2,
+                              nodes=[viewer, group_node], color="0x7c7faaff")
+
+        return True
+
+    def get_item(self, data, trackIndex, subTrackIndex):
+        return {key: val for key, val in data.items()
+                if subTrackIndex == val["subTrackIndex"]
+                if trackIndex == val["trackIndex"]}
+
+    def byteify(self, input):
+        """
+        Converts unicode strings to utf-8 encoded strings.
+        It walks through the whole dictionary.
+
+        Arguments:
+            input (dict/str): input
+
+        Returns:
+            dict: with fixed values and keys
+        """
+
+        if isinstance(input, dict):
+            return {self.byteify(key): self.byteify(value)
+                    for key, value in input.iteritems()}
+        elif isinstance(input, list):
+            return [self.byteify(element) for element in input]
+        elif isinstance(input, unicode):
+            return input.encode('utf-8')
+        else:
+            return input
+
+    def switch(self, container, representation):
+        self.update(container, representation)
+
+    def remove(self, container):
+        from avalon.nuke import viewer_update_and_undo_stop
+        node = nuke.toNode(container['objectName'])
+        with viewer_update_and_undo_stop():
+            nuke.delete(node)
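Note that byteify above is Python 2 specific: dict.iteritems and the unicode type do not exist on Python 3. A rough sketch of a version-tolerant equivalent, assuming utf-8 encoded output is still the goal:

    import sys

    def byteify(data):
        """Recursively encode unicode text to utf-8; other values pass through.

        A sketch of a Python 2/3 tolerant variant; on Python 3 the text
        type is str, so the original `unicode` check is mapped accordingly.
        """
        text_type = unicode if sys.version_info[0] == 2 else str  # noqa: F821
        if isinstance(data, dict):
            return {byteify(key): byteify(value)
                    for key, value in data.items()}
        if isinstance(data, list):
            return [byteify(element) for element in data]
        if isinstance(data, text_type):
            return data.encode("utf-8")
        return data

    print(byteify({u"clip": [u"v001", 25]}))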
From bc9e7833b0b5403fe3b1fc3778a8a0bbd7c0ffd5 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 10 Dec 2019 14:32:28 +0100
Subject: [PATCH 099/195] width of Lighting button is not so complicated to
 set and font size defaults to 8pt

---
 .../widgets/widget_component_item.py          | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py
index 78287ccf37..0fd72cc70e 100644
--- a/pype/standalonepublish/widgets/widget_component_item.py
+++ b/pype/standalonepublish/widgets/widget_component_item.py
@@ -308,14 +308,15 @@ class ComponentItem(QtWidgets.QFrame):
 class LightingButton(QtWidgets.QPushButton):
     lightingbtnstyle = """
     QPushButton {
+        font: %(font_size_pt)spt;
         text-align: center;
         color: #777777;
         background-color: transparent;
         border-width: 1px;
         border-color: #777777;
         border-style: solid;
-        padding-top: 2px;
-        padding-bottom: 2px;
+        padding-top: 0px;
+        padding-bottom: 0px;
         padding-left: 3px;
         padding-right: 3px;
         border-radius: 3px;
@@ -351,14 +352,11 @@ class LightingButton(QtWidgets.QPushButton):
         color: #4BF543;
     }
     """
-    def __init__(self, text, *args, **kwargs):
-        super().__init__(text, *args, **kwargs)
-        self.setStyleSheet(self.lightingbtnstyle)
+    def __init__(self, text, font_size_pt=8, *args, **kwargs):
+        super(LightingButton, self).__init__(text, *args, **kwargs)
+        
self.setStyleSheet(self.lightingbtnstyle % { + "font_size_pt": font_size_pt + }) self.setCheckable(True) - preview_font_metrics = self.fontMetrics().boundingRect(text) - width = preview_font_metrics.width() + 16 - height = preview_font_metrics.height() + 5 - self.setMaximumWidth(width) - self.setMaximumHeight(height) From 310c7672441c96207c746c4e3db2db7bae26599d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 19:06:16 +0100 Subject: [PATCH 100/195] added replaced svg images of trash and menu with pngs --- pype/standalonepublish/resources/menu.png | Bin 0 -> 1629 bytes pype/standalonepublish/resources/menu.svg | 12 --------- .../resources/menu_disabled.png | Bin 0 -> 1629 bytes .../resources/menu_hover.png | Bin 0 -> 1626 bytes .../resources/menu_pressed.png | Bin 0 -> 1626 bytes .../resources/menu_pressed_hover.png | Bin 0 -> 1488 bytes pype/standalonepublish/resources/trash.png | Bin 0 -> 1235 bytes pype/standalonepublish/resources/trash.svg | 23 ------------------ .../resources/trash_disabled.png | Bin 0 -> 1235 bytes .../resources/trash_hover.png | Bin 0 -> 1232 bytes .../resources/trash_pressed.png | Bin 0 -> 1232 bytes .../resources/trash_pressed_hover.png | Bin 0 -> 1094 bytes 12 files changed, 35 deletions(-) create mode 100644 pype/standalonepublish/resources/menu.png delete mode 100644 pype/standalonepublish/resources/menu.svg create mode 100644 pype/standalonepublish/resources/menu_disabled.png create mode 100644 pype/standalonepublish/resources/menu_hover.png create mode 100644 pype/standalonepublish/resources/menu_pressed.png create mode 100644 pype/standalonepublish/resources/menu_pressed_hover.png create mode 100644 pype/standalonepublish/resources/trash.png delete mode 100644 pype/standalonepublish/resources/trash.svg create mode 100644 pype/standalonepublish/resources/trash_disabled.png create mode 100644 pype/standalonepublish/resources/trash_hover.png create mode 100644 pype/standalonepublish/resources/trash_pressed.png create mode 100644 pype/standalonepublish/resources/trash_pressed_hover.png diff --git a/pype/standalonepublish/resources/menu.png b/pype/standalonepublish/resources/menu.png new file mode 100644 index 0000000000000000000000000000000000000000..da83b4524468dff7e927baac8927e655e004632b GIT binary patch literal 1629 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz~mX=6XFWw0zpGVLuhCykW5QU z1Cl^cSXc-oOG-+BWLa4mkSs4RFDNLetgI|9E(Vf7sj8}~($Z2O2~<#BT@4|DvK18- zKv^IOl!cH$DWDdh2|&d_5-1Ll0*V6_z-1v)5E3p0Q2-=?;t)fCdV#V)E|3HQWCajq zKtYHsgoFsfNw6l6E}$%&glNI49-<7*OrRhfzzqS4K-h58;3CLskd1;H0%yQgA{&BB z7ONVFVoaC74FNJBvdAKou>b%6&)f007MPL_mIV0)GcYnSv#_#paB^|;@bd8s2#bh{ ziAzXINz2H}D<~={tEj4LY3t}37#W+GnweYK*xK1UIyt+zdU$$y`}p|>1Obje8tLDtJkbuzhUF1&0Dwa*tL7l{sRXO9XWdJ_{meJ&zw7d;o_ysSFT;ZefQq| z2M-@VefIpt%hzw-zI*@S zB;xSi=|7`+6Ge`nf2-v!IBBg?blE0#EluxfDW_-YTx@b`aXBe;vs_nYiFfACy8+=} zexCnx;IPl#nbyYND{Ja!S)MMxSNv?x`|7?9HmB<;G3)X&UcSi4T%mW;Lr!&>f%SGd z)2jPV&KSM2&AD#fFzYI=<_)-)Vo%{QER>p7X^06Yt9e@NEAT`D^L{$Jra5 zHp;ej*XDj!xLmPRu4!@BNj8hTW!L-+1OHP;OtUVk-yn3;kU!I zjw>sJGPsVf5afKDx-zs$qRQP%>78O2XH%8OgxV`zsaH0-uM0?;yI_iU(BZn*EHgt+ zx^Ih6zu~pmB67Ldt;K0sN~hLs^IaHw=g8{Tr&ZMpeR5|vMDG(`E*5uLdB>`4y~}Sq za|jBX2wv20F)N(#%v(r$$*Jx4e>in%ZuJW|S9#z2=JT^Af7A`bIKw_E-}uga^t*Dd zl}og81WT}u>}%F^lfVZrBfkGllDxAjOK;NYk_CHiab-yT_+G?oJin2%UO${OZ|NZ$ z$EW*W$C^1Uiu}Lzl-|j2BFj(mDc)I{Q<$=-`}nh2Ytk(aPi?v}KW4M^_wDh|k0)(? 
z!SF;<-GZ_D_o+_{Pb7#ipF7<7FYs<#Qu5hvqANap(5+Uy;!^d!e@*MpAHGRn!Q&278>FVdQ&MBb@0KA_L4*&oF literal 0 HcmV?d00001 diff --git a/pype/standalonepublish/resources/menu.svg b/pype/standalonepublish/resources/menu.svg deleted file mode 100644 index ac1e728011..0000000000 --- a/pype/standalonepublish/resources/menu.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - diff --git a/pype/standalonepublish/resources/menu_disabled.png b/pype/standalonepublish/resources/menu_disabled.png new file mode 100644 index 0000000000000000000000000000000000000000..e4758f0b199c554da8bd0932cea2d13b2d2bcced GIT binary patch literal 1629 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz~mX=6XFWwLO^I}sDp!pv9YnX zwl$>*?tM+1A$9=H})=E|6_zW(HJbYiny|Wd#&8H8lmYfeL`?fm}ONQ=qto zg@v7&nT?GNkPQ(BDzG&-2a*sGh#DIt5gQ8&ph{#BxMHAbK85AR7YLh)@hS0j3_P2CEjh zULfQD|NkdRQ@5x&}tZCZ=ZQRyMYF_Kr@@F0LM)Ufw=_{sBS3A)#U65z#TRafvCZY3UhRd4(mV zW#yGMb@dI6P0cN>Z5^Fm-95d1{SzimnmlFdjG43M&0nx^(c&dbmn~nha@FcJYu9hs zxM}m&Z98`D-n0L}!9z!m9y@;W)af(l&R@8A>GG9p*KgmwcmKh|$4{RDTZ?yo{GGGBQ`_o%E1XU1ngtUCy-X{*yCCuWWO!TQ|(R zo+`Og)6-LPCHtvYRSfsu$z&aMek`sbfA*GX^SR%0-YeR2iZjy8 zf0m>9gS?LKI_-DbUo-zc&79{vasR~oG66i>e?|V9dcbk^MyHLkZQZrGpA{}wER}0o zoOP1TB5&C>|3g(ztDlI=@fkQj_|FpFqO@Z9mL9|HcaLrT_#$4)>`;+mr-iG)!L?Jm zGw&?1d~yD9+`c|@e}R9!ck9J2oD^(T+WBdon$~2Iwwo69`ia)Di$y<4l?1FklWcz5 z#bwDnr>B<|^}k5icj=SPdKZ^x2Vyq1R2n$D6jkJJc1!r}Fs}QYjqd9LlIAX$;vICj?lsHIkdyA)BGhkqEw+eUE_Q2i zT9(qOb=!Ow#@;!yy7g&Q^+KQA*$vVAgqMrOT~^+)YFqE}+s+(20o?BcQQa`>I@fy!>dbw;qTi-pMwSV`&F}9hp>bc!pMHBst z*V^xXwkx@NQGchY@2}+h%dg%`;<N{%q*;I9GqO- zJiL7T0>UDqV&W2#QqnTA@(PMd$||brTG~3g21dpvre@|=Hnw*5j!w=lt{$FV-adZ* z0YSkbp<&?>(J`@ci7BaR=^0shg(anB<&`yc^$m?p%`L5M9i3g>J-vPX6DCfYJZ0*P znX~52U$Ah|;w4L$Enl&6)#^2C*KgRkY4g@?J9h2fv;V-sLr0DtJAU%i=`-ifU$}Vb z@|A1XZ{NLl|G~q@PoF)1@$&VXx9{G6`1s}9_a8rh{r>a!pRdp7O+e2uCV9KNF#c!K zy$8%IUp-wMLn03Eo&GbLH&NvH`L|l$f|J%NMVD<-*V6Q!mU4QQ&c!CD7MGJkH_LTZ zmUw6Gyc-by<>&c72M+t(ooQ|Sy|Si$mgVX4d&ST8ysz%-U~{^j60Ht%oTbk zJ>*oE8CY+ZGp)M+{ojDv*)dUD`N{1 zd+v!Ji)+ZAy=B^b?zf!xing5Mj5PC~dy0MGVc zk-w%MaGbr-X`^ggcWv%xh07I7<(d{}on*7fTXxO=P}S4wC*pE^2F?%uvxK)OtysRL z$8h`IV_QGIh?g=uRAks`;VN)&?Ue4!J4-BIoPQj*ug~0H;9u|Eda(;91zVMNewwGI zHCd$XrbWGeqIK+I(N9t(0c+1Bo8NYESu)S*>7_;eFB0}$`lPeo#pT(7n2jx!2F@-; z75SUp5`H^O>$tKqD1+&u|A$kT=2pLebCvhKZ$3Y3@<-h;j5F+$ z@{RAzN53oQTDe3kN3aCj$i8MxHwk?3GUEH+B*{Cgvh*gME?KbW7FUMUkMBji#`7CF z>-EDq^Ohd6aeTV}b*!1wqR9VSPwAcfCbIk_pW>aRIfW^Ux{p7bwIS-~DflZDy=`ZueHvME~No_Pd|$O733N-)ZXmEBXHNtM`(4?%Z8xTd{4a xwB^C(OIzLU%QpT`{}I;nzt6A0vgG*x_!Q-*UelL+F$LxZ22WQ%mvv4FO#m7ceop`Z literal 0 HcmV?d00001 diff --git a/pype/standalonepublish/resources/menu_pressed.png b/pype/standalonepublish/resources/menu_pressed.png new file mode 100644 index 0000000000000000000000000000000000000000..a5f931b2c4107986d389c1df0fe910cae0145d0d GIT binary patch literal 1626 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz~mO-6XFWw{s)6qt5)sbzyHjc zGgq%(1u~8uI|gK5ym%4FJ#ys8i4!M)-1FzppFDXI$T)ZI98d}C-@|vuDo&xj;5h9H1Obje8tLDtJkbuzhUF1&0Dwa*tL7l{sRXO z9XWdJ_{meJ&zw7d;o_ysSFT;ZefQq|2M-@VefIpt%hzw-zI*@SlFzi8#D>`p;&~JgEwON2BZ`8!c3^6kg@7?ixX=%gnJ500LuO3n|-;)3B^5^+~gm!&8 zp1tz0U+Lk`p11z3j4ep)xhH-st|5Q+mTB|3-*Vn7+H#6B(#(IBqxplpj_*3{ciLYw z|31x}=R9%$#QQP1jk0aswYi@aE>|p-Yg(LjlFcG-*){(|RZpv* zh|BRAI6wH$65gV;V)>RH!|ivEZT#V(u_Y*pI%X`Y(aWRbR;7WMjx*0GC4KS`AYtUZ%#e%r-m$vmg0mlpNENZ5Dj zlg@e8XnV|>jIMIE|}sSbhz#{%gm6I?%N{NZ+I=Xh+HmqYjIkZ(y4XZd>6*vIkLL- zX;t+?pWN9E(ffp#i^W}5-mz+1@ABKu9D>3of*18$%nBzw^A?g`a%%hiA5LAGTm1sg zRo?f$`TVTOA9cep&ah9)H@-6;{jQvAB=MH!N3%uKwlzjG^ z=!y>?bgLDwxKw@bU(XC+ z=Sb9bF>#kQr=mIs?JZFRdZ+xS2IM_AAQKEDFXlH>p5Q7%TxdW literal 0 HcmV?d00001 
diff --git a/pype/standalonepublish/resources/menu_pressed_hover.png b/pype/standalonepublish/resources/menu_pressed_hover.png new file mode 100644 index 0000000000000000000000000000000000000000..51503add0fbc0c7352c764623632509e14dd669b GIT binary patch literal 1488 zcmV;>1uy!EP)^iQUvsuG)vNbl17y^pOCa!(l3&>NZKyx zu%sidy|<)3k_J23QIbYWswva6Mba`!FB@a_hvJ?Jz(C*`U~k#oYX;^6lYxF!=&c2& zxVyitOm`bF7Z{L*Pn8mkfLXvEkJwRQ8E`!i^683z3BYn^BgSsv0U!!xt5QN&0N;2- z4g-sUOLDM11b7KJ;@STi7?HK@n7~ZuaE#Z1i*hmEp2x+`c8tG(`?E9{VZZGD<2N-k zgO&5T18DVl%?tVZ#YXx7?|Mc)2I_+vNdcf9_{8J)4$wQ4@?O9Pp7P~D&mhVJ!tq?; z@q8~ePxsir=_!3G_~R82fWiKfW4w`yFR;*4Ha8&GOaVOY@qV^iCHHzto)641a{w=R z{O?XGJRI2Pgg*nyr=*=8Ku=(U^Pvs6tWrS(eCY)Lq=0{XC;)?j7U#?QioOTH4DZpo zH3y!V0=T{6R{^L4cDc@P=fp8{0Bf92JAm3Vb)4fm9wdi(eO>^DVyDUgv;8*Xb*huJ zO;UeJ^NcZ1Brwn@8wbd1f^=NcGGolDN(F%Vk{*+^N77JZ%z^R_kGc^$3|yFm4X}~? z@ohqmgDI^bAC9m4Pe7BTv65aj#>`Bpe7vM7Nn`@xePhgqBm%%vNi!t{}y-J z5XmMW!@Mo0ID3@1W5Wlh5Whhcp%6CxOcHR1);nV`?^1p)Z$JC6-1^8#PN>Zk!;n5 zRGJWV2gJVq9LW|WNu>!4pNi<%J3E_@gmqZyXU?=Z@3<>mf0fW~}S{hH+l7aC{-`asY6n=jRy7 z6(SmAz6UnRhQ;rZ^hH9&<0Va3zKM~QNyFl@zDYJLzPVis|0hN`M|L_}FKLl6=8*)- z8ztSDgirviN|+jZQPM+_c1Rj#j5%_;PJVK9NI0&=0=S$GlB4Av{q*?S964qR;6vxr znbYI0vtJ;QcaS4EfV(_@&R#1-`x2Q}x^SHUwl1K}`BGVlj1wH@34Tu3Et2}myOBea zDws@Zjj$RU`0Nq@dSJ_C!Or<_s#ejH-utsKFwe{YEb@5INvmwJr)+s=ejR`s;5Co$ zq7X`Z(!N-F1)aB8dZov6ZLpi=-lVuhKW;Tv02_@F^>jp9dnEwz|DCX9|ElNj2FFW>Z3etpN`CQV7o^o?`?B`0C}%M z|3$7JFx!0-d^D5Y#>p1oaRNlKL*AuYNe{{vB4)XDcB!N##+WqO>})}I+&tY8IQ{nd qfs*QE3$rC@ucUpFb}Q%Y^M3=%H32mG5YMLo0000loC1qBrq6+rRI%F5#6Vju~WEiEmruC4~E0g^yLpaLKO z3Iaud3LqpfhdNP5S4KCa5Y%PA&!BoL1y5QLRLnQ z{r~^}Jl8k+z!;rb666=mz{teR!p6?Q$;Hjf$1fl#BrGB-CLt{&E3c%Ys;;4_rLCi@ zZ)jp+X=P()@8st0;pye$>lY9d91Yt&-Rdys|!-6E|Kmp&7BYi6bcOR3B=9!q_%IacbmgDJh;&c9A1@ptbn$<-{6LV=V`Wv)qa0X?H@S*o&Rl)VuyYa#|I|4)t8w{q-Lb$XMBy|I&dUz z_2XV=gUMOhtIlTT@0ouf;N*p0`~Uv#eampt`R2d5nlhSiPu}{fapC%=Q@IP;o(X(7 zx=*0JF*|ix^b;$7neDA|PFrr(OkVA#kkq}WE!wZIQ{FANF>%L3mI6MzTTK2BT7?~0 z;%+k8CG_vCXD>J@7CEu$J^SgiZ&H8nbE=waTOsz~_5p9SzS2r{=u8x_y zynfd0$NMBLmtEuea#wHFhbyxWEz@-f{-_$myJaea>H&MkH&w6JPg2?0yjNS*Q&fC@ zr7Xj-2MsDO)$UAHdD|8*daR*nf%hblmVamV+HYskVDxGbdSK78L#QBRl1RhxNlW-< z%t)wfy2kYOsQk3t z1IO;j@0DojVt(gPRQE1~!6J0Yv+`2Lhc+jViG29;q?75@JN8SJ@fVWQLX*n>F_uY* s%-*Qnx-6nTamR!AxiwEV-G29jH(IMB%le*t6R2$RboFyt=akR{07G6oeE - - - - - - - - - - diff --git a/pype/standalonepublish/resources/trash_disabled.png b/pype/standalonepublish/resources/trash_disabled.png new file mode 100644 index 0000000000000000000000000000000000000000..06f5ae5276d22663eeceeb4099b87d86f8ecf9df GIT binary patch literal 1235 zcmeAS@N?(olHy`uVBq!ia0vp^4j|0I3?%1nZ+yeRz<4phC&U%Vg@Dk|PzMJGZEbB& zPfsAj($dn{*jP_b&)VAB+}s?<1`68P*Z{@N%*-5&jcsjht*op}O-+F!Km~TDra&%4 z0gwyS1r)ciu&@QQfl@#&L>4Fo1W*xka|i=SLKr}0))p2(1t9f6hJ^)01g-!iYheN5 zf*245mX>hs5D}09h%&GOI2ULdL?zHc2)%Gaz=CiEAgjR$t{B7s`vt^6HUTUS6oHrz z)CE@nVu1C)1wm?H2*C!cfr~@5AhjQiW^f>W3|F44i;a<(^pAUMBE$5frFF$8|JMDYy$_BI8%Fi!9Klr+K z);zI0$w7O&mp_iT@^ZWQ^xWU8&JLe0O5SfSd=T54?7(kuL&5VjTf=I-OV)l9tP^@qD?fx9Y={*@u?tIs|`I zjp5xgl|l7@J>#3ISL-LK>}=kvt?DT%KEG0y;n;%)m6vLFrmDPcix)lCP_)2%l1R(H zGkfi~vuH4SH3&VhXW1cC5Hd-m;rOH_d^2XG)~)>ZVV~cmEwy5w1TDdR(% zlgC6p{CU#J^y(e^rONmV$!VcU<^LGVq(o+KRBl}sQJ=Wu!Ta2rC!21+`@tKn)sba= SPreCMG(?(`x)jJ*w{G3CWy>H8APEF(*REZ@ zd^wN=0w8zInl%t|#flZk3?LUM1q2ZB)vH$nWmm3T2_%64$c3;W3^*4k2xkKs$SNV~ z(M$lUfdjY#AOixxR)Oq-m;*NrC<0Uf=Rz0|WkB{&0piollYuchvn0qbn1PXrnT3s= zgOiJ!mycgSP)JxrR7^rzMpj-)MO9rxQ%hS%SKrXY!qUpd&fdw*-NVz%$JZ|)C^#fE zJUS*eJ|QVNEj>H8pt!8Oy0)RYv#Ymn!oGpI?4{ 
z@OAC1d17~xgZ6eWe;jY+<#zGuxxZJP9X?%@yx&~-AhtKzf#2YUg6Cz%V!@FfFgX#f$#y3^3)=yH|*}PX<)l*b_ex)qKu?Gz*FV*f$Re9SMFM6z@Xo2@6 zk(PgF_S$b}(O~py5PD$GvO}mKWRgh3@kvYgX3R*fTlww7KEFv@Y^P;RSJAD>{#*5l z+04IE{M8zvWo7e1nRI97tYM5QX1d1o_Ne@{+ylq%$nTYC>0*B8P*nFWguxRW{xNrfe z0LTD}oIZW}?AfzGl|Tki4Ui2K1WG};5CuRIE)FpVr~oJeQ4eGQ6$1f~0bxVLA%bu< zm{JfCAPG?eB!ODcNMv0=DIkE0ps^ubkO~-ptAsNkGH6_wa)il11#m%HGnjs4%>>5i z%#t9#U_=Ke7wDj!Ug5t9B>e`0p&aU3R2@@wznL2&O%vrPN%w4c_ z)#^2C*KgdkdCS&q+js2RyZ`W!qsNY)I&=2o}GwI%AU|{0*ba4!cIQ({cdT_9#M5}*>7FXGk zhz$#poC5`XLyq*V5ZrxCE}Cazf-9?wiCK=P$BEDRe-+FR_i9%Ee9&WTIlt_F`8ng; zY2Ra4Hkid$et!A+!Pm93=84@&4%*wj{BgXMm)pgs=l))GcKCEr@_uvSgV^3=2Y!Pa z3ZAFg8dm%LHMM`>{CEDhIf@~1mnKWG(pV2QiQWS7vtv!1=+ zq*&y{ruXcp&%R0hz0av?u5E?bgWCtZg?kx)Y+f-nFhk(ow;%75v|M(L z=gVEaRUfX*KD12NA^4+e4DXhy45|n08Q)aBT0co;XY*cdRZmgz`IWK^#~w7Oyi~h0 zRpo74yy&rpq6OZQL|Xox*=xU@MT60+LFj=!%MPJ}kVzs9$0se}n=vD`ZsoTR`}`(t zv7MGNT}8Jh`)}1JW;6dv@mFhvmX*y5WzwCQvxYIMnCTkR+oST+at|E4BfnRorHlEU zLs8wk5C)6TCC|!B86Vo5JSOtt&y!B3SMS&_RmNXPP76&c|HoJ+B{F-Xa_h2)`otX% i-sjdl*>wBe58i04jx6hY@=d@r!r{^^}<`IdjNGca2VJUHJ=5(0*(LzTY-Cl3nR@j;HD75blo0nMSUgr z=^jzHE}`439?ogsO5wRO)Qiu7iTcm9lK>VKXx4-u?oQaNC6^Ysf z@JDof}kz8BS-i~4k02)znClWU#^ej;0B^cy3hM~cZkEF8-;wU|0?j*-=KTWALy_i;y5vuJeYF&B zePI&+PhRxv?z%2DtAI5tu(l-4Ga$8r)HDI4HjtVofYb(3(*%&(Kx&!*QX5E36F_PM zsc8a8Z6GyG0I3b6rU@Xmfz;Fyz#=doofj1-%yy|+BrANVl*$7LAqebgo&E}ZP$1n$ zHovCNSv?ZKz7RsXDi!sPZVDvSjcVIX73wAGWgRp_>Qx;yH{`tC-%cOX&5n_O za_1YCRJ8~p9Pf0&G8NpdI;Gz_$TsHAt1sPASF1Bo%9E83vP{(dk*@{y^123kFL$fG zV8Rl5v(otIa-P2E<{DjXoK#22HbKVKqTiD}tmc>OR3C$6n=QwpHi6qi2#0DLRo!j> z@_cO*&xNo{J)L^C5i{!3J(+wI!Y1`~^{8ofmCc*&3VA~vS?)ak14O|_7j^>z%K!iX M07*qoM6N<$fz>% literal 0 HcmV?d00001 From 69c1ae5990620b5e81e36e832cdea85418b89690 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Dec 2019 19:07:42 +0100 Subject: [PATCH 101/195] created png button with png factory --- .../widgets/widget_component_item.py | 171 +++++++++++++++++- 1 file changed, 170 insertions(+), 1 deletion(-) diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py index 78287ccf37..f519164c6b 100644 --- a/pype/standalonepublish/widgets/widget_component_item.py +++ b/pype/standalonepublish/widgets/widget_component_item.py @@ -1,6 +1,5 @@ import os from . import QtCore, QtGui, QtWidgets -from . import SvgButton from . 
import get_resource
 from pypeapp import style
 
@@ -362,3 +361,173 @@ class LightingButton(QtWidgets.QPushButton):
         height = preview_font_metrics.height() + 5
         self.setMaximumWidth(width)
         self.setMaximumHeight(height)
+
+
+class PngFactory:
+    png_names = {
+        "trash": {
+            "normal": QtGui.QIcon(get_resource("trash.png")),
+            "hover": QtGui.QIcon(get_resource("trash_hover.png")),
+            "pressed": QtGui.QIcon(get_resource("trash_pressed.png")),
+            "pressed_hover": QtGui.QIcon(
+                get_resource("trash_pressed_hover.png")
+            ),
+            "disabled": QtGui.QIcon(get_resource("trash_disabled.png"))
+        },
+
+        "menu": {
+            "normal": QtGui.QIcon(get_resource("menu.png")),
+            "hover": QtGui.QIcon(get_resource("menu_hover.png")),
+            "pressed": QtGui.QIcon(get_resource("menu_pressed.png")),
+            "pressed_hover": QtGui.QIcon(
+                get_resource("menu_pressed_hover.png")
+            ),
+            "disabled": QtGui.QIcon(get_resource("menu_disabled.png"))
+        }
+    }
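For context, this is roughly how the factory lookup is meant to be consumed; the sketch mirrors the PngButton(name=..., size=...) calls that the follow-up commit below introduces. The QApplication scaffolding exists only for the example, and it assumes the widget module with PngButton and its PNG resources is importable and that a Qt binding wrapper named Qt is available:

    import sys

    # Assumed Qt binding wrapper; PySide/PyQt imports would work the same
    # way for this sketch.
    from Qt import QtCore, QtWidgets

    from pype.standalonepublish.widgets.widget_component_item import PngButton

    app = QtWidgets.QApplication(sys.argv)

    # "menu" and "trash" are the two state-icon sets registered in
    # PngFactory; `size` drives both the icon size and the maximum size.
    menu_btn = PngButton(name="menu", size=QtCore.QSize(22, 22))
    trash_btn = PngButton(name="trash", size=QtCore.QSize(22, 22))
    trash_btn.clicked.connect(lambda: print("remove component clicked"))

    row = QtWidgets.QWidget()
    layout = QtWidgets.QHBoxLayout(row)
    layout.addWidget(menu_btn)
    layout.addWidget(trash_btn)
    row.show()

    sys.exit(app.exec_())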
+
+
+class PngButton(QtWidgets.QPushButton):
+    png_button_style = """
+    QPushButton {
+        border: none;
+        background-color: transparent;
+        padding-top: 0px;
+        padding-bottom: 0px;
+        padding-left: 0px;
+        padding-right: 0px;
+    }
+    QPushButton:hover {}
+    QPushButton:pressed {}
+    QPushButton:disabled {}
+    QPushButton:checked {}
+    QPushButton:checked:hover {}
+    QPushButton:checked:pressed {}
+    """
+
+    def __init__(
+        self, name=None, path=None, hover_path=None, pressed_path=None,
+        hover_pressed_path=None, disabled_path=None,
+        size=None, *args, **kwargs
+    ):
+        self._hovered = False
+        self._pressed = False
+        super(PngButton, self).__init__(*args, **kwargs)
+        self.setStyleSheet(self.png_button_style)
+
+        png_dict = {}
+        if name:
+            png_dict = PngFactory.png_names.get(name) or {}
+            if not png_dict:
+                print((
+                    "WARNING: No icon with name \"{}\""
+                    " is set in PngFactory!"
+                ).format(name))
+
+        ico_normal = png_dict.get("normal")
+        ico_hover = png_dict.get("hover")
+        ico_pressed = png_dict.get("pressed")
+        ico_hover_pressed = png_dict.get("pressed_hover")
+        ico_disabled = png_dict.get("disabled")
+
+        if path:
+            ico_normal = QtGui.QIcon(path)
+
+        if hover_path:
+            ico_hover = QtGui.QIcon(hover_path)
+
+        if pressed_path:
+            ico_pressed = QtGui.QIcon(pressed_path)
+
+        if hover_pressed_path:
+            ico_hover_pressed = QtGui.QIcon(hover_pressed_path)
+
+        if disabled_path:
+            ico_disabled = QtGui.QIcon(disabled_path)
+
+        self.setIcon(ico_normal)
+        if size:
+            self.setIconSize(size)
+            self.setMaximumSize(size)
+
+        self.ico_normal = ico_normal
+        self.ico_hover = ico_hover
+        self.ico_pressed = ico_pressed
+        self.ico_hover_pressed = ico_hover_pressed
+        self.ico_disabled = ico_disabled
+
+    def setDisabled(self, in_bool):
+        super(PngButton, self).setDisabled(in_bool)
+        icon = self.ico_normal
+        if in_bool and self.ico_disabled:
+            icon = self.ico_disabled
+        self.setIcon(icon)
+
+    def enterEvent(self, event):
+        self._hovered = True
+        if not self.isEnabled():
+            return
+        icon = self.ico_normal
+        if self.ico_hover:
+            icon = self.ico_hover
+
+        if self._pressed and self.ico_hover_pressed:
+            icon = self.ico_hover_pressed
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mouseMoveEvent(self, event):
+        super(PngButton, self).mouseMoveEvent(event)
+        if self._pressed:
+            mouse_pos = event.pos()
+            hovering = self.rect().contains(mouse_pos)
+            if hovering and not self._hovered:
+                self.enterEvent(event)
+            elif not hovering and self._hovered:
+                self.leaveEvent(event)
+
+    def leaveEvent(self, event):
+        self._hovered = False
+        if not self.isEnabled():
+            return
+        icon = self.ico_normal
+        if self._pressed and self.ico_pressed:
+            icon = self.ico_pressed
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mousePressEvent(self, event):
+        self._pressed = True
+        if not self.isEnabled():
+            return
+        icon = self.ico_hover
+        if self.ico_pressed:
+            icon = self.ico_pressed
+
+        if self.ico_hover_pressed:
+            mouse_pos = event.pos()
+            if self.rect().contains(mouse_pos):
+                icon = self.ico_hover_pressed
+
+        if icon is None:
+            icon = self.ico_normal
+
+        if self.icon() != icon:
+            self.setIcon(icon)
+
+    def mouseReleaseEvent(self, event):
+        if not self.isEnabled():
+            return
+        if self._pressed:
+            self._pressed = False
+            mouse_pos = event.pos()
+            if self.rect().contains(mouse_pos):
+                self.clicked.emit()
+
+        icon = self.ico_normal
+        if self._hovered and self.ico_hover:
+            icon = self.ico_hover
+
+        if self.icon() != icon:
+            self.setIcon(icon)
From 43d5dbbe744ea08645b33466d3a32b0def1d6fe9 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 10 Dec 2019 19:08:02 +0100
Subject: [PATCH 102/195] replaced svg buttons with pngs

---
 .../widgets/widget_component_item.py          | 25 +++++++------------
 1 file changed, 9 insertions(+), 16 deletions(-)

diff --git a/pype/standalonepublish/widgets/widget_component_item.py b/pype/standalonepublish/widgets/widget_component_item.py
index f519164c6b..daf8f5d773 100644
--- a/pype/standalonepublish/widgets/widget_component_item.py
+++ b/pype/standalonepublish/widgets/widget_component_item.py
@@ -5,10 +5,6 @@ from pypeapp import style
 
 
 class ComponentItem(QtWidgets.QFrame):
-    C_NORMAL = '#777777'
-    C_HOVER = '#ffffff'
-    C_ACTIVE = '#4BB543'
-    C_ACTIVE_HOVER = '#4BF543'
 
     signal_remove = QtCore.Signal(object)
     signal_thumbnail = QtCore.Signal(object)
@@ -57,10 +53,8 @@ class ComponentItem(QtWidgets.QFrame):
         self.icon.setText("")
         self.icon.setScaledContents(True)
 
-        self.btn_action_menu = SvgButton(
-            
get_resource('menu.svg'), 22, 22, - [self.C_NORMAL, self.C_HOVER], - frame_image_info, False + self.btn_action_menu = PngButton( + name="menu", size=QtCore.QSize(22, 22) ) self.action_menu = QtWidgets.QMenu() @@ -87,7 +81,9 @@ class ComponentItem(QtWidgets.QFrame): self.file_info.setStyleSheet('padding-left:3px;') - expanding_sizePolicy.setHeightForWidth(self.name.sizePolicy().hasHeightForWidth()) + expanding_sizePolicy.setHeightForWidth( + self.name.sizePolicy().hasHeightForWidth() + ) frame_name_repre = QtWidgets.QFrame(frame) @@ -103,7 +99,8 @@ class ComponentItem(QtWidgets.QFrame): layout.addWidget(self.ext, alignment=QtCore.Qt.AlignRight) frame_name_repre.setSizePolicy( - QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding + QtWidgets.QSizePolicy.MinimumExpanding, + QtWidgets.QSizePolicy.MinimumExpanding ) # Repre + icons @@ -155,12 +152,7 @@ class ComponentItem(QtWidgets.QFrame): layout_main.addWidget(frame_middle) - self.remove = SvgButton( - get_resource('trash.svg'), 22, 22, - [self.C_NORMAL, self.C_HOVER], - frame, False - ) - + self.remove = PngButton(name="trash", size=QtCore.QSize(22, 22)) layout_main.addWidget(self.remove) layout = QtWidgets.QVBoxLayout(self) @@ -350,6 +342,7 @@ class LightingButton(QtWidgets.QPushButton): color: #4BF543; } """ + def __init__(self, text, *args, **kwargs): super().__init__(text, *args, **kwargs) self.setStyleSheet(self.lightingbtnstyle) From f9035a20d6ab16bfababc0e80b79e64d59cd41e0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 20:51:24 +0100 Subject: [PATCH 103/195] fix(nuke): selection of nodes for write render creator --- pype/plugins/nuke/create/create_write.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index 8a1f958f9e..042826d4d9 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -34,6 +34,7 @@ class CreateWriteRender(plugin.PypeCreator): data.update({k: v}) self.data = data + self.nodes = nuke.selectedNodes() self.log.info("self.data: '{}'".format(self.data)) def process(self): @@ -46,9 +47,9 @@ class CreateWriteRender(plugin.PypeCreator): # use selection if (self.options or {}).get("useSelection"): - nodes = nuke.selectedNodes() + nodes = self.nodes - assert len(nodes) == 1, self.log.error("Select only one node. The node you want to connect to, or tick off `Use selection`") + assert len(nodes) < 2, self.log.error("Select only one node. 
The node you want to connect to, or tick off `Use selection`") selected_node = nodes[0] inputs = [selected_node] From 8a14e5d544ade37068cda522628eaff068b411a5 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 11 Dec 2019 00:05:52 +0100 Subject: [PATCH 104/195] make sure we process texture when force copy and maketx is enabled --- pype/plugins/maya/publish/collect_look.py | 4 ---- pype/plugins/maya/publish/extract_look.py | 17 +++++++++++++---- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py index 17f8180fdf..7a5fea776c 100644 --- a/pype/plugins/maya/publish/collect_look.py +++ b/pype/plugins/maya/publish/collect_look.py @@ -219,10 +219,6 @@ class CollectLook(pyblish.api.InstancePlugin): with lib.renderlayer(instance.data["renderlayer"]): self.collect(instance) - # make ftrack publishable - self.maketx = instance.data.get('maketx', True) - instance.data['maketx'] = self.maketx - self.log.info('maketx: {}'.format(self.maketx)) def collect(self, instance): diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py index ad43e02d21..5226f80f7a 100644 --- a/pype/plugins/maya/publish/extract_look.py +++ b/pype/plugins/maya/publish/extract_look.py @@ -74,6 +74,8 @@ def maketx(source, destination, *args): cmd.extend(args) cmd.extend(["-o", destination, source]) + cmd = " ".join(cmd) + CREATE_NO_WINDOW = 0x08000000 kwargs = dict(args=cmd, stderr=subprocess.STDOUT) @@ -183,6 +185,7 @@ class ExtractLook(pype.api.Extractor): transfers = list() hardlinks = list() hashes = dict() + forceCopy = instance.data.get("forceCopy", False) self.log.info(files) for filepath in files_metadata: @@ -195,20 +198,26 @@ class ExtractLook(pype.api.Extractor): files_metadata[filepath]["color_space"] = "raw" source, mode, hash = self._process_texture( - filepath, do_maketx, staging=dir_path, linearise=linearise + filepath, + do_maketx, + staging=dir_path, + linearise=linearise, + force=forceCopy ) destination = self.resource_destination(instance, source, do_maketx) # Force copy is specified. - if instance.data.get("forceCopy", False): + if forceCopy: mode = COPY if mode == COPY: transfers.append((source, destination)) + self.log.info('copying') elif mode == HARDLINK: hardlinks.append((source, destination)) + self.log.info('hardlinking') # Store the hashes from hash to destination to include in the # database @@ -337,7 +346,7 @@ class ExtractLook(pype.api.Extractor): instance.data["assumedDestination"], "resources", basename + ext ) - def _process_texture(self, filepath, do_maketx, staging, linearise): + def _process_texture(self, filepath, do_maketx, staging, linearise, force): """Process a single texture file on disk for publishing. This will: 1. 
Check whether it's already published, if so it will do hardlink @@ -359,7 +368,7 @@ class ExtractLook(pype.api.Extractor): # If source has been published before with the same settings, # then don't reprocess but hardlink from the original existing = find_paths_by_hash(texture_hash) - if existing: + if existing and not force: self.log.info("Found hash in database, preparing hardlink..") source = next((p for p in existing if os.path.exists(p)), None) if filepath: From 7b09e1c41e846c0ba1a0686653cdf5c3101c0c17 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:24:06 +0100 Subject: [PATCH 105/195] formatting --- pype/ftrack/ftrack_server/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 748937c7bd..fd4c1fe7b9 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -49,7 +49,9 @@ def ftrack_events_mongo_settings(): def get_ftrack_event_mongo_info(): - host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings() + host, port, database, username, password, collection, auth_db = ( + ftrack_events_mongo_settings() + ) user_pass = "" if username and password: user_pass = "{}:{}@".format(username, password) From 4497a89f53198adfeb60d6ea663fd9e266f0ef5e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:24:36 +0100 Subject: [PATCH 106/195] Storer EventHub moved to lib --- pype/ftrack/ftrack_server/lib.py | 29 +++++++++++++++++++++ pype/ftrack/ftrack_server/session_storer.py | 27 ------------------- 2 files changed, 29 insertions(+), 27 deletions(-) diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index fd4c1fe7b9..80c147d400 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -99,3 +99,32 @@ def check_ftrack_url(url, log_errors=True): print('DEBUG: Ftrack server {} is accessible.'.format(url)) return url + + +class StorerEventHub(ftrack_api.event.hub.EventHub): + def __init__(self, *args, **kwargs): + self.sock = kwargs.pop("sock") + super(StorerEventHub, self).__init__(*args, **kwargs) + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "heartbeat": + # Reply with heartbeat. + self.sock.sendall(b"storer") + return self._send_packet(self._code_name_mapping['heartbeat']) + + elif code_name == "connect": + event = ftrack_api.event.base.Event( + topic="pype.storer.started", + data={}, + source={ + "id": self.id, + "user": {"username": self._api_user} + } + ) + self._event_queue.put(event) + + return super(StorerEventHub, self)._handle_packet( + code, packet_identifier, path, data + ) diff --git a/pype/ftrack/ftrack_server/session_storer.py b/pype/ftrack/ftrack_server/session_storer.py index 0b44d7d3a1..29abf329f0 100644 --- a/pype/ftrack/ftrack_server/session_storer.py +++ b/pype/ftrack/ftrack_server/session_storer.py @@ -14,33 +14,6 @@ import ftrack_api.event from ftrack_api.logging import LazyLogMessage as L -class StorerEventHub(ftrack_api.event.hub.EventHub): - def __init__(self, *args, **kwargs): - self.sock = kwargs.pop("sock") - super(StorerEventHub, self).__init__(*args, **kwargs) - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "heartbeat": - # Reply with heartbeat. 
-            self.sock.sendall(b"storer")
-            return self._send_packet(self._code_name_mapping['heartbeat'])
-
-        elif code_name == "connect":
-            event = ftrack_api.event.base.Event(
-                topic="pype.storer.started",
-                data={},
-                source={
-                    "id": self.id,
-                    "user": {"username": self._api_user}
-                }
-            )
-            self._event_queue.put(event)
-
-        return super(StorerEventHub, self)._handle_packet(
-            code, packet_identifier, path, data
-        )
 
 
 class StorerSession(ftrack_api.session.Session):
From d714f5fb780c7c7db13e4610be385590e373778a Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 11 Dec 2019 12:25:25 +0100
Subject: [PATCH 107/195] Processor EventHub moved to lib

---
 pype/ftrack/ftrack_server/lib.py              | 115 ++++++++++++++++++
 .../ftrack/ftrack_server/session_processor.py | 110 -----------------
 2 files changed, 115 insertions(+), 110 deletions(-)

diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index 80c147d400..091df72a98 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -128,3 +128,118 @@ class StorerEventHub(ftrack_api.event.hub.EventHub):
         return super(StorerEventHub, self)._handle_packet(
             code, packet_identifier, path, data
         )
+
+
+class ProcessEventHub(ftrack_api.event.hub.EventHub):
+    url, database, table_name = get_ftrack_event_mongo_info()
+
+    is_table_created = False
+    pypelog = Logger().get_logger("Session Processor")
+
+    def __init__(self, *args, **kwargs):
+        self.dbcon = DbConnector(
+            mongo_url=self.url,
+            database_name=self.database,
+            table_name=self.table_name
+        )
+        self.sock = kwargs.pop("sock")
+        super(ProcessEventHub, self).__init__(*args, **kwargs)
+
+    def prepare_dbcon(self):
+        try:
+            self.dbcon.install()
+            self.dbcon._database.list_collection_names()
+        except pymongo.errors.AutoReconnect:
+            self.pypelog.error(
+                "Mongo server \"{}\" is not responding, exiting.".format(
+                    os.environ["AVALON_MONGO"]
+                )
+            )
+            sys.exit(0)
+
+        except pymongo.errors.OperationFailure:
+            self.pypelog.error((
+                "Error with Mongo access, probably permissions."
+                " Check if database with name \"{}\""
+                " and collection \"{}\" exists."
+            ).format(self.database, self.table_name))
+            self.sock.sendall(b"MongoError")
+            sys.exit(0)
+
+    def wait(self, duration=None):
+        """Overridden wait
+
+        Events are loaded from Mongo DB when the queue is empty. A handled
+        event is set as processed in Mongo DB.
+        """
+        started = time.time()
+        self.prepare_dbcon()
+        while True:
+            try:
+                event = self._event_queue.get(timeout=0.1)
+            except queue.Empty:
+                if not self.load_events():
+                    time.sleep(0.5)
+            else:
+                try:
+                    self._handle(event)
+                    self.dbcon.update_one(
+                        {"id": event["id"]},
+                        {"$set": {"pype_data.is_processed": True}}
+                    )
+                except pymongo.errors.AutoReconnect:
+                    self.pypelog.error((
+                        "Mongo server \"{}\" is not responding, exiting."
+                    ).format(os.environ["AVALON_MONGO"]))
+                    sys.exit(0)
+            # Additional special processing of events.
+ if event['topic'] == 'ftrack.meta.disconnected': + break + + if duration is not None: + if (time.time() - started) > duration: + break + + def load_events(self): + """Load not processed events sorted by stored date""" + ago_date = datetime.datetime.now() - datetime.timedelta(days=3) + result = self.dbcon.delete_many({ + "pype_data.stored": {"$lte": ago_date}, + "pype_data.is_processed": True + }) + + not_processed_events = self.dbcon.find( + {"pype_data.is_processed": False} + ).sort( + [("pype_data.stored", pymongo.ASCENDING)] + ) + + found = False + for event_data in not_processed_events: + new_event_data = { + k: v for k, v in event_data.items() + if k not in ["_id", "pype_data"] + } + try: + event = ftrack_api.event.base.Event(**new_event_data) + except Exception: + self.logger.exception(L( + 'Failed to convert payload into event: {0}', + event_data + )) + continue + found = True + self._event_queue.put(event) + + return found + + def _handle_packet(self, code, packet_identifier, path, data): + """Override `_handle_packet` which skip events and extend heartbeat""" + code_name = self._code_name_mapping[code] + if code_name == "event": + return + if code_name == "heartbeat": + self.sock.sendall(b"processor") + return self._send_packet(self._code_name_mapping["heartbeat"]) + + return super()._handle_packet(code, packet_identifier, path, data) diff --git a/pype/ftrack/ftrack_server/session_processor.py b/pype/ftrack/ftrack_server/session_processor.py index 133719bab4..a17f919969 100644 --- a/pype/ftrack/ftrack_server/session_processor.py +++ b/pype/ftrack/ftrack_server/session_processor.py @@ -24,116 +24,6 @@ from pypeapp import Logger log = Logger().get_logger("Session processor") -class ProcessEventHub(ftrack_api.event.hub.EventHub): - url, database, table_name = get_ftrack_event_mongo_info() - - is_table_created = False - - def __init__(self, *args, **kwargs): - self.dbcon = DbConnector( - mongo_url=self.url, - database_name=self.database, - table_name=self.table_name - ) - self.sock = kwargs.pop("sock") - super(ProcessEventHub, self).__init__(*args, **kwargs) - - def prepare_dbcon(self): - try: - self.dbcon.install() - self.dbcon._database.list_collection_names() - except pymongo.errors.AutoReconnect: - log.error("Mongo server \"{}\" is not responding, exiting.".format( - os.environ["AVALON_MONGO"] - )) - sys.exit(0) - - except pymongo.errors.OperationFailure: - log.error(( - "Error with Mongo access, probably permissions." - "Check if exist database with name \"{}\"" - " and collection \"{}\" inside." - ).format(self.database, self.table_name)) - self.sock.sendall(b"MongoError") - sys.exit(0) - - def wait(self, duration=None): - """Overriden wait - - Event are loaded from Mongo DB when queue is empty. Handled event is - set as processed in Mongo DB. - """ - started = time.time() - self.prepare_dbcon() - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - if not self.load_events(): - time.sleep(0.5) - else: - try: - self._handle(event) - self.dbcon.update_one( - {"id": event["id"]}, - {"$set": {"pype_data.is_processed": True}} - ) - except pymongo.errors.AutoReconnect: - log.error(( - "Mongo server \"{}\" is not responding, exiting." - ).format(os.environ["AVALON_MONGO"])) - sys.exit(0) - # Additional special processing of events. 
- if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def load_events(self): - """Load not processed events sorted by stored date""" - ago_date = datetime.datetime.now() - datetime.timedelta(days=3) - result = self.dbcon.delete_many({ - "pype_data.stored": {"$lte": ago_date}, - "pype_data.is_processed": True - }) - - not_processed_events = self.dbcon.find( - {"pype_data.is_processed": False} - ).sort( - [("pype_data.stored", pymongo.ASCENDING)] - ) - - found = False - for event_data in not_processed_events: - new_event_data = { - k: v for k, v in event_data.items() - if k not in ["_id", "pype_data"] - } - try: - event = ftrack_api.event.base.Event(**new_event_data) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_data - )) - continue - found = True - self._event_queue.put(event) - - return found - - def _handle_packet(self, code, packet_identifier, path, data): - """Override `_handle_packet` which skip events and extend heartbeat""" - code_name = self._code_name_mapping[code] - if code_name == "event": - return - if code_name == "heartbeat": - self.sock.sendall(b"processor") - return self._send_packet(self._code_name_mapping["heartbeat"]) - - return super()._handle_packet(code, packet_identifier, path, data) class ProcessSession(ftrack_api.session.Session): From edacecf04152ee36e2e6047747fbc69ba4dfc75f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:27:59 +0100 Subject: [PATCH 108/195] sessions from processor and storer moved to lib and added possiblity to set EventHub class in init --- pype/ftrack/ftrack_server/lib.py | 178 +++++++++++++ .../ftrack/ftrack_server/session_processor.py | 182 ------------- pype/ftrack/ftrack_server/session_storer.py | 242 ------------------ 3 files changed, 178 insertions(+), 424 deletions(-) delete mode 100644 pype/ftrack/ftrack_server/session_processor.py delete mode 100644 pype/ftrack/ftrack_server/session_storer.py diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py index 091df72a98..edd3cee09b 100644 --- a/pype/ftrack/ftrack_server/lib.py +++ b/pype/ftrack/ftrack_server/lib.py @@ -1,10 +1,32 @@ import os +import sys +import logging +import getpass +import atexit +import tempfile +import threading +import datetime +import time +import queue +import pymongo + import requests +import ftrack_api +import ftrack_api.session +import ftrack_api.cache +import ftrack_api.operation +import ftrack_api._centralized_storage_scenario +import ftrack_api.event +from ftrack_api.logging import LazyLogMessage as L try: from urllib.parse import urlparse, parse_qs except ImportError: from urlparse import urlparse, parse_qs +from pypeapp import Logger + +from pype.ftrack.lib.custom_db_connector import DbConnector + def ftrack_events_mongo_settings(): host = None @@ -243,3 +265,159 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): return self._send_packet(self._code_name_mapping["heartbeat"]) return super()._handle_packet(code, packet_identifier, path, data) +class SocketSession(ftrack_api.session.Session): + '''An isolated session for interaction with an ftrack server.''' + def __init__( + self, server_url=None, api_key=None, api_user=None, auto_populate=True, + plugin_paths=None, cache=None, cache_key_maker=None, + auto_connect_event_hub=None, schema_cache_path=None, + plugin_arguments=None, sock=None, Eventhub=None + ): + super(ftrack_api.session.Session, self).__init__() + 
self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self._closed = False + + if server_url is None: + server_url = os.environ.get('FTRACK_SERVER') + + if not server_url: + raise TypeError( + 'Required "server_url" not specified. Pass as argument or set ' + 'in environment variable FTRACK_SERVER.' + ) + + self._server_url = server_url + + if api_key is None: + api_key = os.environ.get( + 'FTRACK_API_KEY', + # Backwards compatibility + os.environ.get('FTRACK_APIKEY') + ) + + if not api_key: + raise TypeError( + 'Required "api_key" not specified. Pass as argument or set in ' + 'environment variable FTRACK_API_KEY.' + ) + + self._api_key = api_key + + if api_user is None: + api_user = os.environ.get('FTRACK_API_USER') + if not api_user: + try: + api_user = getpass.getuser() + except Exception: + pass + + if not api_user: + raise TypeError( + 'Required "api_user" not specified. Pass as argument, set in ' + 'environment variable FTRACK_API_USER or one of the standard ' + 'environment variables used by Python\'s getpass module.' + ) + + self._api_user = api_user + + # Currently pending operations. + self.recorded_operations = ftrack_api.operation.Operations() + self.record_operations = True + + self.cache_key_maker = cache_key_maker + if self.cache_key_maker is None: + self.cache_key_maker = ftrack_api.cache.StringKeyMaker() + + # Enforce always having a memory cache at top level so that the same + # in-memory instance is returned from session. + self.cache = ftrack_api.cache.LayeredCache([ + ftrack_api.cache.MemoryCache() + ]) + + if cache is not None: + if callable(cache): + cache = cache(self) + + if cache is not None: + self.cache.caches.append(cache) + + self._managed_request = None + self._request = requests.Session() + self._request.auth = ftrack_api.session.SessionAuthentication( + self._api_key, self._api_user + ) + + self.auto_populate = auto_populate + + # Fetch server information and in doing so also check credentials. + self._server_information = self._fetch_server_information() + + # Now check compatibility of server based on retrieved information. + self.check_server_compatibility() + + # Construct event hub and load plugins. + if Eventhub is None: + Eventhub = ftrack_api.event.hub.EventHub + self._event_hub = Eventhub( + self._server_url, + self._api_user, + self._api_key, + sock=sock + ) + + self._auto_connect_event_hub_thread = None + if auto_connect_event_hub in (None, True): + # Connect to event hub in background thread so as not to block main + # session usage waiting for event hub connection. + self._auto_connect_event_hub_thread = threading.Thread( + target=self._event_hub.connect + ) + self._auto_connect_event_hub_thread.daemon = True + self._auto_connect_event_hub_thread.start() + + # To help with migration from auto_connect_event_hub default changing + # from True to False. + self._event_hub._deprecation_warning_auto_connect = ( + auto_connect_event_hub is None + ) + + # Register to auto-close session on exit. + atexit.register(self.close) + + self._plugin_paths = plugin_paths + if self._plugin_paths is None: + self._plugin_paths = os.environ.get( + 'FTRACK_EVENT_PLUGIN_PATH', '' + ).split(os.pathsep) + + self._discover_plugins(plugin_arguments=plugin_arguments) + + # TODO: Make schemas read-only and non-mutable (or at least without + # rebuilding types)? 
+ if schema_cache_path is not False: + if schema_cache_path is None: + schema_cache_path = os.environ.get( + 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + ) + + schema_cache_path = os.path.join( + schema_cache_path, 'ftrack_api_schema_cache.json' + ) + + self.schemas = self._load_schemas(schema_cache_path) + self.types = self._build_entity_type_classes(self.schemas) + + ftrack_api._centralized_storage_scenario.register(self) + + self._configure_locations() + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.ready', + data=dict( + session=self + ) + ), + synchronous=True + ) diff --git a/pype/ftrack/ftrack_server/session_processor.py b/pype/ftrack/ftrack_server/session_processor.py deleted file mode 100644 index a17f919969..0000000000 --- a/pype/ftrack/ftrack_server/session_processor.py +++ /dev/null @@ -1,182 +0,0 @@ -import logging -import os -import atexit -import datetime -import tempfile -import threading -import time -import requests -import queue -import pymongo - -import ftrack_api -import ftrack_api.session -import ftrack_api.cache -import ftrack_api.operation -import ftrack_api._centralized_storage_scenario -import ftrack_api.event -from ftrack_api.logging import LazyLogMessage as L - -from pype.ftrack.lib.custom_db_connector import DbConnector -from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info -from pypeapp import Logger - -log = Logger().get_logger("Session processor") - - - - -class ProcessSession(ftrack_api.session.Session): - '''An isolated session for interaction with an ftrack server.''' - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None, sock=None - ): - super(ftrack_api.session.Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. 
- self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = ftrack_api.session.SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ProcessEventHub( - self._server_url, - self._api_user, - self._api_key, - sock=sock - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) diff --git a/pype/ftrack/ftrack_server/session_storer.py b/pype/ftrack/ftrack_server/session_storer.py deleted file mode 100644 index 29abf329f0..0000000000 --- a/pype/ftrack/ftrack_server/session_storer.py +++ /dev/null @@ -1,242 +0,0 @@ -import logging -import os -import atexit -import tempfile -import threading -import requests - -import ftrack_api -import ftrack_api.session -import ftrack_api.cache -import ftrack_api.operation -import ftrack_api._centralized_storage_scenario -import ftrack_api.event -from ftrack_api.logging import LazyLogMessage as L - - - - -class StorerSession(ftrack_api.session.Session): - '''An isolated session for interaction with an ftrack server.''' - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None, sock=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. 
If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(ftrack_api.session.Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' 
- ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = ftrack_api.session.SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = StorerEventHub( - self._server_url, - self._api_user, - self._api_key, - sock=sock - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(self.close) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? 
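# A minimal sketch of the schema-cache resolution implemented just below:
# an explicit path wins, the FTRACK_API_SCHEMA_CACHE_PATH environment
# variable is the fallback, a temporary directory is the default, and
# `False` disables caching. `resolve_schema_cache_path` is an illustrative
# name, not part of ftrack_api.
import os
import tempfile


def resolve_schema_cache_path(schema_cache_path=None):
    if schema_cache_path is False:
        # Schema caching explicitly disabled.
        return None
    if schema_cache_path is None:
        schema_cache_path = os.environ.get(
            'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
        )
    return os.path.join(schema_cache_path, 'ftrack_api_schema_cache.json')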
- if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) From dd52e6594db97395f4ace8c39c0d873518f54d09 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:29:28 +0100 Subject: [PATCH 109/195] used moved session and event hubs in subprocesses --- .../ftrack_server/sub_event_processor.py | 13 ++++---- pype/ftrack/ftrack_server/sub_event_storer.py | 30 ++++++++++--------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/pype/ftrack/ftrack_server/sub_event_processor.py b/pype/ftrack/ftrack_server/sub_event_processor.py index 6ada787223..9c971ca916 100644 --- a/pype/ftrack/ftrack_server/sub_event_processor.py +++ b/pype/ftrack/ftrack_server/sub_event_processor.py @@ -1,12 +1,9 @@ -import os import sys -import datetime import signal import socket -import pymongo from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.session_processor import ProcessSession +from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub from pypeapp import Logger log = Logger().get_logger("Event processor") @@ -24,12 +21,14 @@ def main(args): sock.sendall(b"CreatedProcess") try: - session = ProcessSession(auto_connect_event_hub=True, sock=sock) - server = FtrackServer('event') + session = SocketSession( + auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub + ) + server = FtrackServer("event") log.debug("Launched Ftrack Event processor") server.run_server(session) - except Exception as exc: + except Exception: log.error("Event server crashed. See traceback below", exc_info=True) finally: diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py index 4828b10bfa..11cda0e487 100644 --- a/pype/ftrack/ftrack_server/sub_event_storer.py +++ b/pype/ftrack/ftrack_server/sub_event_storer.py @@ -7,22 +7,22 @@ import pymongo import ftrack_api from ftrack_server import FtrackServer -from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info +from pype.ftrack.ftrack_server.lib import ( + get_ftrack_event_mongo_info, + SocketSession, + StorerEventHub +) from pype.ftrack.lib.custom_db_connector import DbConnector -from session_storer import StorerSession from pypeapp import Logger log = Logger().get_logger("Event storer") + +class SessionFactory: + session = None + + url, database, table_name = get_ftrack_event_mongo_info() - - -class SessionClass: - def __init__(self): - self.session = None - - -session_obj = SessionClass() dbcon = DbConnector( mongo_url=url, database_name=database, @@ -75,7 +75,7 @@ def launch(event): def trigger_sync(event): - session = session_obj.session + session = SessionFactory.session if session is None: log.warning("Session is not set. 
Can't trigger Sync to avalon action.")
         return True
 
@@ -93,7 +93,7 @@ def trigger_sync(event):
             "$set": {"pype_data.is_processed": True}
         }
         dbcon.update_many(query, set_dict)
-
+    
     selections = []
     for project in projects:
         if project["status"] != "active":
@@ -154,8 +154,10 @@ def main(args):
     sock.sendall(b"CreatedStore")
 
     try:
-        session = StorerSession(auto_connect_event_hub=True, sock=sock)
-        session_obj.session = session
+        session = SocketSession(
+            auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub
+        )
+        SessionFactory.session = session
         register(session)
         server = FtrackServer("event")
         log.debug("Launched Ftrack Event storer")

From 8d7f29c1bac8fc1b6cf66137e07771b42b582c1e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 11 Dec 2019 12:32:03 +0100
Subject: [PATCH 110/195] fix in trigger sync method, now check event_hub id
 to not trigger sync on every connect event

---
 pype/ftrack/ftrack_server/sub_event_storer.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py
index 11cda0e487..dfe8e21654 100644
--- a/pype/ftrack/ftrack_server/sub_event_storer.py
+++ b/pype/ftrack/ftrack_server/sub_event_storer.py
@@ -76,6 +76,10 @@ def launch(event):
 
 def trigger_sync(event):
     session = SessionFactory.session
+    source_id = event.get("source", {}).get("id")
+    if not source_id or source_id != session.event_hub.id:
+        return
+
     if session is None:
         log.warning("Session is not set. Can't trigger Sync to avalon action.")
         return True

From f5c326aa568a3660c4a8e612e1704b613403577e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 11 Dec 2019 12:33:22 +0100
Subject: [PATCH 111/195] formatting changes

---
 pype/ftrack/ftrack_server/socket_thread.py     | 4 ++--
 pype/ftrack/ftrack_server/sub_legacy_server.py | 9 +++++----
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/pype/ftrack/ftrack_server/socket_thread.py b/pype/ftrack/ftrack_server/socket_thread.py
index d0a2868743..3309f75cd7 100644
--- a/pype/ftrack/ftrack_server/socket_thread.py
+++ b/pype/ftrack/ftrack_server/socket_thread.py
@@ -1,7 +1,5 @@
 import os
-import sys
 import time
-import signal
 import socket
 import threading
 import subprocess
@@ -10,7 +8,9 @@ from pypeapp import Logger
 
 class SocketThread(threading.Thread):
     """Thread that checks the subprocess of the storer or processor of events"""
+
    MAX_TIMEOUT = 35
+
    def __init__(self, name, port, filepath):
        super(SocketThread, self).__init__()
        self.log = Logger().get_logger("SocketThread", "Event Thread")
diff --git a/pype/ftrack/ftrack_server/sub_legacy_server.py b/pype/ftrack/ftrack_server/sub_legacy_server.py
index 31f38d0404..8b7bab5e2e 100644
--- a/pype/ftrack/ftrack_server/sub_legacy_server.py
+++ b/pype/ftrack/ftrack_server/sub_legacy_server.py
@@ -1,4 +1,3 @@
-import os
 import sys
 import time
 import datetime
@@ -7,7 +6,6 @@ import threading
 
 from ftrack_server import FtrackServer
 import ftrack_api
-from ftrack_api.event.hub import EventHub
 from pypeapp import Logger
 
 log = Logger().get_logger("Event Server Legacy")
@@ -37,7 +35,10 @@ class TimerChecker(threading.Thread):
 
         if not self.session.event_hub.connected:
             if not connected:
-                if (datetime.datetime.now() - start).seconds > self.max_time_out:
+                if (
+                    (datetime.datetime.now() - start).seconds >
+                    self.max_time_out
+                ):
                     log.error((
                         "Exiting event server. Session was not connected"
                         " to ftrack server in {} seconds."
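# A minimal standalone sketch of the elapsed-time guard reformatted above,
# assuming only the standard library (`timed_out` is an illustrative helper,
# not part of the server code). Note that `timedelta.seconds` wraps around
# after 24 hours; `total_seconds()` sidesteps that if long waits ever matter.
import datetime


def timed_out(start, max_time_out):
    """Return True once more than `max_time_out` seconds elapsed since `start`."""
    return (datetime.datetime.now() - start).total_seconds() > max_time_out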
@@ -61,7 +62,7 @@ class TimerChecker(threading.Thread): def main(args): check_thread = None try: - server = FtrackServer('event') + server = FtrackServer("event") session = ftrack_api.Session(auto_connect_event_hub=True) check_thread = TimerChecker(server, session) From 6e2ea0c05bc63fbcb4272e11922c202b55d39b0f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 11 Dec 2019 12:34:36 +0100 Subject: [PATCH 112/195] change session check in base event handler --- pype/ftrack/lib/ftrack_base_handler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py index 4b57452961..8329505ffb 100644 --- a/pype/ftrack/lib/ftrack_base_handler.py +++ b/pype/ftrack/lib/ftrack_base_handler.py @@ -2,7 +2,7 @@ import functools import time from pypeapp import Logger import ftrack_api -from pype.ftrack.ftrack_server import session_processor +from pype.ftrack.ftrack_server.lib import SocketSession class MissingPermision(Exception): @@ -41,7 +41,7 @@ class BaseHandler(object): self.log = Logger().get_logger(self.__class__.__name__) if not( isinstance(session, ftrack_api.session.Session) or - isinstance(session, session_processor.ProcessSession) + isinstance(session, SocketSession) ): raise Exception(( "Session object entered with args is instance of \"{}\"" From eac2629fcb27728396dc63b55de00f11cd006408 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:26:12 +0100 Subject: [PATCH 113/195] use FFMPEG_PATH environ to get path to ffmpeg and ffprobe --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 01dc76aacf..1b2c2a04aa 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -9,6 +9,21 @@ from pype import api as pype log = pype.Logger().get_logger("BurninWrapper", "burninwrap") +ffmpeg_path = os.environ.get("FFMPEG_PATH") +if ffmpeg_path and os.path.exists(ffmpeg_path): + # add separator "/" or "\" to be prepared for next part + ffmpeg_path += os.path.sep +else: + ffmpeg_path = "" + +FFMPEG = ( + '{} -loglevel panic -i %(input)s %(filters)s %(args)s%(output)s' +).format(os.path.normpath(ffmpeg_path + "ffmpeg")) +FFPROBE = ( + '{} -v quiet -print_format json -show_format -show_streams %(source)s' +).format(os.path.normpath(ffmpeg_path + "ffprobe")) + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. From 54c76b3b7f0358e9a8943524d22ce215cd006740 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:26:55 +0100 Subject: [PATCH 114/195] copied _streams method from otio adapter to be able to use ffprobe full path --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 1b2c2a04aa..dd62c59bec 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -1,5 +1,7 @@ import os import datetime +import subprocess +import json import opentimelineio_contrib.adapters.ffmpeg_burnins as ffmpeg_burnins from pypeapp.lib import config from pype import api as pype @@ -24,6 +26,19 @@ FFPROBE = ( ).format(os.path.normpath(ffmpeg_path + "ffprobe")) +def _streams(source): + """Reimplemented from otio burnins to be able use full path to ffprobe + :param str source: source media file + :rtype: [{}, ...] 
+ """ + command = FFPROBE % {'source': source} + proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) + out = proc.communicate()[0] + if proc.returncode != 0: + raise RuntimeError("Failed to run: %s" % command) + return json.loads(out)['streams'] + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. From fdf4182155e53f77e25fa32088f3776a9995f8b4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:27:22 +0100 Subject: [PATCH 115/195] replace FFMPEG string in command method --- pype/scripts/otio_burnin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index dd62c59bec..d215bea55e 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -217,7 +217,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if self.filter_string: filters = '-vf "{}"'.format(self.filter_string) - return (ffmpeg_burnins.FFMPEG % { + return (FFMPEG % { 'input': self.source, 'output': output, 'args': '%s ' % args if args else '', @@ -398,7 +398,7 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) codec_args = '' if codec_data is not []: codec_args = " ".join(codec_data) - + burnin.render(output_path, args=codec_args, overwrite=overwrite) From 978e05b536debcc928d39376690a14558c25a198 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 11:27:47 +0100 Subject: [PATCH 116/195] get streams before super init in Burnin class is called --- pype/scripts/otio_burnin.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index d215bea55e..3e8cb3b0c4 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -91,6 +91,9 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): } def __init__(self, source, streams=None, options_init=None): + if not streams: + streams = _streams(source) + super().__init__(source, streams) if options_init: self.options_init.update(options_init) From df34ed8705bf62e611b0b7d9e1e3f725df178457 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 12 Dec 2019 17:44:40 +0100 Subject: [PATCH 117/195] refactored verion to task event and added feature with more statuses to set --- .../events/event_version_to_task_statuses.py | 170 ++++++++++++------ 1 file changed, 113 insertions(+), 57 deletions(-) diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index cd83b819bc..1f5f1514d7 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -4,6 +4,7 @@ from pypeapp import config class VersionToTaskStatus(BaseEvent): + # Presets usage default_status_mapping = {} def launch(self, session, event): @@ -11,69 +12,124 @@ class VersionToTaskStatus(BaseEvent): # start of event procedure ---------------------------------- for entity in event['data'].get('entities', []): - # Filter non-assetversions - if ( - entity['entityType'] == 'assetversion' and - 'statusid' in (entity.get('keys') or []) - ): + # Filter AssetVersions + if entity["entityType"] != "assetversion": + continue - version = session.get('AssetVersion', entity['entityId']) - try: - version_status = session.get( - 'Status', entity['changes']['statusid']['new'] - ) - except Exception: + # Skip if statusid not in keys (in changes) + keys = entity.get("keys") + if not keys or "statusid" not in keys: + continue + + # Get new version task 
name + version_status_id = ( + entity + .get("changes", {}) + .get("statusid", {}) + .get("new", {}) + ) + + # Just check that `new` is set to any value + if not version_status_id: + continue + + try: + version_status = session.get("Status", version_status_id) + except Exception: + self.log.warning( + "Troubles with query status id [ {} ]".format( + version_status_id + ), + exc_info=True + ) + + if not version_status: + continue + + version_status_orig = version_status["name"] + + # Load status mapping from presets + status_mapping = ( + config.get_presets() + .get("ftrack", {}) + .get("ftrack_config", {}) + .get("status_version_to_task") + ) or self.default_status_mapping + + # Skip if mapping is empty + if not status_mapping: + continue + + # Lower version status name and check if has mapping + version_status = version_status_orig.lower() + new_status_names = status_mapping.get(version_status) + if not new_status_names: + continue + + self.log.debug( + "Processing AssetVersion status change: [ {} ]".format( + version_status_orig + ) + ) + + # Backwards compatibility (convert string to list) + if isinstance(new_status_names, str): + new_status_names = [new_status_names] + + # Lower all names from presets + new_status_names = [name.lower() for name in new_status_names] + + # Get entities necessary for processing + version = session.get("AssetVersion", entity["entityId"]) + task = version.get("task") + if not task: + continue + + project_schema = task["project"]["project_schema"] + # Get all available statuses for Task + statuses = project_schema.get_statuses("Task", task["type_id"]) + # map lowered status name with it's object + stat_names_low = { + status["name"].lower(): status for status in statuses + } + + new_status = None + for status_name in new_status_names: + if status_name not in stat_names_low: continue - task_status = version_status - task = version['task'] - self.log.info('>>> version status: [ {} ]'.format( - version_status['name'])) - version_name_low = version_status['name'].lower() + # store object of found status + new_status = stat_names_low[status_name] + self.log.debug("Status to set: [ {} ]".format( + new_status["name"] + )) + break - status_mapping = ( - config.get_presets() - .get("ftrack", {}) - .get("ftrack_config", {}) - .get("status_version_to_task") - ) or self.default_status_mapping + # Skip if status names were not found for paticulat entity + if not new_status: + self.log.warning( + "Any of statuses from presets can be set: {}".format( + str(new_status_names) + ) + ) + continue - status_to_set = status_mapping.get(version_name_low) + # Get full path to task for logging + ent_path = "/".join([ent["name"] for ent in task["link"]]) - self.log.info( - '>>> status to set: [ {} ]'.format(status_to_set)) - - if status_to_set is not None: - query = 'Status where name is "{}"'.format(status_to_set) - try: - task_status = session.query(query).one() - except Exception: - self.log.info( - '!!! status was not found in Ftrack [ {} ]'.format( - status_to_set - ) - ) - continue - - # Proceed if the task status was set - if task_status is not None: - # Get path to task - path = task['name'] - for p in task['ancestors']: - path = p['name'] + '/' + path - - # Setting task status - try: - task['status'] = task_status - session.commit() - except Exception as e: - session.rollback() - self.log.warning('!!! 
[ {} ] status couldnt be set:\ - [ {} ]'.format(path, e)) - session.rollback() - else: - self.log.info('>>> [ {} ] updated to [ {} ]'.format( - path, task_status['name'])) + # Setting task status + try: + task["status"] = new_status + session.commit() + self.log.debug("[ {} ] Status updated to [ {} ]".format( + ent_path, new_status['name'] + )) + except Exception: + session.rollback() + self.log.warning( + "[ {} ]Status couldn't be set".format(ent_path), + exc_info=True + ) def register(session, plugins_presets): From b5fe082a59061e5eb14d77966ecefbcf7e91dc08 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 10:51:02 +0100 Subject: [PATCH 118/195] sonar import moved to process because in the time the file is imported, sonar is not available --- pype/plugins/blender/create/submarine_model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py index 29fcae8fbf..1845c9b222 100644 --- a/pype/plugins/blender/create/submarine_model.py +++ b/pype/plugins/blender/create/submarine_model.py @@ -2,7 +2,6 @@ import bpy -import sonar.blender from avalon import api from avalon.blender import Creator, lib @@ -16,7 +15,7 @@ class CreateModel(Creator): icon = "cube" def process(self): - + import sonar.blender asset = self.data["asset"] subset = self.data["subset"] name = sonar.blender.plugin.model_name(asset, subset) From 0841d91eef7584e729b58db8d3b2c8340d7e2b3a Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 16 Dec 2019 14:13:31 +0100 Subject: [PATCH 119/195] fix which import --- pype/scripts/publish_filesequence.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py index 7ad7318831..5517cfeb4c 100644 --- a/pype/scripts/publish_filesequence.py +++ b/pype/scripts/publish_filesequence.py @@ -4,7 +4,16 @@ import os import logging import subprocess import platform -from shutil import which +try: + from shutil import which +except ImportError: + # we are in python < 3.3 + def which(command): + path = os.getenv('PATH') + for p in path.split(os.path.pathsep): + p = os.path.join(p, command) + if os.path.exists(p) and os.access(p, os.X_OK): + return p handler = logging.basicConfig() log = logging.getLogger("Publish Image Sequences") From 19cf990fbdf1b24c481ad9ac6b6fa74419331a0d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 17:35:23 +0100 Subject: [PATCH 120/195] added template data to burnins data --- pype/plugins/global/publish/extract_burnin.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 95a7144081..33935b4272 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -32,6 +32,7 @@ class ExtractBurnin(pype.api.Extractor): frame_start = int(instance.data.get("frameStart") or 0) frame_end = int(instance.data.get("frameEnd") or 1) duration = frame_end - frame_start + 1 + prep_data = { "username": instance.context.data['user'], "asset": os.environ['AVALON_ASSET'], @@ -39,8 +40,14 @@ class ExtractBurnin(pype.api.Extractor): "frame_start": frame_start, "frame_end": frame_end, "duration": duration, - "version": version + "version": version, + "comment": instance.context.data.get("comment"), + "intent": instance.context.data.get("intent") } + # Update data with template data + template_data 
= instance.data.get("assumedTemplateData") or {}
+        prep_data.update(template_data)
+
         self.log.debug("__ prep_data: {}".format(prep_data))
         for i, repre in enumerate(instance.data["representations"]):
             self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))

From 1286edfc25c717815d16bb8bb18d7d6a98268b30 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 16 Dec 2019 17:36:10 +0100
Subject: [PATCH 121/195] added filled anatomy to burnin data to be able to
 use `anatomy[...][...]` in burnin presets

---
 pype/plugins/global/publish/extract_burnin.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 33935b4272..06a62dd98b 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -1,5 +1,6 @@
 import os
 import json
+import copy
 
 import pype.api
 import pyblish
@@ -48,6 +49,9 @@ class ExtractBurnin(pype.api.Extractor):
         template_data = instance.data.get("assumedTemplateData") or {}
         prep_data.update(template_data)
 
+        # get anatomy project
+        anatomy = instance.context.data['anatomy']
+
         self.log.debug("__ prep_data: {}".format(prep_data))
         for i, repre in enumerate(instance.data["representations"]):
             self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
@@ -69,11 +73,17 @@ class ExtractBurnin(pype.api.Extractor):
             )
             self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))
 
+            # create copy of prep_data for anatomy formatting
+            _prep_data = copy.deepcopy(prep_data)
+            _prep_data["representation"] = repre["name"]
+            _prep_data["anatomy"] = (
+                anatomy.format_all(_prep_data).get("solved") or {}
+            )
             burnin_data = {
                 "input": full_movie_path.replace("\\", "/"),
                 "codec": repre.get("codec", []),
                 "output": full_burnin_path.replace("\\", "/"),
-                "burnin_data": prep_data
+                "burnin_data": _prep_data
             }
 
             self.log.debug("__ burnin_data2: {}".format(burnin_data))

From d78166a0da72d2a736be3c7b4bfc5da4fa38fff1 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 16 Dec 2019 17:49:42 +0100
Subject: [PATCH 122/195] replace backslash in hierarchy which may cause
 issues in burnin path

---
 pype/plugins/global/publish/collect_templates.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index 9b0c03fdee..48623eec22 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -75,7 +75,7 @@ class CollectTemplates(pyblish.api.InstancePlugin):
                 "asset": asset_name,
                 "subset": subset_name,
                 "version": version_number,
-                "hierarchy": hierarchy,
+                "hierarchy": hierarchy.replace("\\", "/"),
                 "representation": "TEMP"}
 
             instance.data["template"] = template

From 813673dd504eb0f83648daea606a00ae4ac8de86 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 12 Dec 2019 16:04:26 +0100
Subject: [PATCH 123/195] fix(global): missing comma in `gizmo` family

From f06857c42e8376f2f9b63efb77fb37846fd83c05 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 18 Dec 2019 14:56:57 +0100
Subject: [PATCH 124/195] fix(nuke): didn't collect all publishable instances

---
 pype/plugins/nuke/publish/collect_instances.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index cffe415058..c5fb289a1e 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ 
b/pype/plugins/nuke/publish/collect_instances.py @@ -86,11 +86,13 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): node.end() family = avalon_knob_data["family"] - families = avalon_knob_data.get("families") - if families: - families = [families] + families = list() + families_ak = avalon_knob_data.get("families") + + if families_ak: + families.append(families_ak) else: - families = [family] + families.append(family) # Get format format = root['format'].value() @@ -100,7 +102,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): if node.Class() not in "Read": if "render" not in node.knobs().keys(): - families.insert(0, family) + pass elif node["render"].value(): self.log.info("flagged for render") add_family = "render.local" From d16865d96fcfe8b413507ca44535967ffa42140f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 16:08:07 +0100 Subject: [PATCH 125/195] modified sonar creator a little bit --- pype/plugins/blender/create/submarine_model.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py index 1845c9b222..7301073f05 100644 --- a/pype/plugins/blender/create/submarine_model.py +++ b/pype/plugins/blender/create/submarine_model.py @@ -9,26 +9,24 @@ from avalon.blender import Creator, lib class CreateModel(Creator): """Polygonal static geometry""" - name = "model_default" + name = "modelMain" label = "Model" family = "model" icon = "cube" def process(self): - import sonar.blender + import pype.blender + asset = self.data["asset"] subset = self.data["subset"] - name = sonar.blender.plugin.model_name(asset, subset) + name = pype.blender.plugin.model_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) self.data['task'] = api.Session.get('AVALON_TASK') lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): - for obj in bpy.context.selected_objects: + for obj in lib.get_selection(): collection.objects.link(obj) - if bpy.data.workspaces.get('Modeling'): - bpy.context.window.workspace = bpy.data.workspaces['Modeling'] - return collection From cd79f0654dfc0efbb9f36ee040b83c4a5ce419c7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 16:13:38 +0100 Subject: [PATCH 126/195] added init file to pype setup --- setup/blender/init.py | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 setup/blender/init.py diff --git a/setup/blender/init.py b/setup/blender/init.py new file mode 100644 index 0000000000..05c15eaeb2 --- /dev/null +++ b/setup/blender/init.py @@ -0,0 +1,3 @@ +from pype import blender + +blender.install() From f0918ec7604734673c288e0bc55f1c5723dce7ff Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 17:30:47 +0100 Subject: [PATCH 127/195] blender plugins update --- pype/plugins/blender/load/submarine_model.py | 129 ++++++++++++------ .../blender/publish/collect_current_file.py | 2 +- pype/plugins/blender/publish/collect_model.py | 3 +- pype/plugins/blender/publish/extract_model.py | 33 +++-- .../blender/publish/validate_mesh_has_uv.py | 8 +- .../validate_mesh_no_negative_scale.py | 12 +- 6 files changed, 129 insertions(+), 58 deletions(-) diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py index 4535b29065..99095d74cd 100644 --- a/pype/plugins/blender/load/submarine_model.py +++ b/pype/plugins/blender/load/submarine_model.py @@ -38,7 +38,7 @@ class 
BlendModelLoader(pype.blender.AssetLoader): Note: It is assumed that only 1 matching collection is found. """ - for collection in bpy.data.collections: + for collection in bpy.context.blend_data.collections: if collection.name != name: continue if collection.library is None: @@ -52,18 +52,19 @@ class BlendModelLoader(pype.blender.AssetLoader): return None @staticmethod - def _collection_contains_object(collection: bpy.types.Collection, object: bpy.types.Object) -> bool: + def _collection_contains_object( + collection: bpy.types.Collection, object: bpy.types.Object + ) -> bool: """Check if the collection contains the object.""" for obj in collection.objects: if obj == object: return True return False - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[List]: + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: """ Arguments: name: Use pre-defined name @@ -76,21 +77,27 @@ class BlendModelLoader(pype.blender.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] lib_container = pype.blender.plugin.model_name(asset, subset) - container_name = pype.blender.plugin.model_name(asset, subset, namespace) + container_name = pype.blender.plugin.model_name( + asset, subset, namespace + ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to): + with bpy.context.blend_data.libraries.load( + libpath, link=True, relative=relative + ) as (_, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.data.objects.new(container_name, None) + instance_empty = bpy.context.blend_data.objects.new( + container_name, None + ) if not instance_empty.get("avalon"): instance_empty["avalon"] = dict() avalon_info = instance_empty["avalon"] avalon_info.update({"container_name": container_name}) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - container = bpy.data.collections[lib_container] + container = bpy.context.blend_data.collections[lib_container] container.name = container_name instance_empty.instance_collection = container container.make_local() @@ -120,7 +127,9 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get(container["objectName"]) + collection = bpy.context.blend_data.collections.get( + container["objectName"] + ) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -130,14 +139,30 @@ class BlendModelLoader(pype.blender.AssetLoader): pformat(representation, indent=2), ) - assert collection, f"The asset is not loaded: {container['objectName']}" - assert not (collection.children), "Nested collections are not supported." 
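# A compact sketch of the libpath validation performed by this update()
# method, assuming pathlib and the plugin module's VALID_EXTENSIONS list
# (`validate_libpath` is an illustrative name; `bpy.path.abspath`, which the
# real code uses to expand Blender's `//` relative prefix, is omitted here).
from pathlib import Path

VALID_EXTENSIONS = [".blend"]


def validate_libpath(libpath) -> Path:
    path = Path(libpath)
    assert path.is_file(), f"The file doesn't exist: {path}"
    assert path.suffix.lower() in VALID_EXTENSIONS, f"Unsupported file: {path}"
    # Resolve to an absolute, symlink-free form so two references to the
    # same library file compare equal.
    return path.resolve()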
- assert libpath, ("No existing library file found for {container['objectName']}") - assert libpath.is_file(), f"The file doesn't exist: {libpath}" - assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}" - collection_libpath = self._get_library_from_container(collection).filepath - normalized_collection_libpath = str(Path(bpy.path.abspath(collection_libpath)).resolve()) - normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve()) + assert collection, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert not (collection.children), ( + "Nested collections are not supported." + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in pype.blender.plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + collection_libpath = ( + self._get_library_from_container(collection).filepath + ) + normalized_collection_libpath = ( + str(Path(bpy.path.abspath(collection_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) logger.debug( "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", normalized_collection_libpath, @@ -155,29 +180,46 @@ class BlendModelLoader(pype.blender.AssetLoader): # Unlink every object collection.objects.unlink(obj) remove_obj = True - for coll in [coll for coll in bpy.data.collections if coll != collection]: - if coll.objects and self._collection_contains_object(coll, obj): + for coll in [ + coll for coll in bpy.context.blend_data.collections + if coll != collection + ]: + if ( + coll.objects and + self._collection_contains_object(coll, obj) + ): remove_obj = False if remove_obj: objects_to_remove.add(obj) + for obj in objects_to_remove: # Only delete objects that are not used elsewhere - bpy.data.objects.remove(obj) + bpy.context.blend_data.objects.remove(obj) - instance_empties = [obj for obj in collection.users_dupli_group if obj.name in collection.name] + instance_empties = [ + obj for obj in collection.users_dupli_group + if obj.name in collection.name + ] if instance_empties: instance_empty = instance_empties[0] container_name = instance_empty["avalon"]["container_name"] + relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to): + with bpy.context.blend_data.libraries.load( + str(libpath), link=True, relative=relative + ) as (_, data_to): data_to.collections = [container_name] + new_collection = self._get_lib_collection(container_name, libpath) if new_collection is None: - raise ValueError("A matching collection '{container_name}' " - "should have been found in: {libpath}") + raise ValueError( + "A matching collection '{container_name}' " + "should have been found in: {libpath}" + ) + for obj in new_collection.objects: collection.objects.link(obj) - bpy.data.collections.remove(new_collection) + bpy.context.blend_data.collections.remove(new_collection) # Update the representation on the collection avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] avalon_prop["representation"] = str(representation["_id"]) @@ -195,10 +237,14 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! 
""" - collection = bpy.data.collections.get(container["objectName"]) + collection = bpy.context.blend_data.collections.get( + container["objectName"] + ) if not collection: return False - assert not (collection.children), "Nested collections are not supported." + assert not (collection.children), ( + "Nested collections are not supported." + ) instance_parents = list(collection.users_dupli_group) instance_objects = list(collection.objects) for obj in instance_objects + instance_parents: @@ -224,11 +270,10 @@ class CacheModelLoader(pype.blender.AssetLoader): icon = "code-fork" color = "orange" - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[List]: + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: """ Arguments: name: Use pre-defined name @@ -243,17 +288,23 @@ class CacheModelLoader(pype.blender.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] # TODO (jasper): evaluate use of namespace which is 'alien' to Blender. - lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace) + lib_container = container_name = ( + pype.blender.plugin.model_name(asset, subset, namespace) + ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to): + with bpy.context.blend_data.libraries.load( + libpath, link=True, relative=relative + ) as (data_from, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.data.objects.new(container_name, None) + instance_empty = bpy.context.blend_data.objects.new( + container_name, None + ) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - collection = bpy.data.collections[lib_container] + collection = bpy.context.blend_data.collections[lib_container] collection.name = container_name instance_empty.instance_collection = collection diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py index a097c72047..5756431314 100644 --- a/pype/plugins/blender/publish/collect_current_file.py +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -12,5 +12,5 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" - current_file = bpy.data.filepath + current_file = bpy.context.blend_data.filepath context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py index c60402f9ca..4c7e840c17 100644 --- a/pype/plugins/blender/publish/collect_model.py +++ b/pype/plugins/blender/publish/collect_model.py @@ -23,7 +23,7 @@ class CollectModel(pyblish.api.ContextPlugin): representation set. If the representation is set, it is a loaded model and we don't want to publish it. 
""" - for collection in bpy.data.collections: + for collection in bpy.context.blend_data.collections: avalon_prop = collection.get(AVALON_PROPERTY) or dict() if (avalon_prop.get('family') == 'model' and not avalon_prop.get('representation')): @@ -42,6 +42,7 @@ class CollectModel(pyblish.api.ContextPlugin): instance = context.create_instance( name=name, family=family, + families=[family], subset=subset, asset=asset, task=task, diff --git a/pype/plugins/blender/publish/extract_model.py b/pype/plugins/blender/publish/extract_model.py index 75ec33fb27..501c4d9d5c 100644 --- a/pype/plugins/blender/publish/extract_model.py +++ b/pype/plugins/blender/publish/extract_model.py @@ -1,10 +1,10 @@ -from pathlib import Path +import os import avalon.blender.workio -import sonar.api +import pype.api -class ExtractModel(sonar.api.Extractor): +class ExtractModel(pype.api.Extractor): """Extract as model.""" label = "Model" @@ -14,9 +14,10 @@ class ExtractModel(sonar.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = Path(self.staging_dir(instance)) + + stagingdir = self.staging_dir(instance) filename = f"{instance.name}.blend" - filepath = str(stagingdir / filename) + filepath = os.path.join(stagingdir, filename) # Perform extraction self.log.info("Performing extraction..") @@ -24,11 +25,23 @@ class ExtractModel(sonar.api.Extractor): # Just save the file to a temporary location. At least for now it's no # problem to have (possibly) extra stuff in the file. avalon.blender.workio.save_file(filepath, copy=True) + # + # # Store reference for integration + # if "files" not in instance.data: + # instance.data["files"] = list() + # + # # instance.data["files"].append(filename) - # Store reference for integration - if "files" not in instance.data: - instance.data["files"] = list() + if "representations" not in instance.data: + instance.data["representations"] = [] - instance.data["files"].append(filename) + representation = { + 'name': 'blend', + 'ext': 'blend', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) - self.log.info("Extracted instance '%s' to: %s", instance.name, filepath) + + self.log.info("Extracted instance '%s' to: %s", instance.name, representation) diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py index 79a42a11d5..f8c5092ab7 100644 --- a/pype/plugins/blender/publish/validate_mesh_has_uv.py +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import sonar.blender.action +import pype.blender.action class ValidateMeshHasUvs(pyblish.api.InstancePlugin): @@ -14,7 +14,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): families = ["model"] category = "geometry" label = "Mesh Has UV's" - actions = [sonar.blender.action.SelectInvalidAction] + actions = [pype.blender.action.SelectInvalidAction] optional = True @staticmethod @@ -34,7 +34,9 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): def get_invalid(cls, instance) -> List: invalid = [] # TODO (jasper): only check objects in the collection that will be published? - for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + for obj in [ + obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + ]: # Make sure we are in object mode. 
            bpy.ops.object.mode_set(mode='OBJECT')
             if not cls.has_uvs(obj):
                 invalid.append(obj)
diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
index b2a927a2ed..1f050f6844 100644
--- a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
+++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
@@ -3,7 +3,7 @@ from typing import List
 import bpy
 
 import pyblish.api
-import sonar.blender.action
+import pype.blender.action
 
 
 class ValidateMeshNoNegativeScale(pyblish.api.Validator):
@@ -13,13 +13,15 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
     hosts = ["blender"]
     families = ["model"]
     label = "Mesh No Negative Scale"
-    actions = [sonar.blender.action.SelectInvalidAction]
+    actions = [pype.blender.action.SelectInvalidAction]
 
     @staticmethod
     def get_invalid(instance) -> List:
         invalid = []
         # TODO (jasper): only check objects in the collection that will be published?
-        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
+        for obj in [
+            obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH'
+        ]:
             if any(v < 0 for v in obj.scale):
                 invalid.append(obj)
 
@@ -28,4 +30,6 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
     def process(self, instance):
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}")
+            raise RuntimeError(
+                f"Meshes found in instance with negative scale: {invalid}"
+            )

From 2635268a494f612a7a75a8a873e4b211b1fca20e Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 19 Dec 2019 17:32:44 +0100
Subject: [PATCH 128/195] modified blender action

---
 pype/blender/action.py | 29 +++++++++++++++++------------
 1 file changed, 17 insertions(+), 12 deletions(-)

diff --git a/pype/blender/action.py b/pype/blender/action.py
index 948123c3c5..4bd7e303fc 100644
--- a/pype/blender/action.py
+++ b/pype/blender/action.py
@@ -24,19 +24,24 @@ class SelectInvalidAction(pyblish.api.Action):
             if isinstance(invalid_nodes, (list, tuple)):
                 invalid.extend(invalid_nodes)
             else:
-                self.log.warning("Failed plug-in doens't have any selectable objects.")
+                self.log.warning(
+                    "Failed plug-in doesn't have any selectable objects."
+                )
+
+        bpy.ops.object.select_all(action='DESELECT')
 
         # Make sure every node is only processed once
         invalid = list(set(invalid))
-
-        bpy.ops.object.select_all(action='DESELECT')
-        if invalid:
-            invalid_names = [obj.name for obj in invalid]
-            self.log.info("Selecting invalid objects: %s", ", ".join(invalid_names))
-            # Select the objects and also make the last one the active object.
-            for obj in invalid:
-                obj.select_set(True)
-            bpy.context.view_layer.objects.active = invalid[-1]
-
-        else:
+        if not invalid:
             self.log.info("No invalid nodes found.")
+            return
+
+        invalid_names = [obj.name for obj in invalid]
+        self.log.info(
+            "Selecting invalid objects: %s", ", ".join(invalid_names)
+        )
+        # Select the objects and also make the last one the active object.
+        for obj in invalid:
+            obj.select_set(True)
+
+        bpy.context.view_layer.objects.active = invalid[-1]

From bba0d10e9165b859a0cfd050adf4d8a1c886abfb Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 30 Dec 2019 15:09:05 +0100
Subject: [PATCH 129/195] feat(nuke): adding back plugin, renamed to mov-only
 creation in a running Nuke session

---
 .../nuke/publish/extract_review_mov.py        | 181 ++++++++++++++++++
 1 file changed, 181 insertions(+)
 create mode 100644 pype/plugins/nuke/publish/extract_review_mov.py

diff --git a/pype/plugins/nuke/publish/extract_review_mov.py b/pype/plugins/nuke/publish/extract_review_mov.py
new file mode 100644
index 0000000000..ed3101951c
--- /dev/null
+++ b/pype/plugins/nuke/publish/extract_review_mov.py
@@ -0,0 +1,181 @@
+import os
+import nuke
+import pyblish.api
+import pype
+
+
+class ExtractReviewData(pype.api.Extractor):
+    """Extracts movie and thumbnail with baked in luts
+
+    must be run after extract_render_local.py
+
+    """
+
+    order = pyblish.api.ExtractorOrder + 0.01
+    label = "Extract Review Data"
+
+    families = ["review"]
+    hosts = ["nuke"]
+
+    def process(self, instance):
+
+        # Store selection
+        selection = [i for i in nuke.allNodes() if i["selected"].getValue()]
+        # Deselect all nodes to prevent external connections
+        [i["selected"].setValue(False) for i in nuke.allNodes()]
+        self.log.debug("creating staging dir:")
+        self.staging_dir(instance)
+
+        self.log.debug("instance: {}".format(instance))
+        self.log.debug("instance.data[families]: {}".format(
+            instance.data["families"]))
+
+        self.render_review_representation(instance, representation="mov")
+
+        # Restore selection
+        [i["selected"].setValue(False) for i in nuke.allNodes()]
+        [i["selected"].setValue(True) for i in selection]
+
+    def render_review_representation(self,
+                                     instance,
+                                     representation="mov"):
+
+        assert instance.data['representations'][0]['files'], "Instance data files shouldn't be empty!"
+
+        temporary_nodes = []
+        stagingDir = instance.data[
+            'representations'][0]["stagingDir"].replace("\\", "/")
+        self.log.debug("StagingDir `{0}`...".format(stagingDir))
+
+        collection = instance.data.get("collection", None)
+
+        if collection:
+            # get path
+            fname = os.path.basename(collection.format(
+                "{head}{padding}{tail}"))
+            fhead = collection.format("{head}")
+
+            # get first and last frame
+            first_frame = min(collection.indexes)
+            last_frame = max(collection.indexes)
+        else:
+            fname = os.path.basename(instance.data.get("path", None))
+            fhead = os.path.splitext(fname)[0] + "."
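# The name and frame-range resolution above in compact form; a sketch
# assuming a `clique.Collection` for frame sequences and plain instance
# data for single files (`resolve_source` is an illustrative helper name).
import os


def resolve_source(collection, instance_data):
    if collection:
        # e.g. fname "render.%04d.exr", fhead "render."
        fname = os.path.basename(collection.format("{head}{padding}{tail}"))
        fhead = collection.format("{head}")
        first_frame = min(collection.indexes)
        last_frame = max(collection.indexes)
    else:
        fname = os.path.basename(instance_data.get("path"))
        fhead = os.path.splitext(fname)[0] + "."
        first_frame = instance_data.get("frameStart")
        last_frame = instance_data.get("frameEnd")
    return fname, fhead, first_frame, last_frame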
+ first_frame = instance.data.get("frameStart", None) + last_frame = instance.data.get("frameEnd", None) + + rnode = nuke.createNode("Read") + + rnode["file"].setValue( + os.path.join(stagingDir, fname).replace("\\", "/")) + + rnode["first"].setValue(first_frame) + rnode["origfirst"].setValue(first_frame) + rnode["last"].setValue(last_frame) + rnode["origlast"].setValue(last_frame) + temporary_nodes.append(rnode) + previous_node = rnode + + # get input process and connect it to baking + ipn = self.get_view_process_node() + if ipn is not None: + ipn.setInput(0, previous_node) + previous_node = ipn + temporary_nodes.append(ipn) + + reformat_node = nuke.createNode("Reformat") + + ref_node = self.nodes.get("Reformat", None) + if ref_node: + for k, v in ref_node: + self.log.debug("k,v: {0}:{1}".format(k,v)) + if isinstance(v, unicode): + v = str(v) + reformat_node[k].setValue(v) + + reformat_node.setInput(0, previous_node) + previous_node = reformat_node + temporary_nodes.append(reformat_node) + + dag_node = nuke.createNode("OCIODisplay") + dag_node.setInput(0, previous_node) + previous_node = dag_node + temporary_nodes.append(dag_node) + + # create write node + write_node = nuke.createNode("Write") + + if representation in "mov": + file = fhead + "baked.mov" + name = "baked" + path = os.path.join(stagingDir, file).replace("\\", "/") + self.log.debug("Path: {}".format(path)) + instance.data["baked_colorspace_movie"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("mov") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["review", "delete"] + + elif representation in "jpeg": + file = fhead + "jpeg" + name = "thumbnail" + path = os.path.join(stagingDir, file).replace("\\", "/") + instance.data["thumbnail"] = path + write_node["file"].setValue(path) + write_node["file_type"].setValue("jpeg") + write_node["raw"].setValue(1) + write_node.setInput(0, previous_node) + temporary_nodes.append(write_node) + tags = ["thumbnail"] + + # retime for + first_frame = int(last_frame) / 2 + last_frame = int(last_frame) / 2 + + repre = { + 'name': name, + 'ext': representation, + 'files': file, + "stagingDir": stagingDir, + "frameStart": first_frame, + "frameEnd": last_frame, + "anatomy_template": "render", + "tags": tags + } + instance.data["representations"].append(repre) + + # Render frames + nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + + self.log.debug("representations: {}".format(instance.data["representations"])) + + # Clean up + for node in temporary_nodes: + nuke.delete(node) + + def get_view_process_node(self): + + # Select only the target node + if nuke.selectedNodes(): + [n.setSelected(False) for n in nuke.selectedNodes()] + + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + nuke.nodeCopy('%clipboard%') + + [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all + + nuke.nodePaste('%clipboard%') + + ipn = nuke.selectedNode() + + return ipn From cd4ad045e6e53bb2ad9963e56d2acfac3c045ea2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 4 Jan 2020 17:14:31 +0100 Subject: [PATCH 130/195] fix(nks): workio on save_as if Untitled didnt do anything --- pype/nukestudio/workio.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff 
--git a/pype/nukestudio/workio.py b/pype/nukestudio/workio.py index 1681d8a2ab..c7484b826b 100644 --- a/pype/nukestudio/workio.py +++ b/pype/nukestudio/workio.py @@ -22,19 +22,16 @@ def has_unsaved_changes(): def save_file(filepath): + file = os.path.basename(filepath) project = hiero.core.projects()[-1] - # close `Untitled` project - if "Untitled" not in project.name(): - log.info("Saving project: `{}`".format(project.name())) + if project: + log.info("Saving project: `{}` as '{}'".format(project.name(), file)) project.saveAs(filepath) - elif not project: + else: log.info("Creating new project...") project = hiero.core.newProject() project.saveAs(filepath) - else: - log.info("Dropping `Untitled` project...") - return def open_file(filepath): From 73e50fa03fcc6efccbcf49cfac120e3dbb4bf01a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 5 Jan 2020 14:24:56 +0100 Subject: [PATCH 131/195] change label to see whole label value --- pype/ftrack/actions/action_seed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py index cf0a4b0445..5cbc5d1cec 100644 --- a/pype/ftrack/actions/action_seed.py +++ b/pype/ftrack/actions/action_seed.py @@ -9,7 +9,7 @@ class SeedDebugProject(BaseAction): #: Action identifier. identifier = "seed.debug.project" #: Action label. - label = "SeedDebugProject" + label = "Seed Debug Project" #: Action description. description = "Description" #: priority From 0024688a449a81919ab4b3331126a4f451a112ff Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 5 Jan 2020 14:25:50 +0100 Subject: [PATCH 132/195] convert input values to integer and set to 0 if not successful --- pype/ftrack/actions/action_seed.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py index 5cbc5d1cec..260e854d14 100644 --- a/pype/ftrack/actions/action_seed.py +++ b/pype/ftrack/actions/action_seed.py @@ -265,6 +265,11 @@ class SeedDebugProject(BaseAction): def create_assets(self, project, asset_count): self.log.debug("*** Creating assets:") + try: + asset_count = int(asset_count) + except ValueError: + asset_count = 0 + main_entity = self.session.create("Folder", { "name": "Assets", "parent": project @@ -305,6 +310,19 @@ class SeedDebugProject(BaseAction): def create_shots(self, project, seq_count, shots_count): self.log.debug("*** Creating shots:") + + # Convert counts to integers + try: + seq_count = int(seq_count) + except ValueError: + seq_count = 0 + + try: + shots_count = int(shots_count) + except ValueError: + shots_count = 0 + + # Create Folder "Shots" main_entity = self.session.create("Folder", { "name": "Shots", "parent": project From 080f1f6819d09b5c7d9ca8c3f3bc061998e9933b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Sun, 5 Jan 2020 14:26:13 +0100 Subject: [PATCH 133/195] check if input values of seeder are greater than 0 --- pype/ftrack/actions/action_seed.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pype/ftrack/actions/action_seed.py b/pype/ftrack/actions/action_seed.py index 260e854d14..1238e73e72 100644 --- a/pype/ftrack/actions/action_seed.py +++ b/pype/ftrack/actions/action_seed.py @@ -270,6 +270,10 @@ class SeedDebugProject(BaseAction): except ValueError: asset_count = 0 + if asset_count <= 0: + self.log.debug("No assets to create") + return + main_entity = self.session.create("Folder", { "name": "Assets", "parent": project @@ -322,6 +326,18 @@ class SeedDebugProject(BaseAction): 
except ValueError: shots_count = 0 + # Check if both are higher than 0 + missing = [] + if seq_count <= 0: + missing.append("sequences") + + if shots_count <= 0: + missing.append("shots") + + if missing: + self.log.debug("No {} to create".format(" and ".join(missing))) + return + # Create Folder "Shots" main_entity = self.session.create("Folder", { "name": "Shots", From 730fbdd5090d06c55a9890d73e62c91e30ab1453 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 6 Jan 2020 00:45:22 +0100 Subject: [PATCH 134/195] fix(global): reformat didn't return correct data --- pype/plugins/global/publish/extract_review.py | 46 ++++++++++++------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index f621df0c66..0c39af64ed 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -1,5 +1,4 @@ import os -import math import pyblish.api import clique import pype.api @@ -25,14 +24,16 @@ class ExtractReview(pyblish.api.InstancePlugin): ext_filter = [] def process(self, instance): + to_width = 1920 + to_height = 1080 output_profiles = self.outputs or {} inst_data = instance.data fps = inst_data.get("fps") start_frame = inst_data.get("frameStart") - resolution_height = instance.data.get("resolutionHeight", 1080) - resolution_width = instance.data.get("resolutionWidth", 1920) + resolution_width = instance.data.get("resolutionWidth", to_width) + resolution_height = instance.data.get("resolutionHeight", to_height) pixel_aspect = instance.data.get("pixelAspect", 1) self.log.debug("Families In: `{}`".format(instance.data["families"])) @@ -172,22 +173,35 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect)) self.log.debug("__ resolution_width: `{}`".format(resolution_width)) self.log.debug("__ resolution_height: `{}`".format(resolution_height)) + # scaling none square pixels and 1920 width if "reformat" in p_tags: - width_scale = 1920 - width_half_pad = 0 - res_w = int(float(resolution_width) * pixel_aspect) - height_half_pad = int(( - (res_w - 1920) / ( - res_w * .01) * ( - 1080 * .01)) / 2 - ) - height_scale = 1080 - (height_half_pad * 2) - if height_scale > 1080: + resolution_ratio = float(resolution_width / ( + resolution_height * pixel_aspect)) + delivery_ratio = float(to_width) / float(to_height) + self.log.debug(resolution_ratio) + self.log.debug(delivery_ratio) + + if resolution_ratio < delivery_ratio: + self.log.debug("lower then delivery") + scale_factor = to_height / ( + resolution_height * pixel_aspect) + self.log.debug(scale_factor) + width_scale = int(to_width * scale_factor) + width_half_pad = int(( + to_width - width_scale)/2) + height_scale = to_height height_half_pad = 0 - height_scale = 1080 - width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height))) ) / 2 - width_scale = int(1920 - (width_half_pad * 2)) + else: + self.log.debug("heigher then delivery") + width_scale = to_width + width_half_pad = 0 + scale_factor = to_width / resolution_width + self.log.debug(scale_factor) + height_scale = int( + resolution_height * scale_factor) + height_half_pad = int( + (to_height - height_scale)/2) self.log.debug("__ width_scale: `{}`".format(width_scale)) self.log.debug("__ width_half_pad: `{}`".format(width_half_pad)) From f20c4025c5b1f0df30f659b13d2734c9e3ec3ae6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 10:09:52 +0100 Subject: [PATCH 
135/195] replace bpy.context.blend_data with bpy.data --- pype/plugins/blender/load/submarine_model.py | 26 +++++++++---------- .../blender/publish/collect_current_file.py | 2 +- pype/plugins/blender/publish/collect_model.py | 2 +- .../blender/publish/validate_mesh_has_uv.py | 2 +- .../validate_mesh_no_negative_scale.py | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py index 99095d74cd..bd6db17650 100644 --- a/pype/plugins/blender/load/submarine_model.py +++ b/pype/plugins/blender/load/submarine_model.py @@ -38,7 +38,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Note: It is assumed that only 1 matching collection is found. """ - for collection in bpy.context.blend_data.collections: + for collection in bpy.data.collections: if collection.name != name: continue if collection.library is None: @@ -82,13 +82,13 @@ class BlendModelLoader(pype.blender.AssetLoader): ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( libpath, link=True, relative=relative ) as (_, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.context.blend_data.objects.new( + instance_empty = bpy.data.objects.new( container_name, None ) if not instance_empty.get("avalon"): @@ -97,7 +97,7 @@ class BlendModelLoader(pype.blender.AssetLoader): avalon_info.update({"container_name": container_name}) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - container = bpy.context.blend_data.collections[lib_container] + container = bpy.data.collections[lib_container] container.name = container_name instance_empty.instance_collection = container container.make_local() @@ -127,7 +127,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! 
""" - collection = bpy.context.blend_data.collections.get( + collection = bpy.data.collections.get( container["objectName"] ) libpath = Path(api.get_representation_path(representation)) @@ -181,7 +181,7 @@ class BlendModelLoader(pype.blender.AssetLoader): collection.objects.unlink(obj) remove_obj = True for coll in [ - coll for coll in bpy.context.blend_data.collections + coll for coll in bpy.data.collections if coll != collection ]: if ( @@ -194,7 +194,7 @@ class BlendModelLoader(pype.blender.AssetLoader): for obj in objects_to_remove: # Only delete objects that are not used elsewhere - bpy.context.blend_data.objects.remove(obj) + bpy.data.objects.remove(obj) instance_empties = [ obj for obj in collection.users_dupli_group @@ -205,7 +205,7 @@ class BlendModelLoader(pype.blender.AssetLoader): container_name = instance_empty["avalon"]["container_name"] relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( str(libpath), link=True, relative=relative ) as (_, data_to): data_to.collections = [container_name] @@ -219,7 +219,7 @@ class BlendModelLoader(pype.blender.AssetLoader): for obj in new_collection.objects: collection.objects.link(obj) - bpy.context.blend_data.collections.remove(new_collection) + bpy.data.collections.remove(new_collection) # Update the representation on the collection avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] avalon_prop["representation"] = str(representation["_id"]) @@ -237,7 +237,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.context.blend_data.collections.get( + collection = bpy.data.collections.get( container["objectName"] ) if not collection: @@ -293,18 +293,18 @@ class CacheModelLoader(pype.blender.AssetLoader): ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( libpath, link=True, relative=relative ) as (data_from, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.context.blend_data.objects.new( + instance_empty = bpy.data.objects.new( container_name, None ) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - collection = bpy.context.blend_data.collections[lib_container] + collection = bpy.data.collections[lib_container] collection.name = container_name instance_empty.instance_collection = collection diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py index 5756431314..a097c72047 100644 --- a/pype/plugins/blender/publish/collect_current_file.py +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -12,5 +12,5 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" - current_file = bpy.context.blend_data.filepath + current_file = bpy.data.filepath context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py index 4c7e840c17..ee10eaf7f2 100644 --- a/pype/plugins/blender/publish/collect_model.py +++ b/pype/plugins/blender/publish/collect_model.py @@ -23,7 +23,7 @@ class CollectModel(pyblish.api.ContextPlugin): representation set. If the representation is set, it is a loaded model and we don't want to publish it. 
""" - for collection in bpy.context.blend_data.collections: + for collection in bpy.data.collections: avalon_prop = collection.get(AVALON_PROPERTY) or dict() if (avalon_prop.get('family') == 'model' and not avalon_prop.get('representation')): diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py index f8c5092ab7..b71a40ad8f 100644 --- a/pype/plugins/blender/publish/validate_mesh_has_uv.py +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -35,7 +35,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): invalid = [] # TODO (jasper): only check objects in the collection that will be published? for obj in [ - obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + obj for obj in bpy.data.objects if obj.type == 'MESH' ]: # Make sure we are in object mode. bpy.ops.object.mode_set(mode='OBJECT') diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py index 1f050f6844..7e3b38dd19 100644 --- a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py +++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py @@ -20,7 +20,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): invalid = [] # TODO (jasper): only check objects in the collection that will be published? for obj in [ - obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + obj for obj in bpy.data.objects if obj.type == 'MESH' ]: if any(v < 0 for v in obj.scale): invalid.append(obj) From 6d1b064d0b2593bcdba2914e40b75a2cf820f3fb Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 6 Jan 2020 12:19:56 +0100 Subject: [PATCH 136/195] add previous behaviour as default --- .../ftrack/events/event_version_to_task_statuses.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py index 1f5f1514d7..0d2a3130c0 100644 --- a/pype/ftrack/events/event_version_to_task_statuses.py +++ b/pype/ftrack/events/event_version_to_task_statuses.py @@ -62,9 +62,12 @@ class VersionToTaskStatus(BaseEvent): # Lower version status name and check if has mapping version_status = version_status_orig.lower() - new_status_names = status_mapping.get(version_status) - if not new_status_names: - continue + new_status_names = [] + mapped = status_mapping.get(version_status) + if mapped: + new_status_names.extend(list(mapped)) + + new_status_names.append(version_status) self.log.debug( "Processing AssetVersion status change: [ {} ]".format( @@ -72,10 +75,6 @@ class VersionToTaskStatus(BaseEvent): ) ) - # Backwards compatibility (convert string to list) - if isinstance(new_status_names, str): - new_status_names = [new_status_names] - # Lower all names from presets new_status_names = [name.lower() for name in new_status_names] From 68c8a253bfd3f82c3d535b4c5810324b9c88fa16 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 6 Jan 2020 12:43:43 +0100 Subject: [PATCH 137/195] feat(nuke): lock range on setting frame ranges --- pype/nuke/lib.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index f213b596ad..12a083eca1 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -707,9 +707,11 @@ class WorkfileSettings(object): frame_start = int(data["frameStart"]) - handle_start frame_end = int(data["frameEnd"]) + handle_end + self._root_node["lock_range"].setValue(False) 
self._root_node["fps"].setValue(fps) self._root_node["first_frame"].setValue(frame_start) self._root_node["last_frame"].setValue(frame_end) + self._root_node["lock_range"].setValue(True) # setting active viewers try: From 9009e99712e339fb03476780517ff2a0b2e5d0ae Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 6 Jan 2020 14:07:11 +0100 Subject: [PATCH 138/195] fix(global): passing resolution to context --- pype/plugins/global/publish/collect_filesequences.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index d0ff5722a3..e658cd434c 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -150,6 +150,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if instance: instance_family = instance.get("family") pixel_aspect = instance.get("pixelAspect", 1) + resolution_width = instance.get("resolutionWidth", 1920) + resolution_height = instance.get("resolutionHeight", 1080) lut_path = instance.get("lutPath", None) @@ -229,6 +231,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): "fps": fps, "source": data.get('source', ''), "pixelAspect": pixel_aspect, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height }) if lut_path: instance.data.update({"lutPath": lut_path}) From 25d2e135d9e78a8c2680b421cc08312f328f1ae6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:48:19 +0100 Subject: [PATCH 139/195] add custom attributes key to assetversion data in integrate frant instances --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 5e680a172a..5b8c195730 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -125,6 +125,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "thumbnail": comp['thumbnail'] } + # Add custom attributes for AssetVersion + assetversion_cust_attrs = {} + component_item["assetversion_data"]["custom_attributes"] = ( + assetversion_cust_attrs + ) + componentList.append(component_item) # Create copy with ftrack.unmanaged location if thumb or prev if comp.get('thumbnail') or comp.get('preview') \ From abe9334d5d4adf6962983bff4a8fc2939b0f4d9b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:49:18 +0100 Subject: [PATCH 140/195] add intent value from context to custom attributes if is set --- pype/plugins/ftrack/publish/integrate_ftrack_instances.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index 5b8c195730..78583b0a2f 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -127,6 +127,10 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): # Add custom attributes for AssetVersion assetversion_cust_attrs = {} + intent_val = instance.context.data.get("intent") + if intent_val: + assetversion_cust_attrs["intent"] = intent_val + component_item["assetversion_data"]["custom_attributes"] = ( assetversion_cust_attrs ) From c71fc909cef6bd6536656a9abfcbd9ce36bc2fad Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 15:49:34 +0100 
Subject: [PATCH 141/195] set asset version custom attributes if there are any

---
 .../ftrack/publish/integrate_ftrack_api.py    | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 9fe4fddebf..337562c1f5 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -144,8 +144,11 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             "version": 0,
             "asset": asset_entity,
         }
-
-        assetversion_data.update(data.get("assetversion_data", {}))
+        _assetversion_data = data.get("assetversion_data", {})
+        assetversion_cust_attrs = _assetversion_data.pop(
+            "custom_attributes", {}
+        )
+        assetversion_data.update(_assetversion_data)
 
         assetversion_entity = session.query(
             self.query("AssetVersion", assetversion_data)
@@ -182,6 +185,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             existing_assetversion_metadata.update(assetversion_metadata)
             assetversion_entity["metadata"] = existing_assetversion_metadata
 
+        # Adding Custom Attributes
+        for attr, val in assetversion_cust_attrs.items():
+            if attr in assetversion_entity["custom_attributes"]:
+                assetversion_entity["custom_attributes"][attr] = val
+                continue
+
+            self.log.warning((
+                "Custom Attribute \"{0}\""
+                " is not available for AssetVersion."
+                " Can't set its value to: \"{1}\""
+            ).format(attr, str(val)))
+
         # Have to commit the version and asset, because location can't
         # determine the final location without.
         try:

From 3d33f8fd4ab22eadb27b46ecea8d063f5b856549 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 18:09:46 +0100
Subject: [PATCH 142/195] added get_fps method to burnins class which
 calculates fps from r_frame_rate

---
 pype/scripts/otio_burnin.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 3e8cb3b0c4..a8c4017c52 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -98,6 +98,24 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if options_init:
             self.options_init.update(options_init)
 
+    def get_fps(str_value):
+        if str_value == "0/0":
+            print("Source has \"r_frame_rate\" value set to \"0/0\".")
+            return "Unknown"
+
+        items = str_value.split("/")
+        if len(items) == 1:
+            fps = float(items[0])
+
+        elif len(items) == 2:
+            fps = float(items[0]) / float(items[1])
+
+        # Check if fps is integer or float number
+        if int(fps) == fps:
+            fps = int(fps)
+
+        return str(fps)
+
     def add_text(self, text, align, options=None):
         """
         Adding static text to a filter.
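For reference, ffprobe reports "r_frame_rate" as a rational string such as "24/1", "30000/1001", or "0/0" when the rate cannot be determined; the get_fps helper added above turns that into a printable value. A minimal standalone sketch of the same conversion (the function name and the sample values are illustrative only, not part of the patch):

    from fractions import Fraction

    def parse_frame_rate(rate):
        """Convert an ffprobe ``r_frame_rate`` string to a printable fps value."""
        if rate == "0/0":
            # ffprobe uses "0/0" when it cannot determine the frame rate
            return "Unknown"

        fps = float(Fraction(rate))
        # Collapse whole-number rates such as 24.0 to "24"
        if fps.is_integer():
            return str(int(fps))
        return str(fps)

    # Illustrative values:
    print(parse_frame_rate("24/1"))        # "24"
    print(parse_frame_rate("30000/1001"))  # "29.97002997002997"
    print(parse_frame_rate("0/0"))         # "Unknown"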
From bb86c94c184645631906688ba184e29f50363be8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:10:19 +0100 Subject: [PATCH 143/195] width, height and fps values from ffprobe are added to options data --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index a8c4017c52..ea1554876f 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -95,9 +95,24 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): streams = _streams(source) super().__init__(source, streams) + if options_init: self.options_init.update(options_init) + if "resolution_width" not in self.options_init: + self.options_init["resolution_width"] = ( + streams[0].get("width", "Unknown") + ) + + if "resolution_height" not in self.options_init: + self.options_init["resolution_height"] = ( + streams[0].get("height", "Unknown") + ) + + if "fps" not in self.options_init: + fps = self.get_fps(streams[0]["r_frame_rate"]) + self.options_init["fps"] = fps + def get_fps(str_value): if str_value == "0/0": print("Source has \"r_frame_rate\" value set to \"0/0\".") From 6f4d50d41d8b62f57d13e1c3fdc6fd121c5cd8ac Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:25:07 +0100 Subject: [PATCH 144/195] get_fps moved from Burnin class --- pype/scripts/otio_burnin.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ea1554876f..f6b5c34bff 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -39,6 +39,25 @@ def _streams(source): return json.loads(out)['streams'] +def get_fps(str_value): + if str_value == "0/0": + print("Source has \"r_frame_rate\" value set to \"0/0\".") + return "Unknown" + + items = str_value.split("/") + if len(items) == 1: + fps = float(items[0]) + + elif len(items) == 2: + fps = float(items[0]) / float(items[1]) + + # Check if fps is integer or float number + if int(fps) == fps: + fps = int(fps) + + return str(fps) + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. @@ -113,24 +132,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): fps = self.get_fps(streams[0]["r_frame_rate"]) self.options_init["fps"] = fps - def get_fps(str_value): - if str_value == "0/0": - print("Source has \"r_frame_rate\" value set to \"0/0\".") - return "Unknown" - - items = str_value.split("/") - if len(items) == 1: - fps = float(items[0]) - - elif len(items) == 2: - fps = float(items[0]) / float(items[1]) - - # Check if fps is integer or float number - if int(fps) == fps: - fps = int(fps) - - return str(fps) - def add_text(self, text, align, options=None): """ Adding static text to a filter. 
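The two patches above probe the source with ffprobe and seed the burnin options with the stream's width, height, and frame rate; the patch that follows relocates those defaults from options_init on the burnin object into the data dict that feeds the burnin text templates. A rough sketch of the resulting default-filling pattern, using a hypothetical first-stream dict shaped like the JSON from `ffprobe -show_streams -print_format json`:

    # Hypothetical, trimmed ffprobe stream entry (real output has many more keys).
    stream = {"width": 1920, "height": 1080, "r_frame_rate": "25/1"}

    data = {"username": "artist", "version": 3}  # burnin template data

    # Probed values are used only when the publish data did not provide them.
    if "resolution_width" not in data:
        data["resolution_width"] = stream.get("width", "Unknown")

    if "resolution_height" not in data:
        data["resolution_height"] = stream.get("height", "Unknown")

    if "fps" not in data:
        data["fps"] = get_fps(stream.get("r_frame_rate", "0/0"))  # module-level helper above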
From 3dac4c1b69da68a850e1be4730f37b45b46fabd4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 18:30:17 +0100
Subject: [PATCH 145/195] data from ffprobe are stored to data, not to options

---
 pype/scripts/otio_burnin.py | 25 +++++++++++--------------
 1 file changed, 11 insertions(+), 14 deletions(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index f6b5c34bff..0c985a0faf 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -118,20 +118,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
         if options_init:
             self.options_init.update(options_init)
 
-        if "resolution_width" not in self.options_init:
-            self.options_init["resolution_width"] = (
-                streams[0].get("width", "Unknown")
-            )
-
-        if "resolution_height" not in self.options_init:
-            self.options_init["resolution_height"] = (
-                streams[0].get("height", "Unknown")
-            )
-
-        if "fps" not in self.options_init:
-            fps = self.get_fps(streams[0]["r_frame_rate"])
-            self.options_init["fps"] = fps
-
     def add_text(self, text, align, options=None):
         """
         Adding static text to a filter.
@@ -362,6 +348,17 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
 
     frame_start = data.get("frame_start")
    frame_start_tc = data.get('frame_start_tc', frame_start)
+
+    stream = burnin._streams[0]
+    if "resolution_width" not in data:
+        data["resolution_width"] = stream.get("width", "Unknown")
+
+    if "resolution_height" not in data:
+        data["resolution_height"] = stream.get("height", "Unknown")
+
+    if "fps" not in data:
+        data["fps"] = get_fps(stream.get("r_frame_rate", "0/0"))
+
     for align_text, preset in presets.get('burnins', {}).items():
         align = None
         if align_text == 'TOP_LEFT':

From f84f1537def6d65e0e9c399083e84111e940c83a Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 6 Jan 2020 18:30:24 +0100
Subject: [PATCH 146/195] formatting changes

---
 pype/scripts/otio_burnin.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 0c985a0faf..b3d0e544db 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -413,12 +413,14 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
 
         elif bi_func == 'timecode':
             burnin.add_timecode(align, start_frame=frame_start_tc)
+
         elif bi_func == 'text':
             if not preset.get('text'):
                 log.error('Text is not set for text function burnin!')
                 return
             text = preset['text'].format(**data)
             burnin.add_text(text, align)
+
         elif bi_func == "datetime":
             date_format = preset["format"]
             burnin.add_datetime(date_format, align)
@@ -445,4 +447,4 @@ if __name__ == '__main__':
         data['codec'],
         data['output'],
         data['burnin_data']
-    )
+    )

From a6af3ca90bb72c4bf430fa2d41f71590ab77ef04 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 7 Jan 2020 11:12:42 +0100
Subject: [PATCH 147/195] fix(global): reformat didn't compare properly
 resolution float and int

---
 pype/plugins/global/publish/extract_review.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 0c39af64ed..deceaa93a5 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -196,7 +196,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
                         self.log.debug("heigher then delivery")
                         width_scale = to_width
                         width_half_pad = 0
-                        scale_factor = to_width / resolution_width
+                        scale_factor = float(to_width) / float(resolution_width)
self.log.debug(scale_factor) height_scale = int( resolution_height * scale_factor) From 26f2f882e2997f8e10f8098216edbe241b0cc144 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 7 Jan 2020 13:12:29 +0100 Subject: [PATCH 148/195] fix(otio): burnin right side didnt format properly --- pype/scripts/otio_burnin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 3e8cb3b0c4..89b74e258e 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -139,12 +139,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): options['frame_offset'] = start_frame expr = r'%%{eif\:n+%d\:d}' % options['frame_offset'] + _text = str(int(self.end_frame + options['frame_offset'])) if text and isinstance(text, str): text = r"{}".format(text) expr = text.replace("{current_frame}", expr) + text = text.replace("{current_frame}", _text) options['expression'] = expr - text = str(int(self.end_frame + options['frame_offset'])) self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT) def add_timecode(self, align, options=None, start_frame=None): From ade2a26e84b80c01fd3ea4b39bc216b483f786ab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 8 Jan 2020 00:02:14 +0100 Subject: [PATCH 149/195] feat(nuke): adding back baking mov from nuke --- pype/nuke/lib.py | 275 ++++++++++++++---- .../global/publish/collect_filesequences.py | 2 + .../nuke/publish/extract_review_data_lut.py | 3 +- .../nuke/publish/extract_review_data_mov.py | 57 ++++ .../nuke/publish/extract_review_mov.py | 181 ------------ 5 files changed, 273 insertions(+), 245 deletions(-) create mode 100644 pype/plugins/nuke/publish/extract_review_data_mov.py delete mode 100644 pype/plugins/nuke/publish/extract_review_mov.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 12a083eca1..9201e9c63e 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1199,13 +1199,13 @@ class BuildWorkfile(WorkfileSettings): self.ypos -= (self.ypos_size * multiply) + self.ypos_gap -class Exporter_review_lut: +class Exporter_review: """ - Generator object for review lut from Nuke + Base class object for generating review data from Nuke Args: klass (pyblish.plugin): pyblish plugin parent - + instance (pyblish.context.instance): """ _temp_nodes = [] @@ -1213,6 +1213,101 @@ class Exporter_review_lut: "representations": list() }) + def __init__(self, + klass, + instance + ): + + self.log = klass.log + self.instance = instance + self.path_in = self.instance.data.get("path", None) + self.staging_dir = self.instance.data["stagingDir"] + self.collection = self.instance.data.get("collection", None) + + def get_file_info(self): + if self.collection: + self.log.debug("Collection: `{}`".format(self.collection)) + # get path + self.fname = os.path.basename(self.collection.format( + "{head}{padding}{tail}")) + self.fhead = self.collection.format("{head}") + + # get first and last frame + self.first_frame = min(self.collection.indexes) + self.last_frame = max(self.collection.indexes) + else: + self.fname = os.path.basename(self.path_in) + self.fhead = os.path.splitext(self.fname)[0] + "." 
+ self.first_frame = self.instance.data.get("frameStart", None) + self.last_frame = self.instance.data.get("frameEnd", None) + + if "#" in self.fhead: + self.fhead = self.fhead.replace("#", "")[:-1] + + def get_representation_data(self, tags=None, range=False): + add_tags = [] + if tags: + add_tags = tags + + repre = { + 'name': self.name, + 'ext': self.ext, + 'files': self.file, + "stagingDir": self.staging_dir, + "anatomy_template": "publish", + "tags": [self.name.replace("_", "-")] + add_tags + } + + if range: + repre.update({ + "frameStart": self.first_frame, + "frameEnd": self.last_frame, + }) + + self.data["representations"].append(repre) + + def get_view_process_node(self): + """ + Will get any active view process. + + Arguments: + self (class): in object definition + + Returns: + nuke.Node: copy node of Input Process node + """ + anlib.reset_selection() + ipn_orig = None + for v in [n for n in nuke.allNodes() + if "Viewer" in n.Class()]: + ip = v['input_process'].getValue() + ipn = v['input_process_node'].getValue() + if "VIEWER_INPUT" not in ipn and ip: + ipn_orig = nuke.toNode(ipn) + ipn_orig.setSelected(True) + + if ipn_orig: + # copy selected to clipboard + nuke.nodeCopy('%clipboard%') + # reset selection + anlib.reset_selection() + # paste node and selection is on it only + nuke.nodePaste('%clipboard%') + # assign to variable + ipn = nuke.selectedNode() + + return ipn + + +class Exporter_review_lut(Exporter_review): + """ + Generator object for review lut from Nuke + + Args: + klass (pyblish.plugin): pyblish plugin parent + + + """ def __init__(self, klass, instance, @@ -1221,9 +1316,8 @@ class Exporter_review_lut: cube_size=None, lut_size=None, lut_style=None): - - self.log = klass.log - self.instance = instance + # initialize parent class + Exporter_review.__init__(self, klass, instance) self.name = name or "baked_lut" self.ext = ext or "cube" @@ -1231,16 +1325,13 @@ class Exporter_review_lut: self.lut_size = lut_size or 1024 self.lut_style = lut_style or "linear" - self.stagingDir = self.instance.data["stagingDir"] - self.collection = self.instance.data.get("collection", None) - # set frame start / end and file name to self self.get_file_info() self.log.info("File info was set...") self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/") + self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") def generate_lut(self): # ---------- start nodes creation @@ -1303,70 +1394,128 @@ class Exporter_review_lut: return self.data - def get_file_info(self): - if self.collection: - self.log.debug("Collection: `{}`".format(self.collection)) - # get path - self.fname = os.path.basename(self.collection.format( - "{head}{padding}{tail}")) - self.fhead = self.collection.format("{head}") - # get first and last frame - self.first_frame = min(self.collection.indexes) - self.last_frame = max(self.collection.indexes) +class Exporter_review_mov(Exporter_review): + """ + Metaclass for generating review mov files + + Args: + klass (pyblish.plugin): pyblish plugin parent + + + """ + def __init__(self, + klass, + instance, + name=None, + ext=None, + ): + # initialize parent class + Exporter_review.__init__(self, klass, instance) + + # passing presets for nodes to self + if hasattr(klass, "nodes"): + self.nodes = klass.nodes else: - self.fname = os.path.basename(self.instance.data.get("path", None)) - self.fhead = os.path.splitext(self.fname)[0] + "." 
- self.first_frame = self.instance.data.get("frameStart", None) - self.last_frame = self.instance.data.get("frameEnd", None) + self.nodes = {} - if "#" in self.fhead: - self.fhead = self.fhead.replace("#", "")[:-1] + self.name = name or "baked" + self.ext = ext or "mov" - def get_representation_data(self): + # set frame start / end and file name to self + self.get_file_info() - repre = { - 'name': self.name, - 'ext': self.ext, - 'files': self.file, - "stagingDir": self.stagingDir, - "anatomy_template": "publish", - "tags": [self.name.replace("_", "-")] - } + self.log.info("File info was set...") - self.data["representations"].append(repre) + self.file = self.fhead + self.name + ".{}".format(self.ext) + self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") - def get_view_process_node(self): - """ - Will get any active view process. + def generate_mov(self, farm=False): + # ---------- start nodes creation - Arguments: - self (class): in object definition + # Read node + r_node = nuke.createNode("Read") + r_node["file"].setValue(self.path_in) + r_node["first"].setValue(self.first_frame) + r_node["origfirst"].setValue(self.first_frame) + r_node["last"].setValue(self.last_frame) + r_node["origlast"].setValue(self.last_frame) + # connect + self._temp_nodes.append(r_node) + self.previous_node = r_node + self.log.debug("Read... `{}`".format(self._temp_nodes)) - Returns: - nuke.Node: copy node of Input Process node - """ - anlib.reset_selection() - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) + # View Process node + ipn = self.get_view_process_node() + if ipn is not None: + # connect + ipn.setInput(0, self.previous_node) + self._temp_nodes.append(ipn) + self.previous_node = ipn + self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - if ipn_orig: - # copy selected to clipboard - nuke.nodeCopy('%clipboard%') - # reset selection - anlib.reset_selection() - # paste node and selection is on it only - nuke.nodePaste('%clipboard%') - # assign to variable - ipn = nuke.selectedNode() + # reformat_node = nuke.createNode("Reformat") + # rn_preset = self.nodes.get("Reformat", None) + # if rn_preset: + # self.log.debug("Reformat preset") + # for k, v in rn_preset: + # self.log.debug("k, v: {0}:{1}".format(k, v)) + # if isinstance(v, unicode): + # v = str(v) + # reformat_node[k].setValue(v) + # # connect + # reformat_node.setInput(0, self.previous_node) + # self._temp_nodes.append(reformat_node) + # self.previous_node = reformat_node + # self.log.debug("Reformat... `{}`".format(self._temp_nodes)) + + # OCIODisplay node + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + + # Write node + write_node = nuke.createNode("Write") + self.log.debug("Path: {}".format(self.path)) + self.instance.data["baked_colorspace_movie"] = self.path + write_node["file"].setValue(self.path) + write_node["file_type"].setValue(self.ext) + write_node["raw"].setValue(1) + # connect + write_node.setInput(0, self.previous_node) + self._temp_nodes.append(write_node) + self.log.debug("Write... `{}`".format(self._temp_nodes)) + + # ---------- end nodes creation + + if not farm: + self.log.info("Rendering... 
") + # Render Write node + nuke.execute( + write_node.name(), + int(self.first_frame), + int(self.last_frame)) + + self.log.info("Rendered...") + + # ---------- generate representation data + self.get_representation_data( + tags=["review", "delete"], + range=True + ) + + self.log.debug("Representation... `{}`".format(self.data)) + + # ---------- Clean up + # for node in self._temp_nodes: + # nuke.delete(node) + # self.log.info("Deleted nodes...") + + return self.data - return ipn def get_dependent_nodes(nodes): """Get all dependent nodes connected to the list of nodes. diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index e658cd434c..6a59f5dffc 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -148,6 +148,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): os.environ.update(session) instance = metadata.get("instance") if instance: + # here is the place to add ability for nuke noninteractive + # ______________________________________ instance_family = instance.get("family") pixel_aspect = instance.get("pixelAspect", 1) resolution_width = instance.get("resolutionWidth", 1920) diff --git a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index dfc10952cd..f5fc3e59db 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -6,7 +6,7 @@ import pype reload(pnlib) -class ExtractReviewLutData(pype.api.Extractor): +class ExtractReviewDataLut(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py @@ -37,6 +37,7 @@ class ExtractReviewLutData(pype.api.Extractor): self.log.info( "StagingDir `{0}`...".format(instance.data["stagingDir"])) + # generate data with anlib.maintained_selection(): exporter = pnlib.Exporter_review_lut( self, instance diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py new file mode 100644 index 0000000000..585bd3f108 --- /dev/null +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -0,0 +1,57 @@ +import os +import nuke +import pyblish.api +from avalon.nuke import lib as anlib +from pype.nuke import lib as pnlib +import pype +reload(pnlib) + + +class ExtractReviewDataMov(pype.api.Extractor): + """Extracts movie and thumbnail with baked in luts + + must be run after extract_render_local.py + + """ + + order = pyblish.api.ExtractorOrder + 0.01 + label = "Extract Review Data Mov" + + families = ["review"] + hosts = ["nuke"] + + def process(self, instance): + families = instance.data["families"] + self.log.info("Creating staging dir...") + if "representations" in instance.data: + staging_dir = instance.data[ + "representations"][0]["stagingDir"].replace("\\", "/") + instance.data["stagingDir"] = staging_dir + instance.data["representations"][0]["tags"] = [] + else: + instance.data["representations"] = [] + # get output path + render_path = instance.data['path'] + staging_dir = os.path.normpath(os.path.dirname(render_path)) + instance.data["stagingDir"] = staging_dir + + self.log.info( + "StagingDir `{0}`...".format(instance.data["stagingDir"])) + + # generate data + with anlib.maintained_selection(): + exporter = pnlib.Exporter_review_mov( + self, instance) + + if "render.farm" in families: + instance.data["families"].remove("review") + 
instance.data["families"].remove("ftrack") + data = exporter.generate_mov(farm=True) + else: + data = exporter.generate_mov() + + # assign to representations + instance.data["representations"] += data["representations"] + + self.log.debug( + "_ representations: {}".format(instance.data["representations"])) diff --git a/pype/plugins/nuke/publish/extract_review_mov.py b/pype/plugins/nuke/publish/extract_review_mov.py deleted file mode 100644 index ed3101951c..0000000000 --- a/pype/plugins/nuke/publish/extract_review_mov.py +++ /dev/null @@ -1,181 +0,0 @@ -import os -import nuke -import pyblish.api -import pype\ - -class ExtractReviewData(pype.api.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review Data" - - families = ["review"] - hosts = ["nuke"] - - def process(self, instance): - - # Store selection - selection = [i for i in nuke.allNodes() if i["selected"].getValue()] - # Deselect all nodes to prevent external connections - [i["selected"].setValue(False) for i in nuke.allNodes()] - self.log.debug("creating staging dir:") - self.staging_dir(instance) - - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) - - self.render_review_representation(instance, representation="mov") - - # Restore selection - [i["selected"].setValue(False) for i in nuke.allNodes()] - [i["selected"].setValue(True) for i in selection] - - def render_review_representation(self, - instance, - representation="mov"): - - assert instance.data['representations'][0]['files'], "Instance data files should't be empty!" - - temporary_nodes = [] - stagingDir = instance.data[ - 'representations'][0]["stagingDir"].replace("\\", "/") - self.log.debug("StagingDir `{0}`...".format(stagingDir)) - - collection = instance.data.get("collection", None) - - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") - - # get first and last frame - first_frame = min(collection.indexes) - last_frame = max(collection.indexes) - else: - fname = os.path.basename(instance.data.get("path", None)) - fhead = os.path.splitext(fname)[0] + "." 
- first_frame = instance.data.get("frameStart", None) - last_frame = instance.data.get("frameEnd", None) - - rnode = nuke.createNode("Read") - - rnode["file"].setValue( - os.path.join(stagingDir, fname).replace("\\", "/")) - - rnode["first"].setValue(first_frame) - rnode["origfirst"].setValue(first_frame) - rnode["last"].setValue(last_frame) - rnode["origlast"].setValue(last_frame) - temporary_nodes.append(rnode) - previous_node = rnode - - # get input process and connect it to baking - ipn = self.get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - reformat_node = nuke.createNode("Reformat") - - ref_node = self.nodes.get("Reformat", None) - if ref_node: - for k, v in ref_node: - self.log.debug("k,v: {0}:{1}".format(k,v)) - if isinstance(v, unicode): - v = str(v) - reformat_node[k].setValue(v) - - reformat_node.setInput(0, previous_node) - previous_node = reformat_node - temporary_nodes.append(reformat_node) - - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - # create write node - write_node = nuke.createNode("Write") - - if representation in "mov": - file = fhead + "baked.mov" - name = "baked" - path = os.path.join(stagingDir, file).replace("\\", "/") - self.log.debug("Path: {}".format(path)) - instance.data["baked_colorspace_movie"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("mov") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["review", "delete"] - - elif representation in "jpeg": - file = fhead + "jpeg" - name = "thumbnail" - path = os.path.join(stagingDir, file).replace("\\", "/") - instance.data["thumbnail"] = path - write_node["file"].setValue(path) - write_node["file_type"].setValue("jpeg") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - tags = ["thumbnail"] - - # retime for - first_frame = int(last_frame) / 2 - last_frame = int(last_frame) / 2 - - repre = { - 'name': name, - 'ext': representation, - 'files': file, - "stagingDir": stagingDir, - "frameStart": first_frame, - "frameEnd": last_frame, - "anatomy_template": "render", - "tags": tags - } - instance.data["representations"].append(repre) - - # Render frames - nuke.execute(write_node.name(), int(first_frame), int(last_frame)) - - self.log.debug("representations: {}".format(instance.data["representations"])) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def get_view_process_node(self): - - # Select only the target node - if nuke.selectedNodes(): - [n.setSelected(False) for n in nuke.selectedNodes()] - - ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" in n.Class()]: - ip = v['input_process'].getValue() - ipn = v['input_process_node'].getValue() - if "VIEWER_INPUT" not in ipn and ip: - ipn_orig = nuke.toNode(ipn) - ipn_orig.setSelected(True) - - if ipn_orig: - nuke.nodeCopy('%clipboard%') - - [n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all - - nuke.nodePaste('%clipboard%') - - ipn = nuke.selectedNode() - - return ipn From fbb4c247f60d2d6210e38287f8206c2729e72779 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 8 Jan 2020 00:38:08 +0100 Subject: [PATCH 150/195] fix(global): fixing reformat and letter box --- pype/nuke/lib.py | 23 ++--------- pype/plugins/global/publish/extract_review.py | 38 ++++++++++++------- 2 
files changed, 29 insertions(+), 32 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 9201e9c63e..c468343545 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -1454,21 +1454,6 @@ class Exporter_review_mov(Exporter_review):
             self.previous_node = ipn
             self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
 
-        # reformat_node = nuke.createNode("Reformat")
-        # rn_preset = self.nodes.get("Reformat", None)
-        # if rn_preset:
-        #     self.log.debug("Reformat preset")
-        #     for k, v in rn_preset:
-        #         self.log.debug("k, v: {0}:{1}".format(k, v))
-        #         if isinstance(v, unicode):
-        #             v = str(v)
-        #         reformat_node[k].setValue(v)
-        #     # connect
-        #     reformat_node.setInput(0, self.previous_node)
-        #     self._temp_nodes.append(reformat_node)
-        #     self.previous_node = reformat_node
-        #     self.log.debug("Reformat... `{}`".format(self._temp_nodes))
-
         # OCIODisplay node
         dag_node = nuke.createNode("OCIODisplay")
         # connect
@@ -1509,10 +1494,10 @@ class Exporter_review_mov(Exporter_review):
 
         self.log.debug("Representation... `{}`".format(self.data))
 
-        # ---------- Clean up
-        # for node in self._temp_nodes:
-        #     nuke.delete(node)
-        # self.log.info("Deleted nodes...")
+        # ---------- Clean up
+        for node in self._temp_nodes:
+            nuke.delete(node)
+        self.log.info("Deleted nodes...")
 
         return self.data
 
diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index deceaa93a5..28eb0289fa 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -156,13 +156,34 @@ class ExtractReview(pyblish.api.InstancePlugin):
                     # preset's output data
                     output_args.extend(profile.get('output', []))
 
+                    # defining image ratios
+                    resolution_ratio = float(resolution_width / (
+                        resolution_height * pixel_aspect))
+                    delivery_ratio = float(to_width) / float(to_height)
+                    self.log.debug(resolution_ratio)
+                    self.log.debug(delivery_ratio)
+
+                    # get scale factor
+                    scale_factor = to_height / (
+                        resolution_height * pixel_aspect)
+                    self.log.debug(scale_factor)
+
                     # letter_box
                     lb = profile.get('letter_box', 0)
-                    if lb is not 0:
+                    if lb != 0:
+                        ffmpeg_width = to_width
+                        ffmpeg_height = to_height
                         if "reformat" not in p_tags:
                             lb /= pixel_aspect
+                            if resolution_ratio != delivery_ratio:
+                                ffmpeg_width = resolution_width
+                                ffmpeg_height = int(
+                                    resolution_height * pixel_aspect)
+                        else:
+                            lb /= scale_factor
+
                         output_args.append(
-                            "-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
+                            "-filter:v scale={0}x{1}:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{2})))/2):iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black".format(ffmpeg_width, ffmpeg_height, lb))
 
                     # In case audio is longer than video.
output_args.append("-shortest")
 
@@ -176,17 +197,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
 
                     # scaling none square pixels and 1920 width
                     if "reformat" in p_tags:
-                        resolution_ratio = float(resolution_width / (
-                            resolution_height * pixel_aspect))
-                        delivery_ratio = float(to_width) / float(to_height)
-                        self.log.debug(resolution_ratio)
-                        self.log.debug(delivery_ratio)
-
                         if resolution_ratio < delivery_ratio:
                             self.log.debug("lower then delivery")
-                            scale_factor = to_height / (
-                                resolution_height * pixel_aspect)
-                            self.log.debug(scale_factor)
                             width_scale = int(to_width * scale_factor)
                             width_half_pad = int((
                                 to_width - width_scale)/2)
@@ -209,8 +221,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
 
             self.log.debug("__ height_half_pad: `{}`".format(height_half_pad))
 
-            scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format(
-                width_scale, height_scale, width_half_pad, height_half_pad
+            scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format(
+                width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad
             )
 
             vf_back = self.add_video_filter_args(

From 023aec0a61d6f239970cd848f0fb3cac19ab1a15 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 16 Dec 2019 17:35:23 +0100
Subject: [PATCH 154/195] added template data to burnins data

---
 pype/plugins/global/publish/extract_burnin.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 95a7144081..33935b4272 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -32,6 +32,7 @@ class ExtractBurnin(pype.api.Extractor):
         frame_start = int(instance.data.get("frameStart") or 0)
         frame_end = int(instance.data.get("frameEnd") or 1)
         duration = frame_end - frame_start + 1
+
         prep_data = {
             "username": instance.context.data['user'],
             "asset": os.environ['AVALON_ASSET'],
@@ -39,8 +40,14 @@ class ExtractBurnin(pype.api.Extractor):
             "frame_start": frame_start,
             "frame_end": frame_end,
             "duration": duration,
-            "version": version
+            "version": version,
+            "comment": instance.context.data.get("comment"),
+            "intent": instance.context.data.get("intent")
         }
+        # Update data with template data
+        template_data = instance.data.get("assumedTemplateData") or {}
+        prep_data.update(template_data)
+
         self.log.debug("__ prep_data: {}".format(prep_data))
         for i, repre in enumerate(instance.data["representations"]):
             self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))

From f89c1d3dbc28d2f533eb4828e889ece1f68a33f0 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 16 Dec 2019 17:36:10 +0100
Subject: [PATCH 155/195] added filled anatomy to burnin data to be able use
 `anatomy[...][...]` in burnin presets

---
 pype/plugins/global/publish/extract_burnin.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 33935b4272..06a62dd98b 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -1,5 +1,6 @@
 import os
 import json
+import copy
 
 import pype.api
 import pyblish
@@ -48,6 +49,9 @@ class ExtractBurnin(pype.api.Extractor):
         template_data = instance.data.get("assumedTemplateData") or {}
         prep_data.update(template_data)
 
+        # get anatomy project
+        anatomy = instance.context.data['anatomy']
+
         self.log.debug("__ prep_data: {}".format(prep_data))
         for i, repre in enumerate(instance.data["representations"]):
             self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
@@ -69,11 +73,17 @@ class ExtractBurnin(pype.api.Extractor):
             )
             self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))
 
+            # create copy of prep_data for anatomy formatting
+            _prep_data = copy.deepcopy(prep_data)
+            _prep_data["representation"] = repre["name"]
+            _prep_data["anatomy"] = (
+                anatomy.format_all(_prep_data).get("solved") or {}
+            )
             burnin_data = {
                 "input": full_movie_path.replace("\\", "/"),
                 "codec": repre.get("codec", []),
                 "output": full_burnin_path.replace("\\", "/"),
-                "burnin_data": prep_data
+                "burnin_data": _prep_data
             }
 
             self.log.debug("__ burnin_data2: {}".format(burnin_data))

From dde70634e1d8789b17db595560143d03ddd459a3 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 16 Dec 2019 17:49:42 +0100
Subject: [PATCH 156/195] replace backslash in hierarchy which may cause
 issues in burnin path

---
 pype/plugins/global/publish/collect_templates.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index 9b0c03fdee..48623eec22 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -75,7 +75,7 @@ class CollectTemplates(pyblish.api.InstancePlugin):
                 "asset": asset_name,
                 "subset": subset_name,
                 "version": version_number,
-                "hierarchy": hierarchy,
+                "hierarchy": hierarchy.replace("\\", "/"),
                 "representation": "TEMP"}
 
         instance.data["template"] = template

From 75cb30fe1da52f124ab25ed084ea1e63fab1a677 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 8 Jan 2020 17:11:27 +0100
Subject: [PATCH 157/195] initial version of delivery action in ftrack

---
 pype/ftrack/actions/action_delivery.py | 421 +++++++++++++++++++++++++
 1 file changed, 421 insertions(+)
 create mode 100644 pype/ftrack/actions/action_delivery.py

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
new file mode 100644
index 0000000000..e23e35f91c
--- /dev/null
+++ b/pype/ftrack/actions/action_delivery.py
@@ -0,0 +1,421 @@
+import os
+import copy
+import shutil
+
+import clique
+from bson.objectid import ObjectId
+from avalon import pipeline
+from avalon.vendor import filelink
+from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+
+from pypeapp import Anatomy
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
+
+
+class Delivery(BaseAction):
+    '''Deliver data to a client location.'''
+
+    #: Action identifier.
+    identifier = "delivery.action"
+    #: Action label.
+    label = "Delivery"
+    #: Action description.
+ description = "Deliver data to client" + #: roles that are allowed to register this action + role_list = ["Pypeclub", "Administrator", "Project manager"] + # icon = '{}/ftrack/action_icons/TestAction.svg'.format( + # os.environ.get('PYPE_STATICS_SERVER', '') + # ) + + db_con = DbConnector() + + def discover(self, session, entities, event): + ''' Validation ''' + for entity in entities: + if entity.entity_type.lower() == "assetversion": + return True + + return False + + def interface(self, session, entities, event): + if event["data"].get("values", {}): + return + + title = "Delivery data to Client" + + items = [] + item_splitter = {"type": "label", "value": "---"} + + # Prepare component names for processing + components = None + project = None + for entity in entities: + if project is None: + project_id = None + for ent_info in entity["link"]: + if ent_info["type"].lower() == "project": + project_id = ent_info["id"] + break + + if project_id is None: + project = entity["asset"]["parent"]["project"] + else: + project = session.query(( + "select id, full_name from Project where id is \"{}\"" + ).format(project_id)).one() + + _components = set( + [component["name"] for component in entity["components"]] + ) + if components is None: + components = _components + continue + + components = components.intersection(_components) + if not components: + break + + project_name = project["full_name"] + items.append({ + "type": "hidden", + "name": "__project_name__", + "value": project_name + }) + + # Prpeare anatomy data + anatomy = Anatomy(project_name) + new_anatomies = [] + first = None + for key in (anatomy.templates.get("delivery") or {}): + new_anatomies.append({ + "label": key, + "value": key + }) + if first is None: + first = key + + skipped = False + # Add message if there are any common components + if not components or not new_anatomies: + skipped = True + items.append({ + "type": "label", + "value": "

Something went wrong:

" + }) + + items.append({ + "type": "hidden", + "name": "__skipped__", + "value": skipped + }) + + if not components: + if len(entities) == 1: + items.append({ + "type": "label", + "value": ( + "- Selected entity doesn't have components to deliver." + ) + }) + else: + items.append({ + "type": "label", + "value": ( + "- Selected entities don't have common components." + ) + }) + + # Add message if delivery anatomies are not set + if not new_anatomies: + items.append({ + "type": "label", + "value": ( + "- `\"delivery\"` anatomy key is not set in config." + ) + }) + + # Skip if there are any data shortcomings + if skipped: + return { + "items": items, + "title": title + } + + items.append({ + "value": "

Choose Components to deliver

", + "type": "label" + }) + + for component in components: + items.append({ + "type": "boolean", + "value": False, + "label": component, + "name": component + }) + + items.append(item_splitter) + + items.append({ + "value": "
<h1>Location for delivery</h1>
", + "type": "label" + }) + + items.append({ + "type": "text", + "name": "__location_path__", + "empty_text": "Type location path here..." + }) + + items.append(item_splitter) + + items.append({ + "value": "
<h1>Anatomy of delivery files</h1>
", + "type": "label" + }) + + items.append({ + "type": "label", + "value": ( + "
<i>NOTE: These can be set in Anatomy.yaml" " within `delivery` key.</i>
" + ) + }) + + items.append({ + "type": "enumerator", + "name": "__new_anatomies__", + "data": new_anatomies, + "value": first + }) + + return { + "items": items, + "title": title + } + + def launch(self, session, entities, event): + if "values" not in event["data"]: + return + + values = event["data"]["values"] + skipped = values.pop("__skipped__") + if skipped: + return None + + component_names = [] + location_path = values.pop("__location_path__") + anatomy_name = values.pop("__new_anatomies__") + project_name = values.pop("__project_name__") + + for key, value in values.items(): + if value is True: + component_names.append(key) + + if not component_names: + return None + + location_path = os.path.normpath(location_path.strip()) + if location_path and not os.path.exists(location_path): + return { + "success": False, + "message": ( + "Entered location path does not exists. \"{}\"" + ).format(location_path) + } + + self.db_con.install() + self.db_con.Session["AVALON_PROJECT"] = project_name + + components = [] + repres_to_deliver = [] + for entity in entities: + asset = entity["asset"] + subset_name = asset["name"] + version = entity["version"] + + parent = asset["parent"] + parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) + if not parent_mongo_id: + # TODO log error (much better) + self.log.warning(( + "Seems like entity <{}> is not synchronized to avalon" + ).format(parent["name"])) + continue + + parent_mongo_id = ObjectId(parent_mongo_id) + subset_ent = self.db_con.find_one({ + "type": "subset", + "parent": parent_mongo_id, + "name": subset_name + }) + + version_ent = self.db_con.find_one({ + "type": "version", + "name": version, + "parent": subset_ent["_id"] + }) + + repre_ents = self.db_con.find({ + "type": "representation", + "parent": version_ent["_id"] + }) + + repres_by_name = {} + for repre in repre_ents: + repre_name = repre["name"] + repres_by_name[repre_name] = repre + + for component in entity["components"]: + comp_name = component["name"] + if comp_name not in component_names: + continue + + repre = repres_by_name.get(comp_name) + repres_to_deliver.append(repre) + + src_dst_files = {} + anatomy = Anatomy(project_name) + for repre in repres_to_deliver: + # Get destination repre path + anatomy_data = copy.deepcopy(repre["context"]) + if location_path: + anatomy_data["root"] = location_path + else: + anatomy_data["root"] = pipeline.registered_root() + + # Get source repre path + repre_path = self.path_from_represenation(repre) + # TODO add backup solution where root of path from component + # is repalced with AVALON_PROJECTS root + + if repre_path and os.path.exists(repre_path): + self.process_single_file( + repre_path, anatomy, anatomy_name, anatomy_data + ) + + else: + self.process_sequence( + repre_path, anatomy, anatomy_name, anatomy_data + ) + + self.db_con.uninstall() + + def process_single_file( + self, repre_path, anatomy, anatomy_name, anatomy_data + ): + anatomy_filled = anatomy.format(anatomy_data) + delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) + if not delivery_path: + # TODO log error! 
- missing keys in anatomy + return + + delivery_folder = os.path.dirname(delivery_path) + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + self.copy_file(repre_path, delivery_path) + + def process_sequence( + self, repre_path, anatomy, anatomy_name, anatomy_data + ): + dir_path, file_name = os.path.split(repre_path) + if not os.path.exists(dir_path): + # TODO log if folder don't exist + return + + base_name, ext = os.path.splitext(file_name) + file_name_items = None + if "#" in base_name: + file_name_items = [part for part in base_name.split("#") if part] + + elif "%" in base_name: + file_name_items = base_name.split("%") + + if not file_name_items: + # TODO log if file does not exists + return + + src_collections, remainder = clique.assemble(os.listdir(dir_path)) + src_collection = None + for col in src_collections: + if col.tail != ext: + continue + + # skip if collection don't have same basename + if not col.head.startswith(file_name_items[0]): + continue + + src_collection = col + break + + if src_collection is None: + # TODO log error! + return + + anatomy_data["frame"] = "{frame}" + anatomy_filled = anatomy.format(anatomy_data) + delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) + if not delivery_path: + # TODO log error! - missing keys in anatomy + return + + delivery_folder = os.path.dirname(delivery_path) + dst_head, dst_tail = delivery_path.split("{frame}") + dst_padding = src_collection.padding + dst_collection = clique.Collection( + head=dst_head, + tail=dst_tail, + padding=dst_padding + ) + + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + src_head = src_collection.head + src_tail = src_collection.tail + for index in src_collection.indexes: + src_padding = src_collection.format("{padding}") % index + src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + + dst_padding = dst_collection.format("{padding}") % index + dst_file_name = "{}{}{}".format(dst_head, dst_padding, dst_tail) + + self.copy_file(src, dst) + + def path_from_represenation(self, representation): + try: + template = representation["data"]["template"] + + except KeyError: + return None + + try: + context = representation["context"] + context["root"] = os.environ.get("AVALON_PROJECTS") or "" + path = pipeline.format_template_with_optional_keys( + context, template + ) + + except KeyError: + # Template references unavailable data + return None + + if os.path.exists(path): + return os.path.normpath(path) + + def copy_file(self, src_path, dst_path): + try: + filelink.create( + src_path, + dst_path, + filelink.HARDLINK + ) + except OSError: + shutil.copyfile(src_path, dst_path) + +def register(session, plugins_presets={}): + '''Register plugin. 
Called when used as a plugin.'''
+
+    Delivery(session, plugins_presets).register()
From 830373f3d5c35c298285236a3a36b9eed0aaf5c4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 8 Jan 2020 17:19:35 +0100
Subject: [PATCH 158/195] added delivery icon

---
 pype/ftrack/actions/action_delivery.py |  6 ++---
 res/ftrack/action_icons/Delivery.svg   | 34 ++++++++++++++++++++++++++
 2 files changed, 37 insertions(+), 3 deletions(-)
 create mode 100644 res/ftrack/action_icons/Delivery.svg

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index e23e35f91c..572a9bc8e0 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -24,9 +24,9 @@ class Delivery(BaseAction):
     description = "Deliver data to client"
     #: roles that are allowed to register this action
     role_list = ["Pypeclub", "Administrator", "Project manager"]
-    # icon = '{}/ftrack/action_icons/TestAction.svg'.format(
-    #     os.environ.get('PYPE_STATICS_SERVER', '')
-    # )
+    icon = '{}/ftrack/action_icons/Delivery.svg'.format(
+        os.environ.get('PYPE_STATICS_SERVER', '')
+    )

     db_con = DbConnector()

diff --git a/res/ftrack/action_icons/Delivery.svg b/res/ftrack/action_icons/Delivery.svg
new file mode 100644
index 0000000000..3380487c31
--- /dev/null
+++ b/res/ftrack/action_icons/Delivery.svg
@@ -0,0 +1,34 @@
+[... 34 lines of SVG markup for the Delivery icon ...]

From cbbb074a25c929582a26807691bf00a27c7325a4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 8 Jan 2020 17:24:35 +0100
Subject: [PATCH 159/195] fix source filepath

---
 pype/ftrack/actions/action_delivery.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index 572a9bc8e0..ad3d6ef6cc 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -228,7 +228,6 @@ class Delivery(BaseAction):
         self.db_con.install()
         self.db_con.Session["AVALON_PROJECT"] = project_name

-        components = []
         repres_to_deliver = []
         for entity in entities:
             asset = entity["asset"]
@@ -275,7 +274,6 @@ class Delivery(BaseAction):
                 repre = repres_by_name.get(comp_name)
                 repres_to_deliver.append(repre)

-        src_dst_files = {}
         anatomy = Anatomy(project_name)
         for repre in repres_to_deliver:
             # Get destination repre path
@@ -302,6 +300,8 @@ class Delivery(BaseAction):

         self.db_con.uninstall()

+        return True
+
     def process_single_file(
         self, repre_path, anatomy, anatomy_name, anatomy_data
     ):
@@ -378,9 +378,12 @@ class Delivery(BaseAction):
         for index in src_collection.indexes:
             src_padding = src_collection.format("{padding}") % index
             src_file_name = "{}{}{}".format(src_head, src_padding, src_tail)
+            src = os.path.normpath(
+                os.path.join(dir_path, src_file_name)
+            )

             dst_padding = dst_collection.format("{padding}") % index
-            dst_file_name = "{}{}{}".format(dst_head, dst_padding, dst_tail)
+            dst = "{}{}{}".format(dst_head, dst_padding, dst_tail)

             self.copy_file(src, dst)

From 5e31299c2441ba57c323245b067062279817f24d Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Wed, 8 Jan 2020 17:38:03 +0100
Subject: [PATCH 160/195] add resolution and fps to anatomy keys

---
 pype/plugins/global/publish/collect_templates.py | 5 ++++-
 pype/plugins/global/publish/extract_review.py    | 4 +++-
 pype/plugins/global/publish/integrate_new.py     | 5 ++++-
 3 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index 48623eec22..d57d416dea 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -76,7 +76,10 @@ class CollectTemplates(pyblish.api.InstancePlugin):
             "subset": subset_name,
             "version": version_number,
             "hierarchy": hierarchy.replace("\\", "/"),
-            "representation": "TEMP"}
+            "representation": "TEMP",
+            "resolution_width": instance.data.get("resolutionWidth", ""),
+            "resolution_height": instance.data.get("resolutionHeight", ""),
+            "fps": str(instance.data.get("fps", ""))}}

         instance.data["template"] = template
         instance.data["assumedTemplateData"] = template_data
diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index f621df0c66..c75bb488a2 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -249,7 +249,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
                         'files': repr_file,
                         "tags": new_tags,
                         "outputName": name,
-                        "codec": codec_args
+                        "codec": codec_args,
+                        "resolutionWidth": resolution_width,
+                        "resolutionHeight": resolution_height
                     })
                 if repre_new.get('preview'):
                     repre_new.pop("preview")
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index faade613f2..ee18347703 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -267,7 +267,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "family": instance.data['family'],
                 "subset": subset["name"],
                 "version": int(version["name"]),
-                "hierarchy": hierarchy}
+                "hierarchy": hierarchy,
+                "resolution_width": repre.get("resolutionWidth", ""),
+                "resolution_height": repre.get("resolutionHeight", ""),
+                "fps": str(instance.data.get("fps", ""))}

         files = repre['files']
         if repre.get('stagingDir'):

From cfd9823abc0c8109f4c5e18e2a6f1a55e2977047 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 8 Jan 2020 17:41:35 +0100
Subject: [PATCH 161/195] replaced {frame} with <>

---
 pype/ftrack/actions/action_delivery.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index ad3d6ef6cc..22fb15198b 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -354,7 +354,7 @@ class Delivery(BaseAction):
             # TODO log error!
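# (Aside: a minimal sketch of the placeholder trick this change relies on.
# The template and names below are invented, not the project's anatomy.
# The frame key is filled with a sentinel that no real value contains, so
# the formatted path can later be split into head/tail around the frame.)
template = "{root}/delivery/{subset}.{frame}.exr"
path = template.format(root="/deliver", subset="renderMain", frame="<FRAME>")
head, tail = path.split("<FRAME>")
assert (head, tail) == ("/deliver/delivery/renderMain.", ".exr")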
return - anatomy_data["frame"] = "{frame}" + anatomy_data["frame"] = "<>" anatomy_filled = anatomy.format(anatomy_data) delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) if not delivery_path: @@ -362,7 +362,7 @@ class Delivery(BaseAction): return delivery_folder = os.path.dirname(delivery_path) - dst_head, dst_tail = delivery_path.split("{frame}") + dst_head, dst_tail = delivery_path.split("<>") dst_padding = src_collection.padding dst_collection = clique.Collection( head=dst_head, From ccd491d99e436c2d9ea91a4b58b0f9115ddb2f19 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 8 Jan 2020 18:24:35 +0100 Subject: [PATCH 162/195] add remapping from mounted to network path to render publish job --- pype/plugins/global/publish/submit_publish_job.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 2a254b015c..9c72ece73c 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -21,6 +21,12 @@ def _get_script(): if module_path.endswith(".pyc"): module_path = module_path[:-len(".pyc")] + ".py" + module_path = os.path.normpath(module_path) + mount_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_MOUNT']) + network_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_PATH']) + + module_path = module_path.replace(mount_root, network_root) + return module_path @@ -164,6 +170,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): output_dir = instance.data["outputDir"] metadata_path = os.path.join(output_dir, metadata_filename) + metadata_path = os.path.normpath(metadata_path) + mount_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_MOUNT']) + network_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_PATH']) + + metadata_path = metadata_path.replace(mount_root, network_root) + # Generate the payload for Deadline submission payload = { "JobInfo": { From 3cf559afba5058eae3e96cbb1d873e1b7403affe Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 19:21:15 +0100 Subject: [PATCH 163/195] better reporting and logging --- pype/ftrack/actions/action_delivery.py | 144 +++++++++++++++++++++---- 1 file changed, 121 insertions(+), 23 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index 22fb15198b..e698c371e1 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -1,9 +1,12 @@ import os import copy import shutil +import collections +import string import clique from bson.objectid import ObjectId + from avalon import pipeline from avalon.vendor import filelink from avalon.tools.libraryloader.io_nonsingleton import DbConnector @@ -162,10 +165,17 @@ class Delivery(BaseAction): "type": "label" }) + items.append({ + "type": "label", + "value": ( + "NOTE: It is possible to replace `root` key in anatomy." + ) + }) + items.append({ "type": "text", "name": "__location_path__", - "empty_text": "Type location path here..." 
+ "empty_text": "Type location path here...(Optional)" }) items.append(item_splitter) @@ -199,6 +209,8 @@ class Delivery(BaseAction): if "values" not in event["data"]: return + self.report_items = collections.defaultdict(list) + values = event["data"]["values"] skipped = values.pop("__skipped__") if skipped: @@ -214,7 +226,10 @@ class Delivery(BaseAction): component_names.append(key) if not component_names: - return None + return { + "success": True, + "message": "Not selected components to deliver." + } location_path = os.path.normpath(location_path.strip()) if location_path and not os.path.exists(location_path): @@ -236,14 +251,24 @@ class Delivery(BaseAction): parent = asset["parent"] parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) - if not parent_mongo_id: - # TODO log error (much better) - self.log.warning(( - "Seems like entity <{}> is not synchronized to avalon" - ).format(parent["name"])) - continue + if parent_mongo_id: + parent_mongo_id = ObjectId(parent_mongo_id) + else: + asset_ent = self.db_con.find_one({ + "type": "asset", + "data.ftrackId": parent["id"] + }) + if not asset_ent: + ent_path = "/".join( + [ent["name"] for ent in parent["link"]] + ) + msg = "Not synchronized entities to avalon" + self.report_items[msg].append(ent_path) + self.log.warning("{} <{}>".format(msg, ent_path)) + continue + + parent_mongo_id = asset_ent["_id"] - parent_mongo_id = ObjectId(parent_mongo_id) subset_ent = self.db_con.find_one({ "type": "subset", "parent": parent_mongo_id, @@ -283,6 +308,50 @@ class Delivery(BaseAction): else: anatomy_data["root"] = pipeline.registered_root() + anatomy_filled = anatomy.format(anatomy_data) + test_path = ( + anatomy_filled + .get("delivery", {}) + .get(anatomy_name) + ) + + if not test_path: + msg = ( + "Missing keys in Representation's context" + " for anatomy template \"{}\"." + ).format(anatomy_name) + + all_anatomies = anatomy.format_all(anatomy_data) + result = None + for anatomies in all_anatomies.values(): + for key, temp in anatomies.get("delivery", {}).items(): + if key != anatomy_name: + continue + + result = temp + break + + # TODO log error! - missing keys in anatomy + if result: + missing_keys = [ + key[1] for key in string.Formatter().parse(result) + if key[1] is not None + ] + else: + missing_keys = ["unknown"] + + keys = ", ".join(missing_keys) + sub_msg = ( + "Representation: {}
<br>- Missing keys: \"{}\"<br>
" + ).format(str(repre["_id"]), keys) + self.report_items[msg].append(sub_msg) + self.log.warning( + "{} Representation: \"{}\" Filled: <{}>".format( + msg, str(repre["_id"]), str(result) + ) + ) + continue + # Get source repre path repre_path = self.path_from_represenation(repre) # TODO add backup solution where root of path from component @@ -300,17 +369,13 @@ class Delivery(BaseAction): self.db_con.uninstall() - return True + return self.report() def process_single_file( self, repre_path, anatomy, anatomy_name, anatomy_data ): anatomy_filled = anatomy.format(anatomy_data) - delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) - if not delivery_path: - # TODO log error! - missing keys in anatomy - return - + delivery_path = anatomy_filled["delivery"][anatomy_name] delivery_folder = os.path.dirname(delivery_path) if not os.path.exists(delivery_folder): os.makedirs(delivery_folder) @@ -321,9 +386,6 @@ class Delivery(BaseAction): self, repre_path, anatomy, anatomy_name, anatomy_data ): dir_path, file_name = os.path.split(repre_path) - if not os.path.exists(dir_path): - # TODO log if folder don't exist - return base_name, ext = os.path.splitext(file_name) file_name_items = None @@ -334,7 +396,9 @@ class Delivery(BaseAction): file_name_items = base_name.split("%") if not file_name_items: - # TODO log if file does not exists + msg = "Source file was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) return src_collections, remainder = clique.assemble(os.listdir(dir_path)) @@ -352,15 +416,15 @@ class Delivery(BaseAction): if src_collection is None: # TODO log error! + msg = "Source collection of files was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) return anatomy_data["frame"] = "<>" anatomy_filled = anatomy.format(anatomy_data) - delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) - if not delivery_path: - # TODO log error! - missing keys in anatomy - return + delivery_path = anatomy_filled["delivery"][anatomy_name] delivery_folder = os.path.dirname(delivery_path) dst_head, dst_tail = delivery_path.split("<>") dst_padding = src_collection.padding @@ -418,6 +482,40 @@ class Delivery(BaseAction): except OSError: shutil.copyfile(src_path, dst_path) + def report(self): + items = [] + title = "Delivery report" + for msg, _items in self.report_items.items(): + if not _items: + continue + + if items: + items.append({"type": "label", "value": "---"}) + + items.append({ + "type": "label", + "value": "# {}".format(msg) + }) + if isinstance(_items, str): + _items = [_items] + items.append({ + "type": "label", + "value": '
<p>{}</p>'.format("<br>
".join(_items)) + }) + + if not items: + return { + "success": True, + "message": "Delivery Finished" + } + + return { + "items": items, + "title": title, + "success": False, + "message": "Delivery Finished" + } + def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' From bf24580b6f87ded4672661fb055a85ba92fd8b78 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 19:31:58 +0100 Subject: [PATCH 164/195] fix root path --- pype/ftrack/actions/action_delivery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index e698c371e1..9edb7a5964 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -171,7 +171,7 @@ class Delivery(BaseAction): "NOTE: It is possible to replace `root` key in anatomy." ) }) - + items.append({ "type": "text", "name": "__location_path__", @@ -306,7 +306,7 @@ class Delivery(BaseAction): if location_path: anatomy_data["root"] = location_path else: - anatomy_data["root"] = pipeline.registered_root() + anatomy_data["root"] = os.environ.get("AVALON_PROJECTS") or "" anatomy_filled = anatomy.format(anatomy_data) test_path = ( From e6dc7c29a3dde61a8d27c03a862ef2dfce7a71c7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 8 Jan 2020 22:13:48 +0100 Subject: [PATCH 165/195] feat(): --- .../global/publish/collect_filesequences.py | 249 +++++++++++++----- .../global/publish/submit_publish_job.py | 13 + 2 files changed, 192 insertions(+), 70 deletions(-) diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py index 6a59f5dffc..1214657856 100644 --- a/pype/plugins/global/publish/collect_filesequences.py +++ b/pype/plugins/global/publish/collect_filesequences.py @@ -54,10 +54,6 @@ def collect(root, patterns=[pattern], minimum_items=1) - # Ignore any remainders - if remainder: - print("Skipping remainder {}".format(remainder)) - # Exclude any frames outside start and end frame. 
for collection in collections: for index in list(collection.indexes): @@ -71,7 +67,7 @@ def collect(root, # Keep only collections that have at least a single frame collections = [c for c in collections if c.indexes] - return collections + return collections, remainder class CollectRenderedFrames(pyblish.api.ContextPlugin): @@ -119,8 +115,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): try: data = json.load(f) except Exception as exc: - self.log.error("Error loading json: " - "{} - Exception: {}".format(path, exc)) + self.log.error( + "Error loading json: " + "{} - Exception: {}".format(path, exc) + ) raise cwd = os.path.dirname(path) @@ -156,7 +154,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): resolution_height = instance.get("resolutionHeight", 1080) lut_path = instance.get("lutPath", None) - else: # Search in directory data = dict() @@ -167,14 +164,17 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): if regex: self.log.info("Using regex: {}".format(regex)) - collections = collect(root=root, - regex=regex, - exclude_regex=data.get("exclude_regex"), - frame_start=data.get("frameStart"), - frame_end=data.get("frameEnd")) + collections, remainder = collect( + root=root, + regex=regex, + exclude_regex=data.get("exclude_regex"), + frame_start=data.get("frameStart"), + frame_end=data.get("frameEnd"), + ) self.log.info("Found collections: {}".format(collections)) + """ if data.get("subset"): # If subset is provided for this json then it must be a single # collection. @@ -182,81 +182,190 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin): self.log.error("Forced subset can only work with a single " "found sequence") raise RuntimeError("Invalid sequence") + """ fps = data.get("fps", 25) + if data.get("user"): + context.data["user"] = data["user"] + # Get family from the data families = data.get("families", ["render"]) if "render" not in families: families.append("render") if "ftrack" not in families: families.append("ftrack") - if "review" not in families: - families.append("review") if "write" in instance_family: families.append("write") - for collection in collections: - instance = context.create_instance(str(collection)) - self.log.info("Collection: %s" % list(collection)) + if data.get("attachTo"): + # we need to attach found collections to existing + # subset version as review represenation. 
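# (Aside: a guessed illustration of the "attachTo" metadata this branch
# consumes; the shape is inferred from the surrounding code and the values
# are invented. Each entry yields a review-family instance whose
# representations attach to an existing subset version.)
data = {
    "attachTo": [
        {"subset": "renderMain", "version": 4, "family": "render"}
    ]
}
for attach in data["attachTo"]:
    print("Attaching render {}:v{}".format(attach["subset"], attach["version"]))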
- # Ensure each instance gets a unique reference to the data + for attach in data.get("attachTo"): + self.log.info( + "Attaching render {}:v{}".format( + attach["subset"], attach["version"])) + instance = context.create_instance( + attach["subset"]) + instance.data.update( + { + "name": attach["subset"], + "version": attach["version"], + "family": 'review', + "families": ['review', 'ftrack'], + "asset": data.get( + "asset", api.Session["AVALON_ASSET"]), + "stagingDir": root, + "frameStart": data.get("frameStart"), + "frameEnd": data.get("frameEnd"), + "fps": fps, + "source": data.get("source", ""), + "pixelAspect": pixel_aspect + }) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + for collection in collections: + self.log.info( + " - adding representation: {}".format( + str(collection)) + ) + ext = collection.tail.lstrip(".") + + representation = { + "name": ext, + "ext": "{}".format(ext), + "files": list(collection), + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append( + representation) + + elif data.get("subset"): + # if we have subset - add all collections and known + # reminder as representations + + self.log.info( + "Adding representations to subset {}".format( + data.get("subset"))) + + instance = context.create_instance(data.get("subset")) data = copy.deepcopy(data) - # If no subset provided, get it from collection's head - subset = data.get("subset", collection.head.rstrip("_. ")) - - # If no start or end frame provided, get it from collection - indices = list(collection.indexes) - start = data.get("frameStart", indices[0]) - end = data.get("frameEnd", indices[-1]) - - self.log.debug("Collected pixel_aspect:\n" - "{}".format(pixel_aspect)) - self.log.debug("type pixel_aspect:\n" - "{}".format(type(pixel_aspect))) - - # root = os.path.normpath(root) - # self.log.info("Source: {}}".format(data.get("source", ""))) - - ext = list(collection)[0].split('.')[-1] - - instance.data.update({ - "name": str(collection), - "family": families[0], # backwards compatibility / pyblish - "families": list(families), - "subset": subset, - "asset": data.get("asset", api.Session["AVALON_ASSET"]), - "stagingDir": root, - "frameStart": start, - "frameEnd": end, - "fps": fps, - "source": data.get('source', ''), - "pixelAspect": pixel_aspect, - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height - }) - if lut_path: - instance.data.update({"lutPath": lut_path}) - instance.append(collection) - instance.context.data['fps'] = fps + instance.data.update( + { + "name": data.get("subset"), + "family": families[0], + "families": list(families), + "subset": data.get("subset"), + "asset": data.get( + "asset", api.Session["AVALON_ASSET"]), + "stagingDir": root, + "frameStart": data.get("frameStart"), + "frameEnd": data.get("frameEnd"), + "fps": fps, + "source": data.get("source", ""), + "pixelAspect": pixel_aspect, + } + ) if "representations" not in instance.data: instance.data["representations"] = [] - representation = { - 'name': ext, - 'ext': '{}'.format(ext), - 'files': list(collection), - "stagingDir": root, - "anatomy_template": "render", - "fps": fps, - "tags": ['review'] - } - instance.data["representations"].append(representation) + for collection in collections: + self.log.info(" - {}".format(str(collection))) - if data.get('user'): - context.data["user"] = data['user'] + ext = collection.tail.lstrip(".") - self.log.debug("Collected instance:\n" - 
"{}".format(pformat(instance.data))) + representation = { + "name": ext, + "ext": "{}".format(ext), + "files": list(collection), + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append( + representation) + + # process reminders + for rem in remainder: + # add only known types to representation + if rem.split(".")[-1] in ['mov', 'jpg', 'mp4']: + self.log.info(" . {}".format(rem)) + representation = { + "name": rem.split(".")[-1], + "ext": "{}".format(rem.split(".")[-1]), + "files": rem, + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append( + representation) + + else: + # we have no subset so we take every collection and create one + # from it + for collection in collections: + instance = context.create_instance(str(collection)) + self.log.info("Creating subset from: %s" % str(collection)) + + # Ensure each instance gets a unique reference to the data + data = copy.deepcopy(data) + + # If no subset provided, get it from collection's head + subset = data.get("subset", collection.head.rstrip("_. ")) + + # If no start or end frame provided, get it from collection + indices = list(collection.indexes) + start = data.get("frameStart", indices[0]) + end = data.get("frameEnd", indices[-1]) + + ext = list(collection)[0].split(".")[-1] + + if "review" not in families: + families.append("review") + + instance.data.update( + { + "name": str(collection), + "family": families[0], # backwards compatibility + "families": list(families), + "subset": subset, + "asset": data.get( + "asset", api.Session["AVALON_ASSET"]), + "stagingDir": root, + "frameStart": start, + "frameEnd": end, + "fps": fps, + "source": data.get("source", ""), + "pixelAspect": pixel_aspect, + } + ) + if lut_path: + instance.data.update({"lutPath": lut_path}) + + instance.append(collection) + instance.context.data["fps"] = fps + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": "{}".format(ext), + "files": list(collection), + "stagingDir": root, + "anatomy_template": "render", + "fps": fps, + "tags": ["review"], + } + instance.data["representations"].append(representation) diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 2a254b015c..e7d5fe3147 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -282,6 +282,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): relative_path = os.path.relpath(source, api.registered_root()) source = os.path.join("{root}", relative_path).replace("\\", "/") + # find subsets and version to attach render to + attach_to = instance.data.get("attachTo") + attach_subset_versions = [] + if attach_to: + for subset in attach_to: + for instance in context: + if instance.data["subset"] != subset["subset"]: + continue + attach_subset_versions.append( + {"version": instance.data["version"], + "subset": subset["subset"], + "family": subset["family"]}) + # Write metadata for publish job metadata = { "asset": asset, From b2dfb6c95b77bf327291eccc6b50e9937e4c71a7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 9 Jan 2020 10:36:35 +0100 Subject: [PATCH 166/195] be specific about task custom attributes to avoid asset version's cust attrs --- pype/ftrack/events/event_sync_to_avalon.py | 11 +++++++---- pype/ftrack/lib/avalon_sync.py | 4 
++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 606866aba2..91355c6068 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -1438,9 +1438,11 @@ class SyncToAvalonEvent(BaseEvent): if attr["entity_type"] != ent_info["entityType"]: continue - if ent_info["entityType"] != "show": - if attr["object_type_id"] != ent_info["objectTypeId"]: - continue + if ( + ent_info["entityType"] == "task" and + attr["object_type_id"] != ent_info["objectTypeId"] + ): + continue configuration_id = attr["id"] entity_type_conf_ids[entity_type] = configuration_id @@ -1712,7 +1714,8 @@ class SyncToAvalonEvent(BaseEvent): if ca_ent_type == "show": cust_attrs_by_obj_id[ca_ent_type][key] = cust_attr - else: + + elif ca_ent_type == "task": obj_id = cust_attr["object_type_id"] cust_attrs_by_obj_id[obj_id][key] = cust_attr diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 064ea1adb8..5839d36e64 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -699,7 +699,7 @@ class SyncEntitiesFactory: if ca_ent_type == "show": avalon_attrs[ca_ent_type][key] = cust_attr["default"] avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"] - else: + elif ca_ent_type == "task": obj_id = cust_attr["object_type_id"] avalon_attrs[obj_id][key] = cust_attr["default"] avalon_attrs_ca_id[obj_id][key] = cust_attr["id"] @@ -708,7 +708,7 @@ class SyncEntitiesFactory: if ca_ent_type == "show": attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"] attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"] - else: + elif ca_ent_type == "task": obj_id = cust_attr["object_type_id"] attrs_per_entity_type[obj_id][key] = cust_attr["default"] attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"] From 64a0360ce90a699d86c4ee166c36268f9857dae8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 9 Jan 2020 11:08:35 +0100 Subject: [PATCH 167/195] fix(global): letter box not created properly --- pype/plugins/global/publish/extract_review.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 28eb0289fa..4eb7fa16ed 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -180,7 +180,11 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpet_height = int( resolution_height * pixel_aspect) else: - lb /= scale_factor + # TODO: it might still be failing in some cases + if resolution_ratio != delivery_ratio: + lb /= scale_factor + else: + lb /= pixel_aspect output_args.append( "-filter:v scale={0}x{1}:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{2})))/2):iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black".format(ffmpet_width, ffmpet_height, lb)) From 69015fb7fc08970c8a9619466556eb02f8a76ab7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 9 Jan 2020 11:15:57 +0100 Subject: [PATCH 168/195] fix(nuke): updating nuke.lib and review data mov --- pype/nuke/lib.py | 121 ++++++++++++------ .../nuke/publish/extract_review_data_mov.py | 1 - 2 files changed, 81 insertions(+), 41 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index c468343545..9ded8b75d0 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1205,7 +1205,7 @@ class Exporter_review: Args: klass (pyblish.plugin): 
pyblish plugin parent - instance (pyblish.context.instance): + instance (pyblish.instance): instance of pyblish context """ _temp_nodes = [] @@ -1298,6 +1298,11 @@ class Exporter_review: return ipn + def clean_nodes(self): + for node in self._temp_nodes: + nuke.delete(node) + self.log.info("Deleted nodes...") + class Exporter_review_lut(Exporter_review): """ @@ -1305,6 +1310,7 @@ class Exporter_review_lut(Exporter_review): Args: klass (pyblish.plugin): pyblish plugin parent + instance (pyblish.instance): instance of pyblish context """ @@ -1319,6 +1325,12 @@ class Exporter_review_lut(Exporter_review): # initialize parent class Exporter_review.__init__(self, klass, instance) + # deal with now lut defined in viewer lut + if hasattr(klass, "viewer_lut_raw"): + self.viewer_lut_raw = klass.viewer_lut_raw + else: + self.viewer_lut_raw = False + self.name = name or "baked_lut" self.ext = ext or "cube" self.cube_size = cube_size or 32 @@ -1331,7 +1343,8 @@ class Exporter_review_lut(Exporter_review): self.log.info("File info was set...") self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") + self.path = os.path.join( + self.staging_dir, self.file).replace("\\", "/") def generate_lut(self): # ---------- start nodes creation @@ -1353,13 +1366,14 @@ class Exporter_review_lut(Exporter_review): self.previous_node = ipn self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - # OCIODisplay - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + if not self.viewer_lut_raw: + # OCIODisplay + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) # GenerateLUT gen_lut_node = nuke.createNode("GenerateLUT") @@ -1388,9 +1402,7 @@ class Exporter_review_lut(Exporter_review): self.log.debug("Representation... `{}`".format(self.data)) # ---------- Clean up - for node in self._temp_nodes: - nuke.delete(node) - self.log.info("Deleted nodes...") + self.clean_nodes() return self.data @@ -1401,7 +1413,7 @@ class Exporter_review_mov(Exporter_review): Args: klass (pyblish.plugin): pyblish plugin parent - + instance (pyblish.instance): instance of pyblish context """ def __init__(self, @@ -1419,6 +1431,12 @@ class Exporter_review_mov(Exporter_review): else: self.nodes = {} + # deal with now lut defined in viewer lut + if hasattr(klass, "viewer_lut_raw"): + self.viewer_lut_raw = klass.viewer_lut_raw + else: + self.viewer_lut_raw = False + self.name = name or "baked" self.ext = ext or "mov" @@ -1428,7 +1446,31 @@ class Exporter_review_mov(Exporter_review): self.log.info("File info was set...") self.file = self.fhead + self.name + ".{}".format(self.ext) - self.path = os.path.join(self.staging_dir, self.file).replace("\\", "/") + self.path = os.path.join( + self.staging_dir, self.file).replace("\\", "/") + + def render(self, render_node_name): + self.log.info("Rendering... ") + # Render Write node + nuke.execute( + render_node_name, + int(self.first_frame), + int(self.last_frame)) + + self.log.info("Rendered...") + + def save_file(self): + with anlib.maintained_selection(): + self.log.info("Saving nodes as file... 
") + # select temp nodes + anlib.select_nodes(self._temp_nodes) + # create nk path + path = os.path.splitext(self.path)[0] + ".nk" + # save file to the path + nuke.nodeCopy(path) + + self.log.info("Nodes exported...") + return path def generate_mov(self, farm=False): # ---------- start nodes creation @@ -1454,13 +1496,14 @@ class Exporter_review_mov(Exporter_review): self.previous_node = ipn self.log.debug("ViewProcess... `{}`".format(self._temp_nodes)) - # OCIODisplay node - dag_node = nuke.createNode("OCIODisplay") - # connect - dag_node.setInput(0, self.previous_node) - self._temp_nodes.append(dag_node) - self.previous_node = dag_node - self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) + if not self.viewer_lut_raw: + # OCIODisplay node + dag_node = nuke.createNode("OCIODisplay") + # connect + dag_node.setInput(0, self.previous_node) + self._temp_nodes.append(dag_node) + self.previous_node = dag_node + self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes)) # Write node write_node = nuke.createNode("Write") @@ -1476,28 +1519,26 @@ class Exporter_review_mov(Exporter_review): # ---------- end nodes creation - if not farm: - self.log.info("Rendering... ") - # Render Write node - nuke.execute( - write_node.name(), - int(self.first_frame), - int(self.last_frame)) - - self.log.info("Rendered...") - - # ---------- generate representation data - self.get_representation_data( - tags=["review", "delete"], - range=True - ) + # ---------- render or save to nk + if farm: + path_nk = self.save_file() + self.data.update({ + "bakeScriptPath": path_nk, + "bakeWriteNodeName": write_node.name(), + "bakeRenderPath": self.path + }) + else: + self.render(write_node.name()) + # ---------- generate representation data + self.get_representation_data( + tags=["review", "delete"], + range=True + ) self.log.debug("Representation... 
`{}`".format(self.data)) - ---------- Clean up - for node in self._temp_nodes: - nuke.delete(node) - self.log.info("Deleted nodes...") + #---------- Clean up + self.clean_nodes() return self.data diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 585bd3f108..2208f8fa31 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -1,5 +1,4 @@ import os -import nuke import pyblish.api from avalon.nuke import lib as anlib from pype.nuke import lib as pnlib From 235079038965f1f3e038b60487e07447ed0bf039 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 9 Jan 2020 12:02:04 +0100 Subject: [PATCH 169/195] remove obsolete logge --- pype/plugins/nuke/create/create_read.py | 3 --- pype/plugins/nuke/create/create_write.py | 4 ---- 2 files changed, 7 deletions(-) diff --git a/pype/plugins/nuke/create/create_read.py b/pype/plugins/nuke/create/create_read.py index 87bb45a6ad..1aa7e68746 100644 --- a/pype/plugins/nuke/create/create_read.py +++ b/pype/plugins/nuke/create/create_read.py @@ -6,9 +6,6 @@ from pype import api as pype import nuke -log = pype.Logger().get_logger(__name__, "nuke") - - class CrateRead(avalon.nuke.Creator): # change this to template preset name = "ReadCopy" diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py index 042826d4d9..f522c50511 100644 --- a/pype/plugins/nuke/create/create_write.py +++ b/pype/plugins/nuke/create/create_write.py @@ -7,10 +7,6 @@ from pypeapp import config import nuke - -log = pype.Logger().get_logger(__name__, "nuke") - - class CreateWriteRender(plugin.PypeCreator): # change this to template preset name = "WriteRender" From 3a4a6782abdf74e9278c029c0291abd889b1aa74 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 9 Jan 2020 15:07:44 +0100 Subject: [PATCH 170/195] pep8 class names --- pype/nuke/lib.py | 10 +++++----- pype/plugins/nuke/publish/extract_review_data_lut.py | 2 +- pype/plugins/nuke/publish/extract_review_data_mov.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 9ded8b75d0..4faea1da36 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1199,7 +1199,7 @@ class BuildWorkfile(WorkfileSettings): self.ypos -= (self.ypos_size * multiply) + self.ypos_gap -class Exporter_review: +class ExporterReview: """ Base class object for generating review data from Nuke @@ -1304,7 +1304,7 @@ class Exporter_review: self.log.info("Deleted nodes...") -class Exporter_review_lut(Exporter_review): +class ExporterReviewLut(ExporterReview): """ Generator object for review lut from Nuke @@ -1323,7 +1323,7 @@ class Exporter_review_lut(Exporter_review): lut_size=None, lut_style=None): # initialize parent class - Exporter_review.__init__(self, klass, instance) + ExporterReview.__init__(self, klass, instance) # deal with now lut defined in viewer lut if hasattr(klass, "viewer_lut_raw"): @@ -1407,7 +1407,7 @@ class Exporter_review_lut(Exporter_review): return self.data -class Exporter_review_mov(Exporter_review): +class ExporterReviewMov(ExporterReview): """ Metaclass for generating review mov files @@ -1423,7 +1423,7 @@ class Exporter_review_mov(Exporter_review): ext=None, ): # initialize parent class - Exporter_review.__init__(self, klass, instance) + ExporterReview.__init__(self, klass, instance) # passing presets for nodes to self if hasattr(klass, "nodes"): diff --git 
a/pype/plugins/nuke/publish/extract_review_data_lut.py b/pype/plugins/nuke/publish/extract_review_data_lut.py index f5fc3e59db..4373309363 100644 --- a/pype/plugins/nuke/publish/extract_review_data_lut.py +++ b/pype/plugins/nuke/publish/extract_review_data_lut.py @@ -39,7 +39,7 @@ class ExtractReviewDataLut(pype.api.Extractor): # generate data with anlib.maintained_selection(): - exporter = pnlib.Exporter_review_lut( + exporter = pnlib.ExporterReviewLut( self, instance ) data = exporter.generate_lut() diff --git a/pype/plugins/nuke/publish/extract_review_data_mov.py b/pype/plugins/nuke/publish/extract_review_data_mov.py index 2208f8fa31..333774bcd7 100644 --- a/pype/plugins/nuke/publish/extract_review_data_mov.py +++ b/pype/plugins/nuke/publish/extract_review_data_mov.py @@ -39,7 +39,7 @@ class ExtractReviewDataMov(pype.api.Extractor): # generate data with anlib.maintained_selection(): - exporter = pnlib.Exporter_review_mov( + exporter = pnlib.ExporterReviewMov( self, instance) if "render.farm" in families: From 5ace134b646dfb3a756859984236807a9ddd47aa Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 9 Jan 2020 15:24:51 +0100 Subject: [PATCH 171/195] add pathlib path resolve --- pype/plugins/global/publish/integrate_new.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index faade613f2..9bfaf2e417 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -7,6 +7,7 @@ import errno import pyblish.api from avalon import api, io from avalon.vendor import filelink +from pathlib import Path # this is needed until speedcopy for linux is fixed if sys.platform == "win32": from speedcopy import copyfile @@ -468,8 +469,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: None """ - src = os.path.normpath(src) - dst = os.path.normpath(dst) + src = Path(src).resolve() + dst = Path(dst).resolve() self.log.debug("Copying file .. {} -> {}".format(src, dst)) dirname = os.path.dirname(dst) @@ -490,6 +491,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def hardlink_file(self, src, dst): dirname = os.path.dirname(dst) + src = Path(src).resolve() + dst = Path(dst).resolve() try: os.makedirs(dirname) except OSError as e: From b3321a92ee4c0b05df0bd3f08684fcd632696f80 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 9 Jan 2020 23:06:57 +0100 Subject: [PATCH 172/195] fix(global): pathlib changed to pathlib2 --- pype/plugins/global/publish/integrate_new.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 9bfaf2e417..c2812880c7 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -7,7 +7,7 @@ import errno import pyblish.api from avalon import api, io from avalon.vendor import filelink -from pathlib import Path +from pathlib2 import Path # this is needed until speedcopy for linux is fixed if sys.platform == "win32": from speedcopy import copyfile @@ -469,8 +469,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): Returns: None """ + src = Path(src).resolve() - dst = Path(dst).resolve() + drive, _path = os.path.splitdrive(dst) + unc = Path(drive).resolve() + dst = str(unc / _path) self.log.debug("Copying file .. 
{} -> {}".format(src, dst)) dirname = os.path.dirname(dst) From ce64e6fa0706f5db01ce147f510b34074d6936fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 00:30:20 +0000 Subject: [PATCH 173/195] fixing environment filtering --- pype/lib.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/lib.py b/pype/lib.py index 8772608b38..b19491adeb 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -18,13 +18,16 @@ def _subprocess(*args, **kwargs): """Convenience method for getting output errors for subprocess.""" # make sure environment contains only strings - filtered_env = {k: str(v) for k, v in os.environ.items()} + if not kwargs.get("env"): + filtered_env = {k: str(v) for k, v in os.environ.items()} + else: + filtered_env = {k: str(v) for k, v in kwargs.get("env").items()} # set overrides kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE) kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT) kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE) - kwargs['env'] = kwargs.get('env',filtered_env) + kwargs['env'] = filtered_env proc = subprocess.Popen(*args, **kwargs) From 9bc2f557a39efb7aa1ebefbdb7025ff87b8c7515 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 10 Jan 2020 11:40:29 +0100 Subject: [PATCH 174/195] added new entityType `appointment` to ignored entity types --- pype/ftrack/events/event_sync_to_avalon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py index 91355c6068..8d75d932f8 100644 --- a/pype/ftrack/events/event_sync_to_avalon.py +++ b/pype/ftrack/events/event_sync_to_avalon.py @@ -28,7 +28,7 @@ class SyncToAvalonEvent(BaseEvent): ignore_entTypes = [ "socialfeed", "socialnotification", "note", "assetversion", "job", "user", "reviewsessionobject", "timer", - "timelog", "auth_userrole" + "timelog", "auth_userrole", "appointment" ] ignore_ent_types = ["Milestone"] ignore_keys = ["statusid"] From 4bb66af2016951942f4cdc2c0ecd004c82681df2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 10 Jan 2020 11:40:53 +0100 Subject: [PATCH 175/195] added debug with project name to sync to avalon action --- pype/ftrack/lib/avalon_sync.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py index 5839d36e64..8cebd12a59 100644 --- a/pype/ftrack/lib/avalon_sync.py +++ b/pype/ftrack/lib/avalon_sync.py @@ -314,6 +314,9 @@ class SyncEntitiesFactory: self.log.warning(msg) return {"success": False, "message": msg} + self.log.debug(( + "*** Synchronization initialization started <{}>." 
+ ).format(project_full_name)) # Check if `avalon_mongo_id` custom attribute exist or is accessible if CustAttrIdKey not in ft_project["custom_attributes"]: items = [] From 77d71d4bf356f40ce2a06cf27899529e8df2613c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 10 Jan 2020 11:43:07 +0100 Subject: [PATCH 176/195] it is tried to set intent value on ftrack entity and do not crash pyblish in integrate_ftrack_api --- .../plugins/ftrack/publish/integrate_ftrack_api.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py index 337562c1f5..c51685f84d 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py @@ -188,14 +188,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): # Adding Custom Attributes for attr, val in assetversion_cust_attrs.items(): if attr in assetversion_entity["custom_attributes"]: - assetversion_entity["custom_attributes"][attr] = val - continue + try: + assetversion_entity["custom_attributes"][attr] = val + session.commit() + continue + except Exception: + session.rollback() self.log.warning(( "Custom Attrubute \"{0}\"" - " is not available for AssetVersion." - " Can't set it's value to: \"{1}\"" - ).format(attr, str(val))) + " is not available for AssetVersion <{1}>." + " Can't set it's value to: \"{2}\"" + ).format(attr, assetversion_entity["id"], str(val))) # Have to commit the version and asset, because location can't # determine the final location without. From d4bf25f01a823b042777730d6e09333223841656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:07:11 +0000 Subject: [PATCH 177/195] resolving `${TOKEN}` variables in PATH to env variables --- pype/plugins/maya/publish/collect_yeti_rig.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 7ab5649c0b..3b05e19fdb 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -140,9 +140,21 @@ class CollectYetiRig(pyblish.api.InstancePlugin): "atttribute'" % node) # Collect all texture files + # find all ${TOKEN} tokens and replace them with $TOKEN env. 
variable + env_re = re.compile(r"\$\{(\w+)\}") for texture in texture_filenames: files = [] + + matches = re.finditer(env_re, texture) + for m in matches: + try: + texture = texture.replace(m.group(), os.environ[m.group(1)]) + except KeyError: + msg = "Cannot find requested {} in environment".format(1) + self.log.error(msg) + raise RuntimeError(msg) + if os.path.isabs(texture): self.log.debug("Texture is absolute path, ignoring " "image search paths for: %s" % texture) From 9a7f36023b5f9f9d9a29ff4ae9a6c88c7a01069b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:16:41 +0000 Subject: [PATCH 178/195] fixed error message --- pype/plugins/maya/publish/collect_yeti_rig.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 3b05e19fdb..831bc5e0ca 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -151,7 +151,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin): try: texture = texture.replace(m.group(), os.environ[m.group(1)]) except KeyError: - msg = "Cannot find requested {} in environment".format(1) + msg = "Cannot find requested {} in environment".format( + m.group(1)) self.log.error(msg) raise RuntimeError(msg) From acdc0fed0cbb1463c6f0b354c92d293f9cc1f13f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:23:07 +0000 Subject: [PATCH 179/195] refactored to class method --- pype/plugins/maya/publish/collect_yeti_rig.py | 33 ++++++++++++------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 831bc5e0ca..39426ea623 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -119,6 +119,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin): texture_filenames = [] if image_search_paths: + # find all ${TOKEN} tokens and replace them with $TOKEN env. variable + image_search_paths = self._replace_tokens(image_search_paths) # TODO: Somehow this uses OS environment path separator, `:` vs `;` # Later on check whether this is pipeline OS cross-compatible. image_search_paths = [p for p in @@ -141,21 +143,11 @@ class CollectYetiRig(pyblish.api.InstancePlugin): # Collect all texture files # find all ${TOKEN} tokens and replace them with $TOKEN env. 
variable - env_re = re.compile(r"\$\{(\w+)\}") + texture_filenames = self._replace_tokens(texture_filenames) for texture in texture_filenames: files = [] - - matches = re.finditer(env_re, texture) - for m in matches: - try: - texture = texture.replace(m.group(), os.environ[m.group(1)]) - except KeyError: - msg = "Cannot find requested {} in environment".format( - m.group(1)) - self.log.error(msg) - raise RuntimeError(msg) - + if os.path.isabs(texture): self.log.debug("Texture is absolute path, ignoring " "image search paths for: %s" % texture) @@ -296,3 +288,20 @@ class CollectYetiRig(pyblish.api.InstancePlugin): collection, remainder = clique.assemble(files, patterns=pattern) return collection + + def _replace_tokens(self, strings): + env_re = re.compile(r"\$\{(\w+)\}") + + replaced = [] + for s in strings: + matches = re.finditer(env_re, s) + for m in matches: + try: + s = s.replace(m.group(), os.environ[m.group(1)]) + except KeyError: + msg = "Cannot find requested {} in environment".format( + m.group(1)) + self.log.error(msg) + raise RuntimeError(msg) + replaced.append(s) + return replaced From efd71c7ef72090f6f989d9b237dce53333c8f6ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:53:49 +0000 Subject: [PATCH 180/195] changed place where tokens are replaced for `image_search_path` --- pype/plugins/maya/publish/collect_yeti_rig.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 39426ea623..c743b2c00b 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -119,13 +119,15 @@ class CollectYetiRig(pyblish.api.InstancePlugin): texture_filenames = [] if image_search_paths: - # find all ${TOKEN} tokens and replace them with $TOKEN env. variable - image_search_paths = self._replace_tokens(image_search_paths) + # TODO: Somehow this uses OS environment path separator, `:` vs `;` # Later on check whether this is pipeline OS cross-compatible. image_search_paths = [p for p in image_search_paths.split(os.path.pathsep) if p] + # find all ${TOKEN} tokens and replace them with $TOKEN env. 
variable
+            image_search_paths = self._replace_tokens(image_search_paths)
+
         # List all related textures
         texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
         self.log.info("Found %i texture(s)" % len(texture_filenames))
@@ -147,7 +149,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
 
         for texture in texture_filenames:
             files = []
-
             if os.path.isabs(texture):
                 self.log.debug("Texture is absolute path, ignoring "
                                "image search paths for: %s" % texture)

From 59305a12106aa81ffc19e5b92a2b3eb8aafec2c5 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 10 Jan 2020 16:48:23 +0100
Subject: [PATCH 181/195] make sure template keys exist only when needed

---
 pype/plugins/global/publish/integrate_new.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index ee18347703..01dc58dc1f 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -267,10 +267,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                          "family": instance.data['family'],
                          "subset": subset["name"],
                          "version": int(version["name"]),
-                         "hierarchy": hierarchy,
-                         "resolution_width": repre.get("resolutionWidth", ""),
-                         "resolution_height": repre.get("resolutionHeight", ""),
-                         "fps": str(instance.data.get("fps", ""))}
+                         "hierarchy": hierarchy}
+
+            resolution_width = repre.get("resolutionWidth")
+            resolution_height = repre.get("resolutionHeight")
+            fps = instance.data.get("fps")
+
+
+            if resolution_width:
+                template_data["resolution_width"] = resolution_width
+            if resolution_height:
+                template_data["resolution_height"] = resolution_height
+            if fps:
+                template_data["fps"] = fps
 
             files = repre['files']
             if repre.get('stagingDir'):

From 791bb63f97f9a74c7520ff19ea2a4e8fcd9283d2 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 10 Jan 2020 18:11:33 +0100
Subject: [PATCH 182/195] collect templates fps fix

---
 pype/plugins/global/publish/collect_templates.py | 16 ++++++++++++----
 pype/plugins/global/publish/integrate_new.py     |  1 -
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py
index d57d416dea..e27af82595 100644
--- a/pype/plugins/global/publish/collect_templates.py
+++ b/pype/plugins/global/publish/collect_templates.py
@@ -76,10 +76,18 @@ class CollectTemplates(pyblish.api.InstancePlugin):
                 "subset": subset_name,
                 "version": version_number,
                 "hierarchy": hierarchy.replace("\\", "/"),
-                "representation": "TEMP",
-                "resolution_width": instance.data.get("resolutionWidth", ""),
-                "resolution_height": instance.data.get("resolutionHeight", ""),
-                "fps": str(instance.data.get("fps", ""))}}
+                "representation": "TEMP"}}
+
+        resolution_width = instance.data.get("resolutionWidth")
+        resolution_height = instance.data.get("resolutionHeight")
+        fps = instance.data.get("fps")
+
+        if resolution_width:
+            template_data["resolution_width"] = resolution_width
+        if resolution_height:
+            template_data["resolution_height"] = resolution_height
+        if fps:
+            template_data["fps"] = fps
 
         instance.data["template"] = template
         instance.data["assumedTemplateData"] = template_data
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 01dc58dc1f..8efec94013 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -273,7 +273,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             resolution_height = repre.get("resolutionHeight")
             fps = instance.data.get("fps")
 
-
            if resolution_width:
                template_data["resolution_width"] = resolution_width
            if resolution_height:
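
Both patches above follow the same rule: optional template keys such as resolution and fps must be left out of the template data entirely when the instance carries no real value, instead of being filled with empty strings that would leak into formatted paths. A minimal standalone sketch of the pattern (the template string and values here are illustrative, not pype's real Anatomy templates):

    template = "{project}/{hierarchy}/{subset}_v{version:0>3}"
    template_data = {
        "project": "alpha",
        "hierarchy": "assets/char",
        "subset": "modelMain",
        "version": 7,
    }

    # Only add the optional key when a real value exists; str.format()
    # ignores unused keys, so adding it conditionally is always safe.
    fps = None  # pretend instance.data.get("fps") returned nothing
    if fps:
        template_data["fps"] = fps

    print(template.format(**template_data))
    # -> alpha/assets/char/modelMain_v007
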
From 271a935ee754672d1b34592e86db7ca3b0f24360 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 11 Jan 2020 14:11:04 +0100
Subject: [PATCH 183/195] fixes to getting the path

---
 pype/ftrack/actions/action_delivery.py | 58 ++++++++++++++++----------
 1 file changed, 37 insertions(+), 21 deletions(-)

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index 9edb7a5964..afd20d12d1 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -231,14 +231,16 @@
                 "message": "Not selected components to deliver."
             }
 
-        location_path = os.path.normpath(location_path.strip())
-        if location_path and not os.path.exists(location_path):
-            return {
-                "success": False,
-                "message": (
-                    "Entered location path does not exists. \"{}\""
-                ).format(location_path)
-            }
+        location_path = location_path.strip()
+        if location_path:
+            location_path = os.path.normpath(location_path)
+            if not os.path.exists(location_path):
+                return {
+                    "success": False,
+                    "message": (
+                        "Entered location path does not exist. \"{}\""
+                    ).format(location_path)
+                }
 
         self.db_con.install()
         self.db_con.Session["AVALON_PROJECT"] = project_name
@@ -299,14 +301,16 @@
             repre = repres_by_name.get(comp_name)
             repres_to_deliver.append(repre)
 
+        if not location_path:
+            location_path = os.environ.get("AVALON_PROJECTS") or ""
+
+        self.log.debug(location_path)
+
         anatomy = Anatomy(project_name)
         for repre in repres_to_deliver:
             # Get destination repre path
             anatomy_data = copy.deepcopy(repre["context"])
-            if location_path:
-                anatomy_data["root"] = location_path
-            else:
-                anatomy_data["root"] = os.environ.get("AVALON_PROJECTS") or ""
+            anatomy_data["root"] = location_path
 
             anatomy_filled = anatomy.format(anatomy_data)
             test_path = (
@@ -353,11 +357,15 @@
                 continue
 
             # Get source repre path
+            frame = repre['context'].get('frame')
+
+            if frame:
+                repre["context"]["frame"] = len(str(frame)) * "#"
+
             repre_path = self.path_from_represenation(repre)
             # TODO add backup solution where root of path from component
             # is replaced with AVALON_PROJECTS root
-
-            if repre_path and os.path.exists(repre_path):
+            if not frame:
                 self.process_single_file(
                     repre_path, anatomy, anatomy_name, anatomy_data
                 )
@@ -385,7 +393,7 @@
     def process_sequence(
         self, repre_path, anatomy, anatomy_name, anatomy_data
     ):
-        dir_path, file_name = os.path.split(repre_path)
+        dir_path, file_name = os.path.split(str(repre_path))
         base_name, ext = os.path.splitext(file_name)
 
         file_name_items = None
@@ -421,12 +429,15 @@
                 self.log.warning("{} <{}>".format(msg, repre_path))
                 return
 
-        anatomy_data["frame"] = "<>"
+        frame_indicator = "@####@"
+
+        anatomy_data["frame"] = frame_indicator
         anatomy_filled = anatomy.format(anatomy_data)
         delivery_path = anatomy_filled["delivery"][anatomy_name]
+        self.log.debug(delivery_path)
 
         delivery_folder = os.path.dirname(delivery_path)
-        dst_head, dst_tail = delivery_path.split("<>")
+        dst_head, dst_tail = delivery_path.split(frame_indicator)
         dst_padding = src_collection.padding
         dst_collection = clique.Collection(
             head=dst_head,
@@ -469,10 +480,11 @@
                 # Template references unavailable data
                 return None
 
-        if os.path.exists(path):
-            return os.path.normpath(path)
+        return os.path.normpath(path)
 
     def copy_file(self, src_path, dst_path):
+        if os.path.exists(dst_path):
+            return
         try:
             filelink.create(
                 src_path,
@@ -496,11 +508,15 @@
                 "type": "label",
                 "value": "# {}".format(msg)
             })
-            if isinstance(_items, str):
+            if not isinstance(_items, (list, tuple)):
                 _items = [_items]
+            __items = []
+            for item in _items:
+                __items.append(str(item))
+
            items.append({
                "type": "label",
-                "value": '<p>{}</p>'.format("<br>".join(_items))
+                "value": '<p>{}</p>'.format("<br>".join(__items))
            })
 
        if not items:
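
The sequence branch above relies on a frame indicator that cannot occur in a real formatted path: the delivery path is rendered with the indicator in the frame slot and then split on it to recover the head and tail of the destination file sequence. A small sketch of that trick with the clique library (the paths, padding and frame range are made up for illustration):

    import clique

    frame_indicator = "@####@"
    delivery_path = "/deliveries/shot010/plate_v003.{}.exr".format(frame_indicator)

    dst_head, dst_tail = delivery_path.split(frame_indicator)
    dst_collection = clique.Collection(
        head=dst_head,
        tail=dst_tail,
        padding=4,
        indexes=set(range(1001, 1004)),
    )
    for path in dst_collection:
        print(path)
    # /deliveries/shot010/plate_v003.1001.exr ... plate_v003.1003.exr

A plain "#" would be a poor indicator because padded frame strings are themselves runs of "#" at this point; a deliberately unlikely token keeps the split unambiguous.
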
From cc4857a5d87a39430b3d0b72fb72e7a824621a41 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Sat, 11 Jan 2020 14:56:48 +0100
Subject: [PATCH 184/195] hotfix/pathlib in integration

---
 pype/plugins/global/publish/integrate_new.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c2812880c7..6e7a8d13a9 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -470,7 +470,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             None
         """
 
-        src = Path(src).resolve()
+        src = str(Path(src).resolve())
         drive, _path = os.path.splitdrive(dst)
         unc = Path(drive).resolve()
         dst = str(unc / _path)
@@ -495,7 +495,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     def hardlink_file(self, src, dst):
         dirname = os.path.dirname(dst)
         src = Path(src).resolve()
-        dst = Path(dst).resolve()
+        drive, _path = os.path.splitdrive(dst)
+        unc = Path(drive).resolve()
+        dst = str(unc / _path)
         try:
             os.makedirs(dirname)
         except OSError as e:

From fcde886e0af56a96d599e2e4556155c4a52f44ab Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Mon, 13 Jan 2020 09:42:03 +0100
Subject: [PATCH 185/195] hotfix: string conversion for pathlib path

---
 pype/plugins/global/publish/integrate_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index c2812880c7..c78e9c6442 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -470,7 +470,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             None
         """
 
-        src = Path(src).resolve()
+        src = str(Path(src).resolve())
         drive, _path = os.path.splitdrive(dst)
         unc = Path(drive).resolve()
         dst = str(unc / _path)

From c43ae7cb5f49de1db34584e312c6d83a5b781793 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 14 Jan 2020 15:43:26 +0100
Subject: [PATCH 186/195] allow exporting multiple arnold standins from a
 single scene.

---
 pype/plugins/maya/publish/collect_ass.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/pype/plugins/maya/publish/collect_ass.py b/pype/plugins/maya/publish/collect_ass.py
index c0174e7026..8e6691120a 100644
--- a/pype/plugins/maya/publish/collect_ass.py
+++ b/pype/plugins/maya/publish/collect_ass.py
@@ -21,15 +21,17 @@ class CollectAssData(pyblish.api.InstancePlugin):
         objsets = instance.data['setMembers']
 
         for objset in objsets:
+            objset = str(objset)
             members = cmds.sets(objset, query=True)
             if members is None:
                 self.log.warning("Skipped empty instance: \"%s\" " % objset)
                 continue
-            if objset == "content_SET":
+            if "content_SET" in objset:
                 instance.data['setMembers'] = members
+                self.log.debug('content members: {}'.format(members))
-            elif objset == "proxy_SET":
+            elif objset.startswith("proxy_SET"):
                 assert len(members) == 1, "You have multiple proxy meshes, please only use one"
                 instance.data['proxy'] = members
-
+                self.log.debug('proxy members: {}'.format(members))
         self.log.debug("data: {}".format(instance.data))
From d6b9ac36d50cc4175814d378298cb4a0fb2c5675 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Tue, 14 Jan 2020 15:50:48 +0100
Subject: [PATCH 187/195] rename creator and collector

---
 pype/plugins/blender/create/{submarine_model.py => create_model.py} | 0
 pype/plugins/blender/load/{submarine_model.py => load_model.py}     | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 rename pype/plugins/blender/create/{submarine_model.py => create_model.py} (100%)
 rename pype/plugins/blender/load/{submarine_model.py => load_model.py} (100%)

diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/create_model.py
similarity index 100%
rename from pype/plugins/blender/create/submarine_model.py
rename to pype/plugins/blender/create/create_model.py
diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/load_model.py
similarity index 100%
rename from pype/plugins/blender/load/submarine_model.py
rename to pype/plugins/blender/load/load_model.py

From 315dfe79a4a27a53bba7c32e663755dea68c74ae Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 15 Jan 2020 15:36:54 +0100
Subject: [PATCH 188/195] set ftrackId and entityType on the entity when
 integrate hierarchy ftrack creates one

---
 .../ftrack/publish/integrate_hierarchy_ftrack.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
index 1deff56d83..a33cf81c28 100644
--- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
+++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
@@ -85,6 +85,18 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                 type=entity_type, parent=parent
             )
+
+        if entity.entity_type.lower() not in ["task", "project"]:
+            filter = {
+                "type": "asset",
+                "name": entity_name
+            }
+            update_data = {
+                "data.ftrackId": entity["id"],
+                "data.entityType": entity.entity_type
+            }
+            io.update_one(filter, update_data)
+
         # self.log.info('entity: {}'.format(dict(entity)))
         # CUSTOM ATTRIBUTES
         custom_attributes = entity_data.get('custom_attributes', [])
From 1f96a74bac663d487d6257df689ee222fb00aed9 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 16 Jan 2020 14:30:26 +0100
Subject: [PATCH 189/195] remove added code of previous commit

---
 .../ftrack/publish/integrate_hierarchy_ftrack.py | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
index a33cf81c28..1deff56d83 100644
--- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
+++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
@@ -85,18 +85,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                 type=entity_type, parent=parent
             )
-
-        if entity.entity_type.lower() not in ["task", "project"]:
-            filter = {
-                "type": "asset",
-                "name": entity_name
-            }
-            update_data = {
-                "data.ftrackId": entity["id"],
-                "data.entityType": entity.entity_type
-            }
-            io.update_one(filter, update_data)
-
         # self.log.info('entity: {}'.format(dict(entity)))
         # CUSTOM ATTRIBUTES
         custom_attributes = entity_data.get('custom_attributes', [])

From 415ec47f9db4746ddb206b9c0d70ed4d95014ccc Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 16 Jan 2020 14:31:05 +0100
Subject: [PATCH 190/195] added additional check in event sync to avalon for
 cases when avalon ent exists but doesn't have ftrackId

---
 pype/ftrack/events/event_sync_to_avalon.py | 46 +++++++++++++++++++++-
 1 file changed, 45 insertions(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 8d75d932f8..6188458645 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -131,7 +131,9 @@ class SyncToAvalonEvent(BaseEvent):
             ftrack_id = proj["data"]["ftrackId"]
             self._avalon_ents_by_ftrack_id[ftrack_id] = proj
             for ent in ents:
-                ftrack_id = ent["data"]["ftrackId"]
+                ftrack_id = ent["data"].get("ftrackId")
+                if ftrack_id is None:
+                    continue
                 self._avalon_ents_by_ftrack_id[ftrack_id] = ent
 
         return self._avalon_ents_by_ftrack_id
@@ -1427,6 +1429,48 @@ class SyncToAvalonEvent(BaseEvent):
                 parent_id = ent_info["parentId"]
                 new_tasks_by_parent[parent_id].append(ent_info)
                 pop_out_ents.append(ftrack_id)
+                continue
+
+            name = (
+                ent_info
+                .get("changes", {})
+                .get("name", {})
+                .get("new")
+            )
+            avalon_ent_by_name = self.avalon_ents_by_name.get(name)
+            avalon_ent_by_name_ftrack_id = (
+                (avalon_ent_by_name or {})
+                .get("data", {})
+                .get("ftrackId")
+            )
+            if avalon_ent_by_name and avalon_ent_by_name_ftrack_id is None:
+                ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id)
+                if not ftrack_ent:
+                    ftrack_ent = self.process_session.query(
+                        self.entities_query_by_id.format(
+                            self.cur_project["id"], ftrack_id
+                        )
+                    ).one()
+                    self.ftrack_ents_by_id[ftrack_id] = ftrack_ent
+
+                ent_path_items = [ent["name"] for ent in ftrack_ent["link"]]
+                parents = ent_path_items[1:len(ent_path_items)-1:]
+
+                avalon_ent_parents = (
+                    avalon_ent_by_name.get("data", {}).get("parents")
+                )
+                if parents == avalon_ent_parents:
+                    self.dbcon.update_one({
+                        "_id": avalon_ent_by_name["_id"]
+                    }, {
+                        "data.ftrackId": ftrack_id,
+                        "data.entityType": entity_type
+                    })
+                    self._avalon_ents_by_ftrack_id[ftrack_id] = (
+                        avalon_ent_by_name
+                    )
+                    pop_out_ents.append(ftrack_id)
+                    continue
 
             configuration_id = entity_type_conf_ids.get(entity_type)
             if not configuration_id:

From 83c501f26dd8d2a57cae5931e210885f95a780ea Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Thu, 16 Jan 2020 16:31:24 +0100
Subject: [PATCH 191/195] fixed update query

---
 pype/ftrack/events/event_sync_to_avalon.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 6188458645..b268372bd6 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1463,8 +1463,10 @@ class SyncToAvalonEvent(BaseEvent):
                     self.dbcon.update_one({
                         "_id": avalon_ent_by_name["_id"]
                     }, {
-                        "data.ftrackId": ftrack_id,
-                        "data.entityType": entity_type
+                        "$set": {
+                            "data.ftrackId": ftrack_id,
+                            "data.entityType": entity_type
+                        }
                     })
                     self._avalon_ents_by_ftrack_id[ftrack_id] = (
                         avalon_ent_by_name
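
The $set fix above is required by MongoDB's update semantics: pymongo's update_one() only accepts an update document whose top-level keys are update operators, and raises ValueError otherwise. A quick sketch against a throwaway collection (assumes a MongoDB server on localhost; names are illustrative, not pype's real database layout):

    from pymongo import MongoClient

    col = MongoClient("mongodb://localhost:27017")["scratch"]["entities"]
    entity_id = col.insert_one({"name": "shot010", "data": {}}).inserted_id

    # col.update_one({"_id": entity_id}, {"data.ftrackId": "abc"})
    # -> ValueError: update only works with $ operators

    col.update_one(
        {"_id": entity_id},
        {"$set": {"data.ftrackId": "abc", "data.entityType": "Shot"}}
    )

Dotted keys inside $set update only the nested fields they name, leaving the rest of the "data" subdocument untouched.
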
From 06ed617d9bb25d3dc78d09c49f7a6904fef724c6 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 17 Jan 2020 09:34:23 +0100
Subject: [PATCH 192/195] make ascii ass configurable via presets

---
 pype/plugins/maya/publish/extract_ass.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py
index 71f3e0d84c..4cf394aefe 100644
--- a/pype/plugins/maya/publish/extract_ass.py
+++ b/pype/plugins/maya/publish/extract_ass.py
@@ -17,6 +17,7 @@ class ExtractAssStandin(pype.api.Extractor):
     label = "Ass Standin (.ass)"
     hosts = ["maya"]
     families = ["ass"]
+    asciiAss = False
 
     def process(self, instance):
 
@@ -47,7 +48,7 @@ class ExtractAssStandin(pype.api.Extractor):
 
                 exported_files = cmds.arnoldExportAss(filename=file_path,
                                                       selected=True,
-                                                      asciiAss=True,
+                                                      asciiAss=self.asciiAss,
                                                       shadowLinks=True,
                                                       lightLinks=True,
                                                       boundingBox=True,
@@ -59,13 +60,15 @@ class ExtractAssStandin(pype.api.Extractor):
                     filenames.append(os.path.split(file)[1])
                 self.log.info("Exported: {}".format(filenames))
         else:
+            self.log.info("Extracting ass")
             cmds.arnoldExportAss(filename=file_path,
                                  selected=True,
-                                 asciiAss=True,
+                                 asciiAss=self.asciiAss,
                                  shadowLinks=True,
                                  lightLinks=True,
                                  boundingBox=True
                                  )
+            self.log.info("Extracted {}".format(filename))
             filenames = filename
         optionals = [
             "frameStart", "frameEnd", "step", "handles",

From 846777bff00231031ef7ee86cbb5b3602c75e360 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 17 Jan 2020 12:40:35 +0100
Subject: [PATCH 193/195] update all avalon ent data if ftrack id was updated

---
 pype/ftrack/events/event_sync_to_avalon.py | 41 ++++++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index b268372bd6..3f625da1ac 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1468,9 +1468,50 @@ class SyncToAvalonEvent(BaseEvent):
                             "data.entityType": entity_type
                         }
                     })
+
+                    avalon_ent_by_name["data"]["ftrackId"] = ftrack_id
+                    avalon_ent_by_name["data"]["entityType"] = entity_type
+
                     self._avalon_ents_by_ftrack_id[ftrack_id] = (
                         avalon_ent_by_name
                     )
+                    if self._avalon_ents_by_parent_id:
+                        found = None
+                        for _parent_id_, _entities_ in (
+                            self._avalon_ents_by_parent_id.items()
+                        ):
+                            for _idx_, entity in enumerate(_entities_):
+                                if entity["_id"] == avalon_ent_by_name["_id"]:
+                                    found = (_parent_id_, _idx_)
+                                    break
+
+                            if found:
+                                break
+
+                        if found:
+                            _parent_id_, _idx_ = found
+                            self._avalon_ents_by_parent_id[_parent_id_][
+                                _idx_] = avalon_ent_by_name
+
+                    if self._avalon_ents_by_id:
+                        self._avalon_ents_by_id[avalon_ent_by_name["_id"]] = (
+                            avalon_ent_by_name
+                        )
+
+                    if self._avalon_ents_by_name:
+                        self._avalon_ents_by_name[name] = avalon_ent_by_name
+
+                    if self._avalon_ents:
+                        found = None
+                        for _idx_, _ent_ in enumerate(self._avalon_ents):
+                            if _ent_["_id"] != avalon_ent_by_name["_id"]:
+                                continue
+                            found = _idx_
+                            break
+
+                        if found is not None:
+                            self._avalon_ents[found] = avalon_ent_by_name
+
                     pop_out_ents.append(ftrack_id)
                     continue
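
Patch 193 illustrates a general hazard of the handler's memoized lookups: the same entity is reachable through several caches, and refreshing only one of them leaves the rest serving stale documents for the remainder of the event. A simplified sketch of the invariant being maintained (fake data, and cache names that only mirror the ones above):

    project = {"_id": 0, "name": "proj"}
    entities = [{"_id": 1, "name": "shot010", "data": {}}]
    avalon_ents = (project, entities)
    ents_by_id = {e["_id"]: e for e in entities}
    ents_by_name = {e["name"]: e for e in entities}

    # A refreshed copy of the document, e.g. after a database round-trip.
    updated = dict(entities[0])
    updated["data"] = {"ftrackId": "abc-123", "entityType": "Shot"}

    # Swapping the new object into one cache does not update the others,
    # so every index must be rewritten explicitly:
    entities[0] = updated
    ents_by_id[updated["_id"]] = updated
    ents_by_name[updated["name"]] = updated
    avalon_ents = (project, entities)

    assert ents_by_name["shot010"]["data"]["ftrackId"] == "abc-123"
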
From 7029588793f013d751cb3d3d8c5bbd40e106e002 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 17 Jan 2020 15:03:26 +0100
Subject: [PATCH 194/195] correct setting

---
 pype/ftrack/events/event_sync_to_avalon.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 3f625da1ac..23284a2ae6 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1503,14 +1503,16 @@ class SyncToAvalonEvent(BaseEvent):
 
                     if self._avalon_ents:
                         found = None
-                        for _idx_, _ent_ in enumerate(self._avalon_ents):
+                        project, entities = self._avalon_ents
+                        for _idx_, _ent_ in enumerate(entities):
                             if _ent_["_id"] != avalon_ent_by_name["_id"]:
                                 continue
                             found = _idx_
                             break
 
                         if found is not None:
-                            self._avalon_ents[found] = avalon_ent_by_name
+                            entities[found] = avalon_ent_by_name
+                            self._avalon_ents = project, entities
 
                     pop_out_ents.append(ftrack_id)
                     continue

From 40cae28e76154385b2543f5c02c7b84877e26e3d Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 17 Jan 2020 16:05:59 +0100
Subject: [PATCH 195/195] sync to avalon actions return information about
 unavailable avalon_mongo_id custom attribute (only for project)

---
 pype/ftrack/actions/action_sync_to_avalon.py | 5 ++++-
 pype/ftrack/events/action_sync_to_avalon.py  | 5 ++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/pype/ftrack/actions/action_sync_to_avalon.py b/pype/ftrack/actions/action_sync_to_avalon.py
index 01d0b866bf..d2fcfb372f 100644
--- a/pype/ftrack/actions/action_sync_to_avalon.py
+++ b/pype/ftrack/actions/action_sync_to_avalon.py
@@ -70,7 +70,10 @@ class SyncToAvalonLocal(BaseAction):
         ft_project_name = in_entities[0]["project"]["full_name"]
 
         try:
-            self.entities_factory.launch_setup(ft_project_name)
+            output = self.entities_factory.launch_setup(ft_project_name)
+            if output is not None:
+                return output
+
             time_1 = time.time()
 
             self.entities_factory.set_cutom_attributes()
diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py
index 9f9deeab95..79ab1b5f7a 100644
--- a/pype/ftrack/events/action_sync_to_avalon.py
+++ b/pype/ftrack/events/action_sync_to_avalon.py
@@ -105,7 +105,10 @@ class SyncToAvalonServer(BaseAction):
         ft_project_name = in_entities[0]["project"]["full_name"]
 
         try:
-            self.entities_factory.launch_setup(ft_project_name)
+            output = self.entities_factory.launch_setup(ft_project_name)
+            if output is not None:
+                return output
+
             time_1 = time.time()
 
             self.entities_factory.set_cutom_attributes()
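
The pattern adopted in patch 195 gives launch_setup() a simple contract: return None when the project is ready to sync, or a result dictionary that the action can hand straight back to the ftrack client. A condensed sketch of that flow (the factory internals here are hypothetical; only the return-value convention follows the patch):

    class EntitiesFactory:
        def launch_setup(self, project_name):
            if not self._project_has_mongo_id_attr(project_name):
                return {
                    "success": False,
                    "message": (
                        "Project \"{}\" is missing the avalon_mongo_id "
                        "custom attribute."
                    ).format(project_name)
                }
            return None  # success, synchronization can continue

        def _project_has_mongo_id_attr(self, project_name):
            return False  # stub for illustration

    def launch(factory, ft_project_name):
        output = factory.launch_setup(ft_project_name)
        if output is not None:
            return output
        # ... run the actual synchronization here ...
        return {"success": True, "message": "Synchronization finished."}

    print(launch(EntitiesFactory(), "demo_project"))
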