From ccbef046058b20ada3a609c794705e3cce7da0b5 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 19 Nov 2019 22:41:27 +0100 Subject: [PATCH 01/31] preliminary copy of blender integration from sonar --- pype/blender/__init__.py | 34 +++ pype/blender/action.py | 42 +++ pype/blender/plugin.py | 135 +++++++++ .../plugins/blender/create/submarine_model.py | 35 +++ pype/plugins/blender/load/submarine_model.py | 264 ++++++++++++++++++ .../blender/publish/collect_current_file.py | 16 ++ pype/plugins/blender/publish/collect_model.py | 52 ++++ pype/plugins/blender/publish/extract_model.py | 34 +++ .../blender/publish/validate_mesh_has_uv.py | 47 ++++ .../validate_mesh_no_negative_scale.py | 31 ++ res/app_icons/blender.png | Bin 0 -> 51122 bytes 11 files changed, 690 insertions(+) create mode 100644 pype/blender/__init__.py create mode 100644 pype/blender/action.py create mode 100644 pype/blender/plugin.py create mode 100644 pype/plugins/blender/create/submarine_model.py create mode 100644 pype/plugins/blender/load/submarine_model.py create mode 100644 pype/plugins/blender/publish/collect_current_file.py create mode 100644 pype/plugins/blender/publish/collect_model.py create mode 100644 pype/plugins/blender/publish/extract_model.py create mode 100644 pype/plugins/blender/publish/validate_mesh_has_uv.py create mode 100644 pype/plugins/blender/publish/validate_mesh_no_negative_scale.py create mode 100644 res/app_icons/blender.png diff --git a/pype/blender/__init__.py b/pype/blender/__init__.py new file mode 100644 index 0000000000..8a29917e40 --- /dev/null +++ b/pype/blender/__init__.py @@ -0,0 +1,34 @@ +import logging +from pathlib import Path +import os + +import bpy + +from avalon import api as avalon +from pyblish import api as pyblish + +from .plugin import AssetLoader + +logger = logging.getLogger("pype.blender") + +PARENT_DIR = os.path.dirname(__file__) +PACKAGE_DIR = os.path.dirname(PARENT_DIR) +PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") + +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "blender", "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "blender", "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "blender", "create") + + +def install(): + """Install Blender configuration for Avalon.""" + pyblish.register_plugin_path(str(PUBLISH_PATH)) + avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH)) + avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH)) + + +def uninstall(): + """Uninstall Blender configuration for Avalon.""" + pyblish.deregister_plugin_path(str(PUBLISH_PATH)) + avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH)) + avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH)) diff --git a/pype/blender/action.py b/pype/blender/action.py new file mode 100644 index 0000000000..948123c3c5 --- /dev/null +++ b/pype/blender/action.py @@ -0,0 +1,42 @@ +import bpy + +import pyblish.api + +from ..action import get_errored_instances_from_context + + +class SelectInvalidAction(pyblish.api.Action): + """Select invalid objects in Blender when a publish plug-in failed.""" + label = "Select Invalid" + on = "failed" + icon = "search" + + def process(self, context, plugin): + errored_instances = get_errored_instances_from_context(context) + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + + # Get the invalid nodes for the plug-ins + self.log.info("Finding invalid nodes...") + invalid = list() + for instance in instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + 
invalid.extend(invalid_nodes) + else: + self.log.warning("Failed plug-in doens't have any selectable objects.") + + # Make sure every node is only processed once + invalid = list(set(invalid)) + + bpy.ops.object.select_all(action='DESELECT') + if invalid: + invalid_names = [obj.name for obj in invalid] + self.log.info("Selecting invalid objects: %s", ", ".join(invalid_names)) + # Select the objects and also make the last one the active object. + for obj in invalid: + obj.select_set(True) + bpy.context.view_layer.objects.active = invalid[-1] + + else: + self.log.info("No invalid nodes found.") diff --git a/pype/blender/plugin.py b/pype/blender/plugin.py new file mode 100644 index 0000000000..ad5a259785 --- /dev/null +++ b/pype/blender/plugin.py @@ -0,0 +1,135 @@ +"""Shared functionality for pipeline plugins for Blender.""" + +from pathlib import Path +from typing import Dict, List, Optional + +import bpy + +from avalon import api + +VALID_EXTENSIONS = [".blend"] + + +def model_name(asset: str, subset: str, namespace: Optional[str] = None) -> str: + """Return a consistent name for a model asset.""" + name = f"{asset}_{subset}" + if namespace: + name = f"{namespace}:{name}" + return name + + +class AssetLoader(api.Loader): + """A basic AssetLoader for Blender + + This will implement the basic logic for linking/appending assets + into another Blender scene. + + The `update` method should be implemented by a sub-class, because + it's different for different types (e.g. model, rig, animation, + etc.). + """ + + @staticmethod + def _get_instance_empty(instance_name: str, nodes: List) -> Optional[bpy.types.Object]: + """Get the 'instance empty' that holds the collection instance.""" + for node in nodes: + if not isinstance(node, bpy.types.Object): + continue + if (node.type == 'EMPTY' and node.instance_type == 'COLLECTION' + and node.instance_collection and node.name == instance_name): + return node + return None + + @staticmethod + def _get_instance_collection(instance_name: str, nodes: List) -> Optional[bpy.types.Collection]: + """Get the 'instance collection' (container) for this asset.""" + for node in nodes: + if not isinstance(node, bpy.types.Collection): + continue + if node.name == instance_name: + return node + return None + + @staticmethod + def _get_library_from_container(container: bpy.types.Collection) -> bpy.types.Library: + """Find the library file from the container. + + It traverses the objects from this collection, checks if there is only + 1 library from which the objects come from and returns the library. + + Warning: + No nested collections are supported at the moment! + """ + assert not container.children, "Nested collections are not supported." + assert container.objects, "The collection doesn't contain any objects." + libraries = set() + for obj in container.objects: + assert obj.library, f"'{obj.name}' is not linked." + libraries.add(obj.library) + + assert len(libraries) == 1, "'{container.name}' contains objects from more then 1 library." 
+ + return list(libraries)[0] + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """Must be implemented by a sub-class""" + raise NotImplementedError("Must be implemented by a sub-class") + + def load(self, + context: dict, + name: Optional[str] = None, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + """Load asset via database + + Arguments: + context: Full parenthood of representation to load + name: Use pre-defined name + namespace: Use pre-defined namespace + options: Additional settings dictionary + """ + # TODO (jasper): make it possible to add the asset several times by + # just re-using the collection + assert Path(self.fname).exists(), f"{self.fname} doesn't exist." + + self.process_asset( + context=context, + name=name, + namespace=namespace, + options=options, + ) + + # Only containerise if anything was loaded by the Loader. + nodes = self[:] + if not nodes: + return None + + # Only containerise if it's not already a collection from a .blend file. + representation = context["representation"]["name"] + if representation != "blend": + from avalon.blender.pipeline import containerise + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__, + ) + + asset = context["asset"]["name"] + subset = context["subset"]["name"] + instance_name = model_name(asset, subset, namespace) + + return self._get_instance_collection(instance_name, nodes) + + def update(self, container: Dict, representation: Dict): + """Must be implemented by a sub-class""" + raise NotImplementedError("Must be implemented by a sub-class") + + def remove(self, container: Dict) -> bool: + """Must be implemented by a sub-class""" + raise NotImplementedError("Must be implemented by a sub-class") diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py new file mode 100644 index 0000000000..29fcae8fbf --- /dev/null +++ b/pype/plugins/blender/create/submarine_model.py @@ -0,0 +1,35 @@ +"""Create a model asset.""" + +import bpy + +import sonar.blender +from avalon import api +from avalon.blender import Creator, lib + + +class CreateModel(Creator): + """Polygonal static geometry""" + + name = "model_default" + label = "Model" + family = "model" + icon = "cube" + + def process(self): + + asset = self.data["asset"] + subset = self.data["subset"] + name = sonar.blender.plugin.model_name(asset, subset) + collection = bpy.data.collections.new(name=name) + bpy.context.scene.collection.children.link(collection) + self.data['task'] = api.Session.get('AVALON_TASK') + lib.imprint(collection, self.data) + + if (self.options or {}).get("useSelection"): + for obj in bpy.context.selected_objects: + collection.objects.link(obj) + + if bpy.data.workspaces.get('Modeling'): + bpy.context.window.workspace = bpy.data.workspaces['Modeling'] + + return collection diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py new file mode 100644 index 0000000000..4535b29065 --- /dev/null +++ b/pype/plugins/blender/load/submarine_model.py @@ -0,0 +1,264 @@ +"""Load a model asset in Blender.""" + +import logging +from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +import avalon.blender.pipeline +import bpy +import pype.blender +from avalon import api + +logger = 
logging.getLogger("pype").getChild("blender").getChild("load_model") + + +class BlendModelLoader(pype.blender.AssetLoader): + """Load models from a .blend file. + + Because they come from a .blend file we can simply link the collection that + contains the model. There is no further need to 'containerise' it. + + Warning: + Loading the same asset more then once is not properly supported at the + moment. + """ + + families = ["model"] + representations = ["blend"] + + label = "Link Model" + icon = "code-fork" + color = "orange" + + @staticmethod + def _get_lib_collection(name: str, libpath: Path) -> Optional[bpy.types.Collection]: + """Find the collection(s) with name, loaded from libpath. + + Note: + It is assumed that only 1 matching collection is found. + """ + for collection in bpy.data.collections: + if collection.name != name: + continue + if collection.library is None: + continue + if not collection.library.filepath: + continue + collection_lib_path = str(Path(bpy.path.abspath(collection.library.filepath)).resolve()) + normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve()) + if collection_lib_path == normalized_libpath: + return collection + return None + + @staticmethod + def _collection_contains_object(collection: bpy.types.Collection, object: bpy.types.Object) -> bool: + """Check if the collection contains the object.""" + for obj in collection.objects: + if obj == object: + return True + return False + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + lib_container = pype.blender.plugin.model_name(asset, subset) + container_name = pype.blender.plugin.model_name(asset, subset, namespace) + relative = bpy.context.preferences.filepaths.use_relative_paths + + with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to): + data_to.collections = [lib_container] + + scene = bpy.context.scene + instance_empty = bpy.data.objects.new(container_name, None) + if not instance_empty.get("avalon"): + instance_empty["avalon"] = dict() + avalon_info = instance_empty["avalon"] + avalon_info.update({"container_name": container_name}) + scene.collection.objects.link(instance_empty) + instance_empty.instance_type = 'COLLECTION' + container = bpy.data.collections[lib_container] + container.name = container_name + instance_empty.instance_collection = container + container.make_local() + avalon.blender.pipeline.containerise_existing( + container, + name, + namespace, + context, + self.__class__.__name__, + ) + + nodes = list(container.objects) + nodes.append(container) + nodes.append(instance_empty) + self[:] = nodes + return nodes + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! 
+ """ + collection = bpy.data.collections.get(container["objectName"]) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + logger.debug( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert collection, f"The asset is not loaded: {container['objectName']}" + assert not (collection.children), "Nested collections are not supported." + assert libpath, ("No existing library file found for {container['objectName']}") + assert libpath.is_file(), f"The file doesn't exist: {libpath}" + assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}" + collection_libpath = self._get_library_from_container(collection).filepath + normalized_collection_libpath = str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve()) + logger.debug( + "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_collection_libpath, + normalized_libpath, + ) + if normalized_collection_libpath == normalized_libpath: + logger.info("Library already loaded, not updating...") + return + # Let Blender's garbage collection take care of removing the library + # itself after removing the objects. + objects_to_remove = set() + collection_objects = list() + collection_objects[:] = collection.objects + for obj in collection_objects: + # Unlink every object + collection.objects.unlink(obj) + remove_obj = True + for coll in [coll for coll in bpy.data.collections if coll != collection]: + if coll.objects and self._collection_contains_object(coll, obj): + remove_obj = False + if remove_obj: + objects_to_remove.add(obj) + for obj in objects_to_remove: + # Only delete objects that are not used elsewhere + bpy.data.objects.remove(obj) + + instance_empties = [obj for obj in collection.users_dupli_group if obj.name in collection.name] + if instance_empties: + instance_empty = instance_empties[0] + container_name = instance_empty["avalon"]["container_name"] + relative = bpy.context.preferences.filepaths.use_relative_paths + with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to): + data_to.collections = [container_name] + new_collection = self._get_lib_collection(container_name, libpath) + if new_collection is None: + raise ValueError("A matching collection '{container_name}' " + "should have been found in: {libpath}") + for obj in new_collection.objects: + collection.objects.link(obj) + bpy.data.collections.remove(new_collection) + # Update the representation on the collection + avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] + avalon_prop["representation"] = str(representation["_id"]) + + def remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (avalon-core:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + + Warning: + No nested collections are supported at the moment! + """ + collection = bpy.data.collections.get(container["objectName"]) + if not collection: + return False + assert not (collection.children), "Nested collections are not supported." 
+ instance_parents = list(collection.users_dupli_group) + instance_objects = list(collection.objects) + for obj in instance_objects + instance_parents: + bpy.data.objects.remove(obj) + bpy.data.collections.remove(collection) + + return True + + +class CacheModelLoader(pype.blender.AssetLoader): + """Load cache models. + + Stores the imported asset in a collection named after the asset. + + Note: + At least for now it only supports Alembic files. + """ + + families = ["model"] + representations = ["abc"] + + label = "Link Model" + icon = "code-fork" + color = "orange" + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + raise NotImplementedError("Loading of Alembic files is not yet implemented.") + # TODO (jasper): implement Alembic import. + + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + # TODO (jasper): evaluate use of namespace which is 'alien' to Blender. + lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace) + relative = bpy.context.preferences.filepaths.use_relative_paths + + with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to): + data_to.collections = [lib_container] + + scene = bpy.context.scene + instance_empty = bpy.data.objects.new(container_name, None) + scene.collection.objects.link(instance_empty) + instance_empty.instance_type = 'COLLECTION' + collection = bpy.data.collections[lib_container] + collection.name = container_name + instance_empty.instance_collection = collection + + nodes = list(collection.objects) + nodes.append(collection) + nodes.append(instance_empty) + self[:] = nodes + return nodes diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py new file mode 100644 index 0000000000..a097c72047 --- /dev/null +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -0,0 +1,16 @@ +import bpy + +import pyblish.api + + +class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.5 + label = "Blender Current File" + hosts = ['blender'] + + def process(self, context): + """Inject the current working file""" + current_file = bpy.data.filepath + context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py new file mode 100644 index 0000000000..c60402f9ca --- /dev/null +++ b/pype/plugins/blender/publish/collect_model.py @@ -0,0 +1,52 @@ +import typing +from typing import Generator + +import bpy + +import avalon.api +import pyblish.api +from avalon.blender.pipeline import AVALON_PROPERTY + + +class CollectModel(pyblish.api.ContextPlugin): + """Collect the data of a model.""" + + hosts = ["blender"] + label = "Collect Model" + order = pyblish.api.CollectorOrder + + @staticmethod + def get_model_collections() -> Generator: + """Return all 'model' collections. + + Check if the family is 'model' and if it doesn't have the + representation set. If the representation is set, it is a loaded model + and we don't want to publish it. 
+ """ + for collection in bpy.data.collections: + avalon_prop = collection.get(AVALON_PROPERTY) or dict() + if (avalon_prop.get('family') == 'model' + and not avalon_prop.get('representation')): + yield collection + + def process(self, context): + """Collect the models from the current Blender scene.""" + collections = self.get_model_collections() + for collection in collections: + avalon_prop = collection[AVALON_PROPERTY] + asset = avalon_prop['asset'] + family = avalon_prop['family'] + subset = avalon_prop['subset'] + task = avalon_prop['task'] + name = f"{asset}_{subset}" + instance = context.create_instance( + name=name, + family=family, + subset=subset, + asset=asset, + task=task, + ) + members = list(collection.objects) + members.append(collection) + instance[:] = members + self.log.debug(instance.data) diff --git a/pype/plugins/blender/publish/extract_model.py b/pype/plugins/blender/publish/extract_model.py new file mode 100644 index 0000000000..75ec33fb27 --- /dev/null +++ b/pype/plugins/blender/publish/extract_model.py @@ -0,0 +1,34 @@ +from pathlib import Path +import avalon.blender.workio + +import sonar.api + + +class ExtractModel(sonar.api.Extractor): + """Extract as model.""" + + label = "Model" + hosts = ["blender"] + families = ["model"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = Path(self.staging_dir(instance)) + filename = f"{instance.name}.blend" + filepath = str(stagingdir / filename) + + # Perform extraction + self.log.info("Performing extraction..") + + # Just save the file to a temporary location. At least for now it's no + # problem to have (possibly) extra stuff in the file. + avalon.blender.workio.save_file(filepath, copy=True) + + # Store reference for integration + if "files" not in instance.data: + instance.data["files"] = list() + + instance.data["files"].append(filename) + + self.log.info("Extracted instance '%s' to: %s", instance.name, filepath) diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py new file mode 100644 index 0000000000..79a42a11d5 --- /dev/null +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -0,0 +1,47 @@ +from typing import List + +import bpy + +import pyblish.api +import sonar.blender.action + + +class ValidateMeshHasUvs(pyblish.api.InstancePlugin): + """Validate that the current mesh has UV's.""" + + order = pyblish.api.ValidatorOrder + hosts = ["blender"] + families = ["model"] + category = "geometry" + label = "Mesh Has UV's" + actions = [sonar.blender.action.SelectInvalidAction] + optional = True + + @staticmethod + def has_uvs(obj: bpy.types.Object) -> bool: + """Check if an object has uv's.""" + if not obj.data.uv_layers: + return False + for uv_layer in obj.data.uv_layers: + for polygon in obj.data.polygons: + for loop_index in polygon.loop_indices: + if not uv_layer.data[loop_index].uv: + return False + + return True + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + # TODO (jasper): only check objects in the collection that will be published? + for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + # Make sure we are in object mode. 
bpy.ops.object.mode_set(mode='OBJECT')
+            if not cls.has_uvs(obj):
+                invalid.append(obj)
+        return invalid
+
+    def process(self, instance):
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")
diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
new file mode 100644
index 0000000000..b2a927a2ed
--- /dev/null
+++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py
@@ -0,0 +1,31 @@
+from typing import List
+
+import bpy
+
+import pyblish.api
+import sonar.blender.action
+
+
+class ValidateMeshNoNegativeScale(pyblish.api.Validator):
+    """Ensure that meshes don't have a negative scale."""
+
+    order = pyblish.api.ValidatorOrder
+    hosts = ["blender"]
+    families = ["model"]
+    label = "Mesh No Negative Scale"
+    actions = [sonar.blender.action.SelectInvalidAction]
+
+    @staticmethod
+    def get_invalid(instance) -> List:
+        invalid = []
+        # TODO (jasper): only check objects in the collection that will be published?
+        for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']:
+            if any(v < 0 for v in obj.scale):
+                invalid.append(obj)
+
+        return invalid
+
+    def process(self, instance):
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}")
diff --git a/res/app_icons/blender.png b/res/app_icons/blender.png
new file mode 100644
index 0000000000000000000000000000000000000000..6070a51fae3da0655d1bc14156e8359428d4f6f9
GIT binary patch
literal 51122
[binary PNG data for res/app_icons/blender.png omitted]
zI*Gfu(c=^VV`>@%K<2xR9KimXweL5SU=?+!Iv@a88N<1=PC3Qzj`*Q9GfvS0O!%K; zKO?c9K?+D_t$6?Xw){0EYtTk=00xR_o$>hg%O)$vp(&IdHnF(s5h`D zL5aEwp$JXy+IMh>;GsV}dFQO`AVf+y6hsDa*P=s?l6jS&#%T^*eQt6G6iatKmvtl(Kym z5S4+d3vvI8PESi+;cqpXk8fNb1vtH+!i`>>LnKK_R!Eyz;2+)ZEG~@!J21>STmHuK zs6e_@X)%1T^?6j6-fCZzVo3@Ct`T{ZyJE`xuNo62!U>OabrU)i(fZKj22=g~-_)cE zR1^kLAYa{V)n>qR7y5ZcW2-BjbPwYBP^B>Pa@5RLXB=bxMiS_3K6athe?w_SfENID z*(4Tr4?5|F7U%^lXypMZXyBA{EOz!bsH$Kx!1+yj89GzdkJ*@xyuav9gR_IL7Jm=R z3d)&7sMWHUk6bhay-?UzOEhST3_si)oczPd1+NcO7l?p(2>erQ-t-SS+J=WeJ*c4V zawh&Hilu)p7dWY+2sILRe1<67jWq45a2lW@;sK+kbFk_|@H%u_&~1Voy_ zAhG@V-&Pr_Qy~##_gYo#Z$nLXIP=V37>u* z7}IietgS?;bg6Hst{&ud9(0@uq9~A|Vx#`;f(&xwNVOYj?ao`V$9ScRHHA3Xs@-0_ z_s0qMsrg>mvCGIbccc<|nIG5Z83o4afmTPFVGOK`qQgr>1GSf5UT z-uIxE24g{Px(+=haxs~Jf?aOy#UdT16iUupi`XA4=X9M@R0=+hKK|60czAe#Qi^OI z8DRrFGxRAUC`foq2zcvgFk{RJ5Pd00ZYElD3g4YU;nw0yLz33K(V%I9Mo!OZQ3 z&HY7oS5PX30_{ka0i0#^Y$Zte*~*bvun+fna0jBL(L=fZRCL}rcONTW+K3BqubgB<7lVMpKXfTt>2p%UM zJ#VvjMASDv}>!t3QxKbo?hklNcRn zA`?OkhDrJqsFW-f3z~>nQP}y_HRvcx{O_CR z8FQJa63-8cVQ254fl&tFmU zZfo2^oZq1l@LoY-bLx%{9cqa3NO5=|eKy>fOC472f|Mk`zW@6h#>Inf{Fhb7Yh+lV ztMG}g{?3)_Ou9BmZ2&}BcHiJQstzX|eN~}O#19m?i`0SA#U z)I3lLd{TvT95G_^Ijp!Kt5A?aG@?X7^zpd%WgNXUC<83=yzA+MbB4~u7`z@km?KML zQ8)}ApOKT?7ztt0AOQtGoRin{kKXQ&kR0s~I*dyLp*H|41vT45v|Bd(@t|b;JI5{2 ziBY!4c&V`D$I4Ha+2UzG$3AxhSo?A5wp~TL!}@_N{5A<*SPd9L0}INUobQn)XGJ8u zD_HgRQouY%a(4*61fJ?vxX4t0gZ26)aez6`5+t{L)QjH?afk&4{%oMBe6ppc=DPT8 zs1l3Jwd@P;C#&)5QTh*C7=$&N{W3*N->;l=56-!#b*3kOC*TIZLUc6V9$-9H_ceA? zF2p&oGqWBZv2Y|W7pIVnd?f_pbJ+>_M=&SiGAra7UyhTq#TV0DIb`-;E+8K9 zHVdLivBj@LBBGar#pl5;$Cm?o)tNeOAP#|rsq4SuxAm&oCNcf!mkZXTJMDF2tKXRA zeGR#?Mw@riC}E0$hGSL+k^ex=IelCSCjRkxCc+whTF}q8pvpKZ&V-7jgNIl~~z82_*81E-N#g}$}@ z^R%GNk&WffufaLL!;@ZUfU9S1l=0j7($>Cs5SYeJ3lFD-ho3*b^q(F@odWNQMaHh_ z2w^R&sd&L&(jq=s5H~W?<_QQI4i3w0zNT1=qD@W@B3-->pNFRSWvl}Lz#1CW9 z-2!_H4+h2V)-wp}dG4lsuURyEiBValNMVJL!ho2$fqs3y(kt+&=3$$uTb(n=U?c)M zd9ZT0etCL7nrslZQ4;!(@x7)Jdy4` zFr+7oule%J_iFRD6fY`L9QK$W5NIqPeW)S64yzUHOY;1CT2|e|rIs0b(1yEG+fHGe zpj_qbn91f(8^I9#(o{@-5Dnm;EM@~0)(}P^u8-Y()N-LOJo5aT@66X&IC|Zw4=vYT zV9fj%RRC5sb~4a+S=sy+{*mf#^xM*P&I_U_Y^zw@}-n_^0?W2CM2mcUWO!j28H zu0y%U$);d~8oRGA|G3ZNSxyw=TrbX~-EQp1vWq+bMOXdoH!7z5HrBY**omb1)Icj* z9fVXD!*D_dn2Ya*z4HSu0X3g=V076xHmTLF-KWp_qWOmB<5Zn6|A^Oj4{orM0|D}R z`|wTYIhOnsUrn8>osry)E<)v`MBwf_q}x!2O)e{lB>z1}nQ9EAS&sgo0~|l-T3vfe zQ|MbS;-=;tuT6l-yyY%|%jXH;rW|M^+5Elc0($0TGo_AxD&U0!d};uUk)-< z>?K8Gp6~c%R&B)>m5_uV>_3e4&Ppf5ebNGdOSuUJzmK6tNQam~LxdY5Z24HBbP|XQ zjcw~IZ1jI0ukqTodKJCwj^cTU2DH$epDym1bhks>VDj#ejScNmsKqoA+>~s3Mj}x}T*)fs4*Ffd8dASzi)i+1e zF}ClNDm4pWaCch$izTCuJ|91tG^Yb+)%1AF(=cUPs3CQ~m3J;|XE2rA!fx44hfhl_ zCs}DpeHNH&A%bra0t~Zh37se}f6gQwX2?az>NGjf$cCMnTHnkHQ1CXr9yBW#Qr}faazr*oxr`4869psVJ zH9|X?B77mOIEe#v4Gq;38!jhs(H4(N*6fzF?=|079cR7-5}yWiR5c*Mm#~f6;UR*B zM9W#QLWU4RN>&5X+tj#_uPw`;*lLTR{04dd#AbAi=q``}ER%S{Gdo9q6R)wR8o!yf z2(()DcD+!;<@l9;D(nJnOpQJSFXZ5m6zS|bt(rx~)ES7z9StJ0m4Q?rc9y=Z(Hr!^ zCNM9WVZ(7}R`=mWkM4*y5j+KZBOy@8On-NjZ`JAnRfh}Sc_AsAQ^nE)3u&JN=33l= zwVLVN6e51+|BfmJ-(BU~Jo&_|C5#mD+F&dgY1p`3kxrt(He+Hm*5J#fqbE9p+D>u* z7nF3J{6U>({w3pe?q+V@60~E`JnAB>LZhQ+jwZxMXNDAk3YU#vLl`jnZmyNRvL+Yj zH~2-|KT_%QvHS5_gpwJ=(dR5Ku|591rh~~N*^nlOTnc-CQPuSZA=m+fqS`)pH|V!5 zs-jUaBaIY3wIf7X(Dkj9^kih_Nj&OO)p-dqK|MWryusj&r?zVW6!d1kS^r4}lXb%V&;@TYS$<73X1(RttpNBR zN#I^YUjy=3H0~R_t?8Bn(>fCp0fVsM>c-Wtlb_p*J*br*bQZl1z4_knEWLs)h20Rr z*cY3wFt2H{*l=LEr&O7vCk=xGPP_#x?~&%#x|JRA1*uBkJZfJTL;tn=*VGsWZaigz zq9Kpn8}b24&GRTD@;@j#!2Sz1ze)|Pliw9V;!Ctd##TE1t%Rc2m8i}DI+U=aN03whz?R|&eqD`WdfV22`PD7A+T?yd|K-u2d(Cfz}N_sk9ZiX!NW!FEn zFJU8d74yP$`w2}Whl3j^*ME} 
zLYWNAx$dD)!mRXHJ`SvQ?e4}35w+fi+FezdC|YhwG(iH~)YXAZbb9=y z`CO}Jbyi+l^%-+f&H50>qS%wgumZa-CI8 z|B`hq2O|`Mzk<$5rTGhJLJ6T-o!Nd+Iy%E9)T*r{1jQ%e*L^EHd-zS~lwDUc`D`@f zoWVk!cHTdPk+gTXR@3!04^~`z52R0jP>;4eO`{VEyV+x2DzyJLG>vDoUJNi``6K&5 zLUA`$;b3)}?gd>TypRT`+tmLc*E>=$W{j%IU!!;^q=3g)?bW_AIP6pT9wTYNe@f>C znp63hu%E=2Lhv8ECc%%&m|Vp!Jv8rbHmJ_6`y=XwHWko%{N;>b+1aAEmteMr6>+(a zHg^G1OCRUI{cMg@6nX~o3CFtt#dM;|T43_^-@0;?*ESW>JY)J2=ir;AoK7(@Gk4oB5q^5YRd-CC75HsR&q#&nvh0afO?iyyOYGGqU|Om5sNR}iEl6WEfFkbUH(6MkaG75UQu4Z? z4I@YvLcHIf;<$QRR-zZ`%s;#ZT8W4-;pOssqX>&{ap!{os=-u#T|Gr%uj=uL=15PS z)Dzf+oentVt43tE9P@ulvL*rKp6(YC*baLl+LNoN(<7tH&>7Q?_nSz`v8+E~pUAxZ z;cw>(V$|KqIC-`S-D>u;5l`e+C8F0~Aai;HYXHP`@*h7blIvfV3QsAEZ(x`7nSN_HPzIs=I_lD4%!jmFr^b&-V!m=lDIxvrTHT2zfp!3hGG+B1(wJReIhhCGhi)=DD)r1 zOl89SJHD_#8t(O`(^^vk#?3+vRDfX)84Xb-Sw_bXw4qyL^UE(EjR!Lwwl&Q@?2ILD z5`Ni7CYrkYaY;vik!p*ueCTh3pNRiK80o>~JTv@VQNGCfNyKNKu)E^anGtAQfUNNx zUQOH!f^jAd?5^JMNRh`H2#VGi*RIWdI*{SI#Wz{E$9GhD>C7<^YL}X1hw>J^3ERS)Y+Mtr{iX zU7sL+()>arw2BU=>HF*fw#w=)R5(ew;emG`@u-(JfunE21PXMHlQhaPdmMe!jEGq8 z;8A4|Pa)Awhljx$pv>x1g9p`^F|3GUYqk`w-Mk@5`P_|JVRI`ZO;d_UY{30;kZR`H z#3xPigFLvzYxAYqmCj{n-cdLx*9!4G-=mjvj)nO?!_=PJ_tRVuhwp+9C3##WMe%A>c725Uw!N%Gy&yB9XtnoaQoA8c}{KXTFwxWlQ^ylw7g5~kZi_=GP z&ivD=I;4%GGgF^1rj7^LWb?o1;LkkU$@5AL%%)#NO_hz*L#zu)Th_wQ2f~)yq74!r z_?@!R{V>1B6RnBf0r}q-9HBuH?v9aE}A{90L5;h0^vx)O`byhWg znT{$9?PbytprQ}s7JW$<~02Xe$%yU-J6zv`*^L`WZx`nh(tdu8q% zj0e{I{PUA9i~CgWVD`Knht}%&t4ylk94S~S5sw_?@kI=R#LpK7_I>wLYj&IG5e;qF zsji+-{wjSI6qz2+AbzsOeEQ{aiW7ptIVgLOD-o8q63f-jZ!f$crTcHzuIIt2s zg+I+gI4z>veE0*_&T#wz3>&1Lz&K?I;>dkG9+>uO%rYEp*g%MhKOSZ`98@o|uAGM) zY3Kf{DG{L(HX;y7gnrj6SmY7{naCz4w!61Ev6`eiK5-^?-$#A6{>;)&c(-Z;*tQjE zK^5K`%D>tC%)!RLEN*DqoLG0xx;RHAuOO1_)vf-UR?g%(;cB8^r< z)H2fHsWyL165xje!k0F%c1alh&@|^2TmCD(W62ygteli{MR(Oy`%HH$71Z)yPO|RB`Q?n}GvB5!1wE4U;0{^kKeO#y_xC!vtUb*YXcxhze`0 zywGgE2)@CSnc!P!G^MYXN&o}P1GqBd0DWD+YlSIp2rso8EQl!j%=U2gPJ5ORW10-z^Q+Z`HQdvk zEe=@ymyq|2WX-e7*1_KgtUWoR6j3~S`8mroGyuzg6%~0JY4^##_4~m=ncg~V?j5{M zJMb%Xa8ILUNY6~VI}GToodr6u=enZwWPt1CBS@jqGR?}~hE@WuG>{9(^ZFCH=J>f6 zB5l{?_RH_bKAGhyB*Y&ZBwrDZ<|IJcX*OF(ZsM$x4Qm!j;q>|?9HG1oGuN4ANQ=)e z{|d|QF0DtmYV|$??mQw5GIH#Ru|dD`T9pfsH@Mc&XgE_Y z#YuCSFn;XojMJ(?C+fdLU(fhw^yr&IBGlhxC9E;@x9YRv489<{Wxa{D@E_kIR^qOg zpcAx#!to-)F)tp)DOHwJLYJb1U4$-F_D#f}w1Mf}(4Yr3KC5ndL{{V$r_O@Tx+u61 z1X>$#uSQGQ;O0XpS-io=$)me}{dKJ9YZZ>byvnCTPh`iGLYjgVrScTndtjRwgk7&F zOZsrl9VSkI(}g84ywj9O-p`fUvUfW#7FDn_!#C-}j>!z@6=!WZX&<=|!3##hgdnOaSqiW?dgGItJGJd2&VWk5G@F^nIiWxUmxY^H~LGh7?=D{k?ivJf8s zl7&pMkcfv{2u)}GE`2y7SbEm^gEADPh>c`L$Wp^INf(?}eHxv7qR!Mpa*J+2e z7{n(#h!fvGj=X-8R#wOW3M`0q0YtNLV;p}W0cQZsTH!8-jP;){3`pq%vjZ@sg}n-s z0TH^#6uH(9<$jBcl<-O_+egohl^=hFHA^?7ZUkV}b3pVC5z8U~nKJ%xhifK6q7Ba! 
zADYyEaIEs!+o6jPClWn=R{pnSe;de9U&ar8e6wv{4AUO4`wN^%eul+{eAZg!lg7;5 zdy=`ATmUWLk385Kg%6=Ju6tiCOshHL=N#~4s=Krco_8HimSD=096BO~3oj>DnP`Xq zhg(dX)8^p=N)jhq;u3+1zvtKA3ySi;zfvc?gkL@IG=rU>Kt+uaOuo<>?S?nw@m^it zv9~~s1q3=|!LB|GNe@_!phGy+Z2r*%ABwAatrtaa0#Bno9)_V@%y7#eb3Rur{3-MG zp58dVB^fW6zSp<&3L>5OV&{C|ul1BKY9}@a;aUM!3g&mUCC8g^GzqX~TUgc+y zh)#spbp6{0zjA~GQ(t8H&kz1-@>#z-o}JORgNP1rvqF*jI+>((pk&G z`K;}QPWQ_UzAI8;9{BHYlW3-nw(UcG1jlZet%9#M&8v?h=^!!Scr7aNCbY1Q z%kSub;8@}0jhB!@<2$Unw;D#Sp(B1(E$c`Eu5yGy#%1#`pH~fVQVx9H+xDIoF~he> zvR_x3(^-?o&S-iarXrQe=WJlNQgWM_-}m9ixN;A8qu^&rNb z%M%Gvq2a-hZ=@z3B-M}5&bDiPy0XqyVLqFhh zh>G!M03Ut+NKUG{>2<1%ujygkX=&5xmbxHS2_;GZSGnQiWp4>r;@@7oFt;V*cdP8; zM#4}L{3gk3LZFj%CTZJC!m~=$;gacBM>(v?anbW~6j5dIJzuSF>132)-1f?sk|3kD z_v_HSJGk91j=>KkUWJ}Ccd!zBCuLcXW}KthzaMgaheHnZS=Flpv$H}38>+#Ak63Xr zFkbQqC-4{nQL@O-c{TGWpDoQ)8@L~-0f+gR`$9rN8rzK6$yH16E3E4$T$vMi;Ek*f zx)8;Hig&OB@9~+MH!d%|bYzhVQ~H>I2He4BSlJg~pd(I|J1BPTG9qS-l+2mr#?FYM zL@C84CIk|o`YMh!c=k5;I{H)CPPH~1Z6Y!$26`%Keh^Sk~oZFYs(05XAl@fpo z$VH`~e_z%g{bKU)uz6jerlbd!Dotdu05=qS>iJ!l6P%jOs-XRL+dcAIKVChQZ7BN& zoz}pa6?Y41f=g=rFugwazre0F(p8!Yw5GNv{^?0tdzg0d@m`&zU%g83I|<$MYgY4O zv}Acl1f5HJMU%quBmVy&Got9Z5@^j_7)21uvB{-#6q<9xj;a^=%yeuoHF>b&^t08@ zP^;|KN^-O({1S;+M87spCTziDTW`~F%7xvUf7tRR!l{h|h-V!17=OV}But*z5+{Qj zcA{e@3Qw)Emhw_WI4Z1a!2xxXkO$f-Vea!o*IW)@=(2B6omcI|BZtB=CH)qM;~bbv zASStdwkH8q&uMVgke}+bN%J=A4*Lwjq91qA+Rg*+G zKNUsA<53>kXsa8LFnU$C+;PiqOt}^`*43F+Y%=X@ z331ZT!yFS9Pk`t|LX`-p`MuT=zhBuR^38C!+{qu>pkz%^;e+nQE6~llR^oo>OT0vr z3doQ}uFJ3bO=`%5;t7nVp!M`_F5_NIaW63l8@-=v7`ERc0Sw08jW;Pble>dByst#- z8J{dlCT%hcX(9Je-)lSJ`wr8&D_P&n+)8~b9d4oIaycJzL|RKziC%yco(Fvga!4pk zInD~edJ+`tReI+_%6aY^t>T0<5GAW=j~%Vd7~9Wvba&TWH+T{)jChmF!7=oq zi=aOP*-3`$+ZL%FS#~<@*oaN2Mko3?8{P6@arq+(B-|)>_2VrlaKVfmkJO@ zE(C-YsHn<)P4sS z+3+)p3Ea?Md!1Nk4!(X-R*?E;K7Vsf8q;)t$n3syY}ZH%-&yQ}R(7r&Vcs;DOl;m!e$ervc(-oHA&2%zDz?>~HB>71$S00(4} z9uhdHr(YjZPpb}5fzS^^L6T~FwI_Uegl3;`^IZ&D_)Ut<_)oUN4S7yU2@)Q3}XHPi5+Ix%d77EqxX#QP^lh45ftSjFL2Rde^h_g3^k%H;!e zbNBVFi0Z+&gdkd&gCH^JwaZFV)u;y;M0V*^s`(%QN6K~_yne1s%gn6W7cOqo{cyQw zF2yhJk-7o=S_C8`;eX72g;XG>S5>t$pjJBotrQhGgYV#AKLvNq+&4}f9?IDWH{4yZ zQ|i=6ZGW7a2An2Gt!2yR920_0c+5}n~GpXcu%!7D;SLXs~p zVc=V19;Gt*-F#+dre#?({2Bv2{jEp5ii(One(V-q0n~nPIHd4xe7L#2wx3|rA=Re{P`-^Q9w-yYIL~ov;qo?Pm2|9anWkFG`);gGBG-H1G zF$PZ0h8lf+vu#wO;nFR?dB17*j;3SsF2qD@d!{jqq%+o`f%v@o9mO&I^J-Wm<(%7A z`1(ZA^E-o59yLPC=pg&vM&-@|3ipPgG642I}PVaVPw>DexJ)GEId3rKVS3d)2DOu z^Ia>VEp>Swo1bvaF?{so0eUxYW*!`P|9Eww-9^^Z2r}t<`k$}x`L2X%dL#a2)fF79 zl;)C=j~LTeza z4*GfM8&PW6y--Wr?_op3uSV3jJ#UUra@_3_q6xY;QuRncK%lb7>ZD2;Y)~E_+EIsl z(P7{wZ`JMiM$o3}vN(G3;suoG8MvTKfL)MCI7ZoFmu2?2QR7#0hFJPSLycRvZpl7e zO14Ybs%Rb?dreG2;=Z+D6ma=;TtdRfoSaY=;>ep(os)5&6df;$F3o75aTLDm4Mmhm z!{QinP^MCF$blogYmEMP)C+U)Y$%acUEe)?UxRO-n7n*WSC;1d#%!0t7IADsf-W)V zIRvZcv+@yP-x6Y$1S6iaNUfteg=W2($E}`ag@eD7Osc~A4()=-Qus_);h&n7KulAVaIJUftR3}Lp5Wnvjmn$6zC2DQ%>-NDbdOhS|>Gc`i zJ(5WNeIhXVG{4T7YihBHk(pfT@vMuyVGArKvwLKl*O-}9n1yoOI(8!Z`iRcw4xK9R zW4Z8^DA?|u$EgtkOx!gShVX8Rt7HWNRu`_HV=AQW*Rl@@KBs2CaaHGS9p8@^-vXK1 z4zFb1jOTG?Z%WBy}iHs(=qGvo7Wv3+Aw0wK_*8`@dTXj9}moN#BJPssdzK%W7o#D8QKa#QzC4zIaz-;3D7qb zLK#=@1RAg7I93`sdcM!B_nzOJs8wVv>d%3HD=8`c>gaegJ$*M5trfS4U#*!<_+0Hg zsCb18%c;h5zf5l@-Mz?(33G4SQy6~f^Hs(lKdBS~?8W;Aj=!>wR)|U-l$&=9Je!rq z^snt!wWTTMj~;E6ReJA=zuG6#dZYF8*RNP!US4ihcX#*f4)4aORg2= zt&krT^$*lZQaCz7D3x7QT>Ncm>3Me#kEC!yicR^wjOSF#y{&ak4tUlWK$=7zL6Fnk zO@qt!xF}EWELPO*MhK2hsY<@T-Qtn!xyHkObo(8A=kMRQA%w55aOebFz6Aa4@%HdT z_upTMx|}WEP(rQ5ZOpWVx8FN}i;gExaIldWlFch!y&{GZEu4SY+ot3eG0O@LJ}%`2 z#*Xa8-k;OSOb7H8PQRZ(I5nD}Ry7F0y3VxOy1D7WK)?Us!Fe{a_| 
z)Q^mKQ77q|JwJ8&fH~`aajX_>lS9hz-~mKOrE~jpuJ<2rFd?#P@TR9LN6ZaXxXK?) z&`wWJOG-<>A3d19@ZCy0a{@<`*1zRYuXd2q#0{Qs(gaB!=Hu5Y)YP!`)}V*9o)9$R zQRUj#EkEm+lnPcH-R{!H2Di4gA;FfElqAkZbgF5}g_)62%g*kjtgLLTis6M4a%vtE z*-u;k`F1Pa7kLgJK~V|qOrp254<-N)KR^Gx6Kn6YbTGD~2zqV%if(PD4L7j(8QOVz z8gg-!jb>1kzbq^iyME(Fc6$1i#l=M^X5(XaADwh%Y5@uFw;76>WMEM0LLEoc#uswm zU)g9}wsU}*(*XzeT2QK2$-*jL+5O8lKg|RM`9e&ym87UB&csjeouSa&+EhzYax&B@ zTNfyQ_k8OCWzCnGNyYYm{0oerUwJ@I&2faItu=l(T`Kx6_qpvSl2z>`^N(sBq6P^+qM32}|Tc=L~Z5*g)ndfE$F z!TI2xt4T+5Y53ev!y=A5+Nv_D*8;g@gnz1o>G6U4vIM}fJ8$HDJfDZ);kzvO4f&p= z)YKjl4sQf(y0lM;81MEAKvUA|ktNHETzoW?gKDBjl3X|HSaC?57o@auHr4)*;upyQAX4PY16=`2PlI_H|87I|~bJl%F9YF|28lQc|SY zh@OAic#wCW9AVzX zlWu0-@iO1HSa2ayG>e@ajF*h0MbxwK@UYM~XI)v#w9+kT^N92rbWDGL)U8T4=(mhN zJ3kuC(>h<3SQ?W{9)YPjHtqd;?UF6{net3c+X;R$%p*OTjy z4hL%vU%{-Vp^P$(YnhU~crmx3L8Zp1^TV-+H13snJ1{ivopxH=m6to@(#y=ugkatz=6`y;E(r8mYJ!~~ z7ivCbcJgCom7hwVbt1xKy!X}J67aK4I zruU{pZ42c`IKE#|wwvsl+VU3!!S;Z3$N24x**X05s9=e;oZaC+D#gXcdt*VngBhx+P2P`VG=_*OwJr z3Vlg)`umM2Pn@{-AY3Gmw5*~+2L7Ohe)gj{0aRVkvJ! z8ux6nrg#>u59m%A;;DC3hi_5a?p`q0`8M4MeaE!^o1&9zEcjhG?VtbI<(3*q^mu6F zTGEKxQ+rJgK9qO6p@Bifrro1EWl8Kl&)ttm<prFt3# zQb444IW?w$9INsSe(n7A{{?a7;emdyCEN+_ zv>?a!Cozv45fF1fUeceZ{R|EM#`R#wnf=IL9NLOa(Rax0M0v*fmp5l-hTw|8S(HUX zZ&^Glg00=2ZNl4rkc^7Hg6+RKt#)WPmn>Nt)pN?`K2 zmaYw}x|iYTK%_v(u_@*yjws)gv1zp7Ry{{qZE`0J+*i5!@g-R4Ht)pIfnfNR?DAX) zZohEv6)!J*4WM9VW0PVdAbkciPyK{*Nm&_8pn>kbM`FDcNI;ILHl%m65*em{Y#*-f zBa%9)4*nLQD#f#`7%c?WrbZ9E-qmc-QR7K-o10btVh5*ITZ&v3{IN@HtUy1_PeUxC zTBLv#n7z1q=K5#$Ws_U=JlDy-GUSS)@?eiBe{z$UM@nqdd>D<`( z1odN>74olwb{<|l=if&vdXfltnH&Be8>H{Sluxhu{}A=%fl$5Q|IduEE0R#wELjqf zt%=Yk`l3t=l(F*FU`+Efcb7@VP=!% zn)+50{R4+cioX7uK7XvW(2pUJX_6x=k!;=1@x>qxd099q{rYea)h~-3=@h%4mo1Lo z<=R%7frZ?*0fNZ_QYMd2$S3TBox5ctpA#!(@}ju-n-7orhumiLs-d3fWuS4=qL~|w z|K=gim`P1Yp?nX-l5T$g$ju!hJx}6a{eg#;wA^CnQ&v$qk7weNwXm=#1Goxrd?z6p zV;7@`N_GNz)SQ_*Adp)Us<4PDs6x`mo_^2Xhu*mY$ZM)ze(+#IL{*4!`XP8!zcpr# z{r-Oild(+4`@VDXF9V6WWN6~dNy~kVj&mOD2oLjUOH{Q%T*<7Fd!C9bTTC8LsMPCIt{(w?+28xHHZ}6ms3s*E#=j^Re~o zef)xg9sPp#{FvDPFz|t?igq8K6VESOru~lhy?DX#!^hD#QYfY4-kp=>oZ5LDQw$F4 zIHH`q$8Kg$E!)W0Ei6{#OCwZ=ss4I(^3C4H@6~mW>@2H ztizgui2DPvX3*?wC;h)2K22>-=qOme+-`>KlL1D3ypyDM0(p3xI~Tf+u<7ky7*R7U zX?s!I$J??BIvCTaLmIVx1Trs#PbZqTmUjZAcsa)$Pbwpjs=GN!*TDT|9hllHbdwuJ zQ|xIY-e+D=W)jc&VamTVlCZ-xQWWgLE1?OizAh*HW`|o43w5nC98^(LYw$5fl(Td+ zpuMo;_ft?C>FtlP1!d?CII)sl4=$R%kN@Ku;;7cJf;s>3oYFtlBVD>19W5LCDKYxG zY$AU1^aW{HBM7Q%0!BiR)qH0zJJGSQ`x2gk%nVq(bLE3a^a4ZUhcPV&&UkEhvqBT% zHN&QxZ%)BE%u5{1Wk*NF&KT(;!-zb}+xU=${VFu-n+VlBWg+H*mQwH06Vg0}4aoEc zUD zRwo{7{n_N-b8X?TY9G84RBsV?o?t zJVp+7r*y-c_-!W^Mxneth&Q4x4}{w8v__W#;9=`g&J*FbG)&NCS>pFKbw)V8bPc!D z0xxWtOec=tW3xVkc(N|EZ$}Cp%I|W=n+AuhwMaX`jZUs`T6;i%2Y=0WO2_X0TQ%qt zdIp~NTu_~rWQ}p~VXl3_;~OKEeHi@(0X$K?d^zi#yP6Oq3(qmybq94Y$bT#5jN27W zv+FHGUYzNAlEg|dzMLYIc3+}MnP2mT(7{oX9@J0c_B;(T)J6T~Ab;QBUCAk6>CV47 z1@EdZn%wMq<;qtdjuYw44d8xbk@X`6Da`TPpuok*G*l9?q|`aB)BT^*Itm+^yPB#Q z)aAp0Hsga9^KyOtLzz6ohR*f}CrsSTHTURcahq8kWN#4=60$sVCfQMOcXiio^Giy3 z=QWd66azl^#7UZA$tMruS&5K5t2>_A=fjkKx+que#rS=pyDJRB{M)yiJu%%Y#87-c zbxhgozP;3Kr*~;%-`Zwdd6YyRG2hutp z?x)2mTNzw`xhPaFtyku66szi+=`+=v?PkD27KyY#;YVtHc;Y_BpGT3>7%y>VW9+?r z58T-J%8}jdNDq+Rgx5E(gndqWzKiebt1ss z&+nv@lT)^nww2ms(4P+_*McXXz8aXfa=eUpSbBAPep+ZxVDLIyHa6J7-Q7?~NXQUl zoe9EpdvL%Lf9#Yrrr={&EP8q7iz+pNQ+3!7=EdMVL6&=%kC9yT z51=PSZ*d*n=N%l7ppwbt?Vxl_%|WBh5$hs;FmQo5>~_*B&6+socDEwb%>X)67! 
zG)c$o%;(Y9W!;32K6hW+{ujN>4F+|8uPhrPnOxHhEQ~PVk-WNcw62Vmcu3<)%#FG4 z7>s3mcYa6w9|-LDKLFFJ#rfCU)r7+*qn53lE{BuoTjovMu?x#_mW?F8C;ktEm*y^v zs9xSC-zZDF+PUP1A&_fpc4XG3@PlEcl6ndIUcX`dL+!k0cRG7xwO`~#X+LTl6}Wk8YTJ5!8QA-=6x2ui=4U(OS&xz>tRFBz(9gh zgAox+&Ow>Ek|fGDlO?a4F&)RmjW*lVwp!%rNN#-Bfi((fs4-7kRI4QKhJkE zDS9g#2ow*BF9^XpY?0JfKk6pd`4=Neb%gpCWHtmO0XtkQ{wOQsNArC)Yqm zD^^7)%N1by)|gIpMGSfTf@0SnjOg<|9~5du7e9yxzS;*ygb_$N{51c;An{Wn6H*OV zDX&NmOKJc11z&{RvhYyk<%sof*8cK1;uEGiWU@jMd31 z>=SpX3D+S7L zxsLbrF*b@hm~pXPzx^_b8-oLt<>?L|aFJ@b@^Z<4`zp31$5fjB z&vZ!}Z#iJ7>s6OEnbKl9&3O8-ZJs3~(J}||NWx5;N5`M)jOUsB%a5QF#0*-B{}s6$ zA)3zvajJ&b`TTTS&y@QTnqM32_8CKdV^|rnXId@D>)`Lsp|rc zin`I*`N{O4TsE}LH^!fKk!u&2g6>OT?t0{g$Blms4RYwCQE;!#EiJP5k0(ao=sW(Z z>~LV858IL{7H1W5sUtiCX_o8sSR%BU4%CfeCLKCN3Ir#DQy+pS=}B9U4_4$Zfc>sl( zUVt5y^Pu?eZ2H@|t57lhO^GhWKG1US5-Mr?Fs~7W#Tjax9R7eK{%q4je%nw)0W*5> zI^gACgp|E`g+5Aw$KzWxp>zS(hYaC=YBjjFjfgR|39;*pZt ze(_oS#$>x8!l!sI9ti)nzf1;Jcou;`{Osv}gcY^l`bRYtjx(dO zrF%VhXC>ir6wf-)Z1(r>UVF6xLmofHu|&7NbiMLJnpDE@&H8}~BQnjW2jY-U+hgBX zSE9vbdY5)!_!VJ)<~L(6rb_!w(WCRJY3l%KCJH%7wBLu6F$ECcegm3AAbZ`9lkM9b zVjz1$N3#EdTEwUL^-Lvr%%Z98qwR_Xp-ZjK3*V{*m4x00_U~KUV+ex1=W9KR>Pi_Lj~~SA$)0wb0dH%JX%tem?!_ zoRT2(nA6!aSTD_JmbYgvbpY-Ze-N?Rb|U*_61pe${Wgq=R-+&%h3V&rMT-%2Vo{rI8y94d8{2;feN755zUxG> zdKV|l&SO_|=lY-QE4w$M^HtUAm$b)=y2|&1wPhS&X^E-7-7<{jpy%P);cumqZfrol zhiue;W}9g*)xLBCKx`if?RE9i$f?%4GT_7Ec6n;Lc&`G3^$}Js*L*&-IEmqE)65np ziO^%2zzK^NmnIa27>PQ;;jca~e@^kEMKu3(U;6s0bz-2%==b?bubvWr${D#&_nWzc zu-&_O^De_ahO-hUjy^Jy+Q`MbuFU#(%of>dtTF6NXdT^8eRZfD4tBS9ii`-Zd~@M- zC&??NDNtqtVZAl~;$Zuh;G23V=!lybOe3@q3%~8wu+q6O;EdmlL_R{ML)<#NjplGx zMth_rjAGRdx&m%ytbjFL!f0~kdr|g2(%mDil@JpZLp3BfJNo`!WTjYprfSk%M`8U@ zSY7(^OkreWi&C4J{NJ6_qnK$JDCO6^5HRSd1(3LxL3?{U zJa)99LH_jw0of+)F335|2v0wg7=x|YrjKZ=pI_|CiN}%-T zD2C|p0$gL@L~(bI$m1PaSC2F-Y0|5hG3$c~3m&R*EF(WC)w2aN_5o`pG%-f)?XW;A zxle<`Szu)!W=C<28mOGo!&pNe(P1hrN} zdXTWY9fIyFd}X7zsyVz z{iW~~_BWXq>8S_E#e}N)jhr2&O)uAt%V?^cF_j(q&Ha^>`1tV!5hU_7PJvUBs0EU1 zd%z&?&Vl?6qmKkr&IWXn5e@lvzT7#K$R-ms# zN-FK}SfhWC|4f(2k3mZe;XVUFW+>=$Mr0bT`z#U0(--8ag1M4}ojF4@c+`k6`)_$X!jawt|02AeHm5KwbR;^u<5NW#JwhU|Uh zMaZVryw}oD^a-dQp!`+U6`@L2o$D5(=TjVR+3%&s#O!4?-|=@3%j@v*tEt({v}2#>=K+AL6)fuv75?fzv#a2KXx zF3Hl!_Cd>jFPU$T+hvex;YzE=@Q_G;V-)?q$FE}+cJpmTeq%94o%IvaYqa2RKt?IHk)tGcLY7#@1)kpry9i4vr14^GsH0Z`jBh%M_7d8?t445k` zSL=5l+bbMg5#=9 z$)B@K!ADyCvL3ABfy3OVPn?GNg?eON{cGO&v zLTb9KT=G>4DB|e%_dC(Atl}#_MP${|n#MlYR6Z7(V1{O|uV9AFZ5|9~gurzYD#@F< z{2(*&dPYQ66+{b8ik@XUZ-~ez6A>}b(O1S!FyIb%df=w;vB382@s5Wv=>2as&wWA| z-ZBm4^x;*zX{gGLTlyzP6S=(%`HG5xOokufVRJvnMAG#3_U1t(U1?0P`0x6oGRW)9rv*-k z(;_*9yJ4gXi7Ot~NGZpp+rW-$W6+f3RNzM+oVCN+&n*kp6|nKpzK79a2-GwiuFH*J{JPxp|p`176;?$JY;Hm3H`PIloOx@+$sMQw>P^hDsrV7-JzF?xlYb> z3K;1UIX4Lnyn-`xPSt|@kDFro&4%lbuG=G0V!*du?OVv=pkVM)qYt+qJiGDX35{c{ z1#hk2W;glVrXOy$>($kmnE%|b3PkcSCsHU574zkVn*wkh=dZB=gZWSCg50VadZ&OsLot+Z~2QYL*rW zNNUC;Yc2gfT&4Qn0NlVGsRB z9CjN^zTPTHtcG?qOnu=sgQn5$wJ*HSPw4l^(W*RhG`D@b^)e%fvF1t&)HF{-M8#lo z%&_Y9Gh5`@Q1GG4@e1AVlEE?mb6wT2J`I>JCIG*-_ZBy43(PCEr~ar;iYCf}^ko1_ z0WA>?h0NJJ^(wWmf%fkC&Eh`v85Sx63l*(55jLYW9XK@_Ljnq-Gb@R^`;U;?kEst| zfrV2s%}{(C;U-+Geb}e0Xd+K3`eD${dkj1bu@--C$I*rPT40t!GP|)7L7;CwKv)siNjl0srFdpbit&`1%K!n-FXy0UcC*n z_5OWzlhR=+LtX9g2cO8ndS31C{ULjT-mA_oR!j6-tnE%4E;r#X5F`bG6^y^YEQ1XH zPp`d|jiD4WxQOh#DX4bzH0VfyYNW1-=KFJ0OQ4 zT^fn5ynsI>opO*8L3jGX)J;j`muKAyPFamxGjZ9il`9A>@z9_FGUzi zL&sRj!fYr`VztWv!Whr7X)1(NHTLSRALR`(aUSRKEevPxxZ5Os-4#vSy?u$BG$@Vf zuaQC0pVms3RJ>>TUQATB`tVH9T_|j;dRJ@1Z0X%`7HI}Izb5)h10jwX=bt92sqULG z+x%COs?zeSg2Ix+px$~L>+sbF+4wGZ+D5z@$b5t!kdJdbn9w#8qHvM&XFWT^fi-Dp 
zYqoegM@7-@*6e5=UF>b-No#7T#1$||65j+@<3zdJ$^n+W_%XTdyxrg>{OxUYkRveo zz5cxx8C&%>Ax7}7?SR~$ieWF|RgujnauL}3@e4h;PBG+;!U_Ih08Q=O89 zr^NlWeIJf=>iTH2mpNzD7Jt<=cbj5?95bl2;W(xHI&#b3lHUJmpdZs>B)!y5zVCy- zgumhYc%@brzN)FdIuRu6foBr@$`$ilAP&E6Nce{*53**H)ETdqZ=irFBdU z9@x@yKdcaFO&qCv0r1VWUqNAV>LEIVCg>$)m~ikSo}qCcm>k=cA-@e0xo z`l0B;!>iNhs{&2dTVxmB;(kS%JE(lf=v@e^yi2tT54ue&l;V7G{t7qwVe##h0S1SG zp^4>cL`;o%W90*CiC=G|jjReRQ$C5~=f7XVQ)Bt8M@6n&qm@UPMASJ7j$3A(`0@MZ z`NJv923o^I)q$VRnHTcEe|4?ltATpW3D#-ODGvL^wZl#Dg&V8RP1SkSsSqQ8%>({! zWpotrWXhxc0BzXiT8}n2BQfvv87L(e2m^ljL;0~|fyp_glmG8Fs#=SLu;7Qs0IOk| zd7g`r>|9ICLcM?S@>3Rmh$}s-DQ~0KFG_JzDT(1aRRP_1?%O%fzuUF{bW50D@a3qD z>X$fIi4{fUs7A^$)@ePz&pO*8U+w1Zam6#q7H6wG-{-U+FqjpD7@ZNDmfSi=MJ#~R z5HPzuS97A4ezC?~;YSX^o0bYzlXHuni)RVh+hjog84zn(*{~KT zt>yTxSbczEP7bX50JB`T5@2X);T!N-U=VcF#Spcv*J3=t%R}ri?nv8N_uKsU+M1Iq zu!GN6CJPa!>A8~kVM}fxtX)eP5=l?C!vAx}Gc|T}b!7vqKRJ2}OC)Cs#LD#I-eW4Z zK^dQwm)Goriz7-RF9T%4{0zo4Yxst+is~&VO%^>T9tBF?tN4Lt*YZ^iP@+=D@@fK*W3=*6_ZXbCvdw8X#^;+(3Zy-e<8ht)^Dyu>5c8r_A+y+yX&}trkdXO)HO$e#km=lQrhx7+ zacjcG9BVnCyzMeFGD?c(=BJwn5X9aY6Db*t#)n6QQ56-H!qsZVE5qW!2{$MD_=lAEIN7Au76^jy-+S-VM(*t;l1%c)BXM^r=~Np?9w%D{0I1 zzb%M~W%pQm$l0r#p+Tmo5RG zD+oLjyAm08Fe_||tkWqI$w{l+Ie4$psDJBZ&6Hgt`5fzB4HfUt_t{H7L&m8_mlbK9 zF_XF2rYMwDq z8ga?Tq3N5f1IMiMGBV!#gqj|gd`i#21uoW@Jl8xrer0!L`5IxW&@Rt#uJsdpKf`40 zlnv!5e>_9C1>)&p*=q7n%}WzjW6buYv9?sr z4{e0SAr?D|o}?rx_(}JaxnM(&1$y?;jJWeKD~h#_xu_LQn>{JJrIYSo=x=bz=F^>3 zC<*_eHl_xyu;&I$Oes?PirK3@hjd5c5j#73g6-X5>D{WBH5Nu~mO@2Q!zBKn?BRcU zdwZ4f_!ozsIN5S7VE5T#Yq|QHL>{b!~uKl4VH^3=>=i3cQJk|dB}qWaY%K(hqt-Ug;t zMM=pKf9%=!;&*yWd6&LI6_j2EhN_{O&?hr`@fz}S1#hZN-oAslo2aCCnYr5g#~p30 z!ffD$&F2F@gamD=0hj23e&XiqFC@5GeSLgPu@n-WFX5Z{_R>0A<<&ra_{CzstJ*=b z`C(HR+bZ3XI|S}2gz9{OE)+UaXKEz%5xO0pbCbW$r>G>O(-!E+wxo00q<}#uIiQa_ z?B)iarlB17bThBa(+hE$(_jBF-%<35xZ9gbU1|rY18}-*DI=VTVxYWZ!Yxw zhhaick^z;twz9S)tR%#6P=^)0ve5{H@kB^{3yH>?z;fIi=pQvRDFXtZepkk@VTz@2 zJARlgyp?!0GUlQu`;c6Bb5r;7pq*iF%`vsz80s`Z*w<~VcEc$PWMi2U2lO)wk_c=F z#GNd+>$YMNu?2@^x2#+5ET9u*UVAfciXO>d_mm7g9jjlL;c8M>q$D)@r)mU#)3{TKMq?K1DarC7M+TxIyGkyb@fkhZjA*BTBlCLYbh{##gS$D8 z;P>y}fx_s;MKkJ)9L!_60h!QEb}4=nv-$4WiGc9enzEniU96x+%#YE1mF%_rH`ybT~{rW1Yh<2Te8fM|NZ-`TPuEIIy~ND_pMuMM$Xa+z(P@W!K2ATz*j7AF`NG2sOZPdEZ5fASvOWaBG0+$ z;)NS7cw9s6p_$hP2`%M5wyVHvJ~H*F1+BtbSOI*v987@Z?q)X+kCV{+GtQ4ZGKz{U zo12><($}BNxtac{nJ>E3$K{(U@7sJN%`0vQ`D8dXtQ+4=mxbS*a*QZgRyf8D{t7aM zS=)T{%aKf(_U(=5Z82PVXR+vE22Q&OB4k78chuC>1R`=t_~XaI$7l>t-Nv;|pJ>Tr zCGJnicRP#OYn6$#ErE-;DINnQ_7F{hyO@LRU?Sky;Pg;EZOf4ydE5PN_`{_yccatP zHRrCVQzQcYj$`~cbKktNp!-G`YyduxPCjcO9;SQ`7hR!Nr)Z9T&H9c%{iO=4{ous8 z?dSz?aP$i~M7zt%%eO@xGipGG_UwpyL2nS?P!ENbnO!^kWU#rZ-1jVu{oiJiWK48O z$7xGK*~#1^ML|jD;cuu&x+I|xzUK@z6|ojFF94yTGNbc5_2Cs2AQKcj(zZRTUN;=m- zLy(NL77Nq%p24ATBjhRn&Qu+Cy%xy2#qX7f)8i8p88{>xXSl2L+c#%ClZ|rV{Jq(I zbYkYWk^n4>ceky0SIJ0vEu^K@tOp-ml|&xrFF1y(3GLh(StpGlvdJA>^X|7VNF7SF z!C61Ce!KXm`rKuVXm~6l@7G8DjV@jI+r}Hm6!Wz0*|;=u8~g!#9YEnMnTgfyUgNxe z(%P#CCHl|wK5$n+>a2*#6idDU#Qd?@FGR?^Sc~2NQsz4W50gBSVU~BM*m#3lSTd>X zN!#LObz#mUnpR$hNk12aF}<4t+2$dT+vKwi!(I@=K%Nv{&|@d4$E#N{VKlCfQ88l> zv%m{l8U37Hm$!S2CHp;9F@!I@!gA@7+B~WR=Sc*;U07IEX}gnQOtge>(tKak$?7@_rfT-oy0o3sv+V5fVg~M5 z>l%IIm*^?&ud~9~%j{*`)!J)Kt9EqOH zf&#$V$Q%ZVSUt!Tnn=d#`K@w3dGdZ$WuKxWG!~Uj+SD{-9lGk=?43d|rkMz)PJo5E zuFy7KvzKLJz26|O^*_}-FmkZ40O}_8 z;Myco&x%Woxw3v~Z_nuJItfp;Dth;qR};N!#iBsNIu`Ti6|qt*|0< z#D?_D@1IB0BKl}AclZQ!chD_6-PD|}3@oA(T^ooyAzO6O@!o5JA#x29j4a8DI5H1f zLu!VWpFSY1Wus|D=<=l759J-uruIxxW)L%U<(+9zyk2lpL--rWX-9uQG@fw$_@h-@Ke7qU$=j_uVa-s% z7SXk6ZqbTFEU8&ec38s0@w_VW)?t!N>l~LOc)k&j7Vn+s4FrO`^JC&cC<&7)3AJd 
zdy(Yzp&b#ILZjef`I6vuH>}0Rk3%WT7?=#6cx>>?($Z5QAtCy^VquR(Vhwi{d3?ub zwY#*Nk+(vx>+GV)lVRAgNeob!j{o!pf~Pq&9+^Oy5t1JOKMtO>w|`asw`{{q!Ib2D z7{=@S&GzL|Lql-M6l41q)LL4BNtb9Y|M&P0ylgqp4b})tIC$6aDGvZG=qfae)Lm1Y zjS-+%#FD~+Lb!34gPnO>-2}l9x{7iIFwpuVsJ$OH9jwoh9L_3`5VpJ@SayxUa;M=9&_Ear6ClLA@lWqnY76F4;S^%oEaP z1PhC{|8W7r>s*M?Ki+QUPzry*Z%EKfB-PC_sw_E&3Ii5bX;&6gnnZ?swh+xpboD5t zl#R1!tH9=+g{<@rtxx+KrR~M;n_5{VtgNhn?T0SP^ME?wKRN(?ogu_!(neANp4NXJ zjYptaMBQCs5aH)vRRag>#7^FINSi#N`J^Y(uZFz-CZ+8i|CcW18eXsW+# zs&R5Og8|^~XW=@HuA$#XdJd&`(F`zh$W_c8cL(qYOBE>lCZP*2xjV-vCp&I$60qil z;w1R-u`#;A?-&^Hz;BkD8>-@i65ZV$6#*lA60~aD7ii{)oj4|_ixE?o#@v{K8tHys zF3ZR|M;M&(6@I3pqvITYbMRBZf)ibN7S7I6!&K=$dckis>`TyM4m8kVrhV3{c}Upw zkye6g)>CN=7k?b8iBJ3VTiw%|eD8?ZlYs#{a5d3lq8}jAEIRw1$rsQGolMEVPR|xo zWRp9^`hT&pd&JNT43WA-?J*(?uRhVi>9DqXUR9M{Wo0FJ3k%tYNxnJ-^9Rm!K4OUrrWhfC8B#K6i2@aYHyAE)SZ49^J;`fY$nh^yOaV+`Z zKH>zN-%FNA-9P>e?682UyQ!{j7(G97T-pK-8W7ONw)pwYPIKzQC&0WO>Q2x`H7IvY zSbl;lIGXt_@+(0J2GKFZqMEq0PcJ~IR{9B4H|W^*f>aO?w0{Fy1P0cw*tYKO#9zPe z2J_W$Klqdhxd(t^ujt76GM6bm)&KcPC7ARoa?J`M97{TN|3~da-SOs)UuT!p-V3Co zwl72Lrx%tRps(S$oIK|kNYu}tnesLz>F%m-cwQghOSB(1waBWlr&Kl0E}Fn9R-_x7 zezNYHvr!nbIR^E8I=$;%`IOZqrAv6WL)l;(0r*1WawoKdKsV1uClK4i0F7&xE)nE- zyI;9X!xt;A#{@0hG-$A)vN+ifh-V+{W*_y}oOe4)pRYjdYq>ijjlo{1yS=*5<4LWC zcaYmMjl`*Y#{t`1E1eLaztw$v>oLM)b@h5dlgK_vm)?=qlIIRqP4|CHn1^D~^#%#; zX`4x{%(e{XPQN9F>V5|T%@j^pOBT+WXTn7B?>=(}AXvG!IU_+05u5M5h zGCQiwe@cbOh{y+;l0!d$AGFG(EHoWer<4v!H0aTfjg6&u{L3d~a9JNxN;&~YOUJ8p zFSc29{P!4vPAcIizDh|=JV>r-_^C;`o6I)PO{BMLyX31X0h}gF#xvm94KNOob>l>6 zrd{0prL$-aHV+(2#YmU}tf{|WTw(16SmVTp%715_`_0p(Hew(P`fkauB2hySx_Bt~ zsc%Ckty4DN%LKR!3=l|<{_PdU>S&!x2X&qaumLUkbnd~XHL}}WDMtp4Wb}C(Z~wr$ zy%J^7o-F`?X9(ib2VK4rJUBD4D=;*IKMr)ypym zTxbR8E+k~Vfl6a=uNi}uTmY4JFKKFl*0yAJ;zfzsSS+kC6t+-YE4H&-Yt7=))nAg( zIQim(V_}RDo7`zRCYW)Mo%6~gBIVOL2|Xw=zj+{g$zxZ4>d}XSz@96@?70-4m<9;f1S@xu ztsqS~5cpT%Q4&97>ZenmMp(pDc?G04iNq%Ow1L#*kxbxARuO@=TSDp}bjR(&+;D;c z7^g2Ut-IQEI@8->f4qjMcARikijzj#^SMOrJ} z8;MbbnmE8UZ&YJZ@Wib9&go&A`EfR4D4UjIDrRpKwo7EB^28%Lxr0~VP^_!B5OjjgygX~MXJ_BjkBDLG9#%`i`rw7^&=B>(;gTqG3!8%DSjX@ z>W?#hQK@^IGhkZL5C!CagrbM0K3cT}wWH(c281Xa6V&TpfAaCJa8?k;UxR;*1P%Mh zM%=Loohd*)q4V+10+)xRLa&Ma6_IYE_)tYODg5>o>j{TWqK+5fhB=5LX4vkjRKBze za48W=Gacky!`qmwz|rD_3VzyB9f0C<|3GmbP$yCurp5J9IRwvvqjfnDaUy=7s~3sB zH0|wdbw7HhXiYud!ofUS5z~6!I3n7^kMG$Rd_&?Iz5?q_@JZD$2z64$T zGNItc9}vF#Fp&VlPO1W93SbvRy@=!l^u%-DwM?_A_uZd?tKKPxdTBrH2V;F2eKv1g z?Ki1=pofPG9Xbb`AK067cqWI3bjZ<@wpnnFoDuwnYR`5S3tSRR$=u~5dF=T!U&C@5Q5y>3iFAUj2T)*<_+dFi`S;S zXg8IT#;ISY*S;SKzb-D!EOQlJYwA9lB@D zgiZkQ&~9D9TSF9eyN^v@gAU^tJilfLGCvtn6CyUk3qWhnNPspf=m1d`8e9TeIB`Kk zkhXLl!=)%hdJDZ@UWX!-lVKe4*k^0wt?i&j>4wnZFkebr`=d!~D#UrvRp%=s=fNEJCy)Ov2bksunho!ceFr86RQ@0uLk1B3PET{H zOH2;apkV1LwsHmED_z_GB4daQZdjl!Mp)JV>3K9b6~S zuUXVu{s|Lm)JT1e$8O^l5=v0+hVUvC#BGr@suaH^Pah!1jOY@9S=(}iE` z%$w=9%1UIkZS=$~Ecuv<90P*U9$BSAXr!HGs>zOB$2SoG%Cjm-Tq2Xke& zv}4x(^Gfu#aUI_aaygI#r}z0j)_r`UHNymY;L*udufe&=Mb3cqXFobJ6a?)?+Zv1v z9?g!uc~J|rW4b}ozNZKetFc-ddmqvYi9OWyd&Bh*b+34{+vTk%$U)324Yb3jwS$n? 
z#R);D2k=62sR^gvu%QvS7*79!lO<{Q)F#_S*CoQI2Q!3lNmT4}?c3q%lSh^kTQE4G zCd_2Va%sccAuXyJxL48=2ssW1)LJ_r9MXR`lNH|3)eyEJinlw7 zaT>+`laGOGGcNR-zroFP<${wD7ARMusR>_8QK2gfX;0nkmS_f?27Uja^bojOm!p!_ zMAyiwH!#&?gQm%apITewo}hVN$C7#JIbMq`Ap06{^%`bUG2Q^h?BZluGWy#*gX-#vJ zBAg8#g-<+3HL6EvuUVc-O1wKdXX1D*ZFF^&`shGVf3WL&D|?z`Jlgy$>x$0J%w)q+ zR2y1Hq@GZAz5ACkXQk}1A_vBYbf@e60iWs7-Jni~nnp$l4?4tlA#6Qw6T&1YvXV{4 zj}$Lf&&QqSglPhd$j1Ll!x=#;<~@G+d6YagWs2@Ph?s|&e>Oq$``IJ^ZrDr>&NBWR z&;;yLFQf0t3S(n^!TYd=eIY+IhG`7xh#xF3##9c+)bUDglV-u2;z zj*2N;KyhCTF1)mrm#4jvsIy(kOL>a;egzz2$qeY*{)alV z)IHA`@Zov%hd@3;OTxbwKi%|*k(sCM5-7{rI!A5Vjst&qzt4=mdRF585;&muwx;I9 zxX=ttzz0sVFf`G6B^F93LHpn1NTS<5ITi1w>&?sV1_tOo8f2Qo%08=58V#SJ8#e8uwN8a>(`b0kf&%FziG^?M(9;f zK~DG#O$S~iGs3y}NS%^4NgWz_^rgzQ>^Fb3L+IKAp=B+rVwgUG62c|-b*hB5_?J;) z`E1wnQZf+#4fA9-MC5}?=$W}H{~}sejx+{Q4)x8P{zDj-iC@PEFAR;h$np{7L&iA= z;W0PDcMoWNiX{@&;2wb>xVYw%kf#f+Np6*)LN=+;FK@u-+}ydB@jxaolh zM3lsvQrLCRuX+9xT?E=UjcqI>^~V&3wHi`8fmQF1Jk`z)28nYpzANhWVM3LOxan$d z-vdRf4THmAcQcGTPRx7|A#at}T9;Z5UM27{lJ-tZh?0TCPffU}m7(cY)nIWQ+3*xab}yBF5C(N`&OF@`Zg7^^0P*|#w&HKDBCl}BOOL)u z)EOsNEi+)Z5p^P@@^&PoQQ1EX*~31)`w`cr#E1eoA!dD>_N)TND=1~F#2*V5kLGd*P` z+jgxUPV#WvXX|n*MQW9|loIQrH)^w1S>cFv!0AtZb_FLtAEQSkUmRG<_d7@vWyfNE z9`S1v0UMMSUF>IBwg!h!-MBs!zIaj*rAKrqU9OVynX8oo7f(Js^KKuWvP3hpZB7G= z=a9$|yq=KBtM{&{I!VnC-L-yno=6N+WaW~RQnhkfiV<;ALqr_|5X-08`XUw6qVS;6 z7XO8k^?7?k41Nt#CO?lfJqjw8;ZE{|d0)GoxJ3gzs-{@K@0~gzD=z?hhhkh0*yUza zuN>G+)kf#s%g1B7fK%C|6{KVSb!71K?p^3gdV%O``Wai%W8{=>o2+#O%{mW+*70T* z@OQ1cArY24Z`wlp{Am_!-9z?{SW)NGvV6+N%Rc#R-neV;cdsS?`G?F2NzoV*L-#Z% zE@sbxGOs@agg>*be#axq*OIR`;IyN8y1VF`$Z;1oX2spPh`x#C7~;L$pgBeu$%N_H zVqjUi78{=#+EhKx21~mmgGgHRM2d_5z^lP)UNUesmy2e_Vcly*@i(=&wBJpR^)6|x zUub?Zmnu(nH#G048}v51KaOd@9l z)TmV>7+I=IKNNV;Um;U4T2j;m58tOu;G{k1_%l0IdKcj@jTnGX$O};(j)0eiM z*C?A!zp?==rh)F@FO>_TA2~wSr|x(B`Y}}+A%ynV%OjF~H!GJrob%XCKH(E!QsKNs zb$g?~xsJ|pUV(1U^aJ+?E|Mo`M-vFGmT-<33q+*tW#tOa*kwF&8j+put}+ZcHbqn0 zhxV~|UAlxH`O%s!nd&{WWMg#^ug$eit{QwKdh{2dT!-oU82OV*mt^k0hP=(=%&p(? zR>c;V8r&>TDn4`rzJKNqTi5=T+rI~bb=q2g!%LGNL|gwJ5m_5;MF96#S80~#P{5)e~0 zwOGHgWGy)#J{y+t=HXKQ>usklf#?rC!Lg)0HFKh?n$K{BE=*`UBst)B%-6FjeQ|3M zB& zNZKGv#S*E$%W6?i!4evb++b2+OCR$; zQm(#$4qkuNduVBBdAY~By%4ot1mzN0Z>{c}@wYd7x_%cL$TjjMA6Z+Lme%~q{_{dwMBz&>kJu1z07Cii-ZTBxB69N<^M8) z@sVVUPj`zZVhFE2?_W=YFG~^hWR-+O-PzdRDm%HAj|-wJ!~e_zvVUOglY4cr+!gG4 zy>a|^sJr+7Gj!Kl8|LYD0dV^l8hpeWy&^|nfYEX-S_+b{`>{s9zFV@ zdA-i-oa4f=+zu4E))Uy7yQHwtmQl zDsLF-c)pn}H*@yu-`(qY8KJ;b?}ICksGoVXb&|Jf+fiTd(fPFC;_q)~_wf=Gh6^x< z_kf#*%)*QK6`*_YGi!0tBDc?19hKgAp|js$qD+mNd z05VkUquif17o;@I16e*VBhRsJ?3P=)u!+poxrcP~MoniTtybj5tV8%RSi}VfY2u0u zHmY3jZs#*oQ#RgxBRI8yo3J1G69Je~?%nLT1M7uS6GlA7dcUk!(sOxg6M8=N0=*V3 zIxn&*d44ab7F6u;;&JN zw1-nnF|h>{SjQ-)NRqiV=}q2nW2CZua^ce+Y`I=_n_X$ppV8)lR1e0kB?U9BmX%#y z^3hd#^ryHBn6(FlJ@|w&CuK5+W6`v>*+2bRY_^-wt1QMP)A^_&e|R=#7W7V&jv9`! 
zc~3^esgX&$M~MN?RZhQuV2z(|jUj+l41JbzVE)&qR&GG&(22kvK?zxbH1=)Q99I?Y zME5NHo!a~Ku=iVRR#2K+gKa`Y$_{f^>4O12&ewMkKBsYvF)r|#lf65Oiw#H z0%&ahhs9*CcJ7X%?zgQIQ#Ek=pZkKEl$8~n+YhMkmX<$)V+gsD&N_)p0Ai8?rmg~L9C{&rYV-`%qIUB`0=oy+O1 zW{uCwqQuc|m;W)pu4{MYW`^JEo!*3DD!XC^RjJ|)tR0GhA^N%V-5T>FJAQw{-M-76 zoulycXYr;}{^GYkW5|nUK6_XusYPmUfyftfHU>@Y!er`@4qU4l4S;2A9!i?=HO|D$ zfQLB5jS%yBhE|Fi!KI=82lwy+O+3(`lTj6VwUT(=Bsz(7|NZG{V$}PlivhR%F50Noexve z8CZx6MXPLfxT*8lz`MHLnXj574uPyzW2K>&;L7S|->2t~dKSroaNJ_~$b^xi`h27edKF1f1W-YyZr$a8>nN)F#ITHl6 zpxJ6h6JNx-rn_O&bQiazZ0MAE_Rvd;BkkO^n(?#C(T*duj^nS!bYY1QF9UinEp*Ac zWH%MlBcMFl1hhPMA2ACT9Z%>lvpAz!*XGFTxt|S(Z=86-x^Nv8QUZq`c8O50xA&c!t(1R}Tr2{{LUUm>B|7@PAx7n)tqJMF%P&{icO=>y9>0W#} zukLDk1T;Ba4%|~j%>P)FL13p}%;}Srd09*`*ol*nz_vr)28Uc0$8liNe^BfL*{v&= zP*ES0V^(7?()X%t?9i^u;L?_FUYNhK3nzhxFMB}s5}evx4KO`E%HAT1qh9~D24Vw3 zviUryY&FL2vX8uaa$fr!y}NHEP*5*m3Lm`|@!ZUXL%*O_5E9S;&({bu9Uh`9xC1zT zBC>S&#Ubq}`3U0o1qvVGnXfVLDDa~&KauD-^LM;u(daXb)3q>~FwOu?5>~BYC)h3| z=)>jjKsJ2^3a9fTP84mpWF=pBhO=v(6TqbKwH95`OmQW=g z`um!fM8o>>sqhn(X?FBTil7Ns)r9K^#Fo6B8gL=Rr2vu^RDF!z68q-&nIr@_Pu7^X zv{RZk!U*N(XX9Oqd@$Af1#AH#mX$Qp;&S!nbM;p%YYmPO9Edmxy$7$i)7z;iACn5I z$6koeL{<8xw|R?^4vsO{Zrc1LWP8mwhg zz{`$Kx$q1IMQ)t1r6pSUuH+}~mChb{03lb8%^1=7tTdByf&=UB zMPzw387yp^8`oPxjeZ16-y})WfmRsQT#Tsrt(cY>E|9ZHYEg8XtZDSVsSDguxUTtG zkZg?qDhc{wY*q|P>BvLV)P)n1y?7YrZc$*fR}nOnhkof>Fa}!~b?|#FV#OhabL60* zMqK7gShxU28Y)56I_Xa1Be6s90TXTnoa0)b+9RnIZoCE)y+s|y&{^4ier`xG!pZ-4 z)H{lJI-;sT&$jW=n@vubq8o?WH$+!B*Y&O{Lf(A4$2j@;WUtg(#591Um8>QP{S0C#fi~JEUkJj{5>JE@G7Q4*^$t+{HV|bc{`ojfGYURcq1Pf}ck!pJ^+(V~c>} zq(GvjZibS%+dan#S~<<$LV?TpQU1fNt~zm#uVHf$#OCsMC`75EE^IA%=N)M-%bvk6 zK@JD75KPS8ONB1QCA{=>Y$#`wPesE{Y@cuI+YPjbBw=E3`a1UxHnGrCg#a8EpzskP zKadBmLvK;N@(kx7t->;d$X$zc(|@LR6_JWM0V`qk<#1{w6MUYBZY#EjA7)|T#Ar8g4F`yR=%7VnuKk6x}X&tucThXfw0JQh^h zyf|he#x@i%?GYhj+zl65GuxLyL~k*W^gzLR9iDnG9eZL1W2|=ZRS9=mE|TnNGSlMD zU@K*57-!i!cRS7MtU_3YJ|d2R##zm2K~45Sbf2j6Y>X?~^|wpR|E5{3@8Tj3U7rh!96!^+Ps2g_6xK^6nA z@}RmS$wdjTJ9dHqu|zNds*^H5`N*hbvXxdmrLg10MyhPg^yc1*LUrqZAThOuX^cnc zT0&`F#XR}*jC;GmTp4~Hlycv=A&kPC>ZQliH2+~@2pa$^F|go{dbiw)q?UK430oKD zq1lPaIgE|hK6(5S;{hn{1EtdvFW=Msv9Nx(9QU${&t0)AefJP?pT)nc6En4ySG(3{w5DT)F!FZgZ~67<+z=6l%($l^CTZ;E<@`ZTNZe_s0eb zDKxaMX8ZucXDeC1}Lyk9}zmD7Z#Gmp9Pj?ntLxN zO9Iruv{YrLMvVPq5vXI53&>U$v=&Ek_tOW# zM6Si)57X?AEQL!jnZ=xddE0a#JfV!~5s!nQHG_+)jvc^aky*tKYU|@UJK;%Pc%qz$ zJ#j5WoLgv40X?*6wif@hNnIRnWMD<~L4O#pa33g^L=)%i#+v5E(4`uW#GfE>JQOCD- z;ud)-oDX)-dFwyy@<$n(6AV>UEkme&keW)!((Ij91o@4@@T%xvjpAI##og-$xj725 z;)$h8b!FEO78)@|#EkAj(kkK6OM>aeq1@|=PrFdK}pKO z4aXo?ib1_XvbY#``%K8r3`BG;DVXhuFp9Z)4Td(k3PVQ&LjF&{6j+(icKjRR#!U?* z6uy{S@c}Blgny_i543X6TkUSd7)&m-Y?(Dx{dVd?bWWL{~z4 zt)X)RMm1-<_sC(%DU6&jTcNaYcm1le(RO}F`&{-Z{K8KnwJUfG<*y=%4B`@i>=_#>N~xV^4tIQ;``bBn>U(AeZb4(MSCO<2Bcmwx`|Ia{kw zbAs(V5laSmn4Wh0mBr=|%5|*u-RtU$109VOUvm@|zYFuClPe^1ro^j@C`m)BZEE`z z0$xg^>yG{31_uk0ZJid#7J{(-m=Ya7JGzgYa<0 zvte1+{_GAjA238)qyFkorwkhpd$2+@mRJg+?eyT$YgwO6$I8YQJ{Em_t`s9QQHgHk zc~Hl_|KV=rhr2DF((%bU+=ZpQ z`S-wtHqE(?c4djY>*prV#GM^qr+L$2i@Xq(3*Vp`U>fJ*(Yj~3zTkhR46NU}PzTVG z(VTZJd)v0|ZMV4=!pIq-R^Y5-LCy#AOQSkg8}^A|2=3e}ohSx@27BxIziU2E%k&rxoxxr> zxF_INeu47?A=G~mGhP^Ba(1ug9Fvejbb6?Y<;P;ru?&67HN_p?vWQa;{M&B|9)Iw= z)-wR;H&A2!hsD>>#gMaed}ivS-sCy&cb!?uclJ9Kz4gYUMJ&;zb=0?{Z`Z;r0(&;( za8|J>75b`O13IrhYJGoLKYK^g#*Bq3ijVuO5r0gRfBU^Z2&&&?I;lqC+=BlzxMZ2Z z6t@DB3be*Ar_2-2x}ClJRbhN~ICHjA0y#efDmlBrs-KnTyT0pPI9MkPcO}#N6jGsi zmZ;;xey@a=(!!@`!Fyd%a3-GJa7e+)t19$gy7IcC*t@XtH-l0;{JC0c&W zp}++~?}XoWDIw>rwxUAl8YVglfv#M=b^`%LMYnEH9{ysjh^OvE^J$-h8@Zm{Mc=3E z#VrMS7=-Q1;DvUG&d$8*BdFy#uIBU|fcpuQ^_|*&bSC_r#2zi!H`^%GjKhgSr(X27 
z#{4cPVFwlw2LkPaSh}|mzlOMl0Lz9*DY?h#~QQ)!YKH>PbXfzHg(#z|FmHv-j-$vslpv~ z(ty5cG(DQhVfOb!?BtaLn@9MO?sAY{i7nt=f;*tV=1_9GPZ>7jZ9Sc?ft$%A3YsBW z;k4s=+?O41FXcZqD*TmNuQ${LWBP=p8X!{}LPUv-b2m z2(E46+=?P6&U~(ar`oG8BTP;6kcUb|%(+bqep^<8#pi)VbIh$_ONrX>)sx`~n+)fz zO4*E=vH91r8_p_2ir-lRIGbg8*6(*n)M0sMb5o6~TR(W+68O$C9+VivmNUyysjB|@ z{#I|&f$Eeh5UOiny%v-pm1Amen6MC{KgC|Vpkuo{p^D@hqD>wqTt zcG9=aLYJK1EVq=}35>?9aB287)i!-&k4_D@3lqF`syU7=2V02vi@*~F7HwM{FRAVS znnOg_8o@9-z5f$Oy<*mGa3kHcr7z8rE3`=sp^3KP35#@CDM-%F&IbhQeg-H~tz{%B z@6$TOBco(?tyH^OB z`Fnc|WeZ!mWGzA$h7Y94@MM0`!1;aIey=32jQR8XeH>E{j0BuSi>JIYn0)qXw%3mM z1Eh>uszmC1*eZgfd8s-NUvC?T+3Ql7WQ!^zfyTdI#>*RZraTJF8z#mLY3^Qr`~5oj zNPQx#qiSy8(~LfZvpO+cOP=7$eU|A-)?tft=1s3b?tHw zVPI9tP)u=U)}f#!mHIPV($2VBom==^ag!(#wiGzK3&fa^`YQgl=I^zb`BX$tshC$T9$X+39e~nddBgY50l-w zXs(tIqDm)JB1Xr_TPugh25vXRY5N(7qo@<$V8mhZv_H|Z$YXm+!xA&>E4cuynKhYn zT_SiAq35FK45rw&%Lt`o;MzeOl17WWkXBBb-m7IYx$y7X%bX})vi zmIu!hf0lcV-m?4;x0tlym~dti**V^NKbIEd;c54wCF>w;ryo#%s>i?4T(9Z83JGmk!OK(IKe3cc zn^<`5el2j%N{ysMC?v57 ztU+HaCYO&i9E2Acu-N^U{@9(v8*(xqGLG1^iW z?jfbXwKWhbM(G7NGYe_21?TjuCni(8YJ$J9(u!5!HeEAB*F5SwX<>T&!#6oG_f09T z*V{Zx7t}2UaWAfGKoq}IdGJC*hIbr|`KIbkc1x-17lu9fLjVW<#;WFY#!vZ|czW%A zJfx|-TnbjI4BO$My3D|a#|-;gwM)y$Y_XbA<5Ps+4(t1T8$x11>KV;Ex?>k!t(=Lv zE5?KQ71S%?FpUH3kk{O z^P*V{kx^XTzR`TfH-ly}7xR8qK7IkblGvXlT{e)1}zkxC$ph` z0@SJUZOB$(wPSz6CGo5K0tobLpoB#m)6+!$UPN4-C4R!WB6ZPA)S7W zKxX68(!tBtQ1$fX6=gX%ciag%`G)1!n^+hR>wzMz34a=_a~|j<^uG^Ba<88Y#lXe4$$-1TYb5)Qa5iUv=X4Ur6-mAJb6R1=m zjPtIfVI!Fsx3N{C5-W>1a{rFXH)TbvQg+V)QBx z<`TYfh(b^u&7|E-?m-zL^D<5Q_zs1y3n;_5Z0tU+38dQJ66W4wA{F;1VGzIFths!! z7gQgb!Ct*DX|RX>B%j}9)Xre0Z>TetZ~!dQX8l6c4-xK4JlMm>DMp8J1eSSA8Fvq4 zobc!~tDDyT?G0sH@!T2kH7~)@b-KSTVtwD*<_9R!hFR`ZnDgo6I!K&Oe z>$l+RFA-u)fhspclMqDnh1~aZtKZP>8r)qHuX@F zmmjDu6s6)wa7)$yK&>_qZaO24U>4Um4Z zd}a20d#gJ68senixL(0~_{5@F%qo}KUi!wlF|k>ZQd&SbZ9ty?2B?w_O-GS4YsrA7 zi017>jOnO?fKwge7lM3VzYxv{8(XijE?MrDal`Wz9MjN@`nm5d0T0XE9tD#((N@Y9 zTAOeMm|#&mQ+!*OdO@!EQs?!euOaJrF-Cy!N@TpXMM$_2R#Qjd5cHbcLbFTGc2nlW=8Sgv< z!`!~G`pY@PsK&yN;?!}_b;RJUUA!YHBIT0?Y>#z%q?sU=Q*wFdebm z^`xUe_$4tC+rE~Mh!gkoSMgz4exn?Vpo5q5PEIrq{yZy|GQKJ0l_{u#h45Kkq1EuV zuKFTX54mhLuURB(t7hfwvD8(@68sPNp17JGT=|@H-0IRy(y2fhLC&$}HFL9{#PLsH z7wK}LDiQGq1^oZmefH7(@-Pivp(TWP)KxI=c*~nzn>FpIIVQKv+&Uq7H-QZ6TLbI! 
zH}|yxNjqL=C!qTL(38p%k1Ypzn94c{sLO4(;&#%dvGKuh)j=nM-#qR*^tna;HusX1 z(k<3>OM7g_c=vKkiLjS?xOPSE6TDtzf&fF%X)O-Y5UH_Gx23**zwvdNOqV4`m{i)Q ziJbXh0M_mtkg^O5!IhCCu|&Jmec^5QUvz28T$d?#4;i$k(``%{g(vy6iRmfR8x!c=-`Be6gl2{97;He1Ry(J)@;B+ zAl#bxnp#~{Anp=|BOO1EW;g8@mWWV)W-67Wyyq`|x4YdZOm`Ud_Ds$wY@!SwH{RMd z98?jd#X-7o@B<0Ok4RrR&VNdeVfH0puBDvzerEm71IKEowp-nF*xIwE;B4fycf+MN z(|x7t(=sw2-4YHf7^t|FeAY|Rska{P`Py0)jAVJRkxGeH0uChKMyg7W?E&A;vZDLt zwE8%nN!M=QO-ZH08{>Nd3e^u%HpHAUR4~_e--;tBO#&B8D(29AiG2d19RvJVR9ww@Zr!Jg=*9QMw%A(IVv9zy{H?yf zpS3L-JFe6|+c5@hFM=pu*+b#7@goVGOxbZkDjd~8D0X@_qse0hdFb*7qG!I`Uh9C< zeIqZfHf<%XU%OUz(bqm?LKq^s`nqz*M`Y#?HAHSPm71c}C(P5$nr7Qt{RvPn4vd#x zpk&a=XV$MNX}qQqb6w89HjVf>KbSA?=p8is@T-Qlj^_H%@aAeNK1pju(TS-bu7hS= zj#c(3uBIaGTo@&|X)r(Nl$~ca(@kc!>*P0Zgh2(N{LKATw{fapA3<{59D+)2HIuwZ zG#!2sJ>q)-wd0Tgjr|(HU6*n7Z(gb(_RbAur5p73C`3tDeAY*b%NQa}>+08fELVg=;cvt7LgJ%VlR|)#D2yn mUt&&p3q0rFp(B)L=_&0;E0rjPrLnU;Tdz%R=b@ literal 0 HcmV?d00001 From a4ae644e35aec8fdadd361401c96f321e4fd9eb9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 10 Dec 2019 14:05:05 +0100 Subject: [PATCH 02/31] feat(nuke): Loader plugin for nukenodes --- pype/nuke/lib.py | 67 +++++ pype/plugins/nuke/load/load_backdrop.py | 319 ++++++++++++++++++++++++ 2 files changed, 386 insertions(+) create mode 100644 pype/plugins/nuke/load/load_backdrop.py diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 816a7d5116..202798893a 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1230,3 +1230,70 @@ def get_dependent_nodes(nodes): }) return connections_in, connections_out + + +def find_free_space_to_paste_nodes( + nodes, + group=nuke.root(), + direction="right", + offset=300): + """ + For getting coordinates in DAG (node graph) for placing new nodes + + Arguments: + nodes (list): list of nuke.Node objects + group (nuke.Node) [optional]: object in which context it is + direction (str) [optional]: where we want it to be placed + [left, right, top, bottom] + offset (int) [optional]: what offset it is from rest of nodes + + Returns: + xpos (int): x coordinace in DAG + ypos (int): y coordinace in DAG + """ + if len(nodes) == 0: + return 0, 0 + + group_xpos = list() + group_ypos = list() + + # get local coordinates of all nodes + nodes_xpos = [n.xpos() for n in nodes] + \ + [n.xpos() + n.screenWidth() for n in nodes] + + nodes_ypos = [n.ypos() for n in nodes] + \ + [n.ypos() + n.screenHeight() for n in nodes] + + # get complete screen size of all nodes to be placed in + nodes_screen_width = max(nodes_xpos) - min(nodes_xpos) + nodes_screen_heigth = max(nodes_ypos) - min(nodes_ypos) + + # get screen size (r,l,t,b) of all nodes in `group` + with group: + group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \ + [n.xpos() + n.screenWidth() for n in nuke.allNodes() + if n not in nodes] + group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \ + [n.ypos() + n.screenHeight() for n in nuke.allNodes() + if n not in nodes] + + # calc output left + if direction in "left": + xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset) + ypos = min(group_ypos) + return xpos, ypos + # calc output right + if direction in "right": + xpos = max(group_xpos) + abs(offset) + ypos = min(group_ypos) + return xpos, ypos + # calc output top + if direction in "top": + xpos = min(group_xpos) + ypos = min(group_ypos) - abs(nodes_screen_heigth) - abs(offset) + return xpos, ypos + # calc output bottom + if direction in "bottom": + xpos = min(group_xpos) + ypos = max(group_ypos) + abs(offset) + return xpos, ypos diff --git 
a/pype/plugins/nuke/load/load_backdrop.py b/pype/plugins/nuke/load/load_backdrop.py new file mode 100644 index 0000000000..7f58d4e9ec --- /dev/null +++ b/pype/plugins/nuke/load/load_backdrop.py @@ -0,0 +1,319 @@ +from avalon import api, style, io +import nuke +import nukescripts +from pype.nuke import lib as pnlib +from avalon.nuke import lib as anlib +from avalon.nuke import containerise, update_container +reload(pnlib) + +class LoadBackdropNodes(api.Loader): + """Loading Published Backdrop nodes (workfile, nukenodes)""" + + representations = ["nk"] + families = ["workfile", "nukenodes"] + + label = "Iport Nuke Nodes" + order = 0 + icon = "eye" + color = style.colors.light + node_color = "0x7533c1ff" + + def load(self, context, name, namespace, data): + """ + Loading function to import .nk file into script and wrap + it on backdrop + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + namespace = namespace or context['asset']['name'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + # prepare data for imprinting + # add additional metadata from the version to imprint to Avalon knob + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # getting file path + file = self.fname.replace("\\", "/") + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + # Get mouse position + n = nuke.createNode("NoOp") + xcursor, ycursor = (n.xpos(), n.ypos()) + anlib.reset_selection() + nuke.delete(n) + + bdn_frame = 50 + + with anlib.maintained_selection(): + + # add group from nk + nuke.nodePaste(file) + + # get all pasted nodes + new_nodes = list() + nodes = nuke.selectedNodes() + + # get pointer position in DAG + xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(nodes, direction="right", offset=200+bdn_frame) + + # reset position to all nodes and replace inputs and output + for n in nodes: + anlib.reset_selection() + xpos = (n.xpos() - xcursor) + xpointer + ypos = (n.ypos() - ycursor) + ypointer + n.setXYpos(xpos, ypos) + + # replace Input nodes for dots + if n.Class() in "Input": + dot = nuke.createNode("Dot") + new_name = n.name().replace("INP", "DOT") + dot.setName(new_name) + dot["label"].setValue(new_name) + dot.setXYpos(xpos, ypos) + new_nodes.append(dot) + + # rewire + dep = n.dependent() + for d in dep: + index = next((i for i, dpcy in enumerate( + d.dependencies()) + if n is dpcy), 0) + d.setInput(index, dot) + + # remove Input node + anlib.reset_selection() + nuke.delete(n) + continue + + # replace Input nodes for dots + elif n.Class() in "Output": + dot = nuke.createNode("Dot") + new_name = n.name() + "_DOT" + dot.setName(new_name) + dot["label"].setValue(new_name) + dot.setXYpos(xpos, ypos) + new_nodes.append(dot) + + # rewire + dep = next((d for d in n.dependencies()), None) + if dep: + dot.setInput(0, dep) + + # remove Input node + 
anlib.reset_selection() + nuke.delete(n) + continue + else: + new_nodes.append(n) + + # reselect nodes with new Dot instead of Inputs and Output + anlib.reset_selection() + anlib.select_nodes(new_nodes) + # place on backdrop + bdn = nukescripts.autoBackdrop() + + # add frame offset + xpos = bdn.xpos() - bdn_frame + ypos = bdn.ypos() - bdn_frame + bdwidth = bdn["bdwidth"].value() + (bdn_frame*2) + bdheight = bdn["bdheight"].value() + (bdn_frame*2) + + bdn["xpos"].setValue(xpos) + bdn["ypos"].setValue(ypos) + bdn["bdwidth"].setValue(bdwidth) + bdn["bdheight"].setValue(bdheight) + + bdn["name"].setValue(object_name) + bdn["label"].setValue("Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!".format(object_name)) + bdn["note_font_size"].setValue(20) + + return containerise( + node=bdn, + name=name, + namespace=namespace, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def update(self, container, representation): + """Update the Loader's path + + Nuke automatically tries to reset some variables when changing + the loader's path to a new file. These automatic changes are to its + inputs: + + """ + + # get main variables + # Get version from io + version = io.find_one({ + "type": "version", + "_id": representation["parent"] + }) + # get corresponding node + GN = nuke.toNode(container['objectName']) + + file = api.get_representation_path(representation).replace("\\", "/") + context = representation["context"] + name = container['name'] + version_data = version.get("data", {}) + vname = version.get("name", None) + first = version_data.get("frameStart", None) + last = version_data.get("frameEnd", None) + namespace = container['namespace'] + colorspace = version_data.get("colorspace", None) + object_name = "{}_{}".format(name, namespace) + + add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", + "source", "author", "fps"] + + data_imprint = {"representation": str(representation["_id"]), + "frameStart": first, + "frameEnd": last, + "version": vname, + "colorspaceInput": colorspace, + "objectName": object_name} + + for k in add_keys: + data_imprint.update({k: version_data[k]}) + + # adding nodes to node graph + # just in case we are in group lets jump out of it + nuke.endGroup() + + with anlib.maintained_selection(): + xpos = GN.xpos() + ypos = GN.ypos() + avalon_data = anlib.get_avalon_knob_data(GN) + nuke.delete(GN) + # add group from nk + nuke.nodePaste(file) + + GN = nuke.selectedNode() + anlib.set_avalon_knob_data(GN, avalon_data) + GN.setXYpos(xpos, ypos) + GN["name"].setValue(object_name) + + # get all versions in list + versions = io.find({ + "type": "version", + "parent": version["parent"] + }).distinct('name') + + max_version = max(versions) + + # change color of node + if version.get("name") not in [max_version]: + GN["tile_color"].setValue(int("0xd88467ff", 16)) + else: + GN["tile_color"].setValue(int(self.node_color, 16)) + + self.log.info("udated to version: {}".format(version.get("name"))) + + return update_container(GN, data_imprint) + + def connect_active_viewer(self, group_node): + """ + Finds Active viewer and + place the node under it, also adds + name of group into Input Process of the viewer + + Arguments: + group_node (nuke node): nuke group node object + + """ + group_node_name = group_node["name"].value() + + viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] + if len(viewer) > 0: + viewer = viewer[0] + else: + self.log.error("Please create Viewer node before you " + "run this 
action again") + return None + + # get coordinates of Viewer1 + xpos = viewer["xpos"].value() + ypos = viewer["ypos"].value() + + ypos += 150 + + viewer["ypos"].setValue(ypos) + + # set coordinates to group node + group_node["xpos"].setValue(xpos) + group_node["ypos"].setValue(ypos + 50) + + # add group node name to Viewer Input Process + viewer["input_process_node"].setValue(group_node_name) + + # put backdrop under + pnlib.create_backdrop(label="Input Process", layer=2, + nodes=[viewer, group_node], color="0x7c7faaff") + + return True + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes trought all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.iteritems()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, unicode): + return input.encode('utf-8') + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + from avalon.nuke import viewer_update_and_undo_stop + node = nuke.toNode(container['objectName']) + with viewer_update_and_undo_stop(): + nuke.delete(node) From b5fe082a59061e5eb14d77966ecefbcf7e91dc08 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 16 Dec 2019 10:51:02 +0100 Subject: [PATCH 03/31] sonar import moved to process because in the time the file is imported, sonar is not available --- pype/plugins/blender/create/submarine_model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py index 29fcae8fbf..1845c9b222 100644 --- a/pype/plugins/blender/create/submarine_model.py +++ b/pype/plugins/blender/create/submarine_model.py @@ -2,7 +2,6 @@ import bpy -import sonar.blender from avalon import api from avalon.blender import Creator, lib @@ -16,7 +15,7 @@ class CreateModel(Creator): icon = "cube" def process(self): - + import sonar.blender asset = self.data["asset"] subset = self.data["subset"] name = sonar.blender.plugin.model_name(asset, subset) From d16865d96fcfe8b413507ca44535967ffa42140f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 16:08:07 +0100 Subject: [PATCH 04/31] modified sonar creator a little bit --- pype/plugins/blender/create/submarine_model.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/submarine_model.py index 1845c9b222..7301073f05 100644 --- a/pype/plugins/blender/create/submarine_model.py +++ b/pype/plugins/blender/create/submarine_model.py @@ -9,26 +9,24 @@ from avalon.blender import Creator, lib class CreateModel(Creator): """Polygonal static geometry""" - name = "model_default" + name = "modelMain" label = "Model" family = "model" icon = "cube" def process(self): - import sonar.blender + import pype.blender + asset = self.data["asset"] subset = self.data["subset"] - name = sonar.blender.plugin.model_name(asset, subset) + name = pype.blender.plugin.model_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) self.data['task'] = 
api.Session.get('AVALON_TASK') lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): - for obj in bpy.context.selected_objects: + for obj in lib.get_selection(): collection.objects.link(obj) - if bpy.data.workspaces.get('Modeling'): - bpy.context.window.workspace = bpy.data.workspaces['Modeling'] - return collection From cd79f0654dfc0efbb9f36ee040b83c4a5ce419c7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 16:13:38 +0100 Subject: [PATCH 05/31] added init file to pype setup --- setup/blender/init.py | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 setup/blender/init.py diff --git a/setup/blender/init.py b/setup/blender/init.py new file mode 100644 index 0000000000..05c15eaeb2 --- /dev/null +++ b/setup/blender/init.py @@ -0,0 +1,3 @@ +from pype import blender + +blender.install() From f0918ec7604734673c288e0bc55f1c5723dce7ff Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 17:30:47 +0100 Subject: [PATCH 06/31] blender plugins update --- pype/plugins/blender/load/submarine_model.py | 129 ++++++++++++------ .../blender/publish/collect_current_file.py | 2 +- pype/plugins/blender/publish/collect_model.py | 3 +- pype/plugins/blender/publish/extract_model.py | 33 +++-- .../blender/publish/validate_mesh_has_uv.py | 8 +- .../validate_mesh_no_negative_scale.py | 12 +- 6 files changed, 129 insertions(+), 58 deletions(-) diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py index 4535b29065..99095d74cd 100644 --- a/pype/plugins/blender/load/submarine_model.py +++ b/pype/plugins/blender/load/submarine_model.py @@ -38,7 +38,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Note: It is assumed that only 1 matching collection is found. """ - for collection in bpy.data.collections: + for collection in bpy.context.blend_data.collections: if collection.name != name: continue if collection.library is None: @@ -52,18 +52,19 @@ class BlendModelLoader(pype.blender.AssetLoader): return None @staticmethod - def _collection_contains_object(collection: bpy.types.Collection, object: bpy.types.Object) -> bool: + def _collection_contains_object( + collection: bpy.types.Collection, object: bpy.types.Object + ) -> bool: """Check if the collection contains the object.""" for obj in collection.objects: if obj == object: return True return False - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[List]: + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: """ Arguments: name: Use pre-defined name @@ -76,21 +77,27 @@ class BlendModelLoader(pype.blender.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] lib_container = pype.blender.plugin.model_name(asset, subset) - container_name = pype.blender.plugin.model_name(asset, subset, namespace) + container_name = pype.blender.plugin.model_name( + asset, subset, namespace + ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(libpath, link=True, relative=relative) as (_, data_to): + with bpy.context.blend_data.libraries.load( + libpath, link=True, relative=relative + ) as (_, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.data.objects.new(container_name, None) + instance_empty = bpy.context.blend_data.objects.new( + container_name, None + ) if not 
instance_empty.get("avalon"): instance_empty["avalon"] = dict() avalon_info = instance_empty["avalon"] avalon_info.update({"container_name": container_name}) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - container = bpy.data.collections[lib_container] + container = bpy.context.blend_data.collections[lib_container] container.name = container_name instance_empty.instance_collection = container container.make_local() @@ -120,7 +127,9 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get(container["objectName"]) + collection = bpy.context.blend_data.collections.get( + container["objectName"] + ) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -130,14 +139,30 @@ class BlendModelLoader(pype.blender.AssetLoader): pformat(representation, indent=2), ) - assert collection, f"The asset is not loaded: {container['objectName']}" - assert not (collection.children), "Nested collections are not supported." - assert libpath, ("No existing library file found for {container['objectName']}") - assert libpath.is_file(), f"The file doesn't exist: {libpath}" - assert extension in pype.blender.plugin.VALID_EXTENSIONS, f"Unsupported file: {libpath}" - collection_libpath = self._get_library_from_container(collection).filepath - normalized_collection_libpath = str(Path(bpy.path.abspath(collection_libpath)).resolve()) - normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve()) + assert collection, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert not (collection.children), ( + "Nested collections are not supported." + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in pype.blender.plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + collection_libpath = ( + self._get_library_from_container(collection).filepath + ) + normalized_collection_libpath = ( + str(Path(bpy.path.abspath(collection_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) logger.debug( "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", normalized_collection_libpath, @@ -155,29 +180,46 @@ class BlendModelLoader(pype.blender.AssetLoader): # Unlink every object collection.objects.unlink(obj) remove_obj = True - for coll in [coll for coll in bpy.data.collections if coll != collection]: - if coll.objects and self._collection_contains_object(coll, obj): + for coll in [ + coll for coll in bpy.context.blend_data.collections + if coll != collection + ]: + if ( + coll.objects and + self._collection_contains_object(coll, obj) + ): remove_obj = False if remove_obj: objects_to_remove.add(obj) + for obj in objects_to_remove: # Only delete objects that are not used elsewhere - bpy.data.objects.remove(obj) + bpy.context.blend_data.objects.remove(obj) - instance_empties = [obj for obj in collection.users_dupli_group if obj.name in collection.name] + instance_empties = [ + obj for obj in collection.users_dupli_group + if obj.name in collection.name + ] if instance_empties: instance_empty = instance_empties[0] container_name = instance_empty["avalon"]["container_name"] + relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(str(libpath), link=True, relative=relative) as (_, data_to): 
+ with bpy.context.blend_data.libraries.load( + str(libpath), link=True, relative=relative + ) as (_, data_to): data_to.collections = [container_name] + new_collection = self._get_lib_collection(container_name, libpath) if new_collection is None: - raise ValueError("A matching collection '{container_name}' " - "should have been found in: {libpath}") + raise ValueError( + "A matching collection '{container_name}' " + "should have been found in: {libpath}" + ) + for obj in new_collection.objects: collection.objects.link(obj) - bpy.data.collections.remove(new_collection) + bpy.context.blend_data.collections.remove(new_collection) # Update the representation on the collection avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] avalon_prop["representation"] = str(representation["_id"]) @@ -195,10 +237,14 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get(container["objectName"]) + collection = bpy.context.blend_data.collections.get( + container["objectName"] + ) if not collection: return False - assert not (collection.children), "Nested collections are not supported." + assert not (collection.children), ( + "Nested collections are not supported." + ) instance_parents = list(collection.users_dupli_group) instance_objects = list(collection.objects) for obj in instance_objects + instance_parents: @@ -224,11 +270,10 @@ class CacheModelLoader(pype.blender.AssetLoader): icon = "code-fork" color = "orange" - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[List]: + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: """ Arguments: name: Use pre-defined name @@ -243,17 +288,23 @@ class CacheModelLoader(pype.blender.AssetLoader): asset = context["asset"]["name"] subset = context["subset"]["name"] # TODO (jasper): evaluate use of namespace which is 'alien' to Blender. 
- lib_container = container_name = pype.blender.plugin.model_name(asset, subset, namespace) + lib_container = container_name = ( + pype.blender.plugin.model_name(asset, subset, namespace) + ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load(libpath, link=True, relative=relative) as (data_from, data_to): + with bpy.context.blend_data.libraries.load( + libpath, link=True, relative=relative + ) as (data_from, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.data.objects.new(container_name, None) + instance_empty = bpy.context.blend_data.objects.new( + container_name, None + ) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - collection = bpy.data.collections[lib_container] + collection = bpy.context.blend_data.collections[lib_container] collection.name = container_name instance_empty.instance_collection = collection diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py index a097c72047..5756431314 100644 --- a/pype/plugins/blender/publish/collect_current_file.py +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -12,5 +12,5 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" - current_file = bpy.data.filepath + current_file = bpy.context.blend_data.filepath context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py index c60402f9ca..4c7e840c17 100644 --- a/pype/plugins/blender/publish/collect_model.py +++ b/pype/plugins/blender/publish/collect_model.py @@ -23,7 +23,7 @@ class CollectModel(pyblish.api.ContextPlugin): representation set. If the representation is set, it is a loaded model and we don't want to publish it. """ - for collection in bpy.data.collections: + for collection in bpy.context.blend_data.collections: avalon_prop = collection.get(AVALON_PROPERTY) or dict() if (avalon_prop.get('family') == 'model' and not avalon_prop.get('representation')): @@ -42,6 +42,7 @@ class CollectModel(pyblish.api.ContextPlugin): instance = context.create_instance( name=name, family=family, + families=[family], subset=subset, asset=asset, task=task, diff --git a/pype/plugins/blender/publish/extract_model.py b/pype/plugins/blender/publish/extract_model.py index 75ec33fb27..501c4d9d5c 100644 --- a/pype/plugins/blender/publish/extract_model.py +++ b/pype/plugins/blender/publish/extract_model.py @@ -1,10 +1,10 @@ -from pathlib import Path +import os import avalon.blender.workio -import sonar.api +import pype.api -class ExtractModel(sonar.api.Extractor): +class ExtractModel(pype.api.Extractor): """Extract as model.""" label = "Model" @@ -14,9 +14,10 @@ class ExtractModel(sonar.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = Path(self.staging_dir(instance)) + + stagingdir = self.staging_dir(instance) filename = f"{instance.name}.blend" - filepath = str(stagingdir / filename) + filepath = os.path.join(stagingdir, filename) # Perform extraction self.log.info("Performing extraction..") @@ -24,11 +25,23 @@ class ExtractModel(sonar.api.Extractor): # Just save the file to a temporary location. At least for now it's no # problem to have (possibly) extra stuff in the file. 
avalon.blender.workio.save_file(filepath, copy=True) + # + # # Store reference for integration + # if "files" not in instance.data: + # instance.data["files"] = list() + # + # # instance.data["files"].append(filename) - # Store reference for integration - if "files" not in instance.data: - instance.data["files"] = list() + if "representations" not in instance.data: + instance.data["representations"] = [] - instance.data["files"].append(filename) + representation = { + 'name': 'blend', + 'ext': 'blend', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) - self.log.info("Extracted instance '%s' to: %s", instance.name, filepath) + + self.log.info("Extracted instance '%s' to: %s", instance.name, representation) diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py index 79a42a11d5..f8c5092ab7 100644 --- a/pype/plugins/blender/publish/validate_mesh_has_uv.py +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import sonar.blender.action +import pype.blender.action class ValidateMeshHasUvs(pyblish.api.InstancePlugin): @@ -14,7 +14,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): families = ["model"] category = "geometry" label = "Mesh Has UV's" - actions = [sonar.blender.action.SelectInvalidAction] + actions = [pype.blender.action.SelectInvalidAction] optional = True @staticmethod @@ -34,7 +34,9 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): def get_invalid(cls, instance) -> List: invalid = [] # TODO (jasper): only check objects in the collection that will be published? - for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + for obj in [ + obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + ]: # Make sure we are in object mode. bpy.ops.object.mode_set(mode='OBJECT') if not cls.has_uvs(obj): diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py index b2a927a2ed..1f050f6844 100644 --- a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py +++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py @@ -3,7 +3,7 @@ from typing import List import bpy import pyblish.api -import sonar.blender.action +import pype.blender.action class ValidateMeshNoNegativeScale(pyblish.api.Validator): @@ -13,13 +13,15 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): hosts = ["blender"] families = ["model"] label = "Mesh No Negative Scale" - actions = [sonar.blender.action.SelectInvalidAction] + actions = [pype.blender.action.SelectInvalidAction] @staticmethod def get_invalid(instance) -> List: invalid = [] # TODO (jasper): only check objects in the collection that will be published? 
- for obj in [obj for obj in bpy.data.objects if obj.type == 'MESH']: + for obj in [ + obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + ]: if any(v < 0 for v in obj.scale): invalid.append(obj) @@ -28,4 +30,6 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError(f"Meshes found in instance with negative scale: {invalid}") + raise RuntimeError( + f"Meshes found in instance with negative scale: {invalid}" + ) From 2635268a494f612a7a75a8a873e4b211b1fca20e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Dec 2019 17:32:44 +0100 Subject: [PATCH 07/31] modified blender action --- pype/blender/action.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/pype/blender/action.py b/pype/blender/action.py index 948123c3c5..4bd7e303fc 100644 --- a/pype/blender/action.py +++ b/pype/blender/action.py @@ -24,19 +24,24 @@ class SelectInvalidAction(pyblish.api.Action): if isinstance(invalid_nodes, (list, tuple)): invalid.extend(invalid_nodes) else: - self.log.warning("Failed plug-in doens't have any selectable objects.") + self.log.warning( + "Failed plug-in doens't have any selectable objects." + ) + + bpy.ops.object.select_all(action='DESELECT') # Make sure every node is only processed once invalid = list(set(invalid)) - - bpy.ops.object.select_all(action='DESELECT') - if invalid: - invalid_names = [obj.name for obj in invalid] - self.log.info("Selecting invalid objects: %s", ", ".join(invalid_names)) - # Select the objects and also make the last one the active object. - for obj in invalid: - obj.select_set(True) - bpy.context.view_layer.objects.active = invalid[-1] - - else: + if not invalid: self.log.info("No invalid nodes found.") + return + + invalid_names = [obj.name for obj in invalid] + self.log.info( + "Selecting invalid objects: %s", ", ".join(invalid_names) + ) + # Select the objects and also make the last one the active object. + for obj in invalid: + obj.select_set(True) + + bpy.context.view_layer.objects.active = invalid[-1] From f20c4025c5b1f0df30f659b13d2734c9e3ec3ae6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 10:09:52 +0100 Subject: [PATCH 08/31] replace bpy.context.blend_data with bpy.data --- pype/plugins/blender/load/submarine_model.py | 26 +++++++++---------- .../blender/publish/collect_current_file.py | 2 +- pype/plugins/blender/publish/collect_model.py | 2 +- .../blender/publish/validate_mesh_has_uv.py | 2 +- .../validate_mesh_no_negative_scale.py | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/submarine_model.py index 99095d74cd..bd6db17650 100644 --- a/pype/plugins/blender/load/submarine_model.py +++ b/pype/plugins/blender/load/submarine_model.py @@ -38,7 +38,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Note: It is assumed that only 1 matching collection is found. 
""" - for collection in bpy.context.blend_data.collections: + for collection in bpy.data.collections: if collection.name != name: continue if collection.library is None: @@ -82,13 +82,13 @@ class BlendModelLoader(pype.blender.AssetLoader): ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( libpath, link=True, relative=relative ) as (_, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.context.blend_data.objects.new( + instance_empty = bpy.data.objects.new( container_name, None ) if not instance_empty.get("avalon"): @@ -97,7 +97,7 @@ class BlendModelLoader(pype.blender.AssetLoader): avalon_info.update({"container_name": container_name}) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - container = bpy.context.blend_data.collections[lib_container] + container = bpy.data.collections[lib_container] container.name = container_name instance_empty.instance_collection = container container.make_local() @@ -127,7 +127,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.context.blend_data.collections.get( + collection = bpy.data.collections.get( container["objectName"] ) libpath = Path(api.get_representation_path(representation)) @@ -181,7 +181,7 @@ class BlendModelLoader(pype.blender.AssetLoader): collection.objects.unlink(obj) remove_obj = True for coll in [ - coll for coll in bpy.context.blend_data.collections + coll for coll in bpy.data.collections if coll != collection ]: if ( @@ -194,7 +194,7 @@ class BlendModelLoader(pype.blender.AssetLoader): for obj in objects_to_remove: # Only delete objects that are not used elsewhere - bpy.context.blend_data.objects.remove(obj) + bpy.data.objects.remove(obj) instance_empties = [ obj for obj in collection.users_dupli_group @@ -205,7 +205,7 @@ class BlendModelLoader(pype.blender.AssetLoader): container_name = instance_empty["avalon"]["container_name"] relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( str(libpath), link=True, relative=relative ) as (_, data_to): data_to.collections = [container_name] @@ -219,7 +219,7 @@ class BlendModelLoader(pype.blender.AssetLoader): for obj in new_collection.objects: collection.objects.link(obj) - bpy.context.blend_data.collections.remove(new_collection) + bpy.data.collections.remove(new_collection) # Update the representation on the collection avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY] avalon_prop["representation"] = str(representation["_id"]) @@ -237,7 +237,7 @@ class BlendModelLoader(pype.blender.AssetLoader): Warning: No nested collections are supported at the moment! 
""" - collection = bpy.context.blend_data.collections.get( + collection = bpy.data.collections.get( container["objectName"] ) if not collection: @@ -293,18 +293,18 @@ class CacheModelLoader(pype.blender.AssetLoader): ) relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.context.blend_data.libraries.load( + with bpy.data.libraries.load( libpath, link=True, relative=relative ) as (data_from, data_to): data_to.collections = [lib_container] scene = bpy.context.scene - instance_empty = bpy.context.blend_data.objects.new( + instance_empty = bpy.data.objects.new( container_name, None ) scene.collection.objects.link(instance_empty) instance_empty.instance_type = 'COLLECTION' - collection = bpy.context.blend_data.collections[lib_container] + collection = bpy.data.collections[lib_container] collection.name = container_name instance_empty.instance_collection = collection diff --git a/pype/plugins/blender/publish/collect_current_file.py b/pype/plugins/blender/publish/collect_current_file.py index 5756431314..a097c72047 100644 --- a/pype/plugins/blender/publish/collect_current_file.py +++ b/pype/plugins/blender/publish/collect_current_file.py @@ -12,5 +12,5 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin): def process(self, context): """Inject the current working file""" - current_file = bpy.context.blend_data.filepath + current_file = bpy.data.filepath context.data['currentFile'] = current_file diff --git a/pype/plugins/blender/publish/collect_model.py b/pype/plugins/blender/publish/collect_model.py index 4c7e840c17..ee10eaf7f2 100644 --- a/pype/plugins/blender/publish/collect_model.py +++ b/pype/plugins/blender/publish/collect_model.py @@ -23,7 +23,7 @@ class CollectModel(pyblish.api.ContextPlugin): representation set. If the representation is set, it is a loaded model and we don't want to publish it. """ - for collection in bpy.context.blend_data.collections: + for collection in bpy.data.collections: avalon_prop = collection.get(AVALON_PROPERTY) or dict() if (avalon_prop.get('family') == 'model' and not avalon_prop.get('representation')): diff --git a/pype/plugins/blender/publish/validate_mesh_has_uv.py b/pype/plugins/blender/publish/validate_mesh_has_uv.py index f8c5092ab7..b71a40ad8f 100644 --- a/pype/plugins/blender/publish/validate_mesh_has_uv.py +++ b/pype/plugins/blender/publish/validate_mesh_has_uv.py @@ -35,7 +35,7 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin): invalid = [] # TODO (jasper): only check objects in the collection that will be published? for obj in [ - obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + obj for obj in bpy.data.objects if obj.type == 'MESH' ]: # Make sure we are in object mode. bpy.ops.object.mode_set(mode='OBJECT') diff --git a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py index 1f050f6844..7e3b38dd19 100644 --- a/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py +++ b/pype/plugins/blender/publish/validate_mesh_no_negative_scale.py @@ -20,7 +20,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): invalid = [] # TODO (jasper): only check objects in the collection that will be published? 
for obj in [ - obj for obj in bpy.context.blend_data.objects if obj.type == 'MESH' + obj for obj in bpy.data.objects if obj.type == 'MESH' ]: if any(v < 0 for v in obj.scale): invalid.append(obj) From 3d33f8fd4ab22eadb27b46ecea8d063f5b856549 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:09:46 +0100 Subject: [PATCH 09/31] added get_fps method to burnins class which calculate fps from r_frame_rate --- pype/scripts/otio_burnin.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 3e8cb3b0c4..a8c4017c52 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -98,6 +98,24 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if options_init: self.options_init.update(options_init) + def get_fps(str_value): + if str_value == "0/0": + print("Source has \"r_frame_rate\" value set to \"0/0\".") + return "Unknown" + + items = str_value.split("/") + if len(items) == 1: + fps = float(items[0]) + + elif len(items) == 2: + fps = float(items[0]) / float(items[1]) + + # Check if fps is integer or float number + if int(fps) == fps: + fps = int(fps) + + return str(fps) + def add_text(self, text, align, options=None): """ Adding static text to a filter. From bb86c94c184645631906688ba184e29f50363be8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:10:19 +0100 Subject: [PATCH 10/31] width, height and fps values from ffprobe are added to options data --- pype/scripts/otio_burnin.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index a8c4017c52..ea1554876f 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -95,9 +95,24 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): streams = _streams(source) super().__init__(source, streams) + if options_init: self.options_init.update(options_init) + if "resolution_width" not in self.options_init: + self.options_init["resolution_width"] = ( + streams[0].get("width", "Unknown") + ) + + if "resolution_height" not in self.options_init: + self.options_init["resolution_height"] = ( + streams[0].get("height", "Unknown") + ) + + if "fps" not in self.options_init: + fps = self.get_fps(streams[0]["r_frame_rate"]) + self.options_init["fps"] = fps + def get_fps(str_value): if str_value == "0/0": print("Source has \"r_frame_rate\" value set to \"0/0\".") From 6f4d50d41d8b62f57d13e1c3fdc6fd121c5cd8ac Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:25:07 +0100 Subject: [PATCH 11/31] get_fps moved from Burnin class --- pype/scripts/otio_burnin.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index ea1554876f..f6b5c34bff 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -39,6 +39,25 @@ def _streams(source): return json.loads(out)['streams'] +def get_fps(str_value): + if str_value == "0/0": + print("Source has \"r_frame_rate\" value set to \"0/0\".") + return "Unknown" + + items = str_value.split("/") + if len(items) == 1: + fps = float(items[0]) + + elif len(items) == 2: + fps = float(items[0]) / float(items[1]) + + # Check if fps is integer or float number + if int(fps) == fps: + fps = int(fps) + + return str(fps) + + class ModifiedBurnins(ffmpeg_burnins.Burnins): ''' This is modification of OTIO FFmpeg Burnin adapter. 
@@ -113,24 +132,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): fps = self.get_fps(streams[0]["r_frame_rate"]) self.options_init["fps"] = fps - def get_fps(str_value): - if str_value == "0/0": - print("Source has \"r_frame_rate\" value set to \"0/0\".") - return "Unknown" - - items = str_value.split("/") - if len(items) == 1: - fps = float(items[0]) - - elif len(items) == 2: - fps = float(items[0]) / float(items[1]) - - # Check if fps is integer or float number - if int(fps) == fps: - fps = int(fps) - - return str(fps) - def add_text(self, text, align, options=None): """ Adding static text to a filter. From 3dac4c1b69da68a850e1be4730f37b45b46fabd4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:30:17 +0100 Subject: [PATCH 12/31] data from frobe are stored to data not to options --- pype/scripts/otio_burnin.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index f6b5c34bff..0c985a0faf 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -118,20 +118,6 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins): if options_init: self.options_init.update(options_init) - if "resolution_width" not in self.options_init: - self.options_init["resolution_width"] = ( - streams[0].get("width", "Unknown") - ) - - if "resolution_height" not in self.options_init: - self.options_init["resolution_height"] = ( - streams[0].get("height", "Unknown") - ) - - if "fps" not in self.options_init: - fps = self.get_fps(streams[0]["r_frame_rate"]) - self.options_init["fps"] = fps - def add_text(self, text, align, options=None): """ Adding static text to a filter. @@ -362,6 +348,17 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) frame_start = data.get("frame_start") frame_start_tc = data.get('frame_start_tc', frame_start) + + stream = burnin._streams[0] + if "resolution_width" not in data: + data["resolution_width"] = stream.get("width", "Unknown") + + if "resolution_height" not in data: + data["resolution_height"] = stream.get("height", "Unknown") + + if "fps" not in data: + data["fps"] = get_fps(stream.get("r_frame_rate", "0/0")) + for align_text, preset in presets.get('burnins', {}).items(): align = None if align_text == 'TOP_LEFT': From f84f1537def6d65e0e9c399083e84111e940c83a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 6 Jan 2020 18:30:24 +0100 Subject: [PATCH 13/31] formatting changes --- pype/scripts/otio_burnin.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py index 0c985a0faf..b3d0e544db 100644 --- a/pype/scripts/otio_burnin.py +++ b/pype/scripts/otio_burnin.py @@ -413,12 +413,14 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True) elif bi_func == 'timecode': burnin.add_timecode(align, start_frame=frame_start_tc) + elif bi_func == 'text': if not preset.get('text'): log.error('Text is not set for text function burnin!') return text = preset['text'].format(**data) burnin.add_text(text, align) + elif bi_func == "datetime": date_format = preset["format"] burnin.add_datetime(date_format, align) @@ -445,4 +447,4 @@ if __name__ == '__main__': data['codec'], data['output'], data['burnin_data'] - ) + ) From 75cb30fe1da52f124ab25ed084ea1e63fab1a677 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:11:27 +0100 Subject: [PATCH 14/31] inital version of delivery action in ftrack --- 
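Note: the delivery action added below resolves output paths through the project Anatomy, so it only
does something useful when the project's Anatomy.yaml defines a "delivery" block with at least one
template. A rough usage sketch of the lookup the action performs follows; the template name
"singleFile" and all context values are made-up examples, and only the "delivery" key and the
Anatomy calls mirror the code in this patch:

    from pypeapp import Anatomy

    anatomy = Anatomy("example_project")
    anatomy_filled = anatomy.format({
        # "root" is taken from the "__location_path__" field when it is filled,
        # otherwise from the registered project root
        "root": "/mnt/client_out",
        "project": {"name": "example_project", "code": "exp"},
        "asset": "sh010",
        "subset": "renderMain",
        "version": 3,
        "representation": "exr"
    })
    # same lookup as process_single_file() does below
    delivery_path = anatomy_filled.get("delivery", {}).get("singleFile")

Sequences go through the same lookup, with the "frame" key kept as a placeholder that is expanded
per file afterwards (see process_sequence).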
 pype/ftrack/actions/action_delivery.py | 421 +++++++++++++++++++++++++
 1 file changed, 421 insertions(+)
 create mode 100644 pype/ftrack/actions/action_delivery.py

diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
new file mode 100644
index 0000000000..e23e35f91c
--- /dev/null
+++ b/pype/ftrack/actions/action_delivery.py
@@ -0,0 +1,421 @@
+import os
+import copy
+import shutil
+
+import clique
+from bson.objectid import ObjectId
+from avalon import pipeline
+from avalon.vendor import filelink
+from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+
+from pypeapp import Anatomy
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
+
+
+class Delivery(BaseAction):
+    '''Deliver data to client.'''
+
+    #: Action identifier.
+    identifier = "delivery.action"
+    #: Action label.
+    label = "Delivery"
+    #: Action description.
+    description = "Deliver data to client"
+    #: roles that are allowed to register this action
+    role_list = ["Pypeclub", "Administrator", "Project manager"]
+    # icon = '{}/ftrack/action_icons/TestAction.svg'.format(
+    #     os.environ.get('PYPE_STATICS_SERVER', '')
+    # )
+
+    db_con = DbConnector()
+
+    def discover(self, session, entities, event):
+        ''' Validation '''
+        for entity in entities:
+            if entity.entity_type.lower() == "assetversion":
+                return True
+
+        return False
+
+    def interface(self, session, entities, event):
+        if event["data"].get("values", {}):
+            return
+
+        title = "Delivery data to Client"
+
+        items = []
+        item_splitter = {"type": "label", "value": "---"}
+
+        # Prepare component names for processing
+        components = None
+        project = None
+        for entity in entities:
+            if project is None:
+                project_id = None
+                for ent_info in entity["link"]:
+                    if ent_info["type"].lower() == "project":
+                        project_id = ent_info["id"]
+                        break
+
+                if project_id is None:
+                    project = entity["asset"]["parent"]["project"]
+                else:
+                    project = session.query((
+                        "select id, full_name from Project where id is \"{}\""
+                    ).format(project_id)).one()
+
+            _components = set(
+                [component["name"] for component in entity["components"]]
+            )
+            if components is None:
+                components = _components
+                continue
+
+            components = components.intersection(_components)
+            if not components:
+                break
+
+        project_name = project["full_name"]
+        items.append({
+            "type": "hidden",
+            "name": "__project_name__",
+            "value": project_name
+        })
+
+        # Prepare anatomy data
+        anatomy = Anatomy(project_name)
+        new_anatomies = []
+        first = None
+        for key in (anatomy.templates.get("delivery") or {}):
+            new_anatomies.append({
+                "label": key,
+                "value": key
+            })
+            if first is None:
+                first = key
+
+        skipped = False
+        # Add message if there are no common components
+        if not components or not new_anatomies:
+            skipped = True
+            items.append({
+                "type": "label",
+                "value": "

Something went wrong:
" + }) + + items.append({ + "type": "hidden", + "name": "__skipped__", + "value": skipped + }) + + if not components: + if len(entities) == 1: + items.append({ + "type": "label", + "value": ( + "- Selected entity doesn't have components to deliver." + ) + }) + else: + items.append({ + "type": "label", + "value": ( + "- Selected entities don't have common components." + ) + }) + + # Add message if delivery anatomies are not set + if not new_anatomies: + items.append({ + "type": "label", + "value": ( + "- `\"delivery\"` anatomy key is not set in config." + ) + }) + + # Skip if there are any data shortcomings + if skipped: + return { + "items": items, + "title": title + } + + items.append({ + "value": "

Choose Components to deliver
", + "type": "label" + }) + + for component in components: + items.append({ + "type": "boolean", + "value": False, + "label": component, + "name": component + }) + + items.append(item_splitter) + + items.append({ + "value": "

Location for delivery
", + "type": "label" + }) + + items.append({ + "type": "text", + "name": "__location_path__", + "empty_text": "Type location path here..." + }) + + items.append(item_splitter) + + items.append({ + "value": "

Anatomy of delivery files
", + "type": "label" + }) + + items.append({ + "type": "label", + "value": ( + "

NOTE: These can be set in Anatomy.yaml" + " within `delivery` key.

" + ) + }) + + items.append({ + "type": "enumerator", + "name": "__new_anatomies__", + "data": new_anatomies, + "value": first + }) + + return { + "items": items, + "title": title + } + + def launch(self, session, entities, event): + if "values" not in event["data"]: + return + + values = event["data"]["values"] + skipped = values.pop("__skipped__") + if skipped: + return None + + component_names = [] + location_path = values.pop("__location_path__") + anatomy_name = values.pop("__new_anatomies__") + project_name = values.pop("__project_name__") + + for key, value in values.items(): + if value is True: + component_names.append(key) + + if not component_names: + return None + + location_path = os.path.normpath(location_path.strip()) + if location_path and not os.path.exists(location_path): + return { + "success": False, + "message": ( + "Entered location path does not exists. \"{}\"" + ).format(location_path) + } + + self.db_con.install() + self.db_con.Session["AVALON_PROJECT"] = project_name + + components = [] + repres_to_deliver = [] + for entity in entities: + asset = entity["asset"] + subset_name = asset["name"] + version = entity["version"] + + parent = asset["parent"] + parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) + if not parent_mongo_id: + # TODO log error (much better) + self.log.warning(( + "Seems like entity <{}> is not synchronized to avalon" + ).format(parent["name"])) + continue + + parent_mongo_id = ObjectId(parent_mongo_id) + subset_ent = self.db_con.find_one({ + "type": "subset", + "parent": parent_mongo_id, + "name": subset_name + }) + + version_ent = self.db_con.find_one({ + "type": "version", + "name": version, + "parent": subset_ent["_id"] + }) + + repre_ents = self.db_con.find({ + "type": "representation", + "parent": version_ent["_id"] + }) + + repres_by_name = {} + for repre in repre_ents: + repre_name = repre["name"] + repres_by_name[repre_name] = repre + + for component in entity["components"]: + comp_name = component["name"] + if comp_name not in component_names: + continue + + repre = repres_by_name.get(comp_name) + repres_to_deliver.append(repre) + + src_dst_files = {} + anatomy = Anatomy(project_name) + for repre in repres_to_deliver: + # Get destination repre path + anatomy_data = copy.deepcopy(repre["context"]) + if location_path: + anatomy_data["root"] = location_path + else: + anatomy_data["root"] = pipeline.registered_root() + + # Get source repre path + repre_path = self.path_from_represenation(repre) + # TODO add backup solution where root of path from component + # is repalced with AVALON_PROJECTS root + + if repre_path and os.path.exists(repre_path): + self.process_single_file( + repre_path, anatomy, anatomy_name, anatomy_data + ) + + else: + self.process_sequence( + repre_path, anatomy, anatomy_name, anatomy_data + ) + + self.db_con.uninstall() + + def process_single_file( + self, repre_path, anatomy, anatomy_name, anatomy_data + ): + anatomy_filled = anatomy.format(anatomy_data) + delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) + if not delivery_path: + # TODO log error! 
- missing keys in anatomy + return + + delivery_folder = os.path.dirname(delivery_path) + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + self.copy_file(repre_path, delivery_path) + + def process_sequence( + self, repre_path, anatomy, anatomy_name, anatomy_data + ): + dir_path, file_name = os.path.split(repre_path) + if not os.path.exists(dir_path): + # TODO log if folder don't exist + return + + base_name, ext = os.path.splitext(file_name) + file_name_items = None + if "#" in base_name: + file_name_items = [part for part in base_name.split("#") if part] + + elif "%" in base_name: + file_name_items = base_name.split("%") + + if not file_name_items: + # TODO log if file does not exists + return + + src_collections, remainder = clique.assemble(os.listdir(dir_path)) + src_collection = None + for col in src_collections: + if col.tail != ext: + continue + + # skip if collection don't have same basename + if not col.head.startswith(file_name_items[0]): + continue + + src_collection = col + break + + if src_collection is None: + # TODO log error! + return + + anatomy_data["frame"] = "{frame}" + anatomy_filled = anatomy.format(anatomy_data) + delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) + if not delivery_path: + # TODO log error! - missing keys in anatomy + return + + delivery_folder = os.path.dirname(delivery_path) + dst_head, dst_tail = delivery_path.split("{frame}") + dst_padding = src_collection.padding + dst_collection = clique.Collection( + head=dst_head, + tail=dst_tail, + padding=dst_padding + ) + + if not os.path.exists(delivery_folder): + os.makedirs(delivery_folder) + + src_head = src_collection.head + src_tail = src_collection.tail + for index in src_collection.indexes: + src_padding = src_collection.format("{padding}") % index + src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + + dst_padding = dst_collection.format("{padding}") % index + dst_file_name = "{}{}{}".format(dst_head, dst_padding, dst_tail) + + self.copy_file(src, dst) + + def path_from_represenation(self, representation): + try: + template = representation["data"]["template"] + + except KeyError: + return None + + try: + context = representation["context"] + context["root"] = os.environ.get("AVALON_PROJECTS") or "" + path = pipeline.format_template_with_optional_keys( + context, template + ) + + except KeyError: + # Template references unavailable data + return None + + if os.path.exists(path): + return os.path.normpath(path) + + def copy_file(self, src_path, dst_path): + try: + filelink.create( + src_path, + dst_path, + filelink.HARDLINK + ) + except OSError: + shutil.copyfile(src_path, dst_path) + +def register(session, plugins_presets={}): + '''Register plugin. 
Called when used as an plugin.''' + + Delivery(session, plugins_presets).register() From 830373f3d5c35c298285236a3a36b9eed0aaf5c4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:19:35 +0100 Subject: [PATCH 15/31] added delivery icon --- pype/ftrack/actions/action_delivery.py | 6 ++--- res/ftrack/action_icons/Delivery.svg | 34 ++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 res/ftrack/action_icons/Delivery.svg diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index e23e35f91c..572a9bc8e0 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -24,9 +24,9 @@ class Delivery(BaseAction): description = "Deliver data to client" #: roles that are allowed to register this action role_list = ["Pypeclub", "Administrator", "Project manager"] - # icon = '{}/ftrack/action_icons/TestAction.svg'.format( - # os.environ.get('PYPE_STATICS_SERVER', '') - # ) + icon = '{}/ftrack/action_icons/Delivery.svg'.format( + os.environ.get('PYPE_STATICS_SERVER', '') + ) db_con = DbConnector() diff --git a/res/ftrack/action_icons/Delivery.svg b/res/ftrack/action_icons/Delivery.svg new file mode 100644 index 0000000000..3380487c31 --- /dev/null +++ b/res/ftrack/action_icons/Delivery.svg @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + From cbbb074a25c929582a26807691bf00a27c7325a4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:24:35 +0100 Subject: [PATCH 16/31] fix source filepath --- pype/ftrack/actions/action_delivery.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index 572a9bc8e0..ad3d6ef6cc 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -228,7 +228,6 @@ class Delivery(BaseAction): self.db_con.install() self.db_con.Session["AVALON_PROJECT"] = project_name - components = [] repres_to_deliver = [] for entity in entities: asset = entity["asset"] @@ -275,7 +274,6 @@ class Delivery(BaseAction): repre = repres_by_name.get(comp_name) repres_to_deliver.append(repre) - src_dst_files = {} anatomy = Anatomy(project_name) for repre in repres_to_deliver: # Get destination repre path @@ -302,6 +300,8 @@ class Delivery(BaseAction): self.db_con.uninstall() + return True + def process_single_file( self, repre_path, anatomy, anatomy_name, anatomy_data ): @@ -378,9 +378,12 @@ class Delivery(BaseAction): for index in src_collection.indexes: src_padding = src_collection.format("{padding}") % index src_file_name = "{}{}{}".format(src_head, src_padding, src_tail) + src = os.path.normpath( + os.path.join(dir_path, src_file_name) + ) dst_padding = dst_collection.format("{padding}") % index - dst_file_name = "{}{}{}".format(dst_head, dst_padding, dst_tail) + dst = "{}{}{}".format(dst_head, dst_padding, dst_tail) self.copy_file(src, dst) From 5e31299c2441ba57c323245b067062279817f24d Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 8 Jan 2020 17:38:03 +0100 Subject: [PATCH 17/31] add resolution and fps to anatomy keys --- pype/plugins/global/publish/collect_templates.py | 5 ++++- pype/plugins/global/publish/extract_review.py | 4 +++- pype/plugins/global/publish/integrate_new.py | 5 ++++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index 48623eec22..d57d416dea 100644 --- 
a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -76,7 +76,10 @@ class CollectTemplates(pyblish.api.InstancePlugin): "subset": subset_name, "version": version_number, "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP"} + "representation": "TEMP", + "resolution_width": instance.data.get("resolutionWidth", ""), + "resolution_height": instance.data.get("resolutionHeight", ""), + "fps": str(instance.data.get("fps", ""))}} instance.data["template"] = template instance.data["assumedTemplateData"] = template_data diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index f621df0c66..c75bb488a2 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -249,7 +249,9 @@ class ExtractReview(pyblish.api.InstancePlugin): 'files': repr_file, "tags": new_tags, "outputName": name, - "codec": codec_args + "codec": codec_args, + "resolutionWidth": resolution_width, + "resolutionWidth": resolution_height }) if repre_new.get('preview'): repre_new.pop("preview") diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index faade613f2..ee18347703 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -267,7 +267,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "family": instance.data['family'], "subset": subset["name"], "version": int(version["name"]), - "hierarchy": hierarchy} + "hierarchy": hierarchy, + "resolution_width": repre.get("resolutionWidth", ""), + "resolution_height": repre.get("resolutionHeight", ""), + "fps": str(instance.data.get("fps", ""))} files = repre['files'] if repre.get('stagingDir'): From cfd9823abc0c8109f4c5e18e2a6f1a55e2977047 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 17:41:35 +0100 Subject: [PATCH 18/31] replaced {frame} with <> --- pype/ftrack/actions/action_delivery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index ad3d6ef6cc..22fb15198b 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -354,7 +354,7 @@ class Delivery(BaseAction): # TODO log error! 
return - anatomy_data["frame"] = "{frame}" + anatomy_data["frame"] = "<>" anatomy_filled = anatomy.format(anatomy_data) delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) if not delivery_path: @@ -362,7 +362,7 @@ class Delivery(BaseAction): return delivery_folder = os.path.dirname(delivery_path) - dst_head, dst_tail = delivery_path.split("{frame}") + dst_head, dst_tail = delivery_path.split("<>") dst_padding = src_collection.padding dst_collection = clique.Collection( head=dst_head, From 3cf559afba5058eae3e96cbb1d873e1b7403affe Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 19:21:15 +0100 Subject: [PATCH 19/31] better reporting and logging --- pype/ftrack/actions/action_delivery.py | 144 +++++++++++++++++++++---- 1 file changed, 121 insertions(+), 23 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index 22fb15198b..e698c371e1 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -1,9 +1,12 @@ import os import copy import shutil +import collections +import string import clique from bson.objectid import ObjectId + from avalon import pipeline from avalon.vendor import filelink from avalon.tools.libraryloader.io_nonsingleton import DbConnector @@ -162,10 +165,17 @@ class Delivery(BaseAction): "type": "label" }) + items.append({ + "type": "label", + "value": ( + "NOTE: It is possible to replace `root` key in anatomy." + ) + }) + items.append({ "type": "text", "name": "__location_path__", - "empty_text": "Type location path here..." + "empty_text": "Type location path here...(Optional)" }) items.append(item_splitter) @@ -199,6 +209,8 @@ class Delivery(BaseAction): if "values" not in event["data"]: return + self.report_items = collections.defaultdict(list) + values = event["data"]["values"] skipped = values.pop("__skipped__") if skipped: @@ -214,7 +226,10 @@ class Delivery(BaseAction): component_names.append(key) if not component_names: - return None + return { + "success": True, + "message": "Not selected components to deliver." + } location_path = os.path.normpath(location_path.strip()) if location_path and not os.path.exists(location_path): @@ -236,14 +251,24 @@ class Delivery(BaseAction): parent = asset["parent"] parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey) - if not parent_mongo_id: - # TODO log error (much better) - self.log.warning(( - "Seems like entity <{}> is not synchronized to avalon" - ).format(parent["name"])) - continue + if parent_mongo_id: + parent_mongo_id = ObjectId(parent_mongo_id) + else: + asset_ent = self.db_con.find_one({ + "type": "asset", + "data.ftrackId": parent["id"] + }) + if not asset_ent: + ent_path = "/".join( + [ent["name"] for ent in parent["link"]] + ) + msg = "Not synchronized entities to avalon" + self.report_items[msg].append(ent_path) + self.log.warning("{} <{}>".format(msg, ent_path)) + continue + + parent_mongo_id = asset_ent["_id"] - parent_mongo_id = ObjectId(parent_mongo_id) subset_ent = self.db_con.find_one({ "type": "subset", "parent": parent_mongo_id, @@ -283,6 +308,50 @@ class Delivery(BaseAction): else: anatomy_data["root"] = pipeline.registered_root() + anatomy_filled = anatomy.format(anatomy_data) + test_path = ( + anatomy_filled + .get("delivery", {}) + .get(anatomy_name) + ) + + if not test_path: + msg = ( + "Missing keys in Representation's context" + " for anatomy template \"{}\"." 
+ ).format(anatomy_name) + + all_anatomies = anatomy.format_all(anatomy_data) + result = None + for anatomies in all_anatomies.values(): + for key, temp in anatomies.get("delivery", {}).items(): + if key != anatomy_name: + continue + + result = temp + break + + # TODO log error! - missing keys in anatomy + if result: + missing_keys = [ + key[1] for key in string.Formatter().parse(result) + if key[1] is not None + ] + else: + missing_keys = ["unknown"] + + keys = ", ".join(missing_keys) + sub_msg = ( + "Representation: {}
<br>- Missing keys: \"{}\"<br>
" + ).format(str(repre["_id"]), keys) + self.report_items[msg].append(sub_msg) + self.log.warning( + "{} Representation: \"{}\" Filled: <{}>".format( + msg, str(repre["_id"]), str(result) + ) + ) + continue + # Get source repre path repre_path = self.path_from_represenation(repre) # TODO add backup solution where root of path from component @@ -300,17 +369,13 @@ class Delivery(BaseAction): self.db_con.uninstall() - return True + return self.report() def process_single_file( self, repre_path, anatomy, anatomy_name, anatomy_data ): anatomy_filled = anatomy.format(anatomy_data) - delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) - if not delivery_path: - # TODO log error! - missing keys in anatomy - return - + delivery_path = anatomy_filled["delivery"][anatomy_name] delivery_folder = os.path.dirname(delivery_path) if not os.path.exists(delivery_folder): os.makedirs(delivery_folder) @@ -321,9 +386,6 @@ class Delivery(BaseAction): self, repre_path, anatomy, anatomy_name, anatomy_data ): dir_path, file_name = os.path.split(repre_path) - if not os.path.exists(dir_path): - # TODO log if folder don't exist - return base_name, ext = os.path.splitext(file_name) file_name_items = None @@ -334,7 +396,9 @@ class Delivery(BaseAction): file_name_items = base_name.split("%") if not file_name_items: - # TODO log if file does not exists + msg = "Source file was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) return src_collections, remainder = clique.assemble(os.listdir(dir_path)) @@ -352,15 +416,15 @@ class Delivery(BaseAction): if src_collection is None: # TODO log error! + msg = "Source collection of files was not found" + self.report_items[msg].append(repre_path) + self.log.warning("{} <{}>".format(msg, repre_path)) return anatomy_data["frame"] = "<>" anatomy_filled = anatomy.format(anatomy_data) - delivery_path = anatomy_filled.get("delivery", {}).get(anatomy_name) - if not delivery_path: - # TODO log error! - missing keys in anatomy - return + delivery_path = anatomy_filled["delivery"][anatomy_name] delivery_folder = os.path.dirname(delivery_path) dst_head, dst_tail = delivery_path.split("<>") dst_padding = src_collection.padding @@ -418,6 +482,40 @@ class Delivery(BaseAction): except OSError: shutil.copyfile(src_path, dst_path) + def report(self): + items = [] + title = "Delivery report" + for msg, _items in self.report_items.items(): + if not _items: + continue + + if items: + items.append({"type": "label", "value": "---"}) + + items.append({ + "type": "label", + "value": "# {}".format(msg) + }) + if isinstance(_items, str): + _items = [_items] + items.append({ + "type": "label", + "value": '

<p>{}</p>'.format("<br>
".join(_items)) + }) + + if not items: + return { + "success": True, + "message": "Delivery Finished" + } + + return { + "items": items, + "title": title, + "success": False, + "message": "Delivery Finished" + } + def register(session, plugins_presets={}): '''Register plugin. Called when used as an plugin.''' From bf24580b6f87ded4672661fb055a85ba92fd8b78 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 8 Jan 2020 19:31:58 +0100 Subject: [PATCH 20/31] fix root path --- pype/ftrack/actions/action_delivery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index e698c371e1..9edb7a5964 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -171,7 +171,7 @@ class Delivery(BaseAction): "NOTE: It is possible to replace `root` key in anatomy." ) }) - + items.append({ "type": "text", "name": "__location_path__", @@ -306,7 +306,7 @@ class Delivery(BaseAction): if location_path: anatomy_data["root"] = location_path else: - anatomy_data["root"] = pipeline.registered_root() + anatomy_data["root"] = os.environ.get("AVALON_PROJECTS") or "" anatomy_filled = anatomy.format(anatomy_data) test_path = ( From d4bf25f01a823b042777730d6e09333223841656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:07:11 +0000 Subject: [PATCH 21/31] resolving `${TOKEN}` variables in PATH to env variables --- pype/plugins/maya/publish/collect_yeti_rig.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 7ab5649c0b..3b05e19fdb 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -140,9 +140,21 @@ class CollectYetiRig(pyblish.api.InstancePlugin): "atttribute'" % node) # Collect all texture files + # find all ${TOKEN} tokens and replace them with $TOKEN env. 
variable + env_re = re.compile(r"\$\{(\w+)\}") for texture in texture_filenames: files = [] + + matches = re.finditer(env_re, texture) + for m in matches: + try: + texture = texture.replace(m.group(), os.environ[m.group(1)]) + except KeyError: + msg = "Cannot find requested {} in environment".format(1) + self.log.error(msg) + raise RuntimeError(msg) + if os.path.isabs(texture): self.log.debug("Texture is absolute path, ignoring " "image search paths for: %s" % texture) From 9a7f36023b5f9f9d9a29ff4ae9a6c88c7a01069b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:16:41 +0000 Subject: [PATCH 22/31] fixed error message --- pype/plugins/maya/publish/collect_yeti_rig.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 3b05e19fdb..831bc5e0ca 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -151,7 +151,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin): try: texture = texture.replace(m.group(), os.environ[m.group(1)]) except KeyError: - msg = "Cannot find requested {} in environment".format(1) + msg = "Cannot find requested {} in environment".format( + m.group(1)) self.log.error(msg) raise RuntimeError(msg) From acdc0fed0cbb1463c6f0b354c92d293f9cc1f13f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:23:07 +0000 Subject: [PATCH 23/31] refactored to class method --- pype/plugins/maya/publish/collect_yeti_rig.py | 33 ++++++++++++------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 831bc5e0ca..39426ea623 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -119,6 +119,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin): texture_filenames = [] if image_search_paths: + # find all ${TOKEN} tokens and replace them with $TOKEN env. variable + image_search_paths = self._replace_tokens(image_search_paths) # TODO: Somehow this uses OS environment path separator, `:` vs `;` # Later on check whether this is pipeline OS cross-compatible. image_search_paths = [p for p in @@ -141,21 +143,11 @@ class CollectYetiRig(pyblish.api.InstancePlugin): # Collect all texture files # find all ${TOKEN} tokens and replace them with $TOKEN env. 
variable - env_re = re.compile(r"\$\{(\w+)\}") + texture_filenames = self._replace_tokens(texture_filenames) for texture in texture_filenames: files = [] - - matches = re.finditer(env_re, texture) - for m in matches: - try: - texture = texture.replace(m.group(), os.environ[m.group(1)]) - except KeyError: - msg = "Cannot find requested {} in environment".format( - m.group(1)) - self.log.error(msg) - raise RuntimeError(msg) - + if os.path.isabs(texture): self.log.debug("Texture is absolute path, ignoring " "image search paths for: %s" % texture) @@ -296,3 +288,20 @@ class CollectYetiRig(pyblish.api.InstancePlugin): collection, remainder = clique.assemble(files, patterns=pattern) return collection + + def _replace_tokens(self, strings): + env_re = re.compile(r"\$\{(\w+)\}") + + replaced = [] + for s in strings: + matches = re.finditer(env_re, s) + for m in matches: + try: + s = s.replace(m.group(), os.environ[m.group(1)]) + except KeyError: + msg = "Cannot find requested {} in environment".format( + m.group(1)) + self.log.error(msg) + raise RuntimeError(msg) + replaced.append(s) + return replaced From efd71c7ef72090f6f989d9b237dce53333c8f6ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 10 Jan 2020 13:53:49 +0000 Subject: [PATCH 24/31] changed place where tokens are replaced for `image_search_path` --- pype/plugins/maya/publish/collect_yeti_rig.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pype/plugins/maya/publish/collect_yeti_rig.py b/pype/plugins/maya/publish/collect_yeti_rig.py index 39426ea623..c743b2c00b 100644 --- a/pype/plugins/maya/publish/collect_yeti_rig.py +++ b/pype/plugins/maya/publish/collect_yeti_rig.py @@ -119,13 +119,15 @@ class CollectYetiRig(pyblish.api.InstancePlugin): texture_filenames = [] if image_search_paths: - # find all ${TOKEN} tokens and replace them with $TOKEN env. variable - image_search_paths = self._replace_tokens(image_search_paths) + # TODO: Somehow this uses OS environment path separator, `:` vs `;` # Later on check whether this is pipeline OS cross-compatible. image_search_paths = [p for p in image_search_paths.split(os.path.pathsep) if p] + # find all ${TOKEN} tokens and replace them with $TOKEN env. 
variable + image_search_paths = self._replace_tokens(image_search_paths) + # List all related textures texture_filenames = cmds.pgYetiCommand(node, listTextures=True) self.log.info("Found %i texture(s)" % len(texture_filenames)) @@ -147,7 +149,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin): for texture in texture_filenames: files = [] - if os.path.isabs(texture): self.log.debug("Texture is absolute path, ignoring " "image search paths for: %s" % texture) From 59305a12106aa81ffc19e5b92a2b3eb8aafec2c5 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 10 Jan 2020 16:48:23 +0100 Subject: [PATCH 25/31] make sure template keys exist only when needed --- pype/plugins/global/publish/integrate_new.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index ee18347703..01dc58dc1f 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -267,10 +267,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "family": instance.data['family'], "subset": subset["name"], "version": int(version["name"]), - "hierarchy": hierarchy, - "resolution_width": repre.get("resolutionWidth", ""), - "resolution_height": repre.get("resolutionHeight", ""), - "fps": str(instance.data.get("fps", ""))} + "hierarchy": hierarchy} + + resolution_width = repre.get("resolutionWidth") + resolution_height = repre.get("resolutionHeight") + fps = instance.data.get("fps") + + + if resolution_width: + template_data["resolution_width"] = resolution_width + if resolution_width: + template_data["resolution_height"] = resolution_height + if resolution_width: + template_data["fps"] = fps files = repre['files'] if repre.get('stagingDir'): From 791bb63f97f9a74c7520ff19ea2a4e8fcd9283d2 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 10 Jan 2020 18:11:33 +0100 Subject: [PATCH 26/31] collect templates fps fix --- pype/plugins/global/publish/collect_templates.py | 16 ++++++++++++---- pype/plugins/global/publish/integrate_new.py | 1 - 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index d57d416dea..e27af82595 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -76,10 +76,18 @@ class CollectTemplates(pyblish.api.InstancePlugin): "subset": subset_name, "version": version_number, "hierarchy": hierarchy.replace("\\", "/"), - "representation": "TEMP", - "resolution_width": instance.data.get("resolutionWidth", ""), - "resolution_height": instance.data.get("resolutionHeight", ""), - "fps": str(instance.data.get("fps", ""))}} + "representation": "TEMP")} + + resolution_width = instance.data.get("resolutionWidth") + resolution_height = instance.data.get("resolutionHeight") + fps = instance.data.get("fps") + + if resolution_width: + template_data["resolution_width"] = resolution_width + if resolution_width: + template_data["resolution_height"] = resolution_height + if resolution_width: + template_data["fps"] = fps instance.data["template"] = template instance.data["assumedTemplateData"] = template_data diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 01dc58dc1f..8efec94013 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -273,7 +273,6 @@ class 
IntegrateAssetNew(pyblish.api.InstancePlugin): resolution_height = repre.get("resolutionHeight") fps = instance.data.get("fps") - if resolution_width: template_data["resolution_width"] = resolution_width if resolution_width: From 271a935ee754672d1b34592e86db7ca3b0f24360 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Sat, 11 Jan 2020 14:11:04 +0100 Subject: [PATCH 27/31] fixes to getting the path --- pype/ftrack/actions/action_delivery.py | 58 ++++++++++++++++---------- 1 file changed, 37 insertions(+), 21 deletions(-) diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py index 9edb7a5964..afd20d12d1 100644 --- a/pype/ftrack/actions/action_delivery.py +++ b/pype/ftrack/actions/action_delivery.py @@ -231,14 +231,16 @@ class Delivery(BaseAction): "message": "Not selected components to deliver." } - location_path = os.path.normpath(location_path.strip()) - if location_path and not os.path.exists(location_path): - return { - "success": False, - "message": ( - "Entered location path does not exists. \"{}\"" - ).format(location_path) - } + location_path = location_path.strip() + if location_path: + location_path = os.path.normpath(location_path) + if not os.path.exists(location_path): + return { + "success": False, + "message": ( + "Entered location path does not exists. \"{}\"" + ).format(location_path) + } self.db_con.install() self.db_con.Session["AVALON_PROJECT"] = project_name @@ -299,14 +301,16 @@ class Delivery(BaseAction): repre = repres_by_name.get(comp_name) repres_to_deliver.append(repre) + if not location_path: + location_path = os.environ.get("AVALON_PROJECTS") or "" + + print(location_path) + anatomy = Anatomy(project_name) for repre in repres_to_deliver: # Get destination repre path anatomy_data = copy.deepcopy(repre["context"]) - if location_path: - anatomy_data["root"] = location_path - else: - anatomy_data["root"] = os.environ.get("AVALON_PROJECTS") or "" + anatomy_data["root"] = location_path anatomy_filled = anatomy.format(anatomy_data) test_path = ( @@ -353,11 +357,15 @@ class Delivery(BaseAction): continue # Get source repre path + frame = repre['context'].get('frame') + + if frame: + repre["context"]["frame"] = len(str(frame)) * "#" + repre_path = self.path_from_represenation(repre) # TODO add backup solution where root of path from component # is repalced with AVALON_PROJECTS root - - if repre_path and os.path.exists(repre_path): + if not frame: self.process_single_file( repre_path, anatomy, anatomy_name, anatomy_data ) @@ -385,7 +393,7 @@ class Delivery(BaseAction): def process_sequence( self, repre_path, anatomy, anatomy_name, anatomy_data ): - dir_path, file_name = os.path.split(repre_path) + dir_path, file_name = os.path.split(str(repre_path)) base_name, ext = os.path.splitext(file_name) file_name_items = None @@ -421,12 +429,15 @@ class Delivery(BaseAction): self.log.warning("{} <{}>".format(msg, repre_path)) return - anatomy_data["frame"] = "<>" + frame_indicator = "@####@" + + anatomy_data["frame"] = frame_indicator anatomy_filled = anatomy.format(anatomy_data) delivery_path = anatomy_filled["delivery"][anatomy_name] + print(delivery_path) delivery_folder = os.path.dirname(delivery_path) - dst_head, dst_tail = delivery_path.split("<>") + dst_head, dst_tail = delivery_path.split(frame_indicator) dst_padding = src_collection.padding dst_collection = clique.Collection( head=dst_head, @@ -469,10 +480,11 @@ class Delivery(BaseAction): # Template references unavailable data return None - if os.path.exists(path): - return 
os.path.normpath(path) + return os.path.normpath(path) def copy_file(self, src_path, dst_path): + if os.path.exists(dst_path): + return try: filelink.create( src_path, @@ -496,11 +508,15 @@ class Delivery(BaseAction): "type": "label", "value": "# {}".format(msg) }) - if isinstance(_items, str): + if not isinstance(_items, (list, tuple)): _items = [_items] + __items = [] + for item in _items: + __items.append(str(item)) + items.append({ "type": "label", - "value": '

<p>{}</p>'.format("<br>
".join(_items)) + "value": '

<p>{}</p>'.format("<br>
".join(__items)) }) if not items: From cc4857a5d87a39430b3d0b72fb72e7a824621a41 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Sat, 11 Jan 2020 14:56:48 +0100 Subject: [PATCH 28/31] hotfix/pathlib in integration --- pype/plugins/global/publish/integrate_new.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index c2812880c7..6e7a8d13a9 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -470,7 +470,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): None """ - src = Path(src).resolve() + src = str(Path(src).resolve()) drive, _path = os.path.splitdrive(dst) unc = Path(drive).resolve() dst = str(unc / _path) @@ -495,7 +495,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def hardlink_file(self, src, dst): dirname = os.path.dirname(dst) src = Path(src).resolve() - dst = Path(dst).resolve() + drive, _path = os.path.splitdrive(dst) + unc = Path(drive).resolve() + dst = str(unc / _path) try: os.makedirs(dirname) except OSError as e: From c43ae7cb5f49de1db34584e312c6d83a5b781793 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 14 Jan 2020 15:43:26 +0100 Subject: [PATCH 29/31] allow exporting multiple arnold standins from single scene. --- pype/plugins/maya/publish/collect_ass.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pype/plugins/maya/publish/collect_ass.py b/pype/plugins/maya/publish/collect_ass.py index c0174e7026..8e6691120a 100644 --- a/pype/plugins/maya/publish/collect_ass.py +++ b/pype/plugins/maya/publish/collect_ass.py @@ -21,15 +21,17 @@ class CollectAssData(pyblish.api.InstancePlugin): objsets = instance.data['setMembers'] for objset in objsets: + objset = str(objset) members = cmds.sets(objset, query=True) if members is None: self.log.warning("Skipped empty instance: \"%s\" " % objset) continue - if objset == "content_SET": + if "content_SET" in objset: instance.data['setMembers'] = members - elif objset == "proxy_SET": + self.log.debug('content members: {}'.format(members)) + elif objset.startswith("proxy_SET"): assert len(members) == 1, "You have multiple proxy meshes, please only use one" instance.data['proxy'] = members - + self.log.debug('proxy members: {}'.format(members)) self.log.debug("data: {}".format(instance.data)) From d6b9ac36d50cc4175814d378298cb4a0fb2c5675 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 14 Jan 2020 15:50:48 +0100 Subject: [PATCH 30/31] rename creator and collector --- .../blender/create/{submarine_model.py => create_model.py} | 0 pype/plugins/blender/load/{submarine_model.py => load_model.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename pype/plugins/blender/create/{submarine_model.py => create_model.py} (100%) rename pype/plugins/blender/load/{submarine_model.py => load_model.py} (100%) diff --git a/pype/plugins/blender/create/submarine_model.py b/pype/plugins/blender/create/create_model.py similarity index 100% rename from pype/plugins/blender/create/submarine_model.py rename to pype/plugins/blender/create/create_model.py diff --git a/pype/plugins/blender/load/submarine_model.py b/pype/plugins/blender/load/load_model.py similarity index 100% rename from pype/plugins/blender/load/submarine_model.py rename to pype/plugins/blender/load/load_model.py From 06ed617d9bb25d3dc78d09c49f7a6904fef724c6 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 17 Jan 2020 09:34:23 +0100 Subject: [PATCH 31/31] make 
ascii ass configurable via presets --- pype/plugins/maya/publish/extract_ass.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py index 71f3e0d84c..4cf394aefe 100644 --- a/pype/plugins/maya/publish/extract_ass.py +++ b/pype/plugins/maya/publish/extract_ass.py @@ -17,6 +17,7 @@ class ExtractAssStandin(pype.api.Extractor): label = "Ass Standin (.ass)" hosts = ["maya"] families = ["ass"] + asciiAss = False def process(self, instance): @@ -47,7 +48,7 @@ class ExtractAssStandin(pype.api.Extractor): exported_files = cmds.arnoldExportAss(filename=file_path, selected=True, - asciiAss=True, + asciiAss=self.asciiAss, shadowLinks=True, lightLinks=True, boundingBox=True, @@ -59,13 +60,15 @@ class ExtractAssStandin(pype.api.Extractor): filenames.append(os.path.split(file)[1]) self.log.info("Exported: {}".format(filenames)) else: + self.log.info("Extracting ass") cmds.arnoldExportAss(filename=file_path, selected=True, - asciiAss=True, + asciiAss=False, shadowLinks=True, lightLinks=True, boundingBox=True ) + self.log.info("Extracted {}".format(filename)) filenames = filename optionals = [ "frameStart", "frameEnd", "step", "handles",