From 8e10029d2af496858987808630edf8d6c749c530 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 31 May 2021 17:33:44 +0100 Subject: [PATCH 001/308] Improved model assets handling --- openpype/hosts/blender/api/plugin.py | 30 +-- .../blender/plugins/create/create_model.py | 32 ++- .../hosts/blender/plugins/load/load_model.py | 226 ++++++++---------- .../plugins/publish/collect_instances.py | 26 +- .../blender/plugins/publish/extract_blend.py | 19 +- 5 files changed, 164 insertions(+), 169 deletions(-) diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index de30da3319..4d82fb3f76 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -5,8 +5,8 @@ from typing import Dict, List, Optional import bpy -from avalon import api -import avalon.blender +from avalon import api, blender +from avalon.blender.pipeline import AVALON_CONTAINERS from openpype.api import PypeCreatorMixin VALID_EXTENSIONS = [".blend", ".json", ".abc"] @@ -27,25 +27,17 @@ def get_unique_number( asset: str, subset: str ) -> str: """Return a unique number based on the asset name.""" - avalon_containers = [ - c for c in bpy.data.collections - if c.name == 'AVALON_CONTAINERS' - ] - containers = [] - # First, add the children of avalon containers - for c in avalon_containers: - containers.extend(c.children) - # then keep looping to include all the children - for c in containers: - containers.extend(c.children) - container_names = [ - c.name for c in containers - ] + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + return "01" + asset_groups = avalon_container.objects + + container_names = [c.name for c in asset_groups] count = 1 - name = f"{asset}_{count:0>2}_{subset}_CON" + name = f"{asset}_{count:0>2}_{subset}" while name in container_names: count += 1 - name = f"{asset}_{count:0>2}_{subset}_CON" + name = f"{asset}_{count:0>2}_{subset}" return f"{count:0>2}" @@ -102,7 +94,7 @@ 
def get_local_collection_with_name(name): return None -class Creator(PypeCreatorMixin, avalon.blender.Creator): +class Creator(PypeCreatorMixin, blender.Creator): pass diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index 921d86513b..223896944b 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -4,10 +4,11 @@ import bpy from avalon import api from avalon.blender import lib -import openpype.hosts.blender.api.plugin +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateModel(openpype.hosts.blender.api.plugin.Creator): +class CreateModel(plugin.Creator): """Polygonal static geometry""" name = "modelMain" @@ -16,17 +17,30 @@ class CreateModel(openpype.hosts.blender.api.plugin.Creator): icon = "cube" def process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name = AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + # Create instance object asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link(collection) + name = plugin.asset_name(asset, subset) + asset_group = bpy.data.objects.new(name=name, object_data = None) + instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) + lib.imprint(asset_group, self.data) + # Add selected objects to instance if (self.options or {}).get("useSelection"): - for obj in lib.get_selection(): - collection.objects.link(obj) + bpy.context.view_layer.objects.active = asset_group + selected = lib.get_selection() + for obj in selected: + 
obj.select_set(True) + selected.append(asset_group) + context = plugin.create_blender_context( + active = asset_group, selected = selected) + bpy.ops.object.parent_set(context, keep_transform=True) - return collection + return asset_group diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 35a241b98e..7e757f5433 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -1,13 +1,16 @@ """Load a model asset in Blender.""" -import logging from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin as plugin + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin class BlendModelLoader(plugin.AssetLoader): @@ -24,52 +27,78 @@ class BlendModelLoader(plugin.AssetLoader): icon = "code-fork" color = "orange" - def _remove(self, objects, container): - for obj in list(objects): - for material_slot in list(obj.material_slots): - bpy.data.materials.remove(material_slot.material) - bpy.data.meshes.remove(obj.data) + def _remove(self, asset_group): + objects = list(asset_group.children) + empties = [] - bpy.data.collections.remove(container) + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + bpy.data.materials.remove(material_slot.material) + bpy.data.meshes.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + empties.append(obj) - def _process( - self, libpath, lib_container, container_name, - parent_collection - ): + for empty in empties: + bpy.data.objects.remove(empty) + + def _process(self, libpath, asset_group, group_name): relative = 
bpy.context.preferences.filepaths.use_relative_paths with bpy.data.libraries.load( libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] + ) as (data_from, data_to): + data_to.objects = data_from.objects - parent = parent_collection + parent = bpy.context.scene.collection - if parent is None: - parent = bpy.context.scene.collection + empties = [obj for obj in data_to.objects if obj.type == 'EMPTY'] - parent.children.link(bpy.data.collections[lib_container]) + container = None - model_container = parent.children[lib_container].make_local() - model_container.name = container_name + for empty in empties: + if empty.get(AVALON_PROPERTY): + container = empty + break - for obj in model_container.objects: - local_obj = plugin.prepare_data(obj, container_name) - plugin.prepare_data(local_obj.data, container_name) + assert container, "No asset group found" - for material_slot in local_obj.material_slots: - plugin.prepare_data(material_slot.material, container_name) + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) - if not obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() + for obj in nodes: + obj.parent = asset_group - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) - model_container.pop(blender.pipeline.AVALON_PROPERTY) + objects.reverse() + + for obj in objects: + parent.objects.link(obj) + + for obj in objects: + local_obj = plugin.prepare_data(obj, group_name) + if obj.type != 'EMPTY': + plugin.prepare_data(local_obj.data, group_name) + + for material_slot in local_obj.material_slots: + plugin.prepare_data(material_slot.material, group_name) + + if not obj.get(AVALON_PROPERTY): + local_obj[AVALON_PROPERTY] = dict() + + avalon_info = 
local_obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + objects.reverse() bpy.ops.object.select_all(action='DESELECT') - return model_container + return objects def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, @@ -82,50 +111,41 @@ class BlendModelLoader(plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname asset = context["asset"]["name"] subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) - metadata = container.get(blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects.new(group_name, object_data=None) + avalon_container.objects.link(asset_group) - metadata["libpath"] = libpath - metadata["lib_container"] = lib_container + objects = self._process(libpath, asset_group, group_name) - obj_container = self._process( - libpath, lib_container, container_name, None) + bpy.context.scene.collection.objects.link(asset_group) - metadata["obj_container"] = obj_container + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": 
namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"] + } - # Save the list of objects in the metadata container - metadata["objects"] = obj_container.all_objects - - metadata["parent"] = str(context["representation"]["parent"]) - metadata["family"] = context["representation"]["context"]["family"] - - nodes = list(container.objects) - nodes.append(container) + nodes = objects self[:] = nodes return nodes @@ -137,13 +157,9 @@ class BlendModelLoader(plugin.AssetLoader): If the objects of the collection are used in another collection they will not be removed, only unlinked. Normally this should not be the case though. - - Warning: - No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -153,12 +169,9 @@ class BlendModelLoader(plugin.AssetLoader): pformat(representation, indent=2), ) - assert collection, ( + assert asset_group, ( f"The asset is not loaded: {container['objectName']}" ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) assert libpath, ( "No existing library file found for {container['objectName']}" ) @@ -169,45 +182,30 @@ class BlendModelLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - lib_container = collection_metadata["lib_container"] + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) ) normalized_libpath = ( str(Path(bpy.path.abspath(str(libpath))).resolve()) ) self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, normalized_libpath, ) - if normalized_collection_libpath == normalized_libpath: + if normalized_group_libpath == normalized_libpath: self.log.info("Library already loaded, not updating...") return - parent = plugin.get_parent_collection(obj_container) + self._remove(asset_group) - self._remove(objects, obj_container) + self._process(str(libpath), asset_group, object_name) - obj_container = self._process( - str(libpath), lib_container, container_name, parent) - - # Save the list of objects in the metadata container - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) def remove(self, container: Dict) -> bool: 
"""Remove an existing container from a Blender scene. @@ -218,29 +216,15 @@ class BlendModelLoader(plugin.AssetLoader): Returns: bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: return False - assert not (collection.children), ( - "Nested collections are not supported." - ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) + self._remove(asset_group) - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) + bpy.data.objects.remove(asset_group) return True diff --git a/openpype/hosts/blender/plugins/publish/collect_instances.py b/openpype/hosts/blender/plugins/publish/collect_instances.py index 1d3693216d..09a60d9725 100644 --- a/openpype/hosts/blender/plugins/publish/collect_instances.py +++ b/openpype/hosts/blender/plugins/publish/collect_instances.py @@ -5,6 +5,7 @@ import json import pyblish.api from avalon.blender.pipeline import AVALON_PROPERTY +from avalon.blender.pipeline import AVALON_INSTANCES class CollectInstances(pyblish.api.ContextPlugin): @@ -15,24 +16,25 @@ class CollectInstances(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder @staticmethod - def get_collections() -> Generator: + def get_asset_groups() -> Generator: """Return all 'model' collections. Check if the family is 'model' and if it doesn't have the representation set. If the representation is set, it is a loaded model and we don't want to publish it. 
""" - for collection in bpy.data.collections: - avalon_prop = collection.get(AVALON_PROPERTY) or dict() + instances = bpy.data.collections.get(AVALON_INSTANCES) + for obj in instances.objects: + avalon_prop = obj.get(AVALON_PROPERTY) or dict() if avalon_prop.get('id') == 'pyblish.avalon.instance': - yield collection + yield obj def process(self, context): """Collect the models from the current Blender scene.""" - collections = self.get_collections() + asset_groups = self.get_asset_groups() - for collection in collections: - avalon_prop = collection[AVALON_PROPERTY] + for group in asset_groups: + avalon_prop = group[AVALON_PROPERTY] asset = avalon_prop['asset'] family = avalon_prop['family'] subset = avalon_prop['subset'] @@ -46,9 +48,13 @@ class CollectInstances(pyblish.api.ContextPlugin): asset=asset, task=task, ) - members = list(collection.objects) - members.append(collection) - instance[:] = members + objects = list(group.children) + members = set() + for obj in objects: + objects.extend(list(obj.children)) + members.add(obj) + members.add(group) + instance[:] = list(members) self.log.debug(json.dumps(instance.data, indent=4)) for obj in instance: self.log.debug(obj) diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py index 890c8b5ffd..60ef20e31c 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend.py @@ -1,6 +1,8 @@ import os -import avalon.blender.workio +import bpy + +# import avalon.blender.workio import openpype.api @@ -22,15 +24,12 @@ class ExtractBlend(openpype.api.Extractor): # Perform extraction self.log.info("Performing extraction..") - # Just save the file to a temporary location. At least for now it's no - # problem to have (possibly) extra stuff in the file. 
- avalon.blender.workio.save_file(filepath, copy=True) - # - # # Store reference for integration - # if "files" not in instance.data: - # instance.data["files"] = list() - # - # # instance.data["files"].append(filename) + data_blocks = set() + + for obj in instance: + data_blocks.add(obj) + + bpy.data.libraries.write(filepath, data_blocks) if "representations" not in instance.data: instance.data["representations"] = [] From ef3319b3e8f559887412ccfce1df0e4822947323 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 31 May 2021 17:39:46 +0100 Subject: [PATCH 002/308] Hound fixes --- openpype/hosts/blender/plugins/create/create_model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index 223896944b..e318cd07da 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -20,14 +20,14 @@ class CreateModel(plugin.Creator): # Get Instance Containter or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: - instances = bpy.data.collections.new(name = AVALON_INSTANCES) + instances = bpy.data.collections.new(name=AVALON_INSTANCES) bpy.context.scene.collection.children.link(instances) # Create instance object asset = self.data["asset"] subset = self.data["subset"] name = plugin.asset_name(asset, subset) - asset_group = bpy.data.objects.new(name=name, object_data = None) + asset_group = bpy.data.objects.new(name=name, object_data=None) instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) @@ -40,7 +40,7 @@ class CreateModel(plugin.Creator): obj.select_set(True) selected.append(asset_group) context = plugin.create_blender_context( - active = asset_group, selected = selected) + active=asset_group, selected=selected) bpy.ops.object.parent_set(context, 
keep_transform=True) return asset_group From 0fd34c6acad71f76d4aacfcd3abaa0ecd052246d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 2 Jun 2021 10:49:57 +0100 Subject: [PATCH 003/308] Fixed FBX and ABC extraction and loading --- .../blender/plugins/publish/extract_abc.py | 60 ++++--------- .../blender/plugins/publish/extract_fbx.py | 52 +++++------- .../load/load_alembic_geometrycache.py | 85 +++++++++++-------- .../plugins/load/load_alembic_staticmesh.py | 67 ++++++++------- .../unreal/plugins/load/load_staticmeshfbx.py | 61 +++++++------ 5 files changed, 150 insertions(+), 175 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index a6315908fc..97123c5967 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -1,12 +1,13 @@ import os -import openpype.api -import openpype.hosts.blender.api.plugin +from openpype import api +from openpype.hosts.blender.api import plugin +from avalon.blender.pipeline import AVALON_PROPERTY import bpy -class ExtractABC(openpype.api.Extractor): +class ExtractABC(api.Extractor): """Extract as ABC.""" label = "Extract ABC" @@ -16,7 +17,6 @@ class ExtractABC(openpype.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = self.staging_dir(instance) filename = f"{instance.name}.abc" filepath = os.path.join(stagingdir, filename) @@ -28,57 +28,29 @@ class ExtractABC(openpype.api.Extractor): # Perform extraction self.log.info("Performing extraction..") - collections = [ - obj for obj in instance if type(obj) is bpy.types.Collection] - - assert len(collections) == 1, "There should be one and only one " \ - "collection collected for this asset" - - old_active_layer_collection = view_layer.active_layer_collection - - layers = view_layer.layer_collection.children - - # Get the layer collection from the collection we need to export. 
- # This is needed because in Blender you can only set the active - # collection with the layer collection, and there is no way to get - # the layer collection from the collection - # (but there is the vice versa). - layer_collections = [ - layer for layer in layers if layer.collection == collections[0]] - - assert len(layer_collections) == 1 - - view_layer.active_layer_collection = layer_collections[0] - - old_scale = scene.unit_settings.scale_length - bpy.ops.object.select_all(action='DESELECT') - selected = list() + selected = [] + asset_group = None for obj in instance: - try: - obj.select_set(True) - selected.append(obj) - except: - continue + obj.select_set(True) + selected.append(obj) + if obj.get(AVALON_PROPERTY): + asset_group = obj - new_context = openpype.hosts.blender.api.plugin.create_blender_context( - active=selected[0], selected=selected) - - # We set the scale of the scene for the export - scene.unit_settings.scale_length = 0.01 + context = plugin.create_blender_context( + active=asset_group, selected=selected) # We export the abc bpy.ops.wm.alembic_export( - new_context, + context, filepath=filepath, - selected=True + selected=True, + flatten=True ) - view_layer.active_layer_collection = old_active_layer_collection - - scene.unit_settings.scale_length = old_scale + bpy.ops.object.select_all(action='DESELECT') if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx.py b/openpype/hosts/blender/plugins/publish/extract_fbx.py index dc74348949..c8e2473cf5 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx.py @@ -1,11 +1,13 @@ import os -import openpype.api +from openpype import api +from openpype.hosts.blender.api import plugin +from avalon.blender.pipeline import AVALON_PROPERTY import bpy -class ExtractFBX(openpype.api.Extractor): +class ExtractFBX(api.Extractor): """Extract as FBX.""" label 
= "Extract FBX" @@ -15,56 +17,40 @@ class ExtractFBX(openpype.api.Extractor): def process(self, instance): # Define extract output file path - stagingdir = self.staging_dir(instance) filename = f"{instance.name}.fbx" filepath = os.path.join(stagingdir, filename) - context = bpy.context - scene = context.scene - view_layer = context.view_layer + scene = bpy.context.scene # Perform extraction self.log.info("Performing extraction..") - collections = [ - obj for obj in instance if type(obj) is bpy.types.Collection] + bpy.ops.object.select_all(action='DESELECT') - assert len(collections) == 1, "There should be one and only one " \ - "collection collected for this asset" + selected = [] + asset_group = None - old_active_layer_collection = view_layer.active_layer_collection + for obj in instance: + obj.select_set(True) + selected.append(obj) + if obj.get(AVALON_PROPERTY): + asset_group = obj - layers = view_layer.layer_collection.children - - # Get the layer collection from the collection we need to export. - # This is needed because in Blender you can only set the active - # collection with the layer collection, and there is no way to get - # the layer collection from the collection - # (but there is the vice versa). 
- layer_collections = [ - layer for layer in layers if layer.collection == collections[0]] - - assert len(layer_collections) == 1 - - view_layer.active_layer_collection = layer_collections[0] - - old_scale = scene.unit_settings.scale_length - - # We set the scale of the scene for the export - scene.unit_settings.scale_length = 0.01 + context = plugin.create_blender_context( + active=asset_group, selected=selected) # We export the fbx bpy.ops.export_scene.fbx( + context, filepath=filepath, - use_active_collection=True, + use_active_collection=False, + use_selection=True, mesh_smooth_type='FACE', add_leaf_bones=False ) - view_layer.active_layer_collection = old_active_layer_collection - - scene.unit_settings.scale_length = old_scale + bpy.ops.object.select_all(action='DESELECT') if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py index a9279bf6e0..ad37a7a068 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py @@ -15,6 +15,46 @@ class PointCacheAlembicLoader(api.Loader): icon = "cube" color = "orange" + def get_task( + self, filename, asset_dir, asset_name, replace, frame_start, frame_end + ): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + gc_settings = unreal.AbcGeometryCacheSettings() + conversion_settings = unreal.AbcConversionSettings() + sampling_settings = unreal.AbcSamplingSettings() + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options.set_editor_property( + 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) + + gc_settings.set_editor_property('flatten_tracks', False) + + conversion_settings.set_editor_property('flip_u', False) + conversion_settings.set_editor_property('flip_v', True) + conversion_settings.set_editor_property( + 'scale', unreal.Vector(x=100.0, y=100.0, z=100.0)) + conversion_settings.set_editor_property( + 'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0)) + + sampling_settings.set_editor_property('frame_start', frame_start) + sampling_settings.set_editor_property('frame_end', frame_end) + + options.geometry_cache_settings = gc_settings + options.conversion_settings = conversion_settings + options.sampling_settings = sampling_settings + task.options = options + + return task + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -55,25 +95,17 @@ class PointCacheAlembicLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + frame_start = context.get('asset').get('data').get('frameStart') + frame_end = context.get('asset').get('data').get('frameEnd') - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) + # If frame start and end are the same, we increse the end frame by + # one, otherwise Unreal will not import it + if frame_start == frame_end: + frame_end += 1 - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) + task = self.get_task( + self.fname, asset_dir, asset_name, False, frame_start, frame_end) - options.geometry_cache_settings.set_editor_property( - 'flatten_tracks', False) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -109,28 +141,11 @@ class PointCacheAlembicLoader(api.Loader): source_path = api.get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) - task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) - - options.geometry_cache_settings.set_editor_property( - 'flatten_tracks', False) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py index 12b9320f72..ccec31b832 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -15,6 +15,39 @@ class StaticMeshAlembicLoader(api.Loader): icon = "cube" color = "orange" + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + sm_settings = unreal.AbcStaticMeshSettings() + conversion_settings = unreal.AbcConversionSettings() + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options.set_editor_property( + 'import_type', unreal.AlembicImportType.STATIC_MESH) + + sm_settings.set_editor_property('merge_meshes', True) + + conversion_settings.set_editor_property('flip_u', False) + conversion_settings.set_editor_property('flip_v', True) + conversion_settings.set_editor_property( + 'scale', unreal.Vector(x=100.0, y=100.0, z=100.0)) + conversion_settings.set_editor_property( + 'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0)) + + options.static_mesh_settings = sm_settings + options.conversion_settings = conversion_settings + task.options = options + + return task + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -55,22 +88,8 @@ class StaticMeshAlembicLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + task = self.get_task(self.fname, asset_dir, asset_name, False) - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.STATIC_MESH) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -106,25 +125,11 @@ class StaticMeshAlembicLoader(api.Loader): source_path = api.get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) - task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - # Unreal 4.24 ignores the settings. It works with Unreal 4.26 - options = unreal.AbcImportSettings() - options.set_editor_property( - 'import_type', unreal.AlembicImportType.STATIC_MESH) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata diff --git a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py index dcb566fa4c..d25f84ea69 100644 --- a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py @@ -15,6 +15,31 @@ class StaticMeshFBXLoader(api.Loader): icon = "cube" color = "orange" + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.FbxImportUI() + import_data = unreal.FbxStaticMeshImportData() + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', 
asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + options.set_editor_property( + 'automated_import_should_detect_type', False) + options.set_editor_property('import_animations', False) + + import_data.set_editor_property('combine_meshes', True) + import_data.set_editor_property('remove_degenerates', False) + + options.static_mesh_import_data = import_data + task.options = options + + return task + def load(self, context, name, namespace, data): """ Load and containerise representation into Content Browser. @@ -55,22 +80,8 @@ class StaticMeshFBXLoader(api.Loader): unreal.EditorAssetLibrary.make_directory(asset_dir) - task = unreal.AssetImportTask() + task = self.get_task(self.fname, asset_dir, asset_name, False) - task.set_editor_property('filename', self.fname) - task.set_editor_property('destination_path', asset_dir) - task.set_editor_property('destination_name', asset_name) - task.set_editor_property('replace_existing', False) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - options = unreal.FbxImportUI() - options.set_editor_property( - 'automated_import_should_detect_type', False) - options.set_editor_property('import_animations', False) - - task.options = options unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 # Create Asset Container @@ -102,29 +113,15 @@ class StaticMeshFBXLoader(api.Loader): return asset_content def update(self, container, representation): - name = container["name"] + name = container["asset_name"] source_path = api.get_representation_path(representation) destination_path = container["namespace"] - task = unreal.AssetImportTask() + task = self.get_task(source_path, destination_path, name, True) - task.set_editor_property('filename', source_path) 
- task.set_editor_property('destination_path', destination_path) - # strip suffix - task.set_editor_property('destination_name', name) - task.set_editor_property('replace_existing', True) - task.set_editor_property('automated', True) - task.set_editor_property('save', True) - - # set import options here - options = unreal.FbxImportUI() - options.set_editor_property( - 'automated_import_should_detect_type', False) - options.set_editor_property('import_animations', False) - - task.options = options # do import fbx and replace existing data unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], container["objectName"]) # update metadata From f19bcc61bbb4869e65cc296ada175f22d7542542 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 3 Jun 2021 14:21:23 +0100 Subject: [PATCH 004/308] Added support for loading FBX models in Blender and fixed alembic --- openpype/hosts/blender/api/plugin.py | 2 +- .../hosts/blender/plugins/load/load_abc.py | 220 +++++++-------- .../blender/plugins/load/load_fbx_model.py | 250 ++++++++++++++++++ .../hosts/blender/plugins/load/load_model.py | 2 + .../blender/plugins/publish/extract_abc.py | 2 +- 5 files changed, 368 insertions(+), 108 deletions(-) create mode 100644 openpype/hosts/blender/plugins/load/load_fbx_model.py diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index 4d82fb3f76..f3cf2b88cd 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -44,7 +44,7 @@ def get_unique_number( def prepare_data(data, container_name): name = data.name local_data = data.make_local() - local_data.name = f"{name}:{container_name}" + local_data.name = f"{container_name}:{name}" return local_data diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 4248cffd69..7261e002f1 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py 
+++ b/openpype/hosts/blender/plugins/load/load_abc.py @@ -4,9 +4,14 @@ from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin as plugin + +from avalon import api +from avalon.blender import lib +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin class CacheModelLoader(plugin.AssetLoader): @@ -21,59 +26,94 @@ class CacheModelLoader(plugin.AssetLoader): families = ["model", "pointcache"] representations = ["abc"] - label = "Link Alembic" + label = "Load Alembic" icon = "code-fork" color = "orange" - def _remove(self, objects, container): - for obj in list(objects): + def _remove(self, asset_group): + objects = list(asset_group.children) + empties = [] + + for obj in objects: if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + bpy.data.materials.remove(material_slot.material) bpy.data.meshes.remove(obj.data) elif obj.type == 'EMPTY': - bpy.data.objects.remove(obj) + objects.extend(obj.children) + empties.append(obj) - bpy.data.collections.remove(container) + for empty in empties: + bpy.data.objects.remove(empty) - def _process(self, libpath, container_name, parent_collection): + def _process(self, libpath, asset_group, group_name): bpy.ops.object.select_all(action='DESELECT') - view_layer = bpy.context.view_layer - view_layer_collection = view_layer.active_layer_collection.collection + collection = bpy.context.view_layer.active_layer_collection.collection relative = bpy.context.preferences.filepaths.use_relative_paths + context = plugin.create_blender_context() bpy.ops.wm.alembic_import( + context, filepath=libpath, relative_path=relative ) - parent = parent_collection + parent = bpy.context.scene.collection - if parent is None: - parent = bpy.context.scene.collection 
+ imported = lib.get_selection() - model_container = bpy.data.collections.new(container_name) - parent.children.link(model_container) - for obj in bpy.context.selected_objects: - model_container.objects.link(obj) - view_layer_collection.objects.unlink(obj) + empties = [obj for obj in imported if obj.type == 'EMPTY'] + container = None + + for empty in empties: + if not empty.parent: + container = empty + break + + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) + + for obj in nodes: + obj.parent = asset_group + + bpy.data.objects.remove(container) + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() + + for obj in objects: + parent.objects.link(obj) + collection.objects.unlink(obj) + + for obj in objects: name = obj.name - obj.name = f"{name}:{container_name}" + obj.name = f"{group_name}:{name}" + if obj.type != 'EMPTY': + name_data = obj.data.name + obj.data.name = f"{group_name}:{name_data}" - # Groups are imported as Empty objects in Blender - if obj.type == 'MESH': - data_name = obj.data.name - obj.data.name = f"{data_name}:{container_name}" + for material_slot in obj.material_slots: + name_mat = material_slot.material.name + material_slot.material.name = f"{group_name}:{name_mat}" - if not obj.get(blender.pipeline.AVALON_PROPERTY): - obj[blender.pipeline.AVALON_PROPERTY] = dict() + if not obj.get(AVALON_PROPERTY): + obj[AVALON_PROPERTY] = dict() - avalon_info = obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) + avalon_info = obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) bpy.ops.object.select_all(action='DESELECT') - return model_container + return objects def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, @@ -91,43 +131,37 @@ class CacheModelLoader(plugin.AssetLoader): asset = 
context["asset"]["name"] subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) - container_metadata = container.get( - blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects.new(group_name, object_data=None) + avalon_container.objects.link(asset_group) - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container + objects = self._process(libpath, asset_group, group_name) - obj_container = self._process( - libpath, container_name, None) + bpy.context.scene.collection.objects.link(asset_group) - container_metadata["obj_container"] = obj_container + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"] + } - # Save the list of objects in the metadata container - container_metadata["objects"] = obj_container.all_objects - - nodes = list(container.objects) - 
nodes.append(container) + nodes = objects self[:] = nodes return nodes @@ -143,9 +177,8 @@ class CacheModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -155,12 +188,9 @@ class CacheModelLoader(plugin.AssetLoader): pformat(representation, indent=2), ) - assert collection, ( + assert asset_group, ( f"The asset is not loaded: {container['objectName']}" ) - assert not (collection.children), ( - "Nested collections are not supported." - ) assert libpath, ( "No existing library file found for {container['objectName']}" ) @@ -171,43 +201,32 @@ class CacheModelLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) ) normalized_libpath = ( str(Path(bpy.path.abspath(str(libpath))).resolve()) ) self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, normalized_libpath, ) - if normalized_collection_libpath == normalized_libpath: + if normalized_group_libpath == normalized_libpath: self.log.info("Library already loaded, not updating...") return - parent = 
plugin.get_parent_collection(obj_container) + mat = asset_group.matrix_basis.copy() + self._remove(asset_group) - self._remove(objects, obj_container) + self._process(str(libpath), asset_group, object_name) + asset_group.matrix_basis = mat - obj_container = self._process( - str(libpath), container_name, parent) - - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) def remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. @@ -222,25 +241,14 @@ class CacheModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) + self._remove(asset_group) - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) + bpy.data.objects.remove(asset_group) return True diff --git a/openpype/hosts/blender/plugins/load/load_fbx_model.py b/openpype/hosts/blender/plugins/load/load_fbx_model.py new file mode 100644 index 0000000000..776217e5f1 --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_fbx_model.py @@ -0,0 +1,250 @@ +"""Load an asset in Blender from an Alembic file.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +import bpy + +from avalon import api +from avalon.blender import lib +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin + + +class FbxModelLoader(plugin.AssetLoader): + """Load FBX models. + + Stores the imported asset in an empty named after the asset. 
+ """ + + families = ["model"] + representations = ["fbx"] + + label = "Load FBX" + icon = "code-fork" + color = "orange" + + def _remove(self, asset_group): + objects = list(asset_group.children) + empties = [] + + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + if material_slot.material: + bpy.data.materials.remove(material_slot.material) + bpy.data.meshes.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + empties.append(obj) + + for empty in empties: + bpy.data.objects.remove(empty) + + def _process(self, libpath, asset_group, group_name): + bpy.ops.object.select_all(action='DESELECT') + + collection = bpy.context.view_layer.active_layer_collection.collection + + context = plugin.create_blender_context() + bpy.ops.import_scene.fbx( + context, + filepath=libpath + ) + + parent = bpy.context.scene.collection + + imported = lib.get_selection() + + empties = [obj for obj in imported if obj.type == 'EMPTY'] + + container = None + + for empty in empties: + if not empty.parent: + container = empty + break + + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) + + for obj in nodes: + obj.parent = asset_group + + bpy.data.objects.remove(container) + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() + + for obj in objects: + parent.objects.link(obj) + collection.objects.unlink(obj) + + for obj in objects: + name = obj.name + obj.name = f"{group_name}:{name}" + if obj.type != 'EMPTY': + name_data = obj.data.name + obj.data.name = f"{group_name}:{name_data}" + + for material_slot in obj.material_slots: + name_mat = material_slot.material.name + material_slot.material.name = f"{group_name}:{name_mat}" + + if not obj.get(AVALON_PROPERTY): + obj[AVALON_PROPERTY] = dict() + + avalon_info = obj[AVALON_PROPERTY] + 
avalon_info.update({"container_name": group_name}) + + bpy.ops.object.select_all(action='DESELECT') + + return objects + + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + namespace = namespace or f"{asset}_{unique_number}" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) + + asset_group = bpy.data.objects.new(group_name, object_data=None) + avalon_container.objects.link(asset_group) + + objects = self._process(libpath, asset_group, group_name) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"] + } + + nodes = objects + self[:] = nodes + return nodes + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. 
+ If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert asset_group, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] + + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, + normalized_libpath, + ) + if normalized_group_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + mat = asset_group.matrix_basis.copy() + self._remove(asset_group) + + self._process(str(libpath), asset_group, object_name) + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + + Warning: + No nested collections are supported at the moment! 
+ """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: + return False + + self._remove(asset_group) + + bpy.data.objects.remove(asset_group) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 7e757f5433..dd1b76034b 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -200,9 +200,11 @@ class BlendModelLoader(plugin.AssetLoader): self.log.info("Library already loaded, not updating...") return + mat = asset_group.matrix_basis.copy() self._remove(asset_group) self._process(str(libpath), asset_group, object_name) + asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) metadata["representation"] = str(representation["_id"]) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index 97123c5967..4696da3db4 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -47,7 +47,7 @@ class ExtractABC(api.Extractor): context, filepath=filepath, selected=True, - flatten=True + flatten=False ) bpy.ops.object.select_all(action='DESELECT') From 4717b4dc0bca85735bcdfea2dee9054d16652297 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 4 Jun 2021 11:18:14 +0100 Subject: [PATCH 005/308] Remove linked library when it is not used anymore --- .../hosts/blender/plugins/load/load_model.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index dd1b76034b..a6ace64254 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -200,10 +200,23 @@ class BlendModelLoader(plugin.AssetLoader): self.log.info("Library already loaded, not 
updating...") return + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: + count += 1 + mat = asset_group.matrix_basis.copy() + self._remove(asset_group) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) + bpy.data.libraries.remove(library) + self._process(str(libpath), asset_group, object_name) + asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) @@ -221,6 +234,13 @@ class BlendModelLoader(plugin.AssetLoader): """ object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') + + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == libpath: + count += 1 if not asset_group: return False @@ -229,4 +249,9 @@ class BlendModelLoader(plugin.AssetLoader): bpy.data.objects.remove(asset_group) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) + return True From 96b3e063ba766ca79bd1c2b5b7b80964cde586bf Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Fri, 4 Jun 2021 11:26:37 +0100 Subject: [PATCH 006/308] Code refinement --- openpype/hosts/blender/plugins/load/load_abc.py | 5 ++--- openpype/hosts/blender/plugins/load/load_fbx_model.py | 5 ++--- openpype/hosts/blender/plugins/load/load_model.py | 11 +++-------- 3 files changed, 7 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 7261e002f1..522a7fd63a 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ 
b/openpype/hosts/blender/plugins/load/load_abc.py @@ -161,9 +161,8 @@ class CacheModelLoader(plugin.AssetLoader): "family": context["representation"]["context"]["family"] } - nodes = objects - self[:] = nodes - return nodes + self[:] = objects + return objects def update(self, container: Dict, representation: Dict): """Update the loaded asset. diff --git a/openpype/hosts/blender/plugins/load/load_fbx_model.py b/openpype/hosts/blender/plugins/load/load_fbx_model.py index 776217e5f1..0e49eada3f 100644 --- a/openpype/hosts/blender/plugins/load/load_fbx_model.py +++ b/openpype/hosts/blender/plugins/load/load_fbx_model.py @@ -157,9 +157,8 @@ class FbxModelLoader(plugin.AssetLoader): "family": context["representation"]["context"]["family"] } - nodes = objects - self[:] = nodes - return nodes + self[:] = objects + return objects def update(self, container: Dict, representation: Dict): """Update the loaded asset. diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index a6ace64254..ff2d526d10 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -29,7 +29,6 @@ class BlendModelLoader(plugin.AssetLoader): def _remove(self, asset_group): objects = list(asset_group.children) - empties = [] for obj in objects: if obj.type == 'MESH': @@ -38,10 +37,7 @@ class BlendModelLoader(plugin.AssetLoader): bpy.data.meshes.remove(obj.data) elif obj.type == 'EMPTY': objects.extend(obj.children) - empties.append(obj) - - for empty in empties: - bpy.data.objects.remove(empty) + bpy.data.objects.remove(obj) def _process(self, libpath, asset_group, group_name): relative = bpy.context.preferences.filepaths.use_relative_paths @@ -145,9 +141,8 @@ class BlendModelLoader(plugin.AssetLoader): "family": context["representation"]["context"]["family"] } - nodes = objects - self[:] = nodes - return nodes + self[:] = objects + return objects def update(self, container: Dict, 
representation: Dict): """Update the loaded asset. From 015f001d73993886a61ea80b7e00a5d7f14d4f43 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 9 Jun 2021 10:44:44 +0100 Subject: [PATCH 007/308] Improved rig assets handling --- .../blender/plugins/create/create_rig.py | 41 ++- .../hosts/blender/plugins/load/load_rig.py | 329 +++++++++--------- .../blender/plugins/publish/extract_fbx.py | 15 +- 3 files changed, 187 insertions(+), 198 deletions(-) diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 116fb9f742..45c0f836d1 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -4,10 +4,11 @@ import bpy from avalon import api from avalon.blender import lib -import openpype.hosts.blender.api.plugin +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateRig(openpype.hosts.blender.api.plugin.Creator): +class CreateRig(plugin.Creator): """Artist-friendly rig with controls to direct motion""" name = "rigMain" @@ -16,26 +17,30 @@ class CreateRig(openpype.hosts.blender.api.plugin.Creator): icon = "wheelchair" def process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + # Create instance object asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link(collection) + name = plugin.asset_name(asset, subset) + asset_group = bpy.data.objects.new(name=name, object_data=None) + instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) - 
- # Add the rig object and all the children meshes to - # a set and link them all at the end to avoid duplicates. - # Blender crashes if trying to link an object that is already linked. - # This links automatically the children meshes if they were not - # selected, and doesn't link them twice if they, insted, - # were manually selected by the user. + lib.imprint(asset_group, self.data) + # Add selected objects to instance if (self.options or {}).get("useSelection"): - for obj in lib.get_selection(): - for child in obj.users_collection[0].children: - collection.children.link(child) - collection.objects.link(obj) + bpy.context.view_layer.objects.active = asset_group + selected = lib.get_selection() + for obj in selected: + obj.select_set(True) + selected.append(asset_group) + context = plugin.create_blender_context( + active=asset_group, selected=selected) + bpy.ops.object.parent_set(context, keep_transform=True) - return collection + return asset_group diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index b6be8f4cf6..6fa7460d76 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -1,21 +1,20 @@ """Load a rig asset in Blender.""" -import logging from pathlib import Path from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin as plugin + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin class BlendRigLoader(plugin.AssetLoader): - """Load rigs from a .blend file. - - Because they come from a .blend file we can simply link the collection that - contains the model. There is no further need to 'containerise' it. 
- """ + """Load rigs from a .blend file.""" families = ["rig"] representations = ["blend"] @@ -24,105 +23,110 @@ class BlendRigLoader(plugin.AssetLoader): icon = "code-fork" color = "orange" - def _remove(self, objects, obj_container): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + bpy.data.materials.remove(material_slot.material) bpy.data.meshes.remove(obj.data) + elif obj.type == 'ARMATURE': + objects.extend(obj.children) + bpy.data.armatures.remove(obj.data) elif obj.type == 'CURVE': bpy.data.curves.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + bpy.data.objects.remove(obj) - for child in obj_container.children: - bpy.data.collections.remove(child) - - bpy.data.collections.remove(obj_container) - - def make_local_and_metadata(self, obj, collection_name): - local_obj = plugin.prepare_data(obj, collection_name) - plugin.prepare_data(local_obj.data, collection_name) - - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": collection_name + '_CON'}) - - return local_obj - - def _process( - self, libpath, lib_container, collection_name, - action, parent_collection - ): + def _process(self, libpath, asset_group, group_name, action): relative = bpy.context.preferences.filepaths.use_relative_paths with bpy.data.libraries.load( libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] + ) as (data_from, data_to): + data_to.objects = data_from.objects - parent = parent_collection + parent = bpy.context.scene.collection - if parent is None: - parent = bpy.context.scene.collection + empties = [obj for 
obj in data_to.objects if obj.type == 'EMPTY'] - parent.children.link(bpy.data.collections[lib_container]) + container = None - rig_container = parent.children[lib_container].make_local() - rig_container.name = collection_name + for empty in empties: + if empty.get(AVALON_PROPERTY): + container = empty + break + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break objects = [] - armatures = [ - obj for obj in rig_container.objects - if obj.type == 'ARMATURE' - ] + nodes = list(container.children) - for child in rig_container.children: - local_child = plugin.prepare_data(child, collection_name) - objects.extend(local_child.objects) + for obj in nodes: + obj.parent = asset_group - # for obj in bpy.data.objects: - # obj.select_set(False) + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() constraints = [] + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] + for armature in armatures: for bone in armature.pose.bones: for constraint in bone.constraints: if hasattr(constraint, 'target'): constraints.append(constraint) - # Link armatures after other objects. - # The armature is unparented for all the non-local meshes, - # when it is made local. 
for obj in objects: - local_obj = self.make_local_and_metadata(obj, collection_name) + parent.objects.link(obj) - if obj != local_obj: - for constraint in constraints: - if constraint.target == obj: - constraint.target = local_obj + for obj in objects: + local_obj = plugin.prepare_data(obj, group_name) - for armature in armatures: - local_obj = self.make_local_and_metadata(armature, collection_name) + if obj.type == 'MESH': + plugin.prepare_data(local_obj.data, group_name) - if action is not None: - local_obj.animation_data.action = action - elif local_obj.animation_data.action is not None: - plugin.prepare_data( - local_obj.animation_data.action, collection_name) + if obj != local_obj: + for constraint in constraints: + if constraint.target == obj: + constraint.target = local_obj - # Set link the drivers to the local object - if local_obj.data.animation_data: - for d in local_obj.data.animation_data.drivers: - for v in d.driver.variables: - for t in v.targets: - t.id = local_obj + for material_slot in local_obj.material_slots: + plugin.prepare_data(material_slot.material, group_name) + elif obj.type == 'ARMATURE': + plugin.prepare_data(local_obj.data, group_name) - rig_container.pop(blender.pipeline.AVALON_PROPERTY) + if action is not None: + local_obj.animation_data.action = action + elif local_obj.animation_data.action is not None: + plugin.prepare_data( + local_obj.animation_data.action, group_name) + + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj + + if not obj.get(AVALON_PROPERTY): + local_obj[AVALON_PROPERTY] = dict() + + avalon_info = local_obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + objects.reverse() bpy.ops.object.select_all(action='DESELECT') - return rig_container + return objects def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, @@ 
-138,61 +142,48 @@ class BlendRigLoader(plugin.AssetLoader): libpath = self.fname asset = context["asset"]["name"] subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) namespace = namespace or f"{asset}_{unique_number}" - collection_name = plugin.asset_name( - asset, subset, unique_number - ) - container = bpy.data.collections.new(collection_name) - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) - metadata = container.get(blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects.new(group_name, object_data=None) + avalon_container.objects.link(asset_group) - metadata["libpath"] = libpath - metadata["lib_container"] = lib_container + objects = self._process(libpath, asset_group, group_name, None) - obj_container = self._process( - libpath, lib_container, collection_name, None, None) + bpy.context.scene.collection.objects.link(asset_group) - metadata["obj_container"] = obj_container - # Save the list of objects in the metadata container - metadata["objects"] = obj_container.all_objects + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"] + } - 
metadata["parent"] = str(context["representation"]["parent"]) - metadata["family"] = context["representation"]["context"]["family"] - - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes + self[:] = objects + return objects def update(self, container: Dict, representation: Dict): """Update the loaded asset. - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! + This will remove all children of the asset group, load the new ones + and add them as children of the group. """ - collection = bpy.data.collections.get( - container["objectName"] - ) + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) libpath = Path(api.get_representation_path(representation)) extension = libpath.suffix.lower() @@ -202,12 +193,9 @@ class BlendRigLoader(plugin.AssetLoader): pformat(representation, indent=2), ) - assert collection, ( + assert asset_group, ( f"The asset is not loaded: {container['objectName']}" ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) assert libpath, ( "No existing library file found for {container['objectName']}" ) @@ -218,89 +206,84 @@ class BlendRigLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - lib_container = collection_metadata["lib_container"] + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) ) normalized_libpath = ( str(Path(bpy.path.abspath(str(libpath))).resolve()) ) self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, normalized_libpath, ) - if normalized_collection_libpath == normalized_libpath: + if normalized_group_libpath == normalized_libpath: self.log.info("Library already loaded, not updating...") return - # Get the armature of the rig - armatures = [obj for obj in objects if obj.type == 'ARMATURE'] - assert(len(armatures) == 1) + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: + count += 1 + + # # Get the armature of the rig + objects = asset_group.children + armature = [obj for obj in objects if obj.type == 'ARMATURE'][0] action = None - if armatures[0].animation_data and armatures[0].animation_data.action: - action = armatures[0].animation_data.action + if armature.animation_data and armature.animation_data.action: + action = 
armature.animation_data.action - parent = plugin.get_parent_collection(obj_container) + mat = asset_group.matrix_basis.copy() - self._remove(objects, obj_container) + self._remove(asset_group) - obj_container = self._process( - str(libpath), lib_container, container_name, action, parent) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) + bpy.data.libraries.remove(library) - # Save the list of objects in the metadata container - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + self._process(str(libpath), asset_group, object_name, action) - bpy.ops.object.select_all(action='DESELECT') + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. + """Remove an existing asset group from a Blender scene. Arguments: container (openpype:container-1.0): Container to remove, from `host.ls()`. Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! + bool: Whether the asset group was deleted. """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == libpath: + count += 1 + + if not asset_group: return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) + self._remove(asset_group) - obj_container = plugin.get_local_collection_with_name( - collection_metadata["obj_container"].name - ) - objects = obj_container.all_objects + bpy.data.objects.remove(asset_group) - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) return True diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx.py b/openpype/hosts/blender/plugins/publish/extract_fbx.py index f95a0a3283..b91f2a75ef 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx.py @@ -21,8 +21,6 @@ class ExtractFBX(api.Extractor): filename = f"{instance.name}.fbx" filepath = os.path.join(stagingdir, filename) - scene = bpy.context.scene - # Perform extraction self.log.info("Performing extraction..") @@ -41,12 +39,16 @@ class ExtractFBX(api.Extractor): active=asset_group, selected=selected) new_materials = [] + new_materials_objs = [] + objects = list(asset_group.children) - for obj in collections[0].all_objects: - if obj.type == 'MESH': + for obj in objects: + objects.extend(obj.children) + if obj.type == 'MESH' and len(obj.data.materials) == 0: mat = bpy.data.materials.new(obj.name) obj.data.materials.append(mat) new_materials.append(mat) + new_materials_objs.append(obj) # We export the fbx bpy.ops.export_scene.fbx( @@ -63,9 +65,8 @@ class ExtractFBX(api.Extractor): for mat in new_materials: bpy.data.materials.remove(mat) - for obj in collections[0].all_objects: - if obj.type == 'MESH': - obj.data.materials.pop() + for obj in new_materials_objs: + obj.data.materials.pop() if "representations" not in instance.data: instance.data["representations"] = [] From 3987223870da99fc7ec7d4100e3e6302fe383524 Mon Sep 17 00:00:00 
2001 From: Simone Barbieri Date: Wed, 9 Jun 2021 10:45:22 +0100 Subject: [PATCH 008/308] Added new validators for Blender --- .../publish/validate_no_colons_in _name.py | 39 ++++++++++++++++++ .../publish/validate_transform_zero.py | 40 +++++++++++++++++++ 2 files changed, 79 insertions(+) create mode 100644 openpype/hosts/blender/plugins/publish/validate_no_colons_in _name.py create mode 100644 openpype/hosts/blender/plugins/publish/validate_transform_zero.py diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in _name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in _name.py new file mode 100644 index 0000000000..261ff864d5 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/validate_no_colons_in _name.py @@ -0,0 +1,39 @@ +from typing import List + +import pyblish.api +import openpype.hosts.blender.api.action + + +class ValidateNoColonsInName(pyblish.api.InstancePlugin): + """There cannot be colons in names + + Object or bone names cannot include colons. Other software do not + handle colons correctly. 
+ + """ + + order = openpype.api.ValidateContentsOrder + hosts = ["blender"] + families = ["model", "rig"] + version = (0, 1, 0) + label = "No Colons in names" + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + for obj in [obj for obj in instance]: + if ':' in obj.name: + invalid.append(obj) + if obj.type == 'ARMATURE': + for bone in obj.data.bones: + if ':' in bone.name: + invalid.append(obj) + break + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + f"Objects found with colon in name: {invalid}") diff --git a/openpype/hosts/blender/plugins/publish/validate_transform_zero.py b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py new file mode 100644 index 0000000000..7456dbc423 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/validate_transform_zero.py @@ -0,0 +1,40 @@ +from typing import List + +import mathutils + +import pyblish.api +import openpype.hosts.blender.api.action + + +class ValidateTransformZero(pyblish.api.InstancePlugin): + """Transforms can't have any values + + To solve this issue, try freezing the transforms. So long + as the transforms, rotation and scale values are zero, + you're all good. 
+ + """ + + order = openpype.api.ValidateContentsOrder + hosts = ["blender"] + families = ["model"] + category = "geometry" + version = (0, 1, 0) + label = "Transform Zero" + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + + _identity = mathutils.Matrix() + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + for obj in [obj for obj in instance]: + if obj.matrix_basis != cls._identity: + invalid.append(obj) + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + f"Object found in instance is not in Object Mode: {invalid}") From 32c876db85b084b369dde0a633de7c372cf6d3c9 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 9 Jun 2021 11:17:26 +0100 Subject: [PATCH 009/308] Changed default display type for asset groups --- openpype/hosts/blender/plugins/create/create_model.py | 1 + openpype/hosts/blender/plugins/create/create_rig.py | 1 + openpype/hosts/blender/plugins/load/load_model.py | 1 + openpype/hosts/blender/plugins/load/load_rig.py | 1 + 4 files changed, 4 insertions(+) diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index e318cd07da..ecc6f4bf22 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -28,6 +28,7 @@ class CreateModel(plugin.Creator): subset = self.data["subset"] name = plugin.asset_name(asset, subset) asset_group = bpy.data.objects.new(name=name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 45c0f836d1..0f1c686816 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ 
b/openpype/hosts/blender/plugins/create/create_rig.py @@ -28,6 +28,7 @@ class CreateRig(plugin.Creator): subset = self.data["subset"] name = plugin.asset_name(asset, subset) asset_group = bpy.data.objects.new(name=name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index ff2d526d10..3f7967cd0d 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -122,6 +122,7 @@ class BlendModelLoader(plugin.AssetLoader): bpy.context.scene.collection.children.link(avalon_container) asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) objects = self._process(libpath, asset_group, group_name) diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 6fa7460d76..fe58d0f0a7 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -154,6 +154,7 @@ class BlendRigLoader(plugin.AssetLoader): bpy.context.scene.collection.children.link(avalon_container) asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) objects = self._process(libpath, asset_group, group_name, None) From c8c70cc0a79c51659ec4371df3ed5ed431df50a3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 9 Jun 2021 11:21:43 +0100 Subject: [PATCH 010/308] Fixed problem when loading rig and missing material in material slot --- openpype/hosts/blender/plugins/load/load_rig.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index fe58d0f0a7..306c4aa03b 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -99,7 +99,8 @@ class BlendRigLoader(plugin.AssetLoader): constraint.target = local_obj for material_slot in local_obj.material_slots: - plugin.prepare_data(material_slot.material, group_name) + if material_slot.material: + plugin.prepare_data(material_slot.material, group_name) elif obj.type == 'ARMATURE': plugin.prepare_data(local_obj.data, group_name) From 6ee64d09ab09e9b834144e16e6c7c5286b421eb8 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 21 Jun 2021 11:40:58 +0100 Subject: [PATCH 011/308] Loaders and creators now use Blender main thread to operate --- openpype/hosts/blender/api/plugin.py | 26 +++++++- .../blender/plugins/create/create_model.py | 11 ++-- .../blender/plugins/create/create_rig.py | 11 ++-- .../hosts/blender/plugins/load/load_abc.py | 6 +- .../load/{load_fbx_model.py => load_fbx.py} | 59 +++++++++++++------ .../hosts/blender/plugins/load/load_model.py | 4 +- .../hosts/blender/plugins/load/load_rig.py | 9 +-- 7 files changed, 87 insertions(+), 39 deletions(-) rename openpype/hosts/blender/plugins/load/{load_fbx_model.py => load_fbx.py} (82%) diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index f3cf2b88cd..b0df2d7109 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -6,10 +6,11 @@ from typing import Dict, List, Optional import bpy from avalon import api, blender +from avalon.blender import ops from avalon.blender.pipeline import AVALON_CONTAINERS from openpype.api import PypeCreatorMixin -VALID_EXTENSIONS = [".blend", ".json", ".abc"] +VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"] def asset_name( @@ -161,6 +162,15 @@ class AssetLoader(api.Loader): raise NotImplementedError("Must be implemented by a 
sub-class") def load(self, + context: dict, + name: Optional[str] = None, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + """ Run the loader on Blender main thread""" + mti = ops.MainThreadItem(self._load, context, name, namespace, options) + ops.execute_in_main_thread(mti) + + def _load(self, context: dict, name: Optional[str] = None, namespace: Optional[str] = None, @@ -216,10 +226,20 @@ class AssetLoader(api.Loader): return self._get_instance_collection(instance_name, nodes) + def exec_update(self, container: Dict, representation: Dict): + """Must be implemented by a sub-class""" + raise NotImplementedError("Must be implemented by a sub-class") + def update(self, container: Dict, representation: Dict): + """ Run the update on Blender main thread""" + mti = ops.MainThreadItem(self.exec_update, container, representation) + ops.execute_in_main_thread(mti) + + def exec_remove(self, container: Dict) -> bool: """Must be implemented by a sub-class""" raise NotImplementedError("Must be implemented by a sub-class") def remove(self, container: Dict) -> bool: - """Must be implemented by a sub-class""" - raise NotImplementedError("Must be implemented by a sub-class") + """ Run the remove on Blender main thread""" + mti = ops.MainThreadItem(self.exec_remove, container) + ops.execute_in_main_thread(mti) diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index ecc6f4bf22..e778f5b74f 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -3,7 +3,7 @@ import bpy from avalon import api -from avalon.blender import lib +from avalon.blender import lib, ops from avalon.blender.pipeline import AVALON_INSTANCES from openpype.hosts.blender.api import plugin @@ -17,6 +17,11 @@ class CreateModel(plugin.Creator): icon = "cube" def process(self): + """ Run the creator on Blender main 
thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + + def _process(self): # Get Instance Containter or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: @@ -40,8 +45,6 @@ class CreateModel(plugin.Creator): for obj in selected: obj.select_set(True) selected.append(asset_group) - context = plugin.create_blender_context( - active=asset_group, selected=selected) - bpy.ops.object.parent_set(context, keep_transform=True) + bpy.ops.object.parent_set(keep_transform=True) return asset_group diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 0f1c686816..2e1c71f570 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -3,7 +3,7 @@ import bpy from avalon import api -from avalon.blender import lib +from avalon.blender import lib, ops from avalon.blender.pipeline import AVALON_INSTANCES from openpype.hosts.blender.api import plugin @@ -17,6 +17,11 @@ class CreateRig(plugin.Creator): icon = "wheelchair" def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + + def _process(self): # Get Instance Containter or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: @@ -40,8 +45,6 @@ class CreateRig(plugin.Creator): for obj in selected: obj.select_set(True) selected.append(asset_group) - context = plugin.create_blender_context( - active=asset_group, selected=selected) - bpy.ops.object.parent_set(context, keep_transform=True) + bpy.ops.object.parent_set(keep_transform=True) return asset_group diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 522a7fd63a..a985ae684d 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ 
b/openpype/hosts/blender/plugins/load/load_abc.py @@ -52,9 +52,7 @@ class CacheModelLoader(plugin.AssetLoader): collection = bpy.context.view_layer.active_layer_collection.collection relative = bpy.context.preferences.filepaths.use_relative_paths - context = plugin.create_blender_context() bpy.ops.wm.alembic_import( - context, filepath=libpath, relative_path=relative ) @@ -164,7 +162,7 @@ class CacheModelLoader(plugin.AssetLoader): self[:] = objects return objects - def update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -227,7 +225,7 @@ class CacheModelLoader(plugin.AssetLoader): metadata["libpath"] = str(libpath) metadata["representation"] = str(representation["_id"]) - def remove(self, container: Dict) -> bool: + def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. Arguments: diff --git a/openpype/hosts/blender/plugins/load/load_fbx_model.py b/openpype/hosts/blender/plugins/load/load_fbx.py similarity index 82% rename from openpype/hosts/blender/plugins/load/load_fbx_model.py rename to openpype/hosts/blender/plugins/load/load_fbx.py index 0e49eada3f..f267bef586 100644 --- a/openpype/hosts/blender/plugins/load/load_fbx_model.py +++ b/openpype/hosts/blender/plugins/load/load_fbx.py @@ -7,7 +7,7 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender import lib +from avalon.blender import lib, ops from avalon.blender.pipeline import AVALON_CONTAINERS from avalon.blender.pipeline import AVALON_CONTAINER_ID from avalon.blender.pipeline import AVALON_PROPERTY @@ -20,7 +20,7 @@ class FbxModelLoader(plugin.AssetLoader): Stores the imported asset in an empty named after the asset. 
""" - families = ["model"] + families = ["model", "rig"] representations = ["fbx"] label = "Load FBX" @@ -29,7 +29,6 @@ class FbxModelLoader(plugin.AssetLoader): def _remove(self, asset_group): objects = list(asset_group.children) - empties = [] for obj in objects: if obj.type == 'MESH': @@ -37,23 +36,21 @@ class FbxModelLoader(plugin.AssetLoader): if material_slot.material: bpy.data.materials.remove(material_slot.material) bpy.data.meshes.remove(obj.data) + elif obj.type == 'ARMATURE': + objects.extend(obj.children) + bpy.data.armatures.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) elif obj.type == 'EMPTY': objects.extend(obj.children) - empties.append(obj) + bpy.data.objects.remove(obj) - for empty in empties: - bpy.data.objects.remove(empty) - - def _process(self, libpath, asset_group, group_name): + def _process(self, libpath, asset_group, group_name, action): bpy.ops.object.select_all(action='DESELECT') collection = bpy.context.view_layer.active_layer_collection.collection - context = plugin.create_blender_context() - bpy.ops.import_scene.fbx( - context, - filepath=libpath - ) + bpy.ops.import_scene.fbx(filepath=libpath) parent = bpy.context.scene.collection @@ -97,9 +94,17 @@ class FbxModelLoader(plugin.AssetLoader): name_data = obj.data.name obj.data.name = f"{group_name}:{name_data}" + if obj.type == 'MESH': for material_slot in obj.material_slots: name_mat = material_slot.material.name material_slot.material.name = f"{group_name}:{name_mat}" + elif obj.type == 'ARMATURE': + anim_data = obj.animation_data + if action is not None: + anim_data.action = action + elif anim_data.action is not None: + name_action = anim_data.action.name + anim_data.action.name = f"{group_name}:{name_action}" if not obj.get(AVALON_PROPERTY): obj[AVALON_PROPERTY] = dict() @@ -122,7 +127,6 @@ class FbxModelLoader(plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname 
asset = context["asset"]["name"] subset = context["subset"]["name"] @@ -140,7 +144,14 @@ class FbxModelLoader(plugin.AssetLoader): asset_group = bpy.data.objects.new(group_name, object_data=None) avalon_container.objects.link(asset_group) - objects = self._process(libpath, asset_group, group_name) + objects = self._process(libpath, asset_group, group_name, None) + + objects = [] + nodes = list(asset_group.children) + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) bpy.context.scene.collection.objects.link(asset_group) @@ -160,7 +171,7 @@ class FbxModelLoader(plugin.AssetLoader): self[:] = objects return objects - def update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -214,16 +225,28 @@ class FbxModelLoader(plugin.AssetLoader): self.log.info("Library already loaded, not updating...") return + # Get the armature of the rig + objects = asset_group.children + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] + action = None + + if armatures: + armature = armatures[0] + + if armature.animation_data and armature.animation_data.action: + action = armature.animation_data.action + mat = asset_group.matrix_basis.copy() self._remove(asset_group) - self._process(str(libpath), asset_group, object_name) + self._process(str(libpath), asset_group, object_name, action) + asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) metadata["representation"] = str(representation["_id"]) - def remove(self, container: Dict) -> bool: + def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
Arguments: diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 3f7967cd0d..37618b164d 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -145,7 +145,7 @@ class BlendModelLoader(plugin.AssetLoader): self[:] = objects return objects - def update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -218,7 +218,7 @@ class BlendModelLoader(plugin.AssetLoader): metadata["libpath"] = str(libpath) metadata["representation"] = str(representation["_id"]) - def remove(self, container: Dict) -> bool: + def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. Arguments: diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 306c4aa03b..3909a1b4aa 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -29,7 +29,8 @@ class BlendRigLoader(plugin.AssetLoader): for obj in objects: if obj.type == 'MESH': for material_slot in list(obj.material_slots): - bpy.data.materials.remove(material_slot.material) + if material_slot.material: + bpy.data.materials.remove(material_slot.material) bpy.data.meshes.remove(obj.data) elif obj.type == 'ARMATURE': objects.extend(obj.children) @@ -178,7 +179,7 @@ class BlendRigLoader(plugin.AssetLoader): self[:] = objects return objects - def update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, representation: Dict): """Update the loaded asset. 
This will remove all children of the asset group, load the new ones @@ -232,7 +233,7 @@ class BlendRigLoader(plugin.AssetLoader): if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: count += 1 - # # Get the armature of the rig + # Get the armature of the rig objects = asset_group.children armature = [obj for obj in objects if obj.type == 'ARMATURE'][0] @@ -256,7 +257,7 @@ class BlendRigLoader(plugin.AssetLoader): metadata["libpath"] = str(libpath) metadata["representation"] = str(representation["_id"]) - def remove(self, container: Dict) -> bool: + def exec_remove(self, container: Dict) -> bool: """Remove an existing asset group from a Blender scene. Arguments: From 395b4fe3bd09953673d99dcebc9f9a69b74b7bf9 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 21 Jun 2021 11:47:02 +0100 Subject: [PATCH 012/308] Hound fixes --- openpype/hosts/blender/api/plugin.py | 16 ++++++++-------- openpype/hosts/blender/plugins/load/load_fbx.py | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index b0df2d7109..08e688f484 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -162,19 +162,19 @@ class AssetLoader(api.Loader): raise NotImplementedError("Must be implemented by a sub-class") def load(self, - context: dict, - name: Optional[str] = None, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + context: dict, + name: Optional[str] = None, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: """ Run the loader on Blender main thread""" mti = ops.MainThreadItem(self._load, context, name, namespace, options) ops.execute_in_main_thread(mti) def _load(self, - context: dict, - name: Optional[str] = None, - namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + context: dict, + name: 
Optional[str] = None, + namespace: Optional[str] = None, + options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: """Load asset via database Arguments: diff --git a/openpype/hosts/blender/plugins/load/load_fbx.py b/openpype/hosts/blender/plugins/load/load_fbx.py index f267bef586..aa96584c58 100644 --- a/openpype/hosts/blender/plugins/load/load_fbx.py +++ b/openpype/hosts/blender/plugins/load/load_fbx.py @@ -7,7 +7,7 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender import lib, ops +from avalon.blender import lib from avalon.blender.pipeline import AVALON_CONTAINERS from avalon.blender.pipeline import AVALON_CONTAINER_ID from avalon.blender.pipeline import AVALON_PROPERTY From acf4c8bf1946a52f784b1faa256a0113a62f5724 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 29 Jun 2021 11:18:51 +0100 Subject: [PATCH 013/308] Fixed validator name --- ...lidate_no_colons_in _name.py => validate_no_colons_in_name.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/blender/plugins/publish/{validate_no_colons_in _name.py => validate_no_colons_in_name.py} (100%) diff --git a/openpype/hosts/blender/plugins/publish/validate_no_colons_in _name.py b/openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py similarity index 100% rename from openpype/hosts/blender/plugins/publish/validate_no_colons_in _name.py rename to openpype/hosts/blender/plugins/publish/validate_no_colons_in_name.py From 6ad2daeee9f7407bd56f587d9bc76cbd1302ea58 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 1 Jul 2021 15:02:01 +0100 Subject: [PATCH 014/308] Fixed a problem when loading the same object multiple times --- openpype/hosts/blender/plugins/load/load_model.py | 6 ++++-- openpype/hosts/blender/plugins/load/load_rig.py | 8 +++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py 
index 37618b164d..3087edb573 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -78,13 +78,13 @@ class BlendModelLoader(plugin.AssetLoader): for obj in objects: local_obj = plugin.prepare_data(obj, group_name) - if obj.type != 'EMPTY': + if local_obj.type != 'EMPTY': plugin.prepare_data(local_obj.data, group_name) for material_slot in local_obj.material_slots: plugin.prepare_data(material_slot.material, group_name) - if not obj.get(AVALON_PROPERTY): + if not local_obj.get(AVALON_PROPERTY): local_obj[AVALON_PROPERTY] = dict() avalon_info = local_obj[AVALON_PROPERTY] @@ -92,6 +92,8 @@ class BlendModelLoader(plugin.AssetLoader): objects.reverse() + bpy.data.orphans_purge(do_local_ids = False) + bpy.ops.object.select_all(action='DESELECT') return objects diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 3909a1b4aa..e57b97972b 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -91,7 +91,7 @@ class BlendRigLoader(plugin.AssetLoader): for obj in objects: local_obj = plugin.prepare_data(obj, group_name) - if obj.type == 'MESH': + if local_obj.type == 'MESH': plugin.prepare_data(local_obj.data, group_name) if obj != local_obj: @@ -102,7 +102,7 @@ class BlendRigLoader(plugin.AssetLoader): for material_slot in local_obj.material_slots: if material_slot.material: plugin.prepare_data(material_slot.material, group_name) - elif obj.type == 'ARMATURE': + elif local_obj.type == 'ARMATURE': plugin.prepare_data(local_obj.data, group_name) if action is not None: @@ -118,7 +118,7 @@ class BlendRigLoader(plugin.AssetLoader): for t in v.targets: t.id = local_obj - if not obj.get(AVALON_PROPERTY): + if not local_obj.get(AVALON_PROPERTY): local_obj[AVALON_PROPERTY] = dict() avalon_info = local_obj[AVALON_PROPERTY] @@ -126,6 +126,8 @@ class BlendRigLoader(plugin.AssetLoader): 
objects.reverse() + bpy.data.orphans_purge(do_local_ids = False) + bpy.ops.object.select_all(action='DESELECT') return objects From b7ef4b34c632361e32458e73752db72dae2b5fa9 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 1 Jul 2021 15:06:59 +0100 Subject: [PATCH 015/308] Hound fixes --- openpype/hosts/blender/plugins/load/load_model.py | 2 +- openpype/hosts/blender/plugins/load/load_rig.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 3087edb573..405ecc52b5 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -92,7 +92,7 @@ class BlendModelLoader(plugin.AssetLoader): objects.reverse() - bpy.data.orphans_purge(do_local_ids = False) + bpy.data.orphans_purge(do_local_ids=False) bpy.ops.object.select_all(action='DESELECT') diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index e57b97972b..03538605bf 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -126,7 +126,7 @@ class BlendRigLoader(plugin.AssetLoader): objects.reverse() - bpy.data.orphans_purge(do_local_ids = False) + bpy.data.orphans_purge(do_local_ids=False) bpy.ops.object.select_all(action='DESELECT') From 9ad9c5b52ee3d09e7c6fb6ee7c42280210dc9c7d Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 7 Jul 2021 10:55:04 +0100 Subject: [PATCH 016/308] Fixed problem with missing attribute in avalon properties --- openpype/hosts/blender/plugins/load/load_abc.py | 3 ++- openpype/hosts/blender/plugins/load/load_fbx.py | 3 ++- openpype/hosts/blender/plugins/load/load_model.py | 3 ++- openpype/hosts/blender/plugins/load/load_rig.py | 3 ++- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_abc.py 
b/openpype/hosts/blender/plugins/load/load_abc.py index a985ae684d..92656fac9e 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ b/openpype/hosts/blender/plugins/load/load_abc.py @@ -156,7 +156,8 @@ class CacheModelLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"] + "family": context["representation"]["context"]["family"], + "objectName": group_name } self[:] = objects diff --git a/openpype/hosts/blender/plugins/load/load_fbx.py b/openpype/hosts/blender/plugins/load/load_fbx.py index aa96584c58..b80dc69adc 100644 --- a/openpype/hosts/blender/plugins/load/load_fbx.py +++ b/openpype/hosts/blender/plugins/load/load_fbx.py @@ -165,7 +165,8 @@ class FbxModelLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"] + "family": context["representation"]["context"]["family"], + "objectName": group_name } self[:] = objects diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 405ecc52b5..5ddbdecf01 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -141,7 +141,8 @@ class BlendModelLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"] + "family": context["representation"]["context"]["family"], + "objectName": group_name } self[:] = objects diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 03538605bf..236672391c 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -175,7 +175,8 @@ class 
BlendRigLoader(plugin.AssetLoader): "libpath": libpath, "asset_name": asset_name, "parent": str(context["representation"]["parent"]), - "family": context["representation"]["context"]["family"] + "family": context["representation"]["context"]["family"], + "objectName": group_name } self[:] = objects From 78e10b9d1a8bc1d4a48b2263a0fee0603c80868a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 7 Jul 2021 10:56:28 +0100 Subject: [PATCH 017/308] Improved layout assets handling --- openpype/hosts/blender/api/plugin.py | 12 +- .../blender/plugins/create/create_layout.py | 38 +- .../hosts/blender/plugins/load/load_layout.py | 664 ------------------ .../blender/plugins/load/load_layout_blend.py | 337 +++++++++ .../blender/plugins/load/load_layout_json.py | 243 +++++++ .../hosts/blender/plugins/load/load_model.py | 39 +- .../hosts/blender/plugins/load/load_rig.py | 44 +- .../blender/plugins/publish/extract_layout.py | 80 +-- 8 files changed, 733 insertions(+), 724 deletions(-) delete mode 100644 openpype/hosts/blender/plugins/load/load_layout.py create mode 100644 openpype/hosts/blender/plugins/load/load_layout_blend.py create mode 100644 openpype/hosts/blender/plugins/load/load_layout_json.py diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index 08e688f484..a126f5702f 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -31,9 +31,9 @@ def get_unique_number( avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: return "01" - asset_groups = avalon_container.objects + asset_groups = avalon_container.all_objects - container_names = [c.name for c in asset_groups] + container_names = [c.name for c in asset_groups if c.type == 'EMPTY'] count = 1 name = f"{asset}_{count:0>2}_{subset}" while name in container_names: @@ -220,11 +220,11 @@ class AssetLoader(api.Loader): # loader=self.__class__.__name__, # ) - asset = context["asset"]["name"] - subset = 
context["subset"]["name"] - instance_name = asset_name(asset, subset, unique_number) + '_CON' + # asset = context["asset"]["name"] + # subset = context["subset"]["name"] + # instance_name = asset_name(asset, subset, unique_number) + '_CON' - return self._get_instance_collection(instance_name, nodes) + # return self._get_instance_collection(instance_name, nodes) def exec_update(self, container: Dict, representation: Dict): """Must be implemented by a sub-class""" diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py index 5404cec587..831261f027 100644 --- a/openpype/hosts/blender/plugins/create/create_layout.py +++ b/openpype/hosts/blender/plugins/create/create_layout.py @@ -3,11 +3,12 @@ import bpy from avalon import api -from avalon.blender import lib -import openpype.hosts.blender.api.plugin +from avalon.blender import lib, ops +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateLayout(openpype.hosts.blender.api.plugin.Creator): +class CreateLayout(plugin.Creator): """Layout output for character rigs""" name = "layoutMain" @@ -16,13 +17,34 @@ class CreateLayout(openpype.hosts.blender.api.plugin.Creator): icon = "cubes" def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + def _process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: + instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.context.collection - collection.name = name + name = plugin.asset_name(asset, subset) + asset_group = 
bpy.data.objects.new(name=name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + instances.objects.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - lib.imprint(collection, self.data) + lib.imprint(asset_group, self.data) - return collection + # Add selected objects to instance + if (self.options or {}).get("useSelection"): + bpy.context.view_layer.objects.active = asset_group + selected = lib.get_selection() + for obj in selected: + obj.select_set(True) + selected.append(asset_group) + bpy.ops.object.parent_set(keep_transform=True) + + return asset_group diff --git a/openpype/hosts/blender/plugins/load/load_layout.py b/openpype/hosts/blender/plugins/load/load_layout.py deleted file mode 100644 index 2092be9139..0000000000 --- a/openpype/hosts/blender/plugins/load/load_layout.py +++ /dev/null @@ -1,664 +0,0 @@ -"""Load a layout in Blender.""" - -import json -from logging import log, warning -import math - -import logging -from pathlib import Path -from pprint import pformat -from typing import Dict, List, Optional - -from avalon import api, blender, pipeline -import bpy -import openpype.hosts.blender.api.plugin as plugin -from openpype.lib import get_creator_by_name - - -class BlendLayoutLoader(plugin.AssetLoader): - """Load layout from a .blend file.""" - - families = ["layout"] - representations = ["blend"] - - label = "Link Layout" - icon = "code-fork" - color = "orange" - - def _remove(self, objects, obj_container): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': - bpy.data.meshes.remove(obj.data) - elif obj.type == 'CAMERA': - bpy.data.cameras.remove(obj.data) - elif obj.type == 'CURVE': - bpy.data.curves.remove(obj.data) - - for element_container in obj_container.children: - for child in element_container.children: - bpy.data.collections.remove(child) - bpy.data.collections.remove(element_container) - - bpy.data.collections.remove(obj_container) 
- - def _process(self, libpath, lib_container, container_name, actions): - relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] - - scene = bpy.context.scene - - scene.collection.children.link(bpy.data.collections[lib_container]) - - layout_container = scene.collection.children[lib_container].make_local() - layout_container.name = container_name - - objects_local_types = ['MESH', 'CAMERA', 'CURVE'] - - objects = [] - armatures = [] - - containers = list(layout_container.children) - - for container in layout_container.children: - if container.name == blender.pipeline.AVALON_CONTAINERS: - containers.remove(container) - - for container in containers: - container.make_local() - objects.extend([ - obj for obj in container.objects - if obj.type in objects_local_types - ]) - armatures.extend([ - obj for obj in container.objects - if obj.type == 'ARMATURE' - ]) - containers.extend(list(container.children)) - - # Link meshes first, then armatures. - # The armature is unparented for all the non-local meshes, - # when it is made local. 
- for obj in objects + armatures: - local_obj = obj.make_local() - if obj.data: - obj.data.make_local() - - if not local_obj.get(blender.pipeline.AVALON_PROPERTY): - local_obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = local_obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - action = actions.get(local_obj.name, None) - - if local_obj.type == 'ARMATURE' and action is not None: - local_obj.animation_data.action = action - - layout_container.pop(blender.pipeline.AVALON_PROPERTY) - - bpy.ops.object.select_all(action='DESELECT') - - return layout_container - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) - namespace = namespace or f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) - - container_metadata = container.get( - blender.pipeline.AVALON_PROPERTY) - - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container - - obj_container = self._process( - libpath, lib_container, container_name, {}) - - container_metadata["obj_container"] = obj_container - - # Save the list of objects in the metadata container - container_metadata["objects"] = obj_container.all_objects - - # nodes = list(container.objects) - # 
nodes.append(container) - nodes = [container] - self[:] = nodes - return nodes - - def update(self, container: Dict, representation: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - collection = bpy.data.collections.get( - container["objectName"] - ) - - libpath = Path(api.get_representation_path(representation)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(representation, indent=2), - ) - - assert collection, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert not (collection.children), ( - "Nested collections are not supported." - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = collection_metadata["libpath"] - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - obj_container = collection_metadata["obj_container"] - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - actions = {} - - for obj 
in objects: - if obj.type == 'ARMATURE': - if obj.animation_data and obj.animation_data.action: - actions[obj.name] = obj.animation_data.action - - self._remove(objects, obj_container) - - obj_container = self._process( - str(libpath), lib_container, collection.name, actions) - - # Save the list of objects in the metadata container - collection_metadata["obj_container"] = obj_container - collection_metadata["objects"] = obj_container.all_objects - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) - - bpy.ops.object.select_all(action='DESELECT') - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: - return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - objects = collection_metadata["objects"] - obj_container = collection_metadata["obj_container"] - - self._remove(objects, obj_container) - - bpy.data.collections.remove(collection) - - return True - - -class UnrealLayoutLoader(plugin.AssetLoader): - """Load layout published from Unreal.""" - - families = ["layout"] - representations = ["json"] - - label = "Link Layout" - icon = "code-fork" - color = "orange" - - animation_creator_name = "CreateAnimation" - - def _remove_objects(self, objects): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': - bpy.data.meshes.remove(obj.data) - elif obj.type == 'CAMERA': - bpy.data.cameras.remove(obj.data) - elif obj.type == 'CURVE': - bpy.data.curves.remove(obj.data) - else: - self.log.error( - f"Object {obj.name} of type {obj.type} not recognized.") - - def _remove_collections(self, collection): - if collection.children: - for child in collection.children: - self._remove_collections(child) - bpy.data.collections.remove(child) - - def _remove(self, layout_container): - layout_container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - - if layout_container.children: - for child in layout_container.children: - child_container = child.get(blender.pipeline.AVALON_PROPERTY) - child_container['objectName'] = child.name - api.remove(child_container) - - for c in bpy.data.collections: - metadata = c.get('avalon') - if metadata: - print("metadata.get('id')") - print(metadata.get('id')) - if metadata and metadata.get('id') == 'pyblish.avalon.instance': - print("metadata.get('dependencies')") - print(metadata.get('dependencies')) - print("layout_container_metadata.get('representation')") - print(layout_container_metadata.get('representation')) - if metadata.get('dependencies') == layout_container_metadata.get('representation'): - - for child in c.children: - 
bpy.data.collections.remove(child) - bpy.data.collections.remove(c) - break - - def _get_loader(self, loaders, family): - name = "" - if family == 'rig': - name = "BlendRigLoader" - elif family == 'model': - name = "BlendModelLoader" - - if name == "": - return None - - for loader in loaders: - if loader.__name__ == name: - return loader - - return None - - def set_transform(self, obj, transform): - location = transform.get('translation') - rotation = transform.get('rotation') - scale = transform.get('scale') - - # Y position is inverted in sign because Unreal and Blender have the - # Y axis mirrored - obj.location = ( - location.get('x'), - location.get('y'), - location.get('z') - ) - obj.rotation_euler = ( - rotation.get('x'), - rotation.get('y'), - rotation.get('z') - ) - obj.scale = ( - scale.get('x'), - scale.get('y'), - scale.get('z') - ) - - def _process( - self, libpath, layout_container, container_name, representation, - actions, parent_collection - ): - with open(libpath, "r") as fp: - data = json.load(fp) - - scene = bpy.context.scene - layout_collection = bpy.data.collections.new(container_name) - scene.collection.children.link(layout_collection) - - parent = parent_collection - - if parent is None: - parent = scene.collection - - all_loaders = api.discover(api.Loader) - - avalon_container = bpy.data.collections.get( - blender.pipeline.AVALON_CONTAINERS) - - for element in data: - reference = element.get('reference') - family = element.get('family') - - loaders = api.loaders_from_representation(all_loaders, reference) - loader = self._get_loader(loaders, family) - - if not loader: - continue - - instance_name = element.get('instance_name') - - element_container = api.load( - loader, - reference, - namespace=instance_name - ) - - if not element_container: - continue - - avalon_container.children.unlink(element_container) - layout_container.children.link(element_container) - - element_metadata = element_container.get( - blender.pipeline.AVALON_PROPERTY) - 
- # Unlink the object's collection from the scene collection and - # link it in the layout collection - element_collection = element_metadata.get('obj_container') - scene.collection.children.unlink(element_collection) - layout_collection.children.link(element_collection) - - objects = element_metadata.get('objects') - element_metadata['instance_name'] = instance_name - - objects_to_transform = [] - - creator_plugin = get_creator_by_name(self.animation_creator_name) - if not creator_plugin: - raise ValueError("Creator plugin \"{}\" was not found.".format( - self.animation_creator_name - )) - - if family == 'rig': - for o in objects: - if o.type == 'ARMATURE': - objects_to_transform.append(o) - # Create an animation subset for each rig - o.select_set(True) - asset = api.Session["AVALON_ASSET"] - c = api.create( - creator_plugin, - name="animation_" + element_collection.name, - asset=asset, - options={"useSelection": True}, - data={"dependencies": representation}) - scene.collection.children.unlink(c) - parent.children.link(c) - o.select_set(False) - break - elif family == 'model': - objects_to_transform = objects - - for o in objects_to_transform: - self.set_transform(o, element.get('transform')) - - if actions: - if o.type == 'ARMATURE': - action = actions.get(instance_name, None) - - if action: - if o.animation_data is None: - o.animation_data_create() - o.animation_data.action = action - - return layout_collection - - def process_asset(self, - context: dict, - name: str, - namespace: Optional[str] = None, - options: Optional[Dict] = None): - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = plugin.asset_name( - asset, subset - ) - unique_number = plugin.get_unique_number( - asset, subset - ) - namespace = namespace or 
f"{asset}_{unique_number}" - container_name = plugin.asset_name( - asset, subset, unique_number - ) - - layout_container = bpy.data.collections.new(container_name) - blender.pipeline.containerise_existing( - layout_container, - name, - namespace, - context, - self.__class__.__name__, - ) - - container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container - - layout_collection = self._process( - libpath, layout_container, container_name, - str(context["representation"]["_id"]), None, None) - - container_metadata["obj_container"] = layout_collection - - # Save the list of objects in the metadata container - container_metadata["objects"] = layout_collection.all_objects - - nodes = [layout_container] - self[:] = nodes - return nodes - - def update(self, container: Dict, representation: Dict): - """Update the loaded asset. - - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. 
- """ - layout_container = bpy.data.collections.get( - container["objectName"] - ) - if not layout_container: - return False - - libpath = Path(api.get_representation_path(representation)) - extension = libpath.suffix.lower() - - self.log.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(representation, indent=2), - ) - - assert layout_container, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - layout_container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - collection_libpath = layout_container_metadata["libpath"] - lib_container = layout_container_metadata["lib_container"] - obj_container = plugin.get_local_collection_with_name( - layout_container_metadata["obj_container"].name - ) - objects = obj_container.all_objects - - container_name = obj_container.name - - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - self.log.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - self.log.info("Library already loaded, not updating...") - return - - actions = {} - - for obj in objects: - if obj.type == 'ARMATURE': - if obj.animation_data and obj.animation_data.action: - obj_cont_name = obj.get( - blender.pipeline.AVALON_PROPERTY).get('container_name') - obj_cont = plugin.get_local_collection_with_name( - obj_cont_name) - element_metadata = obj_cont.get( - blender.pipeline.AVALON_PROPERTY) - instance_name = element_metadata.get('instance_name') - actions[instance_name] = 
obj.animation_data.action - - self._remove(layout_container) - - bpy.data.collections.remove(obj_container) - - creator_plugin = get_creator_by_name(self.setdress_creator_name) - if not creator_plugin: - raise ValueError("Creator plugin \"{}\" was not found.".format( - self.setdress_creator_name - )) - - parent = api.create( - creator_plugin, - name="animation", - asset=api.Session["AVALON_ASSET"], - options={"useSelection": True}, - data={"dependencies": str(representation["_id"])}) - - layout_collection = self._process( - libpath, layout_container, container_name, - str(representation["_id"]), actions, parent) - - layout_container_metadata["obj_container"] = layout_collection - layout_container_metadata["objects"] = layout_collection.all_objects - layout_container_metadata["libpath"] = str(libpath) - layout_container_metadata["representation"] = str( - representation["_id"]) - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. 
- """ - layout_container = bpy.data.collections.get( - container["objectName"] - ) - if not layout_container: - return False - - layout_container_metadata = layout_container.get( - blender.pipeline.AVALON_PROPERTY) - obj_container = plugin.get_local_collection_with_name( - layout_container_metadata["obj_container"].name - ) - - self._remove(layout_container) - - bpy.data.collections.remove(obj_container) - bpy.data.collections.remove(layout_container) - - return True diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py new file mode 100644 index 0000000000..85cb4dfbd3 --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -0,0 +1,337 @@ +"""Load a layout in Blender.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +import bpy + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin + + +class BlendLayoutLoader(plugin.AssetLoader): + """Load layout from a .blend file.""" + + families = ["layout"] + representations = ["blend"] + + label = "Link Layout" + icon = "code-fork" + color = "orange" + + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + if obj.type == 'MESH': + for material_slot in list(obj.material_slots): + if material_slot.material: + bpy.data.materials.remove(material_slot.material) + bpy.data.meshes.remove(obj.data) + elif obj.type == 'ARMATURE': + objects.extend(obj.children) + bpy.data.armatures.remove(obj.data) + elif obj.type == 'CURVE': + bpy.data.curves.remove(obj.data) + elif obj.type == 'EMPTY': + objects.extend(obj.children) + bpy.data.objects.remove(obj) + + def _remove_asset_and_library(self, asset_group): + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') + + 
# Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects: + if obj.get(AVALON_PROPERTY).get('libpath') == libpath: + count += 1 + + self._remove(asset_group) + + bpy.data.objects.remove(asset_group) + + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) + + def _process(self, libpath, asset_group, group_name, actions): + with bpy.data.libraries.load( + libpath, link=True, relative=False + ) as (data_from, data_to): + data_to.objects = data_from.objects + + parent = bpy.context.scene.collection + + empties = [obj for obj in data_to.objects if obj.type == 'EMPTY'] + + container = None + + for empty in empties: + if empty.get(AVALON_PROPERTY): + container = empty + break + + assert container, "No asset group found" + + # Children must be linked before parents, + # otherwise the hierarchy will break + objects = [] + nodes = list(container.children) + + for obj in nodes: + obj.parent = asset_group + + for obj in nodes: + objects.append(obj) + nodes.extend(list(obj.children)) + + objects.reverse() + + constraints = [] + + armatures = [obj for obj in objects if obj.type == 'ARMATURE'] + + for armature in armatures: + for bone in armature.pose.bones: + for constraint in bone.constraints: + if hasattr(constraint, 'target'): + constraints.append(constraint) + + for obj in objects: + parent.objects.link(obj) + + for obj in objects: + local_obj = plugin.prepare_data(obj, group_name) + + action = None + + if actions: + action = actions.get(local_obj.name, None) + + if local_obj.type == 'MESH': + plugin.prepare_data(local_obj.data, group_name) + + if obj != local_obj: + for constraint in constraints: + if constraint.target == obj: + constraint.target = local_obj + + for material_slot in local_obj.material_slots: + if material_slot.material: + plugin.prepare_data(material_slot.material, 
group_name) + elif local_obj.type == 'ARMATURE': + plugin.prepare_data(local_obj.data, group_name) + + if action is not None: + local_obj.animation_data.action = action + elif local_obj.animation_data.action is not None: + plugin.prepare_data( + local_obj.animation_data.action, group_name) + + # Set link the drivers to the local object + if local_obj.data.animation_data: + for d in local_obj.data.animation_data.drivers: + for v in d.driver.variables: + for t in v.targets: + t.id = local_obj + + if not local_obj.get(AVALON_PROPERTY): + local_obj[AVALON_PROPERTY] = dict() + + avalon_info = local_obj[AVALON_PROPERTY] + avalon_info.update({"container_name": group_name}) + + objects.reverse() + + bpy.data.orphans_purge(do_local_ids=False) + + bpy.ops.object.select_all(action='DESELECT') + + return objects + + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + namespace = namespace or f"{asset}_{unique_number}" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) + + asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + avalon_container.objects.link(asset_group) + + objects = self._process(libpath, asset_group, group_name, None) + + for child in asset_group.children: + if child.get(AVALON_PROPERTY): + 
avalon_container.objects.link(child) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + self[:] = objects + return objects + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! 
+ """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert asset_group, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] + + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, + normalized_libpath, + ) + if normalized_group_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + actions = {} + + for obj in asset_group.children: + obj_meta = obj.get(AVALON_PROPERTY) + if obj_meta.get('family') == 'rig': + rig = None + for child in obj.children: + if child.type == 'ARMATURE': + rig = child + break + if not rig: + raise Exception("No armature in the rig asset group.") + if rig.animation_data and rig.animation_data.action: + instance_name = obj_meta.get('instance_name') + actions[instance_name] = rig.animation_data.action + + mat = asset_group.matrix_basis.copy() + + # Remove the children of the asset_group first + for child in list(asset_group.children): + self._remove_asset_and_library(child) + + # Check how many assets use the same library + count = 0 + for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects: + if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath: + 
count += 1 + + self._remove(asset_group) + + # If it is the last object to use that library, remove it + if count == 1: + library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) + bpy.data.libraries.remove(library) + + self._process(str(libpath), asset_group, object_name, actions) + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + for child in asset_group.children: + if child.get(AVALON_PROPERTY): + avalon_container.objects.link(child) + + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + + Warning: + No nested collections are supported at the moment! + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: + return False + + # Remove the children of the asset_group first + for child in list(asset_group.children): + self._remove_asset_and_library(child) + + self._remove_asset_and_library(asset_group) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py new file mode 100644 index 0000000000..8564b52816 --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -0,0 +1,243 @@ +"""Load a layout in Blender.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, Optional + +import bpy +import json + +from avalon import api +from avalon.blender.pipeline import AVALON_CONTAINERS +from avalon.blender.pipeline import AVALON_CONTAINER_ID +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin + + +class JsonLayoutLoader(plugin.AssetLoader): + """Load layout published from 
Unreal.""" + + families = ["layout"] + representations = ["json"] + + label = "Load Layout" + icon = "code-fork" + color = "orange" + + animation_creator_name = "CreateAnimation" + + def _remove(self, asset_group): + objects = list(asset_group.children) + + for obj in objects: + api.remove(obj.get(AVALON_PROPERTY)) + + def _get_loader(self, loaders, family): + name = "" + if family == 'rig': + name = "BlendRigLoader" + elif family == 'model': + name = "BlendModelLoader" + + if name == "": + return None + + for loader in loaders: + if loader.__name__ == name: + return loader + + return None + + def _process(self, libpath, asset_group, actions): + bpy.ops.object.select_all(action='DESELECT') + + with open(libpath, "r") as fp: + data = json.load(fp) + + all_loaders = api.discover(api.Loader) + + for element in data: + reference = element.get('reference') + family = element.get('family') + + loaders = api.loaders_from_representation(all_loaders, reference) + loader = self._get_loader(loaders, family) + + if not loader: + continue + + instance_name = element.get('instance_name') + + action = None + + if actions: + action = actions.get(instance_name, None) + + options = { + 'parent': asset_group, + 'transform': element.get('transform'), + 'action': action + } + + # This should return the loaded asset, but the load call will be + # added to the queue to run in the Blender main thread, so + # at this time it will not return anything. The assets will be + # loaded in the next Blender cycle, so we use the options to + # set the transform, parent and assign the action, if there is one. 
+ api.load( + loader, + reference, + namespace=instance_name, + options=options + ) + + def process_asset(self, + context: dict, + name: str, + namespace: Optional[str] = None, + options: Optional[Dict] = None): + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + asset_name = plugin.asset_name(asset, subset) + unique_number = plugin.get_unique_number(asset, subset) + group_name = plugin.asset_name(asset, subset, unique_number) + namespace = namespace or f"{asset}_{unique_number}" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + bpy.context.scene.collection.children.link(avalon_container) + + asset_group = bpy.data.objects.new(group_name, object_data=None) + asset_group.empty_display_type = 'SINGLE_ARROW' + avalon_container.objects.link(asset_group) + + self._process(libpath, asset_group, None) + + bpy.context.scene.collection.objects.link(asset_group) + + asset_group[AVALON_PROPERTY] = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + "libpath": libpath, + "asset_name": asset_name, + "parent": str(context["representation"]["parent"]), + "family": context["representation"]["context"]["family"], + "objectName": group_name + } + + self[:] = asset_group.children + return asset_group.children + + def exec_update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. 
+ If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert asset_group, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + metadata = asset_group.get(AVALON_PROPERTY) + group_libpath = metadata["libpath"] + + normalized_group_libpath = ( + str(Path(bpy.path.abspath(group_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_group_libpath, + normalized_libpath, + ) + if normalized_group_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + actions = {} + + for obj in asset_group.children: + obj_meta = obj.get(AVALON_PROPERTY) + if obj_meta.get('family') == 'rig': + rig = None + for child in obj.children: + if child.type == 'ARMATURE': + rig = child + break + if not rig: + raise Exception("No armature in the rig asset group.") + if rig.animation_data and rig.animation_data.action: + instance_name = obj_meta.get('instance_name') + actions[instance_name] = rig.animation_data.action + + mat = asset_group.matrix_basis.copy() + + self._remove(asset_group) + + self._process(str(libpath), asset_group, actions) + + asset_group.matrix_basis = mat + + metadata["libpath"] = str(libpath) + 
metadata["representation"] = str(representation["_id"]) + + def exec_remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + """ + object_name = container["objectName"] + asset_group = bpy.data.objects.get(object_name) + + if not asset_group: + return False + + self._remove(asset_group) + + bpy.data.objects.remove(asset_group) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 5ddbdecf01..dd48be3db7 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -40,9 +40,8 @@ class BlendModelLoader(plugin.AssetLoader): bpy.data.objects.remove(obj) def _process(self, libpath, asset_group, group_name): - relative = bpy.context.preferences.filepaths.use_relative_paths with bpy.data.libraries.load( - libpath, link=True, relative=relative + libpath, link=True, relative=False ) as (data_from, data_to): data_to.objects = data_from.objects @@ -127,6 +126,42 @@ class BlendModelLoader(plugin.AssetLoader): asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) + bpy.ops.object.select_all(action='DESELECT') + + if options is not None: + parent = options.get('parent') + transform = options.get('transform') + + if parent and transform: + location = transform.get('translation') + rotation = transform.get('rotation') + scale = transform.get('scale') + + # Y position is inverted in sign because Unreal and Blender have the + # Y axis mirrored + asset_group.location = ( + location.get('x'), + location.get('y'), + location.get('z') + ) + asset_group.rotation_euler = ( + rotation.get('x'), + rotation.get('y'), + rotation.get('z') + ) + asset_group.scale = ( + scale.get('x'), + scale.get('y'), + scale.get('z') + ) + + 
bpy.context.view_layer.objects.active = parent + asset_group.select_set(True) + + bpy.ops.object.parent_set(keep_transform=True) + + bpy.ops.object.select_all(action='DESELECT') + objects = self._process(libpath, asset_group, group_name) bpy.context.scene.collection.objects.link(asset_group) diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index 236672391c..d12c398794 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -42,9 +42,8 @@ class BlendRigLoader(plugin.AssetLoader): bpy.data.objects.remove(obj) def _process(self, libpath, asset_group, group_name, action): - relative = bpy.context.preferences.filepaths.use_relative_paths with bpy.data.libraries.load( - libpath, link=True, relative=relative + libpath, link=True, relative=False ) as (data_from, data_to): data_to.objects = data_from.objects @@ -161,7 +160,46 @@ class BlendRigLoader(plugin.AssetLoader): asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) - objects = self._process(libpath, asset_group, group_name, None) + action = None + + bpy.ops.object.select_all(action='DESELECT') + + if options is not None: + parent = options.get('parent') + transform = options.get('transform') + action = options.get('action') + + if parent and transform: + location = transform.get('translation') + rotation = transform.get('rotation') + scale = transform.get('scale') + + # Y position is inverted in sign because Unreal and Blender have the + # Y axis mirrored + asset_group.location = ( + location.get('x'), + location.get('y'), + location.get('z') + ) + asset_group.rotation_euler = ( + rotation.get('x'), + rotation.get('y'), + rotation.get('z') + ) + asset_group.scale = ( + scale.get('x'), + scale.get('y'), + scale.get('z') + ) + + bpy.context.view_layer.objects.active = parent + asset_group.select_set(True) + + bpy.ops.object.parent_set(keep_transform=True) + + 
bpy.ops.object.select_all(action='DESELECT') + + objects = self._process(libpath, asset_group, group_name, action) bpy.context.scene.collection.objects.link(asset_group) diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index c6c9bf67f5..cd081b4479 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -3,7 +3,8 @@ import json import bpy -from avalon import blender, io +from avalon import io +from avalon.blender.pipeline import AVALON_PROPERTY import openpype.api @@ -24,52 +25,49 @@ class ExtractLayout(openpype.api.Extractor): json_data = [] - for collection in instance: - for asset in collection.children: - collection = bpy.data.collections[asset.name] - container = bpy.data.collections[asset.name + '_CON'] - metadata = container.get(blender.pipeline.AVALON_PROPERTY) + asset_group = bpy.data.objects[str(instance)] - parent = metadata["parent"] - family = metadata["family"] + for asset in asset_group.children: + metadata = asset.get(AVALON_PROPERTY) - self.log.debug("Parent: {}".format(parent)) - blend = io.find_one( - { - "type": "representation", - "parent": io.ObjectId(parent), - "name": "blend" - }, - projection={"_id": True}) - blend_id = blend["_id"] + parent = metadata["parent"] + family = metadata["family"] - json_element = {} - json_element["reference"] = str(blend_id) - json_element["family"] = family - json_element["instance_name"] = asset.name - json_element["asset_name"] = metadata["lib_container"] - json_element["file_path"] = metadata["libpath"] + self.log.debug("Parent: {}".format(parent)) + blend = io.find_one( + { + "type": "representation", + "parent": io.ObjectId(parent), + "name": "blend" + }, + projection={"_id": True}) + blend_id = blend["_id"] - obj = collection.objects[0] + json_element = {} + json_element["reference"] = str(blend_id) + json_element["family"] = family + 
json_element["instance_name"] = asset.name + json_element["asset_name"] = metadata["asset_name"] + json_element["file_path"] = metadata["libpath"] - json_element["transform"] = { - "translation": { - "x": obj.location.x, - "y": obj.location.y, - "z": obj.location.z - }, - "rotation": { - "x": obj.rotation_euler.x, - "y": obj.rotation_euler.y, - "z": obj.rotation_euler.z, - }, - "scale": { - "x": obj.scale.x, - "y": obj.scale.y, - "z": obj.scale.z - } + json_element["transform"] = { + "translation": { + "x": asset.location.x, + "y": asset.location.y, + "z": asset.location.z + }, + "rotation": { + "x": asset.rotation_euler.x, + "y": asset.rotation_euler.y, + "z": asset.rotation_euler.z, + }, + "scale": { + "x": asset.scale.x, + "y": asset.scale.y, + "z": asset.scale.z } - json_data.append(json_element) + } + json_data.append(json_element) json_filename = "{}.json".format(instance.name) json_path = os.path.join(stagingdir, json_filename) From 3038abad72f91d7af5c310e4e660ad1ec5f55960 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Jul 2021 18:00:17 +0200 Subject: [PATCH 018/308] wip sync with cb code --- openpype/hosts/houdini/api/__init__.py | 97 ++++- openpype/hosts/houdini/api/lib.py | 140 ++++++- openpype/hosts/houdini/api/usd.py | 255 +++++++++++++ .../plugins/create/create_composite.py | 35 ++ .../plugins/create/create_pointcache.py | 1 + .../plugins/create/create_redshift_rop.py | 63 ++++ .../plugins/create/create_remote_publish.py | 22 ++ .../houdini/plugins/create/create_usd.py | 39 ++ .../plugins/create/create_usd_model.py | 39 ++ .../plugins/create/create_usd_workspaces.py | 57 +++ .../plugins/create/create_usdrender.py | 36 ++ .../hosts/houdini/plugins/load/actions.py | 78 ++++ .../houdini/plugins/load/load_alembic.py | 11 +- .../hosts/houdini/plugins/load/load_camera.py | 132 +++++-- .../hosts/houdini/plugins/load/load_image.py | 119 ++++++ .../houdini/plugins/load/load_usd_layer.py | 74 ++++ .../plugins/load/load_usd_reference.py | 74 ++++ 
.../hosts/houdini/plugins/load/load_vdb.py | 106 ++++++ .../houdini/plugins/load/show_usdview.py | 44 +++ .../plugins/publish/collect_active_state.py | 36 ++ .../houdini/plugins/publish/collect_frames.py | 39 +- .../houdini/plugins/publish/collect_inputs.py | 120 ++++++ .../plugins/publish/collect_instances.py | 9 + .../publish/collect_instances_usd_layered.py | 155 ++++++++ .../plugins/publish/collect_output_node.py | 52 ++- .../plugins/publish/collect_redshift_rop.py | 137 +++++++ .../plugins/publish/collect_remote_publish.py | 30 ++ .../publish/collect_render_products.py | 130 +++++++ .../plugins/publish/collect_usd_bootstrap.py | 117 ++++++ .../plugins/publish/collect_usd_layers.py | 67 ++++ .../plugins/publish/extract_alembic.py | 15 +- .../plugins/publish/extract_composite.py | 36 ++ .../houdini/plugins/publish/extract_usd.py | 37 ++ .../plugins/publish/extract_usd_layered.py | 303 +++++++++++++++ .../plugins/publish/extract_vdb_cache.py | 13 +- .../plugins/publish/increment_current_file.py | 48 +++ .../increment_current_file_deadline.py | 34 ++ .../houdini/plugins/publish/save_scene.py | 37 ++ .../plugins/publish/save_scene_deadline.py | 23 ++ .../publish/submit_houdini_render_deadline.py | 158 ++++++++ .../plugins/publish/submit_remote_publish.py | 152 ++++++++ .../validate_abc_primitive_to_detail.py | 116 ++++++ .../publish/validate_alembic_face_sets.py | 35 ++ .../publish/validate_alembic_input_node.py | 37 +- .../plugins/publish/validate_bypass.py | 9 +- .../plugins/publish/validate_camera_rop.py | 4 +- .../publish/validate_cop_output_node.py | 58 +++ .../publish/validate_file_extension.py | 60 +++ .../plugins/publish/validate_frame_token.py | 50 +++ .../validate_houdini_license_category.py | 28 ++ .../publish/validate_mkpaths_toggled.py | 12 +- .../plugins/publish/validate_no_errors.py | 64 ++++ .../validate_primitive_hierarchy_paths.py | 52 +-- .../publish/validate_remote_publish.py | 43 +++ .../validate_remote_publish_enabled.py | 35 ++ 
.../publish/validate_sop_output_node.py | 78 ++++ .../validate_usd_layer_path_backslashes.py | 51 +++ .../publish/validate_usd_model_and_shade.py | 75 ++++ .../publish/validate_usd_output_node.py | 50 +++ .../validate_usd_render_product_names.py | 30 ++ .../plugins/publish/validate_usd_setdress.py | 51 +++ .../validate_usd_shade_model_exists.py | 36 ++ .../publish/validate_usd_shade_workspace.py | 60 +++ .../publish/validate_vdb_output_node.py | 68 ++++ .../vendor/husdoutputprocessors/__init__.py | 1 + .../avalon_uri_processor.py | 168 +++++++++ .../stagingdir_processor.py | 90 +++++ openpype/lib/usdlib.py | 347 ++++++++++++++++++ 68 files changed, 4744 insertions(+), 134 deletions(-) create mode 100644 openpype/hosts/houdini/api/usd.py create mode 100644 openpype/hosts/houdini/plugins/create/create_composite.py create mode 100644 openpype/hosts/houdini/plugins/create/create_redshift_rop.py create mode 100644 openpype/hosts/houdini/plugins/create/create_remote_publish.py create mode 100644 openpype/hosts/houdini/plugins/create/create_usd.py create mode 100644 openpype/hosts/houdini/plugins/create/create_usd_model.py create mode 100644 openpype/hosts/houdini/plugins/create/create_usd_workspaces.py create mode 100644 openpype/hosts/houdini/plugins/create/create_usdrender.py create mode 100644 openpype/hosts/houdini/plugins/load/actions.py create mode 100644 openpype/hosts/houdini/plugins/load/load_image.py create mode 100644 openpype/hosts/houdini/plugins/load/load_usd_layer.py create mode 100644 openpype/hosts/houdini/plugins/load/load_usd_reference.py create mode 100644 openpype/hosts/houdini/plugins/load/load_vdb.py create mode 100644 openpype/hosts/houdini/plugins/load/show_usdview.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_active_state.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_inputs.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py create mode 100644 
openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_remote_publish.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_render_products.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py create mode 100644 openpype/hosts/houdini/plugins/publish/collect_usd_layers.py create mode 100644 openpype/hosts/houdini/plugins/publish/extract_composite.py create mode 100644 openpype/hosts/houdini/plugins/publish/extract_usd.py create mode 100644 openpype/hosts/houdini/plugins/publish/extract_usd_layered.py create mode 100644 openpype/hosts/houdini/plugins/publish/increment_current_file.py create mode 100644 openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py create mode 100644 openpype/hosts/houdini/plugins/publish/save_scene.py create mode 100644 openpype/hosts/houdini/plugins/publish/save_scene_deadline.py create mode 100644 openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py create mode 100644 openpype/hosts/houdini/plugins/publish/submit_remote_publish.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_file_extension.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_frame_token.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_no_errors.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_remote_publish.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py create mode 100644 
openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py create mode 100644 openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py create mode 100644 openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py create mode 100644 openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py create mode 100644 openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py create mode 100644 openpype/lib/usdlib.py diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index 21f4ae41c3..8eda4aff26 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,5 +1,7 @@ import os +import sys import logging +import contextlib import hou @@ -11,7 +13,12 @@ from avalon.houdini import pipeline as houdini import openpype.hosts.houdini from openpype.hosts.houdini.api import lib -from openpype.lib import any_outdated +from openpype.lib import ( + any_outdated, + update_task_from_path +) + +from .lib import get_asset_fps log = logging.getLogger("openpype.hosts.houdini") @@ -33,15 +40,25 @@ def install(): avalon.before("save", before_save) avalon.on("save", on_save) avalon.on("open", on_open) + avalon.on("new", on_new) pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) log.info("Setting default family states 
for loader..") - avalon.data["familiesStateToggled"] = ["imagesequence"] + avalon.data["familiesStateToggled"] = [ + "imagesequence", + "review" + ] + # Expose Houdini husdoutputprocessors + hou_setup_pythonpath = os.path.join(os.path.dirname(PACKAGE_DIR), + "setup", "houdini", "pythonpath") + print("Adding PYTHONPATH: %s" % hou_setup_pythonpath) + sys.path.append(hou_setup_pythonpath) -def on_init(*args): - houdini.on_houdini_initialize() + # Set asset FPS for the empty scene directly after launch of Houdini + # so it initializes into the correct scene FPS + _set_asset_fps() def before_save(*args): @@ -52,6 +69,8 @@ def on_save(*args): avalon.logger.info("Running callback on save..") + update_task_from_path(hou.hipFile.path()) + nodes = lib.get_id_required_nodes() for node, new_id in lib.generate_ids(nodes): lib.set_id(node, new_id, overwrite=False) @@ -59,8 +78,18 @@ def on_save(*args): def on_open(*args): + if not hou.isUIAvailable(): + log.debug("Batch mode detected, ignoring `on_open` callbacks..") + return + avalon.logger.info("Running callback on open..") + update_task_from_path(hou.hipFile.path()) + + # Validate FPS after update_task_from_path to + # ensure it is using correct FPS for the asset + lib.validate_fps() + if any_outdated(): from ..widgets import popup @@ -70,7 +99,7 @@ def on_open(*args): parent = hou.ui.mainQtWindow() if parent is None: log.info("Skipping outdated content pop-up " - "because Maya window can't be found.") + "because Houdini window can't be found.") else: # Show outdated pop-up @@ -79,15 +108,53 @@ def on_open(*args): tool.show(parent=parent) dialog = popup.Popup(parent=parent) - dialog.setWindowTitle("Maya scene has outdated content") + dialog.setWindowTitle("Houdini scene has outdated content") dialog.setMessage("There are outdated containers in " - "your Maya scene.") - dialog.on_show.connect(_on_show_inventory) + "your Houdini scene.") + dialog.on_clicked.connect(_on_show_inventory) dialog.show() +def on_new(_): + """Set 
project resolution and fps when create a new file""" + avalon.logger.info("Running callback on new..") + _set_asset_fps() + + +def _set_asset_fps(): + """Set Houdini scene FPS to the default required for current asset""" + + # Set new scene fps + fps = get_asset_fps() + print("Setting scene FPS to %i" % fps) + lib.set_scene_fps(fps) + + def on_pyblish_instance_toggled(instance, new_value, old_value): """Toggle saver tool passthrough states on instance toggles.""" + + @contextlib.contextmanager + def main_take(no_update=True): + """Enter root take during context""" + original_take = hou.takes.currentTake() + original_update_mode = hou.updateModeSetting() + root = hou.takes.rootTake() + has_changed = False + try: + if original_take != root: + has_changed = True + if no_update: + hou.setUpdateMode(hou.updateMode.Manual) + hou.takes.setCurrentTake(root) + yield + finally: + if has_changed: + if no_update: + hou.setUpdateMode(original_update_mode) + hou.takes.setCurrentTake(original_take) + + if not instance.data.get("_allowToggleBypass", True): + return nodes = instance[:] if not nodes: @@ -96,8 +163,20 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): # Assume instance node is first node instance_node = nodes[0] + if not hasattr(instance_node, "isBypassed"): + # Likely not a node that can actually be bypassed + log.debug("Can't bypass node: %s", instance_node.path()) + return + if instance_node.isBypassed() != (not old_value): print("%s old bypass state didn't match old instance state, " "updating anyway.." % instance_node.path()) - instance_node.bypass(not new_value) + try: + # Go into the main take, because when in another take changing + # the bypass state of a note cannot be done due to it being locked + # by default. 
+ with main_take(no_update=True): + instance_node.bypass(not new_value) + except hou.PermissionError as exc: + log.warning("%s - %s", instance_node.path(), exc) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 1f0f90811f..53f0e59ea9 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,14 +1,19 @@ import uuid - +import logging from contextlib import contextmanager -import hou - -from openpype import lib - +from openpype.api import get_asset from avalon import api, io from avalon.houdini import lib as houdini +import hou + +log = logging.getLogger(__name__) + + +def get_asset_fps(): + """Return current asset fps.""" + return get_asset()["data"].get("fps") def set_id(node, unique_id, overwrite=False): @@ -171,10 +176,10 @@ def get_output_parameter(node): node_type = node.type().name() if node_type == "geometry": return node.parm("sopoutput") - elif node_type == "alembic": return node.parm("filename") - + elif node_type == "comp": + return node.parm("copoutput") else: raise TypeError("Node type '%s' not supported" % node_type) @@ -205,7 +210,7 @@ def validate_fps(): """ - fps = lib.get_asset()["data"]["fps"] + fps = get_asset_fps() current_fps = hou.fps() # returns float if current_fps != fps: @@ -217,18 +222,123 @@ def validate_fps(): if parent is None: pass else: - dialog = popup.Popup2(parent=parent) + dialog = popup.Popup(parent=parent) dialog.setModal(True) - dialog.setWindowTitle("Houdini scene not in line with project") - dialog.setMessage("The FPS is out of sync, please fix it") + dialog.setWindowTitle("Houdini scene does not match project FPS") + dialog.setMessage("Scene %i FPS does not match project %i FPS" % + (current_fps, fps)) + dialog.setButtonText("Fix") - # Set new text for button (add optional argument for the popup?) 
- toggle = dialog.widgets["toggle"] - toggle.setEnabled(False) - dialog.on_show.connect(lambda: set_scene_fps(fps)) + # on_show is the Fix button clicked callback + dialog.on_clicked.connect(lambda: set_scene_fps(fps)) dialog.show() return False return True + + +def create_remote_publish_node(force=True): + """Function to create a remote publish node in /out + + This is a hacked "Shell" node that does *nothing* except for triggering + `colorbleed.lib.publish_remote()` as pre-render script. + + All default attributes of the Shell node are hidden to the Artist to + avoid confusion. + + Additionally some custom attributes are added that can be collected + by a Collector to set specific settings for the publish, e.g. whether + to separate the jobs per instance or process in one single job. + + """ + + cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()" + + existing = hou.node("/out/REMOTE_PUBLISH") + if existing: + if force: + log.warning("Removing existing '/out/REMOTE_PUBLISH' node..") + existing.destroy() + else: + raise RuntimeError("Node already exists /out/REMOTE_PUBLISH. " + "Please remove manually or set `force` to " + "True.") + + # Create the shell node + out = hou.node("/out") + node = out.createNode("shell", node_name="REMOTE_PUBLISH") + node.moveToGoodPosition() + + # Set color make it stand out (avalon/pyblish color) + node.setColor(hou.Color(0.439, 0.709, 0.933)) + + # Set the pre-render script + node.setParms({ + "prerender": cmd, + "lprerender": "python" # command language + }) + + # Lock the attributes to ensure artists won't easily mess things up. 
+ node.parm("prerender").lock(True) + node.parm("lprerender").lock(True) + + # Lock up the actual shell command + command_parm = node.parm("command") + command_parm.set("") + command_parm.lock(True) + shellexec_parm = node.parm("shellexec") + shellexec_parm.set(False) + shellexec_parm.lock(True) + + # Get the node's parm template group so we can customize it + template = node.parmTemplateGroup() + + # Hide default tabs + template.hideFolder("Shell", True) + template.hideFolder("Scripts", True) + + # Hide default settings + template.hide("execute", True) + template.hide("renderdialog", True) + template.hide("trange", True) + template.hide("f", True) + template.hide("take", True) + + # Add custom settings to this node. + parm_folder = hou.FolderParmTemplate("folder", "Submission Settings") + + # Separate Jobs per Instance + parm = hou.ToggleParmTemplate(name="separateJobPerInstance", + label="Separate Job per Instance", + default_value=False) + parm_folder.addParmTemplate(parm) + + # Add our custom Submission Settings folder + template.append(parm_folder) + + # Apply template back to the node + node.setParmTemplateGroup(template) + + +def render_rop(ropnode): + """Render ROP node utility for Publishing. + + This renders a ROP node with the settings we want during Publishing. + """ + # Print verbose when in batch mode without UI + verbose = not hou.isUIAvailable() + + # Render + try: + ropnode.render(verbose=verbose, + # Allow Deadline to capture completion percentage + output_progress=verbose) + except hou.Error as exc: + # The hou.Error is not inherited from a Python Exception class, + # so we explicitly capture the houdini error, otherwise pyblish + # will remain hanging. 
+ import traceback + traceback.print_exc() + raise RuntimeError("Render failed: {0}".format(exc)) diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py new file mode 100644 index 0000000000..545cd3f7a5 --- /dev/null +++ b/openpype/hosts/houdini/api/usd.py @@ -0,0 +1,255 @@ +"""Houdini-specific USD Library functions.""" + +import contextlib + +from avalon import io +import logging +from avalon.vendor.Qt import QtCore, QtGui +from avalon.tools.widgets import AssetWidget +from avalon import style + +from pxr import Sdf + + +log = logging.getLogger(__name__) + + +def pick_asset(node): + """Show a user interface to select an Asset in the project + + When double clicking an asset it will set the Asset value in the + 'asset' parameter. + + """ + + pos = QtGui.QCursor.pos() + + parm = node.parm("asset_name") + if not parm: + log.error("Node has no 'asset' parameter: %s", node) + return + + # Construct the AssetWidget as a frameless popup so it automatically + # closes when clicked outside of it. 
+ global tool + tool = AssetWidget(silo_creatable=False) + tool.setContentsMargins(5, 5, 5, 5) + tool.setWindowTitle("Pick Asset") + tool.setStyleSheet(style.load_stylesheet()) + tool.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) + tool.refresh() + + # Select the current asset if there is any + name = parm.eval() + if name: + from avalon import io + db_asset = io.find_one({"name": name, "type": "asset"}) + if db_asset: + silo = db_asset.get("silo") + if silo: + tool.set_silo(silo) + tool.select_assets([name], expand=True) + + # Show cursor (top right of window) near cursor + tool.resize(250, 400) + tool.move(tool.mapFromGlobal(pos) - QtCore.QPoint(tool.width(), 0)) + + def set_parameter_callback(index): + name = index.data(tool.model.DocumentRole)["name"] + parm.set(name) + tool.close() + + tool.view.doubleClicked.connect(set_parameter_callback) + tool.show() + + +def add_usd_output_processor(ropnode, processor): + """Add USD Output Processor to USD Rop node. + + Args: + ropnode (hou.RopNode): The USD Rop node. + processor (str): The output processor name. This is the basename of + the python file that contains the Houdini USD Output Processor. + + """ + + import loputils + + loputils.handleOutputProcessorAdd({ + "node": ropnode, + "parm": ropnode.parm("outputprocessors"), + "script_value": processor + }) + + +def remove_usd_output_processor(ropnode, processor): + """Removes USD Output Processor from USD Rop node. + + Args: + ropnode (hou.RopNode): The USD Rop node. + processor (str): The output processor name. This is the basename of + the python file that contains the Houdini USD Output Processor. 
+ + """ + import loputils + + parm = ropnode.parm(processor + "_remove") + if not parm: + raise RuntimeError("Output Processor %s does not " + "exist on %s" % (processor, ropnode.name())) + + loputils.handleOutputProcessorRemove({ + "node": ropnode, + "parm": parm + }) + + +@contextlib.contextmanager +def outputprocessors(ropnode, + processors=tuple(), + disable_all_others=True): + """Context manager to temporarily add Output Processors to USD ROP node. + + Args: + ropnode (hou.RopNode): The USD Rop node. + processors (tuple or list): The processors to add. + disable_all_others (bool, Optional): Whether to disable all + output processors currently on the ROP node that are not in the + `processors` list passed to this function. + + """ + # TODO: Add support for forcing the correct Order of the processors + + original = [] + prefix = "enableoutputprocessor_" + processor_parms = ropnode.globParms(prefix + "*") + for parm in processor_parms: + original.append((parm, parm.eval())) + + if disable_all_others: + for parm in processor_parms: + parm.set(False) + + added = [] + for processor in processors: + + parm = ropnode.parm(prefix + processor) + if parm: + # If processor already exists, just enable it + parm.set(True) + + else: + # Else add the new processor + add_usd_output_processor(ropnode, processor) + added.append(processor) + + try: + yield + finally: + + # Remove newly added processors + for processor in added: + remove_usd_output_processor(ropnode, processor) + + # Revert to original values + for parm, value in original: + if parm: + parm.set(value) + + +def get_usd_rop_loppath(node): + + # Get sop path + node_type = node.type().name() + if node_type == "usd": + return node.parm("loppath").evalAsNode() + + elif node_type in {"usd_rop", "usdrender_rop"}: + # Inside Solaris e.g. 
/stage (not in ROP context) + # When incoming connection is present it takes it directly + inputs = node.inputs() + if inputs: + return inputs[0] + else: + return node.parm("loppath").evalAsNode() + + +def get_layer_save_path(layer): + """Get custom HoudiniLayerInfo->HoudiniSavePath from SdfLayer. + + Args: + layer (pxr.Sdf.Layer): The Layer to retrieve the save pah data from. + + Returns: + str or None: Path to save to when data exists. + + """ + hou_layer_info = layer.rootPrims.get("HoudiniLayerInfo") + if not hou_layer_info: + return + + save_path = hou_layer_info.customData.get("HoudiniSavePath", None) + if save_path: + # Unfortunately this doesn't actually resolve the full absolute path + return layer.ComputeAbsolutePath(save_path) + + +def get_referenced_layers(layer): + """Return SdfLayers for all external references of the current layer + + Args: + layer (pxr.Sdf.Layer): The Layer to retrieve the save pah data from. + + Returns: + list: List of pxr.Sdf.Layer that are external references to this layer + + """ + + layers = [] + for layer_id in layer.GetExternalReferences(): + layer = Sdf.Layer.Find(layer_id) + if not layer: + # A file may not be in memory and is + # referenced from disk. As such it cannot + # be found. We will ignore those layers. 
+ continue + + layers.append(layer) + + return layers + + +def iter_layer_recursive(layer): + """Recursively iterate all 'external' referenced layers""" + + layers = get_referenced_layers(layer) + traversed = set(layers) # Avoid recursion to itself (if even possible) + traverse = list(layers) + for layer in traverse: + + # Include children layers (recursion) + children_layers = get_referenced_layers(layer) + children_layers = [x for x in children_layers if x not in traversed] + traverse.extend(children_layers) + traversed.update(children_layers) + + yield layer + + +def get_configured_save_layers(usd_rop): + + lop_node = get_usd_rop_loppath(usd_rop) + stage = lop_node.stage(apply_viewport_overrides=False) + if not stage: + raise RuntimeError("No valid USD stage for ROP node: " + "%s" % usd_rop.path()) + + root_layer = stage.GetRootLayer() + + save_layers = [] + for layer in iter_layer_recursive(root_layer): + save_path = get_layer_save_path(layer) + if save_path is not None: + save_layers.append(layer) + + return save_layers diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py new file mode 100644 index 0000000000..d8ec41e61a --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -0,0 +1,35 @@ +from avalon import houdini + + +class CreateCompositeSequence(houdini.Creator): + """Composite ROP to Image Sequence""" + + label = "Composite (Image Sequence)" + family = "colorbleed.imagesequence" + icon = "gears" + + def __init__(self, *args, **kwargs): + super(CreateCompositeSequence, self).__init__(*args, **kwargs) + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + # Type of ROP node to create + self.data.update({"node_type": "comp"}) + + def process(self): + instance = super(CreateCompositeSequence, self).process() + + parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} + + if self.nodes: + node = 
self.nodes[0] + parms.update({"coppath": node.path()}) + + instance.setParms(parms) + + # Lock any parameters in this list + to_lock = ["prim_to_detail_pattern"] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 6be854ac28..7ab1046df3 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -25,6 +25,7 @@ class CreatePointCache(plugin.Creator): "path_attrib": "path", # Pass path attribute for output "prim_to_detail_pattern": "cbId", "format": 2, # Set format to Ogawa + "facesets": 0, # No face sets (by default exclude them) "filename": "$HIP/pyblish/%s.abc" % self.name} if self.nodes: diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py new file mode 100644 index 0000000000..b944d592f2 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -0,0 +1,63 @@ +import hou + +from avalon import houdini + + +class CreateRedshiftROP(houdini.Creator): + """Redshift ROP""" + + label = "Redshift ROP" + family = "redshift_rop" + icon = "magic" + defaults = ["master"] + + def __init__(self, *args, **kwargs): + super(CreateRedshiftROP, self).__init__(*args, **kwargs) + + # Clear the family prefix from the subset + subset = self.data["subset"] + subset_no_prefix = subset[len(self.family):] + subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] + self.data["subset"] = subset_no_prefix + + # Add chunk size attribute + self.data["chunkSize"] = 10 + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + self.data.update({"node_type": "Redshift_ROP"}) + + def process(self): + instance = super(CreateRedshiftROP, self).process() + + basename = instance.name() + 
instance.setName(basename + "_ROP", unique_name=True) + + # Also create the linked Redshift IPR Rop + ipr_rop = self.parent.createNode("Redshift_IPR", + node_name=basename + "_IPR") + + # Move it to directly under the Redshift ROP + ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + + # Set the linked rop to the Redshift ROP + ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) + + prefix = '${HIP}/render/${HIPNAME}/`chs("subset")`.${AOV}.$F4.exr' + parms = { + # Render frame range + "trange": 1, + + # Redshift ROP settings + "RS_outputFileNamePrefix": prefix, + "RS_outputMultilayerMode": 0, # no multi-layered exr + "RS_outputBeautyAOVSuffix": "beauty" + } + instance.setParms(parms) + + # Lock some Avalon attributes + to_lock = ["family", "id"] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_remote_publish.py b/openpype/hosts/houdini/plugins/create/create_remote_publish.py new file mode 100644 index 0000000000..bc6cf5d949 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_remote_publish.py @@ -0,0 +1,22 @@ +from avalon import houdini + +from colorbleed.houdini import lib + + +class CreateRemotePublish(houdini.Creator): + """Create Remote Publish Submission Settings node.""" + + label = "Remote Publish" + family = "remotePublish" + icon = "cloud-upload" + + def process(self): + """This is a stub creator process. + + This does not create a regular instance that the instance collector + picks up. Instead we force this one to solely create something we + explicitly want to create. The only reason this class is here is so + that Artists can also create the node through the Avalon creator. 
+ + """ + lib.create_remote_publish_node(force=True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py new file mode 100644 index 0000000000..d4cf3f761b --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -0,0 +1,39 @@ +from avalon import houdini + + +class CreateUSD(houdini.Creator): + """Universal Scene Description""" + + label = "USD" + family = "colorbleed.usd" + icon = "gears" + + def __init__(self, *args, **kwargs): + super(CreateUSD, self).__init__(*args, **kwargs) + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + self.data.update({"node_type": "usd"}) + + def process(self): + instance = super(CreateUSD, self).process() + + parms = { + "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "enableoutputprocessor_simplerelativepaths": False + } + + if self.nodes: + node = self.nodes[0] + parms.update({"loppath": node.path()}) + + instance.setParms(parms) + + # Lock any parameters in this list + to_lock = ["fileperframe", + # Lock some Avalon attributes + "family", "id"] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py new file mode 100644 index 0000000000..f4c377e0a8 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_usd_model.py @@ -0,0 +1,39 @@ +import re + +from avalon import api +from avalon.houdini import lib +import hou + + +class CreateUSDModel(api.Creator): + """Author USD Model""" + + label = "USD Model" + family = "usdModel" + icon = "gears" + + def process(self): + + node_type = "cb::author_model:1.0" + + subset = self.data["subset"] + name = "author_{}".format(subset) + variant = re.match("usdModel(.*)", subset).group(1) + + # Get stage root and create node + stage = hou.node("/stage") + instance = stage.createNode(node_type, node_name=name) + 
instance.moveToGoodPosition(move_unconnected=True) + + parms = { + "asset_name": self.data["asset"], + "variant_name": variant + } + + # Set the Geo Path to the first selected node (if any) + selection = hou.selectedNodes() + if selection: + node = selection[0] + parms["geo_path"] = node.path() + + instance.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py new file mode 100644 index 0000000000..87c54c9fb7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py @@ -0,0 +1,57 @@ +from avalon import api +from avalon.houdini import lib +import hou + + +class _USDWorkspace(api.Creator): + """Base class to create pre-built USD Workspaces""" + + node_name = None + node_type = None + step = None + icon = "gears" + + def process(self): + + if not all([self.node_type, self.node_name, self.step]): + self.log.error("Incomplete USD Workspace parameters") + return + + name = self.node_name + node_type = self.node_type + + # Force the subset to "{asset}.{step}.usd" + subset = "usd{step}".format(step=self.step) + self.data["subset"] = subset + + # Get stage root and create node + stage = hou.node("/stage") + instance = stage.createNode(node_type, node_name=name) + instance.moveToGoodPosition() + + # With the Workspace HDAs there is no need to imprint the instance data + # since this data is pre-built into it. However, we do set the right + # asset as that can be defined by the user. 
+ parms = { + "asset": self.data["asset"] + } + instance.setParms(parms) + + return instance + + +class USDCreateShadingWorkspace(_USDWorkspace): + """USD Shading Workspace""" + + defaults = ["Shade"] + + label = "USD Shading Workspace" + family = "colorbleed.shade.usd" + + node_type = "cb::shadingWorkspace::1.0" + node_name = "shadingWorkspace" + step = "Shade" + + +# Don't allow the base class to be picked up by Avalon +del _USDWorkspace diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py new file mode 100644 index 0000000000..4914e8e3ab --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -0,0 +1,36 @@ +import hou + +from avalon import houdini + + +class CreateUSDRender(houdini.Creator): + """USD Render ROP in /stage""" + + label = "USD Render" + family = "colorbleed.usdrender" + icon = "magic" + + def __init__(self, *args, **kwargs): + super(CreateUSDRender, self).__init__(*args, **kwargs) + + self.parent = hou.node("/stage") + + # Remove the active, we are checking the bypass flag of the nodes + self.data.pop("active", None) + + self.data.update({"node_type": "usdrender_rop"}) + + def process(self): + instance = super(CreateUSDRender, self).process() + + parms = { + # Render frame range + "trange": 1 + } + instance.setParms(parms) + + # Lock some Avalon attributes + to_lock = ["family", "id"] + for name in to_lock: + parm = instance.parm(name) + parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/load/actions.py b/openpype/hosts/houdini/plugins/load/actions.py new file mode 100644 index 0000000000..bb8cd7a1f6 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/actions.py @@ -0,0 +1,78 @@ +"""A module containing generic loader actions that will display in the Loader. 
+ +""" + +from avalon import api + + +class SetFrameRangeLoader(api.Loader): + """Set Maya frame range""" + + families = ["colorbleed.animation", + "colorbleed.camera", + "colorbleed.pointcache", + "colorbleed.vdbcache", + "colorbleed.usd"] + representations = ["abc", "vdb", "usd"] + + label = "Set frame range" + order = 11 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + import hou + + version = context['version'] + version_data = version.get("data", {}) + + start = version_data.get("startFrame", None) + end = version_data.get("endFrame", None) + + if start is None or end is None: + print("Skipping setting frame range because start or " + "end frame data is missing..") + return + + hou.playbar.setFrameRange(start, end) + hou.playbar.setPlaybackRange(start, end) + + +class SetFrameRangeWithHandlesLoader(api.Loader): + """Set Maya frame range including pre- and post-handles""" + + families = ["colorbleed.animation", + "colorbleed.camera", + "colorbleed.pointcache", + "colorbleed.vdbcache", + "colorbleed.usd"] + representations = ["abc", "vdb", "usd"] + + label = "Set frame range (with handles)" + order = 12 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + import hou + + version = context['version'] + version_data = version.get("data", {}) + + start = version_data.get("startFrame", None) + end = version_data.get("endFrame", None) + + if start is None or end is None: + print("Skipping setting frame range because start or " + "end frame data is missing..") + return + + # Include handles + handles = version_data.get("handles", 0) + start -= handles + end += handles + + hou.playbar.setFrameRange(start, end) + hou.playbar.setPlaybackRange(start, end) diff --git a/openpype/hosts/houdini/plugins/load/load_alembic.py b/openpype/hosts/houdini/plugins/load/load_alembic.py index 8fc2b6a61a..076b4051f8 100644 --- a/openpype/hosts/houdini/plugins/load/load_alembic.py +++ 
b/openpype/hosts/houdini/plugins/load/load_alembic.py @@ -8,7 +8,8 @@ class AbcLoader(api.Loader): families = ["model", "animation", - "pointcache"] + "pointcache", + "gpuCache"] label = "Load Alembic" representations = ["abc"] order = -10 @@ -68,8 +69,9 @@ class AbcLoader(api.Loader): null = container.createNode("null", node_name="OUT".format(name)) null.setInput(0, normal_node) - # Set display on last node - null.setDisplayFlag(True) + # Ensure display flag is on the Alembic input node and not on the OUT + # node to optimize "debug" displaying in the viewport. + alembic.setDisplayFlag(True) # Set new position for unpack node else it gets cluttered nodes = [container, alembic, unpack, normal_node, null] @@ -82,7 +84,8 @@ class AbcLoader(api.Loader): namespace, nodes, context, - self.__class__.__name__) + self.__class__.__name__, + suffix="") def update(self, container, representation): diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py index a3d67f6e5e..ac0e1f0436 100644 --- a/openpype/hosts/houdini/plugins/load/load_camera.py +++ b/openpype/hosts/houdini/plugins/load/load_camera.py @@ -1,8 +1,78 @@ from avalon import api - from avalon.houdini import pipeline, lib +ARCHIVE_EXPRESSION = '__import__("_alembic_hom_extensions").alembicGetCameraDict' + + +def transfer_non_default_values(src, dest, ignore=None): + """Copy parm from src to dest. + + Because the Alembic Archive rebuilds the entire node + hierarchy on triggering "Build Hierarchy" we want to + preserve any local tweaks made by the user on the camera + for ease of use. That could be a background image, a + resolution change or even Redshift camera parameters. + + We try to do so by finding all Parms that exist on both + source and destination node, include only those that both + are not at their default value, they must be visible, + we exclude those that have the special "alembic archive" + channel expression and ignore certain Parm types. 
+ + """ + import hou + + src.updateParmStates() + + for parm in src.allParms(): + + if ignore and parm.name() in ignore: + continue + + # If destination parm does not exist, ignore.. + dest_parm = dest.parm(parm.name()) + if not dest_parm: + continue + + # Ignore values that are currently at default + if parm.isAtDefault() and dest_parm.isAtDefault(): + continue + + if not parm.isVisible(): + # Ignore hidden parameters, assume they + # are implementation details + continue + + expression = None + try: + expression = parm.expression() + except hou.OperationFailed: + # No expression present + pass + + if expression is not None and ARCHIVE_EXPRESSION in expression: + # Assume it's part of the automated connections that the Alembic Archive + # makes on loading of the camera and thus we do not want to transfer + # the expression + continue + + # Ignore folders, separators, etc. + ignore_types = { + hou.parmTemplateType.Toggle, + hou.parmTemplateType.Menu, + hou.parmTemplateType.Button, + hou.parmTemplateType.FolderSet, + hou.parmTemplateType.Separator, + hou.parmTemplateType.Label, + } + if parm.parmTemplate().type() in ignore_types: + continue + + print("Preserving attribute: %s" % parm.name()) + dest_parm.setFromParm(parm) + + class CameraLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" @@ -30,7 +100,7 @@ class CameraLoader(api.Loader): counter = 1 asset_name = context["asset"]["name"] - namespace = namespace if namespace else asset_name + namespace = namespace or asset_name formatted = "{}_{}".format(namespace, name) if namespace else name node_name = "{0}_{1:03d}".format(formatted, counter) @@ -59,7 +129,8 @@ class CameraLoader(api.Loader): namespace, nodes, context, - self.__class__.__name__) + self.__class__.__name__, + suffix="") def update(self, container, representation): @@ -73,14 +144,40 @@ class CameraLoader(api.Loader): node.setParms({"fileName": file_path, "representation": str(representation["_id"])}) + # Store the cam 
temporarily next to the Alembic Archive + # so that we can preserve parm values the user set on it + # after build hierarchy was triggered. + old_camera = self._get_camera(node) + temp_camera = old_camera.copyTo(node.parent()) + # Rebuild node.parm("buildHierarchy").pressButton() + # Apply values to the new camera + new_camera = self._get_camera(node) + transfer_non_default_values(temp_camera, + new_camera, + # The hidden uniform scale attribute + # gets a default connection to "icon_scale" + # just skip that completely + ignore={"scale"}) + + temp_camera.destroy() + def remove(self, container): node = container["node"] node.destroy() + def _get_camera(self, node): + import hou + cameras = node.recursiveGlob("*", + filter=hou.nodeTypeFilter.ObjCamera, + include_subnets=False) + + assert len(cameras) == 1, "Camera instance must have only one camera" + return cameras[0] + def create_and_connect(self, node, node_type, name=None): """Create a node within a node which and connect it to the input @@ -93,27 +190,10 @@ class CameraLoader(api.Loader): hou.Node """ + if name: + new_node = node.createNode(node_type, node_name=name) + else: + new_node = node.createNode(node_type) - import hou - - try: - - if name: - new_node = node.createNode(node_type, node_name=name) - else: - new_node = node.createNode(node_type) - - new_node.moveToGoodPosition() - - try: - input_node = next(i for i in node.allItems() if - isinstance(i, hou.SubnetIndirectInput)) - except StopIteration: - return new_node - - new_node.setInput(0, input_node) - return new_node - - except Exception: - raise RuntimeError("Could not created node type `%s` in node `%s`" - % (node_type, node)) + new_node.moveToGoodPosition() + return new_node diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py new file mode 100644 index 0000000000..026cb07f67 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -0,0 +1,119 @@ +import os + +from 
avalon import api +from avalon.houdini import pipeline, lib + +import hou + +def get_image_avalon_container(): + """The COP2 files must be in a COP2 network. + + So we maintain a single entry point within AVALON_CONTAINERS, + just for ease of use. + + """ + + path = pipeline.AVALON_CONTAINERS + avalon_container = hou.node(path) + if not avalon_container: + # Let's create avalon container secretly + # but make sure the pipeline still is built the + # way we anticipate it was built, asserting it. + assert path == "/obj/AVALON_CONTAINERS" + + parent = hou.node("/obj") + avalon_container = parent.createNode("subnet", + node_name="AVALON_CONTAINERS") + + image_container = hou.node(path + "/IMAGES") + if not image_container: + image_container = avalon_container.createNode("cop2net", node_name="IMAGES") + image_container.moveToGoodPosition() + + return image_container + + +class ImageLoader(api.Loader): + """Specific loader of Alembic for the avalon.animation family""" + + families = ["colorbleed.imagesequence"] + label = "Load Image (COP2)" + representations = ["*"] + order = -10 + + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + + # Format file name, Houdini only wants forward slashes + file_path = os.path.normpath(self.fname) + file_path = file_path.replace("\\", "/") + file_path = self._get_file_sequence(file_path) + + # Get the root node + parent = get_image_avalon_container() + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + node = parent.createNode("file", node_name=node_name) + node.moveToGoodPosition() + + node.setParms({"filename1": file_path}) + + # Imprint it manually + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": node_name, + "namespace": namespace, + "loader": str(self.__class__.__name__), + "representation": 
str(context["representation"]["_id"]), + } + + # todo: add folder="Avalon" + lib.imprint(node, data) + + return node + + def update(self, container, representation): + + node = container["node"] + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = file_path.replace("\\", "/") + file_path = self._get_file_sequence(file_path) + + # Update attributes + node.setParms({ + "filename1": file_path, + "representation": str(representation["_id"]) + }) + + def remove(self, container): + + node = container["node"] + + # Let's clean up the IMAGES COP2 network + # if it ends up being empty and we deleted + # the last file node. Store the parent + # before we delete the node. + parent = node.parent() + + node.destroy() + + if not parent.children(): + parent.destroy() + + + + def _get_file_sequence(self, root): + files = sorted(os.listdir(root)) + + first_fname = files[0] + prefix, padding, suffix = first_fname.rsplit(".", 2) + fname = ".".join([prefix, "$F{}".format(len(padding)), suffix]) + return os.path.join(root, fname).replace("\\", "/") \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py new file mode 100644 index 0000000000..79fe3e88fd --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py @@ -0,0 +1,74 @@ +from avalon import api +from avalon.houdini import pipeline, lib + + +class USDSublayerLoader(api.Loader): + """Sublayer USD file in Solaris""" + + families = ["colorbleed.usd", + "colorbleed.pointcache", + "colorbleed.animation", + "colorbleed.camera", + "usdCamera"] + label = "Sublayer USD" + representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + order = 1 + + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + + import os + import hou + + # Format file name, Houdini only wants forward slashes + file_path = os.path.normpath(self.fname) + file_path 
= file_path.replace("\\", "/") + + # Get the root node + stage = hou.node("/stage") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create USD reference + container = stage.createNode("sublayer", node_name=node_name) + container.setParms({"filepath1": file_path}) + container.moveToGoodPosition() + + # Imprint it manually + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": node_name, + "namespace": namespace, + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + } + + # todo: add folder="Avalon" + lib.imprint(container, data) + + return container + + def update(self, container, representation): + + node = container["node"] + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = file_path.replace("\\", "/") + + # Update attributes + node.setParms({"filepath1": file_path, + "representation": str(representation["_id"])}) + + # Reload files + node.parm("reload").pressButton() + + def remove(self, container): + + node = container["node"] + node.destroy() \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py b/openpype/hosts/houdini/plugins/load/load_usd_reference.py new file mode 100644 index 0000000000..f996ccdc3c --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py @@ -0,0 +1,74 @@ +from avalon import api +from avalon.houdini import pipeline, lib + + +class USDReferenceLoader(api.Loader): + """Reference USD file in Solaris""" + + families = ["colorbleed.usd", + "colorbleed.pointcache", + "colorbleed.animation", + "colorbleed.camera", + "usdCamera"] + label = "Reference USD" + representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + order = -8 + + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, 
data=None): + + import os + import hou + + # Format file name, Houdini only wants forward slashes + file_path = os.path.normpath(self.fname) + file_path = file_path.replace("\\", "/") + + # Get the root node + stage = hou.node("/stage") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create USD reference + container = stage.createNode("reference", node_name=node_name) + container.setParms({"filepath1": file_path}) + container.moveToGoodPosition() + + # Imprint it manually + data = { + "schema": "avalon-core:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "name": node_name, + "namespace": namespace, + "loader": str(self.__class__.__name__), + "representation": str(context["representation"]["_id"]), + } + + # todo: add folder="Avalon" + lib.imprint(container, data) + + return container + + def update(self, container, representation): + + node = container["node"] + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = file_path.replace("\\", "/") + + # Update attributes + node.setParms({"filepath1": file_path, + "representation": str(representation["_id"])}) + + # Reload files + node.parm("reload").pressButton() + + def remove(self, container): + + node = container["node"] + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py new file mode 100644 index 0000000000..be2b2556f5 --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -0,0 +1,106 @@ +import os +import re +from avalon import api + +from avalon.houdini import pipeline, lib + + +class VdbLoader(api.Loader): + """Specific loader of Alembic for the avalon.animation family""" + + families = ["colorbleed.vdbcache"] + label = "Load VDB" + representations = ["vdb"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, 
namespace=None, data=None): + + import hou + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["asset"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a new geo node + container = obj.createNode("geo", node_name=node_name) + + # Remove the file node, it only loads static meshes + # Houdini 17 has removed the file node from the geo node + file_node = container.node("file1") + if file_node: + file_node.destroy() + + # Explicitly create a file node + file_node = container.createNode("file", node_name=node_name) + file_node.setParms({"file": self.format_path(self.fname)}) + + # Set display on last node + file_node.setDisplayFlag(True) + + nodes = [container, file_node] + self[:] = nodes + + return pipeline.containerise(node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="") + + def format_path(self, path): + """Format file path correctly for single vdb or vdb sequence""" + + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + # The path is either a single file or sequence in a folder. 
+ is_single_file = os.path.isfile(path) + if is_single_file: + filename = path + else: + # The path points to the publish .vdb sequence folder so we + # find the first file in there that ends with .vdb + files = sorted(os.listdir(path)) + first = next((x for x in files if x.endswith(".vdb")), None) + if first is None: + raise RuntimeError("Couldn't find first .vdb file of " + "sequence in: %s" % path) + + # Set .vdb to $F.vdb + first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first) + + filename = os.path.join(path, first) + + filename = os.path.normpath(filename) + filename = filename.replace("\\", "/") + + return filename + + def update(self, container, representation): + + node = container["node"] + try: + file_node = next(n for n in node.children() if + n.type().name() == "file") + except StopIteration: + self.log.error("Could not find node of type `alembic`") + return + + # Update the file path + file_path = api.get_representation_path(representation) + file_path = self.format_path(file_path) + + file_node.setParms({"fileName": file_path}) + + # Update attribute + node.setParms({"representation": str(representation["_id"])}) + + def remove(self, container): + + node = container["node"] + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/show_usdview.py b/openpype/hosts/houdini/plugins/load/show_usdview.py new file mode 100644 index 0000000000..b9aa0e7ddc --- /dev/null +++ b/openpype/hosts/houdini/plugins/load/show_usdview.py @@ -0,0 +1,44 @@ +from avalon import api + + +class ShowInUsdview(api.Loader): + """Open USD file in usdview""" + + families = ["colorbleed.usd"] + label = "Show in usdview" + representations = ["usd", "usda", "usdlc", "usdnc"] + order = 10 + + icon = "code-fork" + color = "white" + + def load(self, context, name=None, namespace=None, data=None): + + import os + import subprocess + + import avalon.lib as lib + + usdview = lib.which("usdview") + + filepath = os.path.normpath(self.fname) + filepath = filepath.replace("\\", "/") + + 
if not os.path.exists(filepath): + self.log.error("File does not exist: %s" % filepath) + return + + self.log.info("Start houdini variant of usdview...") + + # For now avoid some pipeline environment variables that initialize + # Avalon in Houdini as it is redundant for usdview and slows boot time + env = os.environ.copy() + env.pop("PYTHONPATH", None) + env.pop("HOUDINI_SCRIPT_PATH", None) + env.pop("HOUDINI_MENU_PATH", None) + + # Force string to avoid unicode issues + env = {str(key): str(value) for key, value in env.items()} + + subprocess.Popen([usdview, filepath, "--renderer", "GL"], + env=env) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py new file mode 100644 index 0000000000..ee8640b04e --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -0,0 +1,36 @@ +import pyblish.api +import openpype.api + + +class CollectInstanceActiveState(pyblish.api.InstancePlugin): + """Collect default active state for instance from its node bypass state. + + This is done at the very end of the CollectorOrder so that any required + collecting of data iterating over instances (with InstancePlugin) will + actually collect the data for when the user enables the state in the UI. + Otherwise potentially required data might have skipped collecting. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.299 + families = ["*"] + hosts = ["houdini"] + label = "Instance Active State" + + def process(self, instance): + + # Must have node to check for bypass state + if len(instance) == 0: + return + + # Check bypass state and reverse + node = instance[0] + active = not node.isBypassed() + + # Set instance active state + instance.data.update({ + "active": active, + # temporarily translation of `active` to `publish` till issue has + # been resolved: https://github.com/pyblish/pyblish-base/issues/307 + "publish": active + }) diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 1d664aeaeb..ed43f717c2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -6,11 +6,12 @@ from openpype.hosts.houdini.api import lib class CollectFrames(pyblish.api.InstancePlugin): - """Collect all frames which would be a resukl""" + """Collect all frames which would be saved from the ROP nodes""" order = pyblish.api.CollectorOrder label = "Collect Frames" - families = ["vdbcache"] + families = ["vdbcache", + "imagesequence"] def process(self, instance): @@ -19,10 +20,17 @@ class CollectFrames(pyblish.api.InstancePlugin): output_parm = lib.get_output_parameter(ropnode) output = output_parm.eval() + _, ext = os.path.splitext(output) file_name = os.path.basename(output) - match = re.match("(\w+)\.(\d+)\.vdb", file_name) result = file_name + # Get the filename pattern match from the output + # path so we can compute all frames that would + # come out from rendering the ROP node if there + # is a frame pattern in the name + pattern = r"\w+\.(\d+)" + re.escape(ext) + match = re.match(pattern, file_name) + start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) @@ -35,6 +43,8 @@ class CollectFrames(pyblish.api.InstancePlugin): int(start_frame), 
int(end_frame)) + # todo: `frames` currently conflicts with "explicit frames" for a + # for a custom frame list. So this should be refactored. instance.data.update({"frames": result}) def create_file_list(self, match, start_frame, end_frame): @@ -50,17 +60,24 @@ class CollectFrames(pyblish.api.InstancePlugin): """ + # Get the padding length + frame = match.group(1) + padding = len(frame) + + # Get the parts of the filename surrounding the frame number + # so we can put our own frame numbers in. + span = match.span(1) + prefix = match.string[:span[0]] + suffix = match.string[span[1]:] + + # Generate filenames for all frames result = [] + for i in range(start_frame, end_frame+1): - padding = len(match.group(2)) - name = match.group(1) - padding_format = "{number:0{width}d}" + # Format frame number by the padding amount + str_frame = "{number:0{width}d}".format(number=i, width=padding) - count = start_frame - while count <= end_frame: - str_count = padding_format.format(number=count, width=padding) - file_name = "{}.{}.vdb".format(name, str_count) + file_name = prefix + str_frame + suffix result.append(file_name) - count += 1 return result diff --git a/openpype/hosts/houdini/plugins/publish/collect_inputs.py b/openpype/hosts/houdini/plugins/publish/collect_inputs.py new file mode 100644 index 0000000000..1fafba483e --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_inputs.py @@ -0,0 +1,120 @@ +import hou + +import avalon.io as io +import avalon.api as api +import pyblish.api + + +def collect_input_containers(nodes): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded Avalon container that contains at least one of + the nodes. As such, the Avalon container is an input for it. Or in short, + there are member nodes of that container. 
+ + Returns: + list: Input avalon containers + + """ + + # Lookup by node ids + lookup = frozenset(nodes) + + containers = [] + host = api.registered_host() + for container in host.ls(): + + node = container["node"] + + # Usually the loaded containers don't have any complex references + # and the contained children should be all we need. So we disregard + # checking for .references() on the nodes. + members = set(node.allSubChildren()) + members.add(node) # include the node itself + + # If there's an intersection + if not lookup.isdisjoint(members): + containers.append(container) + + return containers + + +def iter_upstream(node): + """Yields all upstream inputs for the current node. + + This includes all `node.inputAncestors()` but also traverses through all + `node.references()` for the node itself and for any of the upstream nodes. + This method has no max-depth and will collect all upstream inputs. + + Yields: + hou.Node: The upstream nodes, including references. + + """ + + upstream = node.inputAncestors(include_ref_inputs=True, + follow_subnets=True) + + # Initialize process queue with the node's ancestors itself + queue = list(upstream) + collected = set(upstream) + + # Traverse upstream references for all nodes and yield them as we + # process the queue. + while queue: + upstream_node = queue.pop() + yield upstream_node + + # Find its references that are not collected yet. + references = upstream_node.references() + references = [n for n in references if n not in collected] + + queue.extend(references) + collected.update(references) + + # Include the references' ancestors that have not been collected yet. + for reference in references: + ancestors = reference.inputAncestors(include_ref_inputs=True, + follow_subnets=True) + ancestors = [n for n in ancestors if n not in collected] + + queue.extend(ancestors) + collected.update(ancestors) + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect source input containers used for this publish. 
+ + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.4 + hosts = ["houdini"] + + def process(self, instance): + # We can't get the "inputAncestors" directly from the ROP + # node, so we find the related output node (set in SOP/COP path) + # and include that together with its ancestors + output = instance.data.get("output_node") + + if output is None: + # If no valid output node is set then ignore it as validation + # will be checking those cases. + self.log.debug("No output node found, skipping " + "collecting of inputs..") + return + + # Collect all upstream parents + nodes = list(iter_upstream(output)) + nodes.append(output) + + # Collect containers for the given set of nodes + containers = collect_input_containers(nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputs"] = inputs + + self.log.info("Collected inputs: %s" % inputs) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 2e294face2..6b00b7cb22 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -31,6 +31,13 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): nodes = hou.node("/out").children() + + # Include instances in USD stage only when it exists so it + # remains backwards compatible with version before houdini 18 + stage = hou.node("/stage") + if stage: + nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) + for node in nodes: if not node.parm("id"): @@ -55,6 +62,8 @@ class CollectInstances(pyblish.api.ContextPlugin): # Create nice name if the instance has a frame range. 
label = data.get("name", node.name()) + label += " (%s)" % data["asset"] # include asset in name + if "frameStart" in data and "frameEnd" in data: frames = "[{frameStart} - {frameEnd}]".format(**data) label = "{} {}".format(label, frames) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py new file mode 100644 index 0000000000..a3e9400970 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py @@ -0,0 +1,155 @@ +import os + +import hou +import pyblish.api +from avalon import io +from avalon.houdini import lib +import openpype.hosts.houdini.api.usd as hou_usdlib +import openpype.lib.usdlib as usdlib + + +class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): + """Collect Instances from a ROP Network and its configured layer paths. + + The output nodes of the ROP node will only be published when *any* of the + layers remain set to 'publish' by the user. + + This works differently from most of our Avalon instances in the pipeline. + As opposed to storing `pyblish.avalon.instance` as id on the node we store + `pyblish.avalon.usdlayered`. + + Additionally this instance has no need for storing family, asset, subset + or name on the nodes. Instead all information is retrieved solely from + the output filepath, which is an Avalon URI: + avalon://{asset}/{subset}.{representation} + + Each final ROP node is considered a dependency for any of the Configured + Save Path layers it sets along the way. As such, the instances shown in + the Pyblish UI are solely the configured layers. The encapsulating usd + files are generated whenever *any* of the dependencies is published. 
+ + These dependency instances are stored in: + instance.data["publishDependencies"] + + """ + + order = pyblish.api.CollectorOrder - 0.01 + label = "Collect Instances (USD Configured Layers)" + hosts = ["houdini"] + + def process(self, context): + + stage = hou.node("/stage") + if not stage: + # Likely Houdini version <18 + return + + nodes = stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) + for node in nodes: + + if not node.parm("id"): + continue + + if node.evalParm("id") != "pyblish.avalon.usdlayered": + continue + + has_family = node.evalParm("family") + assert has_family, "'%s' is missing 'family'" % node.name() + + self.process_node(node, context) + + def sort_by_family(instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) + + # Sort/grouped by family (preserving local index) + context[:] = sorted(context, key=sort_by_family) + + return context + + def process_node(self, node, context): + + # Allow a single ROP node or a full ROP network of USD ROP nodes + # to be processed as a single entry that should "live together" on + # a publish. + if node.type().name() == "ropnet": + # All rop nodes inside ROP Network + ropnodes = node.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) + else: + # A single node + ropnodes = [node] + + data = lib.read(node) + + # Don't use the explicit "colorbleed.usd.layered" family for publishing + # instead use the "colorbleed.usd" family to integrate. + data["publishFamilies"] = ["colorbleed.usd"] + + # For now group ALL of them into USD Layer subset group + # Allow this subset to be grouped into a USD Layer on creation + data["subsetGroup"] = "USD Layer" + + instances = list() + dependencies = [] + for ropnode in ropnodes: + + # Create a dependency instance per ROP Node. 
+ lopoutput = ropnode.evalParm("lopoutput") + dependency_save_data = self.get_save_data(lopoutput) + dependency = context.create_instance(dependency_save_data["name"]) + dependency.append(ropnode) + dependency.data.update(data) + dependency.data.update(dependency_save_data) + dependency.data["family"] = "colorbleed.usd.dependency" + dependency.data["optional"] = False + dependencies.append(dependency) + + # Hide the dependency instance from the context + context.pop() + + # Get all configured layers for this USD ROP node + # and create a Pyblish instance for each one + layers = hou_usdlib.get_configured_save_layers(ropnode) + for layer in layers: + save_path = hou_usdlib.get_layer_save_path(layer) + save_data = self.get_save_data(save_path) + if not save_data: + continue + self.log.info(save_path) + + instance = context.create_instance(save_data["name"]) + instance[:] = [node] + + # Set the instance data + instance.data.update(data) + instance.data.update(save_data) + instance.data["usdLayer"] = layer + + # Don't allow the Pyblish `instanceToggled` we have installed + # to set this node to bypass. 
+ instance.data["_allowToggleBypass"] = False + + instances.append(instance) + + # Store the collected ROP node dependencies + self.log.debug("Collected dependencies: %s" % (dependencies,)) + for instance in instances: + instance.data["publishDependencies"] = dependencies + + def get_save_data(self, save_path): + + # Resolve Avalon URI + uri_data = usdlib.parse_avalon_uri(save_path) + if not uri_data: + self.log.warning("Non Avalon URI Layer Path: %s" % save_path) + return {} + + # Collect asset + subset from URI + name = "{subset} ({asset})".format(**uri_data) + fname = "{asset}_{subset}.{ext}".format(**uri_data) + + data = dict(uri_data) + data["usdSavePath"] = save_path + data["usdFilename"] = fname + data["name"] = name + return data diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index c0587d5336..e8aa701f26 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -2,13 +2,18 @@ import pyblish.api class CollectOutputSOPPath(pyblish.api.InstancePlugin): - """Collect the out node's SOP Path value.""" + """Collect the out node's SOP/COP Path value.""" order = pyblish.api.CollectorOrder families = ["pointcache", - "vdbcache"] + "camera", + "vdbcache", + "imagesequence", + "usd", + "usdrender"] + hosts = ["houdini"] - label = "Collect Output SOP Path" + label = "Collect Output Node Path" def process(self, instance): @@ -17,12 +22,43 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): node = instance[0] # Get sop path - if node.type().name() == "alembic": - sop_path_parm = "sop_path" + node_type = node.type().name() + if node_type == "geometry": + out_node = node.parm("soppath").evalAsNode() + + elif node_type == "alembic": + + # Alembic can switch between using SOP Path or object + if node.parm("use_sop_path").eval(): + out_node = node.parm("sop_path").evalAsNode() + else: + root = 
node.parm("root").eval() + objects = node.parm("objects").eval() + path = root + "/" + objects + out_node = hou.node(path) + + elif node_type == "comp": + out_node = node.parm("coppath").evalAsNode() + + elif node_type == "usd" or node_type == "usdrender": + out_node = node.parm("loppath").evalAsNode() + + elif node_type == "usd_rop" or node_type == "usdrender_rop": + # Inside Solaris e.g. /stage (not in ROP context) + # When incoming connection is present it takes it directly + inputs = node.inputs() + if inputs: + out_node = inputs[0] + else: + out_node = node.parm("loppath").evalAsNode() + else: - sop_path_parm = "soppath" + raise ValueError("ROP node type '%s' is" + " not supported." % node_type) - sop_path = node.parm(sop_path_parm).eval() - out_node = hou.node(sop_path) + if not out_node: + self.log.warning("No output node collected.") + return + self.log.debug("Output node: %s" % out_node.path()) instance.data["output_node"] = out_node diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py new file mode 100644 index 0000000000..f19b1eec2c --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -0,0 +1,137 @@ +import re +import os + +import hou +import pyblish.api + + +def get_top_referenced_parm(parm): + + processed = set() # disallow infinite loop + while True: + if parm.path() in processed: + raise RuntimeError("Parameter references result in cycle.") + + processed.add(parm.path()) + + ref = parm.getReferencedParm() + if ref.path() == parm.path(): + # It returns itself when it doesn't reference + # another parameter + return ref + else: + parm = ref + + +def evalParmNoFrame(node, parm, pad_character="#"): + + parameter = node.parm(parm) + assert parameter, "Parameter does not exist: %s.%s" % (node, parm) + + # If the parameter has a parameter reference, then get that + # parameter instead as otherwise `unexpandedString()` fails. 
+ parameter = get_top_referenced_parm(parameter) + + # Substitute out the frame numbering with padded characters + try: + raw = parameter.unexpandedString() + except hou.Error as exc: + print("Failed: %s" % parameter) + raise RuntimeError(exc) + + def replace(match): + padding = 1 + n = match.group(2) + if n and int(n): + padding = int(n) + return pad_character * padding + + expression = re.sub(r"(\$F([0-9]*))", replace, raw) + + with hou.ScriptEvalContext(parameter): + return hou.expandStringAtFrame(expression, 0) + + +class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): + """Collect USD Render Products + + Collects the instance.data["files"] for the render products. + + Provides: + instance -> files + + """ + + label = "Redshift ROP Render Products" + order = pyblish.api.CollectorOrder + 0.4 + hosts = ["houdini"] + families = ["redshift_rop"] + + def process(self, instance): + + rop = instance[0] + + # Collect chunkSize + chunk_size_parm = rop.parm("chunkSize") + if chunk_size_parm: + chunk_size = int(chunk_size_parm.eval()) + instance.data["chunkSize"] = chunk_size + self.log.debug("Chunk Size: %s" % chunk_size) + + default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix") + beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix") + render_products = [] + + # Default beauty AOV + beauty_product = self.get_render_product_name(prefix=default_prefix, + suffix=beauty_suffix) + render_products.append(beauty_product) + + num_aovs = rop.evalParm("RS_aov") + for index in range(num_aovs): + i = index + 1 + + # Skip disabled AOVs + if not rop.evalParm("RS_aovEnable_%s" % i): + continue + + aov_suffix = rop.evalParm("RS_aovSuffix_%s" % i) + aov_prefix = evalParmNoFrame(rop, "RS_aovCustomPrefix_%s" % i) + if not aov_prefix: + aov_prefix = default_prefix + + aov_product = self.get_render_product_name(aov_prefix, aov_suffix) + render_products.append(aov_product) + + for product in render_products: + self.log.debug("Found render product: %s" % product) + 
+ filenames = list(render_products) + instance.data["files"] = filenames + + def get_render_product_name(self, prefix, suffix): + """Return the output filename using the AOV prefix and suffix""" + + # When AOV is explicitly defined in prefix we just swap it out + # directly with the AOV suffix to embed it. + # Note: ${AOV} seems to be evaluated in the parameter as %AOV% + has_aov_in_prefix = "%AOV%" in prefix + if has_aov_in_prefix: + # It seems that when some special separator characters are present + # before the %AOV% token that Redshift will secretly remove it if + # there is no suffix for the current product, for example: + # foo_%AOV% -> foo.exr + pattern = "%AOV%" if suffix else "[._-]?%AOV%" + product_name = re.sub(pattern, + suffix, + prefix, + flags=re.IGNORECASE) + else: + if suffix: + # Add ".{suffix}" before the extension + prefix_base, ext = os.path.splitext(prefix) + product_name = prefix_base + "." + suffix + ext + else: + product_name = prefix + + return product_name diff --git a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py new file mode 100644 index 0000000000..8f0210129f --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py @@ -0,0 +1,30 @@ +import pyblish.api +import openpype.api + +import hou +from avalon.houdini import lib + + +class CollectRemotePublishSettings(pyblish.api.ContextPlugin): + """Collect custom settings of the Remote Publish node.""" + + order = pyblish.api.CollectorOrder + families = ["*"] + hosts = ['houdini'] + targets = ["deadline"] + label = 'Remote Publish Submission Settings' + actions = [openpype.api.RepairAction] + + def process(self, context): + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + return + + attributes = lib.read(node) + + # Debug the settings we have collected + for key, value in sorted(attributes.items()): + self.log.debug("Collected %s: %s" % (key, value)) + + 
context.data.update(attributes) diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py new file mode 100644 index 0000000000..084c00cd70 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -0,0 +1,130 @@ +import re +import os + +import hou +import pxr.UsdRender + +import avalon.io as io +import avalon.api as api +import pyblish.api + + +def get_var_changed(variable=None): + """Return changed variables and operators that use it. + + Note: `varchange` hscript states that it forces a recook of the nodes + that use Variables. That was tested in Houdini + 18.0.391. + + Args: + variable (str, Optional): A specific variable to query the operators + for. When None is provided it will return all variables that have + had recent changes and require a recook. Defaults to None. + + Returns: + dict: Variable that changed with the operators that use it. + + """ + cmd = "varchange -V" + if variable: + cmd += " {0}".format(variable) + output, errors = hou.hscript(cmd) + + changed = {} + for line in output.split("Variable: "): + if not line.strip(): + continue + + split = line.split() + var = split[0] + operators = split[1:] + changed[var] = operators + + return changed + + +class CollectRenderProducts(pyblish.api.InstancePlugin): + """Collect USD Render Products""" + + label = "Collect Render Products" + order = pyblish.api.CollectorOrder + 0.4 + hosts = ["houdini"] + families = ["colorbleed.usdrender"] + + def process(self, instance): + + node = instance.data.get("output_node") + if not node: + rop_path = instance[0].path() + raise RuntimeError("No output node found. Make sure to connect an " + "input to the USD ROP: %s" % rop_path) + + # Workaround Houdini 18.0.391 bug where $HIPNAME doesn't automatically + # update after scene save. 
+ if hou.applicationVersion() == (18, 0, 391): + self.log.debug("Checking for recook to workaround " + "$HIPNAME refresh bug...") + changed = get_var_changed("HIPNAME").get("HIPNAME") + if changed: + self.log.debug("Recooking for $HIPNAME refresh bug...") + for operator in changed: + hou.node(operator).cook(force=True) + + # Make sure to recook any 'cache' nodes in the history chain + chain = [node] + chain.extend(node.inputAncestors()) + for input_node in chain: + if input_node.type().name() == "cache": + input_node.cook(force=True) + + stage = node.stage() + + filenames = [] + for prim in stage.Traverse(): + + if not prim.IsA(pxr.UsdRender.Product): + continue + + # Get Render Product Name + product = pxr.UsdRender.Product(prim) + + # We force taking it from any random time sample as opposed to + # "default" that the USD Api falls back to since that won't return + # time sampled values if they were set per time sample. + name = product.GetProductNameAttr().Get(time=0) + dirname = os.path.dirname(name) + basename = os.path.basename(name) + + dollarf_regex = r"(\$F([0-9]?))" + frame_regex = r"^(.+\.)([0-9]+)(\.[a-zA-Z]+)$" + if re.match(dollarf_regex, basename): + # TODO: Confirm this actually is allowed USD stages and HUSK + # Substitute $F + def replace(match): + """Replace $F4 with padded #""" + padding = int(match.group(2)) if match.group(2) else 1 + return "#" * padding + + filename_base = re.sub(dollarf_regex, replace, basename) + filename = os.path.join(dirname, filename_base) + else: + # Substitute basename.0001.ext + def replace(match): + prefix, frame, ext = match.groups() + padding = "#" * len(frame) + return prefix + padding + ext + + filename_base = re.sub(frame_regex, replace, basename) + filename = os.path.join(dirname, filename_base) + filename = filename.replace("\\", "/") + + assert "#" in filename, "Couldn't resolve render product name " \ + "with frame number: %s" % name + + filenames.append(filename) + + prim_path = str(prim.GetPath()) + 
self.log.info("Collected %s name: %s" % (prim_path, filename)) + + # Filenames for Deadline + instance.data["files"] = filenames diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py new file mode 100644 index 0000000000..fbf1ef4cb1 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -0,0 +1,117 @@ +import hou + +import pyblish.api + +from avalon import io +from avalon.houdini import lib +import openpype.lib.usdlib as usdlib + + +class CollectUsdBootstrap(pyblish.api.InstancePlugin): + """Collect special Asset/Shot bootstrap instances if those are needed. + + Some specific subsets are intended to be part of the default structure + of an "Asset" or "Shot" in our USD pipeline. For example, for an Asset + we layer a Model and Shade USD file over each other and expose that in + a Asset USD file, ready to use. + + On the first publish of any of the components of a Asset or Shot the + missing pieces are bootstrapped and generated in the pipeline too. This + means that on the very first publish of your model the Asset USD file + will exist too. + + """ + + order = pyblish.api.CollectorOrder + 0.35 + label = "Collect USD Bootstrap" + hosts = ["houdini"] + families = ["colorbleed.usd", + "colorbleed.usd.layered"] + + def process(self, instance): + + # Detect whether the current subset is a subset in a pipeline + def get_bootstrap(instance): + instance_subset = instance.data["subset"] + for name, layers in usdlib.PIPELINE.items(): + if instance_subset in set(layers): + return name # e.g. 
"asset" + break + else: + return + + bootstrap = get_bootstrap(instance) + if bootstrap: + self.add_bootstrap(instance, bootstrap) + + # Check if any of the dependencies requires a bootstrap + for dependency in instance.data.get("publishDependencies", list()): + bootstrap = get_bootstrap(dependency) + if bootstrap: + self.add_bootstrap(dependency, bootstrap) + + def add_bootstrap(self, instance, bootstrap): + + self.log.debug("Add bootstrap for: %s" % bootstrap) + + asset = io.find_one({"name": instance.data["asset"], + "type": "asset"}) + assert asset, "Asset must exist: %s" % asset + + # Check which are not about to be created and don't exist yet + required = { + "shot": ["usdShot"], + "asset": ["usdAsset"] + }.get(bootstrap) + + require_all_layers = instance.data.get("requireAllLayers", False) + if require_all_layers: + # USD files load fine in usdview and Houdini even when layered or + # referenced files do not exist. So by default we don't require + # the layers to exist. + layers = usdlib.PIPELINE.get(bootstrap) + if layers: + required += list(layers) + + self.log.debug("Checking required bootstrap: %s" % required) + for subset in required: + if self._subset_exists(instance, subset, asset): + continue + + self.log.debug("Creating {0} USD bootstrap: {1} {2}".format( + bootstrap, + asset["name"], + subset + )) + + new = instance.context.create_instance(subset) + new.data["subset"] = subset + new.data["label"] = "{0} ({1})".format(subset, asset["name"]) + new.data["family"] = "colorbleed.usd.bootstrap" + new.data["comment"] = "Automated bootstrap USD file." 
+ new.data["publishFamilies"] = ["colorbleed.usd"] + + # Do not allow the user to toggle this instance + new.data["optional"] = False + + # Copy some data from the instance for which we bootstrap + for key in ["asset"]: + new.data[key] = instance.data[key] + + def _subset_exists(self, instance, subset, asset): + """Return whether subset exists in current context or in database.""" + + # Allow it to be created during this publish session + context = instance.context + for inst in context: + if ( + inst.data["subset"] == subset + and inst.data["asset"] == asset["name"] + ): + return True + + # Or, if they already exist in the database we can + # skip them too. + return bool(io.find_one({"name": subset, + "type": "subset", + "parent": asset["_id"]})) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py new file mode 100644 index 0000000000..2920b5366d --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -0,0 +1,67 @@ +import os + +import pyblish.api + +from avalon import io +from avalon.houdini import lib +import openpype.hosts.houdini.lib.usd as usdlib + +import hou +from pxr import Sdf + + +class CollectUsdLayers(pyblish.api.InstancePlugin): + """Collect the USD Layers that have configured save paths.""" + + order = pyblish.api.CollectorOrder + 0.35 + label = "Collect USD Layers" + hosts = ["houdini"] + families = ["usd"] + + def process(self, instance): + + output = instance.data.get("output_node") + if not output: + self.log.debug("No output node found..") + return + + rop_node = instance[0] + + save_layers = [] + for layer in usdlib.get_configured_save_layers(rop_node): + + info = layer.rootPrims.get("HoudiniLayerInfo") + save_path = info.customData.get("HoudiniSavePath") + creator = info.customData.get("HoudiniCreatorNode") + + self.log.debug("Found configured save path: " + "%s -> %s" % (layer, save_path)) + + # Log node that configured this save 
path + if creator: + self.log.debug("Created by: %s" % creator) + + save_layers.append((layer, save_path)) + + # Store on the instance + instance.data["usdConfiguredSavePaths"] = save_layers + + # Create configured layer instances so User can disable updating + # specific configured layers for publishing. + context = instance.context + for layer, save_path in save_layers: + name = os.path.basename(save_path) + label = "{0} -> {1}".format(instance.data["name"], name) + layer_inst = context.create_instance(name) + + family = "colorbleed.usdlayer" + layer_inst.data["family"] = family + layer_inst.data["families"] = [family] + layer_inst.data["subset"] = "__stub__" + layer_inst.data["label"] = label + layer_inst.data["asset"] = instance.data["asset"] + layer_inst.append(instance[0]) # include same USD ROP + layer_inst.append((layer, save_path)) # include layer data + + # Allow this subset to be grouped into a USD Layer on creation + layer_inst.data["subsetGroup"] = "USD Layer" diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index b251ebdc90..23f926254b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -2,6 +2,7 @@ import os import pyblish.api import openpype.api +from openpype.hosts.houdini.api.lib import render_rop class ExtractAlembic(openpype.api.Extractor): @@ -9,6 +10,7 @@ class ExtractAlembic(openpype.api.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Alembic" hosts = ["houdini"] + targets = ["local"] families = ["pointcache", "camera"] def process(self, instance): @@ -20,22 +22,15 @@ class ExtractAlembic(openpype.api.Extractor): # Get the filename from the filename parameter output = ropnode.evalParm("filename") staging_dir = os.path.dirname(output) - # instance.data["stagingDir"] = staging_dir + instance.data["stagingDir"] = staging_dir file_name = os.path.basename(output) # 
We run the render self.log.info("Writing alembic '%s' to '%s'" % (file_name, staging_dir)) - try: - ropnode.render() - except hou.Error as exc: - # The hou.Error is not inherited from a Python Exception class, - # so we explicitly capture the houdini error, otherwise pyblish - # will remain hanging. - import traceback - traceback.print_exc() - raise RuntimeError("Render failed: {0}".format(exc)) + + render_rop(ropnode) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py new file mode 100644 index 0000000000..63cee5d9c9 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -0,0 +1,36 @@ +import os + +import pyblish.api +import openpype.api + +from openpype.hosts.houdini.api.lib import render_rop + + +class ExtractComposite(openpype.api.Extractor): + + order = pyblish.api.ExtractorOrder + label = "Extract Composite (Image Sequence)" + hosts = ["houdini"] + targets = ["local"] + families = ["imagesequence"] + + def process(self, instance): + + ropnode = instance[0] + + # Get the filename from the copoutput parameter + # `.evalParm(parameter)` will make sure all tokens are resolved + output = ropnode.evalParm("copoutput") + staging_dir = os.path.dirname(output) + instance.data["stagingDir"] = staging_dir + file_name = os.path.basename(output) + + self.log.info("Writing comp '%s' to '%s'" % (file_name, staging_dir)) + + render_rop(ropnode) + + if "files" not in instance.data: + instance.data["files"] = [] + + frames = instance.data["frames"] + instance.data["files"].append(frames) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py new file mode 100644 index 0000000000..0968ba87e9 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -0,0 +1,37 @@ +import os + +import pyblish.api +import 
openpype.api +from openpype.hosts.houdini.api.lib import render_rop + + +class ExtractUSD(openpype.api.Extractor): + + order = pyblish.api.ExtractorOrder + label = "Extract USD" + hosts = ["houdini"] + targets = ["local"] + families = ["colorbleed.usd", + "usdModel", + "usdSetDress"] + + def process(self, instance): + + ropnode = instance[0] + + # Get the filename from the filename parameter + output = ropnode.evalParm("lopoutput") + staging_dir = os.path.dirname(output) + instance.data["stagingDir"] = staging_dir + file_name = os.path.basename(output) + + self.log.info("Writing USD '%s' to '%s'" % (file_name, staging_dir)) + + render_rop(ropnode) + + assert os.path.exists(output), "Output does not exist: %s" % output + + if "files" not in instance.data: + instance.data["files"] = [] + + instance.data["files"].append(file_name) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py new file mode 100644 index 0000000000..329d26bf3b --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -0,0 +1,303 @@ +import os +import contextlib +import hou +import sys +from collections import deque + +import pyblish.api +import openpype.api + +import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.hosts.houdini.api.lib import render_rop + + +class ExitStack(object): + """Context manager for dynamic management of a stack of exit callbacks + + For example: + + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception + + """ + def __init__(self): + self._exit_callbacks = deque() + + def pop_all(self): + """Preserve the context stack by transferring it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + 
self._exit_callbacks = deque() + return new_stack + + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks to __exit__ methods""" + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) + + def push(self, exit): + """Registers a callback with the standard __exit__ method signature + + Can suppress exceptions the same way __exit__ methods can. + + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = type(exit) + try: + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume its a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. + + Cannot suppress exceptions. + """ + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. 
+ """ + # We look up the special methods on the type to match the with statement + _cm_type = type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + def _fix_exception_context(new_exc, old_exc): + while 1: + exc_context = new_exc.__context__ + if exc_context in (None, frame_exc): + break + new_exc = exc_context + new_exc.__context__ = old_exc + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + exc_details = (None, None, None) + except: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + if not self._exit_callbacks: + raise + exc_details = new_exc_details + return suppressed_exc + + +@contextlib.contextmanager +def parm_values(overrides): + """Override Parameter values during the context.""" + + originals = [] + try: + for parm, value in overrides: + originals.append((parm, parm.eval())) + parm.set(value) + yield + finally: + for parm, value in originals: + # Parameter might not exist anymore so first + # check whether it's still valid + if hou.parm(parm.path()): + parm.set(value) + + +class ExtractUSDLayered(openpype.api.Extractor): + + order = pyblish.api.ExtractorOrder + label = "Extract Layered USD" + hosts = ["houdini"] + targets = ["local"] + families = ["colorbleed.usd.layered", + "usdShade"] + + # Force Output Processors so it will always save any file + # into our unique staging 
directory with processed Avalon paths + output_processors = [ + "avalon_uri_processor", + "stagingdir_processor" + ] + + def process(self, instance): + + self.log.info("Extracting: %s" % instance) + + staging_dir = self.staging_dir(instance) + fname = instance.data.get("usdFilename") + + # The individual rop nodes are collected as "publishDependencies" + dependencies = instance.data["publishDependencies"] + ropnodes = [dependency[0] for dependency in dependencies] + assert all(node.type().name() in {"usd", "usd_rop"} + for node in ropnodes) + + # Main ROP node, either a USD Rop or ROP network with multiple USD ROPs + node = instance[0] + + # Collect any output dependencies that have not been processed yet + # during extraction of other instances + outputs = [fname] + active_dependencies = [dep for dep in dependencies if + dep.data.get("publish", True) and + not dep.data.get("_isExtracted", False)] + for dependency in active_dependencies: + outputs.append(dependency.data["usdFilename"]) + + pattern = r"*[/\]{0} {0}" + save_pattern = " ".join(pattern.format(fname) for fname in outputs) + + # Run a stack of context managers before we start the render to + # temporarily adjust USD ROP settings for our publish output. + rop_overrides = { + # This sets staging directory on the processor to force our + # output files to end up in the Staging Directory. + "stagingdiroutputprocessor_stagingDir": staging_dir, + + # Force the Avalon URI Output Processor to refactor paths for + # references, payloads and layers to published paths. 
+ "avalonurioutputprocessor_use_publish_paths": True, + + # Only write out specific USD files based on our outputs + "savepattern": save_pattern + } + overrides = list() + with ExitStack() as stack: + + for ropnode in ropnodes: + manager = hou_usdlib.outputprocessors( + ropnode, + processors=self.output_processors, + disable_all_others=True + ) + stack.enter_context(manager) + + # Some of these must be added after we enter the output + # processor context manager because those parameters only + # exist when the Output Processor is added to the ROP node. + for name, value in rop_overrides.items(): + parm = ropnode.parm(name) + assert parm, "Parm not found: %s.%s" % (ropnode.path(), + name) + overrides.append((parm, value)) + + stack.enter_context(parm_values(overrides)) + + # Render the single ROP node or the full ROP network + render_rop(node) + + # Assert all output files in the Staging Directory + for output_fname in outputs: + path = os.path.join(staging_dir, output_fname) + assert os.path.exists(path), "Output file must exist: %s" % path + + # Set up the dependency for publish if they have new content + # compared to previous publishes + for dependency in active_dependencies: + dependency_fname = dependency.data["usdFilename"] + + filepath = os.path.join(staging_dir, dependency_fname) + similar = self._compare_with_latest_publish(dependency, + filepath) + if similar: + # Deactivate this dependency + self.log.debug("Dependency matches previous publish version," + " deactivating %s for publish" % dependency) + dependency.data["publish"] = False + else: + self.log.debug("Extracted dependency: %s" % dependency) + # This dependency should be published + dependency.data["files"] = [dependency_fname] + dependency.data["stagingDir"] = staging_dir + dependency.data["_isExtracted"] = True + + # Store the created files on the instance + if "files" not in instance.data: + instance.data["files"] = [] + instance.data["files"].append(fname) + + def 
_compare_with_latest_publish(self, dependency, new_file): + + from avalon import api, io + import filecmp + + _, ext = os.path.splitext(new_file) + + # Compare this dependency with the latest published version + # to detect whether we should make this into a new publish + # version. If not, skip it. + asset = io.find_one({ + "name": dependency.data["asset"], + "type": "asset" + }) + subset = io.find_one({ + "name": dependency.data["subset"], + "type": "subset", + "parent": asset["_id"] + }) + if not subset: + # Subset doesn't exist yet. Definitely new file + self.log.debug("No existing subset..") + return False + + version = io.find_one({ + "type": "version", + "parent": subset["_id"], + }, sort=[("name", -1)]) + if not version: + self.log.debug("No existing version..") + return False + + representation = io.find_one({ + "name": ext.lstrip("."), + "type": "representation", + "parent": version["_id"] + }) + if not representation: + self.log.debug("No existing representation..") + return False + + old_file = api.get_representation_path(representation) + if not os.path.exists(old_file): + return False + + return filecmp.cmp(old_file, new_file) diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index f480fe6236..d077635dfd 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -2,6 +2,7 @@ import os import pyblish.api import openpype.api +from openpype.hosts.api.houdini.lib import render_rop class ExtractVDBCache(openpype.api.Extractor): @@ -9,6 +10,7 @@ class ExtractVDBCache(openpype.api.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract VDB Cache" families = ["vdbcache"] + targets = ["local"] hosts = ["houdini"] def process(self, instance): @@ -25,15 +27,8 @@ class ExtractVDBCache(openpype.api.Extractor): file_name = os.path.basename(sop_output) self.log.info("Writing VDB '%s' to 
'%s'" % (file_name, staging_dir)) - try: - ropnode.render() - except hou.Error as exc: - # The hou.Error is not inherited from a Python Exception class, - # so we explicitly capture the houdini error, otherwise pyblish - # will remain hanging. - import traceback - traceback.print_exc() - raise RuntimeError("Render failed: {0}".format(exc)) + + render_rop(ropnode) output = instance.data["frames"] diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py new file mode 100644 index 0000000000..205599eaa3 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -0,0 +1,48 @@ +import pyblish.api +import avalon.api + +from openpype.api import version_up +from openpype.action import get_errored_plugins_from_data + + +class IncrementCurrentFile(pyblish.api.InstancePlugin): + """Increment the current file. + + Saves the current scene with an increased version number. + + """ + + label = "Increment current file" + order = pyblish.api.IntegratorOrder + 9.0 + hosts = ["houdini"] + families = ["colorbleed.usdrender", + "redshift_rop"] + targets = ["local"] + + def process(self, instance): + + # This should be a ContextPlugin, but this is a workaround + # for a bug in pyblish to run once for a family: issue #250 + context = instance.context + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + + context = instance.context + errored_plugins = get_errored_plugins_from_data(context) + if any(plugin.__name__ == "HoudiniSubmitPublishDeadline" + for plugin in errored_plugins): + raise RuntimeError("Skipping incrementing current file because " + "submission to deadline failed.") + + # Filename must not have changed since collecting + host = avalon.api.registered_host() + current_file = host.current_file() + assert context.data['currentFile'] == current_file, ( + "Collected filename from 
current scene name." + ) + + new_filepath = version_up(current_file) + host.save(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py new file mode 100644 index 0000000000..06ec711b9e --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py @@ -0,0 +1,34 @@ +import pyblish.api + +import os +import hou +from openpype.api import version_up +from openpype.action import get_errored_plugins_from_data + + +class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): + """Increment the current file. + + Saves the current scene with an increased version number. + + """ + + label = "Increment current file" + order = pyblish.api.IntegratorOrder + 9.0 + hosts = ["houdini"] + targets = ["deadline"] + + def process(self, context): + + errored_plugins = get_errored_plugins_from_data(context) + if any(plugin.__name__ == "HoudiniSubmitPublishDeadline" + for plugin in errored_plugins): + raise RuntimeError("Skipping incrementing current file because " + "submission to deadline failed.") + + current_filepath = context.data["currentFile"] + new_filepath = version_up(current_filepath) + + hou.hipFile.save(file_name=new_filepath, + save_to_recent_files=True) + diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py new file mode 100644 index 0000000000..ec97944bee --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -0,0 +1,37 @@ +import pyblish.api +import avalon.api + + +class SaveCurrentScene(pyblish.api.InstancePlugin): + """Save current scene""" + + label = "Save current file" + order = pyblish.api.IntegratorOrder - 0.49 + hosts = ["houdini"] + families = ["colorbleed.usdrender", + "redshift_rop"] + targets = ["local"] + + def process(self, instance): + + # This should be a ContextPlugin, but this is a workaround + # for a bug in 
pyblish to run once for a family: issue #250 + context = instance.context + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + + # Filename must not have changed since collecting + host = avalon.api.registered_host() + current_file = host.current_file() + assert context.data['currentFile'] == current_file, ( + "Collected filename from current scene name." + ) + + if host.has_unsaved_changes(): + self.log.info("Saving current file..") + host.save_file(current_file) + else: + self.log.debug("No unsaved changes, skipping file save..") diff --git a/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py b/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py new file mode 100644 index 0000000000..8a787025c4 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py @@ -0,0 +1,23 @@ +import pyblish.api + + +class SaveCurrentSceneDeadline(pyblish.api.ContextPlugin): + """Save current scene""" + + label = "Save current file" + order = pyblish.api.IntegratorOrder - 0.49 + hosts = ["houdini"] + targets = ["deadline"] + + def process(self, context): + import hou + + assert context.data['currentFile'] == hou.hipFile.path(), ( + "Collected filename from current scene name." 
+ ) + + if hou.hipFile.hasUnsavedChanges(): + self.log.info("Saving current file..") + hou.hipFile.save(save_to_recent_files=True) + else: + self.log.debug("No unsaved changes, skipping file save..") diff --git a/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py b/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py new file mode 100644 index 0000000000..34566f6c63 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py @@ -0,0 +1,158 @@ +import os +import json +import getpass + +from avalon import api +from avalon.vendor import requests + +import pyblish.api + +import hou + + +class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): + """Submit Solaris USD Render ROPs to Deadline + + Renders are submitted to a Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE. + + Target "local": + Even though this does *not* render locally this is seen as + a 'local' submission as it is the regular way of submitting + a Houdini render locally. 
+ + """ + + label = "Submit Render to Deadline" + order = pyblish.api.IntegratorOrder + hosts = ["houdini"] + families = ["colorbleed.usdrender", + "redshift_rop"] + targets = ["local"] + + def process(self, instance): + + context = instance.context + code = context.data["code"] + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) + comment = context.data.get("comment", "") + deadline_user = context.data.get("deadlineUser", getpass.getuser()) + jobname = "%s - %s" % (filename, instance.name) + + # Support code prefix label for batch name + batch_name = filename + if code: + batch_name = "{0} - {1}".format(code, batch_name) + + # Output driver to render + driver = instance[0] + + # StartFrame to EndFrame by byFrameStep + frames = "{start}-{end}x{step}".format( + start=int(instance.data["startFrame"]), + end=int(instance.data["endFrame"]), + step=int(instance.data["byFrameStep"]), + ) + + # Documentation for keys available at: + # https://docs.thinkboxsoftware.com + # /products/deadline/8.0/1_User%20Manual/manual + # /manual-submission.html#job-info-file-options + payload = { + "JobInfo": { + # Top-level group name + "BatchName": batch_name, + + # Job name, as seen in Monitor + "Name": jobname, + + # Arbitrary username, for visualisation in Monitor + "UserName": deadline_user, + + "Plugin": "Houdini", + "Pool": "houdini_redshift", # todo: remove hardcoded pool + "Frames": frames, + + "ChunkSize": instance.data.get("chunkSize", 10), + + "Comment": comment + }, + "PluginInfo": { + # Input + "SceneFile": filepath, + "OutputDriver": driver.path(), + + # Mandatory for Deadline + # Houdini version without patch number + "Version": hou.applicationVersionString().rsplit(".", 1)[0], + + "IgnoreInputs": True + }, + + # Mandatory for Deadline, may be empty + "AuxFiles": [] + } + + # Include critical environment variables with submission + api.Session + keys = [ + # Submit along the current Avalon tool setup that we launched + # this application with 
so the Render Slave can build its own + # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9" + "AVALON_TOOLS", + ] + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **api.Session) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) + + # Include OutputFilename entries + # The first entry also enables double-click to preview rendered + # frames from Deadline Monitor + output_data = {} + for i, filepath in enumerate(instance.data["files"]): + dirname = os.path.dirname(filepath) + fname = os.path.basename(filepath) + output_data["OutputDirectory%d" % i] = dirname.replace("\\", "/") + output_data["OutputFilename%d" % i] = fname + + # For now ensure destination folder exists otherwise HUSK + # will fail to render the output image. This is supposedly fixed + # in new production builds of Houdini + # TODO Remove this workaround with Houdini 18.0.391+ + if not os.path.exists(dirname): + self.log.info("Ensuring output directory exists: %s" % + dirname) + os.makedirs(dirname) + + payload["JobInfo"].update(output_data) + + self.submit(instance, payload) + + def submit(self, instance, payload): + + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + "http://localhost:8082") + assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + plugin = payload["JobInfo"]["Plugin"] + self.log.info("Using Render Plugin : {}".format(plugin)) + + self.log.info("Submitting..") + self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) + + # E.g. 
http://192.168.0.1:8082/api/jobs + url = "{}/api/jobs".format(AVALON_DEADLINE) + response = requests.post(url, json=payload) + if not response.ok: + raise Exception(response.text) + + # Store output dir for unified publisher (filesequence) + output_dir = os.path.dirname(instance.data["files"][0]) + instance.data["outputDir"] = output_dir + instance.data["deadlineSubmissionJob"] = response.json() diff --git a/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py b/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py new file mode 100644 index 0000000000..b9278c1a90 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py @@ -0,0 +1,152 @@ +import os +import json +import getpass + +import hou + +from avalon import api, io +from avalon.vendor import requests + +import pyblish.api + + +class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): + """Submit Houdini scene to perform a local publish in Deadline. + + Publishing in Deadline can be helpful for scenes that publish very slow. + This way it can process in the background on another machine without the + Artist having to wait for the publish to finish on their local machine. + + Submission is done through the Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE. + + """ + + label = "Submit Scene to Deadline" + order = pyblish.api.IntegratorOrder + hosts = ["houdini"] + families = ["*"] + targets = ["deadline"] + + def process(self, context): + + # Ensure no errors so far + assert all(result["success"] for result in context.data["results"]), ( + "Errors found, aborting integration..") + + # Deadline connection + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + "http://localhost:8082") + assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + # Note that `publish` data member might change in the future. 
+ # See: https://github.com/pyblish/pyblish-base/issues/307 + actives = [i for i in context if i.data["publish"]] + instance_names = sorted(instance.name for instance in actives) + + if not instance_names: + self.log.warning("No active instances found. " + "Skipping submission..") + return + + scene = context.data["currentFile"] + scenename = os.path.basename(scene) + + # Get project code + project = io.find_one({"type": "project"}) + code = project["data"].get("code", project["name"]) + + job_name = "{scene} [PUBLISH]".format(scene=scenename) + batch_name = "{code} - {scene}".format(code=code, scene=scenename) + deadline_user = "roy" # todo: get deadline user dynamically + + # Get only major.minor version of Houdini, ignore patch version + version = hou.applicationVersionString() + version = ".".join(version.split(".")[:2]) + + # Generate the payload for Deadline submission + payload = { + "JobInfo": { + "Plugin": "Houdini", + "Pool": "houdini", # todo: remove hardcoded pool + "BatchName": batch_name, + "Comment": context.data.get("comment", ""), + "Priority": 50, + "Frames": "1-1", # Always trigger a single frame + "IsFrameDependent": False, + "Name": job_name, + "UserName": deadline_user, + # "Comment": instance.context.data.get("comment", ""), + # "InitialStatus": state + + }, + "PluginInfo": { + + "Build": None, # Don't force build + "IgnoreInputs": True, + + # Inputs + "SceneFile": scene, + "OutputDriver": "/out/REMOTE_PUBLISH", + + # Mandatory for Deadline + "Version": version, + + }, + + # Mandatory for Deadline, may be empty + "AuxFiles": [] + } + + # Process submission per individual instance if the submission + # is set to publish each instance as a separate job. Else submit + # a single job to process all instances. 
+ per_instance = context.data.get("separateJobPerInstance", False) + if per_instance: + # Submit a job per instance + job_name = payload["JobInfo"]["Name"] + for instance in instance_names: + # Clarify job name per submission (include instance name) + payload["JobInfo"]["Name"] = job_name + " - %s" % instance + self.submit_job(payload, + instances=[instance], + deadline=AVALON_DEADLINE) + else: + # Submit a single job + self.submit_job(payload, + instances=instance_names, + deadline=AVALON_DEADLINE) + + def submit_job(self, payload, instances, deadline): + + # Ensure we operate on a copy, a shallow copy is fine. + payload = payload.copy() + + # Include critical environment variables with submission + api.Session + keys = [ + # Submit along the current Avalon tool setup that we launched + # this application with so the Render Slave can build its own + # similar environment using it, e.g. "houdini17.5;pluginx2.3" + "AVALON_TOOLS", + ] + + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **api.Session) + environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) + + # Submit + self.log.info("Submitting..") + self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) + + # E.g. 
http://192.168.0.1:8082/api/jobs + url = "{}/api/jobs".format(deadline) + response = requests.post(url, json=payload) + if not response.ok: + raise Exception(response.text) diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py new file mode 100644 index 0000000000..31eb3d1fb1 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -0,0 +1,116 @@ +import pyblish.api +import openpype.api + +from collections import defaultdict + + +class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): + """Validate Alembic ROP Primitive to Detail attribute is consistent. + + The Alembic ROP crashes Houdini whenever an attribute in the "Primitive to + Detail" parameter exists on only a part of the primitives that belong to + the same hierarchy path. Whenever it encounters inconsistent values, + specifically where some are empty as opposed to others then Houdini + crashes. (Tested in Houdini 17.5.229) + + """ + + order = openpype.api.ValidateContentsOrder + 0.1 + families = ["colorbleed.pointcache"] + hosts = ["houdini"] + label = "Validate Primitive to Detail (Abc)" + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError("Primitives found with inconsistent primitive " + "to detail attributes. See log.") + + @classmethod + def get_invalid(cls, instance): + + output = instance.data["output_node"] + + rop = instance[0] + pattern = rop.parm("prim_to_detail_pattern").eval().strip() + if not pattern: + cls.log.debug("Alembic ROP has no 'Primitive to Detail' pattern. " + "Validation is ignored..") + return + + build_from_path = rop.parm("build_from_path").eval() + if not build_from_path: + cls.log.debug("Alembic ROP has 'Build from Path' disabled. 
" + "Validation is ignored..") + return + + path_attr = rop.parm("path_attrib").eval() + if not path_attr: + cls.log.error("The Alembic ROP node has no Path Attribute" + "value set, but 'Build Hierarchy from Attribute'" + "is enabled.") + return [rop.path()] + + # Let's assume each attribute is explicitly named for now and has no + # wildcards for Primitive to Detail. This simplifies the check. + cls.log.debug("Checking Primitive to Detail pattern: %s" % pattern) + cls.log.debug("Checking with path attribute: %s" % path_attr) + + # Check if the primitive attribute exists + frame = instance.data.get("startFrame", 0) + geo = output.geometryAtFrame(frame) + + # If there are no primitives on the start frame then it might be + # something that is emitted over time. As such we can't actually + # validate whether the attributes exist, because they won't exist + # yet. In that case, just warn the user and allow it. + if len(geo.iterPrims()) == 0: + cls.log.warning("No primitives found on current frame. Validation" + " for Primitive to Detail will be skipped.") + return + + attrib = geo.findPrimAttrib(path_attr) + if not attrib: + cls.log.info("Geometry Primitives are missing " + "path attribute: `%s`" % path_attr) + return [output.path()] + + # Ensure at least a single string value is present + if not attrib.strings(): + cls.log.info("Primitive path attribute has no " + "string values: %s" % path_attr) + return [output.path()] + + paths = None + for attr in pattern.split(" "): + if not attr.strip(): + # Ignore empty values + continue + + # Check if the primitive attribute exists + attrib = geo.findPrimAttrib(attr) + if not attrib: + # It is allowed to not have the attribute at all + continue + + # The issue can only happen if at least one string attribute is + # present. So we ignore cases with no values whatsoever. 
+ if not attrib.strings(): + continue + + check = defaultdict(set) + values = geo.primStringAttribValues(attr) + if paths is None: + paths = geo.primStringAttribValues(path_attr) + + for path, value in zip(paths, values): + check[path].add(value) + + for path, values in check.items(): + # Whenever a single path has multiple values for the + # Primitive to Detail attribute then we consider it + # inconsistent and invalidate the ROP node's content. + if len(values) > 1: + cls.log.warning("Path has multiple values: %s (path: %s)" + % (list(values), path)) + return [output.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py new file mode 100644 index 0000000000..da79569edd --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -0,0 +1,35 @@ +import pyblish.api +import openpype.api + + +class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): + """Validate Face Sets are disabled for extraction to pointcache. + + When groups are saved as Face Sets with the Alembic these show up + as shadingEngine connections in Maya - however, with animated groups + these connections in Maya won't work as expected, it won't update per + frame. Additionally, it can break shader assignments in some cases + where it requires to first break this connection to allow a shader to + be assigned. + + It is allowed to include Face Sets, so only an issue is logged to + identify that it could introduce issues down the pipeline. 
+ + """ + + order = openpype.api.ValidateContentsOrder + 0.1 + families = ["colorbleed.pointcache"] + hosts = ["houdini"] + label = "Validate Alembic ROP Face Sets" + + def process(self, instance): + + rop = instance[0] + facesets = rop.parm("facesets").eval() + + # 0 = No Face Sets + # 1 = Save Non-Empty Groups as Face Sets + # 2 = Save All Groups As Face Sets + if facesets != 0: + self.log.warning("Alembic ROP saves 'Face Sets' for Geometry. " + "Are you sure you want this?") diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index e8596b739d..3595918765 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +import colorbleed.api class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -11,27 +11,40 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder + 0.1 - families = ["pointcache"] + order = colorbleed.api.ValidateContentsOrder + 0.1 + families = ["colorbleed.pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node incorrect") + raise RuntimeError("Primitive types found that are not supported" + "for Alembic output.") @classmethod def get_invalid(cls, instance): - invalid_nodes = ["VDB", "Volume"] + invalid_prim_types = ["VDB", "Volume"] node = instance.data["output_node"] - - prims = node.geometry().prims() - - for prim in prims: - prim_type = prim.type().name() - if prim_type in invalid_nodes: + + if not hasattr(node, "geometry"): + # In the case someone has explicitly set an Object + # node instead of a SOP node in Geometry context + # then for now we ignore - this allows us to also + # 
export object transforms. + cls.log.warning("No geometry output node found, skipping check..") + return + + frame = instance.data.get("startFrame", 0) + geo = node.geometryAtFrame(frame) + + invalid = False + for prim_type in invalid_prim_types: + if geo.countPrimType(prim_type) > 0: cls.log.error("Found a primitive which is of type '%s' !" % prim_type) - return [instance] + invalid = True + + if invalid: + return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 9118ae0e8c..c04734c684 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,5 @@ import pyblish.api -import openpype.api +import colorbleed.api class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,13 +11,18 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = openpype.api.ValidateContentsOrder - 0.1 + order = colorbleed.api.ValidateContentsOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" def process(self, instance): + if len(instance) == 0: + # Ignore instances without any nodes + # e.g. 
in memory bootstrap instances + return + invalid = self.get_invalid(instance) if invalid: rop = invalid[0] diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index ca75579267..f509c51bc6 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -6,7 +6,7 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" order = openpype.api.ValidateContentsOrder - families = ['camera'] + families = ['colorbleed.camera'] hosts = ['houdini'] label = 'Camera ROP' @@ -34,7 +34,7 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): if not camera: raise ValueError("Camera path does not exist: %s" % path) - if not camera.type().name() == "cam": + if camera.type().name() != "cam": raise ValueError("Object set in Alembic ROP is not a camera: " "%s (type: %s)" % (camera, camera.type().name())) diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py new file mode 100644 index 0000000000..51c5d07b0f --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -0,0 +1,58 @@ +import pyblish.api + + +class ValidateCopOutputNode(pyblish.api.InstancePlugin): + """Validate the instance COP Output Node. + + This will ensure: + - The COP Path is set. + - The COP Path refers to an existing object. + - The COP Path node is a COP node. + + """ + + order = pyblish.api.ValidatorOrder + families = ["colorbleed.imagesequence"] + hosts = ["houdini"] + label = "Validate COP Output Node" + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError("Output node(s) `%s` are incorrect. " + "See plug-in log for details." 
% invalid) + + @classmethod + def get_invalid(cls, instance): + + import hou + + output_node = instance.data["output_node"] + + if output_node is None: + node = instance[0] + cls.log.error("COP Output node in '%s' does not exist. " + "Ensure a valid COP output path is set." + % node.path()) + + return [node.path()] + + # Output node must be a Sop node. + if not isinstance(output_node, hou.CopNode): + cls.log.error("Output node %s is not a COP node. " + "COP Path must point to a COP node, " + "instead found category type: %s" % ( + output_node.path(), + output_node.type().category().name() + ) + ) + return [output_node.path()] + + # For the sake of completeness also assert the category type + # is Cop2 to avoid potential edge case scenarios even though + # the isinstance check above should be stricter than this category + assert output_node.type().category().name() == "Cop2", ( + "Output node %s is not of category Cop2. This is a bug.." % + output_node.path() + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py new file mode 100644 index 0000000000..5823c3eddc --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -0,0 +1,60 @@ +import os +import pyblish.api + +from openpype.hosts.houdini.api import lib + + +class ValidateFileExtension(pyblish.api.InstancePlugin): + """Validate the output file extension fits the output family. 
+ + File extensions: + - Pointcache must be .abc + - Camera must be .abc + - VDB must be .vdb + + """ + + order = pyblish.api.ValidatorOrder + families = ["pointcache", + "camera", + "vdbcache"] + hosts = ["houdini"] + label = "Output File Extension" + + family_extensions = { + "pointcache": ".abc", + "camera": ".abc", + "vdbcache": ".vdb" + } + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError("ROP node has incorrect " + "file extension: %s" % invalid) + + @classmethod + def get_invalid(cls, instance): + + # Get ROP node from instance + node = instance[0] + + # Create lookup for current family in instance + families = instance.data.get("families", list()) + family = instance.data.get("family", None) + if family: + families.append(family) + families = set(families) + + # Perform extension check + output = lib.get_output_parameter(node).eval() + _, output_extension = os.path.splitext(output) + + for family in families: + extension = cls.family_extensions.get(family, None) + if extension is None: + raise RuntimeError("Unsupported family: %s" % family) + + if output_extension != extension: + return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py new file mode 100644 index 0000000000..f55f05032d --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -0,0 +1,50 @@ +import pyblish.api + +from openpype.hosts.houdini.api import lib + + +class ValidateFrameToken(pyblish.api.InstancePlugin): + """Validate if the unexpanded string contains the frame ('$F') token + + This validator will *only* check the output parameter of the node if + the Valid Frame Range is not set to 'Render Current Frame' + + Rules: + If you render out a frame range it is mandatory to have the + frame token - '$F4' or similar - to ensure that each frame gets + written. 
If this is not the case you will override the same file
+        every time a frame is written out.
+
+    Examples:
+        Good: 'my_vdb_cache.$F4.vdb'
+        Bad: 'my_vdb_cache.vdb'
+
+    """
+
+    order = pyblish.api.ValidatorOrder
+    label = "Validate Frame Token"
+    families = ["vdbcache"]
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError("Output settings do no match for '%s'" %
+                               instance)
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        node = instance[0]
+
+        # Check trange parm, 0 means Render Current Frame
+        frame_range = node.evalParm("trange")
+        if frame_range == 0:
+            return []
+
+        output_parm = lib.get_output_parameter(node)
+        unexpanded_str = output_parm.unexpandedString()
+
+        if "$F" not in unexpanded_str:
+            cls.log.error("No frame token found in '%s'" % node.path())
+            return [instance]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py
new file mode 100644
index 0000000000..e18404b7ad
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py
@@ -0,0 +1,28 @@
+import pyblish.api
+
+
+class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin):
+    """Validate the Houdini instance runs a Commercial license.
+
+    When extracting USD files from a non-commercial Houdini license, even with
+    Houdini Indie license, the resulting files will get "scrambled" with
+    a license protection and get a special .usdnc or .usdlc suffix.
+
+    This currently breaks the Subset/representation pipeline so we disallow
+    any publish with those licenses. Only the commercial license is valid.
+ + """ + + order = pyblish.api.ValidatorOrder + families = ["usd"] + hosts = ["houdini"] + label = "Houdini Commercial License" + + def process(self, instance): + + import hou + + license = hou.licenseCategory() + if license != hou.licenseCategoryType.Commercial: + raise RuntimeError("USD Publishing requires a full Commercial " + "license. You are on: %s" % license) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index a735f4b64b..826dedf933 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,14 +1,14 @@ import pyblish.api -import openpype.api +import colorbleed.api class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = openpype.api.ValidateContentsOrder - families = ['pointcache', - 'camera', - 'vdbcache'] + order = colorbleed.api.ValidateContentsOrder + families = ['colorbleed.pointcache', + 'colorbleed.camera', + 'colorbleed.vdbcache'] hosts = ['houdini'] label = 'Create Intermediate Directories Checked' @@ -30,3 +30,5 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): result.append(node.path()) return result + + diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py new file mode 100644 index 0000000000..1a7cf9d599 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -0,0 +1,64 @@ +import pyblish.api +import openpype.api +import hou + + +def cook_in_range(node, start, end): + current = hou.intFrame() + if start >= current >= end: + # Allow cooking current frame since we're in frame range + node.cook(force=False) + else: + node.cook(force=False, frame_range=(start, start)) + + +def get_errors(node): + """Get cooking 
errors. + + If node already has errors check whether it needs to recook + If so, then recook first to see if that solves it. + + """ + if node.errors() and node.needsToCook(): + node.cook() + + return node.errors() + + +class ValidateNoErrors(pyblish.api.InstancePlugin): + """Validate the Instance has no current cooking errors.""" + + order = openpype.api.ValidateContentsOrder + hosts = ['houdini'] + label = 'Validate no errors' + + def process(self, instance): + + validate_nodes = [] + + if len(instance) > 0: + validate_nodes.append(instance[0]) + output_node = instance.data.get("output_node") + if output_node: + validate_nodes.append(output_node) + + for node in validate_nodes: + self.log.debug("Validating for errors: %s" % node.path()) + errors = get_errors(node) + + if errors: + # If there are current errors, then try an unforced cook + # to see whether the error will disappear. + self.log.debug("Recooking to revalidate error " + "is up to date for: %s" % node.path()) + current_frame = hou.intFrame() + start = instance.data.get("startFrame", current_frame) + end = instance.data.get("endFrame", current_frame) + cook_in_range(node, start=start, end=end) + + # Check for errors again after the forced recook + errors = get_errors(node) + if errors: + self.log.error(errors) + raise RuntimeError("Node has errors: %s" % node.path()) + diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 608e236198..785dd1db78 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -28,7 +28,6 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): import hou output = instance.data["output_node"] - prims = output.geometry().prims() rop = instance[0] build_from_path = rop.parm("build_from_path").eval() @@ -46,30 +45,41 @@ class 
ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): cls.log.debug("Checking for attribute: %s" % path_attr) - missing_attr = [] - invalid_attr = [] - for prim in prims: + # Check if the primitive attribute exists + frame = instance.data.get("startFrame", 0) + geo = output.geometryAtFrame(frame) - try: - path = prim.stringAttribValue(path_attr) - except hou.OperationFailed: - # Attribute does not exist. - missing_attr.append(prim) - continue + # If there are no primitives on the current frame then we can't + # check whether the path names are correct. So we'll just issue a + # warning that the check can't be done consistently and skip + # validation. + if len(geo.iterPrims()) == 0: + cls.log.warning("No primitives found on current frame. Validation" + " for primitive hierarchy paths will be skipped," + " thus can't be validated.") + return - if not path: - # Empty path value is invalid. - invalid_attr.append(prim) - continue + # Check if there are any values for the primitives + attrib = geo.findPrimAttrib(path_attr) + if not attrib: + cls.log.info("Geometry Primitives are missing " + "path attribute: `%s`" % path_attr) + return [output.path()] - if missing_attr: - cls.log.info("Prims are missing attribute `%s`" % path_attr) + # Ensure at least a single string value is present + if not attrib.strings(): + cls.log.info("Primitive path attribute has no " + "string values: %s" % path_attr) + return [output.path()] - if invalid_attr: + paths = geo.primStringAttribValues(path_attr) + # Ensure all primitives are set to a valid path + # Collect all invalid primitive numbers + invalid_prims = [i for i, path in enumerate(paths) if not path] + if invalid_prims: + num_prims = len(geo.iterPrims()) # faster than len(geo.prims()) cls.log.info("Prims have no value for attribute `%s` " "(%s of %s prims)" % (path_attr, - len(invalid_attr), - len(prims))) - - if missing_attr or invalid_attr: + len(invalid_prims), + num_prims)) return [output.path()] diff --git 
a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py new file mode 100644 index 0000000000..931acdcc2f --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -0,0 +1,43 @@ +import pyblish.api +import openpype.api + +from openpype.hosts.houdini.api import lib + +import hou + + +class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): + """Validate the remote publish out node exists for Deadline to trigger.""" + + order = pyblish.api.ValidatorOrder - 0.4 + families = ["*"] + hosts = ['houdini'] + targets = ["deadline"] + label = 'Remote Publish ROP node' + actions = [openpype.api.RepairContextAction] + + def process(self, context): + + cmd = "import colorbleed.lib; colorbleed.lib.publish_remote()" + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + raise RuntimeError("Missing REMOTE_PUBLISH node.") + + # We ensure it's a shell node and that it has the pre-render script + # set correctly. Plus the shell script it will trigger should be + # completely empty (doing nothing) + assert node.type().name() == "shell", "Must be shell ROP node" + assert node.parm("command").eval() == "", "Must have no command" + assert not node.parm("shellexec").eval(), "Must not execute in shell" + assert node.parm("prerender").eval() == cmd, ( + "REMOTE_PUBLISH node does not have correct prerender script." 
+ ) + assert node.parm("lprerender").eval() == "python", ( + "REMOTE_PUBLISH node prerender script type not set to 'python'" + ) + + @classmethod + def repair(cls, context): + """(Re)create the node if it fails to pass validation""" + lib.create_remote_publish_node(force=True) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py new file mode 100644 index 0000000000..9f486842ae --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -0,0 +1,35 @@ +import pyblish.api +import openpype.api + +import hou + + +class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): + """Validate the remote publish node is *not* bypassed.""" + + order = pyblish.api.ValidatorOrder - 0.39 + families = ["*"] + hosts = ['houdini'] + targets = ["deadline"] + label = 'Remote Publish ROP enabled' + actions = [openpype.api.RepairContextAction] + + def process(self, context): + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + raise RuntimeError("Missing REMOTE_PUBLISH node.") + + if node.isBypassed(): + raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + + @classmethod + def repair(cls, context): + """(Re)create the node if it fails to pass validation""" + + node = hou.node("/out/REMOTE_PUBLISH") + if not node: + raise RuntimeError("Missing REMOTE_PUBLISH node.") + + cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") + node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py new file mode 100644 index 0000000000..7ba9ddd534 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -0,0 +1,78 @@ +import pyblish.api + + +class ValidateSopOutputNode(pyblish.api.InstancePlugin): + """Validate the instance SOP Output Node. + + This will ensure: + - The SOP Path is set. 
+ - The SOP Path refers to an existing object. + - The SOP Path node is a SOP node. + - The SOP Path node has at least one input connection (has an input) + - The SOP Path has geometry data. + + """ + + order = pyblish.api.ValidatorOrder + families = ["pointcache", + "vdbcache"] + hosts = ["houdini"] + label = "Validate Output Node" + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError("Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid) + + @classmethod + def get_invalid(cls, instance): + + import hou + + output_node = instance.data["output_node"] + + if output_node is None: + node = instance[0] + cls.log.error("SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." + % node.path()) + + return [node.path()] + + # Output node must be a Sop node. + if not isinstance(output_node, hou.SopNode): + cls.log.error("Output node %s is not a SOP node. " + "SOP Path must point to a SOP node, " + "instead found category type: %s" % ( + output_node.path(), + output_node.type().category().name() + ) + ) + return [output_node.path()] + + # For the sake of completeness also assert the category type + # is Sop to avoid potential edge case scenarios even though + # the isinstance check above should be stricter than this category + assert output_node.type().category().name() == "Sop", ( + "Output node %s is not of category Sop. This is a bug.." % + output_node.path() + ) + + # Ensure the node is cooked and succeeds to cook so we can correctly + # check for its geometry data. + if output_node.needsToCook(): + cls.log.debug("Cooking node: %s" % output_node.path()) + try: + output_node.cook() + except hou.Error as exc: + cls.log.error("Cook failed: %s" % exc) + cls.log.error(output_node.errors()[0]) + return [output_node.path()] + + # Ensure the output node has at least Geometry data + if not output_node.geometry(): + cls.log.error("Output node `%s` has no geometry data." 
+ % output_node.path()) + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py new file mode 100644 index 0000000000..a21e5c267f --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -0,0 +1,51 @@ +import pyblish.api + +import openpype.hosts.houdini.api.usd as hou_usdlib + + +class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): + """Validate USD loaded paths have no backslashes. + + This is a crucial validation for HUSK USD rendering as Houdini's + USD Render ROP will fail to write out a .usd file for rendering that + correctly preserves the backslashes, e.g. it will incorrectly convert a + '\t' to a TAB character disallowing HUSK to find those specific files. + + This validation is redundant for usdModel since that flattens the model + before write. As such it will never have any used layers with a path. + + """ + + order = pyblish.api.ValidatorOrder + families = ["usdSetDress", + "usdShade", + "usd", + "usdrender"] + hosts = ["houdini"] + label = "USD Layer path backslashes" + optional = True + + def process(self, instance): + + rop = instance[0] + lop_path = hou_usdlib.get_usd_rop_loppath(rop) + stage = lop_path.stage(apply_viewport_overrides=False) + + invalid = [] + for layer in stage.GetUsedLayers(): + references = layer.externalReferences + + for ref in references: + + # Ignore anonymous layers + if ref.startswith("anon:"): + continue + + # If any backslashes in the path consider it invalid + if "\\" in ref: + self.log.error("Found invalid path: %s" % ref) + invalid.append(layer) + + if invalid: + raise RuntimeError("Loaded layers have backslashes. 
" + "This is invalid for HUSK USD rendering.") diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py new file mode 100644 index 0000000000..4fe4322bb3 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -0,0 +1,75 @@ +import pyblish.api + +import openpype.hosts.houdini.api.usd as hou_usdlib + + +from pxr import UsdShade, UsdRender, UsdLux + + +def fullname(o): + """Get fully qualified class name""" + module = o.__module__ + if module is None or module == str.__module__: + return o.__name__ + return module + '.' + o.__name__ + + +class ValidateUsdModel(pyblish.api.InstancePlugin): + """Validate USD Model. + + Disallow Shaders, Render settings, products and vars and Lux lights. + + """ + + order = pyblish.api.ValidatorOrder + families = ["usdModel"] + hosts = ["houdini"] + label = "Validate USD Model" + optional = True + + disallowed = [ + UsdShade.Shader, + UsdRender.Settings, + UsdRender.Product, + UsdRender.Var, + UsdLux.Light + ] + + def process(self, instance): + + rop = instance[0] + lop_path = hou_usdlib.get_usd_rop_loppath(rop) + stage = lop_path.stage(apply_viewport_overrides=False) + + invalid = [] + for prim in stage.Traverse(): + + for klass in self.disallowed: + if klass(prim): + # Get full class name without pxr. prefix + name = fullname(klass).split("pxr.", 1)[-1] + path = str(prim.GetPath()) + self.log.warning("Disallowed %s: %s" % (name, path)) + + invalid.append(prim) + + if invalid: + prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) + raise RuntimeError("Found invalid primitives: %s" % prim_paths) + + +class ValidateUsdShade(ValidateUsdModel): + """Validate usdShade. + + Disallow Render settings, products, vars and Lux lights. 
+
+    """
+    families = ["usdShade"]
+    label = "Validate USD Shade"
+
+    disallowed = [
+        UsdRender.Settings,
+        UsdRender.Product,
+        UsdRender.Var,
+        UsdLux.Light
+    ]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py
new file mode 100644
index 0000000000..0960129819
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py
@@ -0,0 +1,50 @@
+import pyblish.api
+
+
+class ValidateUSDOutputNode(pyblish.api.InstancePlugin):
+    """Validate the instance USD LOPs Output Node.
+
+    This will ensure:
+        - The LOP Path is set.
+        - The LOP Path refers to an existing object.
+        - The LOP Path node is a LOP node.
+
+    """
+
+    order = pyblish.api.ValidatorOrder
+    families = ["colorbleed.usd"]
+    hosts = ["houdini"]
+    label = "Validate Output Node (USD)"
+
+    def process(self, instance):
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError("Output node(s) `%s` are incorrect. "
+                               "See plug-in log for details." % invalid)
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        import hou
+
+        output_node = instance.data["output_node"]
+
+        if output_node is None:
+            node = instance[0]
+            cls.log.error("USD node '%s' LOP path does not exist. "
+                          "Ensure a valid LOP path is set."
+                          % node.path())
+
+            return [node.path()]
+
+        # Output node must be a LOP node.
+        if not isinstance(output_node, hou.LopNode):
+            cls.log.error("Output node %s is not a LOP node. 
" + "LOP Path must point to a LOP node, " + "instead found category type: %s" % ( + output_node.path(), + output_node.type().category().name() + ) + ) + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py new file mode 100644 index 0000000000..18231a9605 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -0,0 +1,30 @@ +import pyblish.api + +import os + + +class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): + """Validate USD Render Product names are correctly set absolute paths.""" + + order = pyblish.api.ValidatorOrder + families = ["colorbleed.usdrender"] + hosts = ["houdini"] + label = "Validate USD Render Product Names" + optional = True + + def process(self, instance): + + invalid = [] + for filepath in instance.data["files"]: + + if not filepath: + invalid.append("Detected empty output filepath.") + + if not os.path.isabs(filepath): + invalid.append("Output file path is not " + "absolute path: %s" % filepath) + + if invalid: + for message in invalid: + self.log.error(message) + raise RuntimeError("USD Render Paths are invalid.") diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py new file mode 100644 index 0000000000..8af53fa617 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -0,0 +1,51 @@ +import pyblish.api + +import openpype.hosts.houdini.api.usd as hou_usdlib + + +class ValidateUsdSetDress(pyblish.api.InstancePlugin): + """Validate USD Set Dress. + + Must only have references or payloads. May not generate new mesh or + flattened meshes. 
+
+    """
+
+    order = pyblish.api.ValidatorOrder
+    families = ["usdSetDress"]
+    hosts = ["houdini"]
+    label = "Validate USD Set Dress"
+    optional = True
+
+    def process(self, instance):
+
+        from pxr import UsdGeom
+
+        rop = instance[0]
+        lop_path = hou_usdlib.get_usd_rop_loppath(rop)
+        stage = lop_path.stage(apply_viewport_overrides=False)
+
+        invalid = []
+        for node in stage.Traverse():
+
+            if UsdGeom.Mesh(node):
+                # This solely checks whether there is any USD involved
+                # in this Prim's Stack and doesn't accurately tell us
+                # whether it was generated locally or not.
+                # TODO: More accurately track whether the Prim was created
+                # in the local scene
+                stack = node.GetPrimStack()
+                for sdf in stack:
+                    path = sdf.layer.realPath
+                    if path:
+                        break
+                else:
+                    prim_path = node.GetPath()
+                    self.log.error("%s is not referenced geometry." %
+                                   prim_path)
+                    invalid.append(node)
+
+        if invalid:
+            raise RuntimeError("SetDress contains local geometry. "
+                               "This is not allowed, it must be an assembly "
+                               "of referenced assets.")
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
new file mode 100644
index 0000000000..3de18fd9b4
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
@@ -0,0 +1,36 @@
+import re
+
+import pyblish.api
+import openpype.api
+
+from avalon import io
+
+
+class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
+    """Validate that a matching usdModel subset exists for the usdShade instance."""
+
+    order = openpype.api.ValidateContentsOrder
+    hosts = ['houdini']
+    families = ["usdShade"]
+    label = 'USD Shade model exists'
+
+    def process(self, instance):
+
+        asset = instance.data["asset"]
+        subset = instance.data["subset"]
+
+        # Assume shading variation starts after a dot separator
+        shade_subset = subset.split(".", 1)[0]
+        model_subset = re.sub("^usdShade", "usdModel", shade_subset)
+
+        asset_doc = io.find_one({"name": 
asset, + "type": "asset"}) + if not asset_doc: + raise RuntimeError("Asset does not exist: %s" % asset) + + subset_doc = io.find_one({"name": model_subset, + "type": "subset", + "parent": asset_doc["_id"]}) + if not subset_doc: + raise RuntimeError("USD Model subset not found: " + "%s (%s)" % (model_subset, asset)) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py new file mode 100644 index 0000000000..3220159508 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -0,0 +1,60 @@ +import re + +import pyblish.api +import openpype.api + +from avalon import io +import hou + + +class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): + """Validate USD Shading Workspace is correct version. + + There have been some issues with outdated/erroneous Shading Workspaces + so this is to confirm everything is set as it should. + + """ + + order = openpype.api.ValidateContentsOrder + hosts = ["houdini"] + families = ["usdShade"] + label = "USD Shade Workspace" + + def process(self, instance): + + rop = instance[0] + workspace = rop.parent() + + definition = workspace.type().definition() + name = definition.nodeType().name() + library = definition.libraryFilePath() + + all_definitions = hou.hda.definitionsInFile(library) + node_type, version = name.rsplit(":", 1) + version = float(version) + + highest = version + for other_definition in all_definitions: + other_name = other_definition.nodeType().name() + other_node_type, other_version = other_name.rsplit(":", 1) + other_version = float(other_version) + + if node_type != other_node_type: + continue + + # Get highest version + highest = max(highest, other_version) + + if version != highest: + raise RuntimeError("Shading Workspace is not the latest version." + " Found %s. Latest is %s." 
% (version, highest))
+
+        # There were some issues with the editable node not having the right
+        # configured path. So for now let's assure that is correct too.
+        value = ('avalon://`chs("../asset_name")`/'
+                 'usdShade`chs("../model_variantname1")`.usd')
+        rop_value = rop.parm("lopoutput").rawValue()
+        if rop_value != value:
+            raise RuntimeError("Shading Workspace has invalid 'lopoutput'"
+                               " parameter value. The Shading Workspace"
+                               " needs to be reset to its default values.")
diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
new file mode 100644
index 0000000000..d3894ee41d
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py
@@ -0,0 +1,68 @@
+import pyblish.api
+import openpype.api
+import hou
+
+
+class ValidateVDBOutputNode(pyblish.api.InstancePlugin):
+    """Validate that the node connected to the output node is of type VDB
+
+    Regardless of the amount of VDBs created, the output will need to have an
+    equal amount of VDBs, points, primitives and vertices
+
+    A VDB is an inherited type of Prim, holds the following data:
+        - Primitives: 1
+        - Points: 1
+        - Vertices: 1
+        - VDBs: 1
+
+    """
+
+    order = openpype.api.ValidateContentsOrder + 0.1
+    families = ["colorbleed.vdbcache"]
+    hosts = ["houdini"]
+    label = "Validate Output Node (VDB)"
+
+    def process(self, instance):
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise RuntimeError("Node connected to the output node is not"
+                               " of type VDB!")
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        node = instance.data["output_node"]
+        if node is None:
+            cls.log.error("SOP path is not correctly set on "
+                          "ROP node '%s'." % instance[0].path())
+            return [instance]
+
+        frame = instance.data.get("startFrame", 0)
+        geometry = node.geometryAtFrame(frame)
+        if geometry is None:
+            # No geometry data on this node, maybe the node hasn't cooked? 
+ cls.log.error("SOP node has no geometry data. " + "Is it cooked? %s" % node.path()) + return [node] + + prims = geometry.prims() + nr_of_prims = len(prims) + + # All primitives must be hou.VDB + invalid_prim = False + for prim in prims: + if not isinstance(prim, hou.VDB): + cls.log.error("Found non-VDB primitive: %s" % prim) + invalid_prim = True + if invalid_prim: + return [instance] + + nr_of_points = len(geometry.points()) + if nr_of_points != nr_of_prims: + cls.log.error("The number of primitives and points do not match") + return [instance] + + for prim in prims: + if prim.numVertices() != 1: + cls.log.error("Found primitive with more than 1 vertex!") + return [instance] diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py new file mode 100644 index 0000000000..69e3be50da --- /dev/null +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py new file mode 100644 index 0000000000..4071eb3e0c --- /dev/null +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py @@ -0,0 +1,168 @@ +import hou +import husdoutputprocessors.base as base +import os +import re +import logging + +import colorbleed.usdlib as usdlib + + +def _get_project_publish_template(): + """Return publish template from database for current project""" + from avalon import io + project = io.find_one({"type": "project"}, + projection={"config.template.publish": True}) + return project["config"]["template"]["publish"] + + +class AvalonURIOutputProcessor(base.OutputProcessorBase): + """Process Avalon URIs into their full path equivalents. 
+ + """ + + _parameters = None + _param_prefix = 'avalonurioutputprocessor_' + _parms = { + "use_publish_paths": _param_prefix + "use_publish_paths" + } + + def __init__(self): + """ There is only one object of each output processor class that is + ever created in a Houdini session. Therefore be very careful + about what data gets put in this object. + """ + self._template = None + self._use_publish_paths = False + self._cache = dict() + + def displayName(self): + return 'Avalon URI Output Processor' + + def parameters(self): + + if not self._parameters: + parameters = hou.ParmTemplateGroup() + use_publish_path = hou.ToggleParmTemplate( + name=self._parms["use_publish_paths"], + label='Resolve Reference paths to publish paths', + default_value=False, + help=("When enabled any paths for Layers, References or " + "Payloads are resolved to published master versions.\n" + "This is usually only used by the publishing pipeline, " + "but can be used for testing too.")) + parameters.append(use_publish_path) + self._parameters = parameters.asDialogScript() + + return self._parameters + + def beginSave(self, config_node, t): + self._template = _get_project_publish_template() + + parm = self._parms["use_publish_paths"] + self._use_publish_paths = config_node.parm(parm).evalAtTime(t) + self._cache.clear() + + def endSave(self): + self._template = None + self._use_publish_paths = None + self._cache.clear() + + def processAsset(self, + asset_path, + asset_path_for_save, + referencing_layer_path, + asset_is_layer, + for_save): + """ + Args: + asset_path (str): The incoming file path you want to alter or not. + asset_path_for_save (bool): Whether the current path is a + referenced path in the USD file. When True, return the path + you want inside USD file. + referencing_layer_path (str): ??? + asset_is_layer (bool): Whether this asset is a USD layer file. + If this is False, the asset is something else (for example, + a texture or volume file). 
+ for_save (bool): Whether the asset path is for a file to be saved + out. If so, then return actual written filepath. + + Returns: + The refactored asset path. + + """ + + # Retrieve from cache if this query occurred before (optimization) + cache_key = (asset_path, asset_path_for_save, asset_is_layer, for_save) + if cache_key in self._cache: + return self._cache[cache_key] + + relative_template = "{asset}_{subset}.{ext}" + uri_data = usdlib.parse_avalon_uri(asset_path) + if uri_data: + + if for_save: + # Set save output path to a relative path so other + # processors can potentially manage it easily? + path = relative_template.format(**uri_data) + + print("Avalon URI Resolver: %s -> %s" % (asset_path, path)) + self._cache[cache_key] = path + return path + + if self._use_publish_paths: + # Resolve to an Avalon published asset for embedded paths + path = self._get_usd_master_path(**uri_data) + else: + path = relative_template.format(**uri_data) + + print("Avalon URI Resolver: %s -> %s" % (asset_path, path)) + self._cache[cache_key] = path + return path + + self._cache[cache_key] = asset_path + return asset_path + + def _get_usd_master_path(self, + asset, + subset, + ext): + """Get the filepath for a .usd file of a subset. + + This will return the path to an unversioned master file generated by + `usd_master_file.py`. 
+ + """ + + from avalon import api, io + + PROJECT = api.Session["AVALON_PROJECT"] + asset_doc = io.find_one({"name": asset, + "type": "asset"}) + if not asset_doc: + raise RuntimeError("Invalid asset name: '%s'" % asset) + + root = api.registered_root() + path = self._template.format(**{ + "root": root, + "project": PROJECT, + "silo": asset_doc["silo"], + "asset": asset_doc["name"], + "subset": subset, + "representation": ext, + "version": 0 # stub version zero + }) + + # Remove the version folder + subset_folder = os.path.dirname(os.path.dirname(path)) + master_folder = os.path.join(subset_folder, "master") + fname = "{0}.{1}".format(subset, ext) + + return os.path.join(master_folder, fname).replace("\\", "/") + + +output_processor = AvalonURIOutputProcessor() + + +def usdOutputProcessor(): + return output_processor + diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py new file mode 100644 index 0000000000..d8e36d5aa8 --- /dev/null +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/stagingdir_processor.py @@ -0,0 +1,90 @@ +import hou +import husdoutputprocessors.base as base +import os + + +class StagingDirOutputProcessor(base.OutputProcessorBase): + """Output all USD Rop file nodes into the Staging Directory + + Ignore any folders and paths set in the Configured Layers + and USD Rop node, just take the filename and save into a + single directory. 
+ + """ + theParameters = None + parameter_prefix = "stagingdiroutputprocessor_" + stagingdir_parm_name = parameter_prefix + "stagingDir" + + def __init__(self): + self.staging_dir = None + + def displayName(self): + return 'StagingDir Output Processor' + + def parameters(self): + if not self.theParameters: + parameters = hou.ParmTemplateGroup() + rootdirparm = hou.StringParmTemplate( + self.stagingdir_parm_name, + 'Staging Directory', 1, + string_type=hou.stringParmType.FileReference, + file_type=hou.fileType.Directory + ) + parameters.append(rootdirparm) + self.theParameters = parameters.asDialogScript() + return self.theParameters + + def beginSave(self, config_node, t): + + # Use the Root Directory parameter if it is set. + root_dir_parm = config_node.parm(self.stagingdir_parm_name) + if root_dir_parm: + self.staging_dir = root_dir_parm.evalAtTime(t) + + if not self.staging_dir: + out_file_parm = config_node.parm('lopoutput') + if out_file_parm: + self.staging_dir = out_file_parm.evalAtTime(t) + if self.staging_dir: + (self.staging_dir, filename) = os.path.split(self.staging_dir) + + def endSave(self): + self.staging_dir = None + + def processAsset(self, asset_path, + asset_path_for_save, + referencing_layer_path, + asset_is_layer, + for_save): + """ + Args: + asset_path (str): The incoming file path you want to alter or not. + asset_path_for_save (bool): Whether the current path is a + referenced path in the USD file. When True, return the path + you want inside USD file. + referencing_layer_path (str): ??? + asset_is_layer (bool): Whether this asset is a USD layer file. + If this is False, the asset is something else (for example, + a texture or volume file). + for_save (bool): Whether the asset path is for a file to be saved + out. If so, then return actual written filepath. + + Returns: + The refactored asset path. + + """ + + # Treat save paths as being relative to the output path. 
+ if for_save and self.staging_dir: + # Whenever we're processing a Save Path make sure to + # resolve it to the Staging Directory + filename = os.path.basename(asset_path) + return os.path.join(self.staging_dir, filename) + + return asset_path + + +output_processor = StagingDirOutputProcessor() +def usdOutputProcessor(): + return output_processor + diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py new file mode 100644 index 0000000000..cc036a9491 --- /dev/null +++ b/openpype/lib/usdlib.py @@ -0,0 +1,347 @@ +import os +import re +import logging + +try: + from pxr import Usd, UsdGeom, Sdf, Kind +except ImportError: + # Allow to fall back on Multiverse 6.3.0+ pxr usd library + from mvpxr import Usd, UsdGeom, Sdf, Kind + +from avalon import io, api + +log = logging.getLogger(__name__) + + +# The predefined steps order used for bootstrapping USD Shots and Assets. +# These are ordered in order from strongest to weakest opinions, like in USD. +PIPELINE = { + "shot": ["usdLighting", + "usdFx", + "usdSimulation", + "usdAnimation", + "usdLayout"], + "asset": ["usdShade", + "usdModel"] +} + + +def create_asset(filepath, + asset_name, + reference_layers, + kind=Kind.Tokens.component): + """ + Creates an asset file that consists of a top level layer and sublayers for + shading and geometry. + + Args: + filepath (str): Filepath where the asset.usd file will be saved. + reference_layers (list): USD Files to reference in the asset. + Note that the bottom layer (first file, like a model) would + be last in the list. The strongest layer will be the first + index. + asset_name (str): The name for the Asset identifier and default prim. + kind (pxr.Kind): A USD Kind for the root asset. 
+ + """ + # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example + + log.info("Creating asset at %s", filepath) + + # Make the layer ascii - good for readability, plus the file is small + root_layer = Sdf.Layer.CreateNew(filepath, args={'format': 'usda'}) + stage = Usd.Stage.Open(root_layer) + + # Define a prim for the asset and make it the default for the stage. + asset_prim = UsdGeom.Xform.Define(stage, '/%s' % asset_name).GetPrim() + stage.SetDefaultPrim(asset_prim) + + # Let viewing applications know how to orient a free camera properly + UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + + # Usually we will "loft up" the kind authored into the exported geometry + # layer rather than re-stamping here; we'll leave that for a later + # tutorial, and just be explicit here. + model = Usd.ModelAPI(asset_prim) + if kind: + model.SetKind(kind) + + model.SetAssetName(asset_name) + model.SetAssetIdentifier('%s/%s.usd' % (asset_name, asset_name)) + + # Add references to the asset prim + references = asset_prim.GetReferences() + for reference_filepath in reference_layers: + references.AddReference(reference_filepath) + + stage.GetRootLayer().Save() + + +def create_shot(filepath, layers, create_layers=False): + """Create a shot with separate layers for departments. + + Args: + filepath (str): Filepath where the asset.usd file will be saved. + layers (str): When provided this will be added verbatim in the + subLayerPaths layers. When the provided layer paths do not exist + they are generated using Sdf.Layer.CreateNew + create_layers (bool): Whether to create the stub layers on disk if + they do not exist yet. + + Returns: + str: The saved shot file path + + """ + # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example + + stage = Usd.Stage.CreateNew(filepath) + log.info("Creating shot at %s" % filepath) + + for layer_path in layers: + if create_layers and not os.path.exists(layer_path): + # We use the Sdf API here to quickly create layers. 
Also, we're + # using it as a way to author the subLayerPaths as there is no + # way to do that directly in the Usd API. + layer_folder = os.path.dirname(layer_path) + if not os.path.exists(layer_folder): + os.makedirs(layer_folder) + + Sdf.Layer.CreateNew(layer_path) + + stage.GetRootLayer().subLayerPaths.append(layer_path) + + # Lets viewing applications know how to orient a free camera properly + UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + stage.GetRootLayer().Save() + + return filepath + + +def create_model(filename, asset, variant_subsets): + """Create a USD Model file. + + For each of the variation paths it will payload the path and set its + relevant variation name. + + """ + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + assert asset_doc, "Asset not found: %s" % asset + + variants = [] + for subset in variant_subsets: + prefix = "usdModel" + if subset.startswith(prefix): + # Strip off `usdModel_` + variant = subset[len(prefix):] + else: + raise ValueError("Model subsets must start " + "with usdModel: %s" % subset) + + path = get_usd_master_path(asset=asset_doc, + subset=subset, + representation="usd") + variants.append((variant, path)) + + stage = _create_variants_file(filename, + variants=variants, + variantset="model", + variant_prim="/root", + reference_prim="/root/geo", + as_payload=True) + + UsdGeom.SetStageMetersPerUnit(stage, 1) + UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) + + # modelAPI = Usd.ModelAPI(root_prim) + # modelAPI.SetKind(Kind.Tokens.component) + + # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details + # for more on assetInfo + # modelAPI.SetAssetName(asset) + # modelAPI.SetAssetIdentifier(asset) + + stage.GetRootLayer().Save() + + +def create_shade(filename, asset, variant_subsets): + """Create a master USD shade file for an asset. + + For each available model variation this should generate a reference + to a `usdShade_{modelVariant}` subset. 
+ + """ + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + assert asset_doc, "Asset not found: %s" % asset + + variants = [] + + for subset in variant_subsets: + prefix = "usdModel" + if subset.startswith(prefix): + # Strip off `usdModel_` + variant = subset[len(prefix):] + else: + raise ValueError("Model subsets must start " + "with usdModel: %s" % subset) + + shade_subset = re.sub("^usdModel", "usdShade", subset) + path = get_usd_master_path(asset=asset_doc, + subset=shade_subset, + representation="usd") + variants.append((variant, path)) + + stage = _create_variants_file(filename, + variants=variants, + variantset="model", + variant_prim="/root") + + stage.GetRootLayer().Save() + + +def create_shade_variation(filename, + asset, + model_variant, + shade_variants): + """Create the master Shade file for a specific model variant. + + This should reference all shade variants for the specific model variant. + + """ + + asset_doc = io.find_one({"name": asset, "type": "asset"}) + assert asset_doc, "Asset not found: %s" % asset + + variants = [] + for variant in shade_variants: + subset = "usdShade_{model}_{shade}".format(model=model_variant, + shade=variant) + path = get_usd_master_path(asset=asset_doc, + subset=subset, + representation="usd") + variants.append((variant, path)) + + stage = _create_variants_file(filename, + variants=variants, + variantset="shade", + variant_prim="/root") + + stage.GetRootLayer().Save() + + +def _create_variants_file(filename, + variants, + variantset, + default_variant=None, + variant_prim="/root", + reference_prim=None, + set_default_variant=True, + as_payload=False, + skip_variant_on_single_file=True): + + root_layer = Sdf.Layer.CreateNew(filename, args={'format': 'usda'}) + stage = Usd.Stage.Open(root_layer) + + root_prim = stage.DefinePrim(variant_prim) + stage.SetDefaultPrim(root_prim) + + def _reference(path): + """Reference/Payload path depending on function arguments""" + + if reference_prim: + prim = 
stage.DefinePrim(reference_prim) + else: + prim = root_prim + + if as_payload: + # Payload + prim.GetPayloads().AddPayload(Sdf.Payload(path)) + else: + # Reference + prim.GetReferences().AddReference(Sdf.Reference(path)) + + assert variants, "Must have variants, got: %s" % variants + + log.info(filename) + + if skip_variant_on_single_file and len(variants) == 1: + # Reference directly, no variants + variant_path = variants[0][1] + _reference(variant_path) + + log.info("Non-variants..") + log.info("Path: %s" % variant_path) + + else: + # Variants + append = Usd.ListPositionBackOfAppendList + variant_set = root_prim.GetVariantSets().AddVariantSet(variantset, + append) + + for variant, variant_path in variants: + + if default_variant is None: + default_variant = variant + + variant_set.AddVariant(variant, append) + variant_set.SetVariantSelection(variant) + with variant_set.GetVariantEditContext(): + _reference(variant_path) + + log.info("Variants..") + log.info("Variant: %s" % variant) + log.info("Path: %s" % variant_path) + + if set_default_variant: + variant_set.SetVariantSelection(default_variant) + + return stage + + +def get_usd_master_path(asset, + subset, + representation): + """Get the filepath for a .usd file of a subset. + + This will return the path to an unversioned master file generated by + `usd_master_file.py`. 
+ + """ + + project = io.find_one({"type": "project"}, + projection={"config.template.publish": True}) + template = project["config"]["template"]["publish"] + + if isinstance(asset, dict) and "silo" in asset and "name" in asset: + # Allow explicitly passing asset document + asset_doc = asset + else: + asset_doc = io.find_one({"name": asset, + "type": "asset"}) + + path = template.format(**{ + "root": api.registered_root(), + "project": api.Session["AVALON_PROJECT"], + "silo": asset_doc["silo"], + "asset": asset_doc["name"], + "subset": subset, + "representation": representation, + "version": 0 # stub version zero + }) + + # Remove the version folder + subset_folder = os.path.dirname(os.path.dirname(path)) + master_folder = os.path.join(subset_folder, "master") + fname = "{0}.{1}".format(subset, representation) + + return os.path.join(master_folder, fname).replace("\\", "/") + + +def parse_avalon_uri(uri): + # URI Pattern: avalon://{asset}/{subset}.{ext} + pattern = r"avalon://(?P[^/.]*)/(?P[^/]*)\.(?P.*)" + if uri.startswith("avalon://"): + match = re.match(pattern, uri) + if match: + return match.groupdict() From c85bd30e1b01aa543f8a14a5b03de2a01f8fbcd7 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 12:58:16 +0200 Subject: [PATCH 019/308] =?UTF-8?q?hound=20cleanup=20=F0=9F=90=B6?= =?UTF-8?q?=F0=9F=A7=BD=F0=9F=A7=BA=20I.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/__init__.py | 23 +++++-------- openpype/hosts/houdini/api/usd.py | 3 +- .../plugins/create/create_redshift_rop.py | 2 +- .../plugins/create/create_usd_model.py | 1 - .../plugins/create/create_usd_workspaces.py | 1 - .../hosts/houdini/plugins/load/load_camera.py | 34 +++++++++---------- 6 files changed, 27 insertions(+), 37 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index 8eda4aff26..bb43654fef 100644 --- a/openpype/hosts/houdini/api/__init__.py 
+++ b/openpype/hosts/houdini/api/__init__.py @@ -6,16 +6,13 @@ import contextlib import hou from pyblish import api as pyblish - from avalon import api as avalon -from avalon.houdini import pipeline as houdini import openpype.hosts.houdini from openpype.hosts.houdini.api import lib from openpype.lib import ( - any_outdated, - update_task_from_path + any_outdated ) from .lib import get_asset_fps @@ -29,6 +26,7 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + def install(): pyblish.register_plugin_path(PUBLISH_PATH) @@ -36,7 +34,7 @@ def install(): avalon.register_plugin_path(avalon.Creator, CREATE_PATH) log.info("Installing callbacks ... ") - avalon.on("init", on_init) + # avalon.on("init", on_init) avalon.before("save", before_save) avalon.on("save", on_save) avalon.on("open", on_open) @@ -50,11 +48,10 @@ def install(): "review" ] - # Expose Houdini husdoutputprocessors - hou_setup_pythonpath = os.path.join(os.path.dirname(PACKAGE_DIR), - "setup", "houdini", "pythonpath") - print("Adding PYTHONPATH: %s" % hou_setup_pythonpath) - sys.path.append(hou_setup_pythonpath) + # add houdini vendor packages + hou_pythonpath = os.path.join(os.path.dirname(HOST_DIR), "vendor") + + sys.path.append(hou_pythonpath) # Set asset FPS for the empty scene directly after launch of Houdini # so it initializes into the correct scene FPS @@ -69,8 +66,6 @@ def on_save(*args): avalon.logger.info("Running callback on save..") - update_task_from_path(hou.hipFile.path()) - nodes = lib.get_id_required_nodes() for node, new_id in lib.generate_ids(nodes): lib.set_id(node, new_id, overwrite=False) @@ -84,14 +79,12 @@ def on_open(*args): avalon.logger.info("Running callback on open..") - update_task_from_path(hou.hipFile.path()) - # Validate FPS after update_task_from_path to # ensure it is using correct FPS for the asset lib.validate_fps() if any_outdated(): - from ..widgets import popup + 
from openpype.widgets import popup log.warning("Scene has outdated content.") diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index 545cd3f7a5..48b97bb250 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -2,9 +2,8 @@ import contextlib -from avalon import io import logging -from avalon.vendor.Qt import QtCore, QtGui +from Qt import QtCore, QtGui from avalon.tools.widgets import AssetWidget from avalon import style diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index b944d592f2..f6e482954d 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -19,7 +19,7 @@ class CreateRedshiftROP(houdini.Creator): subset_no_prefix = subset[len(self.family):] subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] self.data["subset"] = subset_no_prefix - + # Add chunk size attribute self.data["chunkSize"] = 10 diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py index f4c377e0a8..e412a88c71 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_model.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_model.py @@ -1,7 +1,6 @@ import re from avalon import api -from avalon.houdini import lib import hou diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py index 87c54c9fb7..906a217c0f 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py @@ -1,5 +1,4 @@ from avalon import api -from avalon.houdini import lib import hou diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py index 
ac0e1f0436..0cb17dc7d6 100644 --- a/openpype/hosts/houdini/plugins/load/load_camera.py +++ b/openpype/hosts/houdini/plugins/load/load_camera.py @@ -23,40 +23,40 @@ def transfer_non_default_values(src, dest, ignore=None): """ import hou - src.updateParmStates() + src.updateParmStates() for parm in src.allParms(): - + if ignore and parm.name() in ignore: continue - + # If destination parm does not exist, ignore.. dest_parm = dest.parm(parm.name()) if not dest_parm: continue - + # Ignore values that are currently at default if parm.isAtDefault() and dest_parm.isAtDefault(): continue - + if not parm.isVisible(): # Ignore hidden parameters, assume they # are implementation details continue - + expression = None try: expression = parm.expression() except hou.OperationFailed: # No expression present pass - + if expression is not None and ARCHIVE_EXPRESSION in expression: - # Assume it's part of the automated connections that the Alembic Archive - # makes on loading of the camera and thus we do not want to transfer - # the expression + # Assume it's part of the automated connections that the + # Alembic Archive makes on loading of the camera and thus we do + # not want to transfer the expression continue - + # Ignore folders, separators, etc. 
ignore_types = { hou.parmTemplateType.Toggle, @@ -68,7 +68,7 @@ def transfer_non_default_values(src, dest, ignore=None): } if parm.parmTemplate().type() in ignore_types: continue - + print("Preserving attribute: %s" % parm.name()) dest_parm.setFromParm(parm) @@ -155,13 +155,13 @@ class CameraLoader(api.Loader): # Apply values to the new camera new_camera = self._get_camera(node) - transfer_non_default_values(temp_camera, + transfer_non_default_values(temp_camera, new_camera, - # The hidden uniform scale attribute - # gets a default connection to "icon_scale" - # just skip that completely + # The hidden uniform scale attribute + # gets a default connection to + # "icon_scale" just skip that completely ignore={"scale"}) - + temp_camera.destroy() def remove(self, container): From 7b2932c9a78aa0e829a6bd06b148eb29a6f31e7d Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 15:25:14 +0200 Subject: [PATCH 020/308] =?UTF-8?q?hound=20cleanup=20=F0=9F=90=B6?= =?UTF-8?q?=F0=9F=A7=BD=F0=9F=A7=BA=20II.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../plugins/create/create_alembic_camera.py | 7 +- .../plugins/create/create_composite.py | 2 +- .../plugins/create/create_pointcache.py | 16 ++-- .../plugins/create/create_redshift_rop.py | 10 +- .../houdini/plugins/create/create_usd.py | 13 ++- .../plugins/create/create_usd_model.py | 7 +- .../plugins/create/create_usd_workspaces.py | 6 +- .../plugins/create/create_usdrender.py | 2 +- .../plugins/create/create_vbd_cache.py | 6 +- .../hosts/houdini/plugins/load/actions.py | 42 +++++---- .../houdini/plugins/load/load_alembic.py | 24 ++--- .../hosts/houdini/plugins/load/load_camera.py | 9 +- .../hosts/houdini/plugins/load/load_image.py | 54 ++++++----- .../houdini/plugins/load/load_usd_layer.py | 22 +++-- .../plugins/load/load_usd_reference.py | 20 ++-- .../hosts/houdini/plugins/load/load_vdb.py | 25 +++-- .../houdini/plugins/load/show_usdview.py | 3 +- 
.../plugins/publish/collect_active_state.py | 15 +-- .../plugins/publish/collect_current_file.py | 12 ++- .../houdini/plugins/publish/collect_frames.py | 13 ++- .../houdini/plugins/publish/collect_inputs.py | 20 ++-- .../plugins/publish/collect_instances.py | 2 +- .../publish/collect_instances_usd_layered.py | 3 - .../plugins/publish/collect_output_node.py | 19 ++-- .../plugins/publish/collect_redshift_rop.py | 26 +++--- .../plugins/publish/collect_remote_publish.py | 4 +- .../publish/collect_render_products.py | 23 +++-- .../plugins/publish/collect_usd_bootstrap.py | 36 ++++--- .../plugins/publish/collect_usd_layers.py | 1 - .../plugins/publish/collect_workscene_fps.py | 2 +- .../houdini/plugins/publish/extract_usd.py | 2 +- .../plugins/publish/extract_usd_layered.py | 93 ++++++++++--------- .../plugins/publish/extract_vdb_cache.py | 6 +- .../plugins/publish/increment_current_file.py | 21 +++-- .../increment_current_file_deadline.py | 17 ++-- .../houdini/plugins/publish/save_scene.py | 2 +- .../plugins/publish/save_scene_deadline.py | 6 +- .../publish/submit_houdini_render_deadline.py | 10 +- .../plugins/publish/submit_remote_publish.py | 58 ++++++------ .../plugins/publish/valiate_vdb_input_node.py | 7 +- .../validate_abc_primitive_to_detail.py | 56 +++++++---- .../publish/validate_alembic_face_sets.py | 18 ++-- .../publish/validate_alembic_input_node.py | 21 +++-- .../publish/validate_animation_settings.py | 5 +- .../plugins/publish/validate_bypass.py | 8 +- .../plugins/publish/validate_camera_rop.py | 20 ++-- .../publish/validate_cop_output_node.py | 34 +++---- .../publish/validate_file_extension.py | 11 +-- .../plugins/publish/validate_frame_token.py | 7 +- .../validate_houdini_license_category.py | 6 +- .../publish/validate_mkpaths_toggled.py | 20 ++-- .../plugins/publish/validate_no_errors.py | 21 +++-- .../publish/validate_outnode_exists.py | 4 +- .../plugins/publish/validate_output_node.py | 44 +++++---- .../validate_primitive_hierarchy_paths.py | 47 
++++++---- .../publish/validate_remote_publish.py | 18 ++-- .../validate_remote_publish_enabled.py | 6 +- .../publish/validate_sop_output_node.py | 38 ++++---- .../validate_usd_layer_path_backslashes.py | 11 +-- .../publish/validate_usd_model_and_shade.py | 7 +- .../publish/validate_usd_output_node.py | 28 +++--- .../validate_usd_render_product_names.py | 7 +- .../plugins/publish/validate_usd_setdress.py | 13 ++- .../validate_usd_shade_model_exists.py | 23 +++-- .../publish/validate_usd_shade_workspace.py | 21 +++-- .../publish/validate_vdb_input_node.py | 7 +- .../publish/validate_vdb_output_node.py | 27 +++--- 67 files changed, 652 insertions(+), 542 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index adcfb48539..99a587b035 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -23,7 +23,7 @@ class CreateAlembicCamera(plugin.Creator): parms = { "filename": "$HIP/pyblish/%s.abc" % self.name, - "use_sop_path": False + "use_sop_path": False, } if self.nodes: @@ -33,10 +33,7 @@ class CreateAlembicCamera(plugin.Creator): # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) - parms.update({ - "root": "/" + root, - "objects": remainder - }) + parms.update({"root": "/" + root, "objects": remainder}) instance.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index d8ec41e61a..4f91fa2258 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -5,7 +5,7 @@ class CreateCompositeSequence(houdini.Creator): """Composite ROP to Image Sequence""" label = "Composite (Image Sequence)" - family = 
"colorbleed.imagesequence" + family = "imagesequence" icon = "gears" def __init__(self, *args, **kwargs): diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 7ab1046df3..cc452ed806 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -20,13 +20,15 @@ class CreatePointCache(plugin.Creator): def process(self): instance = super(CreatePointCache, self).process() - parms = {"use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output - "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "facesets": 0, # No face sets (by default exclude them) - "filename": "$HIP/pyblish/%s.abc" % self.name} + parms = { + "use_sop_path": True, # Export single node from SOP Path + "build_from_path": True, # Direct path of primitive in output + "path_attrib": "path", # Pass path attribute for output + "prim_to_detail_pattern": "cbId", + "format": 2, # Set format to Ogawa + "facesets": 0, # No face sets (by default exclude them) + "filename": "$HIP/pyblish/%s.abc" % self.name, + } if self.nodes: node = self.nodes[0] diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index f6e482954d..0babc17c6b 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -35,8 +35,9 @@ class CreateRedshiftROP(houdini.Creator): instance.setName(basename + "_ROP", unique_name=True) # Also create the linked Redshift IPR Rop - ipr_rop = self.parent.createNode("Redshift_IPR", - node_name=basename + "_IPR") + ipr_rop = self.parent.createNode( + "Redshift_IPR", node_name=basename + "_IPR" + ) # Move it to directly under the Redshift ROP 
ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) @@ -48,11 +49,10 @@ class CreateRedshiftROP(houdini.Creator): parms = { # Render frame range "trange": 1, - # Redshift ROP settings "RS_outputFileNamePrefix": prefix, - "RS_outputMultilayerMode": 0, # no multi-layered exr - "RS_outputBeautyAOVSuffix": "beauty" + "RS_outputMultilayerMode": 0, # no multi-layered exr + "RS_outputBeautyAOVSuffix": "beauty", } instance.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index d4cf3f761b..5ca8875dc0 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -5,7 +5,7 @@ class CreateUSD(houdini.Creator): """Universal Scene Description""" label = "USD" - family = "colorbleed.usd" + family = "usd" icon = "gears" def __init__(self, *args, **kwargs): @@ -21,7 +21,7 @@ class CreateUSD(houdini.Creator): parms = { "lopoutput": "$HIP/pyblish/%s.usd" % self.name, - "enableoutputprocessor_simplerelativepaths": False + "enableoutputprocessor_simplerelativepaths": False, } if self.nodes: @@ -31,9 +31,12 @@ class CreateUSD(houdini.Creator): instance.setParms(parms) # Lock any parameters in this list - to_lock = ["fileperframe", - # Lock some Avalon attributes - "family", "id"] + to_lock = [ + "fileperframe", + # Lock some Avalon attributes + "family", + "id", + ] for name in to_lock: parm = instance.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py index e412a88c71..96563f2f91 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_model.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_model.py @@ -13,7 +13,7 @@ class CreateUSDModel(api.Creator): def process(self): - node_type = "cb::author_model:1.0" + node_type = "op::author_model:1.0" subset = self.data["subset"] name = "author_{}".format(subset) @@ 
-24,10 +24,7 @@ class CreateUSDModel(api.Creator): instance = stage.createNode(node_type, node_name=name) instance.moveToGoodPosition(move_unconnected=True) - parms = { - "asset_name": self.data["asset"], - "variant_name": variant - } + parms = {"asset_name": self.data["asset"], "variant_name": variant} # Set the Geo Path to the first selected node (if any) selection = hou.selectedNodes() diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py index 906a217c0f..a2309122e4 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py @@ -31,9 +31,7 @@ class _USDWorkspace(api.Creator): # With the Workspace HDAs there is no need to imprint the instance data # since this data is pre-built into it. However, we do set the right # asset as that can be defined by the user. - parms = { - "asset": self.data["asset"] - } + parms = {"asset": self.data["asset"]} instance.setParms(parms) return instance @@ -47,7 +45,7 @@ class USDCreateShadingWorkspace(_USDWorkspace): label = "USD Shading Workspace" family = "colorbleed.shade.usd" - node_type = "cb::shadingWorkspace::1.0" + node_type = "op::shadingWorkspace::1.0" node_name = "shadingWorkspace" step = "Shade" diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index 4914e8e3ab..1c488f381b 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -7,7 +7,7 @@ class CreateUSDRender(houdini.Creator): """USD Render ROP in /stage""" label = "USD Render" - family = "colorbleed.usdrender" + family = "usdrender" icon = "magic" def __init__(self, *args, **kwargs): diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 
f8f3bbf9c3..677c3d5a9a 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -21,8 +21,10 @@ class CreateVDBCache(plugin.Creator): def process(self): instance = super(CreateVDBCache, self).process() - parms = {"sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, - "initsim": True} + parms = { + "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "initsim": True, + } if self.nodes: node = self.nodes[0] diff --git a/openpype/hosts/houdini/plugins/load/actions.py b/openpype/hosts/houdini/plugins/load/actions.py index bb8cd7a1f6..6e9410ff58 100644 --- a/openpype/hosts/houdini/plugins/load/actions.py +++ b/openpype/hosts/houdini/plugins/load/actions.py @@ -6,13 +6,15 @@ from avalon import api class SetFrameRangeLoader(api.Loader): - """Set Maya frame range""" + """Set Houdini frame range""" - families = ["colorbleed.animation", - "colorbleed.camera", - "colorbleed.pointcache", - "colorbleed.vdbcache", - "colorbleed.usd"] + families = [ + "animation", + "camera", + "pointcache", + "vdbcache", + "usd", + ] representations = ["abc", "vdb", "usd"] label = "Set frame range" @@ -24,15 +26,17 @@ class SetFrameRangeLoader(api.Loader): import hou - version = context['version'] + version = context["version"] version_data = version.get("data", {}) start = version_data.get("startFrame", None) end = version_data.get("endFrame", None) if start is None or end is None: - print("Skipping setting frame range because start or " - "end frame data is missing..") + print( + "Skipping setting frame range because start or " + "end frame data is missing.." 
+ ) return hou.playbar.setFrameRange(start, end) @@ -42,11 +46,13 @@ class SetFrameRangeLoader(api.Loader): class SetFrameRangeWithHandlesLoader(api.Loader): """Set Maya frame range including pre- and post-handles""" - families = ["colorbleed.animation", - "colorbleed.camera", - "colorbleed.pointcache", - "colorbleed.vdbcache", - "colorbleed.usd"] + families = [ + "animation", + "camera", + "pointcache", + "vdbcache", + "usd", + ] representations = ["abc", "vdb", "usd"] label = "Set frame range (with handles)" @@ -58,15 +64,17 @@ class SetFrameRangeWithHandlesLoader(api.Loader): import hou - version = context['version'] + version = context["version"] version_data = version.get("data", {}) start = version_data.get("startFrame", None) end = version_data.get("endFrame", None) if start is None or end is None: - print("Skipping setting frame range because start or " - "end frame data is missing..") + print( + "Skipping setting frame range because start or " + "end frame data is missing.." + ) return # Include handles diff --git a/openpype/hosts/houdini/plugins/load/load_alembic.py b/openpype/hosts/houdini/plugins/load/load_alembic.py index 076b4051f8..cd0f0f0d2d 100644 --- a/openpype/hosts/houdini/plugins/load/load_alembic.py +++ b/openpype/hosts/houdini/plugins/load/load_alembic.py @@ -6,10 +6,7 @@ from avalon.houdini import pipeline, lib class AbcLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" - families = ["model", - "animation", - "pointcache", - "gpuCache"] + families = ["model", "animation", "pointcache", "gpuCache"] label = "Load Alembic" representations = ["abc"] order = -10 @@ -80,19 +77,22 @@ class AbcLoader(api.Loader): self[:] = nodes - return pipeline.containerise(node_name, - namespace, - nodes, - context, - self.__class__.__name__, - suffix="") + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) def update(self, container, representation): node = 
container["node"] try: - alembic_node = next(n for n in node.children() if - n.type().name() == "alembic") + alembic_node = next( + n for n in node.children() if n.type().name() == "alembic" + ) except StopIteration: self.log.error("Could not find node of type `alembic`") return diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py index 0cb17dc7d6..65697eef63 100644 --- a/openpype/hosts/houdini/plugins/load/load_camera.py +++ b/openpype/hosts/houdini/plugins/load/load_camera.py @@ -2,24 +2,25 @@ from avalon import api from avalon.houdini import pipeline, lib -ARCHIVE_EXPRESSION = '__import__("_alembic_hom_extensions").alembicGetCameraDict' +ARCHIVE_EXPRESSION = ('__import__("_alembic_hom_extensions")' + '.alembicGetCameraDict') def transfer_non_default_values(src, dest, ignore=None): """Copy parm from src to dest. - + Because the Alembic Archive rebuilds the entire node hierarchy on triggering "Build Hierarchy" we want to preserve any local tweaks made by the user on the camera for ease of use. That could be a background image, a resolution change or even Redshift camera parameters. - + We try to do so by finding all Parms that exist on both source and destination node, include only those that both are not at their default value, they must be visible, we exclude those that have the special "alembic archive" channel expression and ignore certain Parm types. - + """ import hou diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py index 026cb07f67..4ff2777d77 100644 --- a/openpype/hosts/houdini/plugins/load/load_image.py +++ b/openpype/hosts/houdini/plugins/load/load_image.py @@ -5,14 +5,15 @@ from avalon.houdini import pipeline, lib import hou + def get_image_avalon_container(): - """The COP2 files must be in a COP2 network. - + """The COP2 files must be in a COP2 network. 
+ So we maintain a single entry point within AVALON_CONTAINERS, just for ease of use. - + """ - + path = pipeline.AVALON_CONTAINERS avalon_container = hou.node(path) if not avalon_container: @@ -20,18 +21,21 @@ def get_image_avalon_container(): # but make sure the pipeline still is built the # way we anticipate it was built, asserting it. assert path == "/obj/AVALON_CONTAINERS" - + parent = hou.node("/obj") - avalon_container = parent.createNode("subnet", - node_name="AVALON_CONTAINERS") - + avalon_container = parent.createNode( + "subnet", node_name="AVALON_CONTAINERS" + ) + image_container = hou.node(path + "/IMAGES") if not image_container: - image_container = avalon_container.createNode("cop2net", node_name="IMAGES") + image_container = avalon_container.createNode( + "cop2net", node_name="IMAGES" + ) image_container.moveToGoodPosition() - + return image_container - + class ImageLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" @@ -57,12 +61,12 @@ class ImageLoader(api.Loader): # Define node name namespace = namespace if namespace else context["asset"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name - + node = parent.createNode("file", node_name=node_name) node.moveToGoodPosition() node.setParms({"filename1": file_path}) - + # Imprint it manually data = { "schema": "avalon-core:container-2.0", @@ -75,7 +79,7 @@ class ImageLoader(api.Loader): # todo: add folder="Avalon" lib.imprint(node, data) - + return node def update(self, container, representation): @@ -88,32 +92,32 @@ class ImageLoader(api.Loader): file_path = self._get_file_sequence(file_path) # Update attributes - node.setParms({ - "filename1": file_path, - "representation": str(representation["_id"]) - }) + node.setParms( + { + "filename1": file_path, + "representation": str(representation["_id"]), + } + ) def remove(self, container): node = container["node"] - + # Let's clean up the IMAGES COP2 network # if it ends up being empty and we 
deleted # the last file node. Store the parent # before we delete the node. parent = node.parent() - + node.destroy() - + if not parent.children(): parent.destroy() - - def _get_file_sequence(self, root): files = sorted(os.listdir(root)) - + first_fname = files[0] prefix, padding, suffix = first_fname.rsplit(".", 2) fname = ".".join([prefix, "$F{}".format(len(padding)), suffix]) - return os.path.join(root, fname).replace("\\", "/") \ No newline at end of file + return os.path.join(root, fname).replace("\\", "/") diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py index 79fe3e88fd..7483101409 100644 --- a/openpype/hosts/houdini/plugins/load/load_usd_layer.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py @@ -5,11 +5,13 @@ from avalon.houdini import pipeline, lib class USDSublayerLoader(api.Loader): """Sublayer USD file in Solaris""" - families = ["colorbleed.usd", - "colorbleed.pointcache", - "colorbleed.animation", - "colorbleed.camera", - "usdCamera"] + families = [ + "colorbleed.usd", + "colorbleed.pointcache", + "colorbleed.animation", + "colorbleed.camera", + "usdCamera", + ] label = "Sublayer USD" representations = ["usd", "usda", "usdlc", "usdnc", "abc"] order = 1 @@ -62,8 +64,12 @@ class USDSublayerLoader(api.Loader): file_path = file_path.replace("\\", "/") # Update attributes - node.setParms({"filepath1": file_path, - "representation": str(representation["_id"])}) + node.setParms( + { + "filepath1": file_path, + "representation": str(representation["_id"]), + } + ) # Reload files node.parm("reload").pressButton() @@ -71,4 +77,4 @@ class USDSublayerLoader(api.Loader): def remove(self, container): node = container["node"] - node.destroy() \ No newline at end of file + node.destroy() diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py b/openpype/hosts/houdini/plugins/load/load_usd_reference.py index f996ccdc3c..cab3cb5269 100644 --- 
a/openpype/hosts/houdini/plugins/load/load_usd_reference.py +++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py @@ -5,11 +5,13 @@ from avalon.houdini import pipeline, lib class USDReferenceLoader(api.Loader): """Reference USD file in Solaris""" - families = ["colorbleed.usd", - "colorbleed.pointcache", - "colorbleed.animation", - "colorbleed.camera", - "usdCamera"] + families = [ + "colorbleed.usd", + "colorbleed.pointcache", + "colorbleed.animation", + "colorbleed.camera", + "usdCamera", + ] label = "Reference USD" representations = ["usd", "usda", "usdlc", "usdnc", "abc"] order = -8 @@ -62,8 +64,12 @@ class USDReferenceLoader(api.Loader): file_path = file_path.replace("\\", "/") # Update attributes - node.setParms({"filepath1": file_path, - "representation": str(representation["_id"])}) + node.setParms( + { + "filepath1": file_path, + "representation": str(representation["_id"]), + } + ) # Reload files node.parm("reload").pressButton() diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py index be2b2556f5..576b459d7d 100644 --- a/openpype/hosts/houdini/plugins/load/load_vdb.py +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -45,12 +45,14 @@ class VdbLoader(api.Loader): nodes = [container, file_node] self[:] = nodes - return pipeline.containerise(node_name, - namespace, - nodes, - context, - self.__class__.__name__, - suffix="") + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) def format_path(self, path): """Format file path correctly for single vdb or vdb sequence""" @@ -68,8 +70,10 @@ class VdbLoader(api.Loader): files = sorted(os.listdir(path)) first = next((x for x in files if x.endswith(".vdb")), None) if first is None: - raise RuntimeError("Couldn't find first .vdb file of " - "sequence in: %s" % path) + raise RuntimeError( + "Couldn't find first .vdb file of " + "sequence in: %s" % path + ) # Set .vdb to 
$F.vdb first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first) @@ -85,8 +89,9 @@ class VdbLoader(api.Loader): node = container["node"] try: - file_node = next(n for n in node.children() if - n.type().name() == "file") + file_node = next( + n for n in node.children() if n.type().name() == "file" + ) except StopIteration: self.log.error("Could not find node of type `alembic`") return diff --git a/openpype/hosts/houdini/plugins/load/show_usdview.py b/openpype/hosts/houdini/plugins/load/show_usdview.py index b9aa0e7ddc..f23974094e 100644 --- a/openpype/hosts/houdini/plugins/load/show_usdview.py +++ b/openpype/hosts/houdini/plugins/load/show_usdview.py @@ -40,5 +40,4 @@ class ShowInUsdview(api.Loader): # Force string to avoid unicode issues env = {str(key): str(value) for key, value in env.items()} - subprocess.Popen([usdview, filepath, "--renderer", "GL"], - env=env) + subprocess.Popen([usdview, filepath, "--renderer", "GL"], env=env) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index ee8640b04e..7b3637df88 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -1,5 +1,4 @@ import pyblish.api -import openpype.api class CollectInstanceActiveState(pyblish.api.InstancePlugin): @@ -28,9 +27,11 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): active = not node.isBypassed() # Set instance active state - instance.data.update({ - "active": active, - # temporarily translation of `active` to `publish` till issue has - # been resolved: https://github.com/pyblish/pyblish-base/issues/307 - "publish": active - }) + instance.data.update( + { + "active": active, + # temporarily translation of `active` to `publish` till issue has + # been resolved: https://github.com/pyblish/pyblish-base/issues/307 + "publish": active, + } + ) diff --git 
a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index b35a943833..c0b987ebbc 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -9,7 +9,7 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 label = "Houdini Current File" - hosts = ['houdini'] + hosts = ["houdini"] def process(self, context): """Inject the current working file""" @@ -27,8 +27,10 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): # could have existed already. We will allow it if the file exists, # but show a warning for this edge case to clarify the potential # false positive. - self.log.warning("Current file is 'untitled.hip' and we are " - "unable to detect whether the current scene is " - "saved correctly.") + self.log.warning( + "Current file is 'untitled.hip' and we are " + "unable to detect whether the current scene is " + "saved correctly." 
+ ) - context.data['currentFile'] = filepath + context.data["currentFile"] = filepath diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index ed43f717c2..ef77c3230b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -10,8 +10,7 @@ class CollectFrames(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder label = "Collect Frames" - families = ["vdbcache", - "imagesequence"] + families = ["vdbcache", "imagesequence"] def process(self, instance): @@ -39,9 +38,9 @@ class CollectFrames(pyblish.api.InstancePlugin): # Check if frames are bigger than 1 (file collection) # override the result if end_frame - start_frame > 1: - result = self.create_file_list(match, - int(start_frame), - int(end_frame)) + result = self.create_file_list( + match, int(start_frame), int(end_frame) + ) # todo: `frames` currently conflicts with "explicit frames" for a # for a custom frame list. So this should be refactored. @@ -67,12 +66,12 @@ class CollectFrames(pyblish.api.InstancePlugin): # Get the parts of the filename surrounding the frame number # so we can put our own frame numbers in. 
span = match.span(1) - prefix = match.string[:span[0]] + prefix = match.string[: span[0]] suffix = match.string[span[1]:] # Generate filenames for all frames result = [] - for i in range(start_frame, end_frame+1): + for i in range(start_frame, end_frame + 1): # Format frame number by the padding amount str_frame = "{number:0{width}d}".format(number=i, width=padding) diff --git a/openpype/hosts/houdini/plugins/publish/collect_inputs.py b/openpype/hosts/houdini/plugins/publish/collect_inputs.py index 1fafba483e..39e2737e8c 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_inputs.py +++ b/openpype/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,6 +1,3 @@ -import hou - -import avalon.io as io import avalon.api as api import pyblish.api @@ -30,7 +27,7 @@ def collect_input_containers(nodes): # and the contained children should be all we need. So we disregard # checking for .references() on the nodes. members = set(node.allSubChildren()) - members.add(node) # include the node itself + members.add(node) # include the node itself # If there's an intersection if not lookup.isdisjoint(members): @@ -51,8 +48,9 @@ def iter_upstream(node): """ - upstream = node.inputAncestors(include_ref_inputs=True, - follow_subnets=True) + upstream = node.inputAncestors( + include_ref_inputs=True, follow_subnets=True + ) # Initialize process queue with the node's ancestors itself queue = list(upstream) @@ -73,8 +71,9 @@ def iter_upstream(node): # Include the references' ancestors that have not been collected yet. 
for reference in references: - ancestors = reference.inputAncestors(include_ref_inputs=True, - follow_subnets=True) + ancestors = reference.inputAncestors( + include_ref_inputs=True, follow_subnets=True + ) ancestors = [n for n in ancestors if n not in collected] queue.extend(ancestors) @@ -103,8 +102,9 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): if output is None: # If no valid output node is set then ignore it as validation # will be checking those cases. - self.log.debug("No output node found, skipping " - "collecting of inputs..") + self.log.debug( + "No output node found, skipping " "collecting of inputs.." + ) return # Collect all upstream parents diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 6b00b7cb22..1b36526783 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -62,7 +62,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Create nice name if the instance has a frame range. 
label = data.get("name", node.name()) - label += " (%s)" % data["asset"] # include asset in name + label += " (%s)" % data["asset"] # include asset in name if "frameStart" in data and "frameEnd" in data: frames = "[{frameStart} - {frameEnd}]".format(**data) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py index a3e9400970..7df5e8b6f2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances_usd_layered.py @@ -1,8 +1,5 @@ -import os - import hou import pyblish.api -from avalon import io from avalon.houdini import lib import openpype.hosts.houdini.api.usd as hou_usdlib import openpype.lib.usdlib as usdlib diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index e8aa701f26..938ee81cc3 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -5,12 +5,14 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): """Collect the out node's SOP/COP Path value.""" order = pyblish.api.CollectorOrder - families = ["pointcache", - "camera", - "vdbcache", - "imagesequence", - "usd", - "usdrender"] + families = [ + "pointcache", + "camera", + "vdbcache", + "imagesequence", + "usd", + "usdrender", + ] hosts = ["houdini"] label = "Collect Output Node Path" @@ -53,8 +55,9 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): out_node = node.parm("loppath").evalAsNode() else: - raise ValueError("ROP node type '%s' is" - " not supported." % node_type) + raise ValueError( + "ROP node type '%s' is" " not supported." 
% node_type + ) if not out_node: self.log.warning("No output node collected.") diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index f19b1eec2c..72b554b567 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -7,11 +7,11 @@ import pyblish.api def get_top_referenced_parm(parm): - processed = set() # disallow infinite loop + processed = set() # disallow infinite loop while True: if parm.path() in processed: raise RuntimeError("Parameter references result in cycle.") - + processed.add(parm.path()) ref = parm.getReferencedParm() @@ -27,7 +27,7 @@ def evalParmNoFrame(node, parm, pad_character="#"): parameter = node.parm(parm) assert parameter, "Parameter does not exist: %s.%s" % (node, parm) - + # If the parameter has a parameter reference, then get that # parameter instead as otherwise `unexpandedString()` fails. 
parameter = get_top_referenced_parm(parameter) @@ -38,7 +38,7 @@ def evalParmNoFrame(node, parm, pad_character="#"): except hou.Error as exc: print("Failed: %s" % parameter) raise RuntimeError(exc) - + def replace(match): padding = 1 n = match.group(2) @@ -70,31 +70,32 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): rop = instance[0] - + # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") if chunk_size_parm: chunk_size = int(chunk_size_parm.eval()) instance.data["chunkSize"] = chunk_size self.log.debug("Chunk Size: %s" % chunk_size) - + default_prefix = evalParmNoFrame(rop, "RS_outputFileNamePrefix") beauty_suffix = rop.evalParm("RS_outputBeautyAOVSuffix") render_products = [] # Default beauty AOV - beauty_product = self.get_render_product_name(prefix=default_prefix, - suffix=beauty_suffix) + beauty_product = self.get_render_product_name( + prefix=default_prefix, suffix=beauty_suffix + ) render_products.append(beauty_product) num_aovs = rop.evalParm("RS_aov") for index in range(num_aovs): i = index + 1 - + # Skip disabled AOVs if not rop.evalParm("RS_aovEnable_%s" % i): continue - + aov_suffix = rop.evalParm("RS_aovSuffix_%s" % i) aov_prefix = evalParmNoFrame(rop, "RS_aovCustomPrefix_%s" % i) if not aov_prefix: @@ -122,10 +123,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): # there is no suffix for the current product, for example: # foo_%AOV% -> foo.exr pattern = "%AOV%" if suffix else "[._-]?%AOV%" - product_name = re.sub(pattern, - suffix, - prefix, - flags=re.IGNORECASE) + product_name = re.sub(pattern, suffix, prefix, flags=re.IGNORECASE) else: if suffix: # Add ".{suffix}" before the extension diff --git a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py index 8f0210129f..3ae16efe56 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py +++ 
b/openpype/hosts/houdini/plugins/publish/collect_remote_publish.py @@ -10,9 +10,9 @@ class CollectRemotePublishSettings(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder families = ["*"] - hosts = ['houdini'] + hosts = ["houdini"] targets = ["deadline"] - label = 'Remote Publish Submission Settings' + label = "Remote Publish Submission Settings" actions = [openpype.api.RepairAction] def process(self, context): diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index 084c00cd70..e8a4a3dc3d 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -28,7 +28,7 @@ def get_var_changed(variable=None): cmd = "varchange -V" if variable: cmd += " {0}".format(variable) - output, errors = hou.hscript(cmd) + output, _ = hou.hscript(cmd) changed = {} for line in output.split("Variable: "): @@ -44,7 +44,7 @@ def get_var_changed(variable=None): class CollectRenderProducts(pyblish.api.InstancePlugin): - """Collect USD Render Products""" + """Collect USD Render Products.""" label = "Collect Render Products" order = pyblish.api.CollectorOrder + 0.4 @@ -56,14 +56,17 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: rop_path = instance[0].path() - raise RuntimeError("No output node found. Make sure to connect an " - "input to the USD ROP: %s" % rop_path) + raise RuntimeError( + "No output node found. Make sure to connect an " + "input to the USD ROP: %s" % rop_path + ) # Workaround Houdini 18.0.391 bug where $HIPNAME doesn't automatically # update after scene save. if hou.applicationVersion() == (18, 0, 391): - self.log.debug("Checking for recook to workaround " - "$HIPNAME refresh bug...") + self.log.debug( + "Checking for recook to workaround " "$HIPNAME refresh bug..." 
+ ) changed = get_var_changed("HIPNAME").get("HIPNAME") if changed: self.log.debug("Recooking for $HIPNAME refresh bug...") @@ -101,7 +104,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): # TODO: Confirm this actually is allowed USD stages and HUSK # Substitute $F def replace(match): - """Replace $F4 with padded #""" + """Replace $F4 with padded #.""" padding = int(match.group(2)) if match.group(2) else 1 return "#" * padding @@ -118,8 +121,10 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): filename = os.path.join(dirname, filename_base) filename = filename.replace("\\", "/") - assert "#" in filename, "Couldn't resolve render product name " \ - "with frame number: %s" % name + assert "#" in filename, ( + "Couldn't resolve render product name " + "with frame number: %s" % name + ) filenames.append(filename) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index fbf1ef4cb1..991354fc5a 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -25,8 +25,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.35 label = "Collect USD Bootstrap" hosts = ["houdini"] - families = ["colorbleed.usd", - "colorbleed.usd.layered"] + families = ["usd", "usd.layered"] def process(self, instance): @@ -35,7 +34,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): instance_subset = instance.data["subset"] for name, layers in usdlib.PIPELINE.items(): if instance_subset in set(layers): - return name # e.g. "asset" + return name # e.g. 
"asset" break else: return @@ -54,15 +53,11 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): self.log.debug("Add bootstrap for: %s" % bootstrap) - asset = io.find_one({"name": instance.data["asset"], - "type": "asset"}) + asset = io.find_one({"name": instance.data["asset"], "type": "asset"}) assert asset, "Asset must exist: %s" % asset # Check which are not about to be created and don't exist yet - required = { - "shot": ["usdShot"], - "asset": ["usdAsset"] - }.get(bootstrap) + required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) require_all_layers = instance.data.get("requireAllLayers", False) if require_all_layers: @@ -78,18 +73,18 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): if self._subset_exists(instance, subset, asset): continue - self.log.debug("Creating {0} USD bootstrap: {1} {2}".format( - bootstrap, - asset["name"], - subset - )) + self.log.debug( + "Creating {0} USD bootstrap: {1} {2}".format( + bootstrap, asset["name"], subset + ) + ) new = instance.context.create_instance(subset) new.data["subset"] = subset new.data["label"] = "{0} ({1})".format(subset, asset["name"]) - new.data["family"] = "colorbleed.usd.bootstrap" + new.data["family"] = "usd.bootstrap" new.data["comment"] = "Automated bootstrap USD file." - new.data["publishFamilies"] = ["colorbleed.usd"] + new.data["publishFamilies"] = ["usd"] # Do not allow the user to toggle this instance new.data["optional"] = False @@ -100,7 +95,6 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): def _subset_exists(self, instance, subset, asset): """Return whether subset exists in current context or in database.""" - # Allow it to be created during this publish session context = instance.context for inst in context: @@ -112,6 +106,8 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): # Or, if they already exist in the database we can # skip them too. 
- return bool(io.find_one({"name": subset, - "type": "subset", - "parent": asset["_id"]})) + return bool( + io.find_one( + {"name": subset, "type": "subset", "parent": asset["_id"]} + ) + ) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index 2920b5366d..581f029ac2 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -7,7 +7,6 @@ from avalon.houdini import lib import openpype.hosts.houdini.lib.usd as usdlib import hou -from pxr import Sdf class CollectUsdLayers(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py b/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py index c145eea519..6f6cc978cd 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py +++ b/openpype/hosts/houdini/plugins/publish/collect_workscene_fps.py @@ -3,7 +3,7 @@ import hou class CollectWorksceneFPS(pyblish.api.ContextPlugin): - """Get the FPS of the work scene""" + """Get the FPS of the work scene.""" label = "Workscene FPS" order = pyblish.api.CollectorOrder diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 0968ba87e9..ae1dfb3f8f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -11,7 +11,7 @@ class ExtractUSD(openpype.api.Extractor): label = "Extract USD" hosts = ["houdini"] targets = ["local"] - families = ["colorbleed.usd", + families = ["usd", "usdModel", "usdSetDress"] diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 329d26bf3b..06b48f3b43 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ 
b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -12,7 +12,7 @@ from openpype.hosts.houdini.api.lib import render_rop class ExitStack(object): - """Context manager for dynamic management of a stack of exit callbacks + """Context manager for dynamic management of a stack of exit callbacks. For example: @@ -23,6 +23,7 @@ class ExitStack(object): # in the list raise an exception """ + def __init__(self): self._exit_callbacks = deque() @@ -35,8 +36,10 @@ class ExitStack(object): def _push_cm_exit(self, cm, cm_exit): """Helper to correctly register callbacks to __exit__ methods""" + def _exit_wrapper(*exc_details): return cm_exit(cm, *exc_details) + _exit_wrapper.__self__ = cm self.push(_exit_wrapper) @@ -58,20 +61,22 @@ class ExitStack(object): self._exit_callbacks.append(exit) else: self._push_cm_exit(exit, exit_method) - return exit # Allow use as a decorator + return exit # Allow use as a decorator def callback(self, callback, *args, **kwds): """Registers an arbitrary callback and arguments. Cannot suppress exceptions. 
""" + def _exit_wrapper(exc_type, exc, tb): callback(*args, **kwds) + # We changed the signature, so using @wraps is not appropriate, but # setting __wrapped__ may still help with introspection _exit_wrapper.__wrapped__ = callback self.push(_exit_wrapper) - return callback # Allow use as a decorator + return callback # Allow use as a decorator def enter_context(self, cm): """Enters the supplied context manager @@ -97,6 +102,7 @@ class ExitStack(object): # We manipulate the exception state so it behaves as though # we were actually nesting multiple with statements frame_exc = sys.exc_info()[1] + def _fix_exception_context(new_exc, old_exc): while 1: exc_context = new_exc.__context__ @@ -148,15 +154,11 @@ class ExtractUSDLayered(openpype.api.Extractor): label = "Extract Layered USD" hosts = ["houdini"] targets = ["local"] - families = ["colorbleed.usd.layered", - "usdShade"] + families = ["colorbleed.usd.layered", "usdShade"] # Force Output Processors so it will always save any file # into our unique staging directory with processed Avalon paths - output_processors = [ - "avalon_uri_processor", - "stagingdir_processor" - ] + output_processors = ["avalon_uri_processor", "stagingdir_processor"] def process(self, instance): @@ -168,8 +170,9 @@ class ExtractUSDLayered(openpype.api.Extractor): # The individual rop nodes are collected as "publishDependencies" dependencies = instance.data["publishDependencies"] ropnodes = [dependency[0] for dependency in dependencies] - assert all(node.type().name() in {"usd", "usd_rop"} - for node in ropnodes) + assert all( + node.type().name() in {"usd", "usd_rop"} for node in ropnodes + ) # Main ROP node, either a USD Rop or ROP network with multiple USD ROPs node = instance[0] @@ -177,9 +180,12 @@ class ExtractUSDLayered(openpype.api.Extractor): # Collect any output dependencies that have not been processed yet # during extraction of other instances outputs = [fname] - active_dependencies = [dep for dep in dependencies if - 
dep.data.get("publish", True) and - not dep.data.get("_isExtracted", False)] + active_dependencies = [ + dep + for dep in dependencies + if dep.data.get("publish", True) + and not dep.data.get("_isExtracted", False) + ] for dependency in active_dependencies: outputs.append(dependency.data["usdFilename"]) @@ -192,13 +198,11 @@ class ExtractUSDLayered(openpype.api.Extractor): # This sets staging directory on the processor to force our # output files to end up in the Staging Directory. "stagingdiroutputprocessor_stagingDir": staging_dir, - # Force the Avalon URI Output Processor to refactor paths for # references, payloads and layers to published paths. "avalonurioutputprocessor_use_publish_paths": True, - # Only write out specific USD files based on our outputs - "savepattern": save_pattern + "savepattern": save_pattern, } overrides = list() with ExitStack() as stack: @@ -207,7 +211,7 @@ class ExtractUSDLayered(openpype.api.Extractor): manager = hou_usdlib.outputprocessors( ropnode, processors=self.output_processors, - disable_all_others=True + disable_all_others=True, ) stack.enter_context(manager) @@ -216,8 +220,10 @@ class ExtractUSDLayered(openpype.api.Extractor): # exist when the Output Processor is added to the ROP node. 
for name, value in rop_overrides.items(): parm = ropnode.parm(name) - assert parm, "Parm not found: %s.%s" % (ropnode.path(), - name) + assert parm, "Parm not found: %s.%s" % ( + ropnode.path(), + name, + ) overrides.append((parm, value)) stack.enter_context(parm_values(overrides)) @@ -236,12 +242,13 @@ class ExtractUSDLayered(openpype.api.Extractor): dependency_fname = dependency.data["usdFilename"] filepath = os.path.join(staging_dir, dependency_fname) - similar = self._compare_with_latest_publish(dependency, - filepath) + similar = self._compare_with_latest_publish(dependency, filepath) if similar: # Deactivate this dependency - self.log.debug("Dependency matches previous publish version," - " deactivating %s for publish" % dependency) + self.log.debug( + "Dependency matches previous publish version," + " deactivating %s for publish" % dependency + ) dependency.data["publish"] = False else: self.log.debug("Extracted dependency: %s" % dependency) @@ -265,33 +272,35 @@ class ExtractUSDLayered(openpype.api.Extractor): # Compare this dependency with the latest published version # to detect whether we should make this into a new publish # version. If not, skip it. - asset = io.find_one({ - "name": dependency.data["asset"], - "type": "asset" - }) - subset = io.find_one({ - "name": dependency.data["subset"], - "type": "subset", - "parent": asset["_id"] - }) + asset = io.find_one( + {"name": dependency.data["asset"], "type": "asset"} + ) + subset = io.find_one( + { + "name": dependency.data["subset"], + "type": "subset", + "parent": asset["_id"], + } + ) if not subset: # Subset doesn't exist yet. 
Definitely new file self.log.debug("No existing subset..") return False - version = io.find_one({ - "type": "version", - "parent": subset["_id"], - }, sort=[("name", -1)]) + version = io.find_one( + {"type": "version", "parent": subset["_id"], }, sort=[("name", -1)] + ) if not version: self.log.debug("No existing version..") return False - representation = io.find_one({ - "name": ext.lstrip("."), - "type": "representation", - "parent": version["_id"] - }) + representation = io.find_one( + { + "name": ext.lstrip("."), + "type": "representation", + "parent": version["_id"], + } + ) if not representation: self.log.debug("No existing representation..") return False diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index d077635dfd..432faf68c3 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -36,9 +36,9 @@ class ExtractVDBCache(openpype.api.Extractor): instance.data["representations"] = [] representation = { - 'name': 'mov', - 'ext': 'mov', - 'files': output, + "name": "mov", + "ext": "mov", + "files": output, "stagingDir": staging_dir, } instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index 205599eaa3..31c2954ee7 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -15,8 +15,7 @@ class IncrementCurrentFile(pyblish.api.InstancePlugin): label = "Increment current file" order = pyblish.api.IntegratorOrder + 9.0 hosts = ["houdini"] - families = ["colorbleed.usdrender", - "redshift_rop"] + families = ["colorbleed.usdrender", "redshift_rop"] targets = ["local"] def process(self, instance): @@ -32,17 +31,21 @@ class 
IncrementCurrentFile(pyblish.api.InstancePlugin): context = instance.context errored_plugins = get_errored_plugins_from_data(context) - if any(plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins): - raise RuntimeError("Skipping incrementing current file because " - "submission to deadline failed.") + if any( + plugin.__name__ == "HoudiniSubmitPublishDeadline" + for plugin in errored_plugins + ): + raise RuntimeError( + "Skipping incrementing current file because " + "submission to deadline failed." + ) # Filename must not have changed since collecting host = avalon.api.registered_host() current_file = host.current_file() - assert context.data['currentFile'] == current_file, ( - "Collected filename from current scene name." - ) + assert ( + context.data["currentFile"] == current_file + ), "Collected filename from current scene name." new_filepath = version_up(current_file) host.save(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py index 06ec711b9e..faa015f739 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file_deadline.py @@ -1,6 +1,5 @@ import pyblish.api -import os import hou from openpype.api import version_up from openpype.action import get_errored_plugins_from_data @@ -21,14 +20,16 @@ class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): def process(self, context): errored_plugins = get_errored_plugins_from_data(context) - if any(plugin.__name__ == "HoudiniSubmitPublishDeadline" - for plugin in errored_plugins): - raise RuntimeError("Skipping incrementing current file because " - "submission to deadline failed.") + if any( + plugin.__name__ == "HoudiniSubmitPublishDeadline" + for plugin in errored_plugins + ): + raise RuntimeError( + "Skipping incrementing current file because " + "submission to 
deadline failed." + ) current_filepath = context.data["currentFile"] new_filepath = version_up(current_filepath) - hou.hipFile.save(file_name=new_filepath, - save_to_recent_files=True) - + hou.hipFile.save(file_name=new_filepath, save_to_recent_files=True) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py index ec97944bee..1b12efa603 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -8,7 +8,7 @@ class SaveCurrentScene(pyblish.api.InstancePlugin): label = "Save current file" order = pyblish.api.IntegratorOrder - 0.49 hosts = ["houdini"] - families = ["colorbleed.usdrender", + families = ["usdrender", "redshift_rop"] targets = ["local"] diff --git a/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py b/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py index 8a787025c4..a0efd0610c 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene_deadline.py @@ -12,9 +12,9 @@ class SaveCurrentSceneDeadline(pyblish.api.ContextPlugin): def process(self, context): import hou - assert context.data['currentFile'] == hou.hipFile.path(), ( - "Collected filename from current scene name." - ) + assert ( + context.data["currentFile"] == hou.hipFile.path() + ), "Collected filename from current scene name." 
if hou.hipFile.hasUnsavedChanges(): self.log.info("Saving current file..") diff --git a/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py b/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py index 34566f6c63..f471d788b6 100644 --- a/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py @@ -11,7 +11,7 @@ import hou class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): - """Submit Solaris USD Render ROPs to Deadline + """Submit Solaris USD Render ROPs to Deadline. Renders are submitted to a Deadline Web Service as supplied via the environment variable AVALON_DEADLINE. @@ -26,7 +26,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): label = "Submit Render to Deadline" order = pyblish.api.IntegratorOrder hosts = ["houdini"] - families = ["colorbleed.usdrender", + families = ["usdrender", "redshift_rop"] targets = ["local"] @@ -50,9 +50,9 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): # StartFrame to EndFrame by byFrameStep frames = "{start}-{end}x{step}".format( - start=int(instance.data["startFrame"]), - end=int(instance.data["endFrame"]), - step=int(instance.data["byFrameStep"]), + start=int(instance.data["startFrame"]), + end=int(instance.data["endFrame"]), + step=int(instance.data["byFrameStep"]), ) # Documentation for keys available at: diff --git a/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py b/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py index b9278c1a90..77666921c8 100644 --- a/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py @@ -31,12 +31,14 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): def process(self, context): # Ensure no errors so far - assert all(result["success"] for result in context.data["results"]), ( - "Errors found, aborting 
integration..") + assert all( + result["success"] for result in context.data["results"] + ), "Errors found, aborting integration.." # Deadline connection - AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", - "http://localhost:8082") + AVALON_DEADLINE = api.Session.get( + "AVALON_DEADLINE", "http://localhost:8082" + ) assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" # Note that `publish` data member might change in the future. @@ -45,8 +47,9 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): instance_names = sorted(instance.name for instance in actives) if not instance_names: - self.log.warning("No active instances found. " - "Skipping submission..") + self.log.warning( + "No active instances found. " "Skipping submission.." + ) return scene = context.data["currentFile"] @@ -72,30 +75,24 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): "BatchName": batch_name, "Comment": context.data.get("comment", ""), "Priority": 50, - "Frames": "1-1", # Always trigger a single frame + "Frames": "1-1", # Always trigger a single frame "IsFrameDependent": False, "Name": job_name, "UserName": deadline_user, # "Comment": instance.context.data.get("comment", ""), # "InitialStatus": state - }, "PluginInfo": { - "Build": None, # Don't force build "IgnoreInputs": True, - # Inputs "SceneFile": scene, "OutputDriver": "/out/REMOTE_PUBLISH", - # Mandatory for Deadline "Version": version, - }, - # Mandatory for Deadline, may be empty - "AuxFiles": [] + "AuxFiles": [], } # Process submission per individual instance if the submission @@ -108,14 +105,14 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): for instance in instance_names: # Clarify job name per submission (include instance name) payload["JobInfo"]["Name"] = job_name + " - %s" % instance - self.submit_job(payload, - instances=[instance], - deadline=AVALON_DEADLINE) + self.submit_job( + payload, instances=[instance], deadline=AVALON_DEADLINE + ) else: # Submit a single job - 
self.submit_job(payload, - instances=instance_names, - deadline=AVALON_DEADLINE) + self.submit_job( + payload, instances=instance_names, deadline=AVALON_DEADLINE + ) def submit_job(self, payload, instances, deadline): @@ -130,16 +127,21 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): "AVALON_TOOLS", ] - environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + environment = dict( + {key: os.environ[key] for key in keys if key in os.environ}, + **api.Session + ) environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances) - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) + payload["JobInfo"].update( + { + "EnvironmentKeyValue%d" + % index: "{key}={value}".format( + key=key, value=environment[key] + ) + for index, key in enumerate(environment) + } + ) # Submit self.log.info("Submitting..") diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py index 7b23d73ac7..0ae1bc94eb 100644 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py @@ -3,7 +3,7 @@ import openpype.api class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB + """Validate that the node connected to the output node is of type VDB. Regardless of the amount of VDBs create the output will need to have an equal amount of VDBs, points, primitives and vertices @@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node is not" - "of type VDB!") + raise RuntimeError( + "Node connected to the output node is not" "of type VDB!" 
+ ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 31eb3d1fb1..8fe1b44b7a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -16,15 +16,17 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ order = openpype.api.ValidateContentsOrder + 0.1 - families = ["colorbleed.pointcache"] + families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Primitives found with inconsistent primitive " - "to detail attributes. See log.") + raise RuntimeError( + "Primitives found with inconsistent primitive " + "to detail attributes. See log." + ) @classmethod def get_invalid(cls, instance): @@ -34,21 +36,27 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): rop = instance[0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() if not pattern: - cls.log.debug("Alembic ROP has no 'Primitive to Detail' pattern. " - "Validation is ignored..") + cls.log.debug( + "Alembic ROP has no 'Primitive to Detail' pattern. " + "Validation is ignored.." + ) return build_from_path = rop.parm("build_from_path").eval() if not build_from_path: - cls.log.debug("Alembic ROP has 'Build from Path' disabled. " - "Validation is ignored..") + cls.log.debug( + "Alembic ROP has 'Build from Path' disabled. " + "Validation is ignored.." 
+ ) return path_attr = rop.parm("path_attrib").eval() if not path_attr: - cls.log.error("The Alembic ROP node has no Path Attribute" - "value set, but 'Build Hierarchy from Attribute'" - "is enabled.") + cls.log.error( + "The Alembic ROP node has no Path Attribute" + "value set, but 'Build Hierarchy from Attribute'" + "is enabled." + ) return [rop.path()] # Let's assume each attribute is explicitly named for now and has no @@ -59,26 +67,32 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("startFrame", 0) geo = output.geometryAtFrame(frame) - + # If there are no primitives on the start frame then it might be # something that is emitted over time. As such we can't actually # validate whether the attributes exist, because they won't exist # yet. In that case, just warn the user and allow it. if len(geo.iterPrims()) == 0: - cls.log.warning("No primitives found on current frame. Validation" - " for Primitive to Detail will be skipped.") + cls.log.warning( + "No primitives found on current frame. Validation" + " for Primitive to Detail will be skipped." + ) return - + attrib = geo.findPrimAttrib(path_attr) if not attrib: - cls.log.info("Geometry Primitives are missing " - "path attribute: `%s`" % path_attr) + cls.log.info( + "Geometry Primitives are missing " + "path attribute: `%s`" % path_attr + ) return [output.path()] # Ensure at least a single string value is present if not attrib.strings(): - cls.log.info("Primitive path attribute has no " - "string values: %s" % path_attr) + cls.log.info( + "Primitive path attribute has no " + "string values: %s" % path_attr + ) return [output.path()] paths = None @@ -111,6 +125,8 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): # Primitive to Detail attribute then we consider it # inconsistent and invalidate the ROP node's content. 
if len(values) > 1: - cls.log.warning("Path has multiple values: %s (path: %s)" - % (list(values), path)) + cls.log.warning( + "Path has multiple values: %s (path: %s)" + % (list(values), path) + ) return [output.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index da79569edd..e9126ffef0 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -4,21 +4,21 @@ import openpype.api class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. - + When groups are saved as Face Sets with the Alembic these show up as shadingEngine connections in Maya - however, with animated groups these connections in Maya won't work as expected, it won't update per - frame. Additionally, it can break shader assignments in some cases + frame. Additionally, it can break shader assignments in some cases where it requires to first break this connection to allow a shader to be assigned. - + It is allowed to include Face Sets, so only an issue is logged to identify that it could introduce issues down the pipeline. """ order = openpype.api.ValidateContentsOrder + 0.1 - families = ["colorbleed.pointcache"] + families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" @@ -26,10 +26,12 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): rop = instance[0] facesets = rop.parm("facesets").eval() - + # 0 = No Face Sets # 1 = Save Non-Empty Groups as Face Sets # 2 = Save All Groups As Face Sets - if facesets != 0: - self.log.warning("Alembic ROP saves 'Face Sets' for Geometry. " - "Are you sure you want this?") + if facesets != 0: + self.log.warning( + "Alembic ROP saves 'Face Sets' for Geometry. " + "Are you sure you want this?" 
+ ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 3595918765..17c9da837a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -3,7 +3,7 @@ import colorbleed.api class ValidateAlembicInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output is correct + """Validate that the node connected to the output is correct. The connected node cannot be of the following types for Alembic: - VDB @@ -12,22 +12,24 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ order = colorbleed.api.ValidateContentsOrder + 0.1 - families = ["colorbleed.pointcache"] + families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Primitive types found that are not supported" - "for Alembic output.") + raise RuntimeError( + "Primitive types found that are not supported" + "for Alembic output." + ) @classmethod def get_invalid(cls, instance): invalid_prim_types = ["VDB", "Volume"] node = instance.data["output_node"] - + if not hasattr(node, "geometry"): # In the case someone has explicitly set an Object # node instead of a SOP node in Geometry context @@ -35,15 +37,16 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): # export object transforms. cls.log.warning("No geometry output node found, skipping check..") return - + frame = instance.data.get("startFrame", 0) geo = node.geometryAtFrame(frame) - + invalid = False for prim_type in invalid_prim_types: if geo.countPrimType(prim_type) > 0: - cls.log.error("Found a primitive which is of type '%s' !" - % prim_type) + cls.log.error( + "Found a primitive which is of type '%s' !" 
% prim_type + ) invalid = True if invalid: diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index a42c3696da..5eb8f93d03 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -29,8 +29,9 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output settings do no match for '%s'" % - instance) + raise RuntimeError( + "Output settings do no match for '%s'" % instance + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index c04734c684..79c67c3008 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,5 @@ import pyblish.api -import colorbleed.api +import openpype.api class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,7 +11,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = colorbleed.api.ValidateContentsOrder - 0.1 + order = openpype.api.ValidateContentsOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" @@ -27,8 +27,8 @@ class ValidateBypassed(pyblish.api.InstancePlugin): if invalid: rop = invalid[0] raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." % - rop.path() + "ROP node %s is set to bypass, publishing cannot continue.." 
+ % rop.path() ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index f509c51bc6..a0919e1323 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -6,9 +6,9 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" order = openpype.api.ValidateContentsOrder - families = ['colorbleed.camera'] - hosts = ['houdini'] - label = 'Camera ROP' + families = ["camera"] + hosts = ["houdini"] + label = "Camera ROP" def process(self, instance): @@ -16,8 +16,10 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): node = instance[0] if node.parm("use_sop_path").eval(): - raise RuntimeError("Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable.") + raise RuntimeError( + "Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable." 
+ ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() @@ -35,7 +37,7 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): raise ValueError("Camera path does not exist: %s" % path) if camera.type().name() != "cam": - raise ValueError("Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name())) - - + raise ValueError( + "Object set in Alembic ROP is not a camera: " + "%s (type: %s)" % (camera, camera.type().name()) + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 51c5d07b0f..543539ffe3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -12,7 +12,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["colorbleed.imagesequence"] + families = ["imagesequence"] hosts = ["houdini"] label = "Validate COP Output Node" @@ -20,8 +20,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid) + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) @classmethod def get_invalid(cls, instance): @@ -32,27 +34,27 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): if output_node is None: node = instance[0] - cls.log.error("COP Output node in '%s' does not exist. " - "Ensure a valid COP output path is set." - % node.path()) + cls.log.error( + "COP Output node in '%s' does not exist. " + "Ensure a valid COP output path is set." % node.path() + ) return [node.path()] # Output node must be a Sop node. if not isinstance(output_node, hou.CopNode): - cls.log.error("Output node %s is not a COP node. 
" - "COP Path must point to a COP node, " - "instead found category type: %s" % ( - output_node.path(), - output_node.type().category().name() - ) - ) + cls.log.error( + "Output node %s is not a COP node. " + "COP Path must point to a COP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) return [output_node.path()] - + # For the sake of completeness also assert the category type # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category assert output_node.type().category().name() == "Cop2", ( - "Output node %s is not of category Cop2. This is a bug.." % - output_node.path() + "Output node %s is not of category Cop2. This is a bug.." + % output_node.path() ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index 5823c3eddc..c299a47e74 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -15,24 +15,23 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["pointcache", - "camera", - "vdbcache"] + families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Output File Extension" family_extensions = { "pointcache": ".abc", "camera": ".abc", - "vdbcache": ".vdb" + "vdbcache": ".vdb", } def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("ROP node has incorrect " - "file extension: %s" % invalid) + raise RuntimeError( + "ROP node has incorrect " "file extension: %s" % invalid + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index f55f05032d..76b5910576 100644 --- 
a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -4,7 +4,7 @@ from openpype.hosts.houdini.api import lib class ValidateFrameToken(pyblish.api.InstancePlugin): - """Validate if the unexpanded string contains the frame ('$F') token + """Validate if the unexpanded string contains the frame ('$F') token. This validator will *only* check the output parameter of the node if the Valid Frame Range is not set to 'Render Current Frame' @@ -29,8 +29,9 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output settings do no match for '%s'" % - instance) + raise RuntimeError( + "Output settings do no match for '%s'" % instance + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index e18404b7ad..f5f03aa844 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -24,5 +24,7 @@ class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): license = hou.licenseCategory() if license != hou.licenseCategoryType.Commercial: - raise RuntimeError("USD Publishing requires a full Commercial " - "license. You are on: %s" % license) + raise RuntimeError( + "USD Publishing requires a full Commercial " + "license. 
You are on: %s" % license + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index 826dedf933..cd72877949 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,23 +1,23 @@ import pyblish.api -import colorbleed.api +import openpype.api class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = colorbleed.api.ValidateContentsOrder - families = ['colorbleed.pointcache', - 'colorbleed.camera', - 'colorbleed.vdbcache'] - hosts = ['houdini'] - label = 'Create Intermediate Directories Checked' + order = openpype.api.ValidateContentsOrder + families = ["pointcache", "camera", "vdbcache"] + hosts = ["houdini"] + label = "Create Intermediate Directories Checked" def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid) + raise RuntimeError( + "Found ROP node with Create Intermediate " + "Directories turned off: %s" % invalid + ) @classmethod def get_invalid(cls, instance): @@ -30,5 +30,3 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): result.append(node.path()) return result - - diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 1a7cf9d599..f58e5f8d7d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -10,7 +10,7 @@ def cook_in_range(node, start, end): node.cook(force=False) else: node.cook(force=False, frame_range=(start, start)) - + def get_errors(node): """Get cooking errors. 
@@ -29,8 +29,8 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" order = openpype.api.ValidateContentsOrder - hosts = ['houdini'] - label = 'Validate no errors' + hosts = ["houdini"] + label = "Validate no errors" def process(self, instance): @@ -45,20 +45,21 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): for node in validate_nodes: self.log.debug("Validating for errors: %s" % node.path()) errors = get_errors(node) - + if errors: # If there are current errors, then try an unforced cook # to see whether the error will disappear. - self.log.debug("Recooking to revalidate error " - "is up to date for: %s" % node.path()) + self.log.debug( + "Recooking to revalidate error " + "is up to date for: %s" % node.path() + ) current_frame = hou.intFrame() - start = instance.data.get("startFrame", current_frame) - end = instance.data.get("endFrame", current_frame) + start = instance.data.get("frameStart", current_frame) + end = instance.data.get("frameEnd", current_frame) cook_in_range(node, start=start, end=end) - + # Check for errors again after the forced recook errors = get_errors(node) if errors: self.log.error(errors) raise RuntimeError("Node has errors: %s" % node.path()) - diff --git a/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py b/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py index bfa2d38f1a..aedc68d5df 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py @@ -3,7 +3,7 @@ import openpype.api class ValidatOutputNodeExists(pyblish.api.InstancePlugin): - """Validate if node attribute Create intermediate Directories is turned on + """Validate if node attribute Create intermediate Directories is turned on. 
Rules: * The node must have Create intermediate Directories turned on to @@ -13,7 +13,7 @@ class ValidatOutputNodeExists(pyblish.api.InstancePlugin): order = openpype.api.ValidateContentsOrder families = ["*"] - hosts = ['houdini'] + hosts = ["houdini"] label = "Output Node Exists" def process(self, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_output_node.py index 5e20ee40d6..0b60ab5c48 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_output_node.py @@ -14,8 +14,7 @@ class ValidateOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["pointcache", - "vdbcache"] + families = ["pointcache", "vdbcache"] hosts = ["houdini"] label = "Validate Output Node" @@ -23,8 +22,10 @@ class ValidateOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid) + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) @classmethod def get_invalid(cls, instance): @@ -35,39 +36,42 @@ class ValidateOutputNode(pyblish.api.InstancePlugin): if output_node is None: node = instance[0] - cls.log.error("SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." - % node.path()) + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) return [node.path()] # Output node must be a Sop node. if not isinstance(output_node, hou.SopNode): - cls.log.error("Output node %s is not a SOP node. " - "SOP Path must point to a SOP node, " - "instead found category type: %s" % ( - output_node.path(), - output_node.type().category().name() - ) - ) + cls.log.error( + "Output node %s is not a SOP node. 
" + "SOP Path must point to a SOP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) return [output_node.path()] # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." % - output_node.path() + "Output node %s is not of category Sop. This is a bug.." + % output_node.path() ) # Check if output node has incoming connections if not output_node.inputConnections(): - cls.log.error("Output node `%s` has no incoming connections" - % output_node.path()) + cls.log.error( + "Output node `%s` has no incoming connections" + % output_node.path() + ) return [output_node.path()] # Ensure the output node has at least Geometry data if not output_node.geometry(): - cls.log.error("Output node `%s` has no geometry data." - % output_node.path()) + cls.log.error( + "Output node `%s` has no geometry data." % output_node.path() + ) return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 785dd1db78..3c15532be8 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -19,8 +19,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("See log for details. " - "Invalid nodes: {0}".format(invalid)) + raise RuntimeError( + "See log for details. 
" "Invalid nodes: {0}".format(invalid) + ) @classmethod def get_invalid(cls, instance): @@ -32,15 +33,19 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): rop = instance[0] build_from_path = rop.parm("build_from_path").eval() if not build_from_path: - cls.log.debug("Alembic ROP has 'Build from Path' disabled. " - "Validation is ignored..") + cls.log.debug( + "Alembic ROP has 'Build from Path' disabled. " + "Validation is ignored.." + ) return path_attr = rop.parm("path_attrib").eval() if not path_attr: - cls.log.error("The Alembic ROP node has no Path Attribute" - "value set, but 'Build Hierarchy from Attribute'" - "is enabled.") + cls.log.error( + "The Alembic ROP node has no Path Attribute" + "value set, but 'Build Hierarchy from Attribute'" + "is enabled." + ) return [rop.path()] cls.log.debug("Checking for attribute: %s" % path_attr) @@ -54,22 +59,28 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): # warning that the check can't be done consistently and skip # validation. if len(geo.iterPrims()) == 0: - cls.log.warning("No primitives found on current frame. Validation" - " for primitive hierarchy paths will be skipped," - " thus can't be validated.") + cls.log.warning( + "No primitives found on current frame. Validation" + " for primitive hierarchy paths will be skipped," + " thus can't be validated." 
+ ) return # Check if there are any values for the primitives attrib = geo.findPrimAttrib(path_attr) if not attrib: - cls.log.info("Geometry Primitives are missing " - "path attribute: `%s`" % path_attr) + cls.log.info( + "Geometry Primitives are missing " + "path attribute: `%s`" % path_attr + ) return [output.path()] # Ensure at least a single string value is present if not attrib.strings(): - cls.log.info("Primitive path attribute has no " - "string values: %s" % path_attr) + cls.log.info( + "Primitive path attribute has no " + "string values: %s" % path_attr + ) return [output.path()] paths = geo.primStringAttribValues(path_attr) @@ -78,8 +89,8 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): invalid_prims = [i for i, path in enumerate(paths) if not path] if invalid_prims: num_prims = len(geo.iterPrims()) # faster than len(geo.prims()) - cls.log.info("Prims have no value for attribute `%s` " - "(%s of %s prims)" % (path_attr, - len(invalid_prims), - num_prims)) + cls.log.info( + "Prims have no value for attribute `%s` " + "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) + ) return [output.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 931acdcc2f..95c66edff0 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -11,9 +11,9 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder - 0.4 families = ["*"] - hosts = ['houdini'] + hosts = ["houdini"] targets = ["deadline"] - label = 'Remote Publish ROP node' + label = "Remote Publish ROP node" actions = [openpype.api.RepairContextAction] def process(self, context): @@ -30,14 +30,14 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): assert node.type().name() == "shell", "Must be shell ROP node" assert 
node.parm("command").eval() == "", "Must have no command" assert not node.parm("shellexec").eval(), "Must not execute in shell" - assert node.parm("prerender").eval() == cmd, ( - "REMOTE_PUBLISH node does not have correct prerender script." - ) - assert node.parm("lprerender").eval() == "python", ( - "REMOTE_PUBLISH node prerender script type not set to 'python'" - ) + assert ( + node.parm("prerender").eval() == cmd + ), "REMOTE_PUBLISH node does not have correct prerender script." + assert ( + node.parm("lprerender").eval() == "python" + ), "REMOTE_PUBLISH node prerender script type not set to 'python'" @classmethod def repair(cls, context): - """(Re)create the node if it fails to pass validation""" + """(Re)create the node if it fails to pass validation.""" lib.create_remote_publish_node(force=True) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index 9f486842ae..b681fd0ee1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -9,9 +9,9 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder - 0.39 families = ["*"] - hosts = ['houdini'] + hosts = ["houdini"] targets = ["deadline"] - label = 'Remote Publish ROP enabled' + label = "Remote Publish ROP enabled" actions = [openpype.api.RepairContextAction] def process(self, context): @@ -25,7 +25,7 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @classmethod def repair(cls, context): - """(Re)create the node if it fails to pass validation""" + """(Re)create the node if it fails to pass validation.""" node = hou.node("/out/REMOTE_PUBLISH") if not node: diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index 7ba9ddd534..a5a07b1b1a 
100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -14,8 +14,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["pointcache", - "vdbcache"] + families = ["pointcache", "vdbcache"] hosts = ["houdini"] label = "Validate Output Node" @@ -23,8 +22,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid) + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." % invalid + ) @classmethod def get_invalid(cls, instance): @@ -35,29 +36,29 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): if output_node is None: node = instance[0] - cls.log.error("SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." - % node.path()) + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) return [node.path()] # Output node must be a Sop node. if not isinstance(output_node, hou.SopNode): - cls.log.error("Output node %s is not a SOP node. " - "SOP Path must point to a SOP node, " - "instead found category type: %s" % ( - output_node.path(), - output_node.type().category().name() - ) - ) + cls.log.error( + "Output node %s is not a SOP node. " + "SOP Path must point to a SOP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) return [output_node.path()] # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." 
% - output_node.path() + "Output node %s is not of category Sop. This is a bug.." + % output_node.path() ) # Ensure the node is cooked and succeeds to cook so we can correctly @@ -73,6 +74,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): # Ensure the output node has at least Geometry data if not output_node.geometry(): - cls.log.error("Output node `%s` has no geometry data." - % output_node.path()) + cls.log.error( + "Output node `%s` has no geometry data." % output_node.path() + ) return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index a21e5c267f..ac0181aed2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -17,10 +17,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["usdSetDress", - "usdShade", - "usd", - "usdrender"] + families = ["usdSetDress", "usdShade", "usd", "usdrender"] hosts = ["houdini"] label = "USD Layer path backslashes" optional = True @@ -47,5 +44,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): invalid.append(layer) if invalid: - raise RuntimeError("Loaded layers have backslashes. " - "This is invalid for HUSK USD rendering.") + raise RuntimeError( + "Loaded layers have backslashes. " + "This is invalid for HUSK USD rendering." 
+ ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 4fe4322bb3..2fd2f5eb9f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -11,7 +11,7 @@ def fullname(o): module = o.__module__ if module is None or module == str.__module__: return o.__name__ - return module + '.' + o.__name__ + return module + "." + o.__name__ class ValidateUsdModel(pyblish.api.InstancePlugin): @@ -32,7 +32,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): UsdRender.Settings, UsdRender.Product, UsdRender.Var, - UsdLux.Light + UsdLux.Light, ] def process(self, instance): @@ -64,6 +64,7 @@ class ValidateUsdShade(ValidateUsdModel): Disallow Render settings, products, vars and Lux lights. """ + families = ["usdShade"] label = "Validate USD Shade" @@ -71,5 +72,5 @@ class ValidateUsdShade(ValidateUsdModel): UsdRender.Settings, UsdRender.Product, UsdRender.Var, - UsdLux.Light + UsdLux.Light, ] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 0960129819..1f10fafdf4 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -12,7 +12,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): """ order = pyblish.api.ValidatorOrder - families = ["colorbleed.usd"] + families = ["usd"] hosts = ["houdini"] label = "Validate Output Node (USD)" @@ -20,8 +20,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid) + raise RuntimeError( + "Output node(s) `%s` are incorrect. " + "See plug-in log for details." 
% invalid + ) @classmethod def get_invalid(cls, instance): @@ -32,19 +34,19 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): if output_node is None: node = instance[0] - cls.log.error("USD node '%s' LOP path does not exist. " - "Ensure a valid LOP path is set." - % node.path()) + cls.log.error( + "USD node '%s' LOP path does not exist. " + "Ensure a valid LOP path is set." % node.path() + ) return [node.path()] # Output node must be a Sop node. if not isinstance(output_node, hou.LopNode): - cls.log.error("Output node %s is not a LOP node. " - "LOP Path must point to a LOP node, " - "instead found category type: %s" % ( - output_node.path(), - output_node.type().category().name() - ) - ) + cls.log.error( + "Output node %s is not a LOP node. " + "LOP Path must point to a LOP node, " + "instead found category type: %s" + % (output_node.path(), output_node.type().category().name()) + ) return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py index 18231a9605..36336a03ae 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -7,7 +7,7 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): """Validate USD Render Product names are correctly set absolute paths.""" order = pyblish.api.ValidatorOrder - families = ["colorbleed.usdrender"] + families = ["usdrender"] hosts = ["houdini"] label = "Validate USD Render Product Names" optional = True @@ -21,8 +21,9 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): invalid.append("Detected empty output filepath.") if not os.path.isabs(filepath): - invalid.append("Output file path is not " - "absolute path: %s" % filepath) + invalid.append( + "Output file path is not " "absolute path: %s" % filepath + ) if invalid: for message in invalid: 
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 8af53fa617..fb1094e6b5 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -41,11 +41,14 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): break else: prim_path = node.GetPath() - self.log.error("%s is not referenced geometry." % - prim_path) + self.log.error( + "%s is not referenced geometry." % prim_path + ) invalid.append(node) if invalid: - raise RuntimeError("SetDress contains local geometry. " - "This is not allowed, it must be an assembly " - "of referenced assets.") + raise RuntimeError( + "SetDress contains local geometry. " + "This is not allowed, it must be an assembly " + "of referenced assets." + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index 3de18fd9b4..fcfbf6b22d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -10,9 +10,9 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" order = openpype.api.ValidateContentsOrder - hosts = ['houdini'] + hosts = ["houdini"] families = ["usdShade"] - label = 'USD Shade model exists' + label = "USD Shade model exists" def process(self, instance): @@ -23,14 +23,19 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): shade_subset = subset.split(".", 1)[0] model_subset = re.sub("^usdShade", "usdModel", shade_subset) - asset_doc = io.find_one({"name": asset, - "type": "asset"}) + asset_doc = io.find_one({"name": asset, "type": "asset"}) if not asset_doc: raise RuntimeError("Asset does not exist: %s" % asset) - subset_doc = io.find_one({"name": 
model_subset, - "type": "subset", - "parent": asset_doc["_id"]}) + subset_doc = io.find_one( + { + "name": model_subset, + "type": "subset", + "parent": asset_doc["_id"], + } + ) if not subset_doc: - raise RuntimeError("USD Model subset not found: " - "%s (%s)" % (model_subset, asset)) + raise RuntimeError( + "USD Model subset not found: " + "%s (%s)" % (model_subset, asset) + ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 3220159508..0fd1cf1fad 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -3,7 +3,6 @@ import re import pyblish.api import openpype.api -from avalon import io import hou @@ -46,15 +45,21 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): highest = max(highest, other_version) if version != highest: - raise RuntimeError("Shading Workspace is not the latest version." - " Found %s. Latest is %s." % (version, highest)) + raise RuntimeError( + "Shading Workspace is not the latest version." + " Found %s. Latest is %s." % (version, highest) + ) # There were some issues with the editable node not having the right # configured path. So for now let's assure that is correct to.from - value = ('avalon://`chs("../asset_name")`/' - 'usdShade`chs("../model_variantname1")`.usd') + value = ( + 'avalon://`chs("../asset_name")`/' + 'usdShade`chs("../model_variantname1")`.usd' + ) rop_value = rop.parm("lopoutput").rawValue() if rop_value != value: - raise RuntimeError("Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values.") + raise RuntimeError( + "Shading Workspace has invalid 'lopoutput'" + " parameter value. The Shading Workspace" + " needs to be reset to its default values." 
+ ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index 7b23d73ac7..0ae1bc94eb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -3,7 +3,7 @@ import openpype.api class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB + """Validate that the node connected to the output node is of type VDB. Regardless of the amount of VDBs create the output will need to have an equal amount of VDBs, points, primitives and vertices @@ -24,8 +24,9 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node is not" - "of type VDB!") + raise RuntimeError( + "Node connected to the output node is not" "of type VDB!" + ) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index d3894ee41d..1ba840b71d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -4,7 +4,7 @@ import hou class ValidateVDBOutputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB + """Validate that the node connected to the output node is of type VDB. 
Regardless of the amount of VDBs create the output will need to have an equal amount of VDBs, points, primitives and vertices @@ -18,36 +18,41 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ order = openpype.api.ValidateContentsOrder + 0.1 - families = ["colorbleed.vdbcache"] + families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Node connected to the output node is not" - " of type VDB!") + raise RuntimeError( + "Node connected to the output node is not" " of type VDB!" + ) @classmethod def get_invalid(cls, instance): node = instance.data["output_node"] if node is None: - cls.log.error("SOP path is not correctly set on " - "ROP node '%s'." % instance[0].path()) + cls.log.error( + "SOP path is not correctly set on " + "ROP node '%s'." % instance[0].path() + ) return [instance] - - frame = instance.data.get("startFrame", 0) + + frame = instance.data.get("frameStart", 0) geometry = node.geometryAtFrame(frame) if geometry is None: # No geometry data on this node, maybe the node hasn't cooked? - cls.log.error("SOP node has no geometry data. " - "Is it cooked? %s" % node.path()) + cls.log.error( + "SOP node has no geometry data. " + "Is it cooked? 
%s" % node.path() + ) return [node] prims = geometry.prims() nr_of_prims = len(prims) - + # All primitives must be hou.VDB invalid_prim = False for prim in prims: From 6d2563fa5c688bba62649e7da8da09cd5db16a3b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 15:48:00 +0200 Subject: [PATCH 021/308] =?UTF-8?q?hound=20cleanup=20=F0=9F=90=B6?= =?UTF-8?q?=F0=9F=A7=BD=F0=9F=A7=BA=20III.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/__init__.py | 1 - openpype/hosts/houdini/plugins/load/load_camera.py | 2 +- openpype/hosts/houdini/plugins/load/load_vdb.py | 7 +++---- .../houdini/plugins/publish/collect_active_state.py | 5 +++-- .../plugins/publish/collect_render_products.py | 4 +--- .../houdini/plugins/publish/collect_usd_bootstrap.py | 3 --- .../houdini/plugins/publish/collect_usd_layers.py | 7 +------ .../houdini/plugins/publish/extract_usd_layered.py | 12 ++++++++---- .../publish/submit_houdini_remote_publish.py} | 1 - .../publish/submit_houdini_render_deadline.py | 0 10 files changed, 17 insertions(+), 25 deletions(-) rename openpype/{hosts/houdini/plugins/publish/submit_remote_publish.py => modules/deadline/plugins/publish/submit_houdini_remote_publish.py} (99%) rename openpype/{hosts/houdini => modules/deadline}/plugins/publish/submit_houdini_render_deadline.py (100%) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index bb43654fef..7328236b97 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -125,7 +125,6 @@ def _set_asset_fps(): def on_pyblish_instance_toggled(instance, new_value, old_value): """Toggle saver tool passthrough states on instance toggles.""" - @contextlib.contextmanager def main_take(no_update=True): """Enter root take during context""" diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py index 
65697eef63..83246b7d97 100644 --- a/openpype/hosts/houdini/plugins/load/load_camera.py +++ b/openpype/hosts/houdini/plugins/load/load_camera.py @@ -146,7 +146,7 @@ class CameraLoader(api.Loader): "representation": str(representation["_id"])}) # Store the cam temporarily next to the Alembic Archive - # so that we can preserve parm values the user set on it + # so that we can preserve parm values the user set on it # after build hierarchy was triggered. old_camera = self._get_camera(node) temp_camera = old_camera.copyTo(node.parent()) diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py index 576b459d7d..5f7e400b39 100644 --- a/openpype/hosts/houdini/plugins/load/load_vdb.py +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -2,13 +2,13 @@ import os import re from avalon import api -from avalon.houdini import pipeline, lib +from avalon.houdini import pipeline class VdbLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" - families = ["colorbleed.vdbcache"] + families = ["vdbcache"] label = "Load VDB" representations = ["vdb"] order = -10 @@ -55,8 +55,7 @@ class VdbLoader(api.Loader): ) def format_path(self, path): - """Format file path correctly for single vdb or vdb sequence""" - + """Format file path correctly for single vdb or vdb sequence.""" if not os.path.exists(path): raise RuntimeError("Path does not exist: %s" % path) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index 7b3637df88..1193f0cd19 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -30,8 +30,9 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): instance.data.update( { "active": active, - # temporarily translation of `active` to `publish` till issue has - # been resolved: 
https://github.com/pyblish/pyblish-base/issues/307 + # temporarily translation of `active` to `publish` till + # issue has been resolved: + # https://github.com/pyblish/pyblish-base/issues/307 "publish": active, } ) diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index e8a4a3dc3d..d7163b43c0 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -4,8 +4,6 @@ import os import hou import pxr.UsdRender -import avalon.io as io -import avalon.api as api import pyblish.api @@ -49,7 +47,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): label = "Collect Render Products" order = pyblish.api.CollectorOrder + 0.4 hosts = ["houdini"] - families = ["colorbleed.usdrender"] + families = ["usdrender"] def process(self, instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index 991354fc5a..66dfba64df 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,9 +1,6 @@ -import hou - import pyblish.api from avalon import io -from avalon.houdini import lib import openpype.lib.usdlib as usdlib diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index 581f029ac2..8be6ead1b1 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -1,12 +1,7 @@ import os import pyblish.api - -from avalon import io -from avalon.houdini import lib -import openpype.hosts.houdini.lib.usd as usdlib - -import hou +import openpype.hosts.houdini.api.usd as usdlib class CollectUsdLayers(pyblish.api.InstancePlugin): diff --git 
a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 06b48f3b43..890d420a73 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -44,12 +44,13 @@ class ExitStack(object): self.push(_exit_wrapper) def push(self, exit): - """Registers a callback with the standard __exit__ method signature + """Registers a callback with the standard __exit__ method signature. Can suppress exceptions the same way __exit__ methods can. Also accepts any object with an __exit__ method (registering a call to the method instead of the object itself) + """ # We use an unbound method rather than a bound method to follow # the standard lookup behaviour for special methods @@ -84,7 +85,8 @@ class ExitStack(object): If successful, also pushes its __exit__ method as a callback and returns the result of the __enter__ method. """ - # We look up the special methods on the type to match the with statement + # We look up the special methods on the type to match the with + # statement _cm_type = type(cm) _exit = _cm_type.__exit__ result = _cm_type.__enter__(cm) @@ -174,7 +176,8 @@ class ExtractUSDLayered(openpype.api.Extractor): node.type().name() in {"usd", "usd_rop"} for node in ropnodes ) - # Main ROP node, either a USD Rop or ROP network with multiple USD ROPs + # Main ROP node, either a USD Rop or ROP network with + # multiple USD ROPs node = instance[0] # Collect any output dependencies that have not been processed yet @@ -288,7 +291,8 @@ class ExtractUSDLayered(openpype.api.Extractor): return False version = io.find_one( - {"type": "version", "parent": subset["_id"], }, sort=[("name", -1)] + {"type": "version", "parent": subset["_id"], }, + sort=[("name", -1)] ) if not version: self.log.debug("No existing version..") diff --git a/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py 
b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py similarity index 99% rename from openpype/hosts/houdini/plugins/publish/submit_remote_publish.py rename to openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index 77666921c8..9ada437716 100644 --- a/openpype/hosts/houdini/plugins/publish/submit_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -1,6 +1,5 @@ import os import json -import getpass import hou diff --git a/openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py similarity index 100% rename from openpype/hosts/houdini/plugins/publish/submit_houdini_render_deadline.py rename to openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py From 64f9f43f5ed8ea96383d0ad866c92f9738108807 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 16:02:31 +0200 Subject: [PATCH 022/308] =?UTF-8?q?hound=20cleanup=20=F0=9F=90=B6?= =?UTF-8?q?=F0=9F=A7=BD=F0=9F=A7=BA=20IV.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/usd.py | 33 ++++++++++--------- .../plugins/publish/extract_usd_layered.py | 2 +- .../publish/validate_usd_shade_workspace.py | 2 -- 3 files changed, 18 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index 48b97bb250..850ffb60e5 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -42,6 +42,7 @@ def pick_asset(node): name = parm.eval() if name: from avalon import io + db_asset = io.find_one({"name": name, "type": "asset"}) if db_asset: silo = db_asset.get("silo") @@ -74,11 +75,13 @@ def add_usd_output_processor(ropnode, processor): import loputils - loputils.handleOutputProcessorAdd({ - "node": ropnode, - "parm": ropnode.parm("outputprocessors"), - 
"script_value": processor - }) + loputils.handleOutputProcessorAdd( + { + "node": ropnode, + "parm": ropnode.parm("outputprocessors"), + "script_value": processor, + } + ) def remove_usd_output_processor(ropnode, processor): @@ -94,19 +97,16 @@ def remove_usd_output_processor(ropnode, processor): parm = ropnode.parm(processor + "_remove") if not parm: - raise RuntimeError("Output Processor %s does not " - "exist on %s" % (processor, ropnode.name())) + raise RuntimeError( + "Output Processor %s does not " + "exist on %s" % (processor, ropnode.name()) + ) - loputils.handleOutputProcessorRemove({ - "node": ropnode, - "parm": parm - }) + loputils.handleOutputProcessorRemove({"node": ropnode, "parm": parm}) @contextlib.contextmanager -def outputprocessors(ropnode, - processors=tuple(), - disable_all_others=True): +def outputprocessors(ropnode, processors=tuple(), disable_all_others=True): """Context manager to temporarily add Output Processors to USD ROP node. Args: @@ -240,8 +240,9 @@ def get_configured_save_layers(usd_rop): lop_node = get_usd_rop_loppath(usd_rop) stage = lop_node.stage(apply_viewport_overrides=False) if not stage: - raise RuntimeError("No valid USD stage for ROP node: " - "%s" % usd_rop.path()) + raise RuntimeError( + "No valid USD stage for ROP node: " "%s" % usd_rop.path() + ) root_layer = stage.GetRootLayer() diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 890d420a73..b9741c50ca 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -122,7 +122,7 @@ class ExitStack(object): if cb(*exc_details): suppressed_exc = True exc_details = (None, None, None) - except: + except Exception: new_exc_details = sys.exc_info() # simulate the stack of exceptions by setting the context _fix_exception_context(new_exc_details[1], exc_details[1]) diff --git 
a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 0fd1cf1fad..a77ca2f3cb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,3 @@ -import re - import pyblish.api import openpype.api From 5992c5f950187424a4ff3079b56e460b5d668ffd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 16:25:21 +0200 Subject: [PATCH 023/308] openpype conformation --- openpype/hosts/houdini/plugins/create/create_composite.py | 4 ++-- .../hosts/houdini/plugins/create/create_redshift_rop.py | 5 ++--- .../hosts/houdini/plugins/create/create_remote_publish.py | 7 +++---- openpype/hosts/houdini/plugins/create/create_usd.py | 4 ++-- openpype/hosts/houdini/plugins/create/create_usd_model.py | 5 ++--- .../hosts/houdini/plugins/create/create_usd_workspaces.py | 4 ++-- openpype/hosts/houdini/plugins/create/create_usdrender.py | 5 ++--- 7 files changed, 15 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 4f91fa2258..7293669bef 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,7 +1,7 @@ -from avalon import houdini +from openpype.hosts.houdini.api import plugin -class CreateCompositeSequence(houdini.Creator): +class CreateCompositeSequence(plugin.Creator): """Composite ROP to Image Sequence""" label = "Composite (Image Sequence)" diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 0babc17c6b..40d2ac58c7 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -1,9 +1,8 @@ import hou - 
-from avalon import houdini +from openpype.hosts.houdini.api import plugin -class CreateRedshiftROP(houdini.Creator): +class CreateRedshiftROP(plugin.Creator): """Redshift ROP""" label = "Redshift ROP" diff --git a/openpype/hosts/houdini/plugins/create/create_remote_publish.py b/openpype/hosts/houdini/plugins/create/create_remote_publish.py index bc6cf5d949..b9782209cd 100644 --- a/openpype/hosts/houdini/plugins/create/create_remote_publish.py +++ b/openpype/hosts/houdini/plugins/create/create_remote_publish.py @@ -1,9 +1,8 @@ -from avalon import houdini - -from colorbleed.houdini import lib +from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import lib -class CreateRemotePublish(houdini.Creator): +class CreateRemotePublish(plugin.Creator): """Create Remote Publish Submission Settings node.""" label = "Remote Publish" diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 5ca8875dc0..642612f465 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -1,7 +1,7 @@ -from avalon import houdini +from openpype.hosts.houdini.api import plugin -class CreateUSD(houdini.Creator): +class CreateUSD(plugin.Creator): """Universal Scene Description""" label = "USD" diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py index 96563f2f91..5276211f2c 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_model.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_model.py @@ -1,10 +1,9 @@ import re - -from avalon import api +from openpype.hosts.houdini.api import plugin import hou -class CreateUSDModel(api.Creator): +class CreateUSDModel(plugin.Creator): """Author USD Model""" label = "USD Model" diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py 
b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py index a2309122e4..fc8ef5c810 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py @@ -1,8 +1,8 @@ -from avalon import api +from openpype.hosts.houdini.api import plugin import hou -class _USDWorkspace(api.Creator): +class _USDWorkspace(plugin.Creator): """Base class to create pre-built USD Workspaces""" node_name = None diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index 1c488f381b..34e1a9cc54 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -1,9 +1,8 @@ import hou - -from avalon import houdini +from openpype.hosts.houdini.api import plugin -class CreateUSDRender(houdini.Creator): +class CreateUSDRender(plugin.Creator): """USD Render ROP in /stage""" label = "USD Render" From 493b9cb756e0faa70cc8829488e2646d0e684aed Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Jul 2021 16:31:18 +0200 Subject: [PATCH 024/308] =?UTF-8?q?yet=20another=20hound=20=F0=9F=A6=AE?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/lib/usdlib.py | 153 +++++++++++++++++++++-------------------- 1 file changed, 78 insertions(+), 75 deletions(-) diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index cc036a9491..3ae7430c7b 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -16,20 +16,20 @@ log = logging.getLogger(__name__) # The predefined steps order used for bootstrapping USD Shots and Assets. # These are ordered in order from strongest to weakest opinions, like in USD. 
PIPELINE = { - "shot": ["usdLighting", - "usdFx", - "usdSimulation", - "usdAnimation", - "usdLayout"], - "asset": ["usdShade", - "usdModel"] + "shot": [ + "usdLighting", + "usdFx", + "usdSimulation", + "usdAnimation", + "usdLayout", + ], + "asset": ["usdShade", "usdModel"], } -def create_asset(filepath, - asset_name, - reference_layers, - kind=Kind.Tokens.component): +def create_asset( + filepath, asset_name, reference_layers, kind=Kind.Tokens.component +): """ Creates an asset file that consists of a top level layer and sublayers for shading and geometry. @@ -49,11 +49,11 @@ def create_asset(filepath, log.info("Creating asset at %s", filepath) # Make the layer ascii - good for readability, plus the file is small - root_layer = Sdf.Layer.CreateNew(filepath, args={'format': 'usda'}) + root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"}) stage = Usd.Stage.Open(root_layer) # Define a prim for the asset and make it the default for the stage. - asset_prim = UsdGeom.Xform.Define(stage, '/%s' % asset_name).GetPrim() + asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim() stage.SetDefaultPrim(asset_prim) # Let viewing applications know how to orient a free camera properly @@ -67,7 +67,7 @@ def create_asset(filepath, model.SetKind(kind) model.SetAssetName(asset_name) - model.SetAssetIdentifier('%s/%s.usd' % (asset_name, asset_name)) + model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name)) # Add references to the asset prim references = asset_prim.GetReferences() @@ -135,20 +135,23 @@ def create_model(filename, asset, variant_subsets): # Strip off `usdModel_` variant = subset[len(prefix):] else: - raise ValueError("Model subsets must start " - "with usdModel: %s" % subset) + raise ValueError( + "Model subsets must start " "with usdModel: %s" % subset + ) - path = get_usd_master_path(asset=asset_doc, - subset=subset, - representation="usd") + path = get_usd_master_path( + asset=asset_doc, subset=subset, representation="usd" + ) 
variants.append((variant, path)) - stage = _create_variants_file(filename, - variants=variants, - variantset="model", - variant_prim="/root", - reference_prim="/root/geo", - as_payload=True) + stage = _create_variants_file( + filename, + variants=variants, + variantset="model", + variant_prim="/root", + reference_prim="/root/geo", + as_payload=True, + ) UsdGeom.SetStageMetersPerUnit(stage, 1) UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y) @@ -183,27 +186,24 @@ def create_shade(filename, asset, variant_subsets): # Strip off `usdModel_` variant = subset[len(prefix):] else: - raise ValueError("Model subsets must start " - "with usdModel: %s" % subset) + raise ValueError( + "Model subsets must start " "with usdModel: %s" % subset + ) shade_subset = re.sub("^usdModel", "usdShade", subset) - path = get_usd_master_path(asset=asset_doc, - subset=shade_subset, - representation="usd") + path = get_usd_master_path( + asset=asset_doc, subset=shade_subset, representation="usd" + ) variants.append((variant, path)) - stage = _create_variants_file(filename, - variants=variants, - variantset="model", - variant_prim="/root") + stage = _create_variants_file( + filename, variants=variants, variantset="model", variant_prim="/root" + ) stage.GetRootLayer().Save() -def create_shade_variation(filename, - asset, - model_variant, - shade_variants): +def create_shade_variation(filename, asset, model_variant, shade_variants): """Create the master Shade file for a specific model variant. This should reference all shade variants for the specific model variant. 
@@ -215,32 +215,34 @@ def create_shade_variation(filename, variants = [] for variant in shade_variants: - subset = "usdShade_{model}_{shade}".format(model=model_variant, - shade=variant) - path = get_usd_master_path(asset=asset_doc, - subset=subset, - representation="usd") + subset = "usdShade_{model}_{shade}".format( + model=model_variant, shade=variant + ) + path = get_usd_master_path( + asset=asset_doc, subset=subset, representation="usd" + ) variants.append((variant, path)) - stage = _create_variants_file(filename, - variants=variants, - variantset="shade", - variant_prim="/root") + stage = _create_variants_file( + filename, variants=variants, variantset="shade", variant_prim="/root" + ) stage.GetRootLayer().Save() -def _create_variants_file(filename, - variants, - variantset, - default_variant=None, - variant_prim="/root", - reference_prim=None, - set_default_variant=True, - as_payload=False, - skip_variant_on_single_file=True): +def _create_variants_file( + filename, + variants, + variantset, + default_variant=None, + variant_prim="/root", + reference_prim=None, + set_default_variant=True, + as_payload=False, + skip_variant_on_single_file=True, +): - root_layer = Sdf.Layer.CreateNew(filename, args={'format': 'usda'}) + root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"}) stage = Usd.Stage.Open(root_layer) root_prim = stage.DefinePrim(variant_prim) @@ -276,8 +278,9 @@ def _create_variants_file(filename, else: # Variants append = Usd.ListPositionBackOfAppendList - variant_set = root_prim.GetVariantSets().AddVariantSet(variantset, - append) + variant_set = root_prim.GetVariantSets().AddVariantSet( + variantset, append + ) for variant, variant_path in variants: @@ -299,9 +302,7 @@ def _create_variants_file(filename, return stage -def get_usd_master_path(asset, - subset, - representation): +def get_usd_master_path(asset, subset, representation): """Get the filepath for a .usd file of a subset. 
This will return the path to an unversioned master file generated by @@ -309,26 +310,28 @@ def get_usd_master_path(asset, """ - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = io.find_one( + {"type": "project"}, projection={"config.template.publish": True} + ) template = project["config"]["template"]["publish"] if isinstance(asset, dict) and "silo" in asset and "name" in asset: # Allow explicitly passing asset document asset_doc = asset else: - asset_doc = io.find_one({"name": asset, - "type": "asset"}) + asset_doc = io.find_one({"name": asset, "type": "asset"}) - path = template.format(**{ - "root": api.registered_root(), - "project": api.Session["AVALON_PROJECT"], - "silo": asset_doc["silo"], - "asset": asset_doc["name"], - "subset": subset, - "representation": representation, - "version": 0 # stub version zero - }) + path = template.format( + **{ + "root": api.registered_root(), + "project": api.Session["AVALON_PROJECT"], + "silo": asset_doc["silo"], + "asset": asset_doc["name"], + "subset": subset, + "representation": representation, + "version": 0, # stub version zero + } + ) # Remove the version folder subset_folder = os.path.dirname(os.path.dirname(path)) From b6c25f987c8aa591d1b79d18705e7da11ab49723 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 20 Jul 2021 16:02:23 +0200 Subject: [PATCH 025/308] separated collection from initialization of modules --- openpype/modules/base.py | 40 +++++++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c7efbd5ab3..91fdd49724 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -352,21 +352,16 @@ class ModulesManager: # For report of time consumption self._report = {} + self._raw_modules = None + self.initialize_modules() self.connect_modules() - def initialize_modules(self): - """Import and initialize modules.""" - self.log.debug("*** Pype 
modules initialization.") - # Prepare settings for modules - system_settings = getattr(self, "_system_settings", None) - if system_settings is None: - system_settings = get_system_settings() - modules_settings = system_settings["modules"] + def collect_modules(self): + if self._raw_modules is not None: + return - report = {} - time_start = time.time() - prev_start_time = time_start + self._raw_modules = [] # Go through globals in `pype.modules` for name in dir(openpype.modules): @@ -394,7 +389,27 @@ class ModulesManager: ).format(name, ", ".join(not_implemented))) continue + self._raw_modules.append(modules_item) + + def initialize_modules(self): + """Import and initialize modules.""" + self.collect_modules() + + self.log.debug("*** Pype modules initialization.") + # Prepare settings for modules + system_settings = getattr(self, "_system_settings", None) + if system_settings is None: + system_settings = get_system_settings() + modules_settings = system_settings["modules"] + + report = {} + time_start = time.time() + prev_start_time = time_start + + # Go through globals in `pype.modules` + for modules_item in self._raw_modules: try: + name = modules_item.__name__ # Try initialize module module = modules_item(self, modules_settings) # Store initialized object @@ -711,6 +726,9 @@ class TrayModulesManager(ModulesManager): self.modules_by_id = {} self.modules_by_name = {} self._report = {} + + self._raw_modules = None + self.tray_manager = None self.doubleclick_callbacks = {} From e4c4e1072c28b50d55185df9f85b67cba9937d76 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 22 Jul 2021 21:29:40 +0200 Subject: [PATCH 026/308] Webpublisher backend - added new command for webserver WIP webserver_cli --- openpype/cli.py | 9 + openpype/modules/webserver/webserver_cli.py | 217 ++++++++++++++++++ .../modules/webserver/webserver_module.py | 6 +- openpype/pype_commands.py | 7 + 4 files changed, 236 insertions(+), 3 deletions(-) create mode 100644 
openpype/modules/webserver/webserver_cli.py diff --git a/openpype/cli.py b/openpype/cli.py index ec5b04c468..1065152adb 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -94,6 +94,15 @@ def eventserver(debug, ) +@main.command() +@click.option("-d", "--debug", is_flag=True, help="Print debug messages") +def webpublisherwebserver(debug): + if debug: + os.environ['OPENPYPE_DEBUG'] = "3" + + PypeCommands().launch_webpublisher_webservercli() + + @main.command() @click.argument("output_json_path") @click.option("--project", help="Project name", default=None) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py new file mode 100644 index 0000000000..f3f2fc73d1 --- /dev/null +++ b/openpype/modules/webserver/webserver_cli.py @@ -0,0 +1,217 @@ +import attr +import time +import json +import datetime +from bson.objectid import ObjectId +import collections +from aiohttp.web_response import Response + +from avalon.api import AvalonMongoDB +from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint + +from openpype.api import get_hierarchy + + +class WebpublisherProjectsEndpoint(_RestApiEndpoint): + async def get(self) -> Response: + output = [] + for project_name in self.dbcon.database.collection_names(): + project_doc = self.dbcon.database[project_name].find_one({ + "type": "project" + }) + if project_doc: + ret_val = { + "id": project_doc["_id"], + "name": project_doc["name"] + } + output.append(ret_val) + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +@attr.s +class AssetItem(object): + """Data class for Render Layer metadata.""" + id = attr.ib() + name = attr.ib() + + # Render Products + children = attr.ib(init=False, default=attr.Factory(list)) + + +class WebpublisherHiearchyEndpoint(_RestApiEndpoint): + async def get(self, project_name) -> Response: + output = [] + query_projection = { + "_id": 1, + "data.tasks": 1, + "data.visualParent": 1, + 
"name": 1, + "type": 1, + } + + asset_docs = self.dbcon.database[project_name].find( + {"type": "asset"}, + query_projection + ) + asset_docs_by_id = { + asset_doc["_id"]: asset_doc + for asset_doc in asset_docs + } + + asset_ids = list(asset_docs_by_id.keys()) + result = [] + if asset_ids: + result = self.dbcon.database[project_name].aggregate([ + { + "$match": { + "type": "subset", + "parent": {"$in": asset_ids} + } + }, + { + "$group": { + "_id": "$parent", + "count": {"$sum": 1} + } + } + ]) + + asset_docs_by_parent_id = collections.defaultdict(list) + for asset_doc in asset_docs_by_id.values(): + parent_id = asset_doc["data"].get("visualParent") + asset_docs_by_parent_id[parent_id].append(asset_doc) + + appending_queue = collections.deque() + appending_queue.append((None, "root")) + + asset_items_by_id = {} + non_modifiable_items = set() + assets = {} + + # # # while appending_queue: + # # assets = self._recur_hiearchy(asset_docs_by_parent_id, + # # appending_queue, + # # assets, None) + # while asset_docs_by_parent_id: + # for parent_id, asset_docs in asset_items_by_id.items(): + # asset_docs = asset_docs_by_parent_id.get(parent_id) or [] + + while appending_queue: + parent_id, parent_item_name = appending_queue.popleft() + + asset_docs = asset_docs_by_parent_id.get(parent_id) or [] + + asset_item = assets.get(parent_id) + if not asset_item: + asset_item = AssetItem(str(parent_id), parent_item_name) + + for asset_doc in sorted(asset_docs, key=lambda item: item["name"]): + child_item = AssetItem(str(asset_doc["_id"]), + asset_doc["name"]) + asset_item.children.append(child_item) + if not asset_doc["data"]["tasks"]: + appending_queue.append((asset_doc["_id"], + child_item.name)) + + else: + asset_item = child_item + for task_name, _ in asset_doc["data"]["tasks"].items(): + child_item = AssetItem(str(asset_doc["_id"]), + task_name) + asset_item.children.append(child_item) + assets[parent_id] = attr.asdict(asset_item) + + + return Response( + status=200, + 
body=self.resource.encode(assets), + content_type="application/json" + ) + + def _recur_hiearchy(self, asset_docs_by_parent_id, + appending_queue, assets, asset_item): + parent_id, parent_item_name = appending_queue.popleft() + + asset_docs = asset_docs_by_parent_id.get(parent_id) or [] + + if not asset_item: + asset_item = assets.get(parent_id) + if not asset_item: + asset_item = AssetItem(str(parent_id), parent_item_name) + + for asset_doc in sorted(asset_docs, key=lambda item: item["name"]): + child_item = AssetItem(str(asset_doc["_id"]), + asset_doc["name"]) + asset_item.children.append(child_item) + if not asset_doc["data"]["tasks"]: + appending_queue.append((asset_doc["_id"], + child_item.name)) + asset_item = child_item + assets = self._recur_hiearchy(asset_docs_by_parent_id, appending_queue, + assets, asset_item) + else: + asset_item = child_item + for task_name, _ in asset_doc["data"]["tasks"].items(): + child_item = AssetItem(str(asset_doc["_id"]), + task_name) + asset_item.children.append(child_item) + assets[asset_item.id] = attr.asdict(asset_item) + + return assets + +class RestApiResource: + def __init__(self, server_manager): + self.server_manager = server_manager + + self.dbcon = AvalonMongoDB() + self.dbcon.install() + + @staticmethod + def json_dump_handler(value): + print("valuetype:: {}".format(type(value))) + if isinstance(value, datetime.datetime): + return value.isoformat() + if isinstance(value, ObjectId): + return str(value) + raise TypeError(value) + + @classmethod + def encode(cls, data): + return json.dumps( + data, + indent=4, + default=cls.json_dump_handler + ).encode("utf-8") + + +def run_webserver(): + print("webserver") + from openpype.modules import ModulesManager + + manager = ModulesManager() + webserver_module = manager.modules_by_name["webserver"] + webserver_module.create_server_manager() + + resource = RestApiResource(webserver_module.server_manager) + projects_endpoint = WebpublisherProjectsEndpoint(resource) + 
webserver_module.server_manager.add_route( + "GET", + "/webpublisher/projects", + projects_endpoint.dispatch + ) + + hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) + webserver_module.server_manager.add_route( + "GET", + "/webpublisher/hiearchy/{project_name}", + hiearchy_endpoint.dispatch + ) + + webserver_module.start_server() + while True: + time.sleep(0.5) + diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index b61619acde..4832038575 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -50,10 +50,8 @@ class WebServerModule(PypeModule, ITrayService): static_prefix = "/res" self.server_manager.add_static(static_prefix, resources.RESOURCES_DIR) - webserver_url = "http://localhost:{}".format(self.port) - os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url os.environ["OPENPYPE_STATICS_SERVER"] = "{}{}".format( - webserver_url, static_prefix + os.environ["OPENPYPE_WEBSERVER_URL"], static_prefix ) def _add_listeners(self): @@ -81,6 +79,8 @@ class WebServerModule(PypeModule, ITrayService): self.server_manager.on_stop_callbacks.append( self.set_service_failed_icon ) + webserver_url = "http://localhost:{}".format(self.port) + os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url @staticmethod def find_free_port( diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 7c47d8c613..6ccf10e8ce 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -40,6 +40,13 @@ class PypeCommands: ) return run_event_server(*args) + @staticmethod + def launch_webpublisher_webservercli(*args): + from openpype.modules.webserver.webserver_cli import ( + run_webserver + ) + return run_webserver(*args) + @staticmethod def launch_standalone_publisher(): from openpype.tools import standalonepublish From e4cc3033057c4e33a9a535efd79eb7c74d196f12 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Sun, 25 Jul 2021 15:50:47 +0200 
Subject: [PATCH 027/308] Webpublisher backend - implemented context endopoint --- openpype/modules/webserver/webserver_cli.py | 153 ++++++++------------ 1 file changed, 59 insertions(+), 94 deletions(-) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index f3f2fc73d1..3ebbc86358 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -13,6 +13,7 @@ from openpype.api import get_hierarchy class WebpublisherProjectsEndpoint(_RestApiEndpoint): + """Returns list of project names.""" async def get(self) -> Response: output = [] for project_name in self.dbcon.database.collection_names(): @@ -32,23 +33,44 @@ class WebpublisherProjectsEndpoint(_RestApiEndpoint): ) -@attr.s -class AssetItem(object): - """Data class for Render Layer metadata.""" - id = attr.ib() - name = attr.ib() +class Node(dict): + """Node element in context tree.""" - # Render Products - children = attr.ib(init=False, default=attr.Factory(list)) + def __init__(self, uid, node_type, name): + self._parent = None # pointer to parent Node + self["type"] = node_type + self["name"] = name + self['id'] = uid # keep reference to id # + self['children'] = [] # collection of pointers to child Nodes + + @property + def parent(self): + return self._parent # simply return the object at the _parent pointer + + @parent.setter + def parent(self, node): + self._parent = node + # add this node to parent's list of children + node['children'].append(self) + + +class TaskNode(Node): + """Special node type only for Tasks.""" + def __init__(self, node_type, name): + self._parent = None + self["type"] = node_type + self["name"] = name + self["attributes"] = {} class WebpublisherHiearchyEndpoint(_RestApiEndpoint): + """Returns dictionary with context tree from assets.""" async def get(self, project_name) -> Response: - output = [] query_projection = { "_id": 1, "data.tasks": 1, "data.visualParent": 1, + "data.entityType": 
1, "name": 1, "type": 1, } @@ -62,106 +84,51 @@ class WebpublisherHiearchyEndpoint(_RestApiEndpoint): for asset_doc in asset_docs } - asset_ids = list(asset_docs_by_id.keys()) - result = [] - if asset_ids: - result = self.dbcon.database[project_name].aggregate([ - { - "$match": { - "type": "subset", - "parent": {"$in": asset_ids} - } - }, - { - "$group": { - "_id": "$parent", - "count": {"$sum": 1} - } - } - ]) - asset_docs_by_parent_id = collections.defaultdict(list) for asset_doc in asset_docs_by_id.values(): parent_id = asset_doc["data"].get("visualParent") asset_docs_by_parent_id[parent_id].append(asset_doc) - appending_queue = collections.deque() - appending_queue.append((None, "root")) + assets = collections.defaultdict(list) - asset_items_by_id = {} - non_modifiable_items = set() - assets = {} + for parent_id, children in asset_docs_by_parent_id.items(): + for child in children: + node = assets.get(child["_id"]) + if not node: + node = Node(child["_id"], + child["data"]["entityType"], + child["name"]) + assets[child["_id"]] = node - # # # while appending_queue: - # # assets = self._recur_hiearchy(asset_docs_by_parent_id, - # # appending_queue, - # # assets, None) - # while asset_docs_by_parent_id: - # for parent_id, asset_docs in asset_items_by_id.items(): - # asset_docs = asset_docs_by_parent_id.get(parent_id) or [] + tasks = child["data"].get("tasks", {}) + for t_name, t_con in tasks.items(): + task_node = TaskNode("task", t_name) + task_node["attributes"]["type"] = t_con.get("type") - while appending_queue: - parent_id, parent_item_name = appending_queue.popleft() + task_node.parent = node - asset_docs = asset_docs_by_parent_id.get(parent_id) or [] - - asset_item = assets.get(parent_id) - if not asset_item: - asset_item = AssetItem(str(parent_id), parent_item_name) - - for asset_doc in sorted(asset_docs, key=lambda item: item["name"]): - child_item = AssetItem(str(asset_doc["_id"]), - asset_doc["name"]) - asset_item.children.append(child_item) - if not 
asset_doc["data"]["tasks"]: - appending_queue.append((asset_doc["_id"], - child_item.name)) - - else: - asset_item = child_item - for task_name, _ in asset_doc["data"]["tasks"].items(): - child_item = AssetItem(str(asset_doc["_id"]), - task_name) - asset_item.children.append(child_item) - assets[parent_id] = attr.asdict(asset_item) + parent_node = assets.get(parent_id) + if not parent_node: + asset_doc = asset_docs_by_id.get(parent_id) + if asset_doc: # regular node + parent_node = Node(parent_id, + asset_doc["data"]["entityType"], + asset_doc["name"]) + else: # root + parent_node = Node(parent_id, + "project", + project_name) + assets[parent_id] = parent_node + node.parent = parent_node + roots = [x for x in assets.values() if x.parent is None] return Response( status=200, - body=self.resource.encode(assets), + body=self.resource.encode(roots[0]), content_type="application/json" ) - def _recur_hiearchy(self, asset_docs_by_parent_id, - appending_queue, assets, asset_item): - parent_id, parent_item_name = appending_queue.popleft() - - asset_docs = asset_docs_by_parent_id.get(parent_id) or [] - - if not asset_item: - asset_item = assets.get(parent_id) - if not asset_item: - asset_item = AssetItem(str(parent_id), parent_item_name) - - for asset_doc in sorted(asset_docs, key=lambda item: item["name"]): - child_item = AssetItem(str(asset_doc["_id"]), - asset_doc["name"]) - asset_item.children.append(child_item) - if not asset_doc["data"]["tasks"]: - appending_queue.append((asset_doc["_id"], - child_item.name)) - asset_item = child_item - assets = self._recur_hiearchy(asset_docs_by_parent_id, appending_queue, - assets, asset_item) - else: - asset_item = child_item - for task_name, _ in asset_doc["data"]["tasks"].items(): - child_item = AssetItem(str(asset_doc["_id"]), - task_name) - asset_item.children.append(child_item) - assets[asset_item.id] = attr.asdict(asset_item) - - return assets class RestApiResource: def __init__(self, server_manager): @@ -172,7 +139,6 @@ class 
RestApiResource: @staticmethod def json_dump_handler(value): - print("valuetype:: {}".format(type(value))) if isinstance(value, datetime.datetime): return value.isoformat() if isinstance(value, ObjectId): @@ -189,7 +155,6 @@ class RestApiResource: def run_webserver(): - print("webserver") from openpype.modules import ModulesManager manager = ModulesManager() From 6c32a8e6a36d11e1988933be3ed2d1c4a7c2e51e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 26 Jul 2021 10:15:23 +0200 Subject: [PATCH 028/308] Webpublisher backend - changed uri to api --- openpype/modules/webserver/webserver_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 3ebbc86358..17b98cc1af 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -165,14 +165,14 @@ def run_webserver(): projects_endpoint = WebpublisherProjectsEndpoint(resource) webserver_module.server_manager.add_route( "GET", - "/webpublisher/projects", + "/api/projects", projects_endpoint.dispatch ) hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) webserver_module.server_manager.add_route( "GET", - "/webpublisher/hiearchy/{project_name}", + "/api/hiearchy/{project_name}", hiearchy_endpoint.dispatch ) From 622ff2a797bcf9b5954f5b0f80ad06482576521a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 26 Jul 2021 11:25:52 +0200 Subject: [PATCH 029/308] Webpublisher backend - changed uri to api --- openpype/modules/webserver/webserver_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 17b98cc1af..b6317a5675 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -172,7 +172,7 @@ def run_webserver(): hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) 
webserver_module.server_manager.add_route( "GET", - "/api/hiearchy/{project_name}", + "/api/hierarchy/{project_name}", hiearchy_endpoint.dispatch ) From f622e32fcdf20477c68ab783ee5ecc7d376a17dd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 17:50:25 +0200 Subject: [PATCH 030/308] added base class of OpenPypeAddOn --- openpype/modules/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 91fdd49724..87d6c4cbbc 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -86,6 +86,10 @@ class IPluginPaths: pass +class OpenPypeAddOn(PypeModule): + pass + + @six.add_metaclass(ABCMeta) class ILaunchHookPaths: """Module has launch hook paths to return. From e6e7ee6867f401b0851f371e3ddd78e6e1fe3853 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 17:51:17 +0200 Subject: [PATCH 031/308] use callback directly --- openpype/modules/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 87d6c4cbbc..dd144075e1 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -149,12 +149,12 @@ class ITrayModule: Some callbacks need to be processed on main thread (menu actions must be added on main thread or they won't get triggered etc.) 
""" - # called without initialized tray, still main thread needed if not self.tray_initialized: + # TODO Called without initialized tray, still main thread needed try: - callback = self._main_thread_callbacks.popleft() callback() - except: + + except Exception: self.log.warning( "Failed to execute {} in main thread".format(callback), exc_info=True) From 26e8f9250636185730294580d0b7d5125929bfc4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 17:51:46 +0200 Subject: [PATCH 032/308] idea of modules and addons import --- openpype/modules/modules_import.py | 84 ++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 openpype/modules/modules_import.py diff --git a/openpype/modules/modules_import.py b/openpype/modules/modules_import.py new file mode 100644 index 0000000000..24441c6d20 --- /dev/null +++ b/openpype/modules/modules_import.py @@ -0,0 +1,84 @@ +import sys +import six + + +class __ModuleClass: + __attributes__ = {} + __defaults__ = set() + + def __getattr__(self, attr_name): + return self.__attributes__.get( + attr_name, + type("Missing.{}".format(attr_name), (), {}) + ) + + def __setattr__(self, attr_name, value): + self.__attributes__[attr_name] = value + + def keys(self): + return self.__attributes__.keys() + + def values(self): + return self.__attributes__.values() + + def items(self): + return self.__attributes__.items() + + +def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + full_module_name = "{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import imp + + dst_module = sys.modules[dst_module_name] + + fp, pathname, description = imp.find_module(module_name, [dirpath]) + module = imp.load_module(full_module_name, fp, pathname, description) + setattr(dst_module, module_name, module) + + return module + + +def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + full_module_name = 
"{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import importlib.util + from importlib._bootstrap_external import PathFinder + + dst_module = sys.modules[dst_module_name] + loader = PathFinder.find_module(full_module_name, [dirpath]) + + spec = importlib.util.spec_from_loader( + full_module_name, loader, origin=dirpath + ) + + module = importlib.util.module_from_spec(spec) + + if dst_module is not None: + setattr(dst_module, module_name, module) + + sys.modules[full_module_name] = module + + loader.exec_module(module) + + return module + + +def load_module_from_dirpath(dirpath, folder_name, dst_module_name): + if six.PY3: + module = _load_module_from_dirpath_py3( + dirpath, folder_name, dst_module_name + ) + else: + module = _load_module_from_dirpath_py2( + dirpath, folder_name, dst_module_name + ) + return module + + +sys.modules["openpype_modules"] = __ModuleClass() +sys.modules["openpype_interfaces"] = __ModuleClass() From 5ba787c274f43b6d91aac205ae97b8310224b88c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 18:08:18 +0200 Subject: [PATCH 033/308] defined OpenPypeInterface --- openpype/modules/__init__.py | 3 +++ openpype/modules/base.py | 10 ++++++++++ 2 files changed, 13 insertions(+) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index d6fb9c0aef..3ac11950ef 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from .base import ( PypeModule, + OpenPypeInterface, ITrayModule, ITrayAction, ITrayService, @@ -44,6 +45,8 @@ from .slack import SlackIntegrationModule __all__ = ( "PypeModule", + "OpenPypeInterface", + "ITrayModule", "ITrayAction", "ITrayService", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index dd144075e1..373e9c9422 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -14,6 +14,16 @@ from openpype.lib import PypeLogger from 
openpype import resources +@six.add_metaclass(ABCMeta) +class OpenPypeInterface: + """Base class of Interface that can be used as Mixin with abstract parts. + + This is way how OpenPype module or addon can tell that has implementation + for specific part or for other module/addon. + """ + pass + + @six.add_metaclass(ABCMeta) class PypeModule: """Base class of pype module. From f2b53133e0cafb223d35cc53866fcfde4c4815a0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 18:11:26 +0200 Subject: [PATCH 034/308] moved interfaces to interfaces --- openpype/modules/base.py | 261 -------------------------------- openpype/modules/interfaces.py | 267 +++++++++++++++++++++++++++++++++ 2 files changed, 267 insertions(+), 261 deletions(-) create mode 100644 openpype/modules/interfaces.py diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 373e9c9422..6e1d19589c 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -80,271 +80,10 @@ class PypeModule: return {} -@six.add_metaclass(ABCMeta) -class IPluginPaths: - """Module has plugin paths to return. - - Expected result is dictionary with keys "publish", "create", "load" or - "actions" and values as list or string. - { - "publish": ["path/to/publish_plugins"] - } - """ - # TODO validation of an output - @abstractmethod - def get_plugin_paths(self): - pass - - class OpenPypeAddOn(PypeModule): pass -@six.add_metaclass(ABCMeta) -class ILaunchHookPaths: - """Module has launch hook paths to return. - - Expected result is list of paths. - ["path/to/launch_hooks_dir"] - """ - - @abstractmethod - def get_launch_hook_paths(self): - pass - - -@six.add_metaclass(ABCMeta) -class ITrayModule: - """Module has special procedures when used in Pype Tray. - - IMPORTANT: - The module still must be usable if is not used in tray even if - would do nothing. - """ - tray_initialized = False - _tray_manager = None - - @abstractmethod - def tray_init(self): - """Initialization part of tray implementation. 
- - Triggered between `initialization` and `connect_with_modules`. - - This is where GUIs should be loaded or tray specific parts should be - prepared. - """ - pass - - @abstractmethod - def tray_menu(self, tray_menu): - """Add module's action to tray menu.""" - pass - - @abstractmethod - def tray_start(self): - """Start procedure in Pype tray.""" - pass - - @abstractmethod - def tray_exit(self): - """Cleanup method which is executed on tray shutdown. - - This is place where all threads should be shut. - """ - pass - - def execute_in_main_thread(self, callback): - """ Pushes callback to the queue or process 'callback' on a main thread - - Some callbacks need to be processed on main thread (menu actions - must be added on main thread or they won't get triggered etc.) - """ - if not self.tray_initialized: - # TODO Called without initialized tray, still main thread needed - try: - callback() - - except Exception: - self.log.warning( - "Failed to execute {} in main thread".format(callback), - exc_info=True) - - return - self.manager.tray_manager.execute_in_main_thread(callback) - - def show_tray_message(self, title, message, icon=None, msecs=None): - """Show tray message. - - Args: - title (str): Title of message. - message (str): Content of message. - icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is - Information icon, may differ by Qt version. - msecs (int): Duration of message visibility in miliseconds. - Default is 10000 msecs, may differ by Qt version. - """ - if self._tray_manager: - self._tray_manager.show_tray_message(title, message, icon, msecs) - - def add_doubleclick_callback(self, callback): - if hasattr(self.manager, "add_doubleclick_callback"): - self.manager.add_doubleclick_callback(self, callback) - - -class ITrayAction(ITrayModule): - """Implementation of Tray action. - - Add action to tray menu which will trigger `on_action_trigger`. - It is expected to be used for showing tools. 
- - Methods `tray_start`, `tray_exit` and `connect_with_modules` are overriden - as it's not expected that action will use them. But it is possible if - necessary. - """ - - admin_action = False - _admin_submenu = None - - @property - @abstractmethod - def label(self): - """Service label showed in menu.""" - pass - - @abstractmethod - def on_action_trigger(self): - """What happens on actions click.""" - pass - - def tray_menu(self, tray_menu): - from Qt import QtWidgets - - if self.admin_action: - menu = self.admin_submenu(tray_menu) - action = QtWidgets.QAction(self.label, menu) - menu.addAction(action) - if not menu.menuAction().isVisible(): - menu.menuAction().setVisible(True) - - else: - action = QtWidgets.QAction(self.label, tray_menu) - tray_menu.addAction(action) - - action.triggered.connect(self.on_action_trigger) - - def tray_start(self): - return - - def tray_exit(self): - return - - @staticmethod - def admin_submenu(tray_menu): - if ITrayAction._admin_submenu is None: - from Qt import QtWidgets - - admin_submenu = QtWidgets.QMenu("Admin", tray_menu) - admin_submenu.menuAction().setVisible(False) - ITrayAction._admin_submenu = admin_submenu - return ITrayAction._admin_submenu - - -class ITrayService(ITrayModule): - # Module's property - menu_action = None - - # Class properties - _services_submenu = None - _icon_failed = None - _icon_running = None - _icon_idle = None - - @property - @abstractmethod - def label(self): - """Service label showed in menu.""" - pass - - # TODO be able to get any sort of information to show/print - # @abstractmethod - # def get_service_info(self): - # pass - - @staticmethod - def services_submenu(tray_menu): - if ITrayService._services_submenu is None: - from Qt import QtWidgets - - services_submenu = QtWidgets.QMenu("Services", tray_menu) - services_submenu.menuAction().setVisible(False) - ITrayService._services_submenu = services_submenu - return ITrayService._services_submenu - - @staticmethod - def 
add_service_action(action): - ITrayService._services_submenu.addAction(action) - if not ITrayService._services_submenu.menuAction().isVisible(): - ITrayService._services_submenu.menuAction().setVisible(True) - - @staticmethod - def _load_service_icons(): - from Qt import QtGui - ITrayService._failed_icon = QtGui.QIcon( - resources.get_resource("icons", "circle_red.png") - ) - ITrayService._icon_running = QtGui.QIcon( - resources.get_resource("icons", "circle_green.png") - ) - ITrayService._icon_idle = QtGui.QIcon( - resources.get_resource("icons", "circle_orange.png") - ) - - @staticmethod - def get_icon_running(): - if ITrayService._icon_running is None: - ITrayService._load_service_icons() - return ITrayService._icon_running - - @staticmethod - def get_icon_idle(): - if ITrayService._icon_idle is None: - ITrayService._load_service_icons() - return ITrayService._icon_idle - - @staticmethod - def get_icon_failed(): - if ITrayService._failed_icon is None: - ITrayService._load_service_icons() - return ITrayService._failed_icon - - def tray_menu(self, tray_menu): - from Qt import QtWidgets - action = QtWidgets.QAction( - self.label, - self.services_submenu(tray_menu) - ) - self.menu_action = action - - self.add_service_action(action) - - self.set_service_running_icon() - - def set_service_running_icon(self): - """Change icon of an QAction to green circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_running()) - - def set_service_failed_icon(self): - """Change icon of an QAction to red circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_failed()) - - def set_service_idle_icon(self): - """Change icon of an QAction to orange circle.""" - if self.menu_action: - self.menu_action.setIcon(self.get_icon_idle()) - - class ModulesManager: """Manager of Pype modules helps to load and prepare them to work. 
diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py new file mode 100644 index 0000000000..6d51f1b828 --- /dev/null +++ b/openpype/modules/interfaces.py @@ -0,0 +1,267 @@ +from abc import abstractmethod + +from openpype import resources + +from .base import ( + OpenPypeInterface +) + + +class IPluginPaths(OpenPypeInterface): + """Module has plugin paths to return. + + Expected result is dictionary with keys "publish", "create", "load" or + "actions" and values as list or string. + { + "publish": ["path/to/publish_plugins"] + } + """ + # TODO validation of an output + @abstractmethod + def get_plugin_paths(self): + pass + + +class ILaunchHookPaths(OpenPypeInterface): + """Module has launch hook paths to return. + + Expected result is list of paths. + ["path/to/launch_hooks_dir"] + """ + + @abstractmethod + def get_launch_hook_paths(self): + pass + + +class ITrayModule(OpenPypeInterface): + """Module has special procedures when used in Pype Tray. + + IMPORTANT: + The module still must be usable if is not used in tray even if + would do nothing. + """ + tray_initialized = False + _tray_manager = None + + @abstractmethod + def tray_init(self): + """Initialization part of tray implementation. + + Triggered between `initialization` and `connect_with_modules`. + + This is where GUIs should be loaded or tray specific parts should be + prepared. + """ + pass + + @abstractmethod + def tray_menu(self, tray_menu): + """Add module's action to tray menu.""" + pass + + @abstractmethod + def tray_start(self): + """Start procedure in Pype tray.""" + pass + + @abstractmethod + def tray_exit(self): + """Cleanup method which is executed on tray shutdown. + + This is place where all threads should be shut. 
+ """ + pass + + def execute_in_main_thread(self, callback): + """ Pushes callback to the queue or process 'callback' on a main thread + + Some callbacks need to be processed on main thread (menu actions + must be added on main thread or they won't get triggered etc.) + """ + if not self.tray_initialized: + # TODO Called without initialized tray, still main thread needed + try: + callback() + + except Exception: + self.log.warning( + "Failed to execute {} in main thread".format(callback), + exc_info=True) + + return + self.manager.tray_manager.execute_in_main_thread(callback) + + def show_tray_message(self, title, message, icon=None, msecs=None): + """Show tray message. + + Args: + title (str): Title of message. + message (str): Content of message. + icon (QSystemTrayIcon.MessageIcon): Message's icon. Default is + Information icon, may differ by Qt version. + msecs (int): Duration of message visibility in miliseconds. + Default is 10000 msecs, may differ by Qt version. + """ + if self._tray_manager: + self._tray_manager.show_tray_message(title, message, icon, msecs) + + def add_doubleclick_callback(self, callback): + if hasattr(self.manager, "add_doubleclick_callback"): + self.manager.add_doubleclick_callback(self, callback) + + +class ITrayAction(ITrayModule): + """Implementation of Tray action. + + Add action to tray menu which will trigger `on_action_trigger`. + It is expected to be used for showing tools. + + Methods `tray_start`, `tray_exit` and `connect_with_modules` are overriden + as it's not expected that action will use them. But it is possible if + necessary. 
+ """ + + admin_action = False + _admin_submenu = None + + @property + @abstractmethod + def label(self): + """Service label showed in menu.""" + pass + + @abstractmethod + def on_action_trigger(self): + """What happens on actions click.""" + pass + + def tray_menu(self, tray_menu): + from Qt import QtWidgets + + if self.admin_action: + menu = self.admin_submenu(tray_menu) + action = QtWidgets.QAction(self.label, menu) + menu.addAction(action) + if not menu.menuAction().isVisible(): + menu.menuAction().setVisible(True) + + else: + action = QtWidgets.QAction(self.label, tray_menu) + tray_menu.addAction(action) + + action.triggered.connect(self.on_action_trigger) + + def tray_start(self): + return + + def tray_exit(self): + return + + @staticmethod + def admin_submenu(tray_menu): + if ITrayAction._admin_submenu is None: + from Qt import QtWidgets + + admin_submenu = QtWidgets.QMenu("Admin", tray_menu) + admin_submenu.menuAction().setVisible(False) + ITrayAction._admin_submenu = admin_submenu + return ITrayAction._admin_submenu + + +class ITrayService(ITrayModule): + # Module's property + menu_action = None + + # Class properties + _services_submenu = None + _icon_failed = None + _icon_running = None + _icon_idle = None + + @property + @abstractmethod + def label(self): + """Service label showed in menu.""" + pass + + # TODO be able to get any sort of information to show/print + # @abstractmethod + # def get_service_info(self): + # pass + + @staticmethod + def services_submenu(tray_menu): + if ITrayService._services_submenu is None: + from Qt import QtWidgets + + services_submenu = QtWidgets.QMenu("Services", tray_menu) + services_submenu.menuAction().setVisible(False) + ITrayService._services_submenu = services_submenu + return ITrayService._services_submenu + + @staticmethod + def add_service_action(action): + ITrayService._services_submenu.addAction(action) + if not ITrayService._services_submenu.menuAction().isVisible(): + 
ITrayService._services_submenu.menuAction().setVisible(True) + + @staticmethod + def _load_service_icons(): + from Qt import QtGui + + ITrayService._failed_icon = QtGui.QIcon( + resources.get_resource("icons", "circle_red.png") + ) + ITrayService._icon_running = QtGui.QIcon( + resources.get_resource("icons", "circle_green.png") + ) + ITrayService._icon_idle = QtGui.QIcon( + resources.get_resource("icons", "circle_orange.png") + ) + + @staticmethod + def get_icon_running(): + if ITrayService._icon_running is None: + ITrayService._load_service_icons() + return ITrayService._icon_running + + @staticmethod + def get_icon_idle(): + if ITrayService._icon_idle is None: + ITrayService._load_service_icons() + return ITrayService._icon_idle + + @staticmethod + def get_icon_failed(): + if ITrayService._failed_icon is None: + ITrayService._load_service_icons() + return ITrayService._failed_icon + + def tray_menu(self, tray_menu): + from Qt import QtWidgets + + action = QtWidgets.QAction( + self.label, + self.services_submenu(tray_menu) + ) + self.menu_action = action + + self.add_service_action(action) + + self.set_service_running_icon() + + def set_service_running_icon(self): + """Change icon of an QAction to green circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_running()) + + def set_service_failed_icon(self): + """Change icon of an QAction to red circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_failed()) + + def set_service_idle_icon(self): + """Change icon of an QAction to orange circle.""" + if self.menu_action: + self.menu_action.setIcon(self.get_icon_idle()) From 32a82b50f4386d6756c24bdf17f3e02f606dd5f1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 27 Jul 2021 18:47:15 +0200 Subject: [PATCH 035/308] Webpublisher - backend - added webpublisher host --- openpype/hosts/webpublisher/README.md | 6 + openpype/hosts/webpublisher/__init__.py | 0 .../plugins/collect_published_files.py | 159 ++++++++++++++++++ 3 files 
changed, 165 insertions(+) create mode 100644 openpype/hosts/webpublisher/README.md create mode 100644 openpype/hosts/webpublisher/__init__.py create mode 100644 openpype/hosts/webpublisher/plugins/collect_published_files.py diff --git a/openpype/hosts/webpublisher/README.md b/openpype/hosts/webpublisher/README.md new file mode 100644 index 0000000000..0826e44490 --- /dev/null +++ b/openpype/hosts/webpublisher/README.md @@ -0,0 +1,6 @@ +Webpublisher +------------- + +Plugins meant for processing of Webpublisher. + +Gets triggered by calling openpype.cli.remotepublish with appropriate arguments. \ No newline at end of file diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/webpublisher/plugins/collect_published_files.py b/openpype/hosts/webpublisher/plugins/collect_published_files.py new file mode 100644 index 0000000000..1cc0dfe83f --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/collect_published_files.py @@ -0,0 +1,159 @@ +"""Loads publishing context from json and continues in publish process. + +Requires: + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + +Provides: + context, instances -> All data from previous publishing process. +""" + +import os +import json + +import pyblish.api +from avalon import api + + +class CollectPublishedFiles(pyblish.api.ContextPlugin): + """ + This collector will try to find json files in provided + `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. + + """ + # must be really early, context values are only in json file + order = pyblish.api.CollectorOrder - 0.495 + label = "Collect rendered frames" + host = ["webpublisher"] + + _context = None + + def _load_json(self, path): + path = path.strip('\"') + assert os.path.isfile(path), ( + "Path to json file doesn't exist. 
\"{}\"".format(path) + ) + data = None + with open(path, "r") as json_file: + try: + data = json.load(json_file) + except Exception as exc: + self.log.error( + "Error loading json: " + "{} - Exception: {}".format(path, exc) + ) + return data + + def _fill_staging_dir(self, data_object, anatomy): + staging_dir = data_object.get("stagingDir") + if staging_dir: + data_object["stagingDir"] = anatomy.fill_root(staging_dir) + + def _process_path(self, data, anatomy): + # validate basic necessary data + data_err = "invalid json file - missing data" + required = ["asset", "user", "comment", + "job", "instances", "session", "version"] + assert all(elem in data.keys() for elem in required), data_err + + # set context by first json file + ctx = self._context.data + + ctx["asset"] = ctx.get("asset") or data.get("asset") + ctx["intent"] = ctx.get("intent") or data.get("intent") + ctx["comment"] = ctx.get("comment") or data.get("comment") + ctx["user"] = ctx.get("user") or data.get("user") + ctx["version"] = ctx.get("version") or data.get("version") + + # basic sanity check to see if we are working in same context + # if some other json file has different context, bail out. + ctx_err = "inconsistent contexts in json files - %s" + assert ctx.get("asset") == data.get("asset"), ctx_err % "asset" + assert ctx.get("intent") == data.get("intent"), ctx_err % "intent" + assert ctx.get("comment") == data.get("comment"), ctx_err % "comment" + assert ctx.get("user") == data.get("user"), ctx_err % "user" + assert ctx.get("version") == data.get("version"), ctx_err % "version" + + # ftrack credentials are passed as environment variables by Deadline + # to publish job, but Muster doesn't pass them. 
+ if data.get("ftrack") and not os.environ.get("FTRACK_API_USER"): + ftrack = data.get("ftrack") + os.environ["FTRACK_API_USER"] = ftrack["FTRACK_API_USER"] + os.environ["FTRACK_API_KEY"] = ftrack["FTRACK_API_KEY"] + os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"] + + # now we can just add instances from json file and we are done + for instance_data in data.get("instances"): + self.log.info(" - processing instance for {}".format( + instance_data.get("subset"))) + instance = self._context.create_instance( + instance_data.get("subset") + ) + self.log.info("Filling stagingDir...") + + self._fill_staging_dir(instance_data, anatomy) + instance.data.update(instance_data) + + # stash render job id for later validation + instance.data["render_job_id"] = data.get("job").get("_id") + + representations = [] + for repre_data in instance_data.get("representations") or []: + self._fill_staging_dir(repre_data, anatomy) + representations.append(repre_data) + + instance.data["representations"] = representations + + # add audio if in metadata data + if data.get("audio"): + instance.data.update({ + "audio": [{ + "filename": data.get("audio"), + "offset": 0 + }] + }) + self.log.info( + f"Adding audio to instance: {instance.data['audio']}") + + def process(self, context): + self._context = context + + assert os.environ.get("OPENPYPE_PUBLISH_DATA"), ( + "Missing `OPENPYPE_PUBLISH_DATA`") + paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) + + project_name = os.environ.get("AVALON_PROJECT") + if project_name is None: + raise AssertionError( + "Environment `AVALON_PROJECT` was not found." + "Could not set project `root` which may cause issues." 
+ ) + + # TODO root filling should happen after collect Anatomy + self.log.info("Getting root setting for project \"{}\"".format( + project_name + )) + + anatomy = context.data["anatomy"] + self.log.info("anatomy: {}".format(anatomy.roots)) + try: + session_is_set = False + for path in paths: + path = anatomy.fill_root(path) + data = self._load_json(path) + assert data, "failed to load json file" + if not session_is_set: + session_data = data["session"] + remapped = anatomy.roots_obj.path_remapper( + session_data["AVALON_WORKDIR"] + ) + if remapped: + session_data["AVALON_WORKDIR"] = remapped + + self.log.info("Setting session using data from file") + api.Session.update(session_data) + os.environ.update(session_data) + session_is_set = True + self._process_path(data, anatomy) + except Exception as e: + self.log.error(e, exc_info=True) + raise Exception("Error") from e From ca1ad20506c99b00412091a42a5cfe8ef28af7bd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 27 Jul 2021 18:47:58 +0200 Subject: [PATCH 036/308] Webpublisher - backend - added task_finish endpoint Added scaffolding to run publish process --- openpype/cli.py | 27 ++++++++- openpype/lib/applications.py | 20 ++++--- openpype/modules/webserver/webserver_cli.py | 62 +++++++++++++++++++-- openpype/pype_commands.py | 62 ++++++++++++++++++++- 4 files changed, 151 insertions(+), 20 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 1065152adb..e56a572c9c 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -96,11 +96,16 @@ def eventserver(debug, @main.command() @click.option("-d", "--debug", is_flag=True, help="Print debug messages") -def webpublisherwebserver(debug): +@click.option("-e", "--executable", help="Executable") +@click.option("-u", "--upload_dir", help="Upload dir") +def webpublisherwebserver(debug, executable, upload_dir): if debug: os.environ['OPENPYPE_DEBUG'] = "3" - PypeCommands().launch_webpublisher_webservercli() + PypeCommands().launch_webpublisher_webservercli( + 
upload_dir=upload_dir, + executable=executable + ) @main.command() @@ -140,6 +145,24 @@ def publish(debug, paths, targets): PypeCommands.publish(list(paths), targets) +@main.command() +@click.argument("paths", nargs=-1) +@click.option("-d", "--debug", is_flag=True, help="Print debug messages") +@click.option("-h", "--host", help="Host") +@click.option("-p", "--project", help="Project") +@click.option("-t", "--targets", help="Targets module", default=None, + multiple=True) +def remotepublish(debug, project, paths, host, targets=None): + """Start CLI publishing. + + Publish collects json from paths provided as an argument. + More than one path is allowed. + """ + if debug: + os.environ['OPENPYPE_DEBUG'] = '3' + PypeCommands.remotepublish(project, list(paths), host, targets=None) + + @main.command() @click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-p", "--project", required=True, diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index fb86d06150..1d0d5dcbaa 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1004,7 +1004,7 @@ class EnvironmentPrepData(dict): def get_app_environments_for_context( - project_name, asset_name, task_name, app_name, env=None + project_name, asset_name, task_name, app_name=None, env=None ): """Prepare environment variables by context. 
Args: @@ -1033,20 +1033,14 @@ def get_app_environments_for_context( "name": asset_name }) - # Prepare app object which can be obtained only from ApplciationManager - app_manager = ApplicationManager() - app = app_manager.applications[app_name] - # Project's anatomy anatomy = Anatomy(project_name) - data = EnvironmentPrepData({ + prep_dict = { "project_name": project_name, "asset_name": asset_name, "task_name": task_name, - "app": app, - "dbcon": dbcon, "project_doc": project_doc, "asset_doc": asset_doc, @@ -1054,7 +1048,15 @@ def get_app_environments_for_context( "anatomy": anatomy, "env": env - }) + } + + if app_name: + # Prepare app object which can be obtained only from ApplicationManager + app_manager = ApplicationManager() + app = app_manager.applications[app_name] + prep_dict["app"] = app + + data = EnvironmentPrepData(prep_dict) prepare_host_environments(data) prepare_context_environments(data) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index b6317a5675..00caa24d27 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -1,16 +1,15 @@ -import attr +import os import time import json import datetime from bson.objectid import ObjectId import collections from aiohttp.web_response import Response +import subprocess from avalon.api import AvalonMongoDB from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint -from openpype.api import get_hierarchy - class WebpublisherProjectsEndpoint(_RestApiEndpoint): """Returns list of project names.""" @@ -130,9 +129,51 @@ class WebpublisherHiearchyEndpoint(_RestApiEndpoint): ) +class WebpublisherTaskFinishEndpoint(_RestApiEndpoint): + """Returns list of project names.""" + async def post(self, request) -> Response: + output = {} + + print(request) + + json_path = os.path.join(self.resource.upload_dir, + "webpublisher.json") # temp - pull from request + + openpype_app = self.resource.executable + args = [ 
+ openpype_app, + 'remotepublish', + json_path + ] + + if not openpype_app or not os.path.exists(openpype_app): + msg = "Non existent OpenPype executable {}".format(openpype_app) + raise RuntimeError(msg) + + add_args = { + "host": "webpublisher", + "project": request.query["project"] + } + + for key, value in add_args.items(): + args.append("--{}".format(key)) + args.append(value) + + print("args:: {}".format(args)) + + exit_code = subprocess.call(args, shell=True) + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + class RestApiResource: - def __init__(self, server_manager): + def __init__(self, server_manager, executable, upload_dir): self.server_manager = server_manager + self.upload_dir = upload_dir + self.executable = executable self.dbcon = AvalonMongoDB() self.dbcon.install() @@ -154,14 +195,16 @@ class RestApiResource: ).encode("utf-8") -def run_webserver(): +def run_webserver(*args, **kwargs): from openpype.modules import ModulesManager manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] webserver_module.create_server_manager() - resource = RestApiResource(webserver_module.server_manager) + resource = RestApiResource(webserver_module.server_manager, + upload_dir=kwargs["upload_dir"], + executable=kwargs["executable"]) projects_endpoint = WebpublisherProjectsEndpoint(resource) webserver_module.server_manager.add_route( "GET", @@ -176,6 +219,13 @@ def run_webserver(): hiearchy_endpoint.dispatch ) + task_finish_endpoint = WebpublisherTaskFinishEndpoint(resource) + webserver_module.server_manager.add_route( + "POST", + "/api/task_finish", + task_finish_endpoint.dispatch + ) + webserver_module.start_server() while True: time.sleep(0.5) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 6ccf10e8ce..d2726fd2a6 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -41,11 +41,11 @@ class PypeCommands: return run_event_server(*args) 
@staticmethod - def launch_webpublisher_webservercli(*args): + def launch_webpublisher_webservercli(*args, **kwargs): from openpype.modules.webserver.webserver_cli import ( run_webserver ) - return run_webserver(*args) + return run_webserver(*args, **kwargs) @staticmethod def launch_standalone_publisher(): @@ -53,7 +53,7 @@ class PypeCommands: standalonepublish.main() @staticmethod - def publish(paths, targets=None): + def publish(paths, targets=None, host=None): """Start headless publishing. Publish use json from passed paths argument. @@ -111,6 +111,62 @@ class PypeCommands: log.info("Publish finished.") uninstall() + @staticmethod + def remotepublish(project, paths, host, targets=None): + """Start headless publishing. + + Publish use json from passed paths argument. + + Args: + paths (list): Paths to jsons. + targets (string): What module should be targeted + (to choose validator for example) + host (string) + + Raises: + RuntimeError: When there is no path to process. + """ + if not any(paths): + raise RuntimeError("No publish paths specified") + + from openpype import install, uninstall + from openpype.api import Logger + + # Register target and host + import pyblish.api + import pyblish.util + + log = Logger.get_logger() + + install() + + if host: + pyblish.api.register_host(host) + + if targets: + if isinstance(targets, str): + targets = [targets] + for target in targets: + pyblish.api.register_target(target) + + os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths) + os.environ["AVALON_PROJECT"] = project + os.environ["AVALON_APP"] = host # to trigger proper plugings + + log.info("Running publish ...") + + # Error exit as soon as any error occurs. 
+ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" + + for result in pyblish.util.publish_iter(): + if result["error"]: + log.error(error_format.format(**result)) + uninstall() + sys.exit(1) + + log.info("Publish finished.") + uninstall() + def extractenvironments(output_json_path, project, asset, task, app): env = os.environ.copy() if all((project, asset, task, app)): From abdaf019ba230709099b05406043c57572e2d10e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:01:08 +0200 Subject: [PATCH 037/308] interfaces inherit from OpenPypeInterface --- openpype/modules/__init__.py | 13 ++++++++----- openpype/modules/ftrack/ftrack_module.py | 4 ++-- openpype/modules/idle_manager/idle_module.py | 9 ++++++--- openpype/modules/settings_action.py | 9 ++++++--- openpype/modules/timers_manager/timers_manager.py | 11 ++++++++--- openpype/modules/webserver/webserver_module.py | 9 ++++++--- 6 files changed, 36 insertions(+), 19 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 3ac11950ef..724f442b74 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,13 +2,15 @@ from .base import ( PypeModule, OpenPypeInterface, + ModulesManager, + TrayModulesManager +) +from .interfaces import ( ITrayModule, ITrayAction, ITrayService, IPluginPaths, - ILaunchHookPaths, - ModulesManager, - TrayModulesManager + ILaunchHookPaths ) from .settings_action import ( SettingsAction, @@ -47,13 +49,14 @@ __all__ = ( "PypeModule", "OpenPypeInterface", + "ModulesManager", + "TrayModulesManager", + "ITrayModule", "ITrayAction", "ITrayService", "IPluginPaths", "ILaunchHookPaths", - "ModulesManager", - "TrayModulesManager", "SettingsAction", "LocalSettingsAction", diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index ee139a500e..70f34b6389 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -6,6 +6,7 @@ 
import six import openpype from openpype.modules import ( PypeModule, + OpenPypeInterface, ITrayModule, IPluginPaths, ITimersManager, @@ -17,8 +18,7 @@ from openpype.settings import SaveWarningExc FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -@six.add_metaclass(ABCMeta) -class IFtrackEventHandlerPaths: +class IFtrackEventHandlerPaths(OpenPypeInterface): """Other modules interface to return paths to ftrack event handlers. Expected output is dictionary with "server" and "user" keys. diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/idle_manager/idle_module.py index 5dd5160aa7..57ccc9cce7 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -4,11 +4,14 @@ from abc import ABCMeta, abstractmethod import six -from openpype.modules import PypeModule, ITrayService +from openpype.modules import ( + PypeModule, + OpenPypeInterface, + ITrayService +) -@six.add_metaclass(ABCMeta) -class IIdleManager: +class IIdleManager(OpenPypeInterface): """Other modules interface to return callbacks by idle time in seconds. Expected output is dictionary with seconds as keys and callback/s diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 9db4a252bc..f6d6463b25 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -2,11 +2,14 @@ from abc import ABCMeta, abstractmethod import six -from . import PypeModule, ITrayAction +from . import ( + PypeModule, + OpenPypeInterface, + ITrayAction +) -@six.add_metaclass(ABCMeta) -class ISettingsChangeListener: +class ISettingsChangeListener(OpenPypeInterface): """Module has plugin paths to return. 
Expected result is dictionary with keys "publish", "create", "load" or diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 92edd5aeaa..9566f9a6ef 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -2,12 +2,17 @@ import os import collections from abc import ABCMeta, abstractmethod import six -from .. import PypeModule, ITrayService, IIdleManager, IWebServerRoutes +from .. import ( + PypeModule, + OpenPypeInterface, + ITrayService, + IIdleManager, + IWebServerRoutes +) from avalon.api import AvalonMongoDB -@six.add_metaclass(ABCMeta) -class ITimersManager: +class ITimersManager(OpenPypeInterface): timer_manager_module = None @abstractmethod diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index b61619acde..edb0b0be3f 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -5,11 +5,14 @@ from abc import ABCMeta, abstractmethod import six from openpype import resources -from .. import PypeModule, ITrayService +from .. 
import ( + PypeModule, + OpenPypeInterface, + ITrayService +) -@six.add_metaclass(ABCMeta) -class IWebServerRoutes: +class IWebServerRoutes(OpenPypeInterface): """Other modules interface to register their routes.""" @abstractmethod def webserver_initialization(self, server_manager): From a0b24b9325e6c6a4240b55f7b1bbec1e3e2b8b2a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:01:25 +0200 Subject: [PATCH 038/308] remove deprecated sync server initialization --- openpype/modules/sync_server/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py index a814f0db62..d6a038372b 100644 --- a/openpype/modules/sync_server/__init__.py +++ b/openpype/modules/sync_server/__init__.py @@ -1,5 +1,6 @@ from openpype.modules.sync_server.sync_server_module import SyncServerModule -def tray_init(tray_widget, main_widget): - return SyncServerModule() +__all__ = ( + "SyncServerModule", +) From b83e932a6ba76a4db80377e16f22d5219935fb70 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:58:45 +0200 Subject: [PATCH 039/308] interfaces are defined in one specific file --- openpype/modules/ftrack/__init__.py | 2 - openpype/modules/ftrack/ftrack_module.py | 12 ------ openpype/modules/ftrack/interfaces.py | 12 ++++++ openpype/modules/idle_manager/__init__.py | 4 +- openpype/modules/idle_manager/idle_module.py | 27 ------------- openpype/modules/idle_manager/interfaces.py | 26 +++++++++++++ openpype/modules/interfaces.py | 4 +- openpype/modules/settings_module/__init__.py | 9 +++++ .../modules/settings_module/interfaces.py | 30 ++++++++++++++ .../{ => settings_module}/settings_action.py | 39 +------------------ openpype/modules/timers_manager/__init__.py | 4 +- openpype/modules/timers_manager/interfaces.py | 26 +++++++++++++ .../modules/timers_manager/timers_manager.py | 24 ------------ openpype/modules/webserver/__init__.py | 4 +- 
openpype/modules/webserver/interfaces.py | 9 +++++ .../modules/webserver/webserver_module.py | 7 ---- 16 files changed, 118 insertions(+), 121 deletions(-) create mode 100644 openpype/modules/ftrack/interfaces.py create mode 100644 openpype/modules/idle_manager/interfaces.py create mode 100644 openpype/modules/settings_module/__init__.py create mode 100644 openpype/modules/settings_module/interfaces.py rename openpype/modules/{ => settings_module}/settings_action.py (81%) create mode 100644 openpype/modules/timers_manager/interfaces.py create mode 100644 openpype/modules/webserver/interfaces.py diff --git a/openpype/modules/ftrack/__init__.py b/openpype/modules/ftrack/__init__.py index c1a557812c..7261254c6f 100644 --- a/openpype/modules/ftrack/__init__.py +++ b/openpype/modules/ftrack/__init__.py @@ -1,11 +1,9 @@ from .ftrack_module import ( FtrackModule, - IFtrackEventHandlerPaths, FTRACK_MODULE_DIR ) __all__ = ( "FtrackModule", - "IFtrackEventHandlerPaths", "FTRACK_MODULE_DIR" ) diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 70f34b6389..3685978003 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -1,8 +1,6 @@ import os import json import collections -from abc import ABCMeta, abstractmethod -import six import openpype from openpype.modules import ( PypeModule, @@ -18,16 +16,6 @@ from openpype.settings import SaveWarningExc FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class IFtrackEventHandlerPaths(OpenPypeInterface): - """Other modules interface to return paths to ftrack event handlers. - - Expected output is dictionary with "server" and "user" keys. 
- """ - @abstractmethod - def get_event_handler_paths(self): - pass - - class FtrackModule( PypeModule, ITrayModule, diff --git a/openpype/modules/ftrack/interfaces.py b/openpype/modules/ftrack/interfaces.py new file mode 100644 index 0000000000..16ce0d2e62 --- /dev/null +++ b/openpype/modules/ftrack/interfaces.py @@ -0,0 +1,12 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IFtrackEventHandlerPaths(OpenPypeInterface): + """Other modules interface to return paths to ftrack event handlers. + + Expected output is dictionary with "server" and "user" keys. + """ + @abstractmethod + def get_event_handler_paths(self): + pass diff --git a/openpype/modules/idle_manager/__init__.py b/openpype/modules/idle_manager/__init__.py index 651f360c50..9d6e10bf39 100644 --- a/openpype/modules/idle_manager/__init__.py +++ b/openpype/modules/idle_manager/__init__.py @@ -1,10 +1,8 @@ from .idle_module import ( - IdleManager, - IIdleManager + IdleManager ) __all__ = ( "IdleManager", - "IIdleManager" ) diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/idle_manager/idle_module.py index 57ccc9cce7..9e5211a0fa 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -1,8 +1,5 @@ import platform import collections -from abc import ABCMeta, abstractmethod - -import six from openpype.modules import ( PypeModule, @@ -11,30 +8,6 @@ from openpype.modules import ( ) -class IIdleManager(OpenPypeInterface): - """Other modules interface to return callbacks by idle time in seconds. - - Expected output is dictionary with seconds as keys and callback/s - as value, value may be callback of list of callbacks. 
- EXAMPLE: - ``` - { - 60: self.on_minute_idle - } - ``` - """ - idle_manager = None - - @abstractmethod - def callbacks_by_idle_time(self): - pass - - @property - def idle_time(self): - if self.idle_manager: - return self.idle_manager.idle_time - - class IdleManager(PypeModule, ITrayService): """ Measure user's idle time in seconds. Idle time resets on keyboard/mouse input. diff --git a/openpype/modules/idle_manager/interfaces.py b/openpype/modules/idle_manager/interfaces.py new file mode 100644 index 0000000000..71cd17a64a --- /dev/null +++ b/openpype/modules/idle_manager/interfaces.py @@ -0,0 +1,26 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IIdleManager(OpenPypeInterface): + """Other modules interface to return callbacks by idle time in seconds. + + Expected output is dictionary with seconds as keys and callback/s + as value, value may be callback of list of callbacks. + EXAMPLE: + ``` + { + 60: self.on_minute_idle + } + ``` + """ + idle_manager = None + + @abstractmethod + def callbacks_by_idle_time(self): + pass + + @property + def idle_time(self): + if self.idle_manager: + return self.idle_manager.idle_time diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index 6d51f1b828..a60c5fa606 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -2,9 +2,7 @@ from abc import abstractmethod from openpype import resources -from .base import ( - OpenPypeInterface -) +from openpype.modules import OpenPypeInterface class IPluginPaths(OpenPypeInterface): diff --git a/openpype/modules/settings_module/__init__.py b/openpype/modules/settings_module/__init__.py new file mode 100644 index 0000000000..95510eba9d --- /dev/null +++ b/openpype/modules/settings_module/__init__.py @@ -0,0 +1,9 @@ +from .settings_action import ( + LocalSettingsAction, + SettingsAction +) + +__all__ = ( + "LocalSettingsAction", + "SettingsAction" +) diff --git 
a/openpype/modules/settings_module/interfaces.py b/openpype/modules/settings_module/interfaces.py new file mode 100644 index 0000000000..42db395649 --- /dev/null +++ b/openpype/modules/settings_module/interfaces.py @@ -0,0 +1,30 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class ISettingsChangeListener(OpenPypeInterface): + """Module has plugin paths to return. + + Expected result is dictionary with keys "publish", "create", "load" or + "actions" and values as list or string. + { + "publish": ["path/to/publish_plugins"] + } + """ + @abstractmethod + def on_system_settings_save( + self, old_value, new_value, changes, new_value_metadata + ): + pass + + @abstractmethod + def on_project_settings_save( + self, old_value, new_value, changes, project_name, new_value_metadata + ): + pass + + @abstractmethod + def on_project_anatomy_save( + self, old_value, new_value, changes, project_name, new_value_metadata + ): + pass diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_module/settings_action.py similarity index 81% rename from openpype/modules/settings_action.py rename to openpype/modules/settings_module/settings_action.py index f6d6463b25..a6909e1fdf 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_module/settings_action.py @@ -1,40 +1,5 @@ -from abc import ABCMeta, abstractmethod - -import six - -from . import ( - PypeModule, - OpenPypeInterface, - ITrayAction -) - - -class ISettingsChangeListener(OpenPypeInterface): - """Module has plugin paths to return. - - Expected result is dictionary with keys "publish", "create", "load" or - "actions" and values as list or string. 
- { - "publish": ["path/to/publish_plugins"] - } - """ - @abstractmethod - def on_system_settings_save( - self, old_value, new_value, changes, new_value_metadata - ): - pass - - @abstractmethod - def on_project_settings_save( - self, old_value, new_value, changes, project_name, new_value_metadata - ): - pass - - @abstractmethod - def on_project_anatomy_save( - self, old_value, new_value, changes, project_name, new_value_metadata - ): - pass +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class SettingsAction(PypeModule, ITrayAction): diff --git a/openpype/modules/timers_manager/__init__.py b/openpype/modules/timers_manager/__init__.py index 1b565cc59a..5d7a4166d3 100644 --- a/openpype/modules/timers_manager/__init__.py +++ b/openpype/modules/timers_manager/__init__.py @@ -1,9 +1,7 @@ from .timers_manager import ( - ITimersManager, TimersManager ) __all__ = ( - "ITimersManager", - "TimersManager" + "TimersManager", ) diff --git a/openpype/modules/timers_manager/interfaces.py b/openpype/modules/timers_manager/interfaces.py new file mode 100644 index 0000000000..179013cffe --- /dev/null +++ b/openpype/modules/timers_manager/interfaces.py @@ -0,0 +1,26 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class ITimersManager(OpenPypeInterface): + timer_manager_module = None + + @abstractmethod + def stop_timer(self): + pass + + @abstractmethod + def start_timer(self, data): + pass + + def timer_started(self, data): + if not self.timer_manager_module: + return + + self.timer_manager_module.timer_started(self.id, data) + + def timer_stopped(self): + if not self.timer_manager_module: + return + + self.timer_manager_module.timer_stopped(self.id) diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 9566f9a6ef..f893a0f3e7 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ 
b/openpype/modules/timers_manager/timers_manager.py @@ -12,30 +12,6 @@ from .. import ( from avalon.api import AvalonMongoDB -class ITimersManager(OpenPypeInterface): - timer_manager_module = None - - @abstractmethod - def stop_timer(self): - pass - - @abstractmethod - def start_timer(self, data): - pass - - def timer_started(self, data): - if not self.timer_manager_module: - return - - self.timer_manager_module.timer_started(self.id, data) - - def timer_stopped(self): - if not self.timer_manager_module: - return - - self.timer_manager_module.timer_stopped(self.id) - - class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): """ Handles about Timers. diff --git a/openpype/modules/webserver/__init__.py b/openpype/modules/webserver/__init__.py index defd115e57..899b97d6d4 100644 --- a/openpype/modules/webserver/__init__.py +++ b/openpype/modules/webserver/__init__.py @@ -1,10 +1,8 @@ from .webserver_module import ( - WebServerModule, - IWebServerRoutes + WebServerModule ) __all__ = ( "WebServerModule", - "IWebServerRoutes" ) diff --git a/openpype/modules/webserver/interfaces.py b/openpype/modules/webserver/interfaces.py new file mode 100644 index 0000000000..779361a9ec --- /dev/null +++ b/openpype/modules/webserver/interfaces.py @@ -0,0 +1,9 @@ +from abc import abstractmethod +from openpype.modules import OpenPypeInterface + + +class IWebServerRoutes(OpenPypeInterface): + """Other modules interface to register their routes.""" + @abstractmethod + def webserver_initialization(self, server_manager): + pass diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index edb0b0be3f..57e5df8e85 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -12,13 +12,6 @@ from .. 
import ( ) -class IWebServerRoutes(OpenPypeInterface): - """Other modules interface to register their routes.""" - @abstractmethod - def webserver_initialization(self, server_manager): - pass - - class WebServerModule(PypeModule, ITrayService): name = "webserver" label = "WebServer" From 45f894bf3c7e06baf86135a24b9fb7d50c416f5a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 19:59:56 +0200 Subject: [PATCH 040/308] load interfaces and modules more dynamically --- openpype/modules/base.py | 209 ++++++++++++++++++++++++++++++++------- 1 file changed, 171 insertions(+), 38 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 6e1d19589c..e4e9013eee 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -1,5 +1,8 @@ # -*- coding: utf-8 -*- """Base class for Pype Modules.""" +import os +import sys +import types import time import inspect import logging @@ -14,6 +17,141 @@ from openpype.lib import PypeLogger from openpype import resources +class __ModuleClass: + def __init__(self): + self.object_setattr("__attributes__", {}) + self.object_setattr("__defaults__", set()) + + def __getattr__(self, attr_name): + return self.__attributes__.get( + attr_name, + type("Missing.{}".format(attr_name), (), {}) + ) + + def __iter__(self): + for module in self.values(): + yield module + + def object_setattr(self, attr_name, value): + object.__setattr__(self, attr_name, value) + + def __setattr__(self, attr_name, value): + self.__attributes__[attr_name] = value + + def keys(self): + return self.__attributes__.keys() + + def values(self): + return self.__attributes__.values() + + def items(self): + return self.__attributes__.items() + + +def load_interfaces(force=False): + if not force and "openpype_interfaces" in sys.modules: + return + + sys.modules["openpype_interfaces"] = openpype_interfaces = __ModuleClass() + + log = PypeLogger.get_logger("InterfacesLoader") + + current_dir = os.path.abspath(os.path.dirname(__file__)) + 
+ interface_paths = [ + os.path.join(current_dir, "interfaces.py") + ] + + for filename in os.listdir(current_dir): + full_path = os.path.join(current_dir, filename) + if os.path.isdir(full_path): + interface_paths.append( + os.path.join(full_path, "interfaces.py") + ) + + # print(interface_paths) + for full_path in interface_paths: + if not os.path.exists(full_path): + continue + + filename = os.path.splitext(os.path.basename(full_path))[0] + + try: + # Prepare module object where content of file will be parsed + module = types.ModuleType(filename) + + if six.PY3: + import importlib + + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + filename, full_path + ) + module_loader.exec_module(module) + else: + # Execute module code and store content to module + with open(full_path) as _stream: + # Execute content and store it to module object + exec(_stream.read(), module.__dict__) + + module.__file__ = full_path + + except Exception: + log.warning( + "Failed to load path: \"{0}\"".format(full_path), + exc_info=True + ) + continue + + for attr_name in dir(module): + attr = getattr(module, attr_name) + if ( + not inspect.isclass(attr) + or attr is OpenPypeInterface + or not issubclass(attr, OpenPypeInterface) + ): + continue + setattr(openpype_interfaces, attr_name, attr) + + +def load_modules(force=False): + if not force and "openpype_modules" in sys.modules: + return + + from openpype.lib import modules_from_path + + sys.modules["openpype_modules"] = openpype_modules = __ModuleClass() + + log = PypeLogger.get_logger("ModulesLoader") + + from . 
import ( + avalon_apps, + clockify, + deadline, + ftrack, + idle_manager, + log_viewer, + muster, + settings_module, + slack, + sync_server, + timers_manager, + webserver + ) + setattr(openpype_modules, "avalon_apps", avalon_apps) + setattr(openpype_modules, "clockify", clockify) + setattr(openpype_modules, "deadline", deadline) + setattr(openpype_modules, "ftrack", ftrack) + setattr(openpype_modules, "idle_manager", idle_manager) + setattr(openpype_modules, "log_viewer", log_viewer) + setattr(openpype_modules, "muster", muster) + setattr(openpype_modules, "settings_module", settings_module) + setattr(openpype_modules, "sync_server", sync_server) + setattr(openpype_modules, "slack", slack) + setattr(openpype_modules, "timers_manager", timers_manager) + setattr(openpype_modules, "webserver", webserver) + + @six.add_metaclass(ABCMeta) class OpenPypeInterface: """Base class of Interface that can be used as Mixin with abstract parts. @@ -105,44 +243,12 @@ class ModulesManager: # For report of time consumption self._report = {} - self._raw_modules = None - self.initialize_modules() self.connect_modules() def collect_modules(self): - if self._raw_modules is not None: - return - - self._raw_modules = [] - - # Go through globals in `pype.modules` - for name in dir(openpype.modules): - modules_item = getattr(openpype.modules, name, None) - # Filter globals that are not classes which inherit from PypeModule - if ( - not inspect.isclass(modules_item) - or modules_item is openpype.modules.PypeModule - or not issubclass(modules_item, openpype.modules.PypeModule) - ): - continue - - # Check if class is abstract (Developing purpose) - if inspect.isabstract(modules_item): - # Find missing implementations by convetion on `abc` module - not_implemented = [] - for attr_name in dir(modules_item): - attr = getattr(modules_item, attr_name, None) - if attr and getattr(attr, "__isabstractmethod__", None): - not_implemented.append(attr_name) - - # Log missing implementations - 
self.log.warning(( - "Skipping abstract Class: {}. Missing implementations: {}" - ).format(name, ", ".join(not_implemented))) - continue - - self._raw_modules.append(modules_item) + load_interfaces() + load_modules() def initialize_modules(self): """Import and initialize modules.""" @@ -159,8 +265,37 @@ class ModulesManager: time_start = time.time() prev_start_time = time_start - # Go through globals in `pype.modules` - for modules_item in self._raw_modules: + module_classes = [] + for module in openpype_modules: + # Go through globals in `pype.modules` + for name in dir(module): + modules_item = getattr(module, name, None) + # Filter globals that are not classes which inherit from + # PypeModule + if ( + not inspect.isclass(modules_item) + or modules_item is PypeModule + or not issubclass(modules_item, PypeModule) + ): + continue + + # Check if class is abstract (Developing purpose) + if inspect.isabstract(modules_item): + # Find missing implementations by convetion on `abc` module + not_implemented = [] + for attr_name in dir(modules_item): + attr = getattr(modules_item, attr_name, None) + if attr and getattr(attr, "__isabstractmethod__", None): + not_implemented.append(attr_name) + + # Log missing implementations + self.log.warning(( + "Skipping abstract Class: {}. 
Missing implementations: {}" + ).format(name, ", ".join(not_implemented))) + continue + module_classes.append(modules_item) + + for modules_item in module_classes: try: name = modules_item.__name__ # Try initialize module @@ -480,8 +615,6 @@ class TrayModulesManager(ModulesManager): self.modules_by_name = {} self._report = {} - self._raw_modules = None - self.tray_manager = None self.doubleclick_callbacks = {} From c7e126bc6ebabc53f541706dd850cf2ffe8e941f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:00:31 +0200 Subject: [PATCH 041/308] use dynamic imports in modules manager --- openpype/modules/base.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e4e9013eee..c84a8a95a4 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -254,6 +254,8 @@ class ModulesManager: """Import and initialize modules.""" self.collect_modules() + import openpype_modules + self.log.debug("*** Pype modules initialization.") # Prepare settings for modules system_settings = getattr(self, "_system_settings", None) @@ -395,6 +397,8 @@ class ModulesManager: and "actions" each containing list of paths. """ # Output structure + from openpype_interfaces import IPluginPaths + output = { "publish": [], "create": [], @@ -447,6 +451,8 @@ class ModulesManager: Returns: list: Paths to launch hook directories. 
""" + from openpype_interfaces import ILaunchHookPaths + str_type = type("") expected_types = (list, tuple, set) @@ -647,6 +653,8 @@ class TrayModulesManager(ModulesManager): self.tray_menu(tray_menu) def get_enabled_tray_modules(self): + from openpype_interfaces import ITrayModule + output = [] for module in self.modules: if module.enabled and isinstance(module, ITrayModule): @@ -722,6 +730,8 @@ class TrayModulesManager(ModulesManager): self._report["Tray menu"] = report def start_modules(self): + from openpype_interfaces import ITrayService + report = {} time_start = time.time() prev_start_time = time_start From 8f35cb61f5a996331b76e7e676faa46a0ec2208c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:00:53 +0200 Subject: [PATCH 042/308] removed all modules and iterfaces from public api --- openpype/modules/__init__.py | 76 +----------------------------------- 1 file changed, 1 insertion(+), 75 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 724f442b74..3ad9a75161 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -5,44 +5,6 @@ from .base import ( ModulesManager, TrayModulesManager ) -from .interfaces import ( - ITrayModule, - ITrayAction, - ITrayService, - IPluginPaths, - ILaunchHookPaths -) -from .settings_action import ( - SettingsAction, - ISettingsChangeListener, - LocalSettingsAction -) -from .webserver import ( - WebServerModule, - IWebServerRoutes -) -from .idle_manager import ( - IdleManager, - IIdleManager -) -from .timers_manager import ( - TimersManager, - ITimersManager -) -from .avalon_apps import AvalonModule -from .launcher_action import LauncherAction -from .ftrack import ( - FtrackModule, - IFtrackEventHandlerPaths -) -from .clockify import ClockifyModule -from .log_viewer import LogViewModule -from .muster import MusterModule -from .deadline import DeadlineModule -from .project_manager_action import ProjectManagerAction -from .standalonepublish_action 
import StandAlonePublishAction -from .sync_server import SyncServerModule -from .slack import SlackIntegrationModule __all__ = ( @@ -50,41 +12,5 @@ __all__ = ( "OpenPypeInterface", "ModulesManager", - "TrayModulesManager", - - "ITrayModule", - "ITrayAction", - "ITrayService", - "IPluginPaths", - "ILaunchHookPaths", - - "SettingsAction", - "LocalSettingsAction", - - "WebServerModule", - "IWebServerRoutes", - - "IdleManager", - "IIdleManager", - - "TimersManager", - "ITimersManager", - - "AvalonModule", - "LauncherAction", - - "FtrackModule", - "IFtrackEventHandlerPaths", - - "ClockifyModule", - "IdleManager", - "LogViewModule", - "MusterModule", - "DeadlineModule", - "ProjectManagerAction", - "StandAlonePublishAction", - - "SyncServerModule", - - "SlackIntegrationModule" + "TrayModulesManager" ) From 6813ff03664a7f6c11e995bc24669a4a6254e71d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:01:54 +0200 Subject: [PATCH 043/308] changed imports where from interfaces are loaded --- openpype/modules/avalon_apps/avalon_app.py | 4 ++-- openpype/modules/clockify/clockify_module.py | 4 ++-- openpype/modules/deadline/deadline_module.py | 4 ++-- openpype/modules/ftrack/ftrack_module.py | 9 +++++---- openpype/modules/idle_manager/idle_module.py | 8 ++++---- openpype/modules/log_viewer/log_view_module.py | 3 ++- openpype/modules/muster/muster.py | 4 ++-- openpype/modules/project_manager_action.py | 3 ++- openpype/modules/slack/slack_module.py | 7 +++++-- openpype/modules/standalonepublish_action.py | 3 ++- openpype/modules/sync_server/sync_server_module.py | 3 ++- openpype/modules/timers_manager/timers_manager.py | 8 +++----- openpype/modules/webserver/host_console_listener.py | 2 +- openpype/modules/webserver/webserver_module.py | 11 ++++------- 14 files changed, 38 insertions(+), 35 deletions(-) diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/avalon_apps/avalon_app.py index 4e95f6e72b..7f130bfab1 100644 --- 
a/openpype/modules/avalon_apps/avalon_app.py +++ b/openpype/modules/avalon_apps/avalon_app.py @@ -1,8 +1,8 @@ import os import openpype from openpype import resources -from .. import ( - PypeModule, +from openpype.modules import PypeModule +from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/clockify/clockify_module.py index e3751c46b8..83f8d07c3a 100644 --- a/openpype/modules/clockify/clockify_module.py +++ b/openpype/modules/clockify/clockify_module.py @@ -7,8 +7,8 @@ from .constants import ( CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH ) -from openpype.modules import ( - PypeModule, +from openpype.modules import PypeModule +from openpype_interfaces import ( ITrayModule, IPluginPaths, IFtrackEventHandlerPaths, diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py index 2a2fba41d6..47fd4e9656 100644 --- a/openpype/modules/deadline/deadline_module.py +++ b/openpype/modules/deadline/deadline_module.py @@ -1,6 +1,6 @@ import os -from openpype.modules import ( - PypeModule, IPluginPaths) +from openpype.modules import PypeModule +from openpype_interfaces import IPluginPaths class DeadlineModule(PypeModule, IPluginPaths): diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 3685978003..6fce308b19 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -2,14 +2,15 @@ import os import json import collections import openpype -from openpype.modules import ( - PypeModule, - OpenPypeInterface, +from openpype.modules import PypeModule + +from openpype_interfaces import ( ITrayModule, IPluginPaths, ITimersManager, ILaunchHookPaths, - ISettingsChangeListener + ISettingsChangeListener, + IFtrackEventHandlerPaths ) from openpype.settings import SaveWarningExc diff --git a/openpype/modules/idle_manager/idle_module.py 
b/openpype/modules/idle_manager/idle_module.py index 9e5211a0fa..d669fcb90e 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -1,10 +1,10 @@ import platform import collections -from openpype.modules import ( - PypeModule, - OpenPypeInterface, - ITrayService +from openpype.modules import PypeModule +from openpype_interfaces import ( + ITrayService, + IIdleManager ) diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index dde482b04c..22826d8a54 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -1,5 +1,6 @@ from openpype.api import Logger -from .. import PypeModule, ITrayModule +from openpype.modules import PypeModule +from openpype_interfaces import ITrayModule class LogViewModule(PypeModule, ITrayModule): diff --git a/openpype/modules/muster/muster.py b/openpype/modules/muster/muster.py index 1a82926802..164f20054a 100644 --- a/openpype/modules/muster/muster.py +++ b/openpype/modules/muster/muster.py @@ -2,8 +2,8 @@ import os import json import appdirs import requests -from .. import ( - PypeModule, +from openpype.modules import PypeModule +from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) diff --git a/openpype/modules/project_manager_action.py b/openpype/modules/project_manager_action.py index 1387aa258c..9a36d973b3 100644 --- a/openpype/modules/project_manager_action.py +++ b/openpype/modules/project_manager_action.py @@ -1,4 +1,5 @@ -from . 
import PypeModule, ITrayAction +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class ProjectManagerAction(PypeModule, ITrayAction): diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py index 9dd5a3d02b..8e6ac10037 100644 --- a/openpype/modules/slack/slack_module.py +++ b/openpype/modules/slack/slack_module.py @@ -1,6 +1,9 @@ import os -from openpype.modules import ( - PypeModule, IPluginPaths, ILaunchHookPaths) +from openpype.modules import PypeModule +from openpype_interfaces import ( + IPluginPaths, + ILaunchHookPaths +) SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/openpype/modules/standalonepublish_action.py b/openpype/modules/standalonepublish_action.py index 4f87f9704c..53319f9e11 100644 --- a/openpype/modules/standalonepublish_action.py +++ b/openpype/modules/standalonepublish_action.py @@ -2,7 +2,8 @@ import os import platform import subprocess from openpype.lib import get_pype_execute_args -from . import PypeModule, ITrayAction +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class StandAlonePublishAction(PypeModule, ITrayAction): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 15de4b12e9..63f39474b1 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -7,7 +7,8 @@ import copy from avalon.api import AvalonMongoDB -from .. 
import PypeModule, ITrayModule +from openpype.modules import PypeModule +from openpype_interfaces import ITrayModule from openpype.api import ( Anatomy, get_project_settings, diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index f893a0f3e7..b31e14209a 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -1,10 +1,8 @@ import os import collections -from abc import ABCMeta, abstractmethod -import six -from .. import ( - PypeModule, - OpenPypeInterface, +from openpype.modules import PypeModule +from openpype_interfaces import ( + ITimersManager, ITrayService, IIdleManager, IWebServerRoutes diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/webserver/host_console_listener.py index 01a8af643e..bcf4cadf6a 100644 --- a/openpype/modules/webserver/host_console_listener.py +++ b/openpype/modules/webserver/host_console_listener.py @@ -5,7 +5,7 @@ import logging from concurrent.futures import CancelledError from Qt import QtWidgets -from openpype.modules import ITrayService +from openpype_interfaces import ITrayService log = logging.getLogger(__name__) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 57e5df8e85..192baad013 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -1,14 +1,11 @@ import os import socket -from abc import ABCMeta, abstractmethod - -import six from openpype import resources -from .. 
import ( - PypeModule, - OpenPypeInterface, - ITrayService +from openpype.modules import PypeModule +from openpype_interfaces import ( + ITrayService, + IWebServerRoutes ) From 82a607f7d919fc5528fe7a5c08c8fd368e486be0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:05:55 +0200 Subject: [PATCH 044/308] add missing modules --- openpype/modules/base.py | 9 +++++++-- openpype/settings/lib.py | 3 ++- openpype/tools/tray/pype_tray.py | 10 +++++----- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c84a8a95a4..e8e3860297 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -14,7 +14,6 @@ import six import openpype from openpype.settings import get_system_settings from openpype.lib import PypeLogger -from openpype import resources class __ModuleClass: @@ -136,7 +135,9 @@ def load_modules(force=False): slack, sync_server, timers_manager, - webserver + webserver, + standalonepublish_action, + project_manager_action ) setattr(openpype_modules, "avalon_apps", avalon_apps) setattr(openpype_modules, "clockify", clockify) @@ -150,6 +151,10 @@ def load_modules(force=False): setattr(openpype_modules, "slack", slack) setattr(openpype_modules, "timers_manager", timers_manager) setattr(openpype_modules, "webserver", webserver) + setattr( + openpype_modules, "standalonepublish_action", standalonepublish_action + ) + setattr(openpype_modules, "project_manager_action", project_manager_action) @six.add_metaclass(ABCMeta) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 5c2c0dcd94..ec9846eef7 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -114,7 +114,8 @@ def save_studio_settings(data): SaveWarningExc: If any module raises the exception. 
""" # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener old_data = get_system_settings() default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 794312f389..ed66f1a80f 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -15,11 +15,7 @@ from openpype.api import ( get_system_settings ) from openpype.lib import get_pype_execute_args -from openpype.modules import ( - TrayModulesManager, - ITrayAction, - ITrayService -) +from openpype.modules import TrayModulesManager from openpype import style from .pype_info_widget import PypeInfoWidget @@ -80,6 +76,10 @@ class TrayManager: def initialize_modules(self): """Add modules to tray.""" + from openpype_interfaces import ( + ITrayAction, + ITrayService + ) self.modules_manager.initialize(self, self.tray_widget.menu) From 65dedb05345034038131e774ff646a15e0b3cc86 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:06:15 +0200 Subject: [PATCH 045/308] use relative imports --- openpype/modules/sync_server/__init__.py | 2 +- openpype/modules/sync_server/tray/app.py | 2 +- openpype/modules/sync_server/tray/delegates.py | 2 +- openpype/modules/sync_server/tray/models.py | 2 +- openpype/modules/sync_server/tray/widgets.py | 8 ++++---- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py index d6a038372b..430ab53c91 100644 --- a/openpype/modules/sync_server/__init__.py +++ b/openpype/modules/sync_server/__init__.py @@ -1,4 +1,4 @@ -from openpype.modules.sync_server.sync_server_module import SyncServerModule +from .sync_server_module import SyncServerModule __all__ = ( diff --git a/openpype/modules/sync_server/tray/app.py 
b/openpype/modules/sync_server/tray/app.py index dd2b4be749..106076d81c 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -5,7 +5,7 @@ from openpype.tools.settings import style from openpype.lib import PypeLogger from openpype import resources -from openpype.modules.sync_server.tray.widgets import ( +from .widgets import ( SyncProjectListWidget, SyncRepresentationSummaryWidget ) diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py index 9316ec2c3e..461b9fffb3 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/sync_server/tray/delegates.py @@ -2,7 +2,7 @@ import os from Qt import QtCore, QtWidgets, QtGui from openpype.lib import PypeLogger -from openpype.modules.sync_server.tray import lib +from . import lib log = PypeLogger().get_logger("SyncServer") diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index efef039b8b..8c86d3b98f 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -11,7 +11,7 @@ from avalon.vendor import qtawesome from openpype.lib import PypeLogger from openpype.api import get_local_site_id -from openpype.modules.sync_server.tray import lib +from . 
import lib log = PypeLogger().get_logger("SyncServer") diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index d38416fbce..c9160733a0 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -17,13 +17,13 @@ from openpype.lib import PypeLogger from avalon.tools.delegates import pretty_timestamp from avalon.vendor import qtawesome -from openpype.modules.sync_server.tray.models import ( +from .models import ( SyncRepresentationSummaryModel, SyncRepresentationDetailModel ) -from openpype.modules.sync_server.tray import lib -from openpype.modules.sync_server.tray import delegates +from . import lib +from . import delegates log = PypeLogger().get_logger("SyncServer") @@ -187,7 +187,7 @@ class _SyncRepresentationWidget(QtWidgets.QWidget): detail_window = SyncServerDetailWindow( self.sync_server, _id, self.model.project, parent=self) detail_window.exec() - + def _on_context_menu(self, point): """ Shows menu with loader actions on Right-click. 
From 4ca5ef46b54f7fc445cec3ecc78e4bfd7541bfd7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:10:41 +0200 Subject: [PATCH 046/308] adde new lib import functions --- openpype/lib/__init__.py | 6 +- openpype/lib/python_module_tools.py | 96 ++++++++++++++++++++++++----- 2 files changed, 84 insertions(+), 18 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 12c04a4236..52a6024feb 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -52,9 +52,11 @@ from .vendor_bin_utils import ( ) from .python_module_tools import ( + import_filepath, modules_from_path, recursive_bases_from_class, - classes_from_module + classes_from_module, + load_module_from_dirpath ) from .avalon_context import ( @@ -170,9 +172,11 @@ __all__ = [ "get_ffmpeg_tool_path", "ffprobe_streams", + "import_filepath", "modules_from_path", "recursive_bases_from_class", "classes_from_module", + "load_module_from_dirpath", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 44a1007889..102ae7e71a 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -9,6 +9,29 @@ log = logging.getLogger(__name__) PY3 = sys.version_info[0] == 3 +def import_filepath(filepath, module_name=None): + if module_name is None: + module_name = os.path.splitext(os.path.basename(filepath))[0] + + # Prepare module object where content of file will be parsed + module = types.ModuleType(module_name) + + if PY3: + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, filepath + ) + module_loader.exec_module(module) + else: + # Execute module code and store content to module + with open(filepath) as _stream: + # Execute content and store it to module object + exec(_stream.read(), module.__dict__) + + module.__file__ = filepath + return module + + def modules_from_path(folder_path): """Get 
python scripts as modules from a path. @@ -55,23 +78,7 @@ def modules_from_path(folder_path): continue try: - # Prepare module object where content of file will be parsed - module = types.ModuleType(mod_name) - - if PY3: - # Use loader so module has full specs - module_loader = importlib.machinery.SourceFileLoader( - mod_name, full_path - ) - module_loader.exec_module(module) - else: - # Execute module code and store content to module - with open(full_path) as _stream: - # Execute content and store it to module object - exec(_stream.read(), module.__dict__) - - module.__file__ = full_path - + module = import_filepath(full_path, mod_name) modules.append((full_path, module)) except Exception: @@ -127,3 +134,58 @@ def classes_from_module(superclass, module): classes.append(obj) return classes + + +def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + full_module_name = "{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import imp + + dst_module = sys.modules[dst_module_name] + + fp, pathname, description = imp.find_module(module_name, [dirpath]) + module = imp.load_module(full_module_name, fp, pathname, description) + setattr(dst_module, module_name, module) + + return module + + +def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + full_module_name = "{}.{}".format(dst_module_name, module_name) + if full_module_name in sys.modules: + return sys.modules[full_module_name] + + import importlib.util + from importlib._bootstrap_external import PathFinder + + dst_module = sys.modules[dst_module_name] + loader = PathFinder.find_module(full_module_name, [dirpath]) + + spec = importlib.util.spec_from_loader( + full_module_name, loader, origin=dirpath + ) + + module = importlib.util.module_from_spec(spec) + + if dst_module is not None: + setattr(dst_module, module_name, module) + + sys.modules[full_module_name] = module + + loader.exec_module(module) + + 
return module + + +def load_module_from_dirpath(dirpath, folder_name, dst_module_name): + if PY3: + module = _load_module_from_dirpath_py3( + dirpath, folder_name, dst_module_name + ) + else: + module = _load_module_from_dirpath_py2( + dirpath, folder_name, dst_module_name + ) + return module From cc457406a78aaceb5ccd0504b330211313a9879f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:11:43 +0200 Subject: [PATCH 047/308] use import_filepath from lib --- openpype/modules/base.py | 22 +++------------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e8e3860297..8709bccf3e 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -51,6 +51,8 @@ def load_interfaces(force=False): if not force and "openpype_interfaces" in sys.modules: return + from openpype.lib import import_filepath + sys.modules["openpype_interfaces"] = openpype_interfaces = __ModuleClass() log = PypeLogger.get_logger("InterfacesLoader") @@ -73,27 +75,9 @@ def load_interfaces(force=False): if not os.path.exists(full_path): continue - filename = os.path.splitext(os.path.basename(full_path))[0] - try: # Prepare module object where content of file will be parsed - module = types.ModuleType(filename) - - if six.PY3: - import importlib - - # Use loader so module has full specs - module_loader = importlib.machinery.SourceFileLoader( - filename, full_path - ) - module_loader.exec_module(module) - else: - # Execute module code and store content to module - with open(full_path) as _stream: - # Execute content and store it to module object - exec(_stream.read(), module.__dict__) - - module.__file__ = full_path + module = import_filepath(full_path) except Exception: log.warning( From 8d5ef62c1c26fe3fd5e6542ea07b665975a5bce0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 27 Jul 2021 20:13:51 +0200 Subject: [PATCH 048/308] minor changes --- openpype/modules/base.py | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 8709bccf3e..3c2aca73d6 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -107,6 +107,7 @@ def load_modules(force=False): log = PypeLogger.get_logger("ModulesLoader") + # TODO import dynamically from defined paths from . import ( avalon_apps, clockify, @@ -175,7 +176,7 @@ class PypeModule: def __init__(self, manager, settings): self.manager = manager - self.log = PypeLogger().get_logger(self.name) + self.log = PypeLogger.get_logger(self.name) self.initialize(settings) From 0444e325501d5d7e0c901d80b9e0c8994c04c11e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 11:51:05 +0200 Subject: [PATCH 049/308] python 2 compatibility --- openpype/modules/base.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 3c2aca73d6..f1b0ef6808 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -16,10 +16,12 @@ from openpype.settings import get_system_settings from openpype.lib import PypeLogger -class __ModuleClass: +# Inherit from `object` for Python 2 hosts +class _ModuleClass(object): def __init__(self): - self.object_setattr("__attributes__", {}) - self.object_setattr("__defaults__", set()) + # Call setattr on super class + super(_ModuleClass, self).__setattr__("__attributes__", dict()) + super(_ModuleClass, self).__setattr__("__defaults__", set()) def __getattr__(self, attr_name): return self.__attributes__.get( @@ -31,9 +33,6 @@ class __ModuleClass: for module in self.values(): yield module - def object_setattr(self, attr_name, value): - object.__setattr__(self, attr_name, value) - def __setattr__(self, attr_name, value): self.__attributes__[attr_name] = value @@ -53,7 +52,7 @@ def load_interfaces(force=False): from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = __ModuleClass() + 
sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass() log = PypeLogger.get_logger("InterfacesLoader") @@ -103,7 +102,7 @@ def load_modules(force=False): from openpype.lib import modules_from_path - sys.modules["openpype_modules"] = openpype_modules = __ModuleClass() + sys.modules["openpype_modules"] = openpype_modules = _ModuleClass() log = PypeLogger.get_logger("ModulesLoader") From 9b84b6b72a3178599db07d930ec443db82d1f5f5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 11:51:12 +0200 Subject: [PATCH 050/308] added missing launcher module --- openpype/modules/base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index f1b0ef6808..a13363f18d 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -120,6 +120,7 @@ def load_modules(force=False): sync_server, timers_manager, webserver, + launcher_action, standalonepublish_action, project_manager_action ) @@ -135,6 +136,7 @@ def load_modules(force=False): setattr(openpype_modules, "slack", slack) setattr(openpype_modules, "timers_manager", timers_manager) setattr(openpype_modules, "webserver", webserver) + setattr(openpype_modules, "launcher_action", launcher_action) setattr( openpype_modules, "standalonepublish_action", standalonepublish_action ) From c2720b6728bb5503897cfbd7f6f746bf6b2d22d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 11:52:53 +0200 Subject: [PATCH 051/308] fix launcher module --- openpype/modules/launcher_action.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/launcher_action.py b/openpype/modules/launcher_action.py index 0059ff021b..728143ffac 100644 --- a/openpype/modules/launcher_action.py +++ b/openpype/modules/launcher_action.py @@ -1,4 +1,5 @@ -from . 
import PypeModule, ITrayAction +from openpype.modules import PypeModule +from openpype_interfaces import ITrayAction class LauncherAction(PypeModule, ITrayAction): From 0b0b74ca7cd0d552583696377324e95cc9de9dfd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:02:46 +0200 Subject: [PATCH 052/308] added name attribute to _ModuleClass --- openpype/modules/base.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index a13363f18d..9ae799d6d7 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -18,8 +18,9 @@ from openpype.lib import PypeLogger # Inherit from `object` for Python 2 hosts class _ModuleClass(object): - def __init__(self): + def __init__(self, name): # Call setattr on super class + super(_ModuleClass, self).__setattr__("name", name) super(_ModuleClass, self).__setattr__("__attributes__", dict()) super(_ModuleClass, self).__setattr__("__defaults__", set()) @@ -52,7 +53,9 @@ def load_interfaces(force=False): from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass() + sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass( + "openpype_interfaces" + ) log = PypeLogger.get_logger("InterfacesLoader") @@ -102,7 +105,9 @@ def load_modules(force=False): from openpype.lib import modules_from_path - sys.modules["openpype_modules"] = openpype_modules = _ModuleClass() + sys.modules["openpype_modules"] = openpype_modules = _ModuleClass( + "openpype_modules" + ) log = PypeLogger.get_logger("ModulesLoader") From 0ec3bb18d9878b9c67d5f29147f2b66596a22865 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:03:51 +0200 Subject: [PATCH 053/308] added _InterfacesClass for interfaces --- openpype/modules/base.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 9ae799d6d7..2086a5a280 100644 --- 
a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -47,14 +47,18 @@ class _ModuleClass(object): return self.__attributes__.items() +class _InterfacesClass(_ModuleClass): + pass + + def load_interfaces(force=False): if not force and "openpype_interfaces" in sys.modules: return from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = _ModuleClass( - "openpype_interfaces" + sys.modules["openpype_interfaces"] = openpype_interfaces = ( + _InterfacesClass("openpype_interfaces") ) log = PypeLogger.get_logger("InterfacesLoader") From a9616ce560e0d515a46825d3f055f19708b6de01 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:05:12 +0200 Subject: [PATCH 054/308] Interface return missing interface if is not found --- openpype/modules/base.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 2086a5a280..d2a1b8ed93 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -25,10 +25,11 @@ class _ModuleClass(object): super(_ModuleClass, self).__setattr__("__defaults__", set()) def __getattr__(self, attr_name): - return self.__attributes__.get( - attr_name, - type("Missing.{}".format(attr_name), (), {}) - ) + if attr_name not in self.__attributes__: + raise ImportError("No module named {}.{}".format( + self.name, attr_name + )) + return self.__attributes__[attr_name] def __iter__(self): for module in self.values(): @@ -48,7 +49,16 @@ class _ModuleClass(object): class _InterfacesClass(_ModuleClass): - pass + def __getattr__(self, attr_name): + if attr_name not in self.__attributes__: + # Fake Interface if is not missing + self.__attributes__[attr_name] = type( + "{}".format(attr_name), + (MissingInteface, ), + {} + ) + + return self.__attributes__[attr_name] def load_interfaces(force=False): @@ -162,6 +172,10 @@ class OpenPypeInterface: pass +class MissingInteface(OpenPypeInterface): + pass + + 
@six.add_metaclass(ABCMeta) class PypeModule: """Base class of pype module. From bf4d85d5872afd9c3ebb9e8834bcc11877b07fa8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:05:24 +0200 Subject: [PATCH 055/308] fix remaining ISettingsChangeListener imports --- openpype/settings/lib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index ec9846eef7..4a363910b8 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -162,7 +162,8 @@ def save_project_settings(project_name, overrides): SaveWarningExc: If any module raises the exception. """ # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener default_values = get_default_settings()[PROJECT_SETTINGS_KEY] if project_name: @@ -223,7 +224,8 @@ def save_project_anatomy(project_name, anatomy_data): SaveWarningExc: If any module raises the exception. 
""" # Notify Pype modules - from openpype.modules import ModulesManager, ISettingsChangeListener + from openpype.modules import ModulesManager + from openpype_interfaces import ISettingsChangeListener default_values = get_default_settings()[PROJECT_ANATOMY_KEY] if project_name: From d1dfa251d9fe02d12ffc74041b8a9aaf6439638c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:05:52 +0200 Subject: [PATCH 056/308] simplified fake interface --- openpype/modules/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index d2a1b8ed93..877c363f61 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -53,7 +53,7 @@ class _InterfacesClass(_ModuleClass): if attr_name not in self.__attributes__: # Fake Interface if is not missing self.__attributes__[attr_name] = type( - "{}".format(attr_name), + attr_name, (MissingInteface, ), {} ) From 2b9f4794abb3422daac91b2d6a4cce9c3a008c40 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 12:14:55 +0200 Subject: [PATCH 057/308] removed unused file --- openpype/modules/modules_import.py | 84 ------------------------------ 1 file changed, 84 deletions(-) delete mode 100644 openpype/modules/modules_import.py diff --git a/openpype/modules/modules_import.py b/openpype/modules/modules_import.py deleted file mode 100644 index 24441c6d20..0000000000 --- a/openpype/modules/modules_import.py +++ /dev/null @@ -1,84 +0,0 @@ -import sys -import six - - -class __ModuleClass: - __attributes__ = {} - __defaults__ = set() - - def __getattr__(self, attr_name): - return self.__attributes__.get( - attr_name, - type("Missing.{}".format(attr_name), (), {}) - ) - - def __setattr__(self, attr_name, value): - self.__attributes__[attr_name] = value - - def keys(self): - return self.__attributes__.keys() - - def values(self): - return self.__attributes__.values() - - def items(self): - return self.__attributes__.items() - - -def 
_load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) - if full_module_name in sys.modules: - return sys.modules[full_module_name] - - import imp - - dst_module = sys.modules[dst_module_name] - - fp, pathname, description = imp.find_module(module_name, [dirpath]) - module = imp.load_module(full_module_name, fp, pathname, description) - setattr(dst_module, module_name, module) - - return module - - -def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) - if full_module_name in sys.modules: - return sys.modules[full_module_name] - - import importlib.util - from importlib._bootstrap_external import PathFinder - - dst_module = sys.modules[dst_module_name] - loader = PathFinder.find_module(full_module_name, [dirpath]) - - spec = importlib.util.spec_from_loader( - full_module_name, loader, origin=dirpath - ) - - module = importlib.util.module_from_spec(spec) - - if dst_module is not None: - setattr(dst_module, module_name, module) - - sys.modules[full_module_name] = module - - loader.exec_module(module) - - return module - - -def load_module_from_dirpath(dirpath, folder_name, dst_module_name): - if six.PY3: - module = _load_module_from_dirpath_py3( - dirpath, folder_name, dst_module_name - ) - else: - module = _load_module_from_dirpath_py2( - dirpath, folder_name, dst_module_name - ) - return module - - -sys.modules["openpype_modules"] = __ModuleClass() -sys.modules["openpype_interfaces"] = __ModuleClass() From bf1db0f57c4924257dadcbb48026703cb9c4d46c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:05:29 +0200 Subject: [PATCH 058/308] created folder default modules --- openpype/modules/{ => default_modules}/clockify/__init__.py | 0 openpype/modules/{ => default_modules}/clockify/clockify_api.py | 0 .../modules/{ => default_modules}/clockify/clockify_module.py | 0 openpype/modules/{ => 
default_modules}/clockify/constants.py | 0 .../clockify/ftrack/server/action_clockify_sync_server.py | 2 +- .../clockify/ftrack/user/action_clockify_sync_local.py | 2 +- .../clockify/launcher_actions/ClockifyStart.py | 0 .../clockify/launcher_actions/ClockifySync.py | 0 openpype/modules/{ => default_modules}/clockify/widgets.py | 0 9 files changed, 2 insertions(+), 2 deletions(-) rename openpype/modules/{ => default_modules}/clockify/__init__.py (100%) rename openpype/modules/{ => default_modules}/clockify/clockify_api.py (100%) rename openpype/modules/{ => default_modules}/clockify/clockify_module.py (100%) rename openpype/modules/{ => default_modules}/clockify/constants.py (100%) rename openpype/modules/{ => default_modules}/clockify/ftrack/server/action_clockify_sync_server.py (98%) rename openpype/modules/{ => default_modules}/clockify/ftrack/user/action_clockify_sync_local.py (98%) rename openpype/modules/{ => default_modules}/clockify/launcher_actions/ClockifyStart.py (100%) rename openpype/modules/{ => default_modules}/clockify/launcher_actions/ClockifySync.py (100%) rename openpype/modules/{ => default_modules}/clockify/widgets.py (100%) diff --git a/openpype/modules/clockify/__init__.py b/openpype/modules/default_modules/clockify/__init__.py similarity index 100% rename from openpype/modules/clockify/__init__.py rename to openpype/modules/default_modules/clockify/__init__.py diff --git a/openpype/modules/clockify/clockify_api.py b/openpype/modules/default_modules/clockify/clockify_api.py similarity index 100% rename from openpype/modules/clockify/clockify_api.py rename to openpype/modules/default_modules/clockify/clockify_api.py diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/default_modules/clockify/clockify_module.py similarity index 100% rename from openpype/modules/clockify/clockify_module.py rename to openpype/modules/default_modules/clockify/clockify_module.py diff --git a/openpype/modules/clockify/constants.py 
b/openpype/modules/default_modules/clockify/constants.py similarity index 100% rename from openpype/modules/clockify/constants.py rename to openpype/modules/default_modules/clockify/constants.py diff --git a/openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py similarity index 98% rename from openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py rename to openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py index 495f87dc7e..8379414c0c 100644 --- a/openpype/modules/clockify/ftrack/server/action_clockify_sync_server.py +++ b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py @@ -1,6 +1,6 @@ import os import json -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction from openpype.modules.clockify.clockify_api import ClockifyAPI diff --git a/openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py similarity index 98% rename from openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py rename to openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py index 4f4579a8bf..3d55ee92b6 100644 --- a/openpype/modules/clockify/ftrack/user/action_clockify_sync_local.py +++ b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.modules.clockify.clockify_api import ClockifyAPI diff --git a/openpype/modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py similarity index 100% rename from 
openpype/modules/clockify/launcher_actions/ClockifyStart.py rename to openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py diff --git a/openpype/modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py similarity index 100% rename from openpype/modules/clockify/launcher_actions/ClockifySync.py rename to openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py diff --git a/openpype/modules/clockify/widgets.py b/openpype/modules/default_modules/clockify/widgets.py similarity index 100% rename from openpype/modules/clockify/widgets.py rename to openpype/modules/default_modules/clockify/widgets.py From 68b1183d815e4d66867024e0f8b6cf49016c276b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:05:57 +0200 Subject: [PATCH 059/308] moved avalon apps module --- .../modules/{ => default_modules}/avalon_apps/__init__.py | 0 .../modules/{ => default_modules}/avalon_apps/avalon_app.py | 0 .../modules/{ => default_modules}/avalon_apps/rest_api.py | 5 +---- 3 files changed, 1 insertion(+), 4 deletions(-) rename openpype/modules/{ => default_modules}/avalon_apps/__init__.py (100%) rename openpype/modules/{ => default_modules}/avalon_apps/avalon_app.py (100%) rename openpype/modules/{ => default_modules}/avalon_apps/rest_api.py (97%) diff --git a/openpype/modules/avalon_apps/__init__.py b/openpype/modules/default_modules/avalon_apps/__init__.py similarity index 100% rename from openpype/modules/avalon_apps/__init__.py rename to openpype/modules/default_modules/avalon_apps/__init__.py diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/default_modules/avalon_apps/avalon_app.py similarity index 100% rename from openpype/modules/avalon_apps/avalon_app.py rename to openpype/modules/default_modules/avalon_apps/avalon_app.py diff --git a/openpype/modules/avalon_apps/rest_api.py b/openpype/modules/default_modules/avalon_apps/rest_api.py 
similarity index 97% rename from openpype/modules/avalon_apps/rest_api.py rename to openpype/modules/default_modules/avalon_apps/rest_api.py index b77c256398..533050fc0c 100644 --- a/openpype/modules/avalon_apps/rest_api.py +++ b/openpype/modules/default_modules/avalon_apps/rest_api.py @@ -1,16 +1,13 @@ import os -import re import json import datetime -import bson from bson.objectid import ObjectId -import bson.json_util from aiohttp.web_response import Response from avalon.api import AvalonMongoDB -from openpype.modules.webserver.base_routes import RestApiEndpoint +from openpype_modules.webserver.base_routes import RestApiEndpoint class _RestApiEndpoint(RestApiEndpoint): From c5798467e23f8551701e33ff14117c85c032f86d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:06:26 +0200 Subject: [PATCH 060/308] moved deadline module --- openpype/modules/{ => default_modules}/deadline/__init__.py | 0 .../modules/{ => default_modules}/deadline/deadline_module.py | 0 .../deadline/plugins/publish/submit_aftereffects_deadline.py | 0 .../deadline/plugins/publish/submit_harmony_deadline.py | 0 .../deadline/plugins/publish/submit_maya_deadline.py | 0 .../deadline/plugins/publish/submit_nuke_deadline.py | 0 .../deadline/plugins/publish/submit_publish_job.py | 0 .../deadline/plugins/publish/validate_deadline_connection.py | 0 .../plugins/publish/validate_expected_and_rendered_files.py | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/deadline/__init__.py (100%) rename openpype/modules/{ => default_modules}/deadline/deadline_module.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_aftereffects_deadline.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_harmony_deadline.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_maya_deadline.py (100%) rename openpype/modules/{ => 
default_modules}/deadline/plugins/publish/submit_nuke_deadline.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_publish_job.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/validate_deadline_connection.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/validate_expected_and_rendered_files.py (100%) diff --git a/openpype/modules/deadline/__init__.py b/openpype/modules/default_modules/deadline/__init__.py similarity index 100% rename from openpype/modules/deadline/__init__.py rename to openpype/modules/default_modules/deadline/__init__.py diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/default_modules/deadline/deadline_module.py similarity index 100% rename from openpype/modules/deadline/deadline_module.py rename to openpype/modules/default_modules/deadline/deadline_module.py diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_aftereffects_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_aftereffects_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_harmony_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_harmony_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_maya_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_maya_deadline.py rename to 
openpype/modules/default_modules/deadline/plugins/publish/submit_maya_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_nuke_deadline.py diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_publish_job.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_publish_job.py diff --git a/openpype/modules/deadline/plugins/publish/validate_deadline_connection.py b/openpype/modules/default_modules/deadline/plugins/publish/validate_deadline_connection.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/validate_deadline_connection.py rename to openpype/modules/default_modules/deadline/plugins/publish/validate_deadline_connection.py diff --git a/openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/openpype/modules/default_modules/deadline/plugins/publish/validate_expected_and_rendered_files.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py rename to openpype/modules/default_modules/deadline/plugins/publish/validate_expected_and_rendered_files.py From 7b5ef747e9d1e69f66bcc8e4b775d4f813e600ee Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:07:48 +0200 Subject: [PATCH 061/308] moved webserver module --- openpype/modules/{ => default_modules}/webserver/__init__.py | 0 openpype/modules/{ => default_modules}/webserver/base_routes.py | 0 .../{ => default_modules}/webserver/host_console_listener.py | 0 openpype/modules/{ => 
default_modules}/webserver/interfaces.py | 0 openpype/modules/{ => default_modules}/webserver/server.py | 0 .../modules/{ => default_modules}/webserver/webserver_module.py | 2 +- 6 files changed, 1 insertion(+), 1 deletion(-) rename openpype/modules/{ => default_modules}/webserver/__init__.py (100%) rename openpype/modules/{ => default_modules}/webserver/base_routes.py (100%) rename openpype/modules/{ => default_modules}/webserver/host_console_listener.py (100%) rename openpype/modules/{ => default_modules}/webserver/interfaces.py (100%) rename openpype/modules/{ => default_modules}/webserver/server.py (100%) rename openpype/modules/{ => default_modules}/webserver/webserver_module.py (98%) diff --git a/openpype/modules/webserver/__init__.py b/openpype/modules/default_modules/webserver/__init__.py similarity index 100% rename from openpype/modules/webserver/__init__.py rename to openpype/modules/default_modules/webserver/__init__.py diff --git a/openpype/modules/webserver/base_routes.py b/openpype/modules/default_modules/webserver/base_routes.py similarity index 100% rename from openpype/modules/webserver/base_routes.py rename to openpype/modules/default_modules/webserver/base_routes.py diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/default_modules/webserver/host_console_listener.py similarity index 100% rename from openpype/modules/webserver/host_console_listener.py rename to openpype/modules/default_modules/webserver/host_console_listener.py diff --git a/openpype/modules/webserver/interfaces.py b/openpype/modules/default_modules/webserver/interfaces.py similarity index 100% rename from openpype/modules/webserver/interfaces.py rename to openpype/modules/default_modules/webserver/interfaces.py diff --git a/openpype/modules/webserver/server.py b/openpype/modules/default_modules/webserver/server.py similarity index 100% rename from openpype/modules/webserver/server.py rename to openpype/modules/default_modules/webserver/server.py 
diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py similarity index 98% rename from openpype/modules/webserver/webserver_module.py rename to openpype/modules/default_modules/webserver/webserver_module.py index 192baad013..f81bf52410 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -50,7 +50,7 @@ class WebServerModule(PypeModule, ITrayService): ) def _add_listeners(self): - from openpype.modules.webserver import host_console_listener + from openpype_modules.webserver import host_console_listener self._host_listener = host_console_listener.HostListener( self.server_manager, self From 6291e01003ce072fd37639688c4febcc35dc5397 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:10:34 +0200 Subject: [PATCH 062/308] moved idle manager module --- openpype/modules/{ => default_modules}/idle_manager/__init__.py | 0 .../modules/{ => default_modules}/idle_manager/idle_module.py | 0 .../modules/{ => default_modules}/idle_manager/idle_threads.py | 0 openpype/modules/{ => default_modules}/idle_manager/interfaces.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/idle_manager/__init__.py (100%) rename openpype/modules/{ => default_modules}/idle_manager/idle_module.py (100%) rename openpype/modules/{ => default_modules}/idle_manager/idle_threads.py (100%) rename openpype/modules/{ => default_modules}/idle_manager/interfaces.py (100%) diff --git a/openpype/modules/idle_manager/__init__.py b/openpype/modules/default_modules/idle_manager/__init__.py similarity index 100% rename from openpype/modules/idle_manager/__init__.py rename to openpype/modules/default_modules/idle_manager/__init__.py diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/default_modules/idle_manager/idle_module.py similarity index 100% rename from 
openpype/modules/idle_manager/idle_module.py rename to openpype/modules/default_modules/idle_manager/idle_module.py diff --git a/openpype/modules/idle_manager/idle_threads.py b/openpype/modules/default_modules/idle_manager/idle_threads.py similarity index 100% rename from openpype/modules/idle_manager/idle_threads.py rename to openpype/modules/default_modules/idle_manager/idle_threads.py diff --git a/openpype/modules/idle_manager/interfaces.py b/openpype/modules/default_modules/idle_manager/interfaces.py similarity index 100% rename from openpype/modules/idle_manager/interfaces.py rename to openpype/modules/default_modules/idle_manager/interfaces.py From 3468a9a58be61109a4a84f0095b410af01bc38e9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:11:15 +0200 Subject: [PATCH 063/308] moved muster module --- openpype/modules/{ => default_modules}/muster/__init__.py | 0 openpype/modules/{ => default_modules}/muster/muster.py | 0 openpype/modules/{ => default_modules}/muster/rest_api.py | 0 openpype/modules/{ => default_modules}/muster/widget_login.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/muster/__init__.py (100%) rename openpype/modules/{ => default_modules}/muster/muster.py (100%) rename openpype/modules/{ => default_modules}/muster/rest_api.py (100%) rename openpype/modules/{ => default_modules}/muster/widget_login.py (100%) diff --git a/openpype/modules/muster/__init__.py b/openpype/modules/default_modules/muster/__init__.py similarity index 100% rename from openpype/modules/muster/__init__.py rename to openpype/modules/default_modules/muster/__init__.py diff --git a/openpype/modules/muster/muster.py b/openpype/modules/default_modules/muster/muster.py similarity index 100% rename from openpype/modules/muster/muster.py rename to openpype/modules/default_modules/muster/muster.py diff --git a/openpype/modules/muster/rest_api.py b/openpype/modules/default_modules/muster/rest_api.py similarity 
index 100% rename from openpype/modules/muster/rest_api.py rename to openpype/modules/default_modules/muster/rest_api.py diff --git a/openpype/modules/muster/widget_login.py b/openpype/modules/default_modules/muster/widget_login.py similarity index 100% rename from openpype/modules/muster/widget_login.py rename to openpype/modules/default_modules/muster/widget_login.py From fca039fdfae21eb29eb55f0ef7bdc20cd7c67ea5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:11:43 +0200 Subject: [PATCH 064/308] moved settings module --- .../modules/{ => default_modules}/settings_module/__init__.py | 0 .../modules/{ => default_modules}/settings_module/interfaces.py | 0 .../{ => default_modules}/settings_module/settings_action.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/settings_module/__init__.py (100%) rename openpype/modules/{ => default_modules}/settings_module/interfaces.py (100%) rename openpype/modules/{ => default_modules}/settings_module/settings_action.py (100%) diff --git a/openpype/modules/settings_module/__init__.py b/openpype/modules/default_modules/settings_module/__init__.py similarity index 100% rename from openpype/modules/settings_module/__init__.py rename to openpype/modules/default_modules/settings_module/__init__.py diff --git a/openpype/modules/settings_module/interfaces.py b/openpype/modules/default_modules/settings_module/interfaces.py similarity index 100% rename from openpype/modules/settings_module/interfaces.py rename to openpype/modules/default_modules/settings_module/interfaces.py diff --git a/openpype/modules/settings_module/settings_action.py b/openpype/modules/default_modules/settings_module/settings_action.py similarity index 100% rename from openpype/modules/settings_module/settings_action.py rename to openpype/modules/default_modules/settings_module/settings_action.py From 6fb0d1fece0dc15f630313c8af1e88fc0a163191 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 
Jul 2021 14:12:16 +0200 Subject: [PATCH 065/308] moved timers manager module --- openpype/modules/{ => default_modules}/timers_manager/__init__.py | 0 .../modules/{ => default_modules}/timers_manager/interfaces.py | 0 openpype/modules/{ => default_modules}/timers_manager/rest_api.py | 0 .../{ => default_modules}/timers_manager/timers_manager.py | 0 .../{ => default_modules}/timers_manager/widget_user_idle.py | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/timers_manager/__init__.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/interfaces.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/rest_api.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/timers_manager.py (100%) rename openpype/modules/{ => default_modules}/timers_manager/widget_user_idle.py (100%) diff --git a/openpype/modules/timers_manager/__init__.py b/openpype/modules/default_modules/timers_manager/__init__.py similarity index 100% rename from openpype/modules/timers_manager/__init__.py rename to openpype/modules/default_modules/timers_manager/__init__.py diff --git a/openpype/modules/timers_manager/interfaces.py b/openpype/modules/default_modules/timers_manager/interfaces.py similarity index 100% rename from openpype/modules/timers_manager/interfaces.py rename to openpype/modules/default_modules/timers_manager/interfaces.py diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/default_modules/timers_manager/rest_api.py similarity index 100% rename from openpype/modules/timers_manager/rest_api.py rename to openpype/modules/default_modules/timers_manager/rest_api.py diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py similarity index 100% rename from openpype/modules/timers_manager/timers_manager.py rename to openpype/modules/default_modules/timers_manager/timers_manager.py diff --git 
a/openpype/modules/timers_manager/widget_user_idle.py b/openpype/modules/default_modules/timers_manager/widget_user_idle.py similarity index 100% rename from openpype/modules/timers_manager/widget_user_idle.py rename to openpype/modules/default_modules/timers_manager/widget_user_idle.py From 224273c1f11ef1749a8d4d780756d54323de822d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:13:16 +0200 Subject: [PATCH 066/308] moved sync server module --- .../{ => default_modules}/sync_server/README.md | 0 .../{ => default_modules}/sync_server/__init__.py | 0 .../sync_server/providers/__init__.py | 0 .../sync_server/providers/abstract_provider.py | 0 .../sync_server/providers/gdrive.py | 0 .../sync_server/providers/lib.py | 0 .../sync_server/providers/local_drive.py | 0 .../sync_server/providers/resources/folder.png | Bin .../sync_server/providers/resources/gdrive.png | Bin .../sync_server/providers/resources/local_drive.png | Bin .../sync_server/providers/resources/studio.png | Bin .../sync_server/resources/paused.png | Bin .../sync_server/resources/synced.png | Bin .../sync_server/sync_server.py | 0 .../sync_server/sync_server_module.py | 0 .../{ => default_modules}/sync_server/tray/app.py | 0 .../sync_server/tray/delegates.py | 0 .../{ => default_modules}/sync_server/tray/lib.py | 0 .../sync_server/tray/models.py | 0 .../sync_server/tray/widgets.py | 0 .../{ => default_modules}/sync_server/utils.py | 0 21 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/sync_server/README.md (100%) rename openpype/modules/{ => default_modules}/sync_server/__init__.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/__init__.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/abstract_provider.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/gdrive.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/lib.py (100%) rename 
openpype/modules/{ => default_modules}/sync_server/providers/local_drive.py (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/folder.png (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/gdrive.png (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/local_drive.png (100%) rename openpype/modules/{ => default_modules}/sync_server/providers/resources/studio.png (100%) rename openpype/modules/{ => default_modules}/sync_server/resources/paused.png (100%) rename openpype/modules/{ => default_modules}/sync_server/resources/synced.png (100%) rename openpype/modules/{ => default_modules}/sync_server/sync_server.py (100%) rename openpype/modules/{ => default_modules}/sync_server/sync_server_module.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/app.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/delegates.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/lib.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/models.py (100%) rename openpype/modules/{ => default_modules}/sync_server/tray/widgets.py (100%) rename openpype/modules/{ => default_modules}/sync_server/utils.py (100%) diff --git a/openpype/modules/sync_server/README.md b/openpype/modules/default_modules/sync_server/README.md similarity index 100% rename from openpype/modules/sync_server/README.md rename to openpype/modules/default_modules/sync_server/README.md diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/default_modules/sync_server/__init__.py similarity index 100% rename from openpype/modules/sync_server/__init__.py rename to openpype/modules/default_modules/sync_server/__init__.py diff --git a/openpype/modules/sync_server/providers/__init__.py b/openpype/modules/default_modules/sync_server/providers/__init__.py similarity index 100% rename from 
openpype/modules/sync_server/providers/__init__.py rename to openpype/modules/default_modules/sync_server/providers/__init__.py diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/default_modules/sync_server/providers/abstract_provider.py similarity index 100% rename from openpype/modules/sync_server/providers/abstract_provider.py rename to openpype/modules/default_modules/sync_server/providers/abstract_provider.py diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/default_modules/sync_server/providers/gdrive.py similarity index 100% rename from openpype/modules/sync_server/providers/gdrive.py rename to openpype/modules/default_modules/sync_server/providers/gdrive.py diff --git a/openpype/modules/sync_server/providers/lib.py b/openpype/modules/default_modules/sync_server/providers/lib.py similarity index 100% rename from openpype/modules/sync_server/providers/lib.py rename to openpype/modules/default_modules/sync_server/providers/lib.py diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/default_modules/sync_server/providers/local_drive.py similarity index 100% rename from openpype/modules/sync_server/providers/local_drive.py rename to openpype/modules/default_modules/sync_server/providers/local_drive.py diff --git a/openpype/modules/sync_server/providers/resources/folder.png b/openpype/modules/default_modules/sync_server/providers/resources/folder.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/folder.png rename to openpype/modules/default_modules/sync_server/providers/resources/folder.png diff --git a/openpype/modules/sync_server/providers/resources/gdrive.png b/openpype/modules/default_modules/sync_server/providers/resources/gdrive.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/gdrive.png rename to openpype/modules/default_modules/sync_server/providers/resources/gdrive.png diff 
--git a/openpype/modules/sync_server/providers/resources/local_drive.png b/openpype/modules/default_modules/sync_server/providers/resources/local_drive.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/local_drive.png rename to openpype/modules/default_modules/sync_server/providers/resources/local_drive.png diff --git a/openpype/modules/sync_server/providers/resources/studio.png b/openpype/modules/default_modules/sync_server/providers/resources/studio.png similarity index 100% rename from openpype/modules/sync_server/providers/resources/studio.png rename to openpype/modules/default_modules/sync_server/providers/resources/studio.png diff --git a/openpype/modules/sync_server/resources/paused.png b/openpype/modules/default_modules/sync_server/resources/paused.png similarity index 100% rename from openpype/modules/sync_server/resources/paused.png rename to openpype/modules/default_modules/sync_server/resources/paused.png diff --git a/openpype/modules/sync_server/resources/synced.png b/openpype/modules/default_modules/sync_server/resources/synced.png similarity index 100% rename from openpype/modules/sync_server/resources/synced.png rename to openpype/modules/default_modules/sync_server/resources/synced.png diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/default_modules/sync_server/sync_server.py similarity index 100% rename from openpype/modules/sync_server/sync_server.py rename to openpype/modules/default_modules/sync_server/sync_server.py diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/default_modules/sync_server/sync_server_module.py similarity index 100% rename from openpype/modules/sync_server/sync_server_module.py rename to openpype/modules/default_modules/sync_server/sync_server_module.py diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/default_modules/sync_server/tray/app.py similarity index 100% rename from 
openpype/modules/sync_server/tray/app.py rename to openpype/modules/default_modules/sync_server/tray/app.py diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/default_modules/sync_server/tray/delegates.py similarity index 100% rename from openpype/modules/sync_server/tray/delegates.py rename to openpype/modules/default_modules/sync_server/tray/delegates.py diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/default_modules/sync_server/tray/lib.py similarity index 100% rename from openpype/modules/sync_server/tray/lib.py rename to openpype/modules/default_modules/sync_server/tray/lib.py diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/default_modules/sync_server/tray/models.py similarity index 100% rename from openpype/modules/sync_server/tray/models.py rename to openpype/modules/default_modules/sync_server/tray/models.py diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/default_modules/sync_server/tray/widgets.py similarity index 100% rename from openpype/modules/sync_server/tray/widgets.py rename to openpype/modules/default_modules/sync_server/tray/widgets.py diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/default_modules/sync_server/utils.py similarity index 100% rename from openpype/modules/sync_server/utils.py rename to openpype/modules/default_modules/sync_server/utils.py From a2887d9023eb912680b9d74cfdb898b2752a2684 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:13:31 +0200 Subject: [PATCH 067/308] moved standalone publish action --- .../modules/{ => default_modules}/standalonepublish_action.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/standalonepublish_action.py (100%) diff --git a/openpype/modules/standalonepublish_action.py b/openpype/modules/default_modules/standalonepublish_action.py similarity index 100% rename from 
openpype/modules/standalonepublish_action.py rename to openpype/modules/default_modules/standalonepublish_action.py From 8f79bac4234bbcba16578a596aa1a571e8f149ea Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:13:54 +0200 Subject: [PATCH 068/308] moved log viewer module --- openpype/modules/{ => default_modules}/log_viewer/__init__.py | 0 .../modules/{ => default_modules}/log_viewer/log_view_module.py | 0 .../modules/{ => default_modules}/log_viewer/tray/__init__.py | 0 openpype/modules/{ => default_modules}/log_viewer/tray/app.py | 0 openpype/modules/{ => default_modules}/log_viewer/tray/models.py | 0 openpype/modules/{ => default_modules}/log_viewer/tray/widgets.py | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/log_viewer/__init__.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/log_view_module.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/__init__.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/app.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/models.py (100%) rename openpype/modules/{ => default_modules}/log_viewer/tray/widgets.py (100%) diff --git a/openpype/modules/log_viewer/__init__.py b/openpype/modules/default_modules/log_viewer/__init__.py similarity index 100% rename from openpype/modules/log_viewer/__init__.py rename to openpype/modules/default_modules/log_viewer/__init__.py diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/default_modules/log_viewer/log_view_module.py similarity index 100% rename from openpype/modules/log_viewer/log_view_module.py rename to openpype/modules/default_modules/log_viewer/log_view_module.py diff --git a/openpype/modules/log_viewer/tray/__init__.py b/openpype/modules/default_modules/log_viewer/tray/__init__.py similarity index 100% rename from openpype/modules/log_viewer/tray/__init__.py rename to 
openpype/modules/default_modules/log_viewer/tray/__init__.py diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/default_modules/log_viewer/tray/app.py similarity index 100% rename from openpype/modules/log_viewer/tray/app.py rename to openpype/modules/default_modules/log_viewer/tray/app.py diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/default_modules/log_viewer/tray/models.py similarity index 100% rename from openpype/modules/log_viewer/tray/models.py rename to openpype/modules/default_modules/log_viewer/tray/models.py diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/default_modules/log_viewer/tray/widgets.py similarity index 100% rename from openpype/modules/log_viewer/tray/widgets.py rename to openpype/modules/default_modules/log_viewer/tray/widgets.py From e42b03c0c48193edd52a5a013c8fb98a8d1b33ef Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:14:12 +0200 Subject: [PATCH 069/308] moved project manager action --- openpype/modules/{ => default_modules}/project_manager_action.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/project_manager_action.py (100%) diff --git a/openpype/modules/project_manager_action.py b/openpype/modules/default_modules/project_manager_action.py similarity index 100% rename from openpype/modules/project_manager_action.py rename to openpype/modules/default_modules/project_manager_action.py From 3259929658bd39601a8c4ab77554db7bc9b4e936 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:15:42 +0200 Subject: [PATCH 070/308] moved launcher action module --- openpype/modules/{ => default_modules}/launcher_action.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/launcher_action.py (100%) diff --git a/openpype/modules/launcher_action.py b/openpype/modules/default_modules/launcher_action.py similarity index 100% rename from 
openpype/modules/launcher_action.py rename to openpype/modules/default_modules/launcher_action.py From f25e242df9305ed3191ae82ef3e3396de17a766e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:16:06 +0200 Subject: [PATCH 071/308] moved default interfaces --- openpype/modules/{ => default_modules}/interfaces.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/interfaces.py (100%) diff --git a/openpype/modules/interfaces.py b/openpype/modules/default_modules/interfaces.py similarity index 100% rename from openpype/modules/interfaces.py rename to openpype/modules/default_modules/interfaces.py From df5434e895370fcee4d0b1c60dce8bceac5aa57a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:16:39 +0200 Subject: [PATCH 072/308] moved slack module --- .../modules/{ => default_modules}/slack/README.md | 0 .../modules/{ => default_modules}/slack/__init__.py | 0 .../slack/launch_hooks/pre_python2_vendor.py | 0 .../{ => default_modules}/slack/manifest.yml | 0 .../slack/plugins/publish/collect_slack_family.py | 0 .../slack/plugins/publish/integrate_slack_api.py | 0 .../python2_vendor/python-slack-sdk-1/.appveyor.yml | 0 .../python2_vendor/python-slack-sdk-1/.coveragerc | 0 .../slack/python2_vendor/python-slack-sdk-1/.flake8 | 0 .../python-slack-sdk-1/.github/contributing.md | 0 .../python-slack-sdk-1/.github/issue_template.md | 0 .../python-slack-sdk-1/.github/maintainers_guide.md | 0 .../.github/pull_request_template.md | 0 .../python2_vendor/python-slack-sdk-1/.gitignore | 0 .../python2_vendor/python-slack-sdk-1/.travis.yml | 0 .../slack/python2_vendor/python-slack-sdk-1/LICENSE | 0 .../python2_vendor/python-slack-sdk-1/MANIFEST.in | 0 .../python2_vendor/python-slack-sdk-1/README.rst | 0 .../python-slack-sdk-1/docs-src/.gitignore | 0 .../python-slack-sdk-1/docs-src/Makefile | 0 .../docs-src/_themes/slack/conf.py | 0 .../docs-src/_themes/slack/layout.html | 0 
.../docs-src/_themes/slack/localtoc.html | 0 .../docs-src/_themes/slack/relations.html | 0 .../docs-src/_themes/slack/sidebar.html | 0 .../docs-src/_themes/slack/static/default.css_t | 0 .../docs-src/_themes/slack/static/docs.css_t | 0 .../docs-src/_themes/slack/static/pygments.css_t | 0 .../docs-src/_themes/slack/theme.conf | 0 .../python-slack-sdk-1/docs-src/about.rst | 0 .../python-slack-sdk-1/docs-src/auth.rst | 0 .../python-slack-sdk-1/docs-src/basic_usage.rst | 0 .../python-slack-sdk-1/docs-src/changelog.rst | 0 .../python-slack-sdk-1/docs-src/conf.py | 0 .../python-slack-sdk-1/docs-src/conversations.rst | 0 .../python-slack-sdk-1/docs-src/faq.rst | 0 .../python-slack-sdk-1/docs-src/index.rst | 0 .../python-slack-sdk-1/docs-src/make.bat | 0 .../python-slack-sdk-1/docs-src/metadata.rst | 0 .../docs-src/real_time_messaging.rst | 0 .../slack/python2_vendor/python-slack-sdk-1/docs.sh | 0 .../python-slack-sdk-1/docs/.buildinfo | 0 .../python-slack-sdk-1/docs/.nojekyll | 0 .../python-slack-sdk-1/docs/_static/ajax-loader.gif | Bin .../python-slack-sdk-1/docs/_static/basic.css | 0 .../python-slack-sdk-1/docs/_static/classic.css | 0 .../docs/_static/comment-bright.png | Bin .../docs/_static/comment-close.png | Bin .../python-slack-sdk-1/docs/_static/comment.png | Bin .../python-slack-sdk-1/docs/_static/default.css | 0 .../python-slack-sdk-1/docs/_static/docs.css | 0 .../python-slack-sdk-1/docs/_static/doctools.js | 0 .../docs/_static/documentation_options.js | 0 .../docs/_static/down-pressed.png | Bin .../python-slack-sdk-1/docs/_static/down.png | Bin .../python-slack-sdk-1/docs/_static/file.png | Bin .../python-slack-sdk-1/docs/_static/jquery-3.2.1.js | 0 .../python-slack-sdk-1/docs/_static/jquery.js | 0 .../docs/_static/language_data.js | 0 .../python-slack-sdk-1/docs/_static/minus.png | Bin .../python-slack-sdk-1/docs/_static/plus.png | Bin .../python-slack-sdk-1/docs/_static/pygments.css | 0 .../python-slack-sdk-1/docs/_static/searchtools.js | 0 
.../python-slack-sdk-1/docs/_static/sidebar.js | 0 .../docs/_static/underscore-1.3.1.js | 0 .../python-slack-sdk-1/docs/_static/underscore.js | 0 .../python-slack-sdk-1/docs/_static/up-pressed.png | Bin .../python-slack-sdk-1/docs/_static/up.png | Bin .../python-slack-sdk-1/docs/_static/websupport.js | 0 .../python-slack-sdk-1/docs/about.html | 0 .../python-slack-sdk-1/docs/auth.html | 0 .../python-slack-sdk-1/docs/basic_usage.html | 0 .../python-slack-sdk-1/docs/changelog.html | 0 .../python-slack-sdk-1/docs/conversations.html | 0 .../python2_vendor/python-slack-sdk-1/docs/faq.html | 0 .../python-slack-sdk-1/docs/genindex.html | 0 .../python-slack-sdk-1/docs/index.html | 0 .../python-slack-sdk-1/docs/metadata.html | 0 .../python-slack-sdk-1/docs/objects.inv | 0 .../docs/real_time_messaging.html | 0 .../python-slack-sdk-1/docs/search.html | 0 .../python-slack-sdk-1/docs/searchindex.js | 0 .../python-slack-sdk-1/requirements.txt | 0 .../python2_vendor/python-slack-sdk-1/setup.cfg | 0 .../python2_vendor/python-slack-sdk-1/setup.py | 0 .../python-slack-sdk-1/slackclient/__init__.py | 0 .../python-slack-sdk-1/slackclient/channel.py | 0 .../python-slack-sdk-1/slackclient/client.py | 0 .../python-slack-sdk-1/slackclient/exceptions.py | 0 .../python-slack-sdk-1/slackclient/im.py | 0 .../python-slack-sdk-1/slackclient/server.py | 0 .../python-slack-sdk-1/slackclient/slackrequest.py | 0 .../python-slack-sdk-1/slackclient/user.py | 0 .../python-slack-sdk-1/slackclient/util.py | 0 .../python-slack-sdk-1/slackclient/version.py | 0 .../python-slack-sdk-1/test_requirements.txt | 0 .../python-slack-sdk-1/tests/conftest.py | 0 .../tests/data/channel.created.json | 0 .../python-slack-sdk-1/tests/data/im.created.json | 0 .../python-slack-sdk-1/tests/data/rtm.start.json | 0 .../python-slack-sdk-1/tests/data/slack_logo.png | Bin .../python-slack-sdk-1/tests/test_channel.py | 0 .../python-slack-sdk-1/tests/test_server.py | 0 .../python-slack-sdk-1/tests/test_slackclient.py | 0 
.../python-slack-sdk-1/tests/test_slackrequest.py | 0 .../slack/python2_vendor/python-slack-sdk-1/tox.ini | 0 .../{ => default_modules}/slack/slack_module.py | 0 107 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/slack/README.md (100%) rename openpype/modules/{ => default_modules}/slack/__init__.py (100%) rename openpype/modules/{ => default_modules}/slack/launch_hooks/pre_python2_vendor.py (100%) rename openpype/modules/{ => default_modules}/slack/manifest.yml (100%) rename openpype/modules/{ => default_modules}/slack/plugins/publish/collect_slack_family.py (100%) rename openpype/modules/{ => default_modules}/slack/plugins/publish/integrate_slack_api.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.coveragerc (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.flake8 (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.gitignore (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/.travis.yml (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/LICENSE (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/README.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs.sh (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png 
(100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/about.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/auth.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/faq.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/index.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/search.html (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/requirements.txt (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/setup.cfg (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/setup.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png (100%) rename openpype/modules/{ => 
default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py (100%) rename openpype/modules/{ => default_modules}/slack/python2_vendor/python-slack-sdk-1/tox.ini (100%) rename openpype/modules/{ => default_modules}/slack/slack_module.py (100%) diff --git a/openpype/modules/slack/README.md b/openpype/modules/default_modules/slack/README.md similarity index 100% rename from openpype/modules/slack/README.md rename to openpype/modules/default_modules/slack/README.md diff --git a/openpype/modules/slack/__init__.py b/openpype/modules/default_modules/slack/__init__.py similarity index 100% rename from openpype/modules/slack/__init__.py rename to openpype/modules/default_modules/slack/__init__.py diff --git a/openpype/modules/slack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py similarity index 100% rename from openpype/modules/slack/launch_hooks/pre_python2_vendor.py rename to openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py diff --git a/openpype/modules/slack/manifest.yml b/openpype/modules/default_modules/slack/manifest.yml similarity index 100% rename from openpype/modules/slack/manifest.yml rename to openpype/modules/default_modules/slack/manifest.yml diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/default_modules/slack/plugins/publish/collect_slack_family.py similarity index 100% rename from openpype/modules/slack/plugins/publish/collect_slack_family.py rename to openpype/modules/default_modules/slack/plugins/publish/collect_slack_family.py diff --git 
a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/default_modules/slack/plugins/publish/integrate_slack_api.py similarity index 100% rename from openpype/modules/slack/plugins/publish/integrate_slack_api.py rename to openpype/modules/default_modules/slack/plugins/publish/integrate_slack_api.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.appveyor.yml diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.coveragerc diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.flake8 b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.flake8 similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.flake8 rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.flake8 diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/contributing.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/issue_template.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/maintainers_guide.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.github/pull_request_template.md diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.gitignore b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.gitignore similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.gitignore rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.gitignore diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/.travis.yml diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/LICENSE b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/LICENSE similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/LICENSE rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/LICENSE diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/MANIFEST.in diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/README.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/README.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/README.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/README.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/.gitignore diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/Makefile diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/conf.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/layout.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/localtoc.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/relations.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/sidebar.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/default.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/docs.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/static/pygments.css_t diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf similarity index 100% rename from 
openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/_themes/slack/theme.conf diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/about.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/auth.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/basic_usage.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/changelog.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conf.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/conversations.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/faq.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/index.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/make.bat diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/metadata.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs-src/real_time_messaging.rst diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs.sh b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs.sh similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs.sh rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs.sh diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.buildinfo diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/.nojekyll diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/ajax-loader.gif diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/basic.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/classic.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-bright.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment-close.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/comment.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/default.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/docs.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/doctools.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js similarity 
index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/documentation_options.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down-pressed.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/down.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/file.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery-3.2.1.js diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/jquery.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/language_data.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/minus.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/plus.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/pygments.css diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/searchtools.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/sidebar.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore-1.3.1.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/underscore.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up-pressed.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/up.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/_static/websupport.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/about.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/auth.html diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/basic_usage.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/changelog.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/conversations.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/faq.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/genindex.html diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/index.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/metadata.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/objects.inv diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/real_time_messaging.html diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/search.html diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/docs/searchindex.js diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/requirements.txt diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.cfg diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/setup.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/setup.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/__init__.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/channel.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/client.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/exceptions.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/im.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/server.py diff --git 
a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/slackrequest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/user.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/util.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/slackclient/version.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt rename to 
openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/test_requirements.txt diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/conftest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/channel.created.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/im.created.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/rtm.start.json diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png similarity index 
100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/data/slack_logo.png diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_channel.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_server.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackclient.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tests/test_slackrequest.py diff --git a/openpype/modules/slack/python2_vendor/python-slack-sdk-1/tox.ini 
b/openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tox.ini similarity index 100% rename from openpype/modules/slack/python2_vendor/python-slack-sdk-1/tox.ini rename to openpype/modules/default_modules/slack/python2_vendor/python-slack-sdk-1/tox.ini diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/default_modules/slack/slack_module.py similarity index 100% rename from openpype/modules/slack/slack_module.py rename to openpype/modules/default_modules/slack/slack_module.py From 3406c47fda3efa19f4f312cf412df99495c8803d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:19:35 +0200 Subject: [PATCH 073/308] moved ftrack module --- .../{ => default_modules}/ftrack/__init__.py | 0 .../action_clone_review_session.py | 2 +- .../action_multiple_notes.py | 2 +- .../action_prepare_project.py | 2 +- .../action_push_frame_values_to_task.py | 2 +- .../action_sync_to_avalon.py | 4 +- .../event_del_avalon_id_from_new.py | 6 +- .../event_first_version_status.py | 2 +- .../event_next_task_update.py | 2 +- .../event_push_frame_values_to_task.py | 2 +- .../event_radio_buttons.py | 2 +- .../event_sync_to_avalon.py | 2 +- .../event_task_to_parent_status.py | 2 +- .../event_task_to_version_status.py | 2 +- .../event_thumbnail_updates.py | 2 +- .../event_user_assigment.py | 4 +- .../event_version_to_task_statuses.py | 2 +- .../action_applications.py | 2 +- .../action_batch_task_creation.py | 2 +- .../action_clean_hierarchical_attributes.py | 2 +- .../action_client_review_sort.py | 2 +- .../action_component_open.py | 2 +- .../action_create_cust_attrs.py | 2 +- .../action_create_folders.py | 2 +- .../action_create_project_structure.py | 2 +- .../action_delete_asset.py | 2 +- .../action_delete_old_versions.py | 2 +- .../event_handlers_user/action_delivery.py | 4 +- .../event_handlers_user/action_djvview.py | 2 +- .../event_handlers_user/action_job_killer.py | 2 +- .../action_multiple_notes.py | 2 +- .../action_prepare_project.py | 2 
+- .../ftrack/event_handlers_user/action_rv.py | 2 +- .../ftrack/event_handlers_user/action_seed.py | 2 +- .../action_store_thumbnails_to_avalon.py | 4 +- .../action_sync_to_avalon.py | 4 +- .../ftrack/event_handlers_user/action_test.py | 2 +- .../action_thumbnail_to_childern.py | 2 +- .../action_thumbnail_to_parent.py | 2 +- .../action_where_run_ask.py | 2 +- .../action_where_run_show.py | 2 +- .../ftrack/ftrack_module.py | 2 +- .../ftrack/ftrack_server/__init__.py | 0 .../ftrack/ftrack_server/event_server_cli.py | 8 +- .../ftrack/ftrack_server/ftrack_server.py | 0 .../ftrack/ftrack_server/lib.py | 2 +- .../ftrack/ftrack_server/socket_thread.py | 0 .../ftrack/interfaces.py | 0 .../launch_hooks/post_ftrack_changes.py | 0 .../ftrack/launch_hooks/pre_python2_vendor.py | 2 +- .../ftrack/lib/__init__.py | 0 .../ftrack/lib/avalon_sync.py | 0 .../ftrack/lib/constants.py | 0 .../ftrack/lib/credentials.py | 0 .../ftrack/lib/custom_attributes.json | 0 .../ftrack/lib/custom_attributes.py | 0 .../ftrack/lib/ftrack_action_handler.py | 0 .../ftrack/lib/ftrack_base_handler.py | 2 +- .../ftrack/lib/ftrack_event_handler.py | 0 .../ftrack/lib/settings.py | 0 .../integrate_ftrack_comments.py | 0 .../plugins/publish/collect_ftrack_api.py | 0 .../plugins/publish/collect_ftrack_family.py | 0 .../plugins/publish/integrate_ftrack_api.py | 0 .../integrate_ftrack_component_overwrite.py | 0 .../publish/integrate_ftrack_instances.py | 0 .../plugins/publish/integrate_ftrack_note.py | 0 .../publish/integrate_hierarchy_ftrack.py | 2 +- .../publish/integrate_remove_components.py | 0 .../validate_custom_ftrack_attributes.py | 0 .../python2_vendor/arrow/.github/FUNDING.yml | 1 + .../.github/ISSUE_TEMPLATE/bug_report.md | 27 + .../.github/ISSUE_TEMPLATE/documentation.md | 17 + .../.github/ISSUE_TEMPLATE/feature_request.md | 17 + .../arrow/.github/pull_request_template.md | 22 + .../workflows/continuous_integration.yml | 123 + .../ftrack/python2_vendor/arrow/.gitignore | 211 + 
.../arrow/.pre-commit-config.yaml | 41 + .../ftrack/python2_vendor/arrow/CHANGELOG.rst | 598 +++ .../ftrack/python2_vendor/arrow/LICENSE | 201 + .../ftrack/python2_vendor/arrow/MANIFEST.in | 3 + .../ftrack/python2_vendor/arrow/Makefile | 44 + .../ftrack/python2_vendor/arrow/README.rst | 133 + .../python2_vendor/arrow/arrow/__init__.py | 18 + .../python2_vendor/arrow/arrow/_version.py | 1 + .../ftrack/python2_vendor/arrow/arrow/api.py | 54 + .../python2_vendor/arrow/arrow/arrow.py | 1584 ++++++ .../python2_vendor/arrow/arrow/constants.py | 9 + .../python2_vendor/arrow/arrow/factory.py | 301 ++ .../python2_vendor/arrow/arrow/formatter.py | 139 + .../python2_vendor/arrow/arrow/locales.py | 4267 +++++++++++++++++ .../python2_vendor/arrow/arrow/parser.py | 596 +++ .../ftrack/python2_vendor/arrow/arrow/util.py | 115 + .../ftrack/python2_vendor/arrow/docs/Makefile | 20 + .../ftrack/python2_vendor/arrow/docs/conf.py | 62 + .../python2_vendor/arrow/docs/index.rst | 566 +++ .../ftrack/python2_vendor/arrow/docs/make.bat | 35 + .../python2_vendor/arrow/docs/releases.rst | 3 + .../python2_vendor/arrow/requirements.txt | 14 + .../ftrack/python2_vendor/arrow/setup.cfg | 2 + .../ftrack/python2_vendor/arrow/setup.py | 50 + .../python2_vendor/arrow/tests/__init__.py | 0 .../python2_vendor/arrow/tests/conftest.py | 76 + .../python2_vendor/arrow/tests/test_api.py | 28 + .../python2_vendor/arrow/tests/test_arrow.py | 2150 +++++++++ .../arrow/tests/test_factory.py | 390 ++ .../arrow/tests/test_formatter.py | 282 ++ .../arrow/tests/test_locales.py | 1352 ++++++ .../python2_vendor/arrow/tests/test_parser.py | 1657 +++++++ .../python2_vendor/arrow/tests/test_util.py | 81 + .../python2_vendor/arrow/tests/utils.py | 16 + .../ftrack/python2_vendor/arrow/tox.ini | 53 + .../backports/__init__.py | 0 .../backports/configparser/__init__.py | 0 .../backports/configparser/helpers.py | 0 .../backports/functools_lru_cache.py | 0 .../builtins/builtins/__init__.py | 0 .../ftrack-python-api/.gitignore | 
42 + .../ftrack-python-api/LICENSE.python | 254 + .../ftrack-python-api/LICENSE.txt | 176 + .../ftrack-python-api/MANIFEST.in | 4 + .../ftrack-python-api/README.rst | 34 + .../ftrack-python-api/bitbucket-pipelines.yml | 24 + .../ftrack-python-api/doc/_static/ftrack.css | 16 + .../doc/api_reference/accessor/base.rst | 8 + .../doc/api_reference/accessor/disk.rst | 8 + .../doc/api_reference/accessor/index.rst | 14 + .../doc/api_reference/accessor/server.rst | 8 + .../doc/api_reference/attribute.rst | 8 + .../doc/api_reference/cache.rst | 8 + .../doc/api_reference/collection.rst | 8 + .../api_reference/entity/asset_version.rst | 8 + .../doc/api_reference/entity/base.rst | 8 + .../doc/api_reference/entity/component.rst | 8 + .../doc/api_reference/entity/factory.rst | 8 + .../doc/api_reference/entity/index.rst | 14 + .../doc/api_reference/entity/job.rst | 8 + .../doc/api_reference/entity/location.rst | 8 + .../doc/api_reference/entity/note.rst | 8 + .../api_reference/entity/project_schema.rst | 8 + .../doc/api_reference/entity/user.rst | 8 + .../doc/api_reference/event/base.rst | 8 + .../doc/api_reference/event/expression.rst | 8 + .../doc/api_reference/event/hub.rst | 8 + .../doc/api_reference/event/index.rst | 14 + .../doc/api_reference/event/subscriber.rst | 8 + .../doc/api_reference/event/subscription.rst | 8 + .../doc/api_reference/exception.rst | 8 + .../doc/api_reference/formatter.rst | 8 + .../doc/api_reference/index.rst | 20 + .../doc/api_reference/inspection.rst | 8 + .../doc/api_reference/logging.rst | 8 + .../doc/api_reference/operation.rst | 8 + .../doc/api_reference/plugin.rst | 8 + .../doc/api_reference/query.rst | 8 + .../resource_identifier_transformer/base.rst | 10 + .../resource_identifier_transformer/index.rst | 16 + .../doc/api_reference/session.rst | 8 + .../doc/api_reference/structure/base.rst | 8 + .../doc/api_reference/structure/id.rst | 8 + .../doc/api_reference/structure/index.rst | 14 + .../doc/api_reference/structure/origin.rst | 8 + 
.../doc/api_reference/structure/standard.rst | 8 + .../doc/api_reference/symbol.rst | 8 + .../ftrack-python-api/doc/caching.rst | 175 + .../ftrack-python-api/doc/conf.py | 102 + .../ftrack-python-api/doc/docutils.conf | 2 + .../doc/environment_variables.rst | 56 + .../ftrack-python-api/doc/event_list.rst | 137 + .../example/assignments_and_allocations.rst | 82 + .../doc/example/component.rst | 23 + .../doc/example/custom_attribute.rst | 94 + .../doc/example/encode_media.rst | 53 + .../doc/example/entity_links.rst | 56 + .../ftrack-python-api/doc/example/index.rst | 52 + .../doc/example/invite_user.rst | 31 + .../ftrack-python-api/doc/example/job.rst | 97 + .../doc/example/link_attribute.rst | 55 + .../ftrack-python-api/doc/example/list.rst | 46 + .../manage_custom_attribute_configuration.rst | 320 ++ .../doc/example/metadata.rst | 43 + .../ftrack-python-api/doc/example/note.rst | 169 + .../ftrack-python-api/doc/example/project.rst | 65 + .../doc/example/publishing.rst | 73 + .../doc/example/review_session.rst | 87 + .../ftrack-python-api/doc/example/scope.rst | 27 + .../doc/example/security_roles.rst | 73 + .../doc/example/sync_ldap_users.rst | 30 + .../doc/example/task_template.rst | 56 + .../doc/example/thumbnail.rst | 71 + .../ftrack-python-api/doc/example/timer.rst | 37 + .../doc/example/web_review.rst | 78 + .../ftrack-python-api/doc/glossary.rst | 76 + .../ftrack-python-api/doc/handling_events.rst | 315 ++ .../image/configuring_plugins_directory.png | Bin 0 -> 7313 bytes .../ftrack-python-api/doc/index.rst | 42 + .../ftrack-python-api/doc/installing.rst | 77 + .../ftrack-python-api/doc/introduction.rst | 26 + .../doc/locations/configuring.rst | 87 + .../ftrack-python-api/doc/locations/index.rst | 18 + .../doc/locations/overview.rst | 143 + .../doc/locations/tutorial.rst | 193 + .../ftrack-python-api/doc/querying.rst | 263 + .../ftrack-python-api/doc/release/index.rst | 18 + .../doc/release/migrating_from_old_api.rst | 613 +++ .../doc/release/migration.rst | 
98 + .../doc/release/release_notes.rst | 1478 ++++++ .../doc/resource/example_plugin.py | 24 + .../doc/resource/example_plugin_safe.py | 0 .../resource/example_plugin_using_session.py | 37 + .../doc/security_and_authentication.rst | 38 + .../ftrack-python-api/doc/tutorial.rst | 156 + .../doc/understanding_sessions.rst | 281 ++ .../doc/working_with_entities.rst | 434 ++ .../ftrack-python-api/pytest.ini | 7 + .../resource/plugin/configure_locations.py | 39 + .../resource/plugin/construct_entity_type.py | 46 + .../ftrack-python-api/setup.cfg | 6 + .../python2_vendor/ftrack-python-api/setup.py | 81 + .../ftrack-python-api/source/__init__.py | 1 + .../source/ftrack_api/__init__.py | 32 + .../_centralized_storage_scenario.py | 656 +++ .../source/ftrack_api/_python_ntpath.py | 534 +++ .../source/ftrack_api/_version.py | 1 + .../source/ftrack_api/_weakref.py | 66 + .../source/ftrack_api/accessor/__init__.py | 2 + .../source/ftrack_api/accessor/base.py | 124 + .../source/ftrack_api/accessor/disk.py | 250 + .../source/ftrack_api/accessor/server.py | 240 + .../source/ftrack_api/attribute.py | 707 +++ .../source/ftrack_api/cache.py | 579 +++ .../source/ftrack_api/collection.py | 507 ++ .../source/ftrack_api/data.py | 119 + .../source/ftrack_api/entity/__init__.py | 2 + .../source/ftrack_api/entity/asset_version.py | 91 + .../source/ftrack_api/entity/base.py | 402 ++ .../source/ftrack_api/entity/component.py | 74 + .../source/ftrack_api/entity/factory.py | 435 ++ .../source/ftrack_api/entity/job.py | 48 + .../source/ftrack_api/entity/location.py | 733 +++ .../source/ftrack_api/entity/note.py | 105 + .../ftrack_api/entity/project_schema.py | 94 + .../source/ftrack_api/entity/user.py | 123 + .../source/ftrack_api/event/__init__.py | 2 + .../source/ftrack_api/event/base.py | 85 + .../source/ftrack_api/event/expression.py | 282 ++ .../source/ftrack_api/event/hub.py | 1091 +++++ .../source/ftrack_api/event/subscriber.py | 27 + .../source/ftrack_api/event/subscription.py | 23 + 
.../source/ftrack_api/exception.py | 392 ++ .../source/ftrack_api/formatter.py | 131 + .../source/ftrack_api/inspection.py | 135 + .../source/ftrack_api/logging.py | 43 + .../source/ftrack_api/operation.py | 115 + .../source/ftrack_api/plugin.py | 121 + .../source/ftrack_api/query.py | 202 + .../__init__.py | 2 + .../resource_identifier_transformer/base.py | 50 + .../source/ftrack_api/session.py | 2515 ++++++++++ .../source/ftrack_api/structure/__init__.py | 2 + .../source/ftrack_api/structure/base.py | 38 + .../source/ftrack_api/structure/entity_id.py | 12 + .../source/ftrack_api/structure/id.py | 91 + .../source/ftrack_api/structure/origin.py | 28 + .../source/ftrack_api/structure/standard.py | 217 + .../source/ftrack_api/symbol.py | 77 + .../test/fixture/media/colour_wheel.mov | Bin 0 -> 17627 bytes .../test/fixture/media/image-resized-10.png | Bin 0 -> 115 bytes .../test/fixture/media/image.png | Bin 0 -> 883 bytes .../fixture/plugin/configure_locations.py | 40 + .../fixture/plugin/construct_entity_type.py | 52 + .../fixture/plugin/count_session_event.py | 41 + .../ftrack-python-api/test/unit/__init__.py | 2 + .../test/unit/accessor/__init__.py | 2 + .../test/unit/accessor/test_disk.py | 267 ++ .../test/unit/accessor/test_server.py | 41 + .../ftrack-python-api/test/unit/conftest.py | 539 +++ .../test/unit/entity/__init__.py | 2 + .../test/unit/entity/test_asset_version.py | 54 + .../test/unit/entity/test_base.py | 14 + .../test/unit/entity/test_component.py | 70 + .../test/unit/entity/test_factory.py | 25 + .../test/unit/entity/test_job.py | 42 + .../test/unit/entity/test_location.py | 516 ++ .../test/unit/entity/test_metadata.py | 135 + .../test/unit/entity/test_note.py | 67 + .../test/unit/entity/test_project_schema.py | 64 + .../test/unit/entity/test_scopes.py | 24 + .../test/unit/entity/test_user.py | 49 + .../test/unit/event/__init__.py | 2 + .../unit/event/event_hub_server_heartbeat.py | 92 + .../test/unit/event/test_base.py | 36 + 
.../test/unit/event/test_expression.py | 174 + .../test/unit/event/test_hub.py | 701 +++ .../test/unit/event/test_subscriber.py | 33 + .../test/unit/event/test_subscription.py | 28 + .../__init__.py | 2 + .../test_base.py | 36 + .../test/unit/structure/__init__.py | 2 + .../test/unit/structure/test_base.py | 31 + .../test/unit/structure/test_entity_id.py | 49 + .../test/unit/structure/test_id.py | 115 + .../test/unit/structure/test_origin.py | 33 + .../test/unit/structure/test_standard.py | 309 ++ .../test/unit/test_attribute.py | 146 + .../ftrack-python-api/test/unit/test_cache.py | 416 ++ .../test/unit/test_collection.py | 574 +++ .../test/unit/test_custom_attribute.py | 251 + .../ftrack-python-api/test/unit/test_data.py | 129 + .../test/unit/test_formatter.py | 70 + .../test/unit/test_inspection.py | 101 + .../test/unit/test_operation.py | 79 + .../test/unit/test_package.py | 48 + .../test/unit/test_plugin.py | 192 + .../ftrack-python-api/test/unit/test_query.py | 164 + .../test/unit/test_session.py | 1519 ++++++ .../ftrack-python-api/test/unit/test_timer.py | 74 + .../ftrack/scripts/sub_event_processor.py | 4 +- .../ftrack/scripts/sub_event_status.py | 4 +- .../ftrack/scripts/sub_event_storer.py | 6 +- .../ftrack/scripts/sub_legacy_server.py | 2 +- .../ftrack/scripts/sub_user_server.py | 4 +- .../ftrack/tray/__init__.py | 0 .../ftrack/tray/ftrack_tray.py | 0 .../ftrack/tray/login_dialog.py | 2 +- .../ftrack/tray/login_tools.py | 0 openpype/modules/ftrack/python2_vendor/arrow | 1 - .../ftrack/python2_vendor/ftrack-python-api | 1 - 328 files changed, 42186 insertions(+), 69 deletions(-) rename openpype/modules/{ => default_modules}/ftrack/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_clone_review_session.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_multiple_notes.py (98%) rename openpype/modules/{ => 
default_modules}/ftrack/event_handlers_server/action_prepare_project.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_push_frame_values_to_task.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/action_sync_to_avalon.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_del_avalon_id_from_new.py (90%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_first_version_status.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_next_task_update.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_push_frame_values_to_task.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_radio_buttons.py (96%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_sync_to_avalon.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_task_to_parent_status.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_task_to_version_status.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_thumbnail_updates.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_user_assigment.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_server/event_version_to_task_statuses.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_applications.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_batch_task_creation.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_client_review_sort.py (97%) rename openpype/modules/{ => 
default_modules}/ftrack/event_handlers_user/action_component_open.py (96%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_create_cust_attrs.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_create_folders.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_create_project_structure.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_delete_asset.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_delete_old_versions.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_delivery.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_djvview.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_job_killer.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_multiple_notes.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_prepare_project.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_rv.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_seed.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py (99%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_sync_to_avalon.py (98%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_test.py (89%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_thumbnail_to_childern.py (96%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_thumbnail_to_parent.py (97%) rename openpype/modules/{ => default_modules}/ftrack/event_handlers_user/action_where_run_ask.py (94%) rename openpype/modules/{ => 
default_modules}/ftrack/event_handlers_user/action_where_run_show.py (98%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_module.py (99%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/event_server_cli.py (98%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/ftrack_server.py (100%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/lib.py (99%) rename openpype/modules/{ => default_modules}/ftrack/ftrack_server/socket_thread.py (100%) rename openpype/modules/{ => default_modules}/ftrack/interfaces.py (100%) rename openpype/modules/{ => default_modules}/ftrack/launch_hooks/post_ftrack_changes.py (100%) rename openpype/modules/{ => default_modules}/ftrack/launch_hooks/pre_python2_vendor.py (96%) rename openpype/modules/{ => default_modules}/ftrack/lib/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/avalon_sync.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/constants.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/credentials.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/custom_attributes.json (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/custom_attributes.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/ftrack_action_handler.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/ftrack_base_handler.py (99%) rename openpype/modules/{ => default_modules}/ftrack/lib/ftrack_event_handler.py (100%) rename openpype/modules/{ => default_modules}/ftrack/lib/settings.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/collect_ftrack_api.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/collect_ftrack_family.py (100%) rename 
openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_api.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_instances.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_ftrack_note.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_hierarchy_ftrack.py (99%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/integrate_remove_components.py (100%) rename openpype/modules/{ => default_modules}/ftrack/plugins/publish/validate_custom_ftrack_attributes.py (100%) create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile 
create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py (100%) rename openpype/modules/{ => default_modules}/ftrack/python2_vendor/builtins/builtins/__init__.py (100%) create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf create mode 
100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst create mode 
100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py 
create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py create mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py create mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_event_processor.py (95%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_event_status.py (98%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_event_storer.py (96%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_legacy_server.py (97%) rename openpype/modules/{ => default_modules}/ftrack/scripts/sub_user_server.py (93%) rename openpype/modules/{ => default_modules}/ftrack/tray/__init__.py (100%) rename openpype/modules/{ => default_modules}/ftrack/tray/ftrack_tray.py (100%) rename openpype/modules/{ => default_modules}/ftrack/tray/login_dialog.py (99%) rename openpype/modules/{ => default_modules}/ftrack/tray/login_tools.py (100%) delete mode 160000 openpype/modules/ftrack/python2_vendor/arrow delete mode 160000 openpype/modules/ftrack/python2_vendor/ftrack-python-api diff --git a/openpype/modules/ftrack/__init__.py 
b/openpype/modules/default_modules/ftrack/__init__.py similarity index 100% rename from openpype/modules/ftrack/__init__.py rename to openpype/modules/default_modules/ftrack/__init__.py diff --git a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py index 59c8bffb75..1ad7a17785 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_clone_review_session.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_clone_review_session.py @@ -1,6 +1,6 @@ import json -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction def clone_review_session(session, entity): diff --git a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py index 9ad7b1a969..f9aac2c80a 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_multiple_notes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_multiple_notes.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class MultipleNotesServer(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py similarity index 99% rename from 
openpype/modules/ftrack/event_handlers_server/action_prepare_project.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py index 3a96ae3311..85317031b2 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_prepare_project.py @@ -4,7 +4,7 @@ from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( ServerAction, get_openpype_attr, CUST_ATTR_AUTO_SYNC diff --git a/openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py index b38e18d089..3f63ce6fac 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_push_frame_values_to_task.py @@ -2,7 +2,7 @@ import sys import json import collections import ftrack_api -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class PushHierValuesToNonHier(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py index 8f78f998ac..d449c4b7df 100644 --- 
a/openpype/modules/ftrack/event_handlers_server/action_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/action_sync_to_avalon.py @@ -1,8 +1,8 @@ import time import traceback -from openpype.modules.ftrack.lib import ServerAction -from openpype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory +from openpype_modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory class SyncToAvalonServer(ServerAction): diff --git a/openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py similarity index 90% rename from openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py index 078596cc2e..35b5d809fd 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_del_avalon_id_from_new.py @@ -1,6 +1,6 @@ -from openpype.modules.ftrack.lib import BaseEvent -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY -from openpype.modules.ftrack.event_handlers_server.event_sync_to_avalon import ( +from openpype_modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.event_handlers_server.event_sync_to_avalon import ( SyncToAvalonEvent ) diff --git a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_first_version_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py index 511f62a207..ecc6c95d90 100644 
--- a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_first_version_status.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class FirstVersionStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_next_task_update.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_next_task_update.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py index ad62beb296..a65ae46545 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_next_task_update.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_next_task_update.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class NextTaskUpdate(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py index 81719258e1..10b165e7f6 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_push_frame_values_to_task.py @@ -2,7 +2,7 @@ import collections import datetime import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseEvent, query_custom_attributes ) diff --git 
a/openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py index 1ebd7b68d2..99ad3aec37 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_radio_buttons.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_radio_buttons.py @@ -1,5 +1,5 @@ import ftrack_api -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class RadioButtons(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 1dd056adee..93a0404c0b 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -17,7 +17,7 @@ import ftrack_api from avalon import schema from avalon.api import AvalonMongoDB -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( get_openpype_attr, CUST_ATTR_ID_KEY, CUST_ATTR_AUTO_SYNC, diff --git a/openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py index 4192a4bed0..a0e039926e 100644 --- 
a/openpype/modules/ftrack/event_handlers_server/event_task_to_parent_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_parent_status.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class TaskStatusToParent(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py index f2d3723021..b77849c678 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_task_to_version_status.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_task_to_version_status.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class TaskToVersionStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py index cbeeeee5c5..64673f792c 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_thumbnail_updates.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_thumbnail_updates.py @@ -1,5 +1,5 @@ import collections -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class ThumbnailEvents(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py 
b/openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_server/event_user_assigment.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py index a0734e14a1..efc1e76775 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_user_assigment.py @@ -2,8 +2,8 @@ import os import re import subprocess -from openpype.modules.ftrack.lib import BaseEvent -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from avalon.api import AvalonMongoDB from bson.objectid import ObjectId diff --git a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py b/openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py index f215bedcc2..e36c3eecd9 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_version_to_task_statuses.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_server/event_version_to_task_statuses.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseEvent +from openpype_modules.ftrack.lib import BaseEvent class VersionToTaskStatus(BaseEvent): diff --git a/openpype/modules/ftrack/event_handlers_user/action_applications.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_applications.py rename to 
openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py index 23c96e1b9f..54de6f1fd6 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_applications.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_applications.py @@ -1,7 +1,7 @@ import os from uuid import uuid4 -from openpype.modules.ftrack.lib import BaseAction +from openpype_modules.ftrack.lib import BaseAction from openpype.lib import ( ApplicationManager, ApplicationLaunchFailed, diff --git a/openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py index b9f0e7c5d3..c7fb1af98b 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_batch_task_creation.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_batch_task_creation.py @@ -2,7 +2,7 @@ Taken from https://github.com/tokejepsen/ftrack-hooks/tree/master/batch_tasks """ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class BatchTasksAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py index 45cc9adf55..dc97ed972d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py +++ 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py @@ -1,6 +1,6 @@ import collections import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, get_openpype_attr diff --git a/openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py index 7c9a2881d6..5ad5f10e8e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_client_review_sort.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_client_review_sort.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon try: from functools import cmp_to_key except Exception: diff --git a/openpype/modules/ftrack/event_handlers_user/action_component_open.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_user/action_component_open.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py index b3cdac0722..c731713c10 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_component_open.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_component_open.py @@ -1,7 +1,7 @@ import os import sys import subprocess -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ComponentOpen(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py index 63605eda5e..599d2eb257 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -2,7 +2,7 @@ import collections import json import arrow import ftrack_api -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_folders.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py index 075b8d3d25..994dbd90e4 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_folders.py @@ -1,5 +1,5 @@ import os -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon import lib as avalonlib from openpype.api import ( Anatomy, diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py index 035a1c60de..121c9f652b 100644 --- 
a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -2,7 +2,7 @@ import os import re import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy, get_project_settings diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delete_asset.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index c20491349f..f860065b26 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -4,7 +4,7 @@ from datetime import datetime from queue import Queue from bson.objectid import ObjectId -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py index dbddc7a95e..063f086e9c 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -5,7 +5,7 @@ import uuid import clique from pymongo import UpdateOne -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from 
openpype_modules.ftrack.lib import BaseAction, statics_icon from avalon.api import AvalonMongoDB from openpype.api import Anatomy diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_delivery.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py index 2e7599647a..1f28b18900 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delivery.py @@ -6,8 +6,8 @@ import collections from bson.objectid import ObjectId from openpype.api import Anatomy, config -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY from openpype.lib.delivery import ( path_from_representation, get_format_dict, diff --git a/openpype/modules/ftrack/event_handlers_user/action_djvview.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_djvview.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py index c05fbed2d0..c603a2d200 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_djvview.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py @@ -1,7 +1,7 @@ import os import subprocess from operator import itemgetter -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class DJVViewAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_job_killer.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_job_killer.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py index 47ed1e7895..af24e0280d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_job_killer.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_job_killer.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class JobKiller(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py index 8db65fe39b..4a89c6d7e9 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_multiple_notes.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_multiple_notes.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class MultipleNotes(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_prepare_project.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py index ea0bfa2971..c266d24fd3 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_prepare_project.py @@ -4,7 +4,7 @@ from 
avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack.lib import ( BaseAction, statics_icon, get_openpype_attr, diff --git a/openpype/modules/ftrack/event_handlers_user/action_rv.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_rv.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py index 3172b74261..71d790f7e7 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_rv.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_rv.py @@ -3,7 +3,7 @@ import subprocess import traceback import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon import ftrack_api from avalon import io, api diff --git a/openpype/modules/ftrack/event_handlers_user/action_seed.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py similarity index 99% rename from openpype/modules/ftrack/event_handlers_user/action_seed.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py index 1f01f0af1d..4021d70c0a 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_seed.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_seed.py @@ -1,6 +1,6 @@ import os from operator import itemgetter -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class SeedDebugProject(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py similarity index 99% rename from 
openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py index 4464e51d3d..4820925844 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py @@ -4,11 +4,11 @@ import json import requests from bson.objectid import ObjectId -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy from avalon.api import AvalonMongoDB -from openpype.modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY +from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY class StoreThumbnailsToAvalon(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py index 89fac7cf80..d6ca561bbe 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_sync_to_avalon.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_sync_to_avalon.py @@ -1,8 +1,8 @@ import time import traceback -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory +from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib.avalon_sync import SyncEntitiesFactory class SyncToAvalonLocal(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_test.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py similarity index 89% rename from 
openpype/modules/ftrack/event_handlers_user/action_test.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py index 206c67de50..bd71ba5bf9 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_test.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_test.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class TestAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py similarity index 96% rename from openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py index a12f25b57d..3b90960160 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_childern.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ThumbToChildren(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py similarity index 97% rename from openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py index 284723bb0f..2f0110b7aa 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_thumbnail_to_parent.py @@ -1,5 +1,5 @@ import json -from openpype.modules.ftrack.lib import BaseAction, 
statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ThumbToParent(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py similarity index 94% rename from openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index 6950d45ecd..8e81ae4a1b 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class ActionAskWhereIRun(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_where_run_show.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_show.py similarity index 98% rename from openpype/modules/ftrack/event_handlers_user/action_where_run_show.py rename to openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_show.py index 4ce1a439a3..8ac9fc272d 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_where_run_show.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_show.py @@ -1,7 +1,7 @@ import platform import socket import getpass -from openpype.modules.ftrack.lib import BaseAction +from openpype_modules.ftrack.lib import BaseAction class ActionShowWhereIRun(BaseAction): diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/default_modules/ftrack/ftrack_module.py similarity index 99% rename from openpype/modules/ftrack/ftrack_module.py rename to openpype/modules/default_modules/ftrack/ftrack_module.py index 6fce308b19..6fd2737261 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ 
b/openpype/modules/default_modules/ftrack/ftrack_module.py @@ -231,7 +231,7 @@ class FtrackModule( return import ftrack_api - from openpype.modules.ftrack.lib import get_openpype_attr + from openpype_modules.ftrack.lib import get_openpype_attr try: session = self.create_ftrack_session() diff --git a/openpype/modules/ftrack/ftrack_server/__init__.py b/openpype/modules/default_modules/ftrack/ftrack_server/__init__.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/__init__.py rename to openpype/modules/default_modules/ftrack/ftrack_server/__init__.py diff --git a/openpype/modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py similarity index 98% rename from openpype/modules/ftrack/ftrack_server/event_server_cli.py rename to openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py index 8bba22b475..1e14929d96 100644 --- a/openpype/modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py @@ -18,17 +18,17 @@ from openpype.lib import ( get_pype_execute_args, OpenPypeMongoConnection ) -from openpype.modules.ftrack import FTRACK_MODULE_DIR -from openpype.modules.ftrack.lib import ( +from openpype_modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack.lib import ( credentials, get_ftrack_url_from_settings ) -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.lib import ( check_ftrack_url, get_ftrack_event_mongo_info ) -from openpype.modules.ftrack.ftrack_server import socket_thread +from openpype_modules.ftrack.ftrack_server import socket_thread class MongoPermissionsError(Exception): diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/default_modules/ftrack/ftrack_server/ftrack_server.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/ftrack_server.py rename to 
openpype/modules/default_modules/ftrack/ftrack_server/ftrack_server.py diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/default_modules/ftrack/ftrack_server/lib.py similarity index 99% rename from openpype/modules/ftrack/ftrack_server/lib.py rename to openpype/modules/default_modules/ftrack/ftrack_server/lib.py index 88f849e765..e80d6a3a6b 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/lib.py @@ -22,7 +22,7 @@ try: from weakref import WeakMethod except ImportError: from ftrack_api._weakref import WeakMethod -from openpype.modules.ftrack.lib import get_ftrack_event_mongo_info +from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import OpenPypeMongoConnection from openpype.api import Logger diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/default_modules/ftrack/ftrack_server/socket_thread.py similarity index 100% rename from openpype/modules/ftrack/ftrack_server/socket_thread.py rename to openpype/modules/default_modules/ftrack/ftrack_server/socket_thread.py diff --git a/openpype/modules/ftrack/interfaces.py b/openpype/modules/default_modules/ftrack/interfaces.py similarity index 100% rename from openpype/modules/ftrack/interfaces.py rename to openpype/modules/default_modules/ftrack/interfaces.py diff --git a/openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py b/openpype/modules/default_modules/ftrack/launch_hooks/post_ftrack_changes.py similarity index 100% rename from openpype/modules/ftrack/launch_hooks/post_ftrack_changes.py rename to openpype/modules/default_modules/ftrack/launch_hooks/post_ftrack_changes.py diff --git a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py similarity index 96% rename from openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py rename to 
openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py index d34b6533fb..0dd894bebf 100644 --- a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/default_modules/ftrack/launch_hooks/pre_python2_vendor.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook -from openpype.modules.ftrack import FTRACK_MODULE_DIR +from openpype_modules.ftrack import FTRACK_MODULE_DIR class PrePython2Support(PreLaunchHook): diff --git a/openpype/modules/ftrack/lib/__init__.py b/openpype/modules/default_modules/ftrack/lib/__init__.py similarity index 100% rename from openpype/modules/ftrack/lib/__init__.py rename to openpype/modules/default_modules/ftrack/lib/__init__.py diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/default_modules/ftrack/lib/avalon_sync.py similarity index 100% rename from openpype/modules/ftrack/lib/avalon_sync.py rename to openpype/modules/default_modules/ftrack/lib/avalon_sync.py diff --git a/openpype/modules/ftrack/lib/constants.py b/openpype/modules/default_modules/ftrack/lib/constants.py similarity index 100% rename from openpype/modules/ftrack/lib/constants.py rename to openpype/modules/default_modules/ftrack/lib/constants.py diff --git a/openpype/modules/ftrack/lib/credentials.py b/openpype/modules/default_modules/ftrack/lib/credentials.py similarity index 100% rename from openpype/modules/ftrack/lib/credentials.py rename to openpype/modules/default_modules/ftrack/lib/credentials.py diff --git a/openpype/modules/ftrack/lib/custom_attributes.json b/openpype/modules/default_modules/ftrack/lib/custom_attributes.json similarity index 100% rename from openpype/modules/ftrack/lib/custom_attributes.json rename to openpype/modules/default_modules/ftrack/lib/custom_attributes.json diff --git a/openpype/modules/ftrack/lib/custom_attributes.py b/openpype/modules/default_modules/ftrack/lib/custom_attributes.py similarity index 100% rename from 
openpype/modules/ftrack/lib/custom_attributes.py rename to openpype/modules/default_modules/ftrack/lib/custom_attributes.py diff --git a/openpype/modules/ftrack/lib/ftrack_action_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_action_handler.py similarity index 100% rename from openpype/modules/ftrack/lib/ftrack_action_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_action_handler.py diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py similarity index 99% rename from openpype/modules/ftrack/lib/ftrack_base_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py index 011ce8db9d..8a29b40029 100644 --- a/openpype/modules/ftrack/lib/ftrack_base_handler.py +++ b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py @@ -9,7 +9,7 @@ from openpype.api import Logger from openpype.settings import get_project_settings import ftrack_api -from openpype.modules.ftrack import ftrack_server +from openpype_modules.ftrack import ftrack_server class MissingPermision(Exception): diff --git a/openpype/modules/ftrack/lib/ftrack_event_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_event_handler.py similarity index 100% rename from openpype/modules/ftrack/lib/ftrack_event_handler.py rename to openpype/modules/default_modules/ftrack/lib/ftrack_event_handler.py diff --git a/openpype/modules/ftrack/lib/settings.py b/openpype/modules/default_modules/ftrack/lib/settings.py similarity index 100% rename from openpype/modules/ftrack/lib/settings.py rename to openpype/modules/default_modules/ftrack/lib/settings.py diff --git a/openpype/modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py b/openpype/modules/default_modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py similarity index 100% rename from openpype/modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py rename to 
openpype/modules/default_modules/ftrack/plugins/_unused_publish/integrate_ftrack_comments.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_api.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_api.py diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_api.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_api.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_component_overwrite.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_instances.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_instances.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_instances.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py 
b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_note.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_ftrack_note.py diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py similarity index 99% rename from openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 118a73a636..2fd5296d24 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -9,7 +9,7 @@ CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" -# Copy of `get_pype_attr` from openpype.modules.ftrack.lib +# Copy of `get_pype_attr` from openpype_modules.ftrack.lib # TODO import from openpype's ftrack module when possible to not break Python 2 def get_pype_attr(session, split_hierarchical=True): custom_attributes = [] diff --git a/openpype/modules/ftrack/plugins/publish/integrate_remove_components.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_remove_components.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/integrate_remove_components.py rename to openpype/modules/default_modules/ftrack/plugins/publish/integrate_remove_components.py diff --git a/openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py b/openpype/modules/default_modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py similarity index 100% rename from openpype/modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py rename to openpype/modules/default_modules/ftrack/plugins/publish/validate_custom_ftrack_attributes.py diff --git 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml new file mode 100644 index 0000000000..c3608357a4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml @@ -0,0 +1 @@ +open_collective: arrow diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..e4e242ee42 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: "🐞 Bug Report" +about: Find a bug? Create a report to help us improve. +title: '' +labels: 'bug' +assignees: '' +--- + + + +## Issue Description + + + +## System Info + +- 🖥 **OS name and version**: +- 🐍 **Python version**: +- 🏹 **Arrow version**: diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md new file mode 100644 index 0000000000..753ed0c620 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md @@ -0,0 +1,17 @@ +--- +name: "📚 Documentation" +about: Find errors or problems in the docs (https://arrow.readthedocs.io)? 
+title: '' +labels: 'documentation' +assignees: '' +--- + + + +## Issue Description + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..fcab9213f5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,17 @@ +--- +name: "💡 Feature Request" +about: Have an idea for a new feature or improvement? +title: '' +labels: 'enhancement' +assignees: '' +--- + + + +## Feature Request + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md new file mode 100644 index 0000000000..0e07c288af --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md @@ -0,0 +1,22 @@ +## Pull Request Checklist + +Thank you for taking the time to improve Arrow! Before submitting your pull request, please check all *appropriate* boxes: + + +- [ ] 🧪 Added **tests** for changed code. +- [ ] 🛠️ All tests **pass** when run locally (run `tox` or `make test` to find out!). +- [ ] 🧹 All linting checks **pass** when run locally (run `tox -e lint` or `make lint` to find out!). +- [ ] 📚 Updated **documentation** for changed code. +- [ ] ⏩ Code is **up-to-date** with the `master` branch. + +If you have *any* questions about your code changes or any of the points above, please submit your questions along with the pull request and we will try our best to help! 
+ +## Description of Changes + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml new file mode 100644 index 0000000000..d800f399c6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml @@ -0,0 +1,123 @@ +name: tests + +on: + pull_request: # Run on all pull requests + push: # Run only on pushes to master + branches: + - master + schedule: # Run monthly + - cron: "0 0 1 * *" + +jobs: + test: + name: ${{ matrix.os }} (${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + python-version: ["pypy3", "2.7", "3.5", "3.6", "3.7", "3.8", "3.9-dev"] + os: [ubuntu-latest, macos-latest, windows-latest] + exclude: + # pypy3 randomly fails on Windows builds + - os: windows-latest + python-version: "pypy3" + + steps: + # Check out latest code + - uses: actions/checkout@v2 + + # Configure pip cache + - name: Cache pip (Linux) + uses: actions/cache@v2 + if: startsWith(runner.os, 'Linux') + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Cache pip (macOS) + uses: actions/cache@v2 + if: startsWith(runner.os, 'macOS') + with: + path: ~/Library/Caches/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Cache pip (Windows) + uses: actions/cache@v2 + if: startsWith(runner.os, 'Windows') + with: + path: ~\AppData\Local\pip\Cache + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + # Set up Python + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + # Install dependencies + - 
name: Install dependencies + run: | + pip install -U pip setuptools wheel + pip install -U tox tox-gh-actions + + # Run tests + - name: Test with tox + run: tox + + # Upload coverage report + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + with: + file: coverage.xml + + lint: + runs-on: ubuntu-latest + + steps: + # Check out latest code + - uses: actions/checkout@v2 + + # Set up Python + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: "3.8" + + # Configure pip cache + - name: Cache pip + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + # Configure pre-commit cache + - name: Cache pre-commit + uses: actions/cache@v2 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} + restore-keys: | + ${{ runner.os }}-pre-commit- + + # Install dependencies + - name: Install dependencies + run: | + pip install -U pip setuptools wheel + pip install -U tox + + # Lint code + - name: Lint code + run: tox -e lint + + # Lint docs + - name: Lint docs + run: tox -e docs diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore new file mode 100644 index 0000000000..0448d0cf0c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore @@ -0,0 +1,211 @@ +README.rst.new + +# Small entry point file for debugging tasks +test.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a 
template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +local/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Swap +[._]*.s[a-v][a-z] +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +.idea/ +.vscode/ + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + 
+# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml new file mode 100644 index 0000000000..1f5128595b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml @@ -0,0 +1,41 @@ +default_language_version: + python: python3 +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: fix-encoding-pragma + exclude: ^arrow/_version.py + - id: requirements-txt-fixer + - id: check-ast + - id: check-yaml + - id: check-case-conflict + - id: check-docstring-first + - id: check-merge-conflict + - id: debug-statements + - repo: https://github.com/timothycrosley/isort + rev: 5.4.2 + hooks: + - id: isort + - repo: https://github.com/asottile/pyupgrade + rev: v2.7.2 + hooks: + - id: pyupgrade + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.6.0 + hooks: + - id: python-no-eval + - id: python-check-blanket-noqa + - id: rst-backticks + - repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black + args: [--safe, --quiet] + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.3 + hooks: + - id: flake8 + additional_dependencies: [flake8-bugbear] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst new file mode 100644 index 0000000000..0b55a4522c --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst @@ -0,0 +1,598 @@ +Changelog +========= + +0.17.0 (2020-10-2) +------------------- + +- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. This is the last major release to support Python 2.7 and Python 3.5. +- [NEW] Arrow now properly handles imaginary datetimes during DST shifts. For example: + +..code-block:: python + >>> just_before = arrow.get(2013, 3, 31, 1, 55, tzinfo="Europe/Paris") + >>> just_before.shift(minutes=+10) + + +..code-block:: python + >>> before = arrow.get("2018-03-10 23:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") + >>> after = arrow.get("2018-03-11 04:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") + >>> result=[(t, t.to("utc")) for t in arrow.Arrow.range("hour", before, after)] + >>> for r in result: + ... print(r) + ... + (, ) + (, ) + (, ) + (, ) + (, ) + +- [NEW] Added ``humanize`` week granularity translation for Tagalog. +- [CHANGE] Calls to the ``timestamp`` property now emit a ``DeprecationWarning``. In a future release, ``timestamp`` will be changed to a method to align with Python's datetime module. If you would like to continue using the property, please change your code to use the ``int_timestamp`` or ``float_timestamp`` properties instead. +- [CHANGE] Expanded and improved Catalan locale. +- [FIX] Fixed a bug that caused ``Arrow.range()`` to incorrectly cut off ranges in certain scenarios when using month, quarter, or year endings. +- [FIX] Fixed a bug that caused day of week token parsing to be case sensitive. +- [INTERNAL] A number of functions were reordered in arrow.py for better organization and grouping of related methods. This change will have no impact on usage. +- [INTERNAL] A minimum tox version is now enforced for compatibility reasons. Contributors must use tox >3.18.0 going forward. 
+ +0.16.0 (2020-08-23) +------------------- + +- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.16.x and 0.17.x releases are the last to support Python 2.7 and 3.5. +- [NEW] Implemented `PEP 495 `_ to handle ambiguous datetimes. This is achieved by the addition of the ``fold`` attribute for Arrow objects. For example: + +.. code-block:: python + + >>> before = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm') + + >>> before.fold + 0 + >>> before.ambiguous + True + >>> after = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm', fold=1) + + >>> after = before.replace(fold=1) + + +- [NEW] Added ``normalize_whitespace`` flag to ``arrow.get``. This is useful for parsing log files and/or any files that may contain inconsistent spacing. For example: + +.. code-block:: python + + >>> arrow.get("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True) + + >>> arrow.get("2013-036 \t 04:05:06Z", normalize_whitespace=True) + + +0.15.8 (2020-07-23) +------------------- + +- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.15.x, 0.16.x, and 0.17.x releases are the last to support Python 2.7 and 3.5. +- [NEW] Added ``humanize`` week granularity translation for Czech. +- [FIX] ``arrow.get`` will now pick sane defaults when weekdays are passed with particular token combinations, see `#446 `_. +- [INTERNAL] Moved arrow to an organization. The repo can now be found `here `_. +- [INTERNAL] Started issuing deprecation warnings for Python 2.7 and 3.5. +- [INTERNAL] Added Python 3.9 to CI pipeline. + +0.15.7 (2020-06-19) +------------------- + +- [NEW] Added a number of built-in format strings. See the `docs `_ for a complete list of supported formats. For example: + +.. code-block:: python + + >>> arw = arrow.utcnow() + >>> arw.format(arrow.FORMAT_COOKIE) + 'Wednesday, 27-May-2020 10:30:35 UTC' + +- [NEW] Arrow is now fully compatible with Python 3.9 and PyPy3. 
+- [NEW] Added Makefile, tox.ini, and requirements.txt files to the distribution bundle. +- [NEW] Added French Canadian and Swahili locales. +- [NEW] Added ``humanize`` week granularity translation for Hebrew, Greek, Macedonian, Swedish, Slovak. +- [FIX] ms and μs timestamps are now normalized in ``arrow.get()``, ``arrow.fromtimestamp()``, and ``arrow.utcfromtimestamp()``. For example: + +.. code-block:: python + + >>> ts = 1591161115194556 + >>> arw = arrow.get(ts) + + >>> arw.timestamp + 1591161115 + +- [FIX] Refactored and updated Macedonian, Hebrew, Korean, and Portuguese locales. + +0.15.6 (2020-04-29) +------------------- + +- [NEW] Added support for parsing and formatting `ISO 8601 week dates `_ via a new token ``W``, for example: + +.. code-block:: python + + >>> arrow.get("2013-W29-6", "W") + + >>> utc=arrow.utcnow() + >>> utc + + >>> utc.format("W") + '2020-W04-4' + +- [NEW] Formatting with ``x`` token (microseconds) is now possible, for example: + +.. code-block:: python + + >>> dt = arrow.utcnow() + >>> dt.format("x") + '1585669870688329' + >>> dt.format("X") + '1585669870' + +- [NEW] Added ``humanize`` week granularity translation for German, Italian, Polish & Taiwanese locales. +- [FIX] Consolidated and simplified German locales. +- [INTERNAL] Moved testing suite from nosetest/Chai to pytest/pytest-mock. +- [INTERNAL] Converted xunit-style setup and teardown functions in tests to pytest fixtures. +- [INTERNAL] Setup Github Actions for CI alongside Travis. +- [INTERNAL] Help support Arrow's future development by donating to the project on `Open Collective `_. + +0.15.5 (2020-01-03) +------------------- + +- [WARN] Python 2 reached EOL on 2020-01-01. arrow will **drop support** for Python 2 in a future release to be decided (see `#739 `_). +- [NEW] Added bounds parameter to ``span_range``, ``interval`` and ``span`` methods. This allows you to include or exclude the start and end values. 
+- [NEW] ``arrow.get()`` can now create arrow objects from a timestamp with a timezone, for example: + +.. code-block:: python + + >>> arrow.get(1367900664, tzinfo=tz.gettz('US/Pacific')) + + +- [NEW] ``humanize`` can now combine multiple levels of granularity, for example: + +.. code-block:: python + + >>> later140 = arrow.utcnow().shift(seconds=+8400) + >>> later140.humanize(granularity="minute") + 'in 139 minutes' + >>> later140.humanize(granularity=["hour", "minute"]) + 'in 2 hours and 19 minutes' + +- [NEW] Added Hong Kong locale (``zh_hk``). +- [NEW] Added ``humanize`` week granularity translation for Dutch. +- [NEW] Numbers are now displayed when using the seconds granularity in ``humanize``. +- [CHANGE] ``range`` now supports both the singular and plural forms of the ``frames`` argument (e.g. day and days). +- [FIX] Improved parsing of strings that contain punctuation. +- [FIX] Improved behaviour of ``humanize`` when singular seconds are involved. + +0.15.4 (2019-11-02) +------------------- + +- [FIX] Fixed an issue that caused package installs to fail on Conda Forge. + +0.15.3 (2019-11-02) +------------------- + +- [NEW] ``factory.get()`` can now create arrow objects from a ISO calendar tuple, for example: + +.. code-block:: python + + >>> arrow.get((2013, 18, 7)) + + +- [NEW] Added a new token ``x`` to allow parsing of integer timestamps with milliseconds and microseconds. +- [NEW] Formatting now supports escaping of characters using the same syntax as parsing, for example: + +.. code-block:: python + + >>> arw = arrow.now() + >>> fmt = "YYYY-MM-DD h [h] m" + >>> arw.format(fmt) + '2019-11-02 3 h 32' + +- [NEW] Added ``humanize`` week granularity translations for Chinese, Spanish and Vietnamese. +- [CHANGE] Added ``ParserError`` to module exports. +- [FIX] Added support for midnight at end of day. See `#703 `_ for details. +- [INTERNAL] Created Travis build for macOS. +- [INTERNAL] Test parsing and formatting against full timezone database. 
+ +0.15.2 (2019-09-14) +------------------- + +- [NEW] Added ``humanize`` week granularity translations for Portuguese and Brazilian Portuguese. +- [NEW] Embedded changelog within docs and added release dates to versions. +- [FIX] Fixed a bug that caused test failures on Windows only, see `#668 `_ for details. + +0.15.1 (2019-09-10) +------------------- + +- [NEW] Added ``humanize`` week granularity translations for Japanese. +- [FIX] Fixed a bug that caused Arrow to fail when passed a negative timestamp string. +- [FIX] Fixed a bug that caused Arrow to fail when passed a datetime object with ``tzinfo`` of type ``StaticTzInfo``. + +0.15.0 (2019-09-08) +------------------- + +- [NEW] Added support for DDD and DDDD ordinal date tokens. The following functionality is now possible: ``arrow.get("1998-045")``, ``arrow.get("1998-45", "YYYY-DDD")``, ``arrow.get("1998-045", "YYYY-DDDD")``. +- [NEW] ISO 8601 basic format for dates and times is now supported (e.g. ``YYYYMMDDTHHmmssZ``). +- [NEW] Added ``humanize`` week granularity translations for French, Russian and Swiss German locales. +- [CHANGE] Timestamps of type ``str`` are no longer supported **without a format string** in the ``arrow.get()`` method. This change was made to support the ISO 8601 basic format and to address bugs such as `#447 `_. + +The following will NOT work in v0.15.0: + +.. code-block:: python + + >>> arrow.get("1565358758") + >>> arrow.get("1565358758.123413") + +The following will work in v0.15.0: + +.. code-block:: python + + >>> arrow.get("1565358758", "X") + >>> arrow.get("1565358758.123413", "X") + >>> arrow.get(1565358758) + >>> arrow.get(1565358758.123413) + +- [CHANGE] When a meridian token (a|A) is passed and no meridians are available for the specified locale (e.g. unsupported or untranslated) a ``ParserError`` is raised. +- [CHANGE] The timestamp token (``X``) will now match float timestamps of type ``str``: ``arrow.get(“1565358758.123415”, “X”)``. 
+- [CHANGE] Strings with leading and/or trailing whitespace will no longer be parsed without a format string. Please see `the docs `_ for ways to handle this. +- [FIX] The timestamp token (``X``) will now only match on strings that **strictly contain integers and floats**, preventing incorrect matches. +- [FIX] Most instances of ``arrow.get()`` returning an incorrect ``Arrow`` object from a partial parsing match have been eliminated. The following issue have been addressed: `#91 `_, `#196 `_, `#396 `_, `#434 `_, `#447 `_, `#456 `_, `#519 `_, `#538 `_, `#560 `_. + +0.14.7 (2019-09-04) +------------------- + +- [CHANGE] ``ArrowParseWarning`` will no longer be printed on every call to ``arrow.get()`` with a datetime string. The purpose of the warning was to start a conversation about the upcoming 0.15.0 changes and we appreciate all the feedback that the community has given us! + +0.14.6 (2019-08-28) +------------------- + +- [NEW] Added support for ``week`` granularity in ``Arrow.humanize()``. For example, ``arrow.utcnow().shift(weeks=-1).humanize(granularity="week")`` outputs "a week ago". This change introduced two new untranslated words, ``week`` and ``weeks``, to all locale dictionaries, so locale contributions are welcome! +- [NEW] Fully translated the Brazilian Portugese locale. +- [CHANGE] Updated the Macedonian locale to inherit from a Slavic base. +- [FIX] Fixed a bug that caused ``arrow.get()`` to ignore tzinfo arguments of type string (e.g. ``arrow.get(tzinfo="Europe/Paris")``). +- [FIX] Fixed a bug that occurred when ``arrow.Arrow()`` was instantiated with a ``pytz`` tzinfo object. +- [FIX] Fixed a bug that caused Arrow to fail when passed a sub-second token, that when rounded, had a value greater than 999999 (e.g. ``arrow.get("2015-01-12T01:13:15.9999995")``). Arrow should now accurately propagate the rounding for large sub-second tokens. + +0.14.5 (2019-08-09) +------------------- + +- [NEW] Added Afrikaans locale. 
+- [CHANGE] Removed deprecated ``replace`` shift functionality. Users looking to pass plural properties to the ``replace`` function to shift values should use ``shift`` instead. +- [FIX] Fixed bug that occurred when ``factory.get()`` was passed a locale kwarg. + +0.14.4 (2019-07-30) +------------------- + +- [FIX] Fixed a regression in 0.14.3 that prevented a tzinfo argument of type string to be passed to the ``get()`` function. Functionality such as ``arrow.get("2019072807", "YYYYMMDDHH", tzinfo="UTC")`` should work as normal again. +- [CHANGE] Moved ``backports.functools_lru_cache`` dependency from ``extra_requires`` to ``install_requires`` for ``Python 2.7`` installs to fix `#495 `_. + +0.14.3 (2019-07-28) +------------------- + +- [NEW] Added full support for Python 3.8. +- [CHANGE] Added warnings for upcoming factory.get() parsing changes in 0.15.0. Please see `#612 `_ for full details. +- [FIX] Extensive refactor and update of documentation. +- [FIX] factory.get() can now construct from kwargs. +- [FIX] Added meridians to Spanish Locale. + +0.14.2 (2019-06-06) +------------------- + +- [CHANGE] Travis CI builds now use tox to lint and run tests. +- [FIX] Fixed UnicodeDecodeError on certain locales (#600). + +0.14.1 (2019-06-06) +------------------- + +- [FIX] Fixed ``ImportError: No module named 'dateutil'`` (#598). + +0.14.0 (2019-06-06) +------------------- + +- [NEW] Added provisional support for Python 3.8. +- [CHANGE] Removed support for EOL Python 3.4. +- [FIX] Updated setup.py with modern Python standards. +- [FIX] Upgraded dependencies to latest versions. +- [FIX] Enabled flake8 and black on travis builds. +- [FIX] Formatted code using black and isort. + +0.13.2 (2019-05-30) +------------------- + +- [NEW] Add is_between method. +- [FIX] Improved humanize behaviour for near zero durations (#416). +- [FIX] Correct humanize behaviour with future days (#541). +- [FIX] Documentation updates. +- [FIX] Improvements to German Locale. 
+ +0.13.1 (2019-02-17) +------------------- + +- [NEW] Add support for Python 3.7. +- [CHANGE] Remove deprecation decorators for Arrow.range(), Arrow.span_range() and Arrow.interval(), all now return generators, wrap with list() to get old behavior. +- [FIX] Documentation and docstring updates. + +0.13.0 (2019-01-09) +------------------- + +- [NEW] Added support for Python 3.6. +- [CHANGE] Drop support for Python 2.6/3.3. +- [CHANGE] Return generator instead of list for Arrow.range(), Arrow.span_range() and Arrow.interval(). +- [FIX] Make arrow.get() work with str & tzinfo combo. +- [FIX] Make sure special RegEx characters are escaped in format string. +- [NEW] Added support for ZZZ when formatting. +- [FIX] Stop using datetime.utcnow() in internals, use datetime.now(UTC) instead. +- [FIX] Return NotImplemented instead of TypeError in arrow math internals. +- [NEW] Added Estonian Locale. +- [FIX] Small fixes to Greek locale. +- [FIX] TagalogLocale improvements. +- [FIX] Added test requirements to setup. +- [FIX] Improve docs for get, now and utcnow methods. +- [FIX] Correct typo in deprecation warning. + +0.12.1 +------ + +- [FIX] Allow universal wheels to be generated and reliably installed. +- [FIX] Make humanize respect only_distance when granularity argument is also given.
+ +0.12.0 +------ + +- [FIX] Compatibility fix for Python 2.x + +0.11.0 +------ + +- [FIX] Fix grammar of ArabicLocale +- [NEW] Add Nepali Locale +- [FIX] Fix month name + rename AustriaLocale -> AustrianLocale +- [FIX] Fix typo in Basque Locale +- [FIX] Fix grammar in PortugueseBrazilian locale +- [FIX] Remove pip --user-mirrors flag +- [NEW] Add Indonesian Locale + +0.10.0 +------ + +- [FIX] Fix getattr off by one for quarter +- [FIX] Fix negative offset for UTC +- [FIX] Update arrow.py + +0.9.0 +----- + +- [NEW] Remove duplicate code +- [NEW] Support gnu date iso 8601 +- [NEW] Add support for universal wheels +- [NEW] Slovenian locale +- [NEW] Slovak locale +- [NEW] Romanian locale +- [FIX] respect limit even if end is defined range +- [FIX] Separate replace & shift functions +- [NEW] Added tox +- [FIX] Fix supported Python versions in documentation +- [NEW] Azerbaijani locale added, locale issue fixed in Turkish. +- [FIX] Format ParserError's raise message + +0.8.0 +----- + +- [] + +0.7.1 +----- + +- [NEW] Esperanto locale (batisteo) + +0.7.0 +----- + +- [FIX] Parse localized strings #228 (swistakm) +- [FIX] Modify tzinfo parameter in ``get`` api #221 (bottleimp) +- [FIX] Fix Czech locale (PrehistoricTeam) +- [FIX] Raise TypeError when adding/subtracting non-dates (itsmeolivia) +- [FIX] Fix pytz conversion error (Kudo) +- [FIX] Fix overzealous time truncation in span_range (kdeldycke) +- [NEW] Humanize for time duration #232 (ybrs) +- [NEW] Add Thai locale (sipp11) +- [NEW] Adding Belarusian (be) locale (oire) +- [NEW] Search date in strings (beenje) +- [NEW] Note that arrow's tokens differ from strptime's. (offby1) + +0.6.0 +----- + +- [FIX] Added support for Python 3 +- [FIX] Avoid truncating oversized epoch timestamps. Fixes #216. 
+- [FIX] Fixed month abbreviations for Ukrainian +- [FIX] Fix typo timezone +- [FIX] A couple of dialect fixes and two new languages +- [FIX] Spanish locale: ``Miercoles`` should have acute accent +- [Fix] Fix Finnish grammar +- [FIX] Fix typo in 'Arrow.floor' docstring +- [FIX] Use read() utility to open README +- [FIX] span_range for week frame +- [NEW] Add minimal support for fractional seconds longer than six digits. +- [NEW] Adding locale support for Marathi (mr) +- [NEW] Add count argument to span method +- [NEW] Improved docs + +0.5.1 - 0.5.4 +------------- + +- [FIX] test the behavior of simplejson instead of calling for_json directly (tonyseek) +- [FIX] Add Hebrew Locale (doodyparizada) +- [FIX] Update documentation location (andrewelkins) +- [FIX] Update setup.py Development Status level (andrewelkins) +- [FIX] Case insensitive month match (cshowe) + +0.5.0 +----- + +- [NEW] struct_time addition. (mhworth) +- [NEW] Version grep (eirnym) +- [NEW] Default to ISO 8601 format (emonty) +- [NEW] Raise TypeError on comparison (sniekamp) +- [NEW] Adding Macedonian(mk) locale (krisfremen) +- [FIX] Fix for ISO seconds and fractional seconds (sdispater) (andrewelkins) +- [FIX] Use correct Dutch wording for "hours" (wbolster) +- [FIX] Complete the list of english locales (indorilftw) +- [FIX] Change README to reStructuredText (nyuszika7h) +- [FIX] Parse lower-cased 'h' (tamentis) +- [FIX] Slight modifications to Dutch locale (nvie) + +0.4.4 +----- + +- [NEW] Include the docs in the released tarball +- [NEW] Czech localization Czech localization for Arrow +- [NEW] Add fa_ir to locales +- [FIX] Fixes parsing of time strings with a final Z +- [FIX] Fixes ISO parsing and formatting for fractional seconds +- [FIX] test_fromtimestamp sp +- [FIX] some typos fixed +- [FIX] removed an unused import statement +- [FIX] docs table fix +- [FIX] Issue with specify 'X' template and no template at all to arrow.get +- [FIX] Fix "import" typo in docs/index.rst +- [FIX] Fix unit tests 
for zero passed +- [FIX] Update layout.html +- [FIX] In Norwegian and new Norwegian months and weekdays should not be capitalized +- [FIX] Fixed discrepancy between specifying 'X' to arrow.get and specifying no template + +0.4.3 +----- + +- [NEW] Turkish locale (Emre) +- [NEW] Arabic locale (Mosab Ahmad) +- [NEW] Danish locale (Holmars) +- [NEW] Icelandic locale (Holmars) +- [NEW] Hindi locale (Atmb4u) +- [NEW] Malayalam locale (Atmb4u) +- [NEW] Finnish locale (Stormpat) +- [NEW] Portuguese locale (Danielcorreia) +- [NEW] ``h`` and ``hh`` strings are now supported (Averyonghub) +- [FIX] An incorrect inflection in the Polish locale has been fixed (Avalanchy) +- [FIX] ``arrow.get`` now properly handles ``Date`` (Jaapz) +- [FIX] Tests are now declared in ``setup.py`` and the manifest (Pypingou) +- [FIX] ``__version__`` has been added to ``__init__.py`` (Sametmax) +- [FIX] ISO 8601 strings can be parsed without a separator (Ivandiguisto / Root) +- [FIX] Documentation is now more clear regarding some inputs on ``arrow.get`` (Eriktaubeneck) +- [FIX] Some documentation links have been fixed (Vrutsky) +- [FIX] Error messages for parse errors are now more descriptive (Maciej Albin) +- [FIX] The parser now correctly checks for separators in strings (Mschwager) + +0.4.2 +----- + +- [NEW] Factory ``get`` method now accepts a single ``Arrow`` argument. +- [NEW] Tokens SSSS, SSSSS and SSSSSS are supported in parsing. +- [NEW] ``Arrow`` objects have a ``float_timestamp`` property. 
+- [NEW] Vietnamese locale (Iu1nguoi) +- [NEW] Factory ``get`` method now accepts a list of format strings (Dgilland) +- [NEW] A MANIFEST.in file has been added (Pypingou) +- [NEW] Tests can be run directly from ``setup.py`` (Pypingou) +- [FIX] Arrow docs now list 'day of week' format tokens correctly (Rudolphfroger) +- [FIX] Several issues with the Korean locale have been resolved (Yoloseem) +- [FIX] ``humanize`` now correctly returns unicode (Shvechikov) +- [FIX] ``Arrow`` objects now pickle / unpickle correctly (Yoloseem) + +0.4.1 +----- + +- [NEW] Table / explanation of formatting & parsing tokens in docs +- [NEW] Brazilian locale (Augusto2112) +- [NEW] Dutch locale (OrangeTux) +- [NEW] Italian locale (Pertux) +- [NEW] Austrian locale (LeChewbacca) +- [NEW] Tagalog locale (Marksteve) +- [FIX] Corrected spelling and day numbers in German locale (LeChewbacca) +- [FIX] Factory ``get`` method should now handle unicode strings correctly (Bwells) +- [FIX] Midnight and noon should now parse and format correctly (Bwells) + +0.4.0 +----- + +- [NEW] Format-free ISO 8601 parsing in factory ``get`` method +- [NEW] Support for 'week' / 'weeks' in ``span``, ``range``, ``span_range``, ``floor`` and ``ceil`` +- [NEW] Support for 'weeks' in ``replace`` +- [NEW] Norwegian locale (Martinp) +- [NEW] Japanese locale (CortYuming) +- [FIX] Timezones no longer show the wrong sign when formatted (Bean) +- [FIX] Microseconds are parsed correctly from strings (Bsidhom) +- [FIX] Locale day-of-week is no longer off by one (Cynddl) +- [FIX] Corrected plurals of Ukrainian and Russian nouns (Catchagain) +- [CHANGE] Old 0.1 ``arrow`` module method removed +- [CHANGE] Dropped timestamp support in ``range`` and ``span_range`` (never worked correctly) +- [CHANGE] Dropped parsing of single string as tz string in factory ``get`` method (replaced by ISO 8601) + +0.3.5 +----- + +- [NEW] French locale (Cynddl) +- [NEW] Spanish locale (Slapresta) +- [FIX] Ranges handle multiple timezones correctly
(Ftobia) + +0.3.4 +----- + +- [FIX] Humanize no longer sometimes returns the wrong month delta +- [FIX] ``__format__`` works correctly with no format string + +0.3.3 +----- + +- [NEW] Python 2.6 support +- [NEW] Initial support for locale-based parsing and formatting +- [NEW] ArrowFactory class, now proxied as the module API +- [NEW] ``factory`` api method to obtain a factory for a custom type +- [FIX] Python 3 support and tests completely ironed out + +0.3.2 +----- + +- [NEW] Python 3+ support + +0.3.1 +----- + +- [FIX] The old ``arrow`` module function handles timestamps correctly as it used to + +0.3.0 +----- + +- [NEW] ``Arrow.replace`` method +- [NEW] Accept timestamps, datetimes and Arrows for datetime inputs, where reasonable +- [FIX] ``range`` and ``span_range`` respect end and limit parameters correctly +- [CHANGE] Arrow objects are no longer mutable +- [CHANGE] Plural attribute name semantics altered: single -> absolute, plural -> relative +- [CHANGE] Plural names no longer supported as properties (e.g. ``arrow.utcnow().years``) + +0.2.1 +----- + +- [NEW] Support for localized humanization +- [NEW] English, Russian, Greek, Korean, Chinese locales + +0.2.0 +----- + +- **REWRITE** +- [NEW] Date parsing +- [NEW] Date formatting +- [NEW] ``floor``, ``ceil`` and ``span`` methods +- [NEW] ``datetime`` interface implementation +- [NEW] ``clone`` method +- [NEW] ``get``, ``now`` and ``utcnow`` API methods + +0.1.6 +----- + +- [NEW] Humanized time deltas +- [NEW] ``__eq__`` implemented +- [FIX] Issues with conversions related to daylight savings time resolved +- [CHANGE] ``__str__`` uses ISO formatting + +0.1.5 +----- + +- **Started tracking changes** +- [NEW] Parsing of ISO-formatted time zone offsets (e.g. 
'+02:30', '-05:00') +- [NEW] Resolved some issues with timestamps and delta / Olson time zones diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE new file mode 100644 index 0000000000..2bef500de7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Chris Smith + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in new file mode 100644 index 0000000000..d9955ed96a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in @@ -0,0 +1,3 @@ +include LICENSE CHANGELOG.rst README.rst Makefile requirements.txt tox.ini +recursive-include tests *.py +recursive-include docs *.py *.rst *.bat Makefile diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile new file mode 100644 index 0000000000..f294985dc6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile @@ -0,0 +1,44 @@ +.PHONY: auto test docs clean + +auto: build38 + +build27: PYTHON_VER = python2.7 +build35: PYTHON_VER = python3.5 +build36: PYTHON_VER = python3.6 +build37: PYTHON_VER = python3.7 +build38: PYTHON_VER = python3.8 +build39: PYTHON_VER = python3.9 + +build27 build35 build36 build37 build38 build39: clean + virtualenv venv --python=$(PYTHON_VER) + . venv/bin/activate; \ + pip install -r requirements.txt; \ + pre-commit install + +test: + rm -f .coverage coverage.xml + . venv/bin/activate; pytest + +lint: + . venv/bin/activate; pre-commit run --all-files --show-diff-on-failure + +docs: + rm -rf docs/_build + . 
venv/bin/activate; cd docs; make html + +clean: clean-dist + rm -rf venv .pytest_cache ./**/__pycache__ + rm -f .coverage coverage.xml ./**/*.pyc + +clean-dist: + rm -rf dist build .egg .eggs arrow.egg-info + +build-dist: + . venv/bin/activate; \ + pip install -U setuptools twine wheel; \ + python setup.py sdist bdist_wheel + +upload-dist: + . venv/bin/activate; twine upload dist/* + +publish: test clean-dist build-dist upload-dist clean-dist diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst new file mode 100644 index 0000000000..69f6c50d81 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst @@ -0,0 +1,133 @@ +Arrow: Better dates & times for Python +====================================== + +.. start-inclusion-marker-do-not-remove + +.. image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master + :alt: Build Status + :target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster + +.. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg + :alt: Coverage + :target: https://codecov.io/gh/arrow-py/arrow + +.. image:: https://img.shields.io/pypi/v/arrow.svg + :alt: PyPI Version + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/pypi/pyversions/arrow.svg + :alt: Supported Python Versions + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/pypi/l/arrow.svg + :alt: License + :target: https://pypi.python.org/pypi/arrow + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :alt: Code Style: Black + :target: https://github.com/psf/black + + +**Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. 
It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code. + +Arrow is named after the `arrow of time `_ and is heavily inspired by `moment.js `_ and `requests `_. + +Why use Arrow over built-in modules? +------------------------------------ + +Python's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective: + +- Too many modules: datetime, time, calendar, dateutil, pytz and more +- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc. +- Timezones and timestamp conversions are verbose and unpleasant +- Timezone naivety is the norm +- Gaps in functionality: ISO 8601 parsing, timespans, humanization + +Features +-------- + +- Fully-implemented, drop-in replacement for datetime +- Supports Python 2.7, 3.5, 3.6, 3.7, 3.8 and 3.9 +- Timezone-aware and UTC by default +- Provides super-simple creation options for many common input scenarios +- :code:`shift` method with support for relative offsets, including weeks +- Formats and parses strings automatically +- Wide support for ISO 8601 +- Timezone conversion +- Timestamp available as a property +- Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year +- Humanizes and supports a growing list of contributed locales +- Extensible for your own Arrow-derived types + +Quick Start +----------- + +Installation +~~~~~~~~~~~~ + +To install Arrow, use `pip `_ or `pipenv `_: + +.. code-block:: console + + $ pip install -U arrow + +Example Usage +~~~~~~~~~~~~~ + +.. 
code-block:: python + + >>> import arrow + >>> arrow.get('2013-05-11T21:23:58.970460+07:00') + + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc = utc.shift(hours=-1) + >>> utc + + + >>> local = utc.to('US/Pacific') + >>> local + + + >>> local.timestamp + 1368303838 + + >>> local.format() + '2013-05-11 13:23:58 -07:00' + + >>> local.format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-11 13:23:58 -07:00' + + >>> local.humanize() + 'an hour ago' + + >>> local.humanize(locale='ko_kr') + '1시간 전' + +.. end-inclusion-marker-do-not-remove + +Documentation +------------- + +For full documentation, please visit `arrow.readthedocs.io `_. + +Contributing +------------ + +Contributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing: + +#. Find an issue or feature to tackle on the `issue tracker `_. Issues marked with the `"good first issue" label `_ may be a great place to start! +#. Fork `this repository `_ on GitHub and begin making changes in a branch. +#. Add a few tests to ensure that the bug was fixed or the feature works as expected. +#. Run the entire test suite and linting checks by running one of the following commands: :code:`tox` (if you have `tox `_ installed) **OR** :code:`make build38 && make test && make lint` (if you do not have Python 3.8 installed, replace :code:`build38` with the latest Python version on your system). +#. Submit a pull request and await feedback 😃. + +If you have any questions along the way, feel free to ask them `here `_. + +Support Arrow +------------- + +`Open Collective `_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective `_. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py new file mode 100644 index 0000000000..2883527be8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +from ._version import __version__ +from .api import get, now, utcnow +from .arrow import Arrow +from .factory import ArrowFactory +from .formatter import ( + FORMAT_ATOM, + FORMAT_COOKIE, + FORMAT_RFC822, + FORMAT_RFC850, + FORMAT_RFC1036, + FORMAT_RFC1123, + FORMAT_RFC2822, + FORMAT_RFC3339, + FORMAT_RSS, + FORMAT_W3C, +) +from .parser import ParserError diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py new file mode 100644 index 0000000000..fd86b3ee91 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py @@ -0,0 +1 @@ +__version__ = "0.17.0" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py new file mode 100644 index 0000000000..a6b7be3de2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" +Provides the default implementation of :class:`ArrowFactory ` +methods for use as a module API. + +""" + +from __future__ import absolute_import + +from arrow.factory import ArrowFactory + +# internal default factory. 
+_factory = ArrowFactory() + + +def get(*args, **kwargs): + """Calls the default :class:`ArrowFactory ` ``get`` method.""" + + return _factory.get(*args, **kwargs) + + +get.__doc__ = _factory.get.__doc__ + + +def utcnow(): + """Calls the default :class:`ArrowFactory ` ``utcnow`` method.""" + + return _factory.utcnow() + + +utcnow.__doc__ = _factory.utcnow.__doc__ + + +def now(tz=None): + """Calls the default :class:`ArrowFactory ` ``now`` method.""" + + return _factory.now(tz) + + +now.__doc__ = _factory.now.__doc__ + + +def factory(type): + """Returns an :class:`.ArrowFactory` for the specified :class:`Arrow ` + or derived type. + + :param type: the type, :class:`Arrow ` or derived. + + """ + + return ArrowFactory(type) + + +__all__ = ["get", "utcnow", "now", "factory"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py new file mode 100644 index 0000000000..4fe9541789 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py @@ -0,0 +1,1584 @@ +# -*- coding: utf-8 -*- +""" +Provides the :class:`Arrow ` class, an enhanced ``datetime`` +replacement. + +""" + +from __future__ import absolute_import + +import calendar +import sys +import warnings +from datetime import datetime, timedelta +from datetime import tzinfo as dt_tzinfo +from math import trunc + +from dateutil import tz as dateutil_tz +from dateutil.relativedelta import relativedelta + +from arrow import formatter, locales, parser, util + +if sys.version_info[:2] < (3, 6): # pragma: no cover + with warnings.catch_warnings(): + warnings.simplefilter("default", DeprecationWarning) + warnings.warn( + "Arrow will drop support for Python 2.7 and 3.5 in the upcoming v1.0.0 release. Please upgrade to " + "Python 3.6+ to continue receiving updates for Arrow.", + DeprecationWarning, + ) + + +class Arrow(object): + """An :class:`Arrow ` object. 
+ + Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing + additional functionality. + + :param year: the calendar year. + :param month: the calendar month. + :param day: the calendar day. + :param hour: (optional) the hour. Defaults to 0. + :param minute: (optional) the minute, Defaults to 0. + :param second: (optional) the second, Defaults to 0. + :param microsecond: (optional) the microsecond. Defaults to 0. + :param tzinfo: (optional) A timezone expression. Defaults to UTC. + :param fold: (optional) 0 or 1, used to disambiguate repeated times. Defaults to 0. + + .. _tz-expr: + + Recognized timezone expressions: + + - A ``tzinfo`` object. + - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. + - A ``str`` in ISO 8601 style, as in '+07:00'. + - A ``str``, one of the following: 'local', 'utc', 'UTC'. + + Usage:: + + >>> import arrow + >>> arrow.Arrow(2013, 5, 5, 12, 30, 45) + + + """ + + resolution = datetime.resolution + + _ATTRS = ["year", "month", "day", "hour", "minute", "second", "microsecond"] + _ATTRS_PLURAL = ["{}s".format(a) for a in _ATTRS] + _MONTHS_PER_QUARTER = 3 + _SECS_PER_MINUTE = float(60) + _SECS_PER_HOUR = float(60 * 60) + _SECS_PER_DAY = float(60 * 60 * 24) + _SECS_PER_WEEK = float(60 * 60 * 24 * 7) + _SECS_PER_MONTH = float(60 * 60 * 24 * 30.5) + _SECS_PER_YEAR = float(60 * 60 * 24 * 365.25) + + def __init__( + self, + year, + month, + day, + hour=0, + minute=0, + second=0, + microsecond=0, + tzinfo=None, + **kwargs + ): + if tzinfo is None: + tzinfo = dateutil_tz.tzutc() + # detect that tzinfo is a pytz object (issue #626) + elif ( + isinstance(tzinfo, dt_tzinfo) + and hasattr(tzinfo, "localize") + and hasattr(tzinfo, "zone") + and tzinfo.zone + ): + tzinfo = parser.TzinfoParser.parse(tzinfo.zone) + elif util.isstr(tzinfo): + tzinfo = parser.TzinfoParser.parse(tzinfo) + + fold = kwargs.get("fold", 0) + + # use enfold here to cover direct arrow.Arrow init on 2.7/3.5 + 
self._datetime = dateutil_tz.enfold( + datetime(year, month, day, hour, minute, second, microsecond, tzinfo), + fold=fold, + ) + + # factories: single object, both original and from datetime. + + @classmethod + def now(cls, tzinfo=None): + """Constructs an :class:`Arrow ` object, representing "now" in the given + timezone. + + :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. + + Usage:: + + >>> arrow.now('Asia/Baku') + + + """ + + if tzinfo is None: + tzinfo = dateutil_tz.tzlocal() + + dt = datetime.now(tzinfo) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def utcnow(cls): + """Constructs an :class:`Arrow ` object, representing "now" in UTC + time. + + Usage:: + + >>> arrow.utcnow() + + + """ + + dt = datetime.now(dateutil_tz.tzutc()) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def fromtimestamp(cls, timestamp, tzinfo=None): + """Constructs an :class:`Arrow ` object from a timestamp, converted to + the given timezone. + + :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. + :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. 
+ """ + + if tzinfo is None: + tzinfo = dateutil_tz.tzlocal() + elif util.isstr(tzinfo): + tzinfo = parser.TzinfoParser.parse(tzinfo) + + if not util.is_timestamp(timestamp): + raise ValueError( + "The provided timestamp '{}' is invalid.".format(timestamp) + ) + + timestamp = util.normalize_timestamp(float(timestamp)) + dt = datetime.fromtimestamp(timestamp, tzinfo) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def utcfromtimestamp(cls, timestamp): + """Constructs an :class:`Arrow ` object from a timestamp, in UTC time. + + :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. + + """ + + if not util.is_timestamp(timestamp): + raise ValueError( + "The provided timestamp '{}' is invalid.".format(timestamp) + ) + + timestamp = util.normalize_timestamp(float(timestamp)) + dt = datetime.utcfromtimestamp(timestamp) + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dateutil_tz.tzutc(), + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def fromdatetime(cls, dt, tzinfo=None): + """Constructs an :class:`Arrow ` object from a ``datetime`` and + optional replacement timezone. + + :param dt: the ``datetime`` + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to ``dt``'s + timezone, or UTC if naive. 
+ + If you only want to replace the timezone of naive datetimes:: + + >>> dt + datetime.datetime(2013, 5, 5, 0, 0, tzinfo=tzutc()) + >>> arrow.Arrow.fromdatetime(dt, dt.tzinfo or 'US/Pacific') + + + """ + + if tzinfo is None: + if dt.tzinfo is None: + tzinfo = dateutil_tz.tzutc() + else: + tzinfo = dt.tzinfo + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo, + fold=getattr(dt, "fold", 0), + ) + + @classmethod + def fromdate(cls, date, tzinfo=None): + """Constructs an :class:`Arrow ` object from a ``date`` and optional + replacement timezone. Time values are set to 0. + + :param date: the ``date`` + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to UTC. + """ + + if tzinfo is None: + tzinfo = dateutil_tz.tzutc() + + return cls(date.year, date.month, date.day, tzinfo=tzinfo) + + @classmethod + def strptime(cls, date_str, fmt, tzinfo=None): + """Constructs an :class:`Arrow ` object from a date string and format, + in the style of ``datetime.strptime``. Optionally replaces the parsed timezone. + + :param date_str: the date string. + :param fmt: the format string. + :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to the parsed + timezone if ``fmt`` contains a timezone directive, otherwise UTC. + + Usage:: + + >>> arrow.Arrow.strptime('20-01-2019 15:49:10', '%d-%m-%Y %H:%M:%S') + + + """ + + dt = datetime.strptime(date_str, fmt) + if tzinfo is None: + tzinfo = dt.tzinfo + + return cls( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + tzinfo, + fold=getattr(dt, "fold", 0), + ) + + # factories: ranges and spans + + @classmethod + def range(cls, frame, start, end=None, tz=None, limit=None): + """Returns an iterator of :class:`Arrow ` objects, representing + points in time between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). 
+ :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param tz: (optional) A :ref:`timezone expression `. Defaults to + ``start``'s timezone, or UTC if ``start`` is naive. + :param limit: (optional) A maximum number of tuples to return. + + **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to + return the entire range. Call with ``limit`` alone to return a maximum # of results from + the start. Call with both to cap a range at a maximum # of results. + + **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before + iterating. As such, either call with naive objects and ``tz``, or aware objects from the + same timezone and no ``tz``. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + Usage:: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print(repr(r)) + ... + + + + + + + **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned iterator:: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 13, 30) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print(repr(r)) + ... 
+ + + + """ + + _, frame_relative, relative_steps = cls._get_frames(frame) + + tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) + + start = cls._get_datetime(start).replace(tzinfo=tzinfo) + end, limit = cls._get_iteration_params(end, limit) + end = cls._get_datetime(end).replace(tzinfo=tzinfo) + + current = cls.fromdatetime(start) + original_day = start.day + day_is_clipped = False + i = 0 + + while current <= end and i < limit: + i += 1 + yield current + + values = [getattr(current, f) for f in cls._ATTRS] + current = cls(*values, tzinfo=tzinfo).shift( + **{frame_relative: relative_steps} + ) + + if frame in ["month", "quarter", "year"] and current.day < original_day: + day_is_clipped = True + + if day_is_clipped and not cls._is_last_day_of_month(current): + current = current.replace(day=original_day) + + def span(self, frame, count=1, bounds="[)"): + """Returns two new :class:`Arrow ` objects, representing the timespan + of the :class:`Arrow ` object in a given timeframe. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param count: (optional) the number of frames to span. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the span. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '[)' is used. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. 
+ + Usage:: + + >>> arrow.utcnow() + + + >>> arrow.utcnow().span('hour') + (, ) + + >>> arrow.utcnow().span('day') + (, ) + + >>> arrow.utcnow().span('day', count=2) + (, ) + + >>> arrow.utcnow().span('day', bounds='[]') + (, ) + + """ + + util.validate_bounds(bounds) + + frame_absolute, frame_relative, relative_steps = self._get_frames(frame) + + if frame_absolute == "week": + attr = "day" + elif frame_absolute == "quarter": + attr = "month" + else: + attr = frame_absolute + + index = self._ATTRS.index(attr) + frames = self._ATTRS[: index + 1] + + values = [getattr(self, f) for f in frames] + + for _ in range(3 - len(values)): + values.append(1) + + floor = self.__class__(*values, tzinfo=self.tzinfo) + + if frame_absolute == "week": + floor = floor.shift(days=-(self.isoweekday() - 1)) + elif frame_absolute == "quarter": + floor = floor.shift(months=-((self.month - 1) % 3)) + + ceil = floor.shift(**{frame_relative: count * relative_steps}) + + if bounds[0] == "(": + floor = floor.shift(microseconds=+1) + + if bounds[1] == ")": + ceil = ceil.shift(microseconds=-1) + + return floor, ceil + + def floor(self, frame): + """Returns a new :class:`Arrow ` object, representing the "floor" + of the timespan of the :class:`Arrow ` object in a given timeframe. + Equivalent to the first element in the 2-tuple returned by + :func:`span `. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). + + Usage:: + + >>> arrow.utcnow().floor('hour') + + """ + + return self.span(frame)[0] + + def ceil(self, frame): + """Returns a new :class:`Arrow ` object, representing the "ceiling" + of the timespan of the :class:`Arrow ` object in a given timeframe. + Equivalent to the second element in the 2-tuple returned by + :func:`span `. + + :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). 
+ + Usage:: + + >>> arrow.utcnow().ceil('hour') + + """ + + return self.span(frame)[1] + + @classmethod + def span_range(cls, frame, start, end, tz=None, limit=None, bounds="[)"): + """Returns an iterator of tuples, each :class:`Arrow ` objects, + representing a series of timespans between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param tz: (optional) A :ref:`timezone expression `. Defaults to + ``start``'s timezone, or UTC if ``start`` is naive. + :param limit: (optional) A maximum number of tuples to return. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in each span in the range. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '[)' is used. + + **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to + return the entire range. Call with ``limit`` alone to return a maximum # of results from + the start. Call with both to cap a range at a maximum # of results. + + **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before + iterating. As such, either call with naive objects and ``tz``, or aware objects from the + same timezone and no ``tz``. + + Supported frame values: year, quarter, month, week, day, hour, minute, second. + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + **NOTE**: Unlike Python's ``range``, ``end`` will *always* be included in the returned + iterator of timespans. + + Usage: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.span_range('hour', start, end): + ... print(r) + ... 
+ (, ) + (, ) + (, ) + (, ) + (, ) + (, ) + + """ + + tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) + start = cls.fromdatetime(start, tzinfo).span(frame)[0] + _range = cls.range(frame, start, end, tz, limit) + return (r.span(frame, bounds=bounds) for r in _range) + + @classmethod + def interval(cls, frame, start, end, interval=1, tz=None, bounds="[)"): + """Returns an iterator of tuples, each :class:`Arrow ` objects, + representing a series of intervals between two inputs. + + :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). + :param start: A datetime expression, the start of the range. + :param end: (optional) A datetime expression, the end of the range. + :param interval: (optional) Time interval for the given time frame. + :param tz: (optional) A timezone expression. Defaults to UTC. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the intervals. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '[)' is used. + + Supported frame values: year, quarter, month, week, day, hour, minute, second + + Recognized datetime expressions: + + - An :class:`Arrow ` object. + - A ``datetime`` object. + + Recognized timezone expressions: + + - A ``tzinfo`` object. + - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. + - A ``str`` in ISO 8601 style, as in '+07:00'. + - A ``str``, one of the following: 'local', 'utc', 'UTC'. + + Usage: + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.interval('hour', start, end, 2): + ... print r + ... 
+ (, ) + (, ) + (, ) + """ + if interval < 1: + raise ValueError("interval has to be a positive integer") + + spanRange = iter(cls.span_range(frame, start, end, tz, bounds=bounds)) + while True: + try: + intvlStart, intvlEnd = next(spanRange) + for _ in range(interval - 1): + _, intvlEnd = next(spanRange) + yield intvlStart, intvlEnd + except StopIteration: + return + + # representations + + def __repr__(self): + return "<{} [{}]>".format(self.__class__.__name__, self.__str__()) + + def __str__(self): + return self._datetime.isoformat() + + def __format__(self, formatstr): + + if len(formatstr) > 0: + return self.format(formatstr) + + return str(self) + + def __hash__(self): + return self._datetime.__hash__() + + # attributes and properties + + def __getattr__(self, name): + + if name == "week": + return self.isocalendar()[1] + + if name == "quarter": + return int((self.month - 1) / self._MONTHS_PER_QUARTER) + 1 + + if not name.startswith("_"): + value = getattr(self._datetime, name, None) + + if value is not None: + return value + + return object.__getattribute__(self, name) + + @property + def tzinfo(self): + """Gets the ``tzinfo`` of the :class:`Arrow ` object. + + Usage:: + + >>> arw=arrow.utcnow() + >>> arw.tzinfo + tzutc() + + """ + + return self._datetime.tzinfo + + @tzinfo.setter + def tzinfo(self, tzinfo): + """ Sets the ``tzinfo`` of the :class:`Arrow ` object. """ + + self._datetime = self._datetime.replace(tzinfo=tzinfo) + + @property + def datetime(self): + """Returns a datetime representation of the :class:`Arrow ` object. + + Usage:: + + >>> arw=arrow.utcnow() + >>> arw.datetime + datetime.datetime(2019, 1, 24, 16, 35, 27, 276649, tzinfo=tzutc()) + + """ + + return self._datetime + + @property + def naive(self): + """Returns a naive datetime representation of the :class:`Arrow ` + object. 
+ + Usage:: + + >>> nairobi = arrow.now('Africa/Nairobi') + >>> nairobi + + >>> nairobi.naive + datetime.datetime(2019, 1, 23, 19, 27, 12, 297999) + + """ + + return self._datetime.replace(tzinfo=None) + + @property + def timestamp(self): + """Returns a timestamp representation of the :class:`Arrow ` object, in + UTC time. + + Usage:: + + >>> arrow.utcnow().timestamp + 1548260567 + + """ + + warnings.warn( + "For compatibility with the datetime.timestamp() method this property will be replaced with a method in " + "the 1.0.0 release, please switch to the .int_timestamp property for identical behaviour as soon as " + "possible.", + DeprecationWarning, + ) + return calendar.timegm(self._datetime.utctimetuple()) + + @property + def int_timestamp(self): + """Returns a timestamp representation of the :class:`Arrow ` object, in + UTC time. + + Usage:: + + >>> arrow.utcnow().int_timestamp + 1548260567 + + """ + + return calendar.timegm(self._datetime.utctimetuple()) + + @property + def float_timestamp(self): + """Returns a floating-point representation of the :class:`Arrow ` + object, in UTC time. + + Usage:: + + >>> arrow.utcnow().float_timestamp + 1548260516.830896 + + """ + + # IDEA get rid of this in 1.0.0 and wrap datetime.timestamp() + # Or for compatibility retain this but make it call the timestamp method + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + return self.timestamp + float(self.microsecond) / 1000000 + + @property + def fold(self): + """ Returns the ``fold`` value of the :class:`Arrow ` object. 
""" + + # in python < 3.6 _datetime will be a _DatetimeWithFold if fold=1 and a datetime with no fold attribute + # otherwise, so we need to return zero to cover the latter case + return getattr(self._datetime, "fold", 0) + + @property + def ambiguous(self): + """ Returns a boolean indicating whether the :class:`Arrow ` object is ambiguous.""" + + return dateutil_tz.datetime_ambiguous(self._datetime) + + @property + def imaginary(self): + """Indicates whether the :class: `Arrow ` object exists in the current timezone.""" + + return not dateutil_tz.datetime_exists(self._datetime) + + # mutation and duplication. + + def clone(self): + """Returns a new :class:`Arrow ` object, cloned from the current one. + + Usage: + + >>> arw = arrow.utcnow() + >>> cloned = arw.clone() + + """ + + return self.fromdatetime(self._datetime) + + def replace(self, **kwargs): + """Returns a new :class:`Arrow ` object with attributes updated + according to inputs. + + Use property names to set their value absolutely:: + + >>> import arrow + >>> arw = arrow.utcnow() + >>> arw + + >>> arw.replace(year=2014, month=6) + + + You can also replace the timezone without conversion, using a + :ref:`timezone expression `:: + + >>> arw.replace(tzinfo=tz.tzlocal()) + + + """ + + absolute_kwargs = {} + + for key, value in kwargs.items(): + + if key in self._ATTRS: + absolute_kwargs[key] = value + elif key in ["week", "quarter"]: + raise AttributeError("setting absolute {} is not supported".format(key)) + elif key not in ["tzinfo", "fold"]: + raise AttributeError('unknown attribute: "{}"'.format(key)) + + current = self._datetime.replace(**absolute_kwargs) + + tzinfo = kwargs.get("tzinfo") + + if tzinfo is not None: + tzinfo = self._get_tzinfo(tzinfo) + current = current.replace(tzinfo=tzinfo) + + fold = kwargs.get("fold") + + # TODO revisit this once we drop support for 2.7/3.5 + if fold is not None: + current = dateutil_tz.enfold(current, fold=fold) + + return self.fromdatetime(current) + + def 
shift(self, **kwargs): + """Returns a new :class:`Arrow ` object with attributes updated + according to inputs. + + Use pluralized property names to relatively shift their current value: + + >>> import arrow + >>> arw = arrow.utcnow() + >>> arw + + >>> arw.shift(years=1, months=-1) + + + Day-of-the-week relative shifting can use either Python's weekday numbers + (Monday = 0, Tuesday = 1 .. Sunday = 6) or using dateutil.relativedelta's + day instances (MO, TU .. SU). When using weekday numbers, the returned + date will always be greater than or equal to the starting date. + + Using the above code (which is a Saturday) and asking it to shift to Saturday: + + >>> arw.shift(weekday=5) + + + While asking for a Monday: + + >>> arw.shift(weekday=0) + + + """ + + relative_kwargs = {} + additional_attrs = ["weeks", "quarters", "weekday"] + + for key, value in kwargs.items(): + + if key in self._ATTRS_PLURAL or key in additional_attrs: + relative_kwargs[key] = value + else: + raise AttributeError( + "Invalid shift time frame. Please select one of the following: {}.".format( + ", ".join(self._ATTRS_PLURAL + additional_attrs) + ) + ) + + # core datetime does not support quarters, translate to months. + relative_kwargs.setdefault("months", 0) + relative_kwargs["months"] += ( + relative_kwargs.pop("quarters", 0) * self._MONTHS_PER_QUARTER + ) + + current = self._datetime + relativedelta(**relative_kwargs) + + if not dateutil_tz.datetime_exists(current): + current = dateutil_tz.resolve_imaginary(current) + + return self.fromdatetime(current) + + def to(self, tz): + """Returns a new :class:`Arrow ` object, converted + to the target timezone. + + :param tz: A :ref:`timezone expression `. 
+ + Usage:: + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc.to('US/Pacific') + + + >>> utc.to(tz.tzlocal()) + + + >>> utc.to('-07:00') + + + >>> utc.to('local') + + + >>> utc.to('local').to('utc') + + + """ + + if not isinstance(tz, dt_tzinfo): + tz = parser.TzinfoParser.parse(tz) + + dt = self._datetime.astimezone(tz) + + return self.__class__( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + dt.second, + dt.microsecond, + dt.tzinfo, + fold=getattr(dt, "fold", 0), + ) + + # string output and formatting + + def format(self, fmt="YYYY-MM-DD HH:mm:ssZZ", locale="en_us"): + """Returns a string representation of the :class:`Arrow ` object, + formatted according to a format string. + + :param fmt: the format string. + + Usage:: + + >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-09 03:56:47 -00:00' + + >>> arrow.utcnow().format('X') + '1368071882' + + >>> arrow.utcnow().format('MMMM DD, YYYY') + 'May 09, 2013' + + >>> arrow.utcnow().format() + '2013-05-09 03:56:47 -00:00' + + """ + + return formatter.DateTimeFormatter(locale).format(self._datetime, fmt) + + def humanize( + self, other=None, locale="en_us", only_distance=False, granularity="auto" + ): + """Returns a localized, humanized representation of a relative difference in time. + + :param other: (optional) an :class:`Arrow ` or ``datetime`` object. + Defaults to now in the current :class:`Arrow ` object's timezone. + :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'. + :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part. + :param granularity: (optional) defines the precision of the output. 
Set it to strings 'second', 'minute', + 'hour', 'day', 'week', 'month' or 'year' or a list of any combination of these strings + + Usage:: + + >>> earlier = arrow.utcnow().shift(hours=-2) + >>> earlier.humanize() + '2 hours ago' + + >>> later = earlier.shift(hours=4) + >>> later.humanize(earlier) + 'in 4 hours' + + """ + + locale_name = locale + locale = locales.get_locale(locale) + + if other is None: + utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) + dt = utc.astimezone(self._datetime.tzinfo) + + elif isinstance(other, Arrow): + dt = other._datetime + + elif isinstance(other, datetime): + if other.tzinfo is None: + dt = other.replace(tzinfo=self._datetime.tzinfo) + else: + dt = other.astimezone(self._datetime.tzinfo) + + else: + raise TypeError( + "Invalid 'other' argument of type '{}'. " + "Argument must be of type None, Arrow, or datetime.".format( + type(other).__name__ + ) + ) + + if isinstance(granularity, list) and len(granularity) == 1: + granularity = granularity[0] + + delta = int(round(util.total_seconds(self._datetime - dt))) + sign = -1 if delta < 0 else 1 + diff = abs(delta) + delta = diff + + try: + if granularity == "auto": + if diff < 10: + return locale.describe("now", only_distance=only_distance) + + if diff < 45: + seconds = sign * delta + return locale.describe( + "seconds", seconds, only_distance=only_distance + ) + + elif diff < 90: + return locale.describe("minute", sign, only_distance=only_distance) + elif diff < 2700: + minutes = sign * int(max(delta / 60, 2)) + return locale.describe( + "minutes", minutes, only_distance=only_distance + ) + + elif diff < 5400: + return locale.describe("hour", sign, only_distance=only_distance) + elif diff < 79200: + hours = sign * int(max(delta / 3600, 2)) + return locale.describe("hours", hours, only_distance=only_distance) + + # anything less than 48 hours should be 1 day + elif diff < 172800: + return locale.describe("day", sign, only_distance=only_distance) + elif diff < 554400: + days = 
sign * int(max(delta / 86400, 2)) + return locale.describe("days", days, only_distance=only_distance) + + elif diff < 907200: + return locale.describe("week", sign, only_distance=only_distance) + elif diff < 2419200: + weeks = sign * int(max(delta / 604800, 2)) + return locale.describe("weeks", weeks, only_distance=only_distance) + + elif diff < 3888000: + return locale.describe("month", sign, only_distance=only_distance) + elif diff < 29808000: + self_months = self._datetime.year * 12 + self._datetime.month + other_months = dt.year * 12 + dt.month + + months = sign * int(max(abs(other_months - self_months), 2)) + + return locale.describe( + "months", months, only_distance=only_distance + ) + + elif diff < 47260800: + return locale.describe("year", sign, only_distance=only_distance) + else: + years = sign * int(max(delta / 31536000, 2)) + return locale.describe("years", years, only_distance=only_distance) + + elif util.isstr(granularity): + if granularity == "second": + delta = sign * delta + if abs(delta) < 2: + return locale.describe("now", only_distance=only_distance) + elif granularity == "minute": + delta = sign * delta / self._SECS_PER_MINUTE + elif granularity == "hour": + delta = sign * delta / self._SECS_PER_HOUR + elif granularity == "day": + delta = sign * delta / self._SECS_PER_DAY + elif granularity == "week": + delta = sign * delta / self._SECS_PER_WEEK + elif granularity == "month": + delta = sign * delta / self._SECS_PER_MONTH + elif granularity == "year": + delta = sign * delta / self._SECS_PER_YEAR + else: + raise AttributeError( + "Invalid level of granularity. 
Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'" + ) + + if trunc(abs(delta)) != 1: + granularity += "s" + return locale.describe(granularity, delta, only_distance=only_distance) + + else: + timeframes = [] + if "year" in granularity: + years = sign * delta / self._SECS_PER_YEAR + delta %= self._SECS_PER_YEAR + timeframes.append(["year", years]) + + if "month" in granularity: + months = sign * delta / self._SECS_PER_MONTH + delta %= self._SECS_PER_MONTH + timeframes.append(["month", months]) + + if "week" in granularity: + weeks = sign * delta / self._SECS_PER_WEEK + delta %= self._SECS_PER_WEEK + timeframes.append(["week", weeks]) + + if "day" in granularity: + days = sign * delta / self._SECS_PER_DAY + delta %= self._SECS_PER_DAY + timeframes.append(["day", days]) + + if "hour" in granularity: + hours = sign * delta / self._SECS_PER_HOUR + delta %= self._SECS_PER_HOUR + timeframes.append(["hour", hours]) + + if "minute" in granularity: + minutes = sign * delta / self._SECS_PER_MINUTE + delta %= self._SECS_PER_MINUTE + timeframes.append(["minute", minutes]) + + if "second" in granularity: + seconds = sign * delta + timeframes.append(["second", seconds]) + + if len(timeframes) < len(granularity): + raise AttributeError( + "Invalid level of granularity. " + "Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'." + ) + + for tf in timeframes: + # Make granularity plural if the delta is not equal to 1 + if trunc(abs(tf[1])) != 1: + tf[0] += "s" + return locale.describe_multi(timeframes, only_distance=only_distance) + + except KeyError as e: + raise ValueError( + "Humanization of the {} granularity is not currently translated in the '{}' locale. 
" + "Please consider making a contribution to this locale.".format( + e, locale_name + ) + ) + + # query functions + + def is_between(self, start, end, bounds="()"): + """Returns a boolean denoting whether the specified date and time is between + the start and end dates and times. + + :param start: an :class:`Arrow ` object. + :param end: an :class:`Arrow ` object. + :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies + whether to include or exclude the start and end values in the range. '(' excludes + the start, '[' includes the start, ')' excludes the end, and ']' includes the end. + If the bounds are not specified, the default bound '()' is used. + + Usage:: + + >>> start = arrow.get(datetime(2013, 5, 5, 12, 30, 10)) + >>> end = arrow.get(datetime(2013, 5, 5, 12, 30, 36)) + >>> arrow.get(datetime(2013, 5, 5, 12, 30, 27)).is_between(start, end) + True + + >>> start = arrow.get(datetime(2013, 5, 5)) + >>> end = arrow.get(datetime(2013, 5, 8)) + >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[]') + True + + >>> start = arrow.get(datetime(2013, 5, 5)) + >>> end = arrow.get(datetime(2013, 5, 8)) + >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[)') + False + + """ + + util.validate_bounds(bounds) + + if not isinstance(start, Arrow): + raise TypeError( + "Can't parse start date argument type of '{}'".format(type(start)) + ) + + if not isinstance(end, Arrow): + raise TypeError( + "Can't parse end date argument type of '{}'".format(type(end)) + ) + + include_start = bounds[0] == "[" + include_end = bounds[1] == "]" + + target_timestamp = self.float_timestamp + start_timestamp = start.float_timestamp + end_timestamp = end.float_timestamp + + if include_start and include_end: + return ( + target_timestamp >= start_timestamp + and target_timestamp <= end_timestamp + ) + elif include_start and not include_end: + return ( + target_timestamp >= start_timestamp and target_timestamp < end_timestamp + ) + elif not 
include_start and include_end: + return ( + target_timestamp > start_timestamp and target_timestamp <= end_timestamp + ) + else: + return ( + target_timestamp > start_timestamp and target_timestamp < end_timestamp + ) + + # datetime methods + + def date(self): + """Returns a ``date`` object with the same year, month and day. + + Usage:: + + >>> arrow.utcnow().date() + datetime.date(2019, 1, 23) + + """ + + return self._datetime.date() + + def time(self): + """Returns a ``time`` object with the same hour, minute, second, microsecond. + + Usage:: + + >>> arrow.utcnow().time() + datetime.time(12, 15, 34, 68352) + + """ + + return self._datetime.time() + + def timetz(self): + """Returns a ``time`` object with the same hour, minute, second, microsecond and + tzinfo. + + Usage:: + + >>> arrow.utcnow().timetz() + datetime.time(12, 5, 18, 298893, tzinfo=tzutc()) + + """ + + return self._datetime.timetz() + + def astimezone(self, tz): + """Returns a ``datetime`` object, converted to the specified timezone. + + :param tz: a ``tzinfo`` object. + + Usage:: + + >>> pacific=arrow.now('US/Pacific') + >>> nyc=arrow.now('America/New_York').tzinfo + >>> pacific.astimezone(nyc) + datetime.datetime(2019, 1, 20, 10, 24, 22, 328172, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York')) + + """ + + return self._datetime.astimezone(tz) + + def utcoffset(self): + """Returns a ``timedelta`` object representing the whole number of minutes difference from + UTC time. + + Usage:: + + >>> arrow.now('US/Pacific').utcoffset() + datetime.timedelta(-1, 57600) + + """ + + return self._datetime.utcoffset() + + def dst(self): + """Returns the daylight savings time adjustment. + + Usage:: + + >>> arrow.utcnow().dst() + datetime.timedelta(0) + + """ + + return self._datetime.dst() + + def timetuple(self): + """Returns a ``time.struct_time``, in the current timezone. 
+ + Usage:: + + >>> arrow.utcnow().timetuple() + time.struct_time(tm_year=2019, tm_mon=1, tm_mday=20, tm_hour=15, tm_min=17, tm_sec=8, tm_wday=6, tm_yday=20, tm_isdst=0) + + """ + + return self._datetime.timetuple() + + def utctimetuple(self): + """Returns a ``time.struct_time``, in UTC time. + + Usage:: + + >>> arrow.utcnow().utctimetuple() + time.struct_time(tm_year=2019, tm_mon=1, tm_mday=19, tm_hour=21, tm_min=41, tm_sec=7, tm_wday=5, tm_yday=19, tm_isdst=0) + + """ + + return self._datetime.utctimetuple() + + def toordinal(self): + """Returns the proleptic Gregorian ordinal of the date. + + Usage:: + + >>> arrow.utcnow().toordinal() + 737078 + + """ + + return self._datetime.toordinal() + + def weekday(self): + """Returns the day of the week as an integer (0-6). + + Usage:: + + >>> arrow.utcnow().weekday() + 5 + + """ + + return self._datetime.weekday() + + def isoweekday(self): + """Returns the ISO day of the week as an integer (1-7). + + Usage:: + + >>> arrow.utcnow().isoweekday() + 6 + + """ + + return self._datetime.isoweekday() + + def isocalendar(self): + """Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). + + Usage:: + + >>> arrow.utcnow().isocalendar() + (2019, 3, 6) + + """ + + return self._datetime.isocalendar() + + def isoformat(self, sep="T"): + """Returns an ISO 8601 formatted representation of the date and time. + + Usage:: + + >>> arrow.utcnow().isoformat() + '2019-01-19T18:30:52.442118+00:00' + + """ + + return self._datetime.isoformat(sep) + + def ctime(self): + """Returns a ctime formatted representation of the date and time. + + Usage:: + + >>> arrow.utcnow().ctime() + 'Sat Jan 19 18:26:50 2019' + + """ + + return self._datetime.ctime() + + def strftime(self, format): + """Formats in the style of ``datetime.strftime``. + + :param format: the format string. 
+ + Usage:: + + >>> arrow.utcnow().strftime('%d-%m-%Y %H:%M:%S') + '23-01-2019 12:28:17' + + """ + + return self._datetime.strftime(format) + + def for_json(self): + """Serializes for the ``for_json`` protocol of simplejson. + + Usage:: + + >>> arrow.utcnow().for_json() + '2019-01-19T18:25:36.760079+00:00' + + """ + + return self.isoformat() + + # math + + def __add__(self, other): + + if isinstance(other, (timedelta, relativedelta)): + return self.fromdatetime(self._datetime + other, self._datetime.tzinfo) + + return NotImplemented + + def __radd__(self, other): + return self.__add__(other) + + def __sub__(self, other): + + if isinstance(other, (timedelta, relativedelta)): + return self.fromdatetime(self._datetime - other, self._datetime.tzinfo) + + elif isinstance(other, datetime): + return self._datetime - other + + elif isinstance(other, Arrow): + return self._datetime - other._datetime + + return NotImplemented + + def __rsub__(self, other): + + if isinstance(other, datetime): + return other - self._datetime + + return NotImplemented + + # comparisons + + def __eq__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return False + + return self._datetime == self._get_datetime(other) + + def __ne__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return True + + return not self.__eq__(other) + + def __gt__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime > self._get_datetime(other) + + def __ge__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime >= self._get_datetime(other) + + def __lt__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime < self._get_datetime(other) + + def __le__(self, other): + + if not isinstance(other, (Arrow, datetime)): + return NotImplemented + + return self._datetime <= self._get_datetime(other) + + def __cmp__(self, 
other): + if sys.version_info[0] < 3: # pragma: no cover + if not isinstance(other, (Arrow, datetime)): + raise TypeError( + "can't compare '{}' to '{}'".format(type(self), type(other)) + ) + + # internal methods + + @staticmethod + def _get_tzinfo(tz_expr): + + if tz_expr is None: + return dateutil_tz.tzutc() + if isinstance(tz_expr, dt_tzinfo): + return tz_expr + else: + try: + return parser.TzinfoParser.parse(tz_expr) + except parser.ParserError: + raise ValueError("'{}' not recognized as a timezone".format(tz_expr)) + + @classmethod + def _get_datetime(cls, expr): + """Get datetime object for a specified expression.""" + if isinstance(expr, Arrow): + return expr.datetime + elif isinstance(expr, datetime): + return expr + elif util.is_timestamp(expr): + timestamp = float(expr) + return cls.utcfromtimestamp(timestamp).datetime + else: + raise ValueError( + "'{}' not recognized as a datetime or timestamp.".format(expr) + ) + + @classmethod + def _get_frames(cls, name): + + if name in cls._ATTRS: + return name, "{}s".format(name), 1 + elif name[-1] == "s" and name[:-1] in cls._ATTRS: + return name[:-1], name, 1 + elif name in ["week", "weeks"]: + return "week", "weeks", 1 + elif name in ["quarter", "quarters"]: + return "quarter", "months", 3 + + supported = ", ".join( + [ + "year(s)", + "month(s)", + "day(s)", + "hour(s)", + "minute(s)", + "second(s)", + "microsecond(s)", + "week(s)", + "quarter(s)", + ] + ) + raise AttributeError( + "range/span over frame {} not supported. 
Supported frames: {}".format( + name, supported + ) + ) + + @classmethod + def _get_iteration_params(cls, end, limit): + + if end is None: + + if limit is None: + raise ValueError("one of 'end' or 'limit' is required") + + return cls.max, limit + + else: + if limit is None: + return end, sys.maxsize + return end, limit + + @staticmethod + def _is_last_day_of_month(date): + return date.day == calendar.monthrange(date.year, date.month)[1] + + +Arrow.min = Arrow.fromdatetime(datetime.min) +Arrow.max = Arrow.fromdatetime(datetime.max) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py new file mode 100644 index 0000000000..81e37b26de --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- + +# Output of time.mktime(datetime.max.timetuple()) on macOS +# This value must be hardcoded for compatibility with Windows +# Platform-independent max timestamps are hard to form +# https://stackoverflow.com/q/46133223 +MAX_TIMESTAMP = 253402318799.0 +MAX_TIMESTAMP_MS = MAX_TIMESTAMP * 1000 +MAX_TIMESTAMP_US = MAX_TIMESTAMP * 1000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py new file mode 100644 index 0000000000..05933e8151 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +""" +Implements the :class:`ArrowFactory ` class, +providing factory methods for common :class:`Arrow ` +construction scenarios. 
+ +""" + +from __future__ import absolute_import + +import calendar +from datetime import date, datetime +from datetime import tzinfo as dt_tzinfo +from time import struct_time + +from dateutil import tz as dateutil_tz + +from arrow import parser +from arrow.arrow import Arrow +from arrow.util import is_timestamp, iso_to_gregorian, isstr + + +class ArrowFactory(object): + """A factory for generating :class:`Arrow ` objects. + + :param type: (optional) the :class:`Arrow `-based class to construct from. + Defaults to :class:`Arrow `. + + """ + + def __init__(self, type=Arrow): + self.type = type + + def get(self, *args, **kwargs): + """Returns an :class:`Arrow ` object based on flexible inputs. + + :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en_us'. + :param tzinfo: (optional) a :ref:`timezone expression ` or tzinfo object. + Replaces the timezone unless using an input form that is explicitly UTC or specifies + the timezone in a positional argument. Defaults to UTC. + :param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize + redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing. + Defaults to false. + + Usage:: + + >>> import arrow + + **No inputs** to get current UTC time:: + + >>> arrow.get() + + + **None** to also get current UTC time:: + + >>> arrow.get(None) + + + **One** :class:`Arrow ` object, to get a copy. 
+ + >>> arw = arrow.utcnow() + >>> arrow.get(arw) + + + **One** ``float`` or ``int``, convertible to a floating-point timestamp, to get + that timestamp in UTC:: + + >>> arrow.get(1367992474.293378) + + + >>> arrow.get(1367992474) + + + **One** ISO 8601-formatted ``str``, to parse it:: + + >>> arrow.get('2013-09-29T01:26:43.830580') + + + **One** ISO 8601-formatted ``str``, in basic format, to parse it:: + + >>> arrow.get('20160413T133656.456289') + + + **One** ``tzinfo``, to get the current time **converted** to that timezone:: + + >>> arrow.get(tz.tzlocal()) + + + **One** naive ``datetime``, to get that datetime in UTC:: + + >>> arrow.get(datetime(2013, 5, 5)) + + + **One** aware ``datetime``, to get that datetime:: + + >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal())) + + + **One** naive ``date``, to get that date in UTC:: + + >>> arrow.get(date(2013, 5, 5)) + + + **One** time.struct time:: + + >>> arrow.get(gmtime(0)) + + + **One** iso calendar ``tuple``, to get that week date in UTC:: + + >>> arrow.get((2013, 18, 7)) + + + **Two** arguments, a naive or aware ``datetime``, and a replacement + :ref:`timezone expression `:: + + >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') + + + **Two** arguments, a naive ``date``, and a replacement + :ref:`timezone expression `:: + + >>> arrow.get(date(2013, 5, 5), 'US/Pacific') + + + **Two** arguments, both ``str``, to parse the first according to the format of the second:: + + >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ') + + + **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try:: + + >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss']) + + + **Three or more** arguments, as for the constructor of a ``datetime``:: + + >>> arrow.get(2013, 5, 5, 12, 30, 45) + + + """ + + arg_count = len(args) + locale = kwargs.pop("locale", "en_us") + tz = kwargs.get("tzinfo", None) + normalize_whitespace = kwargs.pop("normalize_whitespace", False) 
+ + # if kwargs given, send to constructor unless only tzinfo provided + if len(kwargs) > 1: + arg_count = 3 + + # tzinfo kwarg is not provided + if len(kwargs) == 1 and tz is None: + arg_count = 3 + + # () -> now, @ utc. + if arg_count == 0: + if isstr(tz): + tz = parser.TzinfoParser.parse(tz) + return self.type.now(tz) + + if isinstance(tz, dt_tzinfo): + return self.type.now(tz) + + return self.type.utcnow() + + if arg_count == 1: + arg = args[0] + + # (None) -> now, @ utc. + if arg is None: + return self.type.utcnow() + + # try (int, float) -> from timestamp with tz + elif not isstr(arg) and is_timestamp(arg): + if tz is None: + # set to UTC by default + tz = dateutil_tz.tzutc() + return self.type.fromtimestamp(arg, tzinfo=tz) + + # (Arrow) -> from the object's datetime. + elif isinstance(arg, Arrow): + return self.type.fromdatetime(arg.datetime) + + # (datetime) -> from datetime. + elif isinstance(arg, datetime): + return self.type.fromdatetime(arg) + + # (date) -> from date. + elif isinstance(arg, date): + return self.type.fromdate(arg) + + # (tzinfo) -> now, @ tzinfo. + elif isinstance(arg, dt_tzinfo): + return self.type.now(arg) + + # (str) -> parse. + elif isstr(arg): + dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace) + return self.type.fromdatetime(dt, tz) + + # (struct_time) -> from struct_time + elif isinstance(arg, struct_time): + return self.type.utcfromtimestamp(calendar.timegm(arg)) + + # (iso calendar) -> convert then from date + elif isinstance(arg, tuple) and len(arg) == 3: + dt = iso_to_gregorian(*arg) + return self.type.fromdate(dt) + + else: + raise TypeError( + "Can't parse single argument of type '{}'".format(type(arg)) + ) + + elif arg_count == 2: + + arg_1, arg_2 = args[0], args[1] + + if isinstance(arg_1, datetime): + + # (datetime, tzinfo/str) -> fromdatetime replace tzinfo. 
+ if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): + return self.type.fromdatetime(arg_1, arg_2) + else: + raise TypeError( + "Can't parse two arguments of types 'datetime', '{}'".format( + type(arg_2) + ) + ) + + elif isinstance(arg_1, date): + + # (date, tzinfo/str) -> fromdate replace tzinfo. + if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): + return self.type.fromdate(arg_1, tzinfo=arg_2) + else: + raise TypeError( + "Can't parse two arguments of types 'date', '{}'".format( + type(arg_2) + ) + ) + + # (str, format) -> parse. + elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)): + dt = parser.DateTimeParser(locale).parse( + args[0], args[1], normalize_whitespace + ) + return self.type.fromdatetime(dt, tzinfo=tz) + + else: + raise TypeError( + "Can't parse two arguments of types '{}' and '{}'".format( + type(arg_1), type(arg_2) + ) + ) + + # 3+ args -> datetime-like via constructor. + else: + return self.type(*args, **kwargs) + + def utcnow(self): + """Returns an :class:`Arrow ` object, representing "now" in UTC time. + + Usage:: + + >>> import arrow + >>> arrow.utcnow() + + """ + + return self.type.utcnow() + + def now(self, tz=None): + """Returns an :class:`Arrow ` object, representing "now" in the given + timezone. + + :param tz: (optional) A :ref:`timezone expression `. Defaults to local time. 
+ + Usage:: + + >>> import arrow + >>> arrow.now() + + + >>> arrow.now('US/Pacific') + + + >>> arrow.now('+02:00') + + + >>> arrow.now('local') + + """ + + if tz is None: + tz = dateutil_tz.tzlocal() + elif not isinstance(tz, dt_tzinfo): + tz = parser.TzinfoParser.parse(tz) + + return self.type.now(tz) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py new file mode 100644 index 0000000000..9f9d7a44da --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, division + +import calendar +import re + +from dateutil import tz as dateutil_tz + +from arrow import locales, util + +FORMAT_ATOM = "YYYY-MM-DD HH:mm:ssZZ" +FORMAT_COOKIE = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ" +FORMAT_RFC822 = "ddd, DD MMM YY HH:mm:ss Z" +FORMAT_RFC850 = "dddd, DD-MMM-YY HH:mm:ss ZZZ" +FORMAT_RFC1036 = "ddd, DD MMM YY HH:mm:ss Z" +FORMAT_RFC1123 = "ddd, DD MMM YYYY HH:mm:ss Z" +FORMAT_RFC2822 = "ddd, DD MMM YYYY HH:mm:ss Z" +FORMAT_RFC3339 = "YYYY-MM-DD HH:mm:ssZZ" +FORMAT_RSS = "ddd, DD MMM YYYY HH:mm:ss Z" +FORMAT_W3C = "YYYY-MM-DD HH:mm:ssZZ" + + +class DateTimeFormatter(object): + + # This pattern matches characters enclosed in square brackets are matched as + # an atomic group. 
For more info on atomic groups and how to they are + # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578 + + _FORMAT_RE = re.compile( + r"(\[(?:(?=(?P[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)" + ) + + def __init__(self, locale="en_us"): + + self.locale = locales.get_locale(locale) + + def format(cls, dt, fmt): + + return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt) + + def _format_token(self, dt, token): + + if token and token.startswith("[") and token.endswith("]"): + return token[1:-1] + + if token == "YYYY": + return self.locale.year_full(dt.year) + if token == "YY": + return self.locale.year_abbreviation(dt.year) + + if token == "MMMM": + return self.locale.month_name(dt.month) + if token == "MMM": + return self.locale.month_abbreviation(dt.month) + if token == "MM": + return "{:02d}".format(dt.month) + if token == "M": + return str(dt.month) + + if token == "DDDD": + return "{:03d}".format(dt.timetuple().tm_yday) + if token == "DDD": + return str(dt.timetuple().tm_yday) + if token == "DD": + return "{:02d}".format(dt.day) + if token == "D": + return str(dt.day) + + if token == "Do": + return self.locale.ordinal_number(dt.day) + + if token == "dddd": + return self.locale.day_name(dt.isoweekday()) + if token == "ddd": + return self.locale.day_abbreviation(dt.isoweekday()) + if token == "d": + return str(dt.isoweekday()) + + if token == "HH": + return "{:02d}".format(dt.hour) + if token == "H": + return str(dt.hour) + if token == "hh": + return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) + if token == "h": + return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) + + if token == "mm": + return "{:02d}".format(dt.minute) + if token == "m": + return str(dt.minute) + + if token == "ss": + return "{:02d}".format(dt.second) + if token == "s": + return str(dt.second) + + if token == "SSSSSS": + return 
str("{:06d}".format(int(dt.microsecond))) + if token == "SSSSS": + return str("{:05d}".format(int(dt.microsecond / 10))) + if token == "SSSS": + return str("{:04d}".format(int(dt.microsecond / 100))) + if token == "SSS": + return str("{:03d}".format(int(dt.microsecond / 1000))) + if token == "SS": + return str("{:02d}".format(int(dt.microsecond / 10000))) + if token == "S": + return str(int(dt.microsecond / 100000)) + + if token == "X": + # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 + return str(calendar.timegm(dt.utctimetuple())) + + if token == "x": + # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 + ts = calendar.timegm(dt.utctimetuple()) + (dt.microsecond / 1000000) + return str(int(ts * 1000000)) + + if token == "ZZZ": + return dt.tzname() + + if token in ["ZZ", "Z"]: + separator = ":" if token == "ZZ" else "" + tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo + total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60) + + sign = "+" if total_minutes >= 0 else "-" + total_minutes = abs(total_minutes) + hour, minute = divmod(total_minutes, 60) + + return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute) + + if token in ("a", "A"): + return self.locale.meridian(dt.hour, token) + + if token == "W": + year, week, day = dt.isocalendar() + return "{}-W{:02d}-{}".format(year, week, day) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py new file mode 100644 index 0000000000..6833da5a78 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py @@ -0,0 +1,4267 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import inspect +import sys +from math import trunc + + +def get_locale(name): + """Returns an appropriate :class:`Locale ` + corresponding to an inpute locale name. 
+ + :param name: the name of the locale. + + """ + + locale_cls = _locales.get(name.lower()) + + if locale_cls is None: + raise ValueError("Unsupported locale '{}'".format(name)) + + return locale_cls() + + +def get_locale_by_class_name(name): + """Returns an appropriate :class:`Locale ` + corresponding to an locale class name. + + :param name: the name of the locale class. + + """ + locale_cls = globals().get(name) + + if locale_cls is None: + raise ValueError("Unsupported locale '{}'".format(name)) + + return locale_cls() + + +# base locale type. + + +class Locale(object): + """ Represents locale-specific data and functionality. """ + + names = [] + + timeframes = { + "now": "", + "second": "", + "seconds": "", + "minute": "", + "minutes": "", + "hour": "", + "hours": "", + "day": "", + "days": "", + "week": "", + "weeks": "", + "month": "", + "months": "", + "year": "", + "years": "", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + past = None + future = None + and_word = None + + month_names = [] + month_abbreviations = [] + + day_names = [] + day_abbreviations = [] + + ordinal_day_re = r"(\d+)" + + def __init__(self): + + self._month_name_to_ordinal = None + + def describe(self, timeframe, delta=0, only_distance=False): + """Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. + :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = self._format_timeframe(timeframe, delta) + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + def describe_multi(self, timeframes, only_distance=False): + """Describes a delta within multiple timeframes in plain language. + + :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
+ :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords + """ + + humanized = "" + for index, (timeframe, delta) in enumerate(timeframes): + humanized += self._format_timeframe(timeframe, delta) + if index == len(timeframes) - 2 and self.and_word: + humanized += " " + self.and_word + " " + elif index < len(timeframes) - 1: + humanized += " " + + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + def day_name(self, day): + """Returns the day name for a specified day of the week. + + :param day: the ``int`` day of the week (1-7). + + """ + + return self.day_names[day] + + def day_abbreviation(self, day): + """Returns the day abbreviation for a specified day of the week. + + :param day: the ``int`` day of the week (1-7). + + """ + + return self.day_abbreviations[day] + + def month_name(self, month): + """Returns the month name for a specified month of the year. + + :param month: the ``int`` month of the year (1-12). + + """ + + return self.month_names[month] + + def month_abbreviation(self, month): + """Returns the month abbreviation for a specified month of the year. + + :param month: the ``int`` month of the year (1-12). + + """ + + return self.month_abbreviations[month] + + def month_number(self, name): + """Returns the month number for a month specified by name or abbreviation. + + :param name: the month name or abbreviation. 
+ + """ + + if self._month_name_to_ordinal is None: + self._month_name_to_ordinal = self._name_to_ordinal(self.month_names) + self._month_name_to_ordinal.update( + self._name_to_ordinal(self.month_abbreviations) + ) + + return self._month_name_to_ordinal.get(name) + + def year_full(self, year): + """Returns the year for specific locale if available + + :param name: the ``int`` year (4-digit) + """ + return "{:04d}".format(year) + + def year_abbreviation(self, year): + """Returns the year for specific locale if available + + :param name: the ``int`` year (4-digit) + """ + return "{:04d}".format(year)[2:] + + def meridian(self, hour, token): + """Returns the meridian indicator for a specified hour and format token. + + :param hour: the ``int`` hour of the day. + :param token: the format token. + """ + + if token == "a": + return self.meridians["am"] if hour < 12 else self.meridians["pm"] + if token == "A": + return self.meridians["AM"] if hour < 12 else self.meridians["PM"] + + def ordinal_number(self, n): + """Returns the ordinal format of a given integer + + :param n: an integer + """ + return self._ordinal_number(n) + + def _ordinal_number(self, n): + return "{}".format(n) + + def _name_to_ordinal(self, lst): + return dict(map(lambda i: (i[1].lower(), i[0] + 1), enumerate(lst[1:]))) + + def _format_timeframe(self, timeframe, delta): + return self.timeframes[timeframe].format(trunc(abs(delta))) + + def _format_relative(self, humanized, timeframe, delta): + + if timeframe == "now": + return humanized + + direction = self.past if delta < 0 else self.future + + return direction.format(humanized) + + +# base locale type implementations. 
+ + +class EnglishLocale(Locale): + + names = [ + "en", + "en_us", + "en_gb", + "en_au", + "en_be", + "en_jp", + "en_za", + "en_ca", + "en_ph", + ] + + past = "{0} ago" + future = "in {0}" + and_word = "and" + + timeframes = { + "now": "just now", + "second": "a second", + "seconds": "{0} seconds", + "minute": "a minute", + "minutes": "{0} minutes", + "hour": "an hour", + "hours": "{0} hours", + "day": "a day", + "days": "{0} days", + "week": "a week", + "weeks": "{0} weeks", + "month": "a month", + "months": "{0} months", + "year": "a year", + "years": "{0} years", + } + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + month_names = [ + "", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ] + + day_names = [ + "", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday", + "Sunday", + ] + day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + ordinal_day_re = r"((?P[2-3]?1(?=st)|[2-3]?2(?=nd)|[2-3]?3(?=rd)|[1-3]?[04-9](?=th)|1[1-3](?=th))(st|nd|rd|th))" + + def _ordinal_number(self, n): + if n % 100 not in (11, 12, 13): + remainder = abs(n) % 10 + if remainder == 1: + return "{}st".format(n) + elif remainder == 2: + return "{}nd".format(n) + elif remainder == 3: + return "{}rd".format(n) + return "{}th".format(n) + + def describe(self, timeframe, delta=0, only_distance=False): + """Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. 
+ :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + humanized = super(EnglishLocale, self).describe(timeframe, delta, only_distance) + if only_distance and timeframe == "now": + humanized = "instantly" + + return humanized + + +class ItalianLocale(Locale): + names = ["it", "it_it"] + past = "{0} fa" + future = "tra {0}" + and_word = "e" + + timeframes = { + "now": "adesso", + "second": "un secondo", + "seconds": "{0} qualche secondo", + "minute": "un minuto", + "minutes": "{0} minuti", + "hour": "un'ora", + "hours": "{0} ore", + "day": "un giorno", + "days": "{0} giorni", + "week": "una settimana,", + "weeks": "{0} settimane", + "month": "un mese", + "months": "{0} mesi", + "year": "un anno", + "years": "{0} anni", + } + + month_names = [ + "", + "gennaio", + "febbraio", + "marzo", + "aprile", + "maggio", + "giugno", + "luglio", + "agosto", + "settembre", + "ottobre", + "novembre", + "dicembre", + ] + month_abbreviations = [ + "", + "gen", + "feb", + "mar", + "apr", + "mag", + "giu", + "lug", + "ago", + "set", + "ott", + "nov", + "dic", + ] + + day_names = [ + "", + "lunedì", + "martedì", + "mercoledì", + "giovedì", + "venerdì", + "sabato", + "domenica", + ] + day_abbreviations = ["", "lun", "mar", "mer", "gio", "ven", "sab", "dom"] + + ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" + + def _ordinal_number(self, n): + return "{}º".format(n) + + +class SpanishLocale(Locale): + names = ["es", "es_es"] + past = "hace {0}" + future = "en {0}" + and_word = "y" + + timeframes = { + "now": "ahora", + "second": "un segundo", + "seconds": "{0} segundos", + "minute": "un minuto", + "minutes": "{0} minutos", + "hour": "una hora", + "hours": "{0} horas", + "day": "un día", + "days": "{0} días", + "week": "una semana", + "weeks": "{0} semanas", + "month": "un mes", + "months": "{0} meses", + "year": "un año", + "years": "{0} años", + } + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + month_names = [ + "", 
+ "enero", + "febrero", + "marzo", + "abril", + "mayo", + "junio", + "julio", + "agosto", + "septiembre", + "octubre", + "noviembre", + "diciembre", + ] + month_abbreviations = [ + "", + "ene", + "feb", + "mar", + "abr", + "may", + "jun", + "jul", + "ago", + "sep", + "oct", + "nov", + "dic", + ] + + day_names = [ + "", + "lunes", + "martes", + "miércoles", + "jueves", + "viernes", + "sábado", + "domingo", + ] + day_abbreviations = ["", "lun", "mar", "mie", "jue", "vie", "sab", "dom"] + + ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" + + def _ordinal_number(self, n): + return "{}º".format(n) + + +class FrenchBaseLocale(Locale): + + past = "il y a {0}" + future = "dans {0}" + and_word = "et" + + timeframes = { + "now": "maintenant", + "second": "une seconde", + "seconds": "{0} quelques secondes", + "minute": "une minute", + "minutes": "{0} minutes", + "hour": "une heure", + "hours": "{0} heures", + "day": "un jour", + "days": "{0} jours", + "week": "une semaine", + "weeks": "{0} semaines", + "month": "un mois", + "months": "{0} mois", + "year": "un an", + "years": "{0} ans", + } + + month_names = [ + "", + "janvier", + "février", + "mars", + "avril", + "mai", + "juin", + "juillet", + "août", + "septembre", + "octobre", + "novembre", + "décembre", + ] + + day_names = [ + "", + "lundi", + "mardi", + "mercredi", + "jeudi", + "vendredi", + "samedi", + "dimanche", + ] + day_abbreviations = ["", "lun", "mar", "mer", "jeu", "ven", "sam", "dim"] + + ordinal_day_re = ( + r"((?P\b1(?=er\b)|[1-3]?[02-9](?=e\b)|[1-3]1(?=e\b))(er|e)\b)" + ) + + def _ordinal_number(self, n): + if abs(n) == 1: + return "{}er".format(n) + return "{}e".format(n) + + +class FrenchLocale(FrenchBaseLocale, Locale): + + names = ["fr", "fr_fr"] + + month_abbreviations = [ + "", + "janv", + "févr", + "mars", + "avr", + "mai", + "juin", + "juil", + "août", + "sept", + "oct", + "nov", + "déc", + ] + + +class FrenchCanadianLocale(FrenchBaseLocale, Locale): + + names = ["fr_ca"] + + month_abbreviations = [ 
+ "", + "janv", + "févr", + "mars", + "avr", + "mai", + "juin", + "juill", + "août", + "sept", + "oct", + "nov", + "déc", + ] + + +class GreekLocale(Locale): + + names = ["el", "el_gr"] + + past = "{0} πριν" + future = "σε {0}" + and_word = "και" + + timeframes = { + "now": "τώρα", + "second": "ένα δεύτερο", + "seconds": "{0} δευτερόλεπτα", + "minute": "ένα λεπτό", + "minutes": "{0} λεπτά", + "hour": "μία ώρα", + "hours": "{0} ώρες", + "day": "μία μέρα", + "days": "{0} μέρες", + "month": "ένα μήνα", + "months": "{0} μήνες", + "year": "ένα χρόνο", + "years": "{0} χρόνια", + } + + month_names = [ + "", + "Ιανουαρίου", + "Φεβρουαρίου", + "Μαρτίου", + "Απριλίου", + "Μαΐου", + "Ιουνίου", + "Ιουλίου", + "Αυγούστου", + "Σεπτεμβρίου", + "Οκτωβρίου", + "Νοεμβρίου", + "Δεκεμβρίου", + ] + month_abbreviations = [ + "", + "Ιαν", + "Φεβ", + "Μαρ", + "Απρ", + "Μαϊ", + "Ιον", + "Ιολ", + "Αυγ", + "Σεπ", + "Οκτ", + "Νοε", + "Δεκ", + ] + + day_names = [ + "", + "Δευτέρα", + "Τρίτη", + "Τετάρτη", + "Πέμπτη", + "Παρασκευή", + "Σάββατο", + "Κυριακή", + ] + day_abbreviations = ["", "Δευ", "Τρι", "Τετ", "Πεμ", "Παρ", "Σαβ", "Κυρ"] + + +class JapaneseLocale(Locale): + + names = ["ja", "ja_jp"] + + past = "{0}前" + future = "{0}後" + + timeframes = { + "now": "現在", + "second": "二番目の", + "seconds": "{0}数秒", + "minute": "1分", + "minutes": "{0}分", + "hour": "1時間", + "hours": "{0}時間", + "day": "1日", + "days": "{0}日", + "week": "1週間", + "weeks": "{0}週間", + "month": "1ヶ月", + "months": "{0}ヶ月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "月曜日", "火曜日", "水曜日", "木曜日", "金曜日", "土曜日", "日曜日"] + day_abbreviations = ["", "月", "火", "水", "木", "金", "土", "日"] + + +class SwedishLocale(Locale): + + names = ["sv", "sv_se"] + + past = "för {0} 
sen" + future = "om {0}" + and_word = "och" + + timeframes = { + "now": "just nu", + "second": "en sekund", + "seconds": "{0} några sekunder", + "minute": "en minut", + "minutes": "{0} minuter", + "hour": "en timme", + "hours": "{0} timmar", + "day": "en dag", + "days": "{0} dagar", + "week": "en vecka", + "weeks": "{0} veckor", + "month": "en månad", + "months": "{0} månader", + "year": "ett år", + "years": "{0} år", + } + + month_names = [ + "", + "januari", + "februari", + "mars", + "april", + "maj", + "juni", + "juli", + "augusti", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "måndag", + "tisdag", + "onsdag", + "torsdag", + "fredag", + "lördag", + "söndag", + ] + day_abbreviations = ["", "mån", "tis", "ons", "tor", "fre", "lör", "sön"] + + +class FinnishLocale(Locale): + + names = ["fi", "fi_fi"] + + # The finnish grammar is very complex, and its hard to convert + # 1-to-1 to something like English. 
+ + past = "{0} sitten" + future = "{0} kuluttua" + + timeframes = { + "now": ["juuri nyt", "juuri nyt"], + "second": ["sekunti", "sekunti"], + "seconds": ["{0} muutama sekunti", "{0} muutaman sekunnin"], + "minute": ["minuutti", "minuutin"], + "minutes": ["{0} minuuttia", "{0} minuutin"], + "hour": ["tunti", "tunnin"], + "hours": ["{0} tuntia", "{0} tunnin"], + "day": ["päivä", "päivä"], + "days": ["{0} päivää", "{0} päivän"], + "month": ["kuukausi", "kuukauden"], + "months": ["{0} kuukautta", "{0} kuukauden"], + "year": ["vuosi", "vuoden"], + "years": ["{0} vuotta", "{0} vuoden"], + } + + # Months and days are lowercase in Finnish + month_names = [ + "", + "tammikuu", + "helmikuu", + "maaliskuu", + "huhtikuu", + "toukokuu", + "kesäkuu", + "heinäkuu", + "elokuu", + "syyskuu", + "lokakuu", + "marraskuu", + "joulukuu", + ] + + month_abbreviations = [ + "", + "tammi", + "helmi", + "maalis", + "huhti", + "touko", + "kesä", + "heinä", + "elo", + "syys", + "loka", + "marras", + "joulu", + ] + + day_names = [ + "", + "maanantai", + "tiistai", + "keskiviikko", + "torstai", + "perjantai", + "lauantai", + "sunnuntai", + ] + + day_abbreviations = ["", "ma", "ti", "ke", "to", "pe", "la", "su"] + + def _format_timeframe(self, timeframe, delta): + return ( + self.timeframes[timeframe][0].format(abs(delta)), + self.timeframes[timeframe][1].format(abs(delta)), + ) + + def _format_relative(self, humanized, timeframe, delta): + if timeframe == "now": + return humanized[0] + + direction = self.past if delta < 0 else self.future + which = 0 if delta < 0 else 1 + + return direction.format(humanized[which]) + + def _ordinal_number(self, n): + return "{}.".format(n) + + +class ChineseCNLocale(Locale): + + names = ["zh", "zh_cn"] + + past = "{0}前" + future = "{0}后" + + timeframes = { + "now": "刚才", + "second": "一秒", + "seconds": "{0}秒", + "minute": "1分钟", + "minutes": "{0}分钟", + "hour": "1小时", + "hours": "{0}小时", + "day": "1天", + "days": "{0}天", + "week": "一周", + "weeks": "{0}周", + 
"month": "1个月", + "months": "{0}个月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "一月", + "二月", + "三月", + "四月", + "五月", + "六月", + "七月", + "八月", + "九月", + "十月", + "十一月", + "十二月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] + day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class ChineseTWLocale(Locale): + + names = ["zh_tw"] + + past = "{0}前" + future = "{0}後" + and_word = "和" + + timeframes = { + "now": "剛才", + "second": "1秒", + "seconds": "{0}秒", + "minute": "1分鐘", + "minutes": "{0}分鐘", + "hour": "1小時", + "hours": "{0}小時", + "day": "1天", + "days": "{0}天", + "week": "1週", + "weeks": "{0}週", + "month": "1個月", + "months": "{0}個月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "週一", "週二", "週三", "週四", "週五", "週六", "週日"] + day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class HongKongLocale(Locale): + + names = ["zh_hk"] + + past = "{0}前" + future = "{0}後" + + timeframes = { + "now": "剛才", + "second": "1秒", + "seconds": "{0}秒", + "minute": "1分鐘", + "minutes": "{0}分鐘", + "hour": "1小時", + "hours": "{0}小時", + "day": "1天", + "days": "{0}天", + "week": "1星期", + "weeks": "{0}星期", + "month": "1個月", + "months": "{0}個月", + "year": "1年", + "years": "{0}年", + } + + month_names = [ + "", + "1月", + "2月", + "3月", + "4月", + "5月", + "6月", + "7月", + "8月", + "9月", + "10月", + "11月", + "12月", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] + 
day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] + + +class KoreanLocale(Locale): + + names = ["ko", "ko_kr"] + + past = "{0} 전" + future = "{0} 후" + + timeframes = { + "now": "지금", + "second": "1초", + "seconds": "{0}초", + "minute": "1분", + "minutes": "{0}분", + "hour": "한시간", + "hours": "{0}시간", + "day": "하루", + "days": "{0}일", + "week": "1주", + "weeks": "{0}주", + "month": "한달", + "months": "{0}개월", + "year": "1년", + "years": "{0}년", + } + + special_dayframes = { + -3: "그끄제", + -2: "그제", + -1: "어제", + 1: "내일", + 2: "모레", + 3: "글피", + 4: "그글피", + } + + special_yearframes = {-2: "제작년", -1: "작년", 1: "내년", 2: "내후년"} + + month_names = [ + "", + "1월", + "2월", + "3월", + "4월", + "5월", + "6월", + "7월", + "8월", + "9월", + "10월", + "11월", + "12월", + ] + month_abbreviations = [ + "", + " 1", + " 2", + " 3", + " 4", + " 5", + " 6", + " 7", + " 8", + " 9", + "10", + "11", + "12", + ] + + day_names = ["", "월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"] + day_abbreviations = ["", "월", "화", "수", "목", "금", "토", "일"] + + def _ordinal_number(self, n): + ordinals = ["0", "첫", "두", "세", "네", "다섯", "여섯", "일곱", "여덟", "아홉", "열"] + if n < len(ordinals): + return "{}번째".format(ordinals[n]) + return "{}번째".format(n) + + def _format_relative(self, humanized, timeframe, delta): + if timeframe in ("day", "days"): + special = self.special_dayframes.get(delta) + if special: + return special + elif timeframe in ("year", "years"): + special = self.special_yearframes.get(delta) + if special: + return special + + return super(KoreanLocale, self)._format_relative(humanized, timeframe, delta) + + +# derived locale types & implementations. 
+class DutchLocale(Locale): + + names = ["nl", "nl_nl"] + + past = "{0} geleden" + future = "over {0}" + + timeframes = { + "now": "nu", + "second": "een seconde", + "seconds": "{0} seconden", + "minute": "een minuut", + "minutes": "{0} minuten", + "hour": "een uur", + "hours": "{0} uur", + "day": "een dag", + "days": "{0} dagen", + "week": "een week", + "weeks": "{0} weken", + "month": "een maand", + "months": "{0} maanden", + "year": "een jaar", + "years": "{0} jaar", + } + + # In Dutch names of months and days are not starting with a capital letter + # like in the English language. + month_names = [ + "", + "januari", + "februari", + "maart", + "april", + "mei", + "juni", + "juli", + "augustus", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mrt", + "apr", + "mei", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "maandag", + "dinsdag", + "woensdag", + "donderdag", + "vrijdag", + "zaterdag", + "zondag", + ] + day_abbreviations = ["", "ma", "di", "wo", "do", "vr", "za", "zo"] + + +class SlavicBaseLocale(Locale): + def _format_timeframe(self, timeframe, delta): + + form = self.timeframes[timeframe] + delta = abs(delta) + + if isinstance(form, list): + + if delta % 10 == 1 and delta % 100 != 11: + form = form[0] + elif 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[1] + else: + form = form[2] + + return form.format(delta) + + +class BelarusianLocale(SlavicBaseLocale): + + names = ["be", "be_by"] + + past = "{0} таму" + future = "праз {0}" + + timeframes = { + "now": "зараз", + "second": "секунду", + "seconds": "{0} некалькі секунд", + "minute": "хвіліну", + "minutes": ["{0} хвіліну", "{0} хвіліны", "{0} хвілін"], + "hour": "гадзіну", + "hours": ["{0} гадзіну", "{0} гадзіны", "{0} гадзін"], + "day": "дзень", + "days": ["{0} дзень", "{0} дні", "{0} дзён"], + "month": "месяц", + "months": ["{0} месяц", "{0} месяцы", "{0} 
месяцаў"], + "year": "год", + "years": ["{0} год", "{0} гады", "{0} гадоў"], + } + + month_names = [ + "", + "студзеня", + "лютага", + "сакавіка", + "красавіка", + "траўня", + "чэрвеня", + "ліпеня", + "жніўня", + "верасня", + "кастрычніка", + "лістапада", + "снежня", + ] + month_abbreviations = [ + "", + "студ", + "лют", + "сак", + "крас", + "трав", + "чэрв", + "ліп", + "жнів", + "вер", + "каст", + "ліст", + "снеж", + ] + + day_names = [ + "", + "панядзелак", + "аўторак", + "серада", + "чацвер", + "пятніца", + "субота", + "нядзеля", + ] + day_abbreviations = ["", "пн", "ат", "ср", "чц", "пт", "сб", "нд"] + + +class PolishLocale(SlavicBaseLocale): + + names = ["pl", "pl_pl"] + + past = "{0} temu" + future = "za {0}" + + # The nouns should be in genitive case (Polish: "dopełniacz") + # in order to correctly form `past` & `future` expressions. + timeframes = { + "now": "teraz", + "second": "sekundę", + "seconds": ["{0} sekund", "{0} sekundy", "{0} sekund"], + "minute": "minutę", + "minutes": ["{0} minut", "{0} minuty", "{0} minut"], + "hour": "godzinę", + "hours": ["{0} godzin", "{0} godziny", "{0} godzin"], + "day": "dzień", + "days": "{0} dni", + "week": "tydzień", + "weeks": ["{0} tygodni", "{0} tygodnie", "{0} tygodni"], + "month": "miesiąc", + "months": ["{0} miesięcy", "{0} miesiące", "{0} miesięcy"], + "year": "rok", + "years": ["{0} lat", "{0} lata", "{0} lat"], + } + + month_names = [ + "", + "styczeń", + "luty", + "marzec", + "kwiecień", + "maj", + "czerwiec", + "lipiec", + "sierpień", + "wrzesień", + "październik", + "listopad", + "grudzień", + ] + month_abbreviations = [ + "", + "sty", + "lut", + "mar", + "kwi", + "maj", + "cze", + "lip", + "sie", + "wrz", + "paź", + "lis", + "gru", + ] + + day_names = [ + "", + "poniedziałek", + "wtorek", + "środa", + "czwartek", + "piątek", + "sobota", + "niedziela", + ] + day_abbreviations = ["", "Pn", "Wt", "Śr", "Czw", "Pt", "So", "Nd"] + + +class RussianLocale(SlavicBaseLocale): + + names = ["ru", "ru_ru"] + + past = 
"{0} назад" + future = "через {0}" + + timeframes = { + "now": "сейчас", + "second": "Второй", + "seconds": "{0} несколько секунд", + "minute": "минуту", + "minutes": ["{0} минуту", "{0} минуты", "{0} минут"], + "hour": "час", + "hours": ["{0} час", "{0} часа", "{0} часов"], + "day": "день", + "days": ["{0} день", "{0} дня", "{0} дней"], + "week": "неделю", + "weeks": ["{0} неделю", "{0} недели", "{0} недель"], + "month": "месяц", + "months": ["{0} месяц", "{0} месяца", "{0} месяцев"], + "year": "год", + "years": ["{0} год", "{0} года", "{0} лет"], + } + + month_names = [ + "", + "января", + "февраля", + "марта", + "апреля", + "мая", + "июня", + "июля", + "августа", + "сентября", + "октября", + "ноября", + "декабря", + ] + month_abbreviations = [ + "", + "янв", + "фев", + "мар", + "апр", + "май", + "июн", + "июл", + "авг", + "сен", + "окт", + "ноя", + "дек", + ] + + day_names = [ + "", + "понедельник", + "вторник", + "среда", + "четверг", + "пятница", + "суббота", + "воскресенье", + ] + day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "вс"] + + +class AfrikaansLocale(Locale): + + names = ["af", "af_nl"] + + past = "{0} gelede" + future = "in {0}" + + timeframes = { + "now": "nou", + "second": "n sekonde", + "seconds": "{0} sekondes", + "minute": "minuut", + "minutes": "{0} minute", + "hour": "uur", + "hours": "{0} ure", + "day": "een dag", + "days": "{0} dae", + "month": "een maand", + "months": "{0} maande", + "year": "een jaar", + "years": "{0} jaar", + } + + month_names = [ + "", + "Januarie", + "Februarie", + "Maart", + "April", + "Mei", + "Junie", + "Julie", + "Augustus", + "September", + "Oktober", + "November", + "Desember", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mrt", + "Apr", + "Mei", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Des", + ] + + day_names = [ + "", + "Maandag", + "Dinsdag", + "Woensdag", + "Donderdag", + "Vrydag", + "Saterdag", + "Sondag", + ] + day_abbreviations = ["", "Ma", "Di", "Wo", "Do", "Vr", 
"Za", "So"] + + +class BulgarianLocale(SlavicBaseLocale): + + names = ["bg", "bg_BG"] + + past = "{0} назад" + future = "напред {0}" + + timeframes = { + "now": "сега", + "second": "секунда", + "seconds": "{0} няколко секунди", + "minute": "минута", + "minutes": ["{0} минута", "{0} минути", "{0} минути"], + "hour": "час", + "hours": ["{0} час", "{0} часа", "{0} часа"], + "day": "ден", + "days": ["{0} ден", "{0} дни", "{0} дни"], + "month": "месец", + "months": ["{0} месец", "{0} месеца", "{0} месеца"], + "year": "година", + "years": ["{0} година", "{0} години", "{0} години"], + } + + month_names = [ + "", + "януари", + "февруари", + "март", + "април", + "май", + "юни", + "юли", + "август", + "септември", + "октомври", + "ноември", + "декември", + ] + month_abbreviations = [ + "", + "ян", + "февр", + "март", + "апр", + "май", + "юни", + "юли", + "авг", + "септ", + "окт", + "ноем", + "дек", + ] + + day_names = [ + "", + "понеделник", + "вторник", + "сряда", + "четвъртък", + "петък", + "събота", + "неделя", + ] + day_abbreviations = ["", "пон", "вт", "ср", "четв", "пет", "съб", "нед"] + + +class UkrainianLocale(SlavicBaseLocale): + + names = ["ua", "uk_ua"] + + past = "{0} тому" + future = "за {0}" + + timeframes = { + "now": "зараз", + "second": "секунда", + "seconds": "{0} кілька секунд", + "minute": "хвилину", + "minutes": ["{0} хвилину", "{0} хвилини", "{0} хвилин"], + "hour": "годину", + "hours": ["{0} годину", "{0} години", "{0} годин"], + "day": "день", + "days": ["{0} день", "{0} дні", "{0} днів"], + "month": "місяць", + "months": ["{0} місяць", "{0} місяці", "{0} місяців"], + "year": "рік", + "years": ["{0} рік", "{0} роки", "{0} років"], + } + + month_names = [ + "", + "січня", + "лютого", + "березня", + "квітня", + "травня", + "червня", + "липня", + "серпня", + "вересня", + "жовтня", + "листопада", + "грудня", + ] + month_abbreviations = [ + "", + "січ", + "лют", + "бер", + "квіт", + "трав", + "черв", + "лип", + "серп", + "вер", + "жовт", + "лист", + 
"груд", + ] + + day_names = [ + "", + "понеділок", + "вівторок", + "середа", + "четвер", + "п’ятниця", + "субота", + "неділя", + ] + day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "нд"] + + +class MacedonianLocale(SlavicBaseLocale): + names = ["mk", "mk_mk"] + + past = "пред {0}" + future = "за {0}" + + timeframes = { + "now": "сега", + "second": "една секунда", + "seconds": ["{0} секунда", "{0} секунди", "{0} секунди"], + "minute": "една минута", + "minutes": ["{0} минута", "{0} минути", "{0} минути"], + "hour": "еден саат", + "hours": ["{0} саат", "{0} саати", "{0} саати"], + "day": "еден ден", + "days": ["{0} ден", "{0} дена", "{0} дена"], + "week": "една недела", + "weeks": ["{0} недела", "{0} недели", "{0} недели"], + "month": "еден месец", + "months": ["{0} месец", "{0} месеци", "{0} месеци"], + "year": "една година", + "years": ["{0} година", "{0} години", "{0} години"], + } + + meridians = {"am": "дп", "pm": "пп", "AM": "претпладне", "PM": "попладне"} + + month_names = [ + "", + "Јануари", + "Февруари", + "Март", + "Април", + "Мај", + "Јуни", + "Јули", + "Август", + "Септември", + "Октомври", + "Ноември", + "Декември", + ] + month_abbreviations = [ + "", + "Јан", + "Фев", + "Мар", + "Апр", + "Мај", + "Јун", + "Јул", + "Авг", + "Септ", + "Окт", + "Ноем", + "Декем", + ] + + day_names = [ + "", + "Понеделник", + "Вторник", + "Среда", + "Четврток", + "Петок", + "Сабота", + "Недела", + ] + day_abbreviations = [ + "", + "Пон", + "Вт", + "Сре", + "Чет", + "Пет", + "Саб", + "Нед", + ] + + +class GermanBaseLocale(Locale): + + past = "vor {0}" + future = "in {0}" + and_word = "und" + + timeframes = { + "now": "gerade eben", + "second": "eine Sekunde", + "seconds": "{0} Sekunden", + "minute": "einer Minute", + "minutes": "{0} Minuten", + "hour": "einer Stunde", + "hours": "{0} Stunden", + "day": "einem Tag", + "days": "{0} Tagen", + "week": "einer Woche", + "weeks": "{0} Wochen", + "month": "einem Monat", + "months": "{0} Monaten", + "year": "einem 
Jahr", + "years": "{0} Jahren", + } + + timeframes_only_distance = timeframes.copy() + timeframes_only_distance["minute"] = "eine Minute" + timeframes_only_distance["hour"] = "eine Stunde" + timeframes_only_distance["day"] = "ein Tag" + timeframes_only_distance["week"] = "eine Woche" + timeframes_only_distance["month"] = "ein Monat" + timeframes_only_distance["year"] = "ein Jahr" + + month_names = [ + "", + "Januar", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dez", + ] + + day_names = [ + "", + "Montag", + "Dienstag", + "Mittwoch", + "Donnerstag", + "Freitag", + "Samstag", + "Sonntag", + ] + + day_abbreviations = ["", "Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] + + def _ordinal_number(self, n): + return "{}.".format(n) + + def describe(self, timeframe, delta=0, only_distance=False): + """Describes a delta within a timeframe in plain language. + + :param timeframe: a string representing a timeframe. + :param delta: a quantity representing a delta in a timeframe. 
+ :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords + """ + + if not only_distance: + return super(GermanBaseLocale, self).describe( + timeframe, delta, only_distance + ) + + # German uses a different case without 'in' or 'ago' + humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta))) + + return humanized + + +class GermanLocale(GermanBaseLocale, Locale): + + names = ["de", "de_de"] + + +class SwissLocale(GermanBaseLocale, Locale): + + names = ["de_ch"] + + +class AustrianLocale(GermanBaseLocale, Locale): + + names = ["de_at"] + + month_names = [ + "", + "Jänner", + "Februar", + "März", + "April", + "Mai", + "Juni", + "Juli", + "August", + "September", + "Oktober", + "November", + "Dezember", + ] + + +class NorwegianLocale(Locale): + + names = ["nb", "nb_no"] + + past = "for {0} siden" + future = "om {0}" + + timeframes = { + "now": "nå nettopp", + "second": "et sekund", + "seconds": "{0} noen sekunder", + "minute": "ett minutt", + "minutes": "{0} minutter", + "hour": "en time", + "hours": "{0} timer", + "day": "en dag", + "days": "{0} dager", + "month": "en måned", + "months": "{0} måneder", + "year": "ett år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "mars", + "april", + "mai", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "mai", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "des", + ] + + day_names = [ + "", + "mandag", + "tirsdag", + "onsdag", + "torsdag", + "fredag", + "lørdag", + "søndag", + ] + day_abbreviations = ["", "ma", "ti", "on", "to", "fr", "lø", "sø"] + + +class NewNorwegianLocale(Locale): + + names = ["nn", "nn_no"] + + past = "for {0} sidan" + future = "om {0}" + + timeframes = { + "now": "no nettopp", + "second": "et sekund", + "seconds": "{0} nokre sekund", + "minute": "ett minutt", + "minutes": "{0} minutt", + "hour": 
"ein time", + "hours": "{0} timar", + "day": "ein dag", + "days": "{0} dagar", + "month": "en månad", + "months": "{0} månader", + "year": "eit år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "mars", + "april", + "mai", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "mai", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "des", + ] + + day_names = [ + "", + "måndag", + "tysdag", + "onsdag", + "torsdag", + "fredag", + "laurdag", + "sundag", + ] + day_abbreviations = ["", "må", "ty", "on", "to", "fr", "la", "su"] + + +class PortugueseLocale(Locale): + names = ["pt", "pt_pt"] + + past = "há {0}" + future = "em {0}" + and_word = "e" + + timeframes = { + "now": "agora", + "second": "um segundo", + "seconds": "{0} segundos", + "minute": "um minuto", + "minutes": "{0} minutos", + "hour": "uma hora", + "hours": "{0} horas", + "day": "um dia", + "days": "{0} dias", + "week": "uma semana", + "weeks": "{0} semanas", + "month": "um mês", + "months": "{0} meses", + "year": "um ano", + "years": "{0} anos", + } + + month_names = [ + "", + "Janeiro", + "Fevereiro", + "Março", + "Abril", + "Maio", + "Junho", + "Julho", + "Agosto", + "Setembro", + "Outubro", + "Novembro", + "Dezembro", + ] + month_abbreviations = [ + "", + "Jan", + "Fev", + "Mar", + "Abr", + "Mai", + "Jun", + "Jul", + "Ago", + "Set", + "Out", + "Nov", + "Dez", + ] + + day_names = [ + "", + "Segunda-feira", + "Terça-feira", + "Quarta-feira", + "Quinta-feira", + "Sexta-feira", + "Sábado", + "Domingo", + ] + day_abbreviations = ["", "Seg", "Ter", "Qua", "Qui", "Sex", "Sab", "Dom"] + + +class BrazilianPortugueseLocale(PortugueseLocale): + names = ["pt_br"] + + past = "faz {0}" + + +class TagalogLocale(Locale): + + names = ["tl", "tl_ph"] + + past = "nakaraang {0}" + future = "{0} mula ngayon" + + timeframes = { + "now": "ngayon lang", + "second": "isang segundo", + 
"seconds": "{0} segundo", + "minute": "isang minuto", + "minutes": "{0} minuto", + "hour": "isang oras", + "hours": "{0} oras", + "day": "isang araw", + "days": "{0} araw", + "week": "isang linggo", + "weeks": "{0} linggo", + "month": "isang buwan", + "months": "{0} buwan", + "year": "isang taon", + "years": "{0} taon", + } + + month_names = [ + "", + "Enero", + "Pebrero", + "Marso", + "Abril", + "Mayo", + "Hunyo", + "Hulyo", + "Agosto", + "Setyembre", + "Oktubre", + "Nobyembre", + "Disyembre", + ] + month_abbreviations = [ + "", + "Ene", + "Peb", + "Mar", + "Abr", + "May", + "Hun", + "Hul", + "Ago", + "Set", + "Okt", + "Nob", + "Dis", + ] + + day_names = [ + "", + "Lunes", + "Martes", + "Miyerkules", + "Huwebes", + "Biyernes", + "Sabado", + "Linggo", + ] + day_abbreviations = ["", "Lun", "Mar", "Miy", "Huw", "Biy", "Sab", "Lin"] + + meridians = {"am": "nu", "pm": "nh", "AM": "ng umaga", "PM": "ng hapon"} + + def _ordinal_number(self, n): + return "ika-{}".format(n) + + +class VietnameseLocale(Locale): + + names = ["vi", "vi_vn"] + + past = "{0} trước" + future = "{0} nữa" + + timeframes = { + "now": "hiện tại", + "second": "một giây", + "seconds": "{0} giây", + "minute": "một phút", + "minutes": "{0} phút", + "hour": "một giờ", + "hours": "{0} giờ", + "day": "một ngày", + "days": "{0} ngày", + "week": "một tuần", + "weeks": "{0} tuần", + "month": "một tháng", + "months": "{0} tháng", + "year": "một năm", + "years": "{0} năm", + } + + month_names = [ + "", + "Tháng Một", + "Tháng Hai", + "Tháng Ba", + "Tháng Tư", + "Tháng Năm", + "Tháng Sáu", + "Tháng Bảy", + "Tháng Tám", + "Tháng Chín", + "Tháng Mười", + "Tháng Mười Một", + "Tháng Mười Hai", + ] + month_abbreviations = [ + "", + "Tháng 1", + "Tháng 2", + "Tháng 3", + "Tháng 4", + "Tháng 5", + "Tháng 6", + "Tháng 7", + "Tháng 8", + "Tháng 9", + "Tháng 10", + "Tháng 11", + "Tháng 12", + ] + + day_names = [ + "", + "Thứ Hai", + "Thứ Ba", + "Thứ Tư", + "Thứ Năm", + "Thứ Sáu", + "Thứ Bảy", + "Chủ Nhật", + ] + 
day_abbreviations = ["", "Thứ 2", "Thứ 3", "Thứ 4", "Thứ 5", "Thứ 6", "Thứ 7", "CN"] + + +class TurkishLocale(Locale): + + names = ["tr", "tr_tr"] + + past = "{0} önce" + future = "{0} sonra" + + timeframes = { + "now": "şimdi", + "second": "bir saniye", + "seconds": "{0} saniye", + "minute": "bir dakika", + "minutes": "{0} dakika", + "hour": "bir saat", + "hours": "{0} saat", + "day": "bir gün", + "days": "{0} gün", + "month": "bir ay", + "months": "{0} ay", + "year": "yıl", + "years": "{0} yıl", + } + + month_names = [ + "", + "Ocak", + "Şubat", + "Mart", + "Nisan", + "Mayıs", + "Haziran", + "Temmuz", + "Ağustos", + "Eylül", + "Ekim", + "Kasım", + "Aralık", + ] + month_abbreviations = [ + "", + "Oca", + "Şub", + "Mar", + "Nis", + "May", + "Haz", + "Tem", + "Ağu", + "Eyl", + "Eki", + "Kas", + "Ara", + ] + + day_names = [ + "", + "Pazartesi", + "Salı", + "Çarşamba", + "Perşembe", + "Cuma", + "Cumartesi", + "Pazar", + ] + day_abbreviations = ["", "Pzt", "Sal", "Çar", "Per", "Cum", "Cmt", "Paz"] + + +class AzerbaijaniLocale(Locale): + + names = ["az", "az_az"] + + past = "{0} əvvəl" + future = "{0} sonra" + + timeframes = { + "now": "indi", + "second": "saniyə", + "seconds": "{0} saniyə", + "minute": "bir dəqiqə", + "minutes": "{0} dəqiqə", + "hour": "bir saat", + "hours": "{0} saat", + "day": "bir gün", + "days": "{0} gün", + "month": "bir ay", + "months": "{0} ay", + "year": "il", + "years": "{0} il", + } + + month_names = [ + "", + "Yanvar", + "Fevral", + "Mart", + "Aprel", + "May", + "İyun", + "İyul", + "Avqust", + "Sentyabr", + "Oktyabr", + "Noyabr", + "Dekabr", + ] + month_abbreviations = [ + "", + "Yan", + "Fev", + "Mar", + "Apr", + "May", + "İyn", + "İyl", + "Avq", + "Sen", + "Okt", + "Noy", + "Dek", + ] + + day_names = [ + "", + "Bazar ertəsi", + "Çərşənbə axşamı", + "Çərşənbə", + "Cümə axşamı", + "Cümə", + "Şənbə", + "Bazar", + ] + day_abbreviations = ["", "Ber", "Çax", "Çər", "Cax", "Cüm", "Şnb", "Bzr"] + + +class ArabicLocale(Locale): + names = [ + "ar", 
+ "ar_ae", + "ar_bh", + "ar_dj", + "ar_eg", + "ar_eh", + "ar_er", + "ar_km", + "ar_kw", + "ar_ly", + "ar_om", + "ar_qa", + "ar_sa", + "ar_sd", + "ar_so", + "ar_ss", + "ar_td", + "ar_ye", + ] + + past = "منذ {0}" + future = "خلال {0}" + + timeframes = { + "now": "الآن", + "second": "ثانية", + "seconds": {"double": "ثانيتين", "ten": "{0} ثوان", "higher": "{0} ثانية"}, + "minute": "دقيقة", + "minutes": {"double": "دقيقتين", "ten": "{0} دقائق", "higher": "{0} دقيقة"}, + "hour": "ساعة", + "hours": {"double": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"}, + "day": "يوم", + "days": {"double": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"}, + "month": "شهر", + "months": {"double": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"}, + "year": "سنة", + "years": {"double": "سنتين", "ten": "{0} سنوات", "higher": "{0} سنة"}, + } + + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "مايو", + "يونيو", + "يوليو", + "أغسطس", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "مايو", + "يونيو", + "يوليو", + "أغسطس", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + + day_names = [ + "", + "الإثنين", + "الثلاثاء", + "الأربعاء", + "الخميس", + "الجمعة", + "السبت", + "الأحد", + ] + day_abbreviations = ["", "إثنين", "ثلاثاء", "أربعاء", "خميس", "جمعة", "سبت", "أحد"] + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + delta = abs(delta) + if isinstance(form, dict): + if delta == 2: + form = form["double"] + elif delta > 2 and delta <= 10: + form = form["ten"] + else: + form = form["higher"] + + return form.format(delta) + + +class LevantArabicLocale(ArabicLocale): + names = ["ar_iq", "ar_jo", "ar_lb", "ar_ps", "ar_sy"] + month_names = [ + "", + "كانون الثاني", + "شباط", + "آذار", + "نيسان", + "أيار", + "حزيران", + "تموز", + "آب", + "أيلول", + "تشرين الأول", + "تشرين الثاني", + "كانون الأول", + ] + month_abbreviations = [ + "", + "كانون الثاني", + 
"شباط", + "آذار", + "نيسان", + "أيار", + "حزيران", + "تموز", + "آب", + "أيلول", + "تشرين الأول", + "تشرين الثاني", + "كانون الأول", + ] + + +class AlgeriaTunisiaArabicLocale(ArabicLocale): + names = ["ar_tn", "ar_dz"] + month_names = [ + "", + "جانفي", + "فيفري", + "مارس", + "أفريل", + "ماي", + "جوان", + "جويلية", + "أوت", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + month_abbreviations = [ + "", + "جانفي", + "فيفري", + "مارس", + "أفريل", + "ماي", + "جوان", + "جويلية", + "أوت", + "سبتمبر", + "أكتوبر", + "نوفمبر", + "ديسمبر", + ] + + +class MauritaniaArabicLocale(ArabicLocale): + names = ["ar_mr"] + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "إبريل", + "مايو", + "يونيو", + "يوليو", + "أغشت", + "شتمبر", + "أكتوبر", + "نوفمبر", + "دجمبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "إبريل", + "مايو", + "يونيو", + "يوليو", + "أغشت", + "شتمبر", + "أكتوبر", + "نوفمبر", + "دجمبر", + ] + + +class MoroccoArabicLocale(ArabicLocale): + names = ["ar_ma"] + month_names = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "ماي", + "يونيو", + "يوليوز", + "غشت", + "شتنبر", + "أكتوبر", + "نونبر", + "دجنبر", + ] + month_abbreviations = [ + "", + "يناير", + "فبراير", + "مارس", + "أبريل", + "ماي", + "يونيو", + "يوليوز", + "غشت", + "شتنبر", + "أكتوبر", + "نونبر", + "دجنبر", + ] + + +class IcelandicLocale(Locale): + def _format_timeframe(self, timeframe, delta): + + timeframe = self.timeframes[timeframe] + if delta < 0: + timeframe = timeframe[0] + elif delta > 0: + timeframe = timeframe[1] + + return timeframe.format(abs(delta)) + + names = ["is", "is_is"] + + past = "fyrir {0} síðan" + future = "eftir {0}" + + timeframes = { + "now": "rétt í þessu", + "second": ("sekúndu", "sekúndu"), + "seconds": ("{0} nokkrum sekúndum", "nokkrar sekúndur"), + "minute": ("einni mínútu", "eina mínútu"), + "minutes": ("{0} mínútum", "{0} mínútur"), + "hour": ("einum tíma", "einn tíma"), + "hours": ("{0} tímum", "{0} tíma"), + "day": ("einum degi", 
"einn dag"), + "days": ("{0} dögum", "{0} daga"), + "month": ("einum mánuði", "einn mánuð"), + "months": ("{0} mánuðum", "{0} mánuði"), + "year": ("einu ári", "eitt ár"), + "years": ("{0} árum", "{0} ár"), + } + + meridians = {"am": "f.h.", "pm": "e.h.", "AM": "f.h.", "PM": "e.h."} + + month_names = [ + "", + "janúar", + "febrúar", + "mars", + "apríl", + "maí", + "júní", + "júlí", + "ágúst", + "september", + "október", + "nóvember", + "desember", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maí", + "jún", + "júl", + "ágú", + "sep", + "okt", + "nóv", + "des", + ] + + day_names = [ + "", + "mánudagur", + "þriðjudagur", + "miðvikudagur", + "fimmtudagur", + "föstudagur", + "laugardagur", + "sunnudagur", + ] + day_abbreviations = ["", "mán", "þri", "mið", "fim", "fös", "lau", "sun"] + + +class DanishLocale(Locale): + + names = ["da", "da_dk"] + + past = "for {0} siden" + future = "efter {0}" + and_word = "og" + + timeframes = { + "now": "lige nu", + "second": "et sekund", + "seconds": "{0} et par sekunder", + "minute": "et minut", + "minutes": "{0} minutter", + "hour": "en time", + "hours": "{0} timer", + "day": "en dag", + "days": "{0} dage", + "month": "en måned", + "months": "{0} måneder", + "year": "et år", + "years": "{0} år", + } + + month_names = [ + "", + "januar", + "februar", + "marts", + "april", + "maj", + "juni", + "juli", + "august", + "september", + "oktober", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "mandag", + "tirsdag", + "onsdag", + "torsdag", + "fredag", + "lørdag", + "søndag", + ] + day_abbreviations = ["", "man", "tir", "ons", "tor", "fre", "lør", "søn"] + + +class MalayalamLocale(Locale): + + names = ["ml"] + + past = "{0} മുമ്പ്" + future = "{0} ശേഷം" + + timeframes = { + "now": "ഇപ്പോൾ", + "second": "ഒരു നിമിഷം", + "seconds": "{0} സെക്കന്റ്‌", + "minute": 
"ഒരു മിനിറ്റ്", + "minutes": "{0} മിനിറ്റ്", + "hour": "ഒരു മണിക്കൂർ", + "hours": "{0} മണിക്കൂർ", + "day": "ഒരു ദിവസം ", + "days": "{0} ദിവസം ", + "month": "ഒരു മാസം ", + "months": "{0} മാസം ", + "year": "ഒരു വർഷം ", + "years": "{0} വർഷം ", + } + + meridians = { + "am": "രാവിലെ", + "pm": "ഉച്ചക്ക് ശേഷം", + "AM": "രാവിലെ", + "PM": "ഉച്ചക്ക് ശേഷം", + } + + month_names = [ + "", + "ജനുവരി", + "ഫെബ്രുവരി", + "മാർച്ച്‌", + "ഏപ്രിൽ ", + "മെയ്‌ ", + "ജൂണ്‍", + "ജൂലൈ", + "ഓഗസ്റ്റ്‌", + "സെപ്റ്റംബർ", + "ഒക്ടോബർ", + "നവംബർ", + "ഡിസംബർ", + ] + month_abbreviations = [ + "", + "ജനു", + "ഫെബ് ", + "മാർ", + "ഏപ്രിൽ", + "മേയ്", + "ജൂണ്‍", + "ജൂലൈ", + "ഓഗസ്റ", + "സെപ്റ്റ", + "ഒക്ടോ", + "നവം", + "ഡിസം", + ] + + day_names = ["", "തിങ്കള്‍", "ചൊവ്വ", "ബുധന്‍", "വ്യാഴം", "വെള്ളി", "ശനി", "ഞായര്‍"] + day_abbreviations = [ + "", + "തിങ്കള്‍", + "ചൊവ്വ", + "ബുധന്‍", + "വ്യാഴം", + "വെള്ളി", + "ശനി", + "ഞായര്‍", + ] + + +class HindiLocale(Locale): + + names = ["hi"] + + past = "{0} पहले" + future = "{0} बाद" + + timeframes = { + "now": "अभी", + "second": "एक पल", + "seconds": "{0} सेकंड्", + "minute": "एक मिनट ", + "minutes": "{0} मिनट ", + "hour": "एक घंटा", + "hours": "{0} घंटे", + "day": "एक दिन", + "days": "{0} दिन", + "month": "एक माह ", + "months": "{0} महीने ", + "year": "एक वर्ष ", + "years": "{0} साल ", + } + + meridians = {"am": "सुबह", "pm": "शाम", "AM": "सुबह", "PM": "शाम"} + + month_names = [ + "", + "जनवरी", + "फरवरी", + "मार्च", + "अप्रैल ", + "मई", + "जून", + "जुलाई", + "अगस्त", + "सितंबर", + "अक्टूबर", + "नवंबर", + "दिसंबर", + ] + month_abbreviations = [ + "", + "जन", + "फ़र", + "मार्च", + "अप्रै", + "मई", + "जून", + "जुलाई", + "आग", + "सित", + "अकत", + "नवे", + "दिस", + ] + + day_names = [ + "", + "सोमवार", + "मंगलवार", + "बुधवार", + "गुरुवार", + "शुक्रवार", + "शनिवार", + "रविवार", + ] + day_abbreviations = ["", "सोम", "मंगल", "बुध", "गुरुवार", "शुक्र", "शनि", "रवि"] + + +class CzechLocale(Locale): + names = ["cs", "cs_cz"] + + timeframes = { + "now": "Teď", + "second": 
{"past": "vteřina", "future": "vteřina", "zero": "vteřina"}, + "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekund"]}, + "minute": {"past": "minutou", "future": "minutu", "zero": "{0} minut"}, + "minutes": {"past": "{0} minutami", "future": ["{0} minuty", "{0} minut"]}, + "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodin"}, + "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodin"]}, + "day": {"past": "dnem", "future": "den", "zero": "{0} dnů"}, + "days": {"past": "{0} dny", "future": ["{0} dny", "{0} dnů"]}, + "week": {"past": "týdnem", "future": "týden", "zero": "{0} týdnů"}, + "weeks": {"past": "{0} týdny", "future": ["{0} týdny", "{0} týdnů"]}, + "month": {"past": "měsícem", "future": "měsíc", "zero": "{0} měsíců"}, + "months": {"past": "{0} měsíci", "future": ["{0} měsíce", "{0} měsíců"]}, + "year": {"past": "rokem", "future": "rok", "zero": "{0} let"}, + "years": {"past": "{0} lety", "future": ["{0} roky", "{0} let"]}, + } + + past = "Před {0}" + future = "Za {0}" + + month_names = [ + "", + "leden", + "únor", + "březen", + "duben", + "květen", + "červen", + "červenec", + "srpen", + "září", + "říjen", + "listopad", + "prosinec", + ] + month_abbreviations = [ + "", + "led", + "úno", + "bře", + "dub", + "kvě", + "čvn", + "čvc", + "srp", + "zář", + "říj", + "lis", + "pro", + ] + + day_names = [ + "", + "pondělí", + "úterý", + "středa", + "čtvrtek", + "pátek", + "sobota", + "neděle", + ] + day_abbreviations = ["", "po", "út", "st", "čt", "pá", "so", "ne"] + + def _format_timeframe(self, timeframe, delta): + """Czech aware time frame format function, takes into account + the differences between past and future forms.""" + form = self.timeframes[timeframe] + if isinstance(form, dict): + if delta == 0: + form = form["zero"] # And *never* use 0 in the singular! 
+ elif delta > 0: + form = form["future"] + else: + form = form["past"] + delta = abs(delta) + + if isinstance(form, list): + if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[0] + else: + form = form[1] + + return form.format(delta) + + +class SlovakLocale(Locale): + names = ["sk", "sk_sk"] + + timeframes = { + "now": "Teraz", + "second": {"past": "sekundou", "future": "sekundu", "zero": "{0} sekúnd"}, + "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekúnd"]}, + "minute": {"past": "minútou", "future": "minútu", "zero": "{0} minút"}, + "minutes": {"past": "{0} minútami", "future": ["{0} minúty", "{0} minút"]}, + "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodín"}, + "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodín"]}, + "day": {"past": "dňom", "future": "deň", "zero": "{0} dní"}, + "days": {"past": "{0} dňami", "future": ["{0} dni", "{0} dní"]}, + "week": {"past": "týždňom", "future": "týždeň", "zero": "{0} týždňov"}, + "weeks": {"past": "{0} týždňami", "future": ["{0} týždne", "{0} týždňov"]}, + "month": {"past": "mesiacom", "future": "mesiac", "zero": "{0} mesiacov"}, + "months": {"past": "{0} mesiacmi", "future": ["{0} mesiace", "{0} mesiacov"]}, + "year": {"past": "rokom", "future": "rok", "zero": "{0} rokov"}, + "years": {"past": "{0} rokmi", "future": ["{0} roky", "{0} rokov"]}, + } + + past = "Pred {0}" + future = "O {0}" + and_word = "a" + + month_names = [ + "", + "január", + "február", + "marec", + "apríl", + "máj", + "jún", + "júl", + "august", + "september", + "október", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "máj", + "jún", + "júl", + "aug", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "pondelok", + "utorok", + "streda", + "štvrtok", + "piatok", + "sobota", + "nedeľa", + ] + day_abbreviations = ["", "po", "ut", "st", "št", "pi", "so", "ne"] + + def _format_timeframe(self, 
timeframe, delta): + """Slovak aware time frame format function, takes into account + the differences between past and future forms.""" + form = self.timeframes[timeframe] + if isinstance(form, dict): + if delta == 0: + form = form["zero"] # And *never* use 0 in the singular! + elif delta > 0: + form = form["future"] + else: + form = form["past"] + delta = abs(delta) + + if isinstance(form, list): + if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): + form = form[0] + else: + form = form[1] + + return form.format(delta) + + +class FarsiLocale(Locale): + + names = ["fa", "fa_ir"] + + past = "{0} قبل" + future = "در {0}" + + timeframes = { + "now": "اکنون", + "second": "یک لحظه", + "seconds": "{0} ثانیه", + "minute": "یک دقیقه", + "minutes": "{0} دقیقه", + "hour": "یک ساعت", + "hours": "{0} ساعت", + "day": "یک روز", + "days": "{0} روز", + "month": "یک ماه", + "months": "{0} ماه", + "year": "یک سال", + "years": "{0} سال", + } + + meridians = { + "am": "قبل از ظهر", + "pm": "بعد از ظهر", + "AM": "قبل از ظهر", + "PM": "بعد از ظهر", + } + + month_names = [ + "", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ] + + day_names = [ + "", + "دو شنبه", + "سه شنبه", + "چهارشنبه", + "پنجشنبه", + "جمعه", + "شنبه", + "یکشنبه", + ] + day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + + +class HebrewLocale(Locale): + + names = ["he", "he_IL"] + + past = "לפני {0}" + future = "בעוד {0}" + and_word = "ו" + + timeframes = { + "now": "הרגע", + "second": "שנייה", + "seconds": "{0} שניות", + "minute": "דקה", + "minutes": "{0} דקות", + "hour": "שעה", + "hours": "{0} שעות", + "2-hours": "שעתיים", + "day": "יום", + "days": "{0} ימים", + "2-days": "יומיים", + "week": "שבוע", + "weeks": "{0} שבועות", + 
"2-weeks": "שבועיים", + "month": "חודש", + "months": "{0} חודשים", + "2-months": "חודשיים", + "year": "שנה", + "years": "{0} שנים", + "2-years": "שנתיים", + } + + meridians = { + "am": 'לפנ"צ', + "pm": 'אחר"צ', + "AM": "לפני הצהריים", + "PM": "אחרי הצהריים", + } + + month_names = [ + "", + "ינואר", + "פברואר", + "מרץ", + "אפריל", + "מאי", + "יוני", + "יולי", + "אוגוסט", + "ספטמבר", + "אוקטובר", + "נובמבר", + "דצמבר", + ] + month_abbreviations = [ + "", + "ינו׳", + "פבר׳", + "מרץ", + "אפר׳", + "מאי", + "יוני", + "יולי", + "אוג׳", + "ספט׳", + "אוק׳", + "נוב׳", + "דצמ׳", + ] + + day_names = ["", "שני", "שלישי", "רביעי", "חמישי", "שישי", "שבת", "ראשון"] + day_abbreviations = ["", "ב׳", "ג׳", "ד׳", "ה׳", "ו׳", "ש׳", "א׳"] + + def _format_timeframe(self, timeframe, delta): + """Hebrew couple of aware""" + couple = "2-{}".format(timeframe) + single = timeframe.rstrip("s") + if abs(delta) == 2 and couple in self.timeframes: + key = couple + elif abs(delta) == 1 and single in self.timeframes: + key = single + else: + key = timeframe + + return self.timeframes[key].format(trunc(abs(delta))) + + def describe_multi(self, timeframes, only_distance=False): + """Describes a delta within multiple timeframes in plain language. + In Hebrew, the and word behaves a bit differently. + + :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
+ :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords + """ + + humanized = "" + for index, (timeframe, delta) in enumerate(timeframes): + last_humanized = self._format_timeframe(timeframe, delta) + if index == 0: + humanized = last_humanized + elif index == len(timeframes) - 1: # Must have at least 2 items + humanized += " " + self.and_word + if last_humanized[0].isdecimal(): + humanized += "־" + humanized += last_humanized + else: # Don't add for the last one + humanized += ", " + last_humanized + + if not only_distance: + humanized = self._format_relative(humanized, timeframe, delta) + + return humanized + + +class MarathiLocale(Locale): + + names = ["mr"] + + past = "{0} आधी" + future = "{0} नंतर" + + timeframes = { + "now": "सद्य", + "second": "एक सेकंद", + "seconds": "{0} सेकंद", + "minute": "एक मिनिट ", + "minutes": "{0} मिनिट ", + "hour": "एक तास", + "hours": "{0} तास", + "day": "एक दिवस", + "days": "{0} दिवस", + "month": "एक महिना ", + "months": "{0} महिने ", + "year": "एक वर्ष ", + "years": "{0} वर्ष ", + } + + meridians = {"am": "सकाळ", "pm": "संध्याकाळ", "AM": "सकाळ", "PM": "संध्याकाळ"} + + month_names = [ + "", + "जानेवारी", + "फेब्रुवारी", + "मार्च", + "एप्रिल", + "मे", + "जून", + "जुलै", + "अॉगस्ट", + "सप्टेंबर", + "अॉक्टोबर", + "नोव्हेंबर", + "डिसेंबर", + ] + month_abbreviations = [ + "", + "जान", + "फेब्रु", + "मार्च", + "एप्रि", + "मे", + "जून", + "जुलै", + "अॉग", + "सप्टें", + "अॉक्टो", + "नोव्हें", + "डिसें", + ] + + day_names = [ + "", + "सोमवार", + "मंगळवार", + "बुधवार", + "गुरुवार", + "शुक्रवार", + "शनिवार", + "रविवार", + ] + day_abbreviations = ["", "सोम", "मंगळ", "बुध", "गुरु", "शुक्र", "शनि", "रवि"] + + +def _map_locales(): + + locales = {} + + for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): + if issubclass(cls, Locale): # pragma: no branch + for name in cls.names: + locales[name.lower()] = cls + + return locales + + +class CatalanLocale(Locale): + names = ["ca", 
"ca_es", "ca_ad", "ca_fr", "ca_it"] + past = "Fa {0}" + future = "En {0}" + and_word = "i" + + timeframes = { + "now": "Ara mateix", + "second": "un segon", + "seconds": "{0} segons", + "minute": "1 minut", + "minutes": "{0} minuts", + "hour": "una hora", + "hours": "{0} hores", + "day": "un dia", + "days": "{0} dies", + "month": "un mes", + "months": "{0} mesos", + "year": "un any", + "years": "{0} anys", + } + + month_names = [ + "", + "gener", + "febrer", + "març", + "abril", + "maig", + "juny", + "juliol", + "agost", + "setembre", + "octubre", + "novembre", + "desembre", + ] + month_abbreviations = [ + "", + "gen.", + "febr.", + "març", + "abr.", + "maig", + "juny", + "jul.", + "ag.", + "set.", + "oct.", + "nov.", + "des.", + ] + day_names = [ + "", + "dilluns", + "dimarts", + "dimecres", + "dijous", + "divendres", + "dissabte", + "diumenge", + ] + day_abbreviations = [ + "", + "dl.", + "dt.", + "dc.", + "dj.", + "dv.", + "ds.", + "dg.", + ] + + +class BasqueLocale(Locale): + names = ["eu", "eu_eu"] + past = "duela {0}" + future = "{0}" # I don't know what's the right phrase in Basque for the future. 
+ + timeframes = { + "now": "Orain", + "second": "segundo bat", + "seconds": "{0} segundu", + "minute": "minutu bat", + "minutes": "{0} minutu", + "hour": "ordu bat", + "hours": "{0} ordu", + "day": "egun bat", + "days": "{0} egun", + "month": "hilabete bat", + "months": "{0} hilabet", + "year": "urte bat", + "years": "{0} urte", + } + + month_names = [ + "", + "urtarrilak", + "otsailak", + "martxoak", + "apirilak", + "maiatzak", + "ekainak", + "uztailak", + "abuztuak", + "irailak", + "urriak", + "azaroak", + "abenduak", + ] + month_abbreviations = [ + "", + "urt", + "ots", + "mar", + "api", + "mai", + "eka", + "uzt", + "abu", + "ira", + "urr", + "aza", + "abe", + ] + day_names = [ + "", + "astelehena", + "asteartea", + "asteazkena", + "osteguna", + "ostirala", + "larunbata", + "igandea", + ] + day_abbreviations = ["", "al", "ar", "az", "og", "ol", "lr", "ig"] + + +class HungarianLocale(Locale): + + names = ["hu", "hu_hu"] + + past = "{0} ezelőtt" + future = "{0} múlva" + + timeframes = { + "now": "éppen most", + "second": {"past": "egy második", "future": "egy második"}, + "seconds": {"past": "{0} másodpercekkel", "future": "{0} pár másodperc"}, + "minute": {"past": "egy perccel", "future": "egy perc"}, + "minutes": {"past": "{0} perccel", "future": "{0} perc"}, + "hour": {"past": "egy órával", "future": "egy óra"}, + "hours": {"past": "{0} órával", "future": "{0} óra"}, + "day": {"past": "egy nappal", "future": "egy nap"}, + "days": {"past": "{0} nappal", "future": "{0} nap"}, + "month": {"past": "egy hónappal", "future": "egy hónap"}, + "months": {"past": "{0} hónappal", "future": "{0} hónap"}, + "year": {"past": "egy évvel", "future": "egy év"}, + "years": {"past": "{0} évvel", "future": "{0} év"}, + } + + month_names = [ + "", + "január", + "február", + "március", + "április", + "május", + "június", + "július", + "augusztus", + "szeptember", + "október", + "november", + "december", + ] + month_abbreviations = [ + "", + "jan", + "febr", + "márc", + "ápr", + 
"máj", + "jún", + "júl", + "aug", + "szept", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "hétfő", + "kedd", + "szerda", + "csütörtök", + "péntek", + "szombat", + "vasárnap", + ] + day_abbreviations = ["", "hét", "kedd", "szer", "csüt", "pént", "szom", "vas"] + + meridians = {"am": "de", "pm": "du", "AM": "DE", "PM": "DU"} + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + + if isinstance(form, dict): + if delta > 0: + form = form["future"] + else: + form = form["past"] + + return form.format(abs(delta)) + + +class EsperantoLocale(Locale): + names = ["eo", "eo_xx"] + past = "antaŭ {0}" + future = "post {0}" + + timeframes = { + "now": "nun", + "second": "sekundo", + "seconds": "{0} kelkaj sekundoj", + "minute": "unu minuto", + "minutes": "{0} minutoj", + "hour": "un horo", + "hours": "{0} horoj", + "day": "unu tago", + "days": "{0} tagoj", + "month": "unu monato", + "months": "{0} monatoj", + "year": "unu jaro", + "years": "{0} jaroj", + } + + month_names = [ + "", + "januaro", + "februaro", + "marto", + "aprilo", + "majo", + "junio", + "julio", + "aŭgusto", + "septembro", + "oktobro", + "novembro", + "decembro", + ] + month_abbreviations = [ + "", + "jan", + "feb", + "mar", + "apr", + "maj", + "jun", + "jul", + "aŭg", + "sep", + "okt", + "nov", + "dec", + ] + + day_names = [ + "", + "lundo", + "mardo", + "merkredo", + "ĵaŭdo", + "vendredo", + "sabato", + "dimanĉo", + ] + day_abbreviations = ["", "lun", "mar", "mer", "ĵaŭ", "ven", "sab", "dim"] + + meridians = {"am": "atm", "pm": "ptm", "AM": "ATM", "PM": "PTM"} + + ordinal_day_re = r"((?P[1-3]?[0-9](?=a))a)" + + def _ordinal_number(self, n): + return "{}a".format(n) + + +class ThaiLocale(Locale): + + names = ["th", "th_th"] + + past = "{0}{1}ที่ผ่านมา" + future = "ในอีก{1}{0}" + + timeframes = { + "now": "ขณะนี้", + "second": "วินาที", + "seconds": "{0} ไม่กี่วินาที", + "minute": "1 นาที", + "minutes": "{0} นาที", + "hour": "1 ชั่วโมง", + "hours": "{0} ชั่วโมง", + 
"day": "1 วัน", + "days": "{0} วัน", + "month": "1 เดือน", + "months": "{0} เดือน", + "year": "1 ปี", + "years": "{0} ปี", + } + + month_names = [ + "", + "มกราคม", + "กุมภาพันธ์", + "มีนาคม", + "เมษายน", + "พฤษภาคม", + "มิถุนายน", + "กรกฎาคม", + "สิงหาคม", + "กันยายน", + "ตุลาคม", + "พฤศจิกายน", + "ธันวาคม", + ] + month_abbreviations = [ + "", + "ม.ค.", + "ก.พ.", + "มี.ค.", + "เม.ย.", + "พ.ค.", + "มิ.ย.", + "ก.ค.", + "ส.ค.", + "ก.ย.", + "ต.ค.", + "พ.ย.", + "ธ.ค.", + ] + + day_names = ["", "จันทร์", "อังคาร", "พุธ", "พฤหัสบดี", "ศุกร์", "เสาร์", "อาทิตย์"] + day_abbreviations = ["", "จ", "อ", "พ", "พฤ", "ศ", "ส", "อา"] + + meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} + + BE_OFFSET = 543 + + def year_full(self, year): + """Thai always use Buddhist Era (BE) which is CE + 543""" + year += self.BE_OFFSET + return "{:04d}".format(year) + + def year_abbreviation(self, year): + """Thai always use Buddhist Era (BE) which is CE + 543""" + year += self.BE_OFFSET + return "{:04d}".format(year)[2:] + + def _format_relative(self, humanized, timeframe, delta): + """Thai normally doesn't have any space between words""" + if timeframe == "now": + return humanized + space = "" if timeframe == "seconds" else " " + direction = self.past if delta < 0 else self.future + + return direction.format(humanized, space) + + +class BengaliLocale(Locale): + + names = ["bn", "bn_bd", "bn_in"] + + past = "{0} আগে" + future = "{0} পরে" + + timeframes = { + "now": "এখন", + "second": "একটি দ্বিতীয়", + "seconds": "{0} সেকেন্ড", + "minute": "এক মিনিট", + "minutes": "{0} মিনিট", + "hour": "এক ঘণ্টা", + "hours": "{0} ঘণ্টা", + "day": "এক দিন", + "days": "{0} দিন", + "month": "এক মাস", + "months": "{0} মাস ", + "year": "এক বছর", + "years": "{0} বছর", + } + + meridians = {"am": "সকাল", "pm": "বিকাল", "AM": "সকাল", "PM": "বিকাল"} + + month_names = [ + "", + "জানুয়ারি", + "ফেব্রুয়ারি", + "মার্চ", + "এপ্রিল", + "মে", + "জুন", + "জুলাই", + "আগস্ট", + "সেপ্টেম্বর", + "অক্টোবর", + "নভেম্বর", 
+ "ডিসেম্বর", + ] + month_abbreviations = [ + "", + "জানু", + "ফেব", + "মার্চ", + "এপ্রি", + "মে", + "জুন", + "জুল", + "অগা", + "সেপ্ট", + "অক্টো", + "নভে", + "ডিসে", + ] + + day_names = [ + "", + "সোমবার", + "মঙ্গলবার", + "বুধবার", + "বৃহস্পতিবার", + "শুক্রবার", + "শনিবার", + "রবিবার", + ] + day_abbreviations = ["", "সোম", "মঙ্গল", "বুধ", "বৃহঃ", "শুক্র", "শনি", "রবি"] + + def _ordinal_number(self, n): + if n > 10 or n == 0: + return "{}তম".format(n) + if n in [1, 5, 7, 8, 9, 10]: + return "{}ম".format(n) + if n in [2, 3]: + return "{}য়".format(n) + if n == 4: + return "{}র্থ".format(n) + if n == 6: + return "{}ষ্ঠ".format(n) + + +class RomanshLocale(Locale): + + names = ["rm", "rm_ch"] + + past = "avant {0}" + future = "en {0}" + + timeframes = { + "now": "en quest mument", + "second": "in secunda", + "seconds": "{0} secundas", + "minute": "ina minuta", + "minutes": "{0} minutas", + "hour": "in'ura", + "hours": "{0} ura", + "day": "in di", + "days": "{0} dis", + "month": "in mais", + "months": "{0} mais", + "year": "in onn", + "years": "{0} onns", + } + + month_names = [ + "", + "schaner", + "favrer", + "mars", + "avrigl", + "matg", + "zercladur", + "fanadur", + "avust", + "settember", + "october", + "november", + "december", + ] + + month_abbreviations = [ + "", + "schan", + "fav", + "mars", + "avr", + "matg", + "zer", + "fan", + "avu", + "set", + "oct", + "nov", + "dec", + ] + + day_names = [ + "", + "glindesdi", + "mardi", + "mesemna", + "gievgia", + "venderdi", + "sonda", + "dumengia", + ] + + day_abbreviations = ["", "gli", "ma", "me", "gie", "ve", "so", "du"] + + +class RomanianLocale(Locale): + names = ["ro", "ro_ro"] + + past = "{0} în urmă" + future = "peste {0}" + and_word = "și" + + timeframes = { + "now": "acum", + "second": "o secunda", + "seconds": "{0} câteva secunde", + "minute": "un minut", + "minutes": "{0} minute", + "hour": "o oră", + "hours": "{0} ore", + "day": "o zi", + "days": "{0} zile", + "month": "o lună", + "months": "{0} luni", + 
"year": "un an", + "years": "{0} ani", + } + + month_names = [ + "", + "ianuarie", + "februarie", + "martie", + "aprilie", + "mai", + "iunie", + "iulie", + "august", + "septembrie", + "octombrie", + "noiembrie", + "decembrie", + ] + month_abbreviations = [ + "", + "ian", + "febr", + "mart", + "apr", + "mai", + "iun", + "iul", + "aug", + "sept", + "oct", + "nov", + "dec", + ] + + day_names = [ + "", + "luni", + "marți", + "miercuri", + "joi", + "vineri", + "sâmbătă", + "duminică", + ] + day_abbreviations = ["", "Lun", "Mar", "Mie", "Joi", "Vin", "Sâm", "Dum"] + + +class SlovenianLocale(Locale): + names = ["sl", "sl_si"] + + past = "pred {0}" + future = "čez {0}" + and_word = "in" + + timeframes = { + "now": "zdaj", + "second": "sekundo", + "seconds": "{0} sekund", + "minute": "minuta", + "minutes": "{0} minutami", + "hour": "uro", + "hours": "{0} ur", + "day": "dan", + "days": "{0} dni", + "month": "mesec", + "months": "{0} mesecev", + "year": "leto", + "years": "{0} let", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + month_names = [ + "", + "Januar", + "Februar", + "Marec", + "April", + "Maj", + "Junij", + "Julij", + "Avgust", + "September", + "Oktober", + "November", + "December", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "Maj", + "Jun", + "Jul", + "Avg", + "Sep", + "Okt", + "Nov", + "Dec", + ] + + day_names = [ + "", + "Ponedeljek", + "Torek", + "Sreda", + "Četrtek", + "Petek", + "Sobota", + "Nedelja", + ] + + day_abbreviations = ["", "Pon", "Tor", "Sre", "Čet", "Pet", "Sob", "Ned"] + + +class IndonesianLocale(Locale): + + names = ["id", "id_id"] + + past = "{0} yang lalu" + future = "dalam {0}" + and_word = "dan" + + timeframes = { + "now": "baru saja", + "second": "1 sebentar", + "seconds": "{0} detik", + "minute": "1 menit", + "minutes": "{0} menit", + "hour": "1 jam", + "hours": "{0} jam", + "day": "1 hari", + "days": "{0} hari", + "month": "1 bulan", + "months": "{0} bulan", + "year": "1 tahun", + "years": 
"{0} tahun", + } + + meridians = {"am": "", "pm": "", "AM": "", "PM": ""} + + month_names = [ + "", + "Januari", + "Februari", + "Maret", + "April", + "Mei", + "Juni", + "Juli", + "Agustus", + "September", + "Oktober", + "November", + "Desember", + ] + + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mar", + "Apr", + "Mei", + "Jun", + "Jul", + "Ags", + "Sept", + "Okt", + "Nov", + "Des", + ] + + day_names = ["", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu", "Minggu"] + + day_abbreviations = [ + "", + "Senin", + "Selasa", + "Rabu", + "Kamis", + "Jumat", + "Sabtu", + "Minggu", + ] + + +class NepaliLocale(Locale): + names = ["ne", "ne_np"] + + past = "{0} पहिले" + future = "{0} पछी" + + timeframes = { + "now": "अहिले", + "second": "एक सेकेन्ड", + "seconds": "{0} सेकण्ड", + "minute": "मिनेट", + "minutes": "{0} मिनेट", + "hour": "एक घण्टा", + "hours": "{0} घण्टा", + "day": "एक दिन", + "days": "{0} दिन", + "month": "एक महिना", + "months": "{0} महिना", + "year": "एक बर्ष", + "years": "बर्ष", + } + + meridians = {"am": "पूर्वाह्न", "pm": "अपरान्ह", "AM": "पूर्वाह्न", "PM": "अपरान्ह"} + + month_names = [ + "", + "जनवरी", + "फेब्रुअरी", + "मार्च", + "एप्रील", + "मे", + "जुन", + "जुलाई", + "अगष्ट", + "सेप्टेम्बर", + "अक्टोबर", + "नोवेम्बर", + "डिसेम्बर", + ] + month_abbreviations = [ + "", + "जन", + "फेब", + "मार्च", + "एप्रील", + "मे", + "जुन", + "जुलाई", + "अग", + "सेप", + "अक्ट", + "नोव", + "डिस", + ] + + day_names = [ + "", + "सोमवार", + "मंगलवार", + "बुधवार", + "बिहिवार", + "शुक्रवार", + "शनिवार", + "आइतवार", + ] + + day_abbreviations = ["", "सोम", "मंगल", "बुध", "बिहि", "शुक्र", "शनि", "आइत"] + + +class EstonianLocale(Locale): + names = ["ee", "et"] + + past = "{0} tagasi" + future = "{0} pärast" + and_word = "ja" + + timeframes = { + "now": {"past": "just nüüd", "future": "just nüüd"}, + "second": {"past": "üks sekund", "future": "ühe sekundi"}, + "seconds": {"past": "{0} sekundit", "future": "{0} sekundi"}, + "minute": {"past": "üks minut", "future": "ühe 
minuti"}, + "minutes": {"past": "{0} minutit", "future": "{0} minuti"}, + "hour": {"past": "tund aega", "future": "tunni aja"}, + "hours": {"past": "{0} tundi", "future": "{0} tunni"}, + "day": {"past": "üks päev", "future": "ühe päeva"}, + "days": {"past": "{0} päeva", "future": "{0} päeva"}, + "month": {"past": "üks kuu", "future": "ühe kuu"}, + "months": {"past": "{0} kuud", "future": "{0} kuu"}, + "year": {"past": "üks aasta", "future": "ühe aasta"}, + "years": {"past": "{0} aastat", "future": "{0} aasta"}, + } + + month_names = [ + "", + "Jaanuar", + "Veebruar", + "Märts", + "Aprill", + "Mai", + "Juuni", + "Juuli", + "August", + "September", + "Oktoober", + "November", + "Detsember", + ] + month_abbreviations = [ + "", + "Jan", + "Veb", + "Mär", + "Apr", + "Mai", + "Jun", + "Jul", + "Aug", + "Sep", + "Okt", + "Nov", + "Dets", + ] + + day_names = [ + "", + "Esmaspäev", + "Teisipäev", + "Kolmapäev", + "Neljapäev", + "Reede", + "Laupäev", + "Pühapäev", + ] + day_abbreviations = ["", "Esm", "Teis", "Kolm", "Nelj", "Re", "Lau", "Püh"] + + def _format_timeframe(self, timeframe, delta): + form = self.timeframes[timeframe] + if delta > 0: + form = form["future"] + else: + form = form["past"] + return form.format(abs(delta)) + + +class SwahiliLocale(Locale): + + names = [ + "sw", + "sw_ke", + "sw_tz", + ] + + past = "{0} iliyopita" + future = "muda wa {0}" + and_word = "na" + + timeframes = { + "now": "sasa hivi", + "second": "sekunde", + "seconds": "sekunde {0}", + "minute": "dakika moja", + "minutes": "dakika {0}", + "hour": "saa moja", + "hours": "saa {0}", + "day": "siku moja", + "days": "siku {0}", + "week": "wiki moja", + "weeks": "wiki {0}", + "month": "mwezi moja", + "months": "miezi {0}", + "year": "mwaka moja", + "years": "miaka {0}", + } + + meridians = {"am": "asu", "pm": "mch", "AM": "ASU", "PM": "MCH"} + + month_names = [ + "", + "Januari", + "Februari", + "Machi", + "Aprili", + "Mei", + "Juni", + "Julai", + "Agosti", + "Septemba", + "Oktoba", + 
"Novemba", + "Desemba", + ] + month_abbreviations = [ + "", + "Jan", + "Feb", + "Mac", + "Apr", + "Mei", + "Jun", + "Jul", + "Ago", + "Sep", + "Okt", + "Nov", + "Des", + ] + + day_names = [ + "", + "Jumatatu", + "Jumanne", + "Jumatano", + "Alhamisi", + "Ijumaa", + "Jumamosi", + "Jumapili", + ] + day_abbreviations = [ + "", + "Jumatatu", + "Jumanne", + "Jumatano", + "Alhamisi", + "Ijumaa", + "Jumamosi", + "Jumapili", + ] + + +_locales = _map_locales() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py new file mode 100644 index 0000000000..243fd1721c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py @@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import re +from datetime import datetime, timedelta + +from dateutil import tz + +from arrow import locales +from arrow.util import iso_to_gregorian, next_weekday, normalize_timestamp + +try: + from functools import lru_cache +except ImportError: # pragma: no cover + from backports.functools_lru_cache import lru_cache # pragma: no cover + + +class ParserError(ValueError): + pass + + +# Allows for ParserErrors to be propagated from _build_datetime() +# when day_of_year errors occur. +# Before this, the ParserErrors were caught by the try/except in +# _parse_multiformat() and the appropriate error message was not +# transmitted to the user. 
+class ParserMatchError(ParserError): + pass + + +class DateTimeParser(object): + + _FORMAT_RE = re.compile( + r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)" + ) + _ESCAPE_RE = re.compile(r"\[[^\[\]]*\]") + + _ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}") + _ONE_OR_TWO_OR_THREE_DIGIT_RE = re.compile(r"\d{1,3}") + _ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+") + _TWO_DIGIT_RE = re.compile(r"\d{2}") + _THREE_DIGIT_RE = re.compile(r"\d{3}") + _FOUR_DIGIT_RE = re.compile(r"\d{4}") + _TZ_Z_RE = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z") + _TZ_ZZ_RE = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z") + _TZ_NAME_RE = re.compile(r"\w[\w+\-/]+") + # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will + # break cases like "15 Jul 2000" and a format list (see issue #447) + _TIMESTAMP_RE = re.compile(r"^\-?\d+\.?\d+$") + _TIMESTAMP_EXPANDED_RE = re.compile(r"^\-?\d+$") + _TIME_RE = re.compile(r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$") + _WEEK_DATE_RE = re.compile(r"(?P\d{4})[\-]?W(?P\d{2})[\-]?(?P\d)?") + + _BASE_INPUT_RE_MAP = { + "YYYY": _FOUR_DIGIT_RE, + "YY": _TWO_DIGIT_RE, + "MM": _TWO_DIGIT_RE, + "M": _ONE_OR_TWO_DIGIT_RE, + "DDDD": _THREE_DIGIT_RE, + "DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE, + "DD": _TWO_DIGIT_RE, + "D": _ONE_OR_TWO_DIGIT_RE, + "HH": _TWO_DIGIT_RE, + "H": _ONE_OR_TWO_DIGIT_RE, + "hh": _TWO_DIGIT_RE, + "h": _ONE_OR_TWO_DIGIT_RE, + "mm": _TWO_DIGIT_RE, + "m": _ONE_OR_TWO_DIGIT_RE, + "ss": _TWO_DIGIT_RE, + "s": _ONE_OR_TWO_DIGIT_RE, + "X": _TIMESTAMP_RE, + "x": _TIMESTAMP_EXPANDED_RE, + "ZZZ": _TZ_NAME_RE, + "ZZ": _TZ_ZZ_RE, + "Z": _TZ_Z_RE, + "S": _ONE_OR_MORE_DIGIT_RE, + "W": _WEEK_DATE_RE, + } + + SEPARATORS = ["-", "/", "."] + + def __init__(self, locale="en_us", cache_size=0): + + self.locale = locales.get_locale(locale) + self._input_re_map = self._BASE_INPUT_RE_MAP.copy() + self._input_re_map.update( + { + "MMMM": self._generate_choice_re( + 
self.locale.month_names[1:], re.IGNORECASE + ), + "MMM": self._generate_choice_re( + self.locale.month_abbreviations[1:], re.IGNORECASE + ), + "Do": re.compile(self.locale.ordinal_day_re), + "dddd": self._generate_choice_re( + self.locale.day_names[1:], re.IGNORECASE + ), + "ddd": self._generate_choice_re( + self.locale.day_abbreviations[1:], re.IGNORECASE + ), + "d": re.compile(r"[1-7]"), + "a": self._generate_choice_re( + (self.locale.meridians["am"], self.locale.meridians["pm"]) + ), + # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to + # ensure backwards compatibility of this token + "A": self._generate_choice_re(self.locale.meridians.values()), + } + ) + if cache_size > 0: + self._generate_pattern_re = lru_cache(maxsize=cache_size)( + self._generate_pattern_re + ) + + # TODO: since we support more than ISO 8601, we should rename this function + # IDEA: break into multiple functions + def parse_iso(self, datetime_string, normalize_whitespace=False): + + if normalize_whitespace: + datetime_string = re.sub(r"\s+", " ", datetime_string.strip()) + + has_space_divider = " " in datetime_string + has_t_divider = "T" in datetime_string + + num_spaces = datetime_string.count(" ") + if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0: + raise ParserError( + "Expected an ISO 8601-like string, but was given '{}'. 
Try passing in a format string to resolve this.".format( + datetime_string + ) + ) + + has_time = has_space_divider or has_t_divider + has_tz = False + + # date formats (ISO 8601 and others) to test against + # NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used) + formats = [ + "YYYY-MM-DD", + "YYYY-M-DD", + "YYYY-M-D", + "YYYY/MM/DD", + "YYYY/M/DD", + "YYYY/M/D", + "YYYY.MM.DD", + "YYYY.M.DD", + "YYYY.M.D", + "YYYYMMDD", + "YYYY-DDDD", + "YYYYDDDD", + "YYYY-MM", + "YYYY/MM", + "YYYY.MM", + "YYYY", + "W", + ] + + if has_time: + + if has_space_divider: + date_string, time_string = datetime_string.split(" ", 1) + else: + date_string, time_string = datetime_string.split("T", 1) + + time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE) + + time_components = self._TIME_RE.match(time_parts[0]) + + if time_components is None: + raise ParserError( + "Invalid time component provided. Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format." 
+ ) + + ( + hours, + minutes, + seconds, + subseconds_sep, + subseconds, + ) = time_components.groups() + + has_tz = len(time_parts) == 2 + has_minutes = minutes is not None + has_seconds = seconds is not None + has_subseconds = subseconds is not None + + is_basic_time_format = ":" not in time_parts[0] + tz_format = "Z" + + # use 'ZZ' token instead since tz offset is present in non-basic format + if has_tz and ":" in time_parts[1]: + tz_format = "ZZ" + + time_sep = "" if is_basic_time_format else ":" + + if has_subseconds: + time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format( + time_sep=time_sep, subseconds_sep=subseconds_sep + ) + elif has_seconds: + time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep) + elif has_minutes: + time_string = "HH{time_sep}mm".format(time_sep=time_sep) + else: + time_string = "HH" + + if has_space_divider: + formats = ["{} {}".format(f, time_string) for f in formats] + else: + formats = ["{}T{}".format(f, time_string) for f in formats] + + if has_time and has_tz: + # Add "Z" or "ZZ" to the format strings to indicate to + # _parse_token() that a timezone needs to be parsed + formats = ["{}{}".format(f, tz_format) for f in formats] + + return self._parse_multiformat(datetime_string, formats) + + def parse(self, datetime_string, fmt, normalize_whitespace=False): + + if normalize_whitespace: + datetime_string = re.sub(r"\s+", " ", datetime_string) + + if isinstance(fmt, list): + return self._parse_multiformat(datetime_string, fmt) + + fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt) + + match = fmt_pattern_re.search(datetime_string) + + if match is None: + raise ParserMatchError( + "Failed to match '{}' when parsing '{}'".format(fmt, datetime_string) + ) + + parts = {} + for token in fmt_tokens: + if token == "Do": + value = match.group("value") + elif token == "W": + value = (match.group("year"), match.group("week"), match.group("day")) + else: + value = match.group(token) + 
self._parse_token(token, value, parts) + + return self._build_datetime(parts) + + def _generate_pattern_re(self, fmt): + + # fmt is a string of tokens like 'YYYY-MM-DD' + # we construct a new string by replacing each + # token by its pattern: + # 'YYYY-MM-DD' -> '(?P\d{4})-(?P\d{2})-(?P
\d{2})' + tokens = [] + offset = 0 + + # Escape all special RegEx chars + escaped_fmt = re.escape(fmt) + + # Extract the bracketed expressions to be reinserted later. + escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt) + + # Any number of S is the same as one. + # TODO: allow users to specify the number of digits to parse + escaped_fmt = re.sub(r"S+", "S", escaped_fmt) + + escaped_data = re.findall(self._ESCAPE_RE, fmt) + + fmt_pattern = escaped_fmt + + for m in self._FORMAT_RE.finditer(escaped_fmt): + token = m.group(0) + try: + input_re = self._input_re_map[token] + except KeyError: + raise ParserError("Unrecognized token '{}'".format(token)) + input_pattern = "(?P<{}>{})".format(token, input_re.pattern) + tokens.append(token) + # a pattern doesn't have the same length as the token + # it replaces! We keep the difference in the offset variable. + # This works because the string is scanned left-to-right and matches + # are returned in the order found by finditer. + fmt_pattern = ( + fmt_pattern[: m.start() + offset] + + input_pattern + + fmt_pattern[m.end() + offset :] + ) + offset += len(input_pattern) - (m.end() - m.start()) + + final_fmt_pattern = "" + split_fmt = fmt_pattern.split(r"\#") + + # Due to the way Python splits, 'split_fmt' will always be longer + for i in range(len(split_fmt)): + final_fmt_pattern += split_fmt[i] + if i < len(escaped_data): + final_fmt_pattern += escaped_data[i][1:-1] + + # Wrap final_fmt_pattern in a custom word boundary to strictly + # match the formatting pattern and filter out date and time formats + # that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah, + # blah1998-09-12blah. The custom word boundary matches every character + # that is not a whitespace character to allow for searching for a date + # and time string in a natural language sentence. Therefore, searching + # for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will + # work properly. 
+ # Certain punctuation before or after the target pattern such as + # "1998-09-12," is permitted. For the full list of valid punctuation, + # see the documentation. + + starting_word_boundary = ( + r"(?\s])" # This is the list of punctuation that is ok before the pattern (i.e. "It can't not be these characters before the pattern") + r"(\b|^)" # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a negative number through i.e. before epoch numbers + ) + ending_word_boundary = ( + r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks can appear after the pattern at most 1 time + r"(?!\S))" # Don't allow any non-whitespace character after the punctuation + ) + bounded_fmt_pattern = r"{}{}{}".format( + starting_word_boundary, final_fmt_pattern, ending_word_boundary + ) + + return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE) + + def _parse_token(self, token, value, parts): + + if token == "YYYY": + parts["year"] = int(value) + + elif token == "YY": + value = int(value) + parts["year"] = 1900 + value if value > 68 else 2000 + value + + elif token in ["MMMM", "MMM"]: + parts["month"] = self.locale.month_number(value.lower()) + + elif token in ["MM", "M"]: + parts["month"] = int(value) + + elif token in ["DDDD", "DDD"]: + parts["day_of_year"] = int(value) + + elif token in ["DD", "D"]: + parts["day"] = int(value) + + elif token == "Do": + parts["day"] = int(value) + + elif token == "dddd": + # locale day names are 1-indexed + day_of_week = [x.lower() for x in self.locale.day_names].index( + value.lower() + ) + parts["day_of_week"] = day_of_week - 1 + + elif token == "ddd": + # locale day abbreviations are 1-indexed + day_of_week = [x.lower() for x in self.locale.day_abbreviations].index( + value.lower() + ) + parts["day_of_week"] = day_of_week - 1 + + elif token.upper() in ["HH", "H"]: + parts["hour"] = int(value) + + elif token in ["mm", "m"]: + 
parts["minute"] = int(value) + + elif token in ["ss", "s"]: + parts["second"] = int(value) + + elif token == "S": + # We have the *most significant* digits of an arbitrary-precision integer. + # We want the six most significant digits as an integer, rounded. + # IDEA: add nanosecond support somehow? Need datetime support for it first. + value = value.ljust(7, str("0")) + + # floating-point (IEEE-754) defaults to half-to-even rounding + seventh_digit = int(value[6]) + if seventh_digit == 5: + rounding = int(value[5]) % 2 + elif seventh_digit > 5: + rounding = 1 + else: + rounding = 0 + + parts["microsecond"] = int(value[:6]) + rounding + + elif token == "X": + parts["timestamp"] = float(value) + + elif token == "x": + parts["expanded_timestamp"] = int(value) + + elif token in ["ZZZ", "ZZ", "Z"]: + parts["tzinfo"] = TzinfoParser.parse(value) + + elif token in ["a", "A"]: + if value in (self.locale.meridians["am"], self.locale.meridians["AM"]): + parts["am_pm"] = "am" + elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]): + parts["am_pm"] = "pm" + + elif token == "W": + parts["weekdate"] = value + + @staticmethod + def _build_datetime(parts): + + weekdate = parts.get("weekdate") + + if weekdate is not None: + # we can use strptime (%G, %V, %u) in python 3.6 but these tokens aren't available before that + year, week = int(weekdate[0]), int(weekdate[1]) + + if weekdate[2] is not None: + day = int(weekdate[2]) + else: + # day not given, default to 1 + day = 1 + + dt = iso_to_gregorian(year, week, day) + parts["year"] = dt.year + parts["month"] = dt.month + parts["day"] = dt.day + + timestamp = parts.get("timestamp") + + if timestamp is not None: + return datetime.fromtimestamp(timestamp, tz=tz.tzutc()) + + expanded_timestamp = parts.get("expanded_timestamp") + + if expanded_timestamp is not None: + return datetime.fromtimestamp( + normalize_timestamp(expanded_timestamp), + tz=tz.tzutc(), + ) + + day_of_year = parts.get("day_of_year") + + if 
day_of_year is not None: + year = parts.get("year") + month = parts.get("month") + if year is None: + raise ParserError( + "Year component is required with the DDD and DDDD tokens." + ) + + if month is not None: + raise ParserError( + "Month component is not allowed with the DDD and DDDD tokens." + ) + + date_string = "{}-{}".format(year, day_of_year) + try: + dt = datetime.strptime(date_string, "%Y-%j") + except ValueError: + raise ParserError( + "The provided day of year '{}' is invalid.".format(day_of_year) + ) + + parts["year"] = dt.year + parts["month"] = dt.month + parts["day"] = dt.day + + day_of_week = parts.get("day_of_week") + day = parts.get("day") + + # If day is passed, ignore day of week + if day_of_week is not None and day is None: + year = parts.get("year", 1970) + month = parts.get("month", 1) + day = 1 + + # dddd => first day of week after epoch + # dddd YYYY => first day of week in specified year + # dddd MM YYYY => first day of week in specified year and month + # dddd MM => first day after epoch in specified month + next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week) + parts["year"] = next_weekday_dt.year + parts["month"] = next_weekday_dt.month + parts["day"] = next_weekday_dt.day + + am_pm = parts.get("am_pm") + hour = parts.get("hour", 0) + + if am_pm == "pm" and hour < 12: + hour += 12 + elif am_pm == "am" and hour == 12: + hour = 0 + + # Support for midnight at the end of day + if hour == 24: + if parts.get("minute", 0) != 0: + raise ParserError("Midnight at the end of day must not contain minutes") + if parts.get("second", 0) != 0: + raise ParserError("Midnight at the end of day must not contain seconds") + if parts.get("microsecond", 0) != 0: + raise ParserError( + "Midnight at the end of day must not contain microseconds" + ) + hour = 0 + day_increment = 1 + else: + day_increment = 0 + + # account for rounding up to 1000000 + microsecond = parts.get("microsecond", 0) + if microsecond == 1000000: + microsecond = 0 + 
second_increment = 1 + else: + second_increment = 0 + + increment = timedelta(days=day_increment, seconds=second_increment) + + return ( + datetime( + year=parts.get("year", 1), + month=parts.get("month", 1), + day=parts.get("day", 1), + hour=hour, + minute=parts.get("minute", 0), + second=parts.get("second", 0), + microsecond=microsecond, + tzinfo=parts.get("tzinfo"), + ) + + increment + ) + + def _parse_multiformat(self, string, formats): + + _datetime = None + + for fmt in formats: + try: + _datetime = self.parse(string, fmt) + break + except ParserMatchError: + pass + + if _datetime is None: + raise ParserError( + "Could not match input '{}' to any of the following formats: {}".format( + string, ", ".join(formats) + ) + ) + + return _datetime + + # generates a capture group of choices separated by an OR operator + @staticmethod + def _generate_choice_re(choices, flags=0): + return re.compile(r"({})".format("|".join(choices)), flags=flags) + + +class TzinfoParser(object): + _TZINFO_RE = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$") + + @classmethod + def parse(cls, tzinfo_string): + + tzinfo = None + + if tzinfo_string == "local": + tzinfo = tz.tzlocal() + + elif tzinfo_string in ["utc", "UTC", "Z"]: + tzinfo = tz.tzutc() + + else: + + iso_match = cls._TZINFO_RE.match(tzinfo_string) + + if iso_match: + sign, hours, minutes = iso_match.groups() + if minutes is None: + minutes = 0 + seconds = int(hours) * 3600 + int(minutes) * 60 + + if sign == "-": + seconds *= -1 + + tzinfo = tz.tzoffset(None, seconds) + + else: + tzinfo = tz.gettz(tzinfo_string) + + if tzinfo is None: + raise ParserError( + 'Could not parse timezone expression "{}"'.format(tzinfo_string) + ) + + return tzinfo diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py new file mode 100644 index 0000000000..acce8878df --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import datetime +import numbers + +from dateutil.rrule import WEEKLY, rrule + +from arrow.constants import MAX_TIMESTAMP, MAX_TIMESTAMP_MS, MAX_TIMESTAMP_US + + +def next_weekday(start_date, weekday): + """Get next weekday from the specified start date. + + :param start_date: Datetime object representing the start date. + :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday). + :return: Datetime object corresponding to the next weekday after start_date. + + Usage:: + + # Get first Monday after epoch + >>> next_weekday(datetime(1970, 1, 1), 0) + 1970-01-05 00:00:00 + + # Get first Thursday after epoch + >>> next_weekday(datetime(1970, 1, 1), 3) + 1970-01-01 00:00:00 + + # Get first Sunday after epoch + >>> next_weekday(datetime(1970, 1, 1), 6) + 1970-01-04 00:00:00 + """ + if weekday < 0 or weekday > 6: + raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).") + return rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0] + + +def total_seconds(td): + """Get total seconds for timedelta.""" + return td.total_seconds() + + +def is_timestamp(value): + """Check if value is a valid timestamp.""" + if isinstance(value, bool): + return False + if not ( + isinstance(value, numbers.Integral) + or isinstance(value, float) + or isinstance(value, str) + ): + return False + try: + float(value) + return True + except ValueError: + return False + + +def normalize_timestamp(timestamp): + """Normalize millisecond and microsecond timestamps into normal timestamps.""" + if timestamp > MAX_TIMESTAMP: + if timestamp < MAX_TIMESTAMP_MS: + timestamp /= 1e3 + elif timestamp < MAX_TIMESTAMP_US: + timestamp /= 1e6 + else: + raise ValueError( + "The specified timestamp '{}' is too large.".format(timestamp) + ) + return timestamp + + +# Credit to 
https://stackoverflow.com/a/1700069 +def iso_to_gregorian(iso_year, iso_week, iso_day): + """Converts an ISO week date tuple into a datetime object.""" + + if not 1 <= iso_week <= 53: + raise ValueError("ISO Calendar week value must be between 1-53.") + + if not 1 <= iso_day <= 7: + raise ValueError("ISO Calendar day value must be between 1-7") + + # The first week of the year always contains 4 Jan. + fourth_jan = datetime.date(iso_year, 1, 4) + delta = datetime.timedelta(fourth_jan.isoweekday() - 1) + year_start = fourth_jan - delta + gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) + + return gregorian + + +def validate_bounds(bounds): + if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]": + raise ValueError( + 'Invalid bounds. Please select between "()", "(]", "[)", or "[]".' + ) + + +# Python 2.7 / 3.0+ definitions for isstr function. + +try: # pragma: no cover + basestring + + def isstr(s): + return isinstance(s, basestring) # noqa: F821 + + +except NameError: # pragma: no cover + + def isstr(s): + return isinstance(s, str) + + +__all__ = ["next_weekday", "total_seconds", "is_timestamp", "isstr", "iso_to_gregorian"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile new file mode 100644 index 0000000000..d4bb2cbb9e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". 
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py new file mode 100644 index 0000000000..aaf3c50822 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- + +# -- Path setup -------------------------------------------------------------- + +import io +import os +import sys + +sys.path.insert(0, os.path.abspath("..")) + +about = {} +with io.open("../arrow/_version.py", "r", encoding="utf-8") as f: + exec(f.read(), about) + +# -- Project information ----------------------------------------------------- + +project = u"Arrow 🏹" +copyright = "2020, Chris Smith" +author = "Chris Smith" + +release = about["__version__"] + +# -- General configuration --------------------------------------------------- + +extensions = ["sphinx.ext.autodoc"] + +templates_path = [] + +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +master_doc = "index" +source_suffix = ".rst" +pygments_style = "sphinx" + +language = None + +# -- Options for HTML output ------------------------------------------------- + +html_theme = "alabaster" +html_theme_path = [] +html_static_path = [] + +html_show_sourcelink = False +html_show_sphinx = False +html_show_copyright = True + +# https://alabaster.readthedocs.io/en/latest/customization.html +html_theme_options = { + "description": "Arrow is a sensible and human-friendly approach to dates, times and timestamps.", + "github_user": "arrow-py", + "github_repo": "arrow", + "github_banner": True, + "show_related": False, + 
"show_powered_by": False, + "github_button": True, + "github_type": "star", + "github_count": "true", # must be a string +} + +html_sidebars = { + "**": ["about.html", "localtoc.html", "relations.html", "searchbox.html"] +} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst new file mode 100644 index 0000000000..e2830b04f3 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst @@ -0,0 +1,566 @@ +Arrow: Better dates & times for Python +====================================== + +Release v\ |release| (`Installation`_) (`Changelog `_) + +.. include:: ../README.rst + :start-after: start-inclusion-marker-do-not-remove + :end-before: end-inclusion-marker-do-not-remove + +User's Guide +------------ + +Creation +~~~~~~~~ + +Get 'now' easily: + +.. code-block:: python + + >>> arrow.utcnow() + + + >>> arrow.now() + + + >>> arrow.now('US/Pacific') + + +Create from timestamps (:code:`int` or :code:`float`): + +.. code-block:: python + + >>> arrow.get(1367900664) + + + >>> arrow.get(1367900664.152325) + + +Use a naive or timezone-aware datetime, or flexibly specify a timezone: + +.. code-block:: python + + >>> arrow.get(datetime.utcnow()) + + + >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') + + + >>> from dateutil import tz + >>> arrow.get(datetime(2013, 5, 5), tz.gettz('US/Pacific')) + + + >>> arrow.get(datetime.now(tz.gettz('US/Pacific'))) + + +Parse from a string: + +.. code-block:: python + + >>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss') + + +Search a date in a string: + +.. code-block:: python + + >>> arrow.get('June was born in May 1980', 'MMMM YYYY') + + +Some ISO 8601 compliant strings are recognized and parsed without a format string: + + >>> arrow.get('2013-09-30T15:34:00.000-07:00') + + +Arrow objects can be instantiated directly too, with the same arguments as a datetime: + +.. 
code-block:: python + + >>> arrow.get(2013, 5, 5) + + + >>> arrow.Arrow(2013, 5, 5) + + +Properties +~~~~~~~~~~ + +Get a datetime or timestamp representation: + +.. code-block:: python + + >>> a = arrow.utcnow() + >>> a.datetime + datetime.datetime(2013, 5, 7, 4, 38, 15, 447644, tzinfo=tzutc()) + + >>> a.timestamp + 1367901495 + +Get a naive datetime, and tzinfo: + +.. code-block:: python + + >>> a.naive + datetime.datetime(2013, 5, 7, 4, 38, 15, 447644) + + >>> a.tzinfo + tzutc() + +Get any datetime value: + +.. code-block:: python + + >>> a.year + 2013 + +Call datetime functions that return properties: + +.. code-block:: python + + >>> a.date() + datetime.date(2013, 5, 7) + + >>> a.time() + datetime.time(4, 38, 15, 447644) + +Replace & Shift +~~~~~~~~~~~~~~~ + +Get a new :class:`Arrow ` object, with altered attributes, just as you would with a datetime: + +.. code-block:: python + + >>> arw = arrow.utcnow() + >>> arw + + + >>> arw.replace(hour=4, minute=40) + + +Or, get one with attributes shifted forward or backward: + +.. code-block:: python + + >>> arw.shift(weeks=+3) + + +Even replace the timezone without altering other attributes: + +.. code-block:: python + + >>> arw.replace(tzinfo='US/Pacific') + + +Move between the earlier and later moments of an ambiguous time: + +.. code-block:: python + + >>> paris_transition = arrow.Arrow(2019, 10, 27, 2, tzinfo="Europe/Paris", fold=0) + >>> paris_transition + + >>> paris_transition.ambiguous + True + >>> paris_transition.replace(fold=1) + + +Format +~~~~~~ + +.. code-block:: python + + >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') + '2013-05-07 05:23:16 -00:00' + +Convert +~~~~~~~ + +Convert from UTC to other timezones by name or tzinfo: + +.. code-block:: python + + >>> utc = arrow.utcnow() + >>> utc + + + >>> utc.to('US/Pacific') + + + >>> utc.to(tz.gettz('US/Pacific')) + + +Or using shorthand: + +.. 
code-block:: python + + >>> utc.to('local') + + + >>> utc.to('local').to('utc') + + + +Humanize +~~~~~~~~ + +Humanize relative to now: + +.. code-block:: python + + >>> past = arrow.utcnow().shift(hours=-1) + >>> past.humanize() + 'an hour ago' + +Or another Arrow, or datetime: + +.. code-block:: python + + >>> present = arrow.utcnow() + >>> future = present.shift(hours=2) + >>> future.humanize(present) + 'in 2 hours' + +Indicate time as relative or include only the distance + +.. code-block:: python + + >>> present = arrow.utcnow() + >>> future = present.shift(hours=2) + >>> future.humanize(present) + 'in 2 hours' + >>> future.humanize(present, only_distance=True) + '2 hours' + + +Indicate a specific time granularity (or multiple): + +.. code-block:: python + + >>> present = arrow.utcnow() + >>> future = present.shift(minutes=66) + >>> future.humanize(present, granularity="minute") + 'in 66 minutes' + >>> future.humanize(present, granularity=["hour", "minute"]) + 'in an hour and 6 minutes' + >>> present.humanize(future, granularity=["hour", "minute"]) + 'an hour and 6 minutes ago' + >>> future.humanize(present, only_distance=True, granularity=["hour", "minute"]) + 'an hour and 6 minutes' + +Support for a growing number of locales (see ``locales.py`` for supported languages): + +.. code-block:: python + + + >>> future = arrow.utcnow().shift(hours=1) + >>> future.humanize(a, locale='ru') + 'через 2 час(а,ов)' + + +Ranges & Spans +~~~~~~~~~~~~~~ + +Get the time span of any unit: + +.. code-block:: python + + >>> arrow.utcnow().span('hour') + (, ) + +Or just get the floor and ceiling: + +.. code-block:: python + + >>> arrow.utcnow().floor('hour') + + + >>> arrow.utcnow().ceil('hour') + + +You can also get a range of time spans: + +.. code-block:: python + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.span_range('hour', start, end): + ... print r + ... 
+ (, ) + (, ) + (, ) + (, ) + (, ) + +Or just iterate over a range of time: + +.. code-block:: python + + >>> start = datetime(2013, 5, 5, 12, 30) + >>> end = datetime(2013, 5, 5, 17, 15) + >>> for r in arrow.Arrow.range('hour', start, end): + ... print repr(r) + ... + + + + + + +.. toctree:: + :maxdepth: 2 + +Factories +~~~~~~~~~ + +Use factories to harness Arrow's module API for a custom Arrow-derived type. First, derive your type: + +.. code-block:: python + + >>> class CustomArrow(arrow.Arrow): + ... + ... def days_till_xmas(self): + ... + ... xmas = arrow.Arrow(self.year, 12, 25) + ... + ... if self > xmas: + ... xmas = xmas.shift(years=1) + ... + ... return (xmas - self).days + + +Then get and use a factory for it: + +.. code-block:: python + + >>> factory = arrow.ArrowFactory(CustomArrow) + >>> custom = factory.utcnow() + >>> custom + >>> + + >>> custom.days_till_xmas() + >>> 211 + +Supported Tokens +~~~~~~~~~~~~~~~~ + +Use the following tokens for parsing and formatting. Note that they are **not** the same as the tokens for `strptime `_: + ++--------------------------------+--------------+-------------------------------------------+ +| |Token |Output | ++================================+==============+===========================================+ +|**Year** |YYYY |2000, 2001, 2002 ... 2012, 2013 | ++--------------------------------+--------------+-------------------------------------------+ +| |YY |00, 01, 02 ... 12, 13 | ++--------------------------------+--------------+-------------------------------------------+ +|**Month** |MMMM |January, February, March ... [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |MMM |Jan, Feb, Mar ... [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |MM |01, 02, 03 ... 11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +| |M |1, 2, 3 ... 
11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +|**Day of Year** |DDDD |001, 002, 003 ... 364, 365 | ++--------------------------------+--------------+-------------------------------------------+ +| |DDD |1, 2, 3 ... 364, 365 | ++--------------------------------+--------------+-------------------------------------------+ +|**Day of Month** |DD |01, 02, 03 ... 30, 31 | ++--------------------------------+--------------+-------------------------------------------+ +| |D |1, 2, 3 ... 30, 31 | ++--------------------------------+--------------+-------------------------------------------+ +| |Do |1st, 2nd, 3rd ... 30th, 31st | ++--------------------------------+--------------+-------------------------------------------+ +|**Day of Week** |dddd |Monday, Tuesday, Wednesday ... [#t2]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |ddd |Mon, Tue, Wed ... [#t2]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |d |1, 2, 3 ... 6, 7 | ++--------------------------------+--------------+-------------------------------------------+ +|**ISO week date** |W |2011-W05-4, 2019-W17 | ++--------------------------------+--------------+-------------------------------------------+ +|**Hour** |HH |00, 01, 02 ... 23, 24 | ++--------------------------------+--------------+-------------------------------------------+ +| |H |0, 1, 2 ... 23, 24 | ++--------------------------------+--------------+-------------------------------------------+ +| |hh |01, 02, 03 ... 11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +| |h |1, 2, 3 ... 
11, 12 | ++--------------------------------+--------------+-------------------------------------------+ +|**AM / PM** |A |AM, PM, am, pm [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |a |am, pm [#t1]_ | ++--------------------------------+--------------+-------------------------------------------+ +|**Minute** |mm |00, 01, 02 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +| |m |0, 1, 2 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +|**Second** |ss |00, 01, 02 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +| |s |0, 1, 2 ... 58, 59 | ++--------------------------------+--------------+-------------------------------------------+ +|**Sub-second** |S... |0, 02, 003, 000006, 123123123123... [#t3]_ | ++--------------------------------+--------------+-------------------------------------------+ +|**Timezone** |ZZZ |Asia/Baku, Europe/Warsaw, GMT ... [#t4]_ | ++--------------------------------+--------------+-------------------------------------------+ +| |ZZ |-07:00, -06:00 ... +06:00, +07:00, +08, Z | ++--------------------------------+--------------+-------------------------------------------+ +| |Z |-0700, -0600 ... +0600, +0700, +08, Z | ++--------------------------------+--------------+-------------------------------------------+ +|**Seconds Timestamp** |X |1381685817, 1381685817.915482 ... [#t5]_ | ++--------------------------------+--------------+-------------------------------------------+ +|**ms or µs Timestamp** |x |1569980330813, 1569980330813221 | ++--------------------------------+--------------+-------------------------------------------+ + +.. rubric:: Footnotes + +.. [#t1] localization support for parsing and formatting +.. [#t2] localization support only for formatting +.. 
[#t3] the result is truncated to microseconds, with `half-to-even rounding `_. +.. [#t4] timezone names from `tz database `_ provided via dateutil package, note that abbreviations such as MST, PDT, BRST are unlikely to parse due to ambiguity. Use the full IANA zone name instead (Asia/Shanghai, Europe/London, America/Chicago etc). +.. [#t5] this token cannot be used for parsing timestamps out of natural language strings due to compatibility reasons + +Built-in Formats +++++++++++++++++ + +There are several formatting standards that are provided as built-in tokens. + +.. code-block:: python + + >>> arw = arrow.utcnow() + >>> arw.format(arrow.FORMAT_ATOM) + '2020-05-27 10:30:35+00:00' + >>> arw.format(arrow.FORMAT_COOKIE) + 'Wednesday, 27-May-2020 10:30:35 UTC' + >>> arw.format(arrow.FORMAT_RSS) + 'Wed, 27 May 2020 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC822) + 'Wed, 27 May 20 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC850) + 'Wednesday, 27-May-20 10:30:35 UTC' + >>> arw.format(arrow.FORMAT_RFC1036) + 'Wed, 27 May 20 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC1123) + 'Wed, 27 May 2020 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC2822) + 'Wed, 27 May 2020 10:30:35 +0000' + >>> arw.format(arrow.FORMAT_RFC3339) + '2020-05-27 10:30:35+00:00' + >>> arw.format(arrow.FORMAT_W3C) + '2020-05-27 10:30:35+00:00' + +Escaping Formats +~~~~~~~~~~~~~~~~ + +Tokens, phrases, and regular expressions in a format string can be escaped when parsing and formatting by enclosing them within square brackets. + +Tokens & Phrases +++++++++++++++++ + +Any `token `_ or phrase can be escaped as follows: + +.. 
code-block:: python + + >>> fmt = "YYYY-MM-DD h [h] m" + >>> arw = arrow.get("2018-03-09 8 h 40", fmt) + + >>> arw.format(fmt) + '2018-03-09 8 h 40' + + >>> fmt = "YYYY-MM-DD h [hello] m" + >>> arw = arrow.get("2018-03-09 8 hello 40", fmt) + + >>> arw.format(fmt) + '2018-03-09 8 hello 40' + + >>> fmt = "YYYY-MM-DD h [hello world] m" + >>> arw = arrow.get("2018-03-09 8 hello world 40", fmt) + + >>> arw.format(fmt) + '2018-03-09 8 hello world 40' + +This can be useful for parsing dates in different locales such as French, in which it is common to format time strings as "8 h 40" rather than "8:40". + +Regular Expressions ++++++++++++++++++++ + +You can also escape regular expressions by enclosing them within square brackets. In the following example, we are using the regular expression :code:`\s+` to match any number of whitespace characters that separate the tokens. This is useful if you do not know the number of spaces between tokens ahead of time (e.g. in log files). + +.. code-block:: python + + >>> fmt = r"ddd[\s+]MMM[\s+]DD[\s+]HH:mm:ss[\s+]YYYY" + >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) + + + >>> arrow.get("Mon \tSep 08 16:41:45 2014", fmt) + + + >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) + + +Punctuation +~~~~~~~~~~~ + +Date and time formats may be fenced on either side by one punctuation character from the following list: ``, . ; : ? ! " \` ' [ ] { } ( ) < >`` + +.. 
code-block:: python + + >>> arrow.get("Cool date: 2019-10-31T09:12:45.123456+04:30.", "YYYY-MM-DDTHH:mm:ss.SZZ") + + + >>> arrow.get("Tomorrow (2019-10-31) is Halloween!", "YYYY-MM-DD") + + + >>> arrow.get("Halloween is on 2019.10.31.", "YYYY.MM.DD") + + + >>> arrow.get("It's Halloween tomorrow (2019-10-31)!", "YYYY-MM-DD") + # Raises exception because there are multiple punctuation marks following the date + +Redundant Whitespace +~~~~~~~~~~~~~~~~~~~~ + +Redundant whitespace characters (spaces, tabs, and newlines) can be normalized automatically by passing in the ``normalize_whitespace`` flag to ``arrow.get``: + +.. code-block:: python + + >>> arrow.get('\t \n 2013-05-05T12:30:45.123456 \t \n', normalize_whitespace=True) + + + >>> arrow.get('2013-05-05 T \n 12:30:45\t123456', 'YYYY-MM-DD T HH:mm:ss S', normalize_whitespace=True) + + +API Guide +--------- + +arrow.arrow +~~~~~~~~~~~ + +.. automodule:: arrow.arrow + :members: + +arrow.factory +~~~~~~~~~~~~~ + +.. automodule:: arrow.factory + :members: + +arrow.api +~~~~~~~~~ + +.. automodule:: arrow.api + :members: + +arrow.locale +~~~~~~~~~~~~ + +.. automodule:: arrow.locales + :members: + :undoc-members: + +Release History +--------------- + +.. toctree:: + :maxdepth: 2 + + releases diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat new file mode 100644 index 0000000000..922152e96a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst new file mode 100644 index 0000000000..22e1e59c8c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst @@ -0,0 +1,3 @@ +.. _releases: + +.. include:: ../CHANGELOG.rst diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt new file mode 100644 index 0000000000..df565d8384 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt @@ -0,0 +1,14 @@ +backports.functools_lru_cache==1.6.1; python_version == "2.7" +dateparser==0.7.* +pre-commit==1.21.*; python_version <= "3.5" +pre-commit==2.6.*; python_version >= "3.6" +pytest==4.6.*; python_version == "2.7" +pytest==6.0.*; python_version >= "3.5" +pytest-cov==2.10.* +pytest-mock==2.0.*; python_version == "2.7" +pytest-mock==3.2.*; python_version >= "3.5" +python-dateutil==2.8.* +pytz==2019.* +simplejson==3.17.* +sphinx==1.8.*; python_version == "2.7" +sphinx==3.2.*; python_version >= "3.5" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg new file mode 100644 index 0000000000..2a9acf13da --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py new file mode 100644 index 0000000000..dc4f0e77d5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +import io + +from setuptools import setup + +with io.open("README.rst", "r", encoding="utf-8") as f: + readme = f.read() + +about = {} +with io.open("arrow/_version.py", "r", encoding="utf-8") as f: + exec(f.read(), about) + +setup( + name="arrow", + version=about["__version__"], + description="Better dates & times for Python", + long_description=readme, + long_description_content_type="text/x-rst", + url="https://arrow.readthedocs.io", + author="Chris Smith", + author_email="crsmithdev@gmail.com", + license="Apache 2.0", + packages=["arrow"], + zip_safe=False, + python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", + install_requires=[ + "python-dateutil>=2.7.0", + "backports.functools_lru_cache>=1.2.1;python_version=='2.7'", + ], + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Topic :: Software Development :: Libraries :: Python Modules", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + ], + keywords="arrow date time datetime timestamp timezone humanize", + project_urls={ + "Repository": "https://github.com/arrow-py/arrow", + "Bug Reports": "https://github.com/arrow-py/arrow/issues", + "Documentation": 
"https://arrow.readthedocs.io", + }, +) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py new file mode 100644 index 0000000000..5bc8a4af2e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +from datetime import datetime + +import pytest +from dateutil import tz as dateutil_tz + +from arrow import arrow, factory, formatter, locales, parser + + +@pytest.fixture(scope="class") +def time_utcnow(request): + request.cls.arrow = arrow.Arrow.utcnow() + + +@pytest.fixture(scope="class") +def time_2013_01_01(request): + request.cls.now = arrow.Arrow.utcnow() + request.cls.arrow = arrow.Arrow(2013, 1, 1) + request.cls.datetime = datetime(2013, 1, 1) + + +@pytest.fixture(scope="class") +def time_2013_02_03(request): + request.cls.arrow = arrow.Arrow(2013, 2, 3, 12, 30, 45, 1) + + +@pytest.fixture(scope="class") +def time_2013_02_15(request): + request.cls.datetime = datetime(2013, 2, 15, 3, 41, 22, 8923) + request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) + + +@pytest.fixture(scope="class") +def time_1975_12_25(request): + request.cls.datetime = datetime( + 1975, 12, 25, 14, 15, 16, tzinfo=dateutil_tz.gettz("America/New_York") + ) + request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) + + +@pytest.fixture(scope="class") +def arrow_formatter(request): + request.cls.formatter = formatter.DateTimeFormatter() + + +@pytest.fixture(scope="class") +def arrow_factory(request): + request.cls.factory = factory.ArrowFactory() + + +@pytest.fixture(scope="class") +def lang_locales(request): + request.cls.locales = 
locales._locales + + +@pytest.fixture(scope="class") +def lang_locale(request): + # As locale test classes are prefixed with Test, we are dynamically getting the locale by the test class name. + # TestEnglishLocale -> EnglishLocale + name = request.cls.__name__[4:] + request.cls.locale = locales.get_locale_by_class_name(name) + + +@pytest.fixture(scope="class") +def dt_parser(request): + request.cls.parser = parser.DateTimeParser() + + +@pytest.fixture(scope="class") +def dt_parser_regex(request): + request.cls.format_regex = parser.DateTimeParser._FORMAT_RE + + +@pytest.fixture(scope="class") +def tzinfo_parser(request): + request.cls.parser = parser.TzinfoParser() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py new file mode 100644 index 0000000000..9b19a27cd9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +import arrow + + +class TestModule: + def test_get(self, mocker): + mocker.patch("arrow.api._factory.get", return_value="result") + + assert arrow.api.get() == "result" + + def test_utcnow(self, mocker): + mocker.patch("arrow.api._factory.utcnow", return_value="utcnow") + + assert arrow.api.utcnow() == "utcnow" + + def test_now(self, mocker): + mocker.patch("arrow.api._factory.now", tz="tz", return_value="now") + + assert arrow.api.now("tz") == "now" + + def test_factory(self): + class MockCustomArrowClass(arrow.Arrow): + pass + + result = arrow.api.factory(MockCustomArrowClass) + + assert isinstance(result, arrow.factory.ArrowFactory) + assert isinstance(result.utcnow(), MockCustomArrowClass) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py new file mode 100644 index 0000000000..b0bd20a5e3 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py @@ -0,0 +1,2150 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals + +import calendar +import pickle +import sys +import time +from datetime import date, datetime, timedelta + +import dateutil +import pytest +import pytz +import simplejson as json +from dateutil import tz +from dateutil.relativedelta import FR, MO, SA, SU, TH, TU, WE + +from arrow import arrow + +from .utils import assert_datetime_equality + + +class TestTestArrowInit: + def test_init_bad_input(self): + + with pytest.raises(TypeError): + arrow.Arrow(2013) + + with pytest.raises(TypeError): + arrow.Arrow(2013, 2) + + with pytest.raises(ValueError): + arrow.Arrow(2013, 2, 2, 12, 30, 45, 9999999) + + def test_init(self): + + result = arrow.Arrow(2013, 2, 2) + self.expected = datetime(2013, 2, 2, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12) + self.expected = datetime(2013, 2, 2, 12, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12, 30) + self.expected = datetime(2013, 2, 2, 12, 30, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12, 30, 45) + self.expected = datetime(2013, 2, 2, 12, 30, 45, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow(2013, 2, 2, 12, 30, 45, 999999) + self.expected = datetime(2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.tzutc()) + assert result._datetime == self.expected + + result = arrow.Arrow( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") + ) + self.expected = datetime( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") + ) + assert result._datetime == self.expected + + # regression tests for issue #626 + def test_init_pytz_timezone(self): + + result = arrow.Arrow( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=pytz.timezone("Europe/Paris") 
+ ) + self.expected = datetime( + 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") + ) + assert result._datetime == self.expected + assert_datetime_equality(result._datetime, self.expected, 1) + + def test_init_with_fold(self): + before = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") + after = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1) + + assert hasattr(before, "fold") + assert hasattr(after, "fold") + + # PEP-495 requires the comparisons below to be true + assert before == after + assert before.utcoffset() != after.utcoffset() + + +class TestTestArrowFactory: + def test_now(self): + + result = arrow.Arrow.now() + + assert_datetime_equality( + result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) + ) + + def test_utcnow(self): + + result = arrow.Arrow.utcnow() + + assert_datetime_equality( + result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + assert result.fold == 0 + + def test_fromtimestamp(self): + + timestamp = time.time() + + result = arrow.Arrow.fromtimestamp(timestamp) + assert_datetime_equality( + result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) + ) + + result = arrow.Arrow.fromtimestamp(timestamp, tzinfo=tz.gettz("Europe/Paris")) + assert_datetime_equality( + result._datetime, + datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), + ) + + result = arrow.Arrow.fromtimestamp(timestamp, tzinfo="Europe/Paris") + assert_datetime_equality( + result._datetime, + datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), + ) + + with pytest.raises(ValueError): + arrow.Arrow.fromtimestamp("invalid timestamp") + + def test_utcfromtimestamp(self): + + timestamp = time.time() + + result = arrow.Arrow.utcfromtimestamp(timestamp) + assert_datetime_equality( + result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + with pytest.raises(ValueError): + arrow.Arrow.utcfromtimestamp("invalid timestamp") + + def test_fromdatetime(self): + + dt = datetime(2013, 
2, 3, 12, 30, 45, 1) + + result = arrow.Arrow.fromdatetime(dt) + + assert result._datetime == dt.replace(tzinfo=tz.tzutc()) + + def test_fromdatetime_dt_tzinfo(self): + + dt = datetime(2013, 2, 3, 12, 30, 45, 1, tzinfo=tz.gettz("US/Pacific")) + + result = arrow.Arrow.fromdatetime(dt) + + assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) + + def test_fromdatetime_tzinfo_arg(self): + + dt = datetime(2013, 2, 3, 12, 30, 45, 1) + + result = arrow.Arrow.fromdatetime(dt, tz.gettz("US/Pacific")) + + assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) + + def test_fromdate(self): + + dt = date(2013, 2, 3) + + result = arrow.Arrow.fromdate(dt, tz.gettz("US/Pacific")) + + assert result._datetime == datetime(2013, 2, 3, tzinfo=tz.gettz("US/Pacific")) + + def test_strptime(self): + + formatted = datetime(2013, 2, 3, 12, 30, 45).strftime("%Y-%m-%d %H:%M:%S") + + result = arrow.Arrow.strptime(formatted, "%Y-%m-%d %H:%M:%S") + assert result._datetime == datetime(2013, 2, 3, 12, 30, 45, tzinfo=tz.tzutc()) + + result = arrow.Arrow.strptime( + formatted, "%Y-%m-%d %H:%M:%S", tzinfo=tz.gettz("Europe/Paris") + ) + assert result._datetime == datetime( + 2013, 2, 3, 12, 30, 45, tzinfo=tz.gettz("Europe/Paris") + ) + + +@pytest.mark.usefixtures("time_2013_02_03") +class TestTestArrowRepresentation: + def test_repr(self): + + result = self.arrow.__repr__() + + assert result == "".format(self.arrow._datetime.isoformat()) + + def test_str(self): + + result = self.arrow.__str__() + + assert result == self.arrow._datetime.isoformat() + + def test_hash(self): + + result = self.arrow.__hash__() + + assert result == self.arrow._datetime.__hash__() + + def test_format(self): + + result = "{:YYYY-MM-DD}".format(self.arrow) + + assert result == "2013-02-03" + + def test_bare_format(self): + + result = self.arrow.format() + + assert result == "2013-02-03 12:30:45+00:00" + + def test_format_no_format_string(self): + + result = "{}".format(self.arrow) + + assert 
result == str(self.arrow) + + def test_clone(self): + + result = self.arrow.clone() + + assert result is not self.arrow + assert result._datetime == self.arrow._datetime + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowAttribute: + def test_getattr_base(self): + + with pytest.raises(AttributeError): + self.arrow.prop + + def test_getattr_week(self): + + assert self.arrow.week == 1 + + def test_getattr_quarter(self): + # start dates + q1 = arrow.Arrow(2013, 1, 1) + q2 = arrow.Arrow(2013, 4, 1) + q3 = arrow.Arrow(2013, 8, 1) + q4 = arrow.Arrow(2013, 10, 1) + assert q1.quarter == 1 + assert q2.quarter == 2 + assert q3.quarter == 3 + assert q4.quarter == 4 + + # end dates + q1 = arrow.Arrow(2013, 3, 31) + q2 = arrow.Arrow(2013, 6, 30) + q3 = arrow.Arrow(2013, 9, 30) + q4 = arrow.Arrow(2013, 12, 31) + assert q1.quarter == 1 + assert q2.quarter == 2 + assert q3.quarter == 3 + assert q4.quarter == 4 + + def test_getattr_dt_value(self): + + assert self.arrow.year == 2013 + + def test_tzinfo(self): + + self.arrow.tzinfo = tz.gettz("PST") + assert self.arrow.tzinfo == tz.gettz("PST") + + def test_naive(self): + + assert self.arrow.naive == self.arrow._datetime.replace(tzinfo=None) + + def test_timestamp(self): + + assert self.arrow.timestamp == calendar.timegm( + self.arrow._datetime.utctimetuple() + ) + + with pytest.warns(DeprecationWarning): + self.arrow.timestamp + + def test_int_timestamp(self): + + assert self.arrow.int_timestamp == calendar.timegm( + self.arrow._datetime.utctimetuple() + ) + + def test_float_timestamp(self): + + result = self.arrow.float_timestamp - self.arrow.timestamp + + assert result == self.arrow.microsecond + + def test_getattr_fold(self): + + # UTC is always unambiguous + assert self.now.fold == 0 + + ambiguous_dt = arrow.Arrow( + 2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1 + ) + assert ambiguous_dt.fold == 1 + + with pytest.raises(AttributeError): + ambiguous_dt.fold = 0 + + def test_getattr_ambiguous(self): + + 
assert not self.now.ambiguous + + ambiguous_dt = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") + + assert ambiguous_dt.ambiguous + + def test_getattr_imaginary(self): + + assert not self.now.imaginary + + imaginary_dt = arrow.Arrow(2013, 3, 31, 2, 30, tzinfo="Europe/Paris") + + assert imaginary_dt.imaginary + + +@pytest.mark.usefixtures("time_utcnow") +class TestArrowComparison: + def test_eq(self): + + assert self.arrow == self.arrow + assert self.arrow == self.arrow.datetime + assert not (self.arrow == "abc") + + def test_ne(self): + + assert not (self.arrow != self.arrow) + assert not (self.arrow != self.arrow.datetime) + assert self.arrow != "abc" + + def test_gt(self): + + arrow_cmp = self.arrow.shift(minutes=1) + + assert not (self.arrow > self.arrow) + assert not (self.arrow > self.arrow.datetime) + + with pytest.raises(TypeError): + self.arrow > "abc" + + assert self.arrow < arrow_cmp + assert self.arrow < arrow_cmp.datetime + + def test_ge(self): + + with pytest.raises(TypeError): + self.arrow >= "abc" + + assert self.arrow >= self.arrow + assert self.arrow >= self.arrow.datetime + + def test_lt(self): + + arrow_cmp = self.arrow.shift(minutes=1) + + assert not (self.arrow < self.arrow) + assert not (self.arrow < self.arrow.datetime) + + with pytest.raises(TypeError): + self.arrow < "abc" + + assert self.arrow < arrow_cmp + assert self.arrow < arrow_cmp.datetime + + def test_le(self): + + with pytest.raises(TypeError): + self.arrow <= "abc" + + assert self.arrow <= self.arrow + assert self.arrow <= self.arrow.datetime + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowMath: + def test_add_timedelta(self): + + result = self.arrow.__add__(timedelta(days=1)) + + assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc()) + + def test_add_other(self): + + with pytest.raises(TypeError): + self.arrow + 1 + + def test_radd(self): + + result = self.arrow.__radd__(timedelta(days=1)) + + assert result._datetime == datetime(2013, 1, 
2, tzinfo=tz.tzutc()) + + def test_sub_timedelta(self): + + result = self.arrow.__sub__(timedelta(days=1)) + + assert result._datetime == datetime(2012, 12, 31, tzinfo=tz.tzutc()) + + def test_sub_datetime(self): + + result = self.arrow.__sub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) + + assert result == timedelta(days=11) + + def test_sub_arrow(self): + + result = self.arrow.__sub__(arrow.Arrow(2012, 12, 21, tzinfo=tz.tzutc())) + + assert result == timedelta(days=11) + + def test_sub_other(self): + + with pytest.raises(TypeError): + self.arrow - object() + + def test_rsub_datetime(self): + + result = self.arrow.__rsub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) + + assert result == timedelta(days=-11) + + def test_rsub_other(self): + + with pytest.raises(TypeError): + timedelta(days=1) - self.arrow + + +@pytest.mark.usefixtures("time_utcnow") +class TestArrowDatetimeInterface: + def test_date(self): + + result = self.arrow.date() + + assert result == self.arrow._datetime.date() + + def test_time(self): + + result = self.arrow.time() + + assert result == self.arrow._datetime.time() + + def test_timetz(self): + + result = self.arrow.timetz() + + assert result == self.arrow._datetime.timetz() + + def test_astimezone(self): + + other_tz = tz.gettz("US/Pacific") + + result = self.arrow.astimezone(other_tz) + + assert result == self.arrow._datetime.astimezone(other_tz) + + def test_utcoffset(self): + + result = self.arrow.utcoffset() + + assert result == self.arrow._datetime.utcoffset() + + def test_dst(self): + + result = self.arrow.dst() + + assert result == self.arrow._datetime.dst() + + def test_timetuple(self): + + result = self.arrow.timetuple() + + assert result == self.arrow._datetime.timetuple() + + def test_utctimetuple(self): + + result = self.arrow.utctimetuple() + + assert result == self.arrow._datetime.utctimetuple() + + def test_toordinal(self): + + result = self.arrow.toordinal() + + assert result == self.arrow._datetime.toordinal() + + def 
test_weekday(self): + + result = self.arrow.weekday() + + assert result == self.arrow._datetime.weekday() + + def test_isoweekday(self): + + result = self.arrow.isoweekday() + + assert result == self.arrow._datetime.isoweekday() + + def test_isocalendar(self): + + result = self.arrow.isocalendar() + + assert result == self.arrow._datetime.isocalendar() + + def test_isoformat(self): + + result = self.arrow.isoformat() + + assert result == self.arrow._datetime.isoformat() + + def test_simplejson(self): + + result = json.dumps({"v": self.arrow.for_json()}, for_json=True) + + assert json.loads(result)["v"] == self.arrow._datetime.isoformat() + + def test_ctime(self): + + result = self.arrow.ctime() + + assert result == self.arrow._datetime.ctime() + + def test_strftime(self): + + result = self.arrow.strftime("%Y") + + assert result == self.arrow._datetime.strftime("%Y") + + +class TestArrowFalsePositiveDst: + """These tests relate to issues #376 and #551. + The key points in both issues are that arrow will assign a UTC timezone if none is provided and + .to() will change other attributes to be correct whereas .replace() only changes the specified attribute. 
+ + Issue 376 + >>> arrow.get('2016-11-06').to('America/New_York').ceil('day') + < Arrow [2016-11-05T23:59:59.999999-04:00] > + + Issue 551 + >>> just_before = arrow.get('2018-11-04T01:59:59.999999') + >>> just_before + 2018-11-04T01:59:59.999999+00:00 + >>> just_after = just_before.shift(microseconds=1) + >>> just_after + 2018-11-04T02:00:00+00:00 + >>> just_before_eastern = just_before.replace(tzinfo='US/Eastern') + >>> just_before_eastern + 2018-11-04T01:59:59.999999-04:00 + >>> just_after_eastern = just_after.replace(tzinfo='US/Eastern') + >>> just_after_eastern + 2018-11-04T02:00:00-05:00 + """ + + def test_dst(self): + self.before_1 = arrow.Arrow( + 2016, 11, 6, 3, 59, tzinfo=tz.gettz("America/New_York") + ) + self.before_2 = arrow.Arrow(2016, 11, 6, tzinfo=tz.gettz("America/New_York")) + self.after_1 = arrow.Arrow(2016, 11, 6, 4, tzinfo=tz.gettz("America/New_York")) + self.after_2 = arrow.Arrow( + 2016, 11, 6, 23, 59, tzinfo=tz.gettz("America/New_York") + ) + self.before_3 = arrow.Arrow( + 2018, 11, 4, 3, 59, tzinfo=tz.gettz("America/New_York") + ) + self.before_4 = arrow.Arrow(2018, 11, 4, tzinfo=tz.gettz("America/New_York")) + self.after_3 = arrow.Arrow(2018, 11, 4, 4, tzinfo=tz.gettz("America/New_York")) + self.after_4 = arrow.Arrow( + 2018, 11, 4, 23, 59, tzinfo=tz.gettz("America/New_York") + ) + assert self.before_1.day == self.before_2.day + assert self.after_1.day == self.after_2.day + assert self.before_3.day == self.before_4.day + assert self.after_3.day == self.after_4.day + + +class TestArrowConversion: + def test_to(self): + + dt_from = datetime.now() + arrow_from = arrow.Arrow.fromdatetime(dt_from, tz.gettz("US/Pacific")) + + self.expected = dt_from.replace(tzinfo=tz.gettz("US/Pacific")).astimezone( + tz.tzutc() + ) + + assert arrow_from.to("UTC").datetime == self.expected + assert arrow_from.to(tz.tzutc()).datetime == self.expected + + # issue #368 + def test_to_pacific_then_utc(self): + result = arrow.Arrow(2018, 11, 4, 1, 
tzinfo="-08:00").to("US/Pacific").to("UTC") + assert result == arrow.Arrow(2018, 11, 4, 9) + + # issue #368 + def test_to_amsterdam_then_utc(self): + result = arrow.Arrow(2016, 10, 30).to("Europe/Amsterdam") + assert result.utcoffset() == timedelta(seconds=7200) + + # regression test for #690 + def test_to_israel_same_offset(self): + + result = arrow.Arrow(2019, 10, 27, 2, 21, 1, tzinfo="+03:00").to("Israel") + expected = arrow.Arrow(2019, 10, 27, 1, 21, 1, tzinfo="Israel") + + assert result == expected + assert result.utcoffset() != expected.utcoffset() + + # issue 315 + def test_anchorage_dst(self): + before = arrow.Arrow(2016, 3, 13, 1, 59, tzinfo="America/Anchorage") + after = arrow.Arrow(2016, 3, 13, 2, 1, tzinfo="America/Anchorage") + + assert before.utcoffset() != after.utcoffset() + + # issue 476 + def test_chicago_fall(self): + + result = arrow.Arrow(2017, 11, 5, 2, 1, tzinfo="-05:00").to("America/Chicago") + expected = arrow.Arrow(2017, 11, 5, 1, 1, tzinfo="America/Chicago") + + assert result == expected + assert result.utcoffset() != expected.utcoffset() + + def test_toronto_gap(self): + + before = arrow.Arrow(2011, 3, 13, 6, 30, tzinfo="UTC").to("America/Toronto") + after = arrow.Arrow(2011, 3, 13, 7, 30, tzinfo="UTC").to("America/Toronto") + + assert before.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 1, 30) + assert after.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 3, 30) + + assert before.utcoffset() != after.utcoffset() + + def test_sydney_gap(self): + + before = arrow.Arrow(2012, 10, 6, 15, 30, tzinfo="UTC").to("Australia/Sydney") + after = arrow.Arrow(2012, 10, 6, 16, 30, tzinfo="UTC").to("Australia/Sydney") + + assert before.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 1, 30) + assert after.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 3, 30) + + assert before.utcoffset() != after.utcoffset() + + +class TestArrowPickling: + def test_pickle_and_unpickle(self): + + dt = arrow.Arrow.utcnow() + + pickled = 
pickle.dumps(dt) + + unpickled = pickle.loads(pickled) + + assert unpickled == dt + + +class TestArrowReplace: + def test_not_attr(self): + + with pytest.raises(AttributeError): + arrow.Arrow.utcnow().replace(abc=1) + + def test_replace(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.replace(year=2012) == arrow.Arrow(2012, 5, 5, 12, 30, 45) + assert arw.replace(month=1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) + assert arw.replace(day=1) == arrow.Arrow(2013, 5, 1, 12, 30, 45) + assert arw.replace(hour=1) == arrow.Arrow(2013, 5, 5, 1, 30, 45) + assert arw.replace(minute=1) == arrow.Arrow(2013, 5, 5, 12, 1, 45) + assert arw.replace(second=1) == arrow.Arrow(2013, 5, 5, 12, 30, 1) + + def test_replace_tzinfo(self): + + arw = arrow.Arrow.utcnow().to("US/Eastern") + + result = arw.replace(tzinfo=tz.gettz("US/Pacific")) + + assert result == arw.datetime.replace(tzinfo=tz.gettz("US/Pacific")) + + def test_replace_fold(self): + + before = arrow.Arrow(2017, 11, 5, 1, tzinfo="America/New_York") + after = before.replace(fold=1) + + assert before.fold == 0 + assert after.fold == 1 + assert before == after + assert before.utcoffset() != after.utcoffset() + + def test_replace_fold_and_other(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.replace(fold=1, minute=50) == arrow.Arrow(2013, 5, 5, 12, 50, 45) + assert arw.replace(minute=50, fold=1) == arrow.Arrow(2013, 5, 5, 12, 50, 45) + + def test_replace_week(self): + + with pytest.raises(AttributeError): + arrow.Arrow.utcnow().replace(week=1) + + def test_replace_quarter(self): + + with pytest.raises(AttributeError): + arrow.Arrow.utcnow().replace(quarter=1) + + def test_replace_quarter_and_fold(self): + with pytest.raises(AttributeError): + arrow.utcnow().replace(fold=1, quarter=1) + + with pytest.raises(AttributeError): + arrow.utcnow().replace(quarter=1, fold=1) + + def test_replace_other_kwargs(self): + + with pytest.raises(AttributeError): + arrow.utcnow().replace(abc="def") + + +class 
TestArrowShift: + def test_not_attr(self): + + now = arrow.Arrow.utcnow() + + with pytest.raises(AttributeError): + now.shift(abc=1) + + with pytest.raises(AttributeError): + now.shift(week=1) + + def test_shift(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.shift(years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45) + assert arw.shift(quarters=1) == arrow.Arrow(2013, 8, 5, 12, 30, 45) + assert arw.shift(quarters=1, months=1) == arrow.Arrow(2013, 9, 5, 12, 30, 45) + assert arw.shift(months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45) + assert arw.shift(weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45) + assert arw.shift(days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45) + assert arw.shift(minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45) + assert arw.shift(seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46) + assert arw.shift(microseconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 45, 1) + + # Remember: Python's weekday 0 is Monday + assert arw.shift(weekday=0) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=1) == arrow.Arrow(2013, 5, 7, 12, 30, 45) + assert arw.shift(weekday=2) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=3) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=4) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=5) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=6) == arw + + with pytest.raises(IndexError): + arw.shift(weekday=7) + + # Use dateutil.relativedelta's convenient day instances + assert arw.shift(weekday=MO) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=MO(0)) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=MO(1)) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(weekday=MO(2)) == arrow.Arrow(2013, 5, 13, 12, 30, 45) + assert arw.shift(weekday=TU) == arrow.Arrow(2013, 5, 7, 12, 30, 45) + assert arw.shift(weekday=TU(0)) == arrow.Arrow(2013, 
5, 7, 12, 30, 45) + assert arw.shift(weekday=TU(1)) == arrow.Arrow(2013, 5, 7, 12, 30, 45) + assert arw.shift(weekday=TU(2)) == arrow.Arrow(2013, 5, 14, 12, 30, 45) + assert arw.shift(weekday=WE) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=WE(0)) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=WE(1)) == arrow.Arrow(2013, 5, 8, 12, 30, 45) + assert arw.shift(weekday=WE(2)) == arrow.Arrow(2013, 5, 15, 12, 30, 45) + assert arw.shift(weekday=TH) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=TH(0)) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=TH(1)) == arrow.Arrow(2013, 5, 9, 12, 30, 45) + assert arw.shift(weekday=TH(2)) == arrow.Arrow(2013, 5, 16, 12, 30, 45) + assert arw.shift(weekday=FR) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=FR(0)) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=FR(1)) == arrow.Arrow(2013, 5, 10, 12, 30, 45) + assert arw.shift(weekday=FR(2)) == arrow.Arrow(2013, 5, 17, 12, 30, 45) + assert arw.shift(weekday=SA) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=SA(0)) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=SA(1)) == arrow.Arrow(2013, 5, 11, 12, 30, 45) + assert arw.shift(weekday=SA(2)) == arrow.Arrow(2013, 5, 18, 12, 30, 45) + assert arw.shift(weekday=SU) == arw + assert arw.shift(weekday=SU(0)) == arw + assert arw.shift(weekday=SU(1)) == arw + assert arw.shift(weekday=SU(2)) == arrow.Arrow(2013, 5, 12, 12, 30, 45) + + def test_shift_negative(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + assert arw.shift(years=-1) == arrow.Arrow(2012, 5, 5, 12, 30, 45) + assert arw.shift(quarters=-1) == arrow.Arrow(2013, 2, 5, 12, 30, 45) + assert arw.shift(quarters=-1, months=-1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) + assert arw.shift(months=-1) == arrow.Arrow(2013, 4, 5, 12, 30, 45) + assert arw.shift(weeks=-1) == arrow.Arrow(2013, 4, 28, 12, 30, 45) + assert arw.shift(days=-1) == 
arrow.Arrow(2013, 5, 4, 12, 30, 45) + assert arw.shift(hours=-1) == arrow.Arrow(2013, 5, 5, 11, 30, 45) + assert arw.shift(minutes=-1) == arrow.Arrow(2013, 5, 5, 12, 29, 45) + assert arw.shift(seconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44) + assert arw.shift(microseconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44, 999999) + + # Not sure how practical these negative weekdays are + assert arw.shift(weekday=-1) == arw.shift(weekday=SU) + assert arw.shift(weekday=-2) == arw.shift(weekday=SA) + assert arw.shift(weekday=-3) == arw.shift(weekday=FR) + assert arw.shift(weekday=-4) == arw.shift(weekday=TH) + assert arw.shift(weekday=-5) == arw.shift(weekday=WE) + assert arw.shift(weekday=-6) == arw.shift(weekday=TU) + assert arw.shift(weekday=-7) == arw.shift(weekday=MO) + + with pytest.raises(IndexError): + arw.shift(weekday=-8) + + assert arw.shift(weekday=MO(-1)) == arrow.Arrow(2013, 4, 29, 12, 30, 45) + assert arw.shift(weekday=TU(-1)) == arrow.Arrow(2013, 4, 30, 12, 30, 45) + assert arw.shift(weekday=WE(-1)) == arrow.Arrow(2013, 5, 1, 12, 30, 45) + assert arw.shift(weekday=TH(-1)) == arrow.Arrow(2013, 5, 2, 12, 30, 45) + assert arw.shift(weekday=FR(-1)) == arrow.Arrow(2013, 5, 3, 12, 30, 45) + assert arw.shift(weekday=SA(-1)) == arrow.Arrow(2013, 5, 4, 12, 30, 45) + assert arw.shift(weekday=SU(-1)) == arw + assert arw.shift(weekday=SU(-2)) == arrow.Arrow(2013, 4, 28, 12, 30, 45) + + def test_shift_quarters_bug(self): + + arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) + + # The value of the last-read argument was used instead of the ``quarters`` argument. + # Recall that the keyword argument dict, like all dicts, is unordered, so only certain + # combinations of arguments would exhibit this. 
+ assert arw.shift(quarters=0, years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45) + assert arw.shift(quarters=0, months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45) + assert arw.shift(quarters=0, weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45) + assert arw.shift(quarters=0, days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45) + assert arw.shift(quarters=0, hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45) + assert arw.shift(quarters=0, minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45) + assert arw.shift(quarters=0, seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46) + assert arw.shift(quarters=0, microseconds=1) == arrow.Arrow( + 2013, 5, 5, 12, 30, 45, 1 + ) + + def test_shift_positive_imaginary(self): + + # Avoid shifting into imaginary datetimes, take into account DST and other timezone changes. + + new_york = arrow.Arrow(2017, 3, 12, 1, 30, tzinfo="America/New_York") + assert new_york.shift(hours=+1) == arrow.Arrow( + 2017, 3, 12, 3, 30, tzinfo="America/New_York" + ) + + # pendulum example + paris = arrow.Arrow(2013, 3, 31, 1, 50, tzinfo="Europe/Paris") + assert paris.shift(minutes=+20) == arrow.Arrow( + 2013, 3, 31, 3, 10, tzinfo="Europe/Paris" + ) + + canberra = arrow.Arrow(2018, 10, 7, 1, 30, tzinfo="Australia/Canberra") + assert canberra.shift(hours=+1) == arrow.Arrow( + 2018, 10, 7, 3, 30, tzinfo="Australia/Canberra" + ) + + kiev = arrow.Arrow(2018, 3, 25, 2, 30, tzinfo="Europe/Kiev") + assert kiev.shift(hours=+1) == arrow.Arrow( + 2018, 3, 25, 4, 30, tzinfo="Europe/Kiev" + ) + + # Edge case, the entire day of 2011-12-30 is imaginary in this zone! 
+ apia = arrow.Arrow(2011, 12, 29, 23, tzinfo="Pacific/Apia") + assert apia.shift(hours=+2) == arrow.Arrow( + 2011, 12, 31, 1, tzinfo="Pacific/Apia" + ) + + def test_shift_negative_imaginary(self): + + new_york = arrow.Arrow(2011, 3, 13, 3, 30, tzinfo="America/New_York") + assert new_york.shift(hours=-1) == arrow.Arrow( + 2011, 3, 13, 3, 30, tzinfo="America/New_York" + ) + assert new_york.shift(hours=-2) == arrow.Arrow( + 2011, 3, 13, 1, 30, tzinfo="America/New_York" + ) + + london = arrow.Arrow(2019, 3, 31, 2, tzinfo="Europe/London") + assert london.shift(hours=-1) == arrow.Arrow( + 2019, 3, 31, 2, tzinfo="Europe/London" + ) + assert london.shift(hours=-2) == arrow.Arrow( + 2019, 3, 31, 0, tzinfo="Europe/London" + ) + + # edge case, crossing the international dateline + apia = arrow.Arrow(2011, 12, 31, 1, tzinfo="Pacific/Apia") + assert apia.shift(hours=-2) == arrow.Arrow( + 2011, 12, 31, 23, tzinfo="Pacific/Apia" + ) + + @pytest.mark.skipif( + dateutil.__version__ < "2.7.1", reason="old tz database (2018d needed)" + ) + def test_shift_kiritimati(self): + # corrected 2018d tz database release, will fail in earlier versions + + kiritimati = arrow.Arrow(1994, 12, 30, 12, 30, tzinfo="Pacific/Kiritimati") + assert kiritimati.shift(days=+1) == arrow.Arrow( + 1995, 1, 1, 12, 30, tzinfo="Pacific/Kiritimati" + ) + + @pytest.mark.skipif( + sys.version_info < (3, 6), reason="unsupported before python 3.6" + ) + def shift_imaginary_seconds(self): + # offset has a seconds component + monrovia = arrow.Arrow(1972, 1, 6, 23, tzinfo="Africa/Monrovia") + assert monrovia.shift(hours=+1, minutes=+30) == arrow.Arrow( + 1972, 1, 7, 1, 14, 30, tzinfo="Africa/Monrovia" + ) + + +class TestArrowRange: + def test_year(self): + + result = list( + arrow.Arrow.range( + "year", datetime(2013, 1, 2, 3, 4, 5), datetime(2016, 4, 5, 6, 7, 8) + ) + ) + + assert result == [ + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2014, 1, 2, 3, 4, 5), + arrow.Arrow(2015, 1, 2, 3, 4, 5), + arrow.Arrow(2016, 
1, 2, 3, 4, 5), + ] + + def test_quarter(self): + + result = list( + arrow.Arrow.range( + "quarter", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9) + ) + ) + + assert result == [ + arrow.Arrow(2013, 2, 3, 4, 5, 6), + arrow.Arrow(2013, 5, 3, 4, 5, 6), + ] + + def test_month(self): + + result = list( + arrow.Arrow.range( + "month", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9) + ) + ) + + assert result == [ + arrow.Arrow(2013, 2, 3, 4, 5, 6), + arrow.Arrow(2013, 3, 3, 4, 5, 6), + arrow.Arrow(2013, 4, 3, 4, 5, 6), + arrow.Arrow(2013, 5, 3, 4, 5, 6), + ] + + def test_week(self): + + result = list( + arrow.Arrow.range( + "week", datetime(2013, 9, 1, 2, 3, 4), datetime(2013, 10, 1, 2, 3, 4) + ) + ) + + assert result == [ + arrow.Arrow(2013, 9, 1, 2, 3, 4), + arrow.Arrow(2013, 9, 8, 2, 3, 4), + arrow.Arrow(2013, 9, 15, 2, 3, 4), + arrow.Arrow(2013, 9, 22, 2, 3, 4), + arrow.Arrow(2013, 9, 29, 2, 3, 4), + ] + + def test_day(self): + + result = list( + arrow.Arrow.range( + "day", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 5, 6, 7, 8) + ) + ) + + assert result == [ + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2013, 1, 3, 3, 4, 5), + arrow.Arrow(2013, 1, 4, 3, 4, 5), + arrow.Arrow(2013, 1, 5, 3, 4, 5), + ] + + def test_hour(self): + + result = list( + arrow.Arrow.range( + "hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 6, 7, 8) + ) + ) + + assert result == [ + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2013, 1, 2, 4, 4, 5), + arrow.Arrow(2013, 1, 2, 5, 4, 5), + arrow.Arrow(2013, 1, 2, 6, 4, 5), + ] + + result = list( + arrow.Arrow.range( + "hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 5) + ) + ) + + assert result == [arrow.Arrow(2013, 1, 2, 3, 4, 5)] + + def test_minute(self): + + result = list( + arrow.Arrow.range( + "minute", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 7, 8) + ) + ) + + assert result == [ + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2013, 1, 2, 3, 5, 5), + 
arrow.Arrow(2013, 1, 2, 3, 6, 5), + arrow.Arrow(2013, 1, 2, 3, 7, 5), + ] + + def test_second(self): + + result = list( + arrow.Arrow.range( + "second", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 8) + ) + ) + + assert result == [ + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2013, 1, 2, 3, 4, 6), + arrow.Arrow(2013, 1, 2, 3, 4, 7), + arrow.Arrow(2013, 1, 2, 3, 4, 8), + ] + + def test_arrow(self): + + result = list( + arrow.Arrow.range( + "day", + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2013, 1, 5, 6, 7, 8), + ) + ) + + assert result == [ + arrow.Arrow(2013, 1, 2, 3, 4, 5), + arrow.Arrow(2013, 1, 3, 3, 4, 5), + arrow.Arrow(2013, 1, 4, 3, 4, 5), + arrow.Arrow(2013, 1, 5, 3, 4, 5), + ] + + def test_naive_tz(self): + + result = arrow.Arrow.range( + "year", datetime(2013, 1, 2, 3), datetime(2016, 4, 5, 6), "US/Pacific" + ) + + for r in result: + assert r.tzinfo == tz.gettz("US/Pacific") + + def test_aware_same_tz(self): + + result = arrow.Arrow.range( + "day", + arrow.Arrow(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")), + arrow.Arrow(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")), + ) + + for r in result: + assert r.tzinfo == tz.gettz("US/Pacific") + + def test_aware_different_tz(self): + + result = arrow.Arrow.range( + "day", + datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")), + datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")), + ) + + for r in result: + assert r.tzinfo == tz.gettz("US/Eastern") + + def test_aware_tz(self): + + result = arrow.Arrow.range( + "day", + datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")), + datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")), + tz=tz.gettz("US/Central"), + ) + + for r in result: + assert r.tzinfo == tz.gettz("US/Central") + + def test_imaginary(self): + # issue #72, avoid duplication in utc column + + before = arrow.Arrow(2018, 3, 10, 23, tzinfo="US/Pacific") + after = arrow.Arrow(2018, 3, 11, 4, tzinfo="US/Pacific") + + pacific_range = [t for t in arrow.Arrow.range("hour", before, after)] + 
utc_range = [t.to("utc") for t in arrow.Arrow.range("hour", before, after)] + + assert len(pacific_range) == len(set(pacific_range)) + assert len(utc_range) == len(set(utc_range)) + + def test_unsupported(self): + + with pytest.raises(AttributeError): + next(arrow.Arrow.range("abc", datetime.utcnow(), datetime.utcnow())) + + def test_range_over_months_ending_on_different_days(self): + # regression test for issue #842 + result = list(arrow.Arrow.range("month", datetime(2015, 1, 31), limit=4)) + assert result == [ + arrow.Arrow(2015, 1, 31), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 3, 31), + arrow.Arrow(2015, 4, 30), + ] + + result = list(arrow.Arrow.range("month", datetime(2015, 1, 30), limit=3)) + assert result == [ + arrow.Arrow(2015, 1, 30), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 3, 30), + ] + + result = list(arrow.Arrow.range("month", datetime(2015, 2, 28), limit=3)) + assert result == [ + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 3, 28), + arrow.Arrow(2015, 4, 28), + ] + + result = list(arrow.Arrow.range("month", datetime(2015, 3, 31), limit=3)) + assert result == [ + arrow.Arrow(2015, 3, 31), + arrow.Arrow(2015, 4, 30), + arrow.Arrow(2015, 5, 31), + ] + + def test_range_over_quarter_months_ending_on_different_days(self): + result = list(arrow.Arrow.range("quarter", datetime(2014, 11, 30), limit=3)) + assert result == [ + arrow.Arrow(2014, 11, 30), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2015, 5, 30), + ] + + def test_range_over_year_maintains_end_date_across_leap_year(self): + result = list(arrow.Arrow.range("year", datetime(2012, 2, 29), limit=5)) + assert result == [ + arrow.Arrow(2012, 2, 29), + arrow.Arrow(2013, 2, 28), + arrow.Arrow(2014, 2, 28), + arrow.Arrow(2015, 2, 28), + arrow.Arrow(2016, 2, 29), + ] + + +class TestArrowSpanRange: + def test_year(self): + + result = list( + arrow.Arrow.span_range("year", datetime(2013, 2, 1), datetime(2016, 3, 31)) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1), + arrow.Arrow(2013, 12, 
31, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2014, 1, 1), + arrow.Arrow(2014, 12, 31, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2015, 1, 1), + arrow.Arrow(2015, 12, 31, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2016, 1, 1), + arrow.Arrow(2016, 12, 31, 23, 59, 59, 999999), + ), + ] + + def test_quarter(self): + + result = list( + arrow.Arrow.span_range( + "quarter", datetime(2013, 2, 2), datetime(2013, 5, 15) + ) + ) + + assert result == [ + (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)), + (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 6, 30, 23, 59, 59, 999999)), + ] + + def test_month(self): + + result = list( + arrow.Arrow.span_range("month", datetime(2013, 1, 2), datetime(2013, 4, 15)) + ) + + assert result == [ + (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 1, 31, 23, 59, 59, 999999)), + (arrow.Arrow(2013, 2, 1), arrow.Arrow(2013, 2, 28, 23, 59, 59, 999999)), + (arrow.Arrow(2013, 3, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)), + (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 4, 30, 23, 59, 59, 999999)), + ] + + def test_week(self): + + result = list( + arrow.Arrow.span_range("week", datetime(2013, 2, 2), datetime(2013, 2, 28)) + ) + + assert result == [ + (arrow.Arrow(2013, 1, 28), arrow.Arrow(2013, 2, 3, 23, 59, 59, 999999)), + (arrow.Arrow(2013, 2, 4), arrow.Arrow(2013, 2, 10, 23, 59, 59, 999999)), + ( + arrow.Arrow(2013, 2, 11), + arrow.Arrow(2013, 2, 17, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 2, 18), + arrow.Arrow(2013, 2, 24, 23, 59, 59, 999999), + ), + (arrow.Arrow(2013, 2, 25), arrow.Arrow(2013, 3, 3, 23, 59, 59, 999999)), + ] + + def test_day(self): + + result = list( + arrow.Arrow.span_range( + "day", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0), + arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 2, 0), + arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 3, 0), + arrow.Arrow(2013, 1, 3, 
23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 4, 0), + arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999), + ), + ] + + def test_days(self): + + result = list( + arrow.Arrow.span_range( + "days", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0), + arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 2, 0), + arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 3, 0), + arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 4, 0), + arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999), + ), + ] + + def test_hour(self): + + result = list( + arrow.Arrow.span_range( + "hour", datetime(2013, 1, 1, 0, 30), datetime(2013, 1, 1, 3, 30) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0), + arrow.Arrow(2013, 1, 1, 0, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 1), + arrow.Arrow(2013, 1, 1, 1, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 2), + arrow.Arrow(2013, 1, 1, 2, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 3), + arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999), + ), + ] + + result = list( + arrow.Arrow.span_range( + "hour", datetime(2013, 1, 1, 3, 30), datetime(2013, 1, 1, 3, 30) + ) + ) + + assert result == [ + (arrow.Arrow(2013, 1, 1, 3), arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999)) + ] + + def test_minute(self): + + result = list( + arrow.Arrow.span_range( + "minute", datetime(2013, 1, 1, 0, 0, 30), datetime(2013, 1, 1, 0, 3, 30) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0, 0), + arrow.Arrow(2013, 1, 1, 0, 0, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 1), + arrow.Arrow(2013, 1, 1, 0, 1, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 2), + arrow.Arrow(2013, 1, 1, 0, 2, 59, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 3), + arrow.Arrow(2013, 1, 1, 0, 3, 59, 999999), + ), + ] + + def test_second(self): + + result = list( + arrow.Arrow.span_range( + "second", datetime(2013, 1, 1), 
datetime(2013, 1, 1, 0, 0, 3) + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 1, 1, 0, 0, 0), + arrow.Arrow(2013, 1, 1, 0, 0, 0, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 0, 1), + arrow.Arrow(2013, 1, 1, 0, 0, 1, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 0, 2), + arrow.Arrow(2013, 1, 1, 0, 0, 2, 999999), + ), + ( + arrow.Arrow(2013, 1, 1, 0, 0, 3), + arrow.Arrow(2013, 1, 1, 0, 0, 3, 999999), + ), + ] + + def test_naive_tz(self): + + tzinfo = tz.gettz("US/Pacific") + + result = arrow.Arrow.span_range( + "hour", datetime(2013, 1, 1, 0), datetime(2013, 1, 1, 3, 59), "US/Pacific" + ) + + for f, c in result: + assert f.tzinfo == tzinfo + assert c.tzinfo == tzinfo + + def test_aware_same_tz(self): + + tzinfo = tz.gettz("US/Pacific") + + result = arrow.Arrow.span_range( + "hour", + datetime(2013, 1, 1, 0, tzinfo=tzinfo), + datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo), + ) + + for f, c in result: + assert f.tzinfo == tzinfo + assert c.tzinfo == tzinfo + + def test_aware_different_tz(self): + + tzinfo1 = tz.gettz("US/Pacific") + tzinfo2 = tz.gettz("US/Eastern") + + result = arrow.Arrow.span_range( + "hour", + datetime(2013, 1, 1, 0, tzinfo=tzinfo1), + datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo2), + ) + + for f, c in result: + assert f.tzinfo == tzinfo1 + assert c.tzinfo == tzinfo1 + + def test_aware_tz(self): + + result = arrow.Arrow.span_range( + "hour", + datetime(2013, 1, 1, 0, tzinfo=tz.gettz("US/Eastern")), + datetime(2013, 1, 1, 2, 59, tzinfo=tz.gettz("US/Eastern")), + tz="US/Central", + ) + + for f, c in result: + assert f.tzinfo == tz.gettz("US/Central") + assert c.tzinfo == tz.gettz("US/Central") + + def test_bounds_param_is_passed(self): + + result = list( + arrow.Arrow.span_range( + "quarter", datetime(2013, 2, 2), datetime(2013, 5, 15), bounds="[]" + ) + ) + + assert result == [ + (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 4, 1)), + (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 7, 1)), + ] + + +class TestArrowInterval: + def 
test_incorrect_input(self): + with pytest.raises(ValueError): + list( + arrow.Arrow.interval( + "month", datetime(2013, 1, 2), datetime(2013, 4, 15), 0 + ) + ) + + def test_correct(self): + result = list( + arrow.Arrow.interval( + "hour", datetime(2013, 5, 5, 12, 30), datetime(2013, 5, 5, 17, 15), 2 + ) + ) + + assert result == [ + ( + arrow.Arrow(2013, 5, 5, 12), + arrow.Arrow(2013, 5, 5, 13, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 5, 5, 14), + arrow.Arrow(2013, 5, 5, 15, 59, 59, 999999), + ), + ( + arrow.Arrow(2013, 5, 5, 16), + arrow.Arrow(2013, 5, 5, 17, 59, 59, 999999), + ), + ] + + def test_bounds_param_is_passed(self): + result = list( + arrow.Arrow.interval( + "hour", + datetime(2013, 5, 5, 12, 30), + datetime(2013, 5, 5, 17, 15), + 2, + bounds="[]", + ) + ) + + assert result == [ + (arrow.Arrow(2013, 5, 5, 12), arrow.Arrow(2013, 5, 5, 14)), + (arrow.Arrow(2013, 5, 5, 14), arrow.Arrow(2013, 5, 5, 16)), + (arrow.Arrow(2013, 5, 5, 16), arrow.Arrow(2013, 5, 5, 18)), + ] + + +@pytest.mark.usefixtures("time_2013_02_15") +class TestArrowSpan: + def test_span_attribute(self): + + with pytest.raises(AttributeError): + self.arrow.span("span") + + def test_span_year(self): + + floor, ceil = self.arrow.span("year") + + assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 12, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_quarter(self): + + floor, ceil = self.arrow.span("quarter") + + assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 3, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_quarter_count(self): + + floor, ceil = self.arrow.span("quarter", 2) + + assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 6, 30, 23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_year_count(self): + + floor, ceil = self.arrow.span("year", 2) + + assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2014, 12, 31, 
23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_month(self): + + floor, ceil = self.arrow.span("month") + + assert floor == datetime(2013, 2, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 28, 23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_week(self): + + floor, ceil = self.arrow.span("week") + + assert floor == datetime(2013, 2, 11, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 17, 23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_day(self): + + floor, ceil = self.arrow.span("day") + + assert floor == datetime(2013, 2, 15, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 23, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_hour(self): + + floor, ceil = self.arrow.span("hour") + + assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_minute(self): + + floor, ceil = self.arrow.span("minute") + + assert floor == datetime(2013, 2, 15, 3, 41, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 3, 41, 59, 999999, tzinfo=tz.tzutc()) + + def test_span_second(self): + + floor, ceil = self.arrow.span("second") + + assert floor == datetime(2013, 2, 15, 3, 41, 22, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 3, 41, 22, 999999, tzinfo=tz.tzutc()) + + def test_span_microsecond(self): + + floor, ceil = self.arrow.span("microsecond") + + assert floor == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc()) + + def test_floor(self): + + floor, ceil = self.arrow.span("month") + + assert floor == self.arrow.floor("month") + assert ceil == self.arrow.ceil("month") + + def test_span_inclusive_inclusive(self): + + floor, ceil = self.arrow.span("hour", bounds="[]") + + assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc()) + + def test_span_exclusive_inclusive(self): + + 
floor, ceil = self.arrow.span("hour", bounds="(]") + + assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc()) + + def test_span_exclusive_exclusive(self): + + floor, ceil = self.arrow.span("hour", bounds="()") + + assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc()) + assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc()) + + def test_bounds_are_validated(self): + + with pytest.raises(ValueError): + floor, ceil = self.arrow.span("hour", bounds="][") + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowHumanize: + def test_granularity(self): + + assert self.now.humanize(granularity="second") == "just now" + + later1 = self.now.shift(seconds=1) + assert self.now.humanize(later1, granularity="second") == "just now" + assert later1.humanize(self.now, granularity="second") == "just now" + assert self.now.humanize(later1, granularity="minute") == "0 minutes ago" + assert later1.humanize(self.now, granularity="minute") == "in 0 minutes" + + later100 = self.now.shift(seconds=100) + assert self.now.humanize(later100, granularity="second") == "100 seconds ago" + assert later100.humanize(self.now, granularity="second") == "in 100 seconds" + assert self.now.humanize(later100, granularity="minute") == "a minute ago" + assert later100.humanize(self.now, granularity="minute") == "in a minute" + assert self.now.humanize(later100, granularity="hour") == "0 hours ago" + assert later100.humanize(self.now, granularity="hour") == "in 0 hours" + + later4000 = self.now.shift(seconds=4000) + assert self.now.humanize(later4000, granularity="minute") == "66 minutes ago" + assert later4000.humanize(self.now, granularity="minute") == "in 66 minutes" + assert self.now.humanize(later4000, granularity="hour") == "an hour ago" + assert later4000.humanize(self.now, granularity="hour") == "in an hour" + assert self.now.humanize(later4000, granularity="day") == "0 days ago" + 
assert later4000.humanize(self.now, granularity="day") == "in 0 days" + + later105 = self.now.shift(seconds=10 ** 5) + assert self.now.humanize(later105, granularity="hour") == "27 hours ago" + assert later105.humanize(self.now, granularity="hour") == "in 27 hours" + assert self.now.humanize(later105, granularity="day") == "a day ago" + assert later105.humanize(self.now, granularity="day") == "in a day" + assert self.now.humanize(later105, granularity="week") == "0 weeks ago" + assert later105.humanize(self.now, granularity="week") == "in 0 weeks" + assert self.now.humanize(later105, granularity="month") == "0 months ago" + assert later105.humanize(self.now, granularity="month") == "in 0 months" + assert self.now.humanize(later105, granularity=["month"]) == "0 months ago" + assert later105.humanize(self.now, granularity=["month"]) == "in 0 months" + + later106 = self.now.shift(seconds=3 * 10 ** 6) + assert self.now.humanize(later106, granularity="day") == "34 days ago" + assert later106.humanize(self.now, granularity="day") == "in 34 days" + assert self.now.humanize(later106, granularity="week") == "4 weeks ago" + assert later106.humanize(self.now, granularity="week") == "in 4 weeks" + assert self.now.humanize(later106, granularity="month") == "a month ago" + assert later106.humanize(self.now, granularity="month") == "in a month" + assert self.now.humanize(later106, granularity="year") == "0 years ago" + assert later106.humanize(self.now, granularity="year") == "in 0 years" + + later506 = self.now.shift(seconds=50 * 10 ** 6) + assert self.now.humanize(later506, granularity="week") == "82 weeks ago" + assert later506.humanize(self.now, granularity="week") == "in 82 weeks" + assert self.now.humanize(later506, granularity="month") == "18 months ago" + assert later506.humanize(self.now, granularity="month") == "in 18 months" + assert self.now.humanize(later506, granularity="year") == "a year ago" + assert later506.humanize(self.now, granularity="year") == "in a year" + 
+ later108 = self.now.shift(seconds=10 ** 8) + assert self.now.humanize(later108, granularity="year") == "3 years ago" + assert later108.humanize(self.now, granularity="year") == "in 3 years" + + later108onlydistance = self.now.shift(seconds=10 ** 8) + assert ( + self.now.humanize( + later108onlydistance, only_distance=True, granularity="year" + ) + == "3 years" + ) + assert ( + later108onlydistance.humanize( + self.now, only_distance=True, granularity="year" + ) + == "3 years" + ) + + with pytest.raises(AttributeError): + self.now.humanize(later108, granularity="years") + + def test_multiple_granularity(self): + assert self.now.humanize(granularity="second") == "just now" + assert self.now.humanize(granularity=["second"]) == "just now" + assert ( + self.now.humanize(granularity=["year", "month", "day", "hour", "second"]) + == "in 0 years 0 months 0 days 0 hours and 0 seconds" + ) + + later4000 = self.now.shift(seconds=4000) + assert ( + later4000.humanize(self.now, granularity=["hour", "minute"]) + == "in an hour and 6 minutes" + ) + assert ( + self.now.humanize(later4000, granularity=["hour", "minute"]) + == "an hour and 6 minutes ago" + ) + assert ( + later4000.humanize( + self.now, granularity=["hour", "minute"], only_distance=True + ) + == "an hour and 6 minutes" + ) + assert ( + later4000.humanize(self.now, granularity=["day", "hour", "minute"]) + == "in 0 days an hour and 6 minutes" + ) + assert ( + self.now.humanize(later4000, granularity=["day", "hour", "minute"]) + == "0 days an hour and 6 minutes ago" + ) + + later105 = self.now.shift(seconds=10 ** 5) + assert ( + self.now.humanize(later105, granularity=["hour", "day", "minute"]) + == "a day 3 hours and 46 minutes ago" + ) + with pytest.raises(AttributeError): + self.now.humanize(later105, granularity=["error", "second"]) + + later108onlydistance = self.now.shift(seconds=10 ** 8) + assert ( + self.now.humanize( + later108onlydistance, only_distance=True, granularity=["year"] + ) + == "3 years" + ) + 
assert ( + self.now.humanize( + later108onlydistance, only_distance=True, granularity=["month", "week"] + ) + == "37 months and 4 weeks" + ) + assert ( + self.now.humanize( + later108onlydistance, only_distance=True, granularity=["year", "second"] + ) + == "3 years and 5327200 seconds" + ) + + one_min_one_sec_ago = self.now.shift(minutes=-1, seconds=-1) + assert ( + one_min_one_sec_ago.humanize(self.now, granularity=["minute", "second"]) + == "a minute and a second ago" + ) + + one_min_two_secs_ago = self.now.shift(minutes=-1, seconds=-2) + assert ( + one_min_two_secs_ago.humanize(self.now, granularity=["minute", "second"]) + == "a minute and 2 seconds ago" + ) + + def test_seconds(self): + + later = self.now.shift(seconds=10) + + # regression test for issue #727 + assert self.now.humanize(later) == "10 seconds ago" + assert later.humanize(self.now) == "in 10 seconds" + + assert self.now.humanize(later, only_distance=True) == "10 seconds" + assert later.humanize(self.now, only_distance=True) == "10 seconds" + + def test_minute(self): + + later = self.now.shift(minutes=1) + + assert self.now.humanize(later) == "a minute ago" + assert later.humanize(self.now) == "in a minute" + + assert self.now.humanize(later, only_distance=True) == "a minute" + assert later.humanize(self.now, only_distance=True) == "a minute" + + def test_minutes(self): + + later = self.now.shift(minutes=2) + + assert self.now.humanize(later) == "2 minutes ago" + assert later.humanize(self.now) == "in 2 minutes" + + assert self.now.humanize(later, only_distance=True) == "2 minutes" + assert later.humanize(self.now, only_distance=True) == "2 minutes" + + def test_hour(self): + + later = self.now.shift(hours=1) + + assert self.now.humanize(later) == "an hour ago" + assert later.humanize(self.now) == "in an hour" + + assert self.now.humanize(later, only_distance=True) == "an hour" + assert later.humanize(self.now, only_distance=True) == "an hour" + + def test_hours(self): + + later = 
self.now.shift(hours=2) + + assert self.now.humanize(later) == "2 hours ago" + assert later.humanize(self.now) == "in 2 hours" + + assert self.now.humanize(later, only_distance=True) == "2 hours" + assert later.humanize(self.now, only_distance=True) == "2 hours" + + def test_day(self): + + later = self.now.shift(days=1) + + assert self.now.humanize(later) == "a day ago" + assert later.humanize(self.now) == "in a day" + + # regression test for issue #697 + less_than_48_hours = self.now.shift( + days=1, hours=23, seconds=59, microseconds=999999 + ) + assert self.now.humanize(less_than_48_hours) == "a day ago" + assert less_than_48_hours.humanize(self.now) == "in a day" + + less_than_48_hours_date = less_than_48_hours._datetime.date() + with pytest.raises(TypeError): + # humanize other argument does not take raw datetime.date objects + self.now.humanize(less_than_48_hours_date) + + # convert from date to arrow object + less_than_48_hours_date = arrow.Arrow.fromdate(less_than_48_hours_date) + assert self.now.humanize(less_than_48_hours_date) == "a day ago" + assert less_than_48_hours_date.humanize(self.now) == "in a day" + + assert self.now.humanize(later, only_distance=True) == "a day" + assert later.humanize(self.now, only_distance=True) == "a day" + + def test_days(self): + + later = self.now.shift(days=2) + + assert self.now.humanize(later) == "2 days ago" + assert later.humanize(self.now) == "in 2 days" + + assert self.now.humanize(later, only_distance=True) == "2 days" + assert later.humanize(self.now, only_distance=True) == "2 days" + + # Regression tests for humanize bug referenced in issue 541 + later = self.now.shift(days=3) + assert later.humanize(self.now) == "in 3 days" + + later = self.now.shift(days=3, seconds=1) + assert later.humanize(self.now) == "in 3 days" + + later = self.now.shift(days=4) + assert later.humanize(self.now) == "in 4 days" + + def test_week(self): + + later = self.now.shift(weeks=1) + + assert self.now.humanize(later) == "a week ago" 
+ assert later.humanize(self.now) == "in a week" + + assert self.now.humanize(later, only_distance=True) == "a week" + assert later.humanize(self.now, only_distance=True) == "a week" + + def test_weeks(self): + + later = self.now.shift(weeks=2) + + assert self.now.humanize(later) == "2 weeks ago" + assert later.humanize(self.now) == "in 2 weeks" + + assert self.now.humanize(later, only_distance=True) == "2 weeks" + assert later.humanize(self.now, only_distance=True) == "2 weeks" + + def test_month(self): + + later = self.now.shift(months=1) + + assert self.now.humanize(later) == "a month ago" + assert later.humanize(self.now) == "in a month" + + assert self.now.humanize(later, only_distance=True) == "a month" + assert later.humanize(self.now, only_distance=True) == "a month" + + def test_months(self): + + later = self.now.shift(months=2) + earlier = self.now.shift(months=-2) + + assert earlier.humanize(self.now) == "2 months ago" + assert later.humanize(self.now) == "in 2 months" + + assert self.now.humanize(later, only_distance=True) == "2 months" + assert later.humanize(self.now, only_distance=True) == "2 months" + + def test_year(self): + + later = self.now.shift(years=1) + + assert self.now.humanize(later) == "a year ago" + assert later.humanize(self.now) == "in a year" + + assert self.now.humanize(later, only_distance=True) == "a year" + assert later.humanize(self.now, only_distance=True) == "a year" + + def test_years(self): + + later = self.now.shift(years=2) + + assert self.now.humanize(later) == "2 years ago" + assert later.humanize(self.now) == "in 2 years" + + assert self.now.humanize(later, only_distance=True) == "2 years" + assert later.humanize(self.now, only_distance=True) == "2 years" + + arw = arrow.Arrow(2014, 7, 2) + + result = arw.humanize(self.datetime) + + assert result == "in 2 years" + + def test_arrow(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + result = arw.humanize(arrow.Arrow.fromdatetime(self.datetime)) + + assert result 
== "just now" + + def test_datetime_tzinfo(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + result = arw.humanize(self.datetime.replace(tzinfo=tz.tzutc())) + + assert result == "just now" + + def test_other(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + with pytest.raises(TypeError): + arw.humanize(object()) + + def test_invalid_locale(self): + + arw = arrow.Arrow.fromdatetime(self.datetime) + + with pytest.raises(ValueError): + arw.humanize(locale="klingon") + + def test_none(self): + + arw = arrow.Arrow.utcnow() + + result = arw.humanize() + + assert result == "just now" + + result = arw.humanize(None) + + assert result == "just now" + + def test_untranslated_granularity(self, mocker): + + arw = arrow.Arrow.utcnow() + later = arw.shift(weeks=1) + + # simulate an untranslated timeframe key + mocker.patch.dict("arrow.locales.EnglishLocale.timeframes") + del arrow.locales.EnglishLocale.timeframes["week"] + with pytest.raises(ValueError): + arw.humanize(later, granularity="week") + + +@pytest.mark.usefixtures("time_2013_01_01") +class TestArrowHumanizeTestsWithLocale: + def test_now(self): + + arw = arrow.Arrow(2013, 1, 1, 0, 0, 0) + + result = arw.humanize(self.datetime, locale="ru") + + assert result == "сейчас" + + def test_seconds(self): + arw = arrow.Arrow(2013, 1, 1, 0, 0, 44) + + result = arw.humanize(self.datetime, locale="ru") + + assert result == "через 44 несколько секунд" + + def test_years(self): + + arw = arrow.Arrow(2011, 7, 2) + + result = arw.humanize(self.datetime, locale="ru") + + assert result == "2 года назад" + + +class TestArrowIsBetween: + def test_start_before_end(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + result = target.is_between(start, end) + assert not result + + def test_exclusive_exclusive_bounds(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 27)) + 
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 10)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 36)) + result = target.is_between(start, end, "()") + assert result + result = target.is_between(start, end) + assert result + + def test_exclusive_exclusive_bounds_same_date(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + result = target.is_between(start, end, "()") + assert not result + + def test_inclusive_exclusive_bounds(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 4)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) + result = target.is_between(start, end, "[)") + assert not result + + def test_exclusive_inclusive_bounds(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + result = target.is_between(start, end, "(]") + assert result + + def test_inclusive_inclusive_bounds_same_date(self): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + result = target.is_between(start, end, "[]") + assert result + + def test_type_error_exception(self): + with pytest.raises(TypeError): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = datetime(2013, 5, 5) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) + target.is_between(start, end) + + with pytest.raises(TypeError): + target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + end = datetime(2013, 5, 8) + target.is_between(start, end) + + with pytest.raises(TypeError): + target.is_between(None, None) + + def test_value_error_exception(self): + target = 
arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) + start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) + end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) + with pytest.raises(ValueError): + target.is_between(start, end, "][") + with pytest.raises(ValueError): + target.is_between(start, end, "") + with pytest.raises(ValueError): + target.is_between(start, end, "]") + with pytest.raises(ValueError): + target.is_between(start, end, "[") + with pytest.raises(ValueError): + target.is_between(start, end, "hello") + + +class TestArrowUtil: + def test_get_datetime(self): + + get_datetime = arrow.Arrow._get_datetime + + arw = arrow.Arrow.utcnow() + dt = datetime.utcnow() + timestamp = time.time() + + assert get_datetime(arw) == arw.datetime + assert get_datetime(dt) == dt + assert ( + get_datetime(timestamp) == arrow.Arrow.utcfromtimestamp(timestamp).datetime + ) + + with pytest.raises(ValueError) as raise_ctx: + get_datetime("abc") + assert "not recognized as a datetime or timestamp" in str(raise_ctx.value) + + def test_get_tzinfo(self): + + get_tzinfo = arrow.Arrow._get_tzinfo + + with pytest.raises(ValueError) as raise_ctx: + get_tzinfo("abc") + assert "not recognized as a timezone" in str(raise_ctx.value) + + def test_get_iteration_params(self): + + assert arrow.Arrow._get_iteration_params("end", None) == ("end", sys.maxsize) + assert arrow.Arrow._get_iteration_params(None, 100) == (arrow.Arrow.max, 100) + assert arrow.Arrow._get_iteration_params(100, 120) == (100, 120) + + with pytest.raises(ValueError): + arrow.Arrow._get_iteration_params(None, None) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py new file mode 100644 index 0000000000..2b8df5168f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +import time +from datetime import date, 
datetime + +import pytest +from dateutil import tz + +from arrow.parser import ParserError + +from .utils import assert_datetime_equality + + +@pytest.mark.usefixtures("arrow_factory") +class TestGet: + def test_no_args(self): + + assert_datetime_equality( + self.factory.get(), datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + def test_timestamp_one_arg_no_arg(self): + + no_arg = self.factory.get(1406430900).timestamp + one_arg = self.factory.get("1406430900", "X").timestamp + + assert no_arg == one_arg + + def test_one_arg_none(self): + + assert_datetime_equality( + self.factory.get(None), datetime.utcnow().replace(tzinfo=tz.tzutc()) + ) + + def test_struct_time(self): + + assert_datetime_equality( + self.factory.get(time.gmtime()), + datetime.utcnow().replace(tzinfo=tz.tzutc()), + ) + + def test_one_arg_timestamp(self): + + int_timestamp = int(time.time()) + timestamp_dt = datetime.utcfromtimestamp(int_timestamp).replace( + tzinfo=tz.tzutc() + ) + + assert self.factory.get(int_timestamp) == timestamp_dt + + with pytest.raises(ParserError): + self.factory.get(str(int_timestamp)) + + float_timestamp = time.time() + timestamp_dt = datetime.utcfromtimestamp(float_timestamp).replace( + tzinfo=tz.tzutc() + ) + + assert self.factory.get(float_timestamp) == timestamp_dt + + with pytest.raises(ParserError): + self.factory.get(str(float_timestamp)) + + # Regression test for issue #216 + # Python 3 raises OverflowError, Python 2 raises ValueError + timestamp = 99999999999999999999999999.99999999999999999999999999 + with pytest.raises((OverflowError, ValueError)): + self.factory.get(timestamp) + + def test_one_arg_expanded_timestamp(self): + + millisecond_timestamp = 1591328104308 + microsecond_timestamp = 1591328104308505 + + # Regression test for issue #796 + assert self.factory.get(millisecond_timestamp) == datetime.utcfromtimestamp( + 1591328104.308 + ).replace(tzinfo=tz.tzutc()) + assert self.factory.get(microsecond_timestamp) == datetime.utcfromtimestamp( + 
1591328104.308505 + ).replace(tzinfo=tz.tzutc()) + + def test_one_arg_timestamp_with_tzinfo(self): + + timestamp = time.time() + timestamp_dt = datetime.fromtimestamp(timestamp, tz=tz.tzutc()).astimezone( + tz.gettz("US/Pacific") + ) + timezone = tz.gettz("US/Pacific") + + assert_datetime_equality( + self.factory.get(timestamp, tzinfo=timezone), timestamp_dt + ) + + def test_one_arg_arrow(self): + + arw = self.factory.utcnow() + result = self.factory.get(arw) + + assert arw == result + + def test_one_arg_datetime(self): + + dt = datetime.utcnow().replace(tzinfo=tz.tzutc()) + + assert self.factory.get(dt) == dt + + def test_one_arg_date(self): + + d = date.today() + dt = datetime(d.year, d.month, d.day, tzinfo=tz.tzutc()) + + assert self.factory.get(d) == dt + + def test_one_arg_tzinfo(self): + + self.expected = ( + datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tz.gettz("US/Pacific")) + ) + + assert_datetime_equality( + self.factory.get(tz.gettz("US/Pacific")), self.expected + ) + + # regression test for issue #658 + def test_one_arg_dateparser_datetime(self): + dateparser = pytest.importorskip("dateparser") + expected = datetime(1990, 1, 1).replace(tzinfo=tz.tzutc()) + # dateparser outputs: datetime.datetime(1990, 1, 1, 0, 0, tzinfo=) + parsed_date = dateparser.parse("1990-01-01T00:00:00+00:00") + dt_output = self.factory.get(parsed_date)._datetime.replace(tzinfo=tz.tzutc()) + assert dt_output == expected + + def test_kwarg_tzinfo(self): + + self.expected = ( + datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tz.gettz("US/Pacific")) + ) + + assert_datetime_equality( + self.factory.get(tzinfo=tz.gettz("US/Pacific")), self.expected + ) + + def test_kwarg_tzinfo_string(self): + + self.expected = ( + datetime.utcnow() + .replace(tzinfo=tz.tzutc()) + .astimezone(tz.gettz("US/Pacific")) + ) + + assert_datetime_equality(self.factory.get(tzinfo="US/Pacific"), self.expected) + + with pytest.raises(ParserError): + 
self.factory.get(tzinfo="US/PacificInvalidTzinfo") + + def test_kwarg_normalize_whitespace(self): + result = self.factory.get( + "Jun 1 2005 1:33PM", + "MMM D YYYY H:mmA", + tzinfo=tz.tzutc(), + normalize_whitespace=True, + ) + assert result._datetime == datetime(2005, 6, 1, 13, 33, tzinfo=tz.tzutc()) + + result = self.factory.get( + "\t 2013-05-05T12:30:45.123456 \t \n", + tzinfo=tz.tzutc(), + normalize_whitespace=True, + ) + assert result._datetime == datetime( + 2013, 5, 5, 12, 30, 45, 123456, tzinfo=tz.tzutc() + ) + + def test_one_arg_iso_str(self): + + dt = datetime.utcnow() + + assert_datetime_equality( + self.factory.get(dt.isoformat()), dt.replace(tzinfo=tz.tzutc()) + ) + + def test_one_arg_iso_calendar(self): + + pairs = [ + (datetime(2004, 1, 4), (2004, 1, 7)), + (datetime(2008, 12, 30), (2009, 1, 2)), + (datetime(2010, 1, 2), (2009, 53, 6)), + (datetime(2000, 2, 29), (2000, 9, 2)), + (datetime(2005, 1, 1), (2004, 53, 6)), + (datetime(2010, 1, 4), (2010, 1, 1)), + (datetime(2010, 1, 3), (2009, 53, 7)), + (datetime(2003, 12, 29), (2004, 1, 1)), + ] + + for pair in pairs: + dt, iso = pair + assert self.factory.get(iso) == self.factory.get(dt) + + with pytest.raises(TypeError): + self.factory.get((2014, 7, 1, 4)) + + with pytest.raises(TypeError): + self.factory.get((2014, 7)) + + with pytest.raises(ValueError): + self.factory.get((2014, 70, 1)) + + with pytest.raises(ValueError): + self.factory.get((2014, 7, 10)) + + def test_one_arg_other(self): + + with pytest.raises(TypeError): + self.factory.get(object()) + + def test_one_arg_bool(self): + + with pytest.raises(TypeError): + self.factory.get(False) + + with pytest.raises(TypeError): + self.factory.get(True) + + def test_two_args_datetime_tzinfo(self): + + result = self.factory.get(datetime(2013, 1, 1), tz.gettz("US/Pacific")) + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_datetime_tz_str(self): + + result = self.factory.get(datetime(2013, 1, 1), 
"US/Pacific") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_date_tzinfo(self): + + result = self.factory.get(date(2013, 1, 1), tz.gettz("US/Pacific")) + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_date_tz_str(self): + + result = self.factory.get(date(2013, 1, 1), "US/Pacific") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + + def test_two_args_datetime_other(self): + + with pytest.raises(TypeError): + self.factory.get(datetime.utcnow(), object()) + + def test_two_args_date_other(self): + + with pytest.raises(TypeError): + self.factory.get(date.today(), object()) + + def test_two_args_str_str(self): + + result = self.factory.get("2013-01-01", "YYYY-MM-DD") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_two_args_str_tzinfo(self): + + result = self.factory.get("2013-01-01", tzinfo=tz.gettz("US/Pacific")) + + assert_datetime_equality( + result._datetime, datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) + ) + + def test_two_args_twitter_format(self): + + # format returned by twitter API for created_at: + twitter_date = "Fri Apr 08 21:08:54 +0000 2016" + result = self.factory.get(twitter_date, "ddd MMM DD HH:mm:ss Z YYYY") + + assert result._datetime == datetime(2016, 4, 8, 21, 8, 54, tzinfo=tz.tzutc()) + + def test_two_args_str_list(self): + + result = self.factory.get("2013-01-01", ["MM/DD/YYYY", "YYYY-MM-DD"]) + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_two_args_unicode_unicode(self): + + result = self.factory.get(u"2013-01-01", u"YYYY-MM-DD") + + assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_two_args_other(self): + + with pytest.raises(TypeError): + self.factory.get(object(), object()) + + def test_three_args_with_tzinfo(self): + + timefmt = "YYYYMMDD" + d = "20150514" + + assert self.factory.get(d, 
timefmt, tzinfo=tz.tzlocal()) == datetime( + 2015, 5, 14, tzinfo=tz.tzlocal() + ) + + def test_three_args(self): + + assert self.factory.get(2013, 1, 1) == datetime(2013, 1, 1, tzinfo=tz.tzutc()) + + def test_full_kwargs(self): + + assert ( + self.factory.get( + year=2016, + month=7, + day=14, + hour=7, + minute=16, + second=45, + microsecond=631092, + ) + == datetime(2016, 7, 14, 7, 16, 45, 631092, tzinfo=tz.tzutc()) + ) + + def test_three_kwargs(self): + + assert self.factory.get(year=2016, month=7, day=14) == datetime( + 2016, 7, 14, 0, 0, tzinfo=tz.tzutc() + ) + + def test_tzinfo_string_kwargs(self): + result = self.factory.get("2019072807", "YYYYMMDDHH", tzinfo="UTC") + assert result._datetime == datetime(2019, 7, 28, 7, 0, 0, 0, tzinfo=tz.tzutc()) + + def test_insufficient_kwargs(self): + + with pytest.raises(TypeError): + self.factory.get(year=2016) + + with pytest.raises(TypeError): + self.factory.get(year=2016, month=7) + + def test_locale(self): + result = self.factory.get("2010", "YYYY", locale="ja") + assert result._datetime == datetime(2010, 1, 1, 0, 0, 0, 0, tzinfo=tz.tzutc()) + + # regression test for issue #701 + result = self.factory.get( + "Montag, 9. September 2019, 16:15-20:00", "dddd, D. 
MMMM YYYY", locale="de" + ) + assert result._datetime == datetime(2019, 9, 9, 0, 0, 0, 0, tzinfo=tz.tzutc()) + + def test_locale_kwarg_only(self): + res = self.factory.get(locale="ja") + assert res.tzinfo == tz.tzutc() + + def test_locale_with_tzinfo(self): + res = self.factory.get(locale="ja", tzinfo=tz.gettz("Asia/Tokyo")) + assert res.tzinfo == tz.gettz("Asia/Tokyo") + + +@pytest.mark.usefixtures("arrow_factory") +class TestUtcNow: + def test_utcnow(self): + + assert_datetime_equality( + self.factory.utcnow()._datetime, + datetime.utcnow().replace(tzinfo=tz.tzutc()), + ) + + +@pytest.mark.usefixtures("arrow_factory") +class TestNow: + def test_no_tz(self): + + assert_datetime_equality(self.factory.now(), datetime.now(tz.tzlocal())) + + def test_tzinfo(self): + + assert_datetime_equality( + self.factory.now(tz.gettz("EST")), datetime.now(tz.gettz("EST")) + ) + + def test_tz_str(self): + + assert_datetime_equality(self.factory.now("EST"), datetime.now(tz.gettz("EST"))) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py new file mode 100644 index 0000000000..e97aeb5dcc --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py @@ -0,0 +1,282 @@ +# -*- coding: utf-8 -*- +from datetime import datetime + +import pytest +import pytz +from dateutil import tz as dateutil_tz + +from arrow import ( + FORMAT_ATOM, + FORMAT_COOKIE, + FORMAT_RFC822, + FORMAT_RFC850, + FORMAT_RFC1036, + FORMAT_RFC1123, + FORMAT_RFC2822, + FORMAT_RFC3339, + FORMAT_RSS, + FORMAT_W3C, +) + +from .utils import make_full_tz_list + + +@pytest.mark.usefixtures("arrow_formatter") +class TestFormatterFormatToken: + def test_format(self): + + dt = datetime(2013, 2, 5, 12, 32, 51) + + result = self.formatter.format(dt, "MM-DD-YYYY hh:mm:ss a") + + assert result == "02-05-2013 12:32:51 pm" + + def test_year(self): + + dt = 
datetime(2013, 1, 1) + assert self.formatter._format_token(dt, "YYYY") == "2013" + assert self.formatter._format_token(dt, "YY") == "13" + + def test_month(self): + + dt = datetime(2013, 1, 1) + assert self.formatter._format_token(dt, "MMMM") == "January" + assert self.formatter._format_token(dt, "MMM") == "Jan" + assert self.formatter._format_token(dt, "MM") == "01" + assert self.formatter._format_token(dt, "M") == "1" + + def test_day(self): + + dt = datetime(2013, 2, 1) + assert self.formatter._format_token(dt, "DDDD") == "032" + assert self.formatter._format_token(dt, "DDD") == "32" + assert self.formatter._format_token(dt, "DD") == "01" + assert self.formatter._format_token(dt, "D") == "1" + assert self.formatter._format_token(dt, "Do") == "1st" + + assert self.formatter._format_token(dt, "dddd") == "Friday" + assert self.formatter._format_token(dt, "ddd") == "Fri" + assert self.formatter._format_token(dt, "d") == "5" + + def test_hour(self): + + dt = datetime(2013, 1, 1, 2) + assert self.formatter._format_token(dt, "HH") == "02" + assert self.formatter._format_token(dt, "H") == "2" + + dt = datetime(2013, 1, 1, 13) + assert self.formatter._format_token(dt, "HH") == "13" + assert self.formatter._format_token(dt, "H") == "13" + + dt = datetime(2013, 1, 1, 2) + assert self.formatter._format_token(dt, "hh") == "02" + assert self.formatter._format_token(dt, "h") == "2" + + dt = datetime(2013, 1, 1, 13) + assert self.formatter._format_token(dt, "hh") == "01" + assert self.formatter._format_token(dt, "h") == "1" + + # test that 12-hour time converts to '12' at midnight + dt = datetime(2013, 1, 1, 0) + assert self.formatter._format_token(dt, "hh") == "12" + assert self.formatter._format_token(dt, "h") == "12" + + def test_minute(self): + + dt = datetime(2013, 1, 1, 0, 1) + assert self.formatter._format_token(dt, "mm") == "01" + assert self.formatter._format_token(dt, "m") == "1" + + def test_second(self): + + dt = datetime(2013, 1, 1, 0, 0, 1) + assert 
self.formatter._format_token(dt, "ss") == "01" + assert self.formatter._format_token(dt, "s") == "1" + + def test_sub_second(self): + + dt = datetime(2013, 1, 1, 0, 0, 0, 123456) + assert self.formatter._format_token(dt, "SSSSSS") == "123456" + assert self.formatter._format_token(dt, "SSSSS") == "12345" + assert self.formatter._format_token(dt, "SSSS") == "1234" + assert self.formatter._format_token(dt, "SSS") == "123" + assert self.formatter._format_token(dt, "SS") == "12" + assert self.formatter._format_token(dt, "S") == "1" + + dt = datetime(2013, 1, 1, 0, 0, 0, 2000) + assert self.formatter._format_token(dt, "SSSSSS") == "002000" + assert self.formatter._format_token(dt, "SSSSS") == "00200" + assert self.formatter._format_token(dt, "SSSS") == "0020" + assert self.formatter._format_token(dt, "SSS") == "002" + assert self.formatter._format_token(dt, "SS") == "00" + assert self.formatter._format_token(dt, "S") == "0" + + def test_timestamp(self): + + timestamp = 1588437009.8952794 + dt = datetime.utcfromtimestamp(timestamp) + expected = str(int(timestamp)) + assert self.formatter._format_token(dt, "X") == expected + + # Must round because time.time() may return a float with greater + # than 6 digits of precision + expected = str(int(timestamp * 1000000)) + assert self.formatter._format_token(dt, "x") == expected + + def test_timezone(self): + + dt = datetime.utcnow().replace(tzinfo=dateutil_tz.gettz("US/Pacific")) + + result = self.formatter._format_token(dt, "ZZ") + assert result == "-07:00" or result == "-08:00" + + result = self.formatter._format_token(dt, "Z") + assert result == "-0700" or result == "-0800" + + @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) + def test_timezone_formatter(self, full_tz_name): + + # This test will fail if we use "now" as date as soon as we change from/to DST + dt = datetime(1986, 2, 14, tzinfo=pytz.timezone("UTC")).replace( + tzinfo=dateutil_tz.gettz(full_tz_name) + ) + abbreviation = dt.tzname() + + result = 
self.formatter._format_token(dt, "ZZZ") + assert result == abbreviation + + def test_am_pm(self): + + dt = datetime(2012, 1, 1, 11) + assert self.formatter._format_token(dt, "a") == "am" + assert self.formatter._format_token(dt, "A") == "AM" + + dt = datetime(2012, 1, 1, 13) + assert self.formatter._format_token(dt, "a") == "pm" + assert self.formatter._format_token(dt, "A") == "PM" + + def test_week(self): + dt = datetime(2017, 5, 19) + assert self.formatter._format_token(dt, "W") == "2017-W20-5" + + # make sure week is zero padded when needed + dt_early = datetime(2011, 1, 20) + assert self.formatter._format_token(dt_early, "W") == "2011-W03-4" + + def test_nonsense(self): + dt = datetime(2012, 1, 1, 11) + assert self.formatter._format_token(dt, None) is None + assert self.formatter._format_token(dt, "NONSENSE") is None + + def test_escape(self): + + assert ( + self.formatter.format( + datetime(2015, 12, 10, 17, 9), "MMMM D, YYYY [at] h:mma" + ) + == "December 10, 2015 at 5:09pm" + ) + + assert ( + self.formatter.format( + datetime(2015, 12, 10, 17, 9), "[MMMM] M D, YYYY [at] h:mma" + ) + == "MMMM 12 10, 2015 at 5:09pm" + ) + + assert ( + self.formatter.format( + datetime(1990, 11, 25), + "[It happened on] MMMM Do [in the year] YYYY [a long time ago]", + ) + == "It happened on November 25th in the year 1990 a long time ago" + ) + + assert ( + self.formatter.format( + datetime(1990, 11, 25), + "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]", + ) + == "It happened on November 25th in the year 1990 a long time ago" + ) + + assert ( + self.formatter.format( + datetime(1, 1, 1), "[I'm][ entirely][ escaped,][ weee!]" + ) + == "I'm entirely escaped, weee!" 
+ ) + + # Special RegEx characters + assert ( + self.formatter.format( + datetime(2017, 12, 31, 2, 0), "MMM DD, YYYY |^${}().*+?<>-& h:mm A" + ) + == "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM" + ) + + # Escaping is atomic: brackets inside brackets are treated literally + assert self.formatter.format(datetime(1, 1, 1), "[[[ ]]") == "[[ ]" + + +@pytest.mark.usefixtures("arrow_formatter", "time_1975_12_25") +class TestFormatterBuiltinFormats: + def test_atom(self): + assert ( + self.formatter.format(self.datetime, FORMAT_ATOM) + == "1975-12-25 14:15:16-05:00" + ) + + def test_cookie(self): + assert ( + self.formatter.format(self.datetime, FORMAT_COOKIE) + == "Thursday, 25-Dec-1975 14:15:16 EST" + ) + + def test_rfc_822(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC822) + == "Thu, 25 Dec 75 14:15:16 -0500" + ) + + def test_rfc_850(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC850) + == "Thursday, 25-Dec-75 14:15:16 EST" + ) + + def test_rfc_1036(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC1036) + == "Thu, 25 Dec 75 14:15:16 -0500" + ) + + def test_rfc_1123(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC1123) + == "Thu, 25 Dec 1975 14:15:16 -0500" + ) + + def test_rfc_2822(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC2822) + == "Thu, 25 Dec 1975 14:15:16 -0500" + ) + + def test_rfc3339(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RFC3339) + == "1975-12-25 14:15:16-05:00" + ) + + def test_rss(self): + assert ( + self.formatter.format(self.datetime, FORMAT_RSS) + == "Thu, 25 Dec 1975 14:15:16 -0500" + ) + + def test_w3c(self): + assert ( + self.formatter.format(self.datetime, FORMAT_W3C) + == "1975-12-25 14:15:16-05:00" + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py new file mode 100644 index 0000000000..006ccdd5ba 
--- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py @@ -0,0 +1,1352 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +import pytest + +from arrow import arrow, locales + + +@pytest.mark.usefixtures("lang_locales") +class TestLocaleValidation: + """Validate locales to ensure that translations are valid and complete""" + + def test_locale_validation(self): + + for _, locale_cls in self.locales.items(): + # 7 days + 1 spacer to allow for 1-indexing of months + assert len(locale_cls.day_names) == 8 + assert locale_cls.day_names[0] == "" + # ensure that all string from index 1 onward are valid (not blank or None) + assert all(locale_cls.day_names[1:]) + + assert len(locale_cls.day_abbreviations) == 8 + assert locale_cls.day_abbreviations[0] == "" + assert all(locale_cls.day_abbreviations[1:]) + + # 12 months + 1 spacer to allow for 1-indexing of months + assert len(locale_cls.month_names) == 13 + assert locale_cls.month_names[0] == "" + assert all(locale_cls.month_names[1:]) + + assert len(locale_cls.month_abbreviations) == 13 + assert locale_cls.month_abbreviations[0] == "" + assert all(locale_cls.month_abbreviations[1:]) + + assert len(locale_cls.names) > 0 + assert locale_cls.past is not None + assert locale_cls.future is not None + + +class TestModule: + def test_get_locale(self, mocker): + mock_locale = mocker.Mock() + mock_locale_cls = mocker.Mock() + mock_locale_cls.return_value = mock_locale + + with pytest.raises(ValueError): + arrow.locales.get_locale("locale_name") + + cls_dict = arrow.locales._locales + mocker.patch.dict(cls_dict, {"locale_name": mock_locale_cls}) + + result = arrow.locales.get_locale("locale_name") + + assert result == mock_locale + + def test_get_locale_by_class_name(self, mocker): + mock_locale_cls = mocker.Mock() + mock_locale_obj = mock_locale_cls.return_value = mocker.Mock() + + globals_fn = mocker.Mock() + globals_fn.return_value = {"NonExistentLocale": 
mock_locale_cls} + + with pytest.raises(ValueError): + arrow.locales.get_locale_by_class_name("NonExistentLocale") + + mocker.patch.object(locales, "globals", globals_fn) + result = arrow.locales.get_locale_by_class_name("NonExistentLocale") + + mock_locale_cls.assert_called_once_with() + assert result == mock_locale_obj + + def test_locales(self): + + assert len(locales._locales) > 0 + + +@pytest.mark.usefixtures("lang_locale") +class TestEnglishLocale: + def test_describe(self): + assert self.locale.describe("now", only_distance=True) == "instantly" + assert self.locale.describe("now", only_distance=False) == "just now" + + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 hours" + assert self.locale._format_timeframe("hour", 0) == "an hour" + + def test_format_relative_now(self): + + result = self.locale._format_relative("just now", "now", 0) + + assert result == "just now" + + def test_format_relative_past(self): + + result = self.locale._format_relative("an hour", "hour", 1) + + assert result == "in an hour" + + def test_format_relative_future(self): + + result = self.locale._format_relative("an hour", "hour", -1) + + assert result == "an hour ago" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(0) == "0th" + assert self.locale.ordinal_number(1) == "1st" + assert self.locale.ordinal_number(2) == "2nd" + assert self.locale.ordinal_number(3) == "3rd" + assert self.locale.ordinal_number(4) == "4th" + assert self.locale.ordinal_number(10) == "10th" + assert self.locale.ordinal_number(11) == "11th" + assert self.locale.ordinal_number(12) == "12th" + assert self.locale.ordinal_number(13) == "13th" + assert self.locale.ordinal_number(14) == "14th" + assert self.locale.ordinal_number(21) == "21st" + assert self.locale.ordinal_number(22) == "22nd" + assert self.locale.ordinal_number(23) == "23rd" + assert self.locale.ordinal_number(24) == "24th" + + assert self.locale.ordinal_number(100) == "100th" + 
assert self.locale.ordinal_number(101) == "101st" + assert self.locale.ordinal_number(102) == "102nd" + assert self.locale.ordinal_number(103) == "103rd" + assert self.locale.ordinal_number(104) == "104th" + assert self.locale.ordinal_number(110) == "110th" + assert self.locale.ordinal_number(111) == "111th" + assert self.locale.ordinal_number(112) == "112th" + assert self.locale.ordinal_number(113) == "113th" + assert self.locale.ordinal_number(114) == "114th" + assert self.locale.ordinal_number(121) == "121st" + assert self.locale.ordinal_number(122) == "122nd" + assert self.locale.ordinal_number(123) == "123rd" + assert self.locale.ordinal_number(124) == "124th" + + def test_meridian_invalid_token(self): + assert self.locale.meridian(7, None) is None + assert self.locale.meridian(7, "B") is None + assert self.locale.meridian(7, "NONSENSE") is None + + +@pytest.mark.usefixtures("lang_locale") +class TestItalianLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1º" + + +@pytest.mark.usefixtures("lang_locale") +class TestSpanishLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1º" + + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "ahora" + assert self.locale._format_timeframe("seconds", 1) == "1 segundos" + assert self.locale._format_timeframe("seconds", 3) == "3 segundos" + assert self.locale._format_timeframe("seconds", 30) == "30 segundos" + assert self.locale._format_timeframe("minute", 1) == "un minuto" + assert self.locale._format_timeframe("minutes", 4) == "4 minutos" + assert self.locale._format_timeframe("minutes", 40) == "40 minutos" + assert self.locale._format_timeframe("hour", 1) == "una hora" + assert self.locale._format_timeframe("hours", 5) == "5 horas" + assert self.locale._format_timeframe("hours", 23) == "23 horas" + assert self.locale._format_timeframe("day", 1) == "un día" + assert self.locale._format_timeframe("days", 6) == "6 días" + 
assert self.locale._format_timeframe("days", 12) == "12 días" + assert self.locale._format_timeframe("week", 1) == "una semana" + assert self.locale._format_timeframe("weeks", 2) == "2 semanas" + assert self.locale._format_timeframe("weeks", 3) == "3 semanas" + assert self.locale._format_timeframe("month", 1) == "un mes" + assert self.locale._format_timeframe("months", 7) == "7 meses" + assert self.locale._format_timeframe("months", 11) == "11 meses" + assert self.locale._format_timeframe("year", 1) == "un año" + assert self.locale._format_timeframe("years", 8) == "8 años" + assert self.locale._format_timeframe("years", 12) == "12 años" + + assert self.locale._format_timeframe("now", 0) == "ahora" + assert self.locale._format_timeframe("seconds", -1) == "1 segundos" + assert self.locale._format_timeframe("seconds", -9) == "9 segundos" + assert self.locale._format_timeframe("seconds", -12) == "12 segundos" + assert self.locale._format_timeframe("minute", -1) == "un minuto" + assert self.locale._format_timeframe("minutes", -2) == "2 minutos" + assert self.locale._format_timeframe("minutes", -10) == "10 minutos" + assert self.locale._format_timeframe("hour", -1) == "una hora" + assert self.locale._format_timeframe("hours", -3) == "3 horas" + assert self.locale._format_timeframe("hours", -11) == "11 horas" + assert self.locale._format_timeframe("day", -1) == "un día" + assert self.locale._format_timeframe("days", -2) == "2 días" + assert self.locale._format_timeframe("days", -12) == "12 días" + assert self.locale._format_timeframe("week", -1) == "una semana" + assert self.locale._format_timeframe("weeks", -2) == "2 semanas" + assert self.locale._format_timeframe("weeks", -3) == "3 semanas" + assert self.locale._format_timeframe("month", -1) == "un mes" + assert self.locale._format_timeframe("months", -3) == "3 meses" + assert self.locale._format_timeframe("months", -13) == "13 meses" + assert self.locale._format_timeframe("year", -1) == "un año" + assert 
self.locale._format_timeframe("years", -4) == "4 años" + assert self.locale._format_timeframe("years", -14) == "14 años" + + +@pytest.mark.usefixtures("lang_locale") +class TestFrenchLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1er" + assert self.locale.ordinal_number(2) == "2e" + + def test_month_abbreviation(self): + assert "juil" in self.locale.month_abbreviations + + +@pytest.mark.usefixtures("lang_locale") +class TestFrenchCanadianLocale: + def test_month_abbreviation(self): + assert "juill" in self.locale.month_abbreviations + + +@pytest.mark.usefixtures("lang_locale") +class TestRussianLocale: + def test_plurals2(self): + assert self.locale._format_timeframe("hours", 0) == "0 часов" + assert self.locale._format_timeframe("hours", 1) == "1 час" + assert self.locale._format_timeframe("hours", 2) == "2 часа" + assert self.locale._format_timeframe("hours", 4) == "4 часа" + assert self.locale._format_timeframe("hours", 5) == "5 часов" + assert self.locale._format_timeframe("hours", 21) == "21 час" + assert self.locale._format_timeframe("hours", 22) == "22 часа" + assert self.locale._format_timeframe("hours", 25) == "25 часов" + + # feminine grammatical gender should be tested separately + assert self.locale._format_timeframe("minutes", 0) == "0 минут" + assert self.locale._format_timeframe("minutes", 1) == "1 минуту" + assert self.locale._format_timeframe("minutes", 2) == "2 минуты" + assert self.locale._format_timeframe("minutes", 4) == "4 минуты" + assert self.locale._format_timeframe("minutes", 5) == "5 минут" + assert self.locale._format_timeframe("minutes", 21) == "21 минуту" + assert self.locale._format_timeframe("minutes", 22) == "22 минуты" + assert self.locale._format_timeframe("minutes", 25) == "25 минут" + + +@pytest.mark.usefixtures("lang_locale") +class TestPolishLocale: + def test_plurals(self): + + assert self.locale._format_timeframe("seconds", 0) == "0 sekund" + assert self.locale._format_timeframe("second", 
1) == "sekundę" + assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" + assert self.locale._format_timeframe("seconds", 5) == "5 sekund" + assert self.locale._format_timeframe("seconds", 21) == "21 sekund" + assert self.locale._format_timeframe("seconds", 22) == "22 sekundy" + assert self.locale._format_timeframe("seconds", 25) == "25 sekund" + + assert self.locale._format_timeframe("minutes", 0) == "0 minut" + assert self.locale._format_timeframe("minute", 1) == "minutę" + assert self.locale._format_timeframe("minutes", 2) == "2 minuty" + assert self.locale._format_timeframe("minutes", 5) == "5 minut" + assert self.locale._format_timeframe("minutes", 21) == "21 minut" + assert self.locale._format_timeframe("minutes", 22) == "22 minuty" + assert self.locale._format_timeframe("minutes", 25) == "25 minut" + + assert self.locale._format_timeframe("hours", 0) == "0 godzin" + assert self.locale._format_timeframe("hour", 1) == "godzinę" + assert self.locale._format_timeframe("hours", 2) == "2 godziny" + assert self.locale._format_timeframe("hours", 5) == "5 godzin" + assert self.locale._format_timeframe("hours", 21) == "21 godzin" + assert self.locale._format_timeframe("hours", 22) == "22 godziny" + assert self.locale._format_timeframe("hours", 25) == "25 godzin" + + assert self.locale._format_timeframe("weeks", 0) == "0 tygodni" + assert self.locale._format_timeframe("week", 1) == "tydzień" + assert self.locale._format_timeframe("weeks", 2) == "2 tygodnie" + assert self.locale._format_timeframe("weeks", 5) == "5 tygodni" + assert self.locale._format_timeframe("weeks", 21) == "21 tygodni" + assert self.locale._format_timeframe("weeks", 22) == "22 tygodnie" + assert self.locale._format_timeframe("weeks", 25) == "25 tygodni" + + assert self.locale._format_timeframe("months", 0) == "0 miesięcy" + assert self.locale._format_timeframe("month", 1) == "miesiąc" + assert self.locale._format_timeframe("months", 2) == "2 miesiące" + assert 
self.locale._format_timeframe("months", 5) == "5 miesięcy" + assert self.locale._format_timeframe("months", 21) == "21 miesięcy" + assert self.locale._format_timeframe("months", 22) == "22 miesiące" + assert self.locale._format_timeframe("months", 25) == "25 miesięcy" + + assert self.locale._format_timeframe("years", 0) == "0 lat" + assert self.locale._format_timeframe("year", 1) == "rok" + assert self.locale._format_timeframe("years", 2) == "2 lata" + assert self.locale._format_timeframe("years", 5) == "5 lat" + assert self.locale._format_timeframe("years", 21) == "21 lat" + assert self.locale._format_timeframe("years", 22) == "22 lata" + assert self.locale._format_timeframe("years", 25) == "25 lat" + + +@pytest.mark.usefixtures("lang_locale") +class TestIcelandicLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("minute", -1) == "einni mínútu" + assert self.locale._format_timeframe("minute", 1) == "eina mínútu" + + assert self.locale._format_timeframe("hours", -2) == "2 tímum" + assert self.locale._format_timeframe("hours", 2) == "2 tíma" + assert self.locale._format_timeframe("now", 0) == "rétt í þessu" + + +@pytest.mark.usefixtures("lang_locale") +class TestMalayalamLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 മണിക്കൂർ" + assert self.locale._format_timeframe("hour", 0) == "ഒരു മണിക്കൂർ" + + def test_format_relative_now(self): + + result = self.locale._format_relative("ഇപ്പോൾ", "now", 0) + + assert result == "ഇപ്പോൾ" + + def test_format_relative_past(self): + + result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", 1) + assert result == "ഒരു മണിക്കൂർ ശേഷം" + + def test_format_relative_future(self): + + result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", -1) + assert result == "ഒരു മണിക്കൂർ മുമ്പ്" + + +@pytest.mark.usefixtures("lang_locale") +class TestHindiLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 
घंटे" + assert self.locale._format_timeframe("hour", 0) == "एक घंटा" + + def test_format_relative_now(self): + + result = self.locale._format_relative("अभी", "now", 0) + assert result == "अभी" + + def test_format_relative_past(self): + + result = self.locale._format_relative("एक घंटा", "hour", 1) + assert result == "एक घंटा बाद" + + def test_format_relative_future(self): + + result = self.locale._format_relative("एक घंटा", "hour", -1) + assert result == "एक घंटा पहले" + + +@pytest.mark.usefixtures("lang_locale") +class TestCzechLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("hours", 2) == "2 hodiny" + assert self.locale._format_timeframe("hours", 5) == "5 hodin" + assert self.locale._format_timeframe("hour", 0) == "0 hodin" + assert self.locale._format_timeframe("hours", -2) == "2 hodinami" + assert self.locale._format_timeframe("hours", -5) == "5 hodinami" + assert self.locale._format_timeframe("now", 0) == "Teď" + + assert self.locale._format_timeframe("weeks", 2) == "2 týdny" + assert self.locale._format_timeframe("weeks", 5) == "5 týdnů" + assert self.locale._format_timeframe("week", 0) == "0 týdnů" + assert self.locale._format_timeframe("weeks", -2) == "2 týdny" + assert self.locale._format_timeframe("weeks", -5) == "5 týdny" + + def test_format_relative_now(self): + + result = self.locale._format_relative("Teď", "now", 0) + assert result == "Teď" + + def test_format_relative_future(self): + + result = self.locale._format_relative("hodinu", "hour", 1) + assert result == "Za hodinu" + + def test_format_relative_past(self): + + result = self.locale._format_relative("hodinou", "hour", -1) + assert result == "Před hodinou" + + +@pytest.mark.usefixtures("lang_locale") +class TestSlovakLocale: + def test_format_timeframe(self): + + assert self.locale._format_timeframe("seconds", -5) == "5 sekundami" + assert self.locale._format_timeframe("seconds", -2) == "2 sekundami" + assert self.locale._format_timeframe("second", -1) == 
"sekundou" + assert self.locale._format_timeframe("second", 0) == "0 sekúnd" + assert self.locale._format_timeframe("second", 1) == "sekundu" + assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" + assert self.locale._format_timeframe("seconds", 5) == "5 sekúnd" + + assert self.locale._format_timeframe("minutes", -5) == "5 minútami" + assert self.locale._format_timeframe("minutes", -2) == "2 minútami" + assert self.locale._format_timeframe("minute", -1) == "minútou" + assert self.locale._format_timeframe("minute", 0) == "0 minút" + assert self.locale._format_timeframe("minute", 1) == "minútu" + assert self.locale._format_timeframe("minutes", 2) == "2 minúty" + assert self.locale._format_timeframe("minutes", 5) == "5 minút" + + assert self.locale._format_timeframe("hours", -5) == "5 hodinami" + assert self.locale._format_timeframe("hours", -2) == "2 hodinami" + assert self.locale._format_timeframe("hour", -1) == "hodinou" + assert self.locale._format_timeframe("hour", 0) == "0 hodín" + assert self.locale._format_timeframe("hour", 1) == "hodinu" + assert self.locale._format_timeframe("hours", 2) == "2 hodiny" + assert self.locale._format_timeframe("hours", 5) == "5 hodín" + + assert self.locale._format_timeframe("days", -5) == "5 dňami" + assert self.locale._format_timeframe("days", -2) == "2 dňami" + assert self.locale._format_timeframe("day", -1) == "dňom" + assert self.locale._format_timeframe("day", 0) == "0 dní" + assert self.locale._format_timeframe("day", 1) == "deň" + assert self.locale._format_timeframe("days", 2) == "2 dni" + assert self.locale._format_timeframe("days", 5) == "5 dní" + + assert self.locale._format_timeframe("weeks", -5) == "5 týždňami" + assert self.locale._format_timeframe("weeks", -2) == "2 týždňami" + assert self.locale._format_timeframe("week", -1) == "týždňom" + assert self.locale._format_timeframe("week", 0) == "0 týždňov" + assert self.locale._format_timeframe("week", 1) == "týždeň" + assert 
self.locale._format_timeframe("weeks", 2) == "2 týždne" + assert self.locale._format_timeframe("weeks", 5) == "5 týždňov" + + assert self.locale._format_timeframe("months", -5) == "5 mesiacmi" + assert self.locale._format_timeframe("months", -2) == "2 mesiacmi" + assert self.locale._format_timeframe("month", -1) == "mesiacom" + assert self.locale._format_timeframe("month", 0) == "0 mesiacov" + assert self.locale._format_timeframe("month", 1) == "mesiac" + assert self.locale._format_timeframe("months", 2) == "2 mesiace" + assert self.locale._format_timeframe("months", 5) == "5 mesiacov" + + assert self.locale._format_timeframe("years", -5) == "5 rokmi" + assert self.locale._format_timeframe("years", -2) == "2 rokmi" + assert self.locale._format_timeframe("year", -1) == "rokom" + assert self.locale._format_timeframe("year", 0) == "0 rokov" + assert self.locale._format_timeframe("year", 1) == "rok" + assert self.locale._format_timeframe("years", 2) == "2 roky" + assert self.locale._format_timeframe("years", 5) == "5 rokov" + + assert self.locale._format_timeframe("now", 0) == "Teraz" + + def test_format_relative_now(self): + + result = self.locale._format_relative("Teraz", "now", 0) + assert result == "Teraz" + + def test_format_relative_future(self): + + result = self.locale._format_relative("hodinu", "hour", 1) + assert result == "O hodinu" + + def test_format_relative_past(self): + + result = self.locale._format_relative("hodinou", "hour", -1) + assert result == "Pred hodinou" + + +@pytest.mark.usefixtures("lang_locale") +class TestBulgarianLocale: + def test_plurals2(self): + assert self.locale._format_timeframe("hours", 0) == "0 часа" + assert self.locale._format_timeframe("hours", 1) == "1 час" + assert self.locale._format_timeframe("hours", 2) == "2 часа" + assert self.locale._format_timeframe("hours", 4) == "4 часа" + assert self.locale._format_timeframe("hours", 5) == "5 часа" + assert self.locale._format_timeframe("hours", 21) == "21 час" + assert 
self.locale._format_timeframe("hours", 22) == "22 часа" + assert self.locale._format_timeframe("hours", 25) == "25 часа" + + # feminine grammatical gender should be tested separately + assert self.locale._format_timeframe("minutes", 0) == "0 минути" + assert self.locale._format_timeframe("minutes", 1) == "1 минута" + assert self.locale._format_timeframe("minutes", 2) == "2 минути" + assert self.locale._format_timeframe("minutes", 4) == "4 минути" + assert self.locale._format_timeframe("minutes", 5) == "5 минути" + assert self.locale._format_timeframe("minutes", 21) == "21 минута" + assert self.locale._format_timeframe("minutes", 22) == "22 минути" + assert self.locale._format_timeframe("minutes", 25) == "25 минути" + + +@pytest.mark.usefixtures("lang_locale") +class TestMacedonianLocale: + def test_singles_mk(self): + assert self.locale._format_timeframe("second", 1) == "една секунда" + assert self.locale._format_timeframe("minute", 1) == "една минута" + assert self.locale._format_timeframe("hour", 1) == "еден саат" + assert self.locale._format_timeframe("day", 1) == "еден ден" + assert self.locale._format_timeframe("week", 1) == "една недела" + assert self.locale._format_timeframe("month", 1) == "еден месец" + assert self.locale._format_timeframe("year", 1) == "една година" + + def test_meridians_mk(self): + assert self.locale.meridian(7, "A") == "претпладне" + assert self.locale.meridian(18, "A") == "попладне" + assert self.locale.meridian(10, "a") == "дп" + assert self.locale.meridian(22, "a") == "пп" + + def test_describe_mk(self): + assert self.locale.describe("second", only_distance=True) == "една секунда" + assert self.locale.describe("second", only_distance=False) == "за една секунда" + assert self.locale.describe("minute", only_distance=True) == "една минута" + assert self.locale.describe("minute", only_distance=False) == "за една минута" + assert self.locale.describe("hour", only_distance=True) == "еден саат" + assert self.locale.describe("hour", 
only_distance=False) == "за еден саат" + assert self.locale.describe("day", only_distance=True) == "еден ден" + assert self.locale.describe("day", only_distance=False) == "за еден ден" + assert self.locale.describe("week", only_distance=True) == "една недела" + assert self.locale.describe("week", only_distance=False) == "за една недела" + assert self.locale.describe("month", only_distance=True) == "еден месец" + assert self.locale.describe("month", only_distance=False) == "за еден месец" + assert self.locale.describe("year", only_distance=True) == "една година" + assert self.locale.describe("year", only_distance=False) == "за една година" + + def test_relative_mk(self): + # time + assert self.locale._format_relative("сега", "now", 0) == "сега" + assert self.locale._format_relative("1 секунда", "seconds", 1) == "за 1 секунда" + assert self.locale._format_relative("1 минута", "minutes", 1) == "за 1 минута" + assert self.locale._format_relative("1 саат", "hours", 1) == "за 1 саат" + assert self.locale._format_relative("1 ден", "days", 1) == "за 1 ден" + assert self.locale._format_relative("1 недела", "weeks", 1) == "за 1 недела" + assert self.locale._format_relative("1 месец", "months", 1) == "за 1 месец" + assert self.locale._format_relative("1 година", "years", 1) == "за 1 година" + assert ( + self.locale._format_relative("1 секунда", "seconds", -1) == "пред 1 секунда" + ) + assert ( + self.locale._format_relative("1 минута", "minutes", -1) == "пред 1 минута" + ) + assert self.locale._format_relative("1 саат", "hours", -1) == "пред 1 саат" + assert self.locale._format_relative("1 ден", "days", -1) == "пред 1 ден" + assert self.locale._format_relative("1 недела", "weeks", -1) == "пред 1 недела" + assert self.locale._format_relative("1 месец", "months", -1) == "пред 1 месец" + assert self.locale._format_relative("1 година", "years", -1) == "пред 1 година" + + def test_plurals_mk(self): + # Seconds + assert self.locale._format_timeframe("seconds", 0) == "0 секунди" + 
assert self.locale._format_timeframe("seconds", 1) == "1 секунда" + assert self.locale._format_timeframe("seconds", 2) == "2 секунди" + assert self.locale._format_timeframe("seconds", 4) == "4 секунди" + assert self.locale._format_timeframe("seconds", 5) == "5 секунди" + assert self.locale._format_timeframe("seconds", 21) == "21 секунда" + assert self.locale._format_timeframe("seconds", 22) == "22 секунди" + assert self.locale._format_timeframe("seconds", 25) == "25 секунди" + + # Minutes + assert self.locale._format_timeframe("minutes", 0) == "0 минути" + assert self.locale._format_timeframe("minutes", 1) == "1 минута" + assert self.locale._format_timeframe("minutes", 2) == "2 минути" + assert self.locale._format_timeframe("minutes", 4) == "4 минути" + assert self.locale._format_timeframe("minutes", 5) == "5 минути" + assert self.locale._format_timeframe("minutes", 21) == "21 минута" + assert self.locale._format_timeframe("minutes", 22) == "22 минути" + assert self.locale._format_timeframe("minutes", 25) == "25 минути" + + # Hours + assert self.locale._format_timeframe("hours", 0) == "0 саати" + assert self.locale._format_timeframe("hours", 1) == "1 саат" + assert self.locale._format_timeframe("hours", 2) == "2 саати" + assert self.locale._format_timeframe("hours", 4) == "4 саати" + assert self.locale._format_timeframe("hours", 5) == "5 саати" + assert self.locale._format_timeframe("hours", 21) == "21 саат" + assert self.locale._format_timeframe("hours", 22) == "22 саати" + assert self.locale._format_timeframe("hours", 25) == "25 саати" + + # Days + assert self.locale._format_timeframe("days", 0) == "0 дена" + assert self.locale._format_timeframe("days", 1) == "1 ден" + assert self.locale._format_timeframe("days", 2) == "2 дена" + assert self.locale._format_timeframe("days", 3) == "3 дена" + assert self.locale._format_timeframe("days", 21) == "21 ден" + + # Weeks + assert self.locale._format_timeframe("weeks", 0) == "0 недели" + assert 
self.locale._format_timeframe("weeks", 1) == "1 недела" + assert self.locale._format_timeframe("weeks", 2) == "2 недели" + assert self.locale._format_timeframe("weeks", 4) == "4 недели" + assert self.locale._format_timeframe("weeks", 5) == "5 недели" + assert self.locale._format_timeframe("weeks", 21) == "21 недела" + assert self.locale._format_timeframe("weeks", 22) == "22 недели" + assert self.locale._format_timeframe("weeks", 25) == "25 недели" + + # Months + assert self.locale._format_timeframe("months", 0) == "0 месеци" + assert self.locale._format_timeframe("months", 1) == "1 месец" + assert self.locale._format_timeframe("months", 2) == "2 месеци" + assert self.locale._format_timeframe("months", 4) == "4 месеци" + assert self.locale._format_timeframe("months", 5) == "5 месеци" + assert self.locale._format_timeframe("months", 21) == "21 месец" + assert self.locale._format_timeframe("months", 22) == "22 месеци" + assert self.locale._format_timeframe("months", 25) == "25 месеци" + + # Years + assert self.locale._format_timeframe("years", 1) == "1 година" + assert self.locale._format_timeframe("years", 2) == "2 години" + assert self.locale._format_timeframe("years", 5) == "5 години" + + def test_multi_describe_mk(self): + describe = self.locale.describe_multi + + fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] + assert describe(fulltest) == "за 5 години 1 недела 1 саат 6 минути" + seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] + assert describe(seconds4000_0days) == "за 0 дена 1 саат 6 минути" + seconds4000 = [("hours", 1), ("minutes", 6)] + assert describe(seconds4000) == "за 1 саат 6 минути" + assert describe(seconds4000, only_distance=True) == "1 саат 6 минути" + seconds3700 = [("hours", 1), ("minutes", 1)] + assert describe(seconds3700) == "за 1 саат 1 минута" + seconds300_0hours = [("hours", 0), ("minutes", 5)] + assert describe(seconds300_0hours) == "за 0 саати 5 минути" + seconds300 = [("minutes", 5)] + assert 
describe(seconds300) == "за 5 минути" + seconds60 = [("minutes", 1)] + assert describe(seconds60) == "за 1 минута" + assert describe(seconds60, only_distance=True) == "1 минута" + seconds60 = [("seconds", 1)] + assert describe(seconds60) == "за 1 секунда" + assert describe(seconds60, only_distance=True) == "1 секунда" + + +@pytest.mark.usefixtures("time_2013_01_01") +@pytest.mark.usefixtures("lang_locale") +class TestHebrewLocale: + def test_couple_of_timeframe(self): + assert self.locale._format_timeframe("days", 1) == "יום" + assert self.locale._format_timeframe("days", 2) == "יומיים" + assert self.locale._format_timeframe("days", 3) == "3 ימים" + + assert self.locale._format_timeframe("hours", 1) == "שעה" + assert self.locale._format_timeframe("hours", 2) == "שעתיים" + assert self.locale._format_timeframe("hours", 3) == "3 שעות" + + assert self.locale._format_timeframe("week", 1) == "שבוע" + assert self.locale._format_timeframe("weeks", 2) == "שבועיים" + assert self.locale._format_timeframe("weeks", 3) == "3 שבועות" + + assert self.locale._format_timeframe("months", 1) == "חודש" + assert self.locale._format_timeframe("months", 2) == "חודשיים" + assert self.locale._format_timeframe("months", 4) == "4 חודשים" + + assert self.locale._format_timeframe("years", 1) == "שנה" + assert self.locale._format_timeframe("years", 2) == "שנתיים" + assert self.locale._format_timeframe("years", 5) == "5 שנים" + + def test_describe_multi(self): + describe = self.locale.describe_multi + + fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] + assert describe(fulltest) == "בעוד 5 שנים, שבוע, שעה ו־6 דקות" + seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] + assert describe(seconds4000_0days) == "בעוד 0 ימים, שעה ו־6 דקות" + seconds4000 = [("hours", 1), ("minutes", 6)] + assert describe(seconds4000) == "בעוד שעה ו־6 דקות" + assert describe(seconds4000, only_distance=True) == "שעה ו־6 דקות" + seconds3700 = [("hours", 1), ("minutes", 1)] + assert 
describe(seconds3700) == "בעוד שעה ודקה" + seconds300_0hours = [("hours", 0), ("minutes", 5)] + assert describe(seconds300_0hours) == "בעוד 0 שעות ו־5 דקות" + seconds300 = [("minutes", 5)] + assert describe(seconds300) == "בעוד 5 דקות" + seconds60 = [("minutes", 1)] + assert describe(seconds60) == "בעוד דקה" + assert describe(seconds60, only_distance=True) == "דקה" + + +@pytest.mark.usefixtures("lang_locale") +class TestMarathiLocale: + def test_dateCoreFunctionality(self): + dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) + assert self.locale.month_name(dt.month) == "एप्रिल" + assert self.locale.month_abbreviation(dt.month) == "एप्रि" + assert self.locale.day_name(dt.isoweekday()) == "शनिवार" + assert self.locale.day_abbreviation(dt.isoweekday()) == "शनि" + + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 2) == "2 तास" + assert self.locale._format_timeframe("hour", 0) == "एक तास" + + def test_format_relative_now(self): + result = self.locale._format_relative("सद्य", "now", 0) + assert result == "सद्य" + + def test_format_relative_past(self): + result = self.locale._format_relative("एक तास", "hour", 1) + assert result == "एक तास नंतर" + + def test_format_relative_future(self): + result = self.locale._format_relative("एक तास", "hour", -1) + assert result == "एक तास आधी" + + # Not currently implemented + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1" + + +@pytest.mark.usefixtures("lang_locale") +class TestFinnishLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 2) == ("2 tuntia", "2 tunnin") + assert self.locale._format_timeframe("hour", 0) == ("tunti", "tunnin") + + def test_format_relative_now(self): + result = self.locale._format_relative(["juuri nyt", "juuri nyt"], "now", 0) + assert result == "juuri nyt" + + def test_format_relative_past(self): + result = self.locale._format_relative(["tunti", "tunnin"], "hour", 1) + assert result == "tunnin kuluttua" + + def 
test_format_relative_future(self): + result = self.locale._format_relative(["tunti", "tunnin"], "hour", -1) + assert result == "tunti sitten" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1." + + +@pytest.mark.usefixtures("lang_locale") +class TestGermanLocale: + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1." + + def test_define(self): + assert self.locale.describe("minute", only_distance=True) == "eine Minute" + assert self.locale.describe("minute", only_distance=False) == "in einer Minute" + assert self.locale.describe("hour", only_distance=True) == "eine Stunde" + assert self.locale.describe("hour", only_distance=False) == "in einer Stunde" + assert self.locale.describe("day", only_distance=True) == "ein Tag" + assert self.locale.describe("day", only_distance=False) == "in einem Tag" + assert self.locale.describe("week", only_distance=True) == "eine Woche" + assert self.locale.describe("week", only_distance=False) == "in einer Woche" + assert self.locale.describe("month", only_distance=True) == "ein Monat" + assert self.locale.describe("month", only_distance=False) == "in einem Monat" + assert self.locale.describe("year", only_distance=True) == "ein Jahr" + assert self.locale.describe("year", only_distance=False) == "in einem Jahr" + + def test_weekday(self): + dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) + assert self.locale.day_name(dt.isoweekday()) == "Samstag" + assert self.locale.day_abbreviation(dt.isoweekday()) == "Sa" + + +@pytest.mark.usefixtures("lang_locale") +class TestHungarianLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 2) == "2 óra" + assert self.locale._format_timeframe("hour", 0) == "egy órával" + assert self.locale._format_timeframe("hours", -2) == "2 órával" + assert self.locale._format_timeframe("now", 0) == "éppen most" + + +@pytest.mark.usefixtures("lang_locale") +class TestEsperantoLocale: + def test_format_timeframe(self): + assert 
self.locale._format_timeframe("hours", 2) == "2 horoj" + assert self.locale._format_timeframe("hour", 0) == "un horo" + assert self.locale._format_timeframe("hours", -2) == "2 horoj" + assert self.locale._format_timeframe("now", 0) == "nun" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(1) == "1a" + + +@pytest.mark.usefixtures("lang_locale") +class TestThaiLocale: + def test_year_full(self): + assert self.locale.year_full(2015) == "2558" + + def test_year_abbreviation(self): + assert self.locale.year_abbreviation(2015) == "58" + + def test_format_relative_now(self): + result = self.locale._format_relative("ขณะนี้", "now", 0) + assert result == "ขณะนี้" + + def test_format_relative_past(self): + result = self.locale._format_relative("1 ชั่วโมง", "hour", 1) + assert result == "ในอีก 1 ชั่วโมง" + result = self.locale._format_relative("{0} ชั่วโมง", "hours", 2) + assert result == "ในอีก {0} ชั่วโมง" + result = self.locale._format_relative("ไม่กี่วินาที", "seconds", 42) + assert result == "ในอีกไม่กี่วินาที" + + def test_format_relative_future(self): + result = self.locale._format_relative("1 ชั่วโมง", "hour", -1) + assert result == "1 ชั่วโมง ที่ผ่านมา" + + +@pytest.mark.usefixtures("lang_locale") +class TestBengaliLocale: + def test_ordinal_number(self): + assert self.locale._ordinal_number(0) == "0তম" + assert self.locale._ordinal_number(1) == "1ম" + assert self.locale._ordinal_number(3) == "3য়" + assert self.locale._ordinal_number(4) == "4র্থ" + assert self.locale._ordinal_number(5) == "5ম" + assert self.locale._ordinal_number(6) == "6ষ্ঠ" + assert self.locale._ordinal_number(10) == "10ম" + assert self.locale._ordinal_number(11) == "11তম" + assert self.locale._ordinal_number(42) == "42তম" + assert self.locale._ordinal_number(-1) is None + + +@pytest.mark.usefixtures("lang_locale") +class TestRomanianLocale: + def test_timeframes(self): + + assert self.locale._format_timeframe("hours", 2) == "2 ore" + assert 
self.locale._format_timeframe("months", 2) == "2 luni" + + assert self.locale._format_timeframe("days", 2) == "2 zile" + assert self.locale._format_timeframe("years", 2) == "2 ani" + + assert self.locale._format_timeframe("hours", 3) == "3 ore" + assert self.locale._format_timeframe("months", 4) == "4 luni" + assert self.locale._format_timeframe("days", 3) == "3 zile" + assert self.locale._format_timeframe("years", 5) == "5 ani" + + def test_relative_timeframes(self): + assert self.locale._format_relative("acum", "now", 0) == "acum" + assert self.locale._format_relative("o oră", "hour", 1) == "peste o oră" + assert self.locale._format_relative("o oră", "hour", -1) == "o oră în urmă" + assert self.locale._format_relative("un minut", "minute", 1) == "peste un minut" + assert ( + self.locale._format_relative("un minut", "minute", -1) == "un minut în urmă" + ) + assert ( + self.locale._format_relative("câteva secunde", "seconds", -1) + == "câteva secunde în urmă" + ) + assert ( + self.locale._format_relative("câteva secunde", "seconds", 1) + == "peste câteva secunde" + ) + assert self.locale._format_relative("o zi", "day", -1) == "o zi în urmă" + assert self.locale._format_relative("o zi", "day", 1) == "peste o zi" + + +@pytest.mark.usefixtures("lang_locale") +class TestArabicLocale: + def test_timeframes(self): + + # single + assert self.locale._format_timeframe("minute", 1) == "دقيقة" + assert self.locale._format_timeframe("hour", 1) == "ساعة" + assert self.locale._format_timeframe("day", 1) == "يوم" + assert self.locale._format_timeframe("month", 1) == "شهر" + assert self.locale._format_timeframe("year", 1) == "سنة" + + # double + assert self.locale._format_timeframe("minutes", 2) == "دقيقتين" + assert self.locale._format_timeframe("hours", 2) == "ساعتين" + assert self.locale._format_timeframe("days", 2) == "يومين" + assert self.locale._format_timeframe("months", 2) == "شهرين" + assert self.locale._format_timeframe("years", 2) == "سنتين" + + # up to ten + assert 
self.locale._format_timeframe("minutes", 3) == "3 دقائق" + assert self.locale._format_timeframe("hours", 4) == "4 ساعات" + assert self.locale._format_timeframe("days", 5) == "5 أيام" + assert self.locale._format_timeframe("months", 6) == "6 أشهر" + assert self.locale._format_timeframe("years", 10) == "10 سنوات" + + # more than ten + assert self.locale._format_timeframe("minutes", 11) == "11 دقيقة" + assert self.locale._format_timeframe("hours", 19) == "19 ساعة" + assert self.locale._format_timeframe("months", 24) == "24 شهر" + assert self.locale._format_timeframe("days", 50) == "50 يوم" + assert self.locale._format_timeframe("years", 115) == "115 سنة" + + +@pytest.mark.usefixtures("lang_locale") +class TestNepaliLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("hours", 3) == "3 घण्टा" + assert self.locale._format_timeframe("hour", 0) == "एक घण्टा" + + def test_format_relative_now(self): + result = self.locale._format_relative("अहिले", "now", 0) + assert result == "अहिले" + + def test_format_relative_future(self): + result = self.locale._format_relative("एक घण्टा", "hour", 1) + assert result == "एक घण्टा पछी" + + def test_format_relative_past(self): + result = self.locale._format_relative("एक घण्टा", "hour", -1) + assert result == "एक घण्टा पहिले" + + +@pytest.mark.usefixtures("lang_locale") +class TestIndonesianLocale: + def test_timeframes(self): + assert self.locale._format_timeframe("hours", 2) == "2 jam" + assert self.locale._format_timeframe("months", 2) == "2 bulan" + + assert self.locale._format_timeframe("days", 2) == "2 hari" + assert self.locale._format_timeframe("years", 2) == "2 tahun" + + assert self.locale._format_timeframe("hours", 3) == "3 jam" + assert self.locale._format_timeframe("months", 4) == "4 bulan" + assert self.locale._format_timeframe("days", 3) == "3 hari" + assert self.locale._format_timeframe("years", 5) == "5 tahun" + + def test_format_relative_now(self): + assert self.locale._format_relative("baru 
saja", "now", 0) == "baru saja" + + def test_format_relative_past(self): + assert self.locale._format_relative("1 jam", "hour", 1) == "dalam 1 jam" + assert self.locale._format_relative("1 detik", "seconds", 1) == "dalam 1 detik" + + def test_format_relative_future(self): + assert self.locale._format_relative("1 jam", "hour", -1) == "1 jam yang lalu" + + +@pytest.mark.usefixtures("lang_locale") +class TestTagalogLocale: + def test_singles_tl(self): + assert self.locale._format_timeframe("second", 1) == "isang segundo" + assert self.locale._format_timeframe("minute", 1) == "isang minuto" + assert self.locale._format_timeframe("hour", 1) == "isang oras" + assert self.locale._format_timeframe("day", 1) == "isang araw" + assert self.locale._format_timeframe("week", 1) == "isang linggo" + assert self.locale._format_timeframe("month", 1) == "isang buwan" + assert self.locale._format_timeframe("year", 1) == "isang taon" + + def test_meridians_tl(self): + assert self.locale.meridian(7, "A") == "ng umaga" + assert self.locale.meridian(18, "A") == "ng hapon" + assert self.locale.meridian(10, "a") == "nu" + assert self.locale.meridian(22, "a") == "nh" + + def test_describe_tl(self): + assert self.locale.describe("second", only_distance=True) == "isang segundo" + assert ( + self.locale.describe("second", only_distance=False) + == "isang segundo mula ngayon" + ) + assert self.locale.describe("minute", only_distance=True) == "isang minuto" + assert ( + self.locale.describe("minute", only_distance=False) + == "isang minuto mula ngayon" + ) + assert self.locale.describe("hour", only_distance=True) == "isang oras" + assert ( + self.locale.describe("hour", only_distance=False) + == "isang oras mula ngayon" + ) + assert self.locale.describe("day", only_distance=True) == "isang araw" + assert ( + self.locale.describe("day", only_distance=False) == "isang araw mula ngayon" + ) + assert self.locale.describe("week", only_distance=True) == "isang linggo" + assert ( + 
self.locale.describe("week", only_distance=False) + == "isang linggo mula ngayon" + ) + assert self.locale.describe("month", only_distance=True) == "isang buwan" + assert ( + self.locale.describe("month", only_distance=False) + == "isang buwan mula ngayon" + ) + assert self.locale.describe("year", only_distance=True) == "isang taon" + assert ( + self.locale.describe("year", only_distance=False) + == "isang taon mula ngayon" + ) + + def test_relative_tl(self): + # time + assert self.locale._format_relative("ngayon", "now", 0) == "ngayon" + assert ( + self.locale._format_relative("1 segundo", "seconds", 1) + == "1 segundo mula ngayon" + ) + assert ( + self.locale._format_relative("1 minuto", "minutes", 1) + == "1 minuto mula ngayon" + ) + assert ( + self.locale._format_relative("1 oras", "hours", 1) == "1 oras mula ngayon" + ) + assert self.locale._format_relative("1 araw", "days", 1) == "1 araw mula ngayon" + assert ( + self.locale._format_relative("1 linggo", "weeks", 1) + == "1 linggo mula ngayon" + ) + assert ( + self.locale._format_relative("1 buwan", "months", 1) + == "1 buwan mula ngayon" + ) + assert ( + self.locale._format_relative("1 taon", "years", 1) == "1 taon mula ngayon" + ) + assert ( + self.locale._format_relative("1 segundo", "seconds", -1) + == "nakaraang 1 segundo" + ) + assert ( + self.locale._format_relative("1 minuto", "minutes", -1) + == "nakaraang 1 minuto" + ) + assert self.locale._format_relative("1 oras", "hours", -1) == "nakaraang 1 oras" + assert self.locale._format_relative("1 araw", "days", -1) == "nakaraang 1 araw" + assert ( + self.locale._format_relative("1 linggo", "weeks", -1) + == "nakaraang 1 linggo" + ) + assert ( + self.locale._format_relative("1 buwan", "months", -1) == "nakaraang 1 buwan" + ) + assert self.locale._format_relative("1 taon", "years", -1) == "nakaraang 1 taon" + + def test_plurals_tl(self): + # Seconds + assert self.locale._format_timeframe("seconds", 0) == "0 segundo" + assert 
self.locale._format_timeframe("seconds", 1) == "1 segundo" + assert self.locale._format_timeframe("seconds", 2) == "2 segundo" + assert self.locale._format_timeframe("seconds", 4) == "4 segundo" + assert self.locale._format_timeframe("seconds", 5) == "5 segundo" + assert self.locale._format_timeframe("seconds", 21) == "21 segundo" + assert self.locale._format_timeframe("seconds", 22) == "22 segundo" + assert self.locale._format_timeframe("seconds", 25) == "25 segundo" + + # Minutes + assert self.locale._format_timeframe("minutes", 0) == "0 minuto" + assert self.locale._format_timeframe("minutes", 1) == "1 minuto" + assert self.locale._format_timeframe("minutes", 2) == "2 minuto" + assert self.locale._format_timeframe("minutes", 4) == "4 minuto" + assert self.locale._format_timeframe("minutes", 5) == "5 minuto" + assert self.locale._format_timeframe("minutes", 21) == "21 minuto" + assert self.locale._format_timeframe("minutes", 22) == "22 minuto" + assert self.locale._format_timeframe("minutes", 25) == "25 minuto" + + # Hours + assert self.locale._format_timeframe("hours", 0) == "0 oras" + assert self.locale._format_timeframe("hours", 1) == "1 oras" + assert self.locale._format_timeframe("hours", 2) == "2 oras" + assert self.locale._format_timeframe("hours", 4) == "4 oras" + assert self.locale._format_timeframe("hours", 5) == "5 oras" + assert self.locale._format_timeframe("hours", 21) == "21 oras" + assert self.locale._format_timeframe("hours", 22) == "22 oras" + assert self.locale._format_timeframe("hours", 25) == "25 oras" + + # Days + assert self.locale._format_timeframe("days", 0) == "0 araw" + assert self.locale._format_timeframe("days", 1) == "1 araw" + assert self.locale._format_timeframe("days", 2) == "2 araw" + assert self.locale._format_timeframe("days", 3) == "3 araw" + assert self.locale._format_timeframe("days", 21) == "21 araw" + + # Weeks + assert self.locale._format_timeframe("weeks", 0) == "0 linggo" + assert self.locale._format_timeframe("weeks", 
1) == "1 linggo" + assert self.locale._format_timeframe("weeks", 2) == "2 linggo" + assert self.locale._format_timeframe("weeks", 4) == "4 linggo" + assert self.locale._format_timeframe("weeks", 5) == "5 linggo" + assert self.locale._format_timeframe("weeks", 21) == "21 linggo" + assert self.locale._format_timeframe("weeks", 22) == "22 linggo" + assert self.locale._format_timeframe("weeks", 25) == "25 linggo" + + # Months + assert self.locale._format_timeframe("months", 0) == "0 buwan" + assert self.locale._format_timeframe("months", 1) == "1 buwan" + assert self.locale._format_timeframe("months", 2) == "2 buwan" + assert self.locale._format_timeframe("months", 4) == "4 buwan" + assert self.locale._format_timeframe("months", 5) == "5 buwan" + assert self.locale._format_timeframe("months", 21) == "21 buwan" + assert self.locale._format_timeframe("months", 22) == "22 buwan" + assert self.locale._format_timeframe("months", 25) == "25 buwan" + + # Years + assert self.locale._format_timeframe("years", 1) == "1 taon" + assert self.locale._format_timeframe("years", 2) == "2 taon" + assert self.locale._format_timeframe("years", 5) == "5 taon" + + def test_multi_describe_tl(self): + describe = self.locale.describe_multi + + fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] + assert describe(fulltest) == "5 taon 1 linggo 1 oras 6 minuto mula ngayon" + seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] + assert describe(seconds4000_0days) == "0 araw 1 oras 6 minuto mula ngayon" + seconds4000 = [("hours", 1), ("minutes", 6)] + assert describe(seconds4000) == "1 oras 6 minuto mula ngayon" + assert describe(seconds4000, only_distance=True) == "1 oras 6 minuto" + seconds3700 = [("hours", 1), ("minutes", 1)] + assert describe(seconds3700) == "1 oras 1 minuto mula ngayon" + seconds300_0hours = [("hours", 0), ("minutes", 5)] + assert describe(seconds300_0hours) == "0 oras 5 minuto mula ngayon" + seconds300 = [("minutes", 5)] + assert 
describe(seconds300) == "5 minuto mula ngayon" + seconds60 = [("minutes", 1)] + assert describe(seconds60) == "1 minuto mula ngayon" + assert describe(seconds60, only_distance=True) == "1 minuto" + seconds60 = [("seconds", 1)] + assert describe(seconds60) == "1 segundo mula ngayon" + assert describe(seconds60, only_distance=True) == "1 segundo" + + def test_ordinal_number_tl(self): + assert self.locale.ordinal_number(0) == "ika-0" + assert self.locale.ordinal_number(1) == "ika-1" + assert self.locale.ordinal_number(2) == "ika-2" + assert self.locale.ordinal_number(3) == "ika-3" + assert self.locale.ordinal_number(10) == "ika-10" + assert self.locale.ordinal_number(23) == "ika-23" + assert self.locale.ordinal_number(100) == "ika-100" + assert self.locale.ordinal_number(103) == "ika-103" + assert self.locale.ordinal_number(114) == "ika-114" + + +@pytest.mark.usefixtures("lang_locale") +class TestEstonianLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "just nüüd" + assert self.locale._format_timeframe("second", 1) == "ühe sekundi" + assert self.locale._format_timeframe("seconds", 3) == "3 sekundi" + assert self.locale._format_timeframe("seconds", 30) == "30 sekundi" + assert self.locale._format_timeframe("minute", 1) == "ühe minuti" + assert self.locale._format_timeframe("minutes", 4) == "4 minuti" + assert self.locale._format_timeframe("minutes", 40) == "40 minuti" + assert self.locale._format_timeframe("hour", 1) == "tunni aja" + assert self.locale._format_timeframe("hours", 5) == "5 tunni" + assert self.locale._format_timeframe("hours", 23) == "23 tunni" + assert self.locale._format_timeframe("day", 1) == "ühe päeva" + assert self.locale._format_timeframe("days", 6) == "6 päeva" + assert self.locale._format_timeframe("days", 12) == "12 päeva" + assert self.locale._format_timeframe("month", 1) == "ühe kuu" + assert self.locale._format_timeframe("months", 7) == "7 kuu" + assert self.locale._format_timeframe("months", 11) 
== "11 kuu" + assert self.locale._format_timeframe("year", 1) == "ühe aasta" + assert self.locale._format_timeframe("years", 8) == "8 aasta" + assert self.locale._format_timeframe("years", 12) == "12 aasta" + + assert self.locale._format_timeframe("now", 0) == "just nüüd" + assert self.locale._format_timeframe("second", -1) == "üks sekund" + assert self.locale._format_timeframe("seconds", -9) == "9 sekundit" + assert self.locale._format_timeframe("seconds", -12) == "12 sekundit" + assert self.locale._format_timeframe("minute", -1) == "üks minut" + assert self.locale._format_timeframe("minutes", -2) == "2 minutit" + assert self.locale._format_timeframe("minutes", -10) == "10 minutit" + assert self.locale._format_timeframe("hour", -1) == "tund aega" + assert self.locale._format_timeframe("hours", -3) == "3 tundi" + assert self.locale._format_timeframe("hours", -11) == "11 tundi" + assert self.locale._format_timeframe("day", -1) == "üks päev" + assert self.locale._format_timeframe("days", -2) == "2 päeva" + assert self.locale._format_timeframe("days", -12) == "12 päeva" + assert self.locale._format_timeframe("month", -1) == "üks kuu" + assert self.locale._format_timeframe("months", -3) == "3 kuud" + assert self.locale._format_timeframe("months", -13) == "13 kuud" + assert self.locale._format_timeframe("year", -1) == "üks aasta" + assert self.locale._format_timeframe("years", -4) == "4 aastat" + assert self.locale._format_timeframe("years", -14) == "14 aastat" + + +@pytest.mark.usefixtures("lang_locale") +class TestPortugueseLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "agora" + assert self.locale._format_timeframe("second", 1) == "um segundo" + assert self.locale._format_timeframe("seconds", 30) == "30 segundos" + assert self.locale._format_timeframe("minute", 1) == "um minuto" + assert self.locale._format_timeframe("minutes", 40) == "40 minutos" + assert self.locale._format_timeframe("hour", 1) == "uma hora" + assert 
self.locale._format_timeframe("hours", 23) == "23 horas" + assert self.locale._format_timeframe("day", 1) == "um dia" + assert self.locale._format_timeframe("days", 12) == "12 dias" + assert self.locale._format_timeframe("month", 1) == "um mês" + assert self.locale._format_timeframe("months", 11) == "11 meses" + assert self.locale._format_timeframe("year", 1) == "um ano" + assert self.locale._format_timeframe("years", 12) == "12 anos" + + +@pytest.mark.usefixtures("lang_locale") +class TestBrazilianPortugueseLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "agora" + assert self.locale._format_timeframe("second", 1) == "um segundo" + assert self.locale._format_timeframe("seconds", 30) == "30 segundos" + assert self.locale._format_timeframe("minute", 1) == "um minuto" + assert self.locale._format_timeframe("minutes", 40) == "40 minutos" + assert self.locale._format_timeframe("hour", 1) == "uma hora" + assert self.locale._format_timeframe("hours", 23) == "23 horas" + assert self.locale._format_timeframe("day", 1) == "um dia" + assert self.locale._format_timeframe("days", 12) == "12 dias" + assert self.locale._format_timeframe("month", 1) == "um mês" + assert self.locale._format_timeframe("months", 11) == "11 meses" + assert self.locale._format_timeframe("year", 1) == "um ano" + assert self.locale._format_timeframe("years", 12) == "12 anos" + assert self.locale._format_relative("uma hora", "hour", -1) == "faz uma hora" + + +@pytest.mark.usefixtures("lang_locale") +class TestHongKongLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "剛才" + assert self.locale._format_timeframe("second", 1) == "1秒" + assert self.locale._format_timeframe("seconds", 30) == "30秒" + assert self.locale._format_timeframe("minute", 1) == "1分鐘" + assert self.locale._format_timeframe("minutes", 40) == "40分鐘" + assert self.locale._format_timeframe("hour", 1) == "1小時" + assert 
self.locale._format_timeframe("hours", 23) == "23小時" + assert self.locale._format_timeframe("day", 1) == "1天" + assert self.locale._format_timeframe("days", 12) == "12天" + assert self.locale._format_timeframe("week", 1) == "1星期" + assert self.locale._format_timeframe("weeks", 38) == "38星期" + assert self.locale._format_timeframe("month", 1) == "1個月" + assert self.locale._format_timeframe("months", 11) == "11個月" + assert self.locale._format_timeframe("year", 1) == "1年" + assert self.locale._format_timeframe("years", 12) == "12年" + + +@pytest.mark.usefixtures("lang_locale") +class TestChineseTWLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "剛才" + assert self.locale._format_timeframe("second", 1) == "1秒" + assert self.locale._format_timeframe("seconds", 30) == "30秒" + assert self.locale._format_timeframe("minute", 1) == "1分鐘" + assert self.locale._format_timeframe("minutes", 40) == "40分鐘" + assert self.locale._format_timeframe("hour", 1) == "1小時" + assert self.locale._format_timeframe("hours", 23) == "23小時" + assert self.locale._format_timeframe("day", 1) == "1天" + assert self.locale._format_timeframe("days", 12) == "12天" + assert self.locale._format_timeframe("week", 1) == "1週" + assert self.locale._format_timeframe("weeks", 38) == "38週" + assert self.locale._format_timeframe("month", 1) == "1個月" + assert self.locale._format_timeframe("months", 11) == "11個月" + assert self.locale._format_timeframe("year", 1) == "1年" + assert self.locale._format_timeframe("years", 12) == "12年" + + +@pytest.mark.usefixtures("lang_locale") +class TestSwahiliLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "sasa hivi" + assert self.locale._format_timeframe("second", 1) == "sekunde" + assert self.locale._format_timeframe("seconds", 3) == "sekunde 3" + assert self.locale._format_timeframe("seconds", 30) == "sekunde 30" + assert self.locale._format_timeframe("minute", 1) == "dakika moja" + assert 
self.locale._format_timeframe("minutes", 4) == "dakika 4" + assert self.locale._format_timeframe("minutes", 40) == "dakika 40" + assert self.locale._format_timeframe("hour", 1) == "saa moja" + assert self.locale._format_timeframe("hours", 5) == "saa 5" + assert self.locale._format_timeframe("hours", 23) == "saa 23" + assert self.locale._format_timeframe("day", 1) == "siku moja" + assert self.locale._format_timeframe("days", 6) == "siku 6" + assert self.locale._format_timeframe("days", 12) == "siku 12" + assert self.locale._format_timeframe("month", 1) == "mwezi moja" + assert self.locale._format_timeframe("months", 7) == "miezi 7" + assert self.locale._format_timeframe("week", 1) == "wiki moja" + assert self.locale._format_timeframe("weeks", 2) == "wiki 2" + assert self.locale._format_timeframe("months", 11) == "miezi 11" + assert self.locale._format_timeframe("year", 1) == "mwaka moja" + assert self.locale._format_timeframe("years", 8) == "miaka 8" + assert self.locale._format_timeframe("years", 12) == "miaka 12" + + def test_format_relative_now(self): + result = self.locale._format_relative("sasa hivi", "now", 0) + assert result == "sasa hivi" + + def test_format_relative_past(self): + result = self.locale._format_relative("saa moja", "hour", 1) + assert result == "muda wa saa moja" + + def test_format_relative_future(self): + result = self.locale._format_relative("saa moja", "hour", -1) + assert result == "saa moja iliyopita" + + +@pytest.mark.usefixtures("lang_locale") +class TestKoreanLocale: + def test_format_timeframe(self): + assert self.locale._format_timeframe("now", 0) == "지금" + assert self.locale._format_timeframe("second", 1) == "1초" + assert self.locale._format_timeframe("seconds", 2) == "2초" + assert self.locale._format_timeframe("minute", 1) == "1분" + assert self.locale._format_timeframe("minutes", 2) == "2분" + assert self.locale._format_timeframe("hour", 1) == "한시간" + assert self.locale._format_timeframe("hours", 2) == "2시간" + assert 
self.locale._format_timeframe("day", 1) == "하루" + assert self.locale._format_timeframe("days", 2) == "2일" + assert self.locale._format_timeframe("week", 1) == "1주" + assert self.locale._format_timeframe("weeks", 2) == "2주" + assert self.locale._format_timeframe("month", 1) == "한달" + assert self.locale._format_timeframe("months", 2) == "2개월" + assert self.locale._format_timeframe("year", 1) == "1년" + assert self.locale._format_timeframe("years", 2) == "2년" + + def test_format_relative(self): + assert self.locale._format_relative("지금", "now", 0) == "지금" + + assert self.locale._format_relative("1초", "second", 1) == "1초 후" + assert self.locale._format_relative("2초", "seconds", 2) == "2초 후" + assert self.locale._format_relative("1분", "minute", 1) == "1분 후" + assert self.locale._format_relative("2분", "minutes", 2) == "2분 후" + assert self.locale._format_relative("한시간", "hour", 1) == "한시간 후" + assert self.locale._format_relative("2시간", "hours", 2) == "2시간 후" + assert self.locale._format_relative("하루", "day", 1) == "내일" + assert self.locale._format_relative("2일", "days", 2) == "모레" + assert self.locale._format_relative("3일", "days", 3) == "글피" + assert self.locale._format_relative("4일", "days", 4) == "그글피" + assert self.locale._format_relative("5일", "days", 5) == "5일 후" + assert self.locale._format_relative("1주", "week", 1) == "1주 후" + assert self.locale._format_relative("2주", "weeks", 2) == "2주 후" + assert self.locale._format_relative("한달", "month", 1) == "한달 후" + assert self.locale._format_relative("2개월", "months", 2) == "2개월 후" + assert self.locale._format_relative("1년", "year", 1) == "내년" + assert self.locale._format_relative("2년", "years", 2) == "내후년" + assert self.locale._format_relative("3년", "years", 3) == "3년 후" + + assert self.locale._format_relative("1초", "second", -1) == "1초 전" + assert self.locale._format_relative("2초", "seconds", -2) == "2초 전" + assert self.locale._format_relative("1분", "minute", -1) == "1분 전" + assert self.locale._format_relative("2분", 
"minutes", -2) == "2분 전" + assert self.locale._format_relative("한시간", "hour", -1) == "한시간 전" + assert self.locale._format_relative("2시간", "hours", -2) == "2시간 전" + assert self.locale._format_relative("하루", "day", -1) == "어제" + assert self.locale._format_relative("2일", "days", -2) == "그제" + assert self.locale._format_relative("3일", "days", -3) == "그끄제" + assert self.locale._format_relative("4일", "days", -4) == "4일 전" + assert self.locale._format_relative("1주", "week", -1) == "1주 전" + assert self.locale._format_relative("2주", "weeks", -2) == "2주 전" + assert self.locale._format_relative("한달", "month", -1) == "한달 전" + assert self.locale._format_relative("2개월", "months", -2) == "2개월 전" + assert self.locale._format_relative("1년", "year", -1) == "작년" + assert self.locale._format_relative("2년", "years", -2) == "제작년" + assert self.locale._format_relative("3년", "years", -3) == "3년 전" + + def test_ordinal_number(self): + assert self.locale.ordinal_number(0) == "0번째" + assert self.locale.ordinal_number(1) == "첫번째" + assert self.locale.ordinal_number(2) == "두번째" + assert self.locale.ordinal_number(3) == "세번째" + assert self.locale.ordinal_number(4) == "네번째" + assert self.locale.ordinal_number(5) == "다섯번째" + assert self.locale.ordinal_number(6) == "여섯번째" + assert self.locale.ordinal_number(7) == "일곱번째" + assert self.locale.ordinal_number(8) == "여덟번째" + assert self.locale.ordinal_number(9) == "아홉번째" + assert self.locale.ordinal_number(10) == "열번째" + assert self.locale.ordinal_number(11) == "11번째" + assert self.locale.ordinal_number(12) == "12번째" + assert self.locale.ordinal_number(100) == "100번째" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py new file mode 100644 index 0000000000..9fb4e68f3c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py @@ -0,0 +1,1657 @@ +# -*- coding: utf-8 -*- +from __future__ 
import unicode_literals + +import calendar +import os +import time +from datetime import datetime + +import pytest +from dateutil import tz + +import arrow +from arrow import formatter, parser +from arrow.constants import MAX_TIMESTAMP_US +from arrow.parser import DateTimeParser, ParserError, ParserMatchError + +from .utils import make_full_tz_list + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParser: + def test_parse_multiformat(self, mocker): + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_a", + side_effect=parser.ParserError, + ) + + with pytest.raises(parser.ParserError): + self.parser._parse_multiformat("str", ["fmt_a"]) + + mock_datetime = mocker.Mock() + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_b", + return_value=mock_datetime, + ) + + result = self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) + assert result == mock_datetime + + def test_parse_multiformat_all_fail(self, mocker): + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_a", + side_effect=parser.ParserError, + ) + + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_b", + side_effect=parser.ParserError, + ) + + with pytest.raises(parser.ParserError): + self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) + + def test_parse_multiformat_unself_expected_fail(self, mocker): + class UnselfExpectedError(Exception): + pass + + mocker.patch( + "arrow.parser.DateTimeParser.parse", + string="str", + fmt="fmt_a", + side_effect=UnselfExpectedError, + ) + + with pytest.raises(UnselfExpectedError): + self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) + + def test_parse_token_nonsense(self): + parts = {} + self.parser._parse_token("NONSENSE", "1900", parts) + assert parts == {} + + def test_parse_token_invalid_meridians(self): + parts = {} + self.parser._parse_token("A", "a..m", parts) + assert parts == {} + self.parser._parse_token("a", "p..m", 
parts) + assert parts == {} + + def test_parser_no_caching(self, mocker): + + mocked_parser = mocker.patch( + "arrow.parser.DateTimeParser._generate_pattern_re", fmt="fmt_a" + ) + self.parser = parser.DateTimeParser(cache_size=0) + for _ in range(100): + self.parser._generate_pattern_re("fmt_a") + assert mocked_parser.call_count == 100 + + def test_parser_1_line_caching(self, mocker): + mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") + self.parser = parser.DateTimeParser(cache_size=1) + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + assert mocked_parser.call_count == 1 + assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_b") + assert mocked_parser.call_count == 2 + assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + assert mocked_parser.call_count == 3 + assert mocked_parser.call_args_list[2] == mocker.call(fmt="fmt_a") + + def test_parser_multiple_line_caching(self, mocker): + mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") + self.parser = parser.DateTimeParser(cache_size=2) + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + assert mocked_parser.call_count == 1 + assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") + + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_b") + assert mocked_parser.call_count == 2 + assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") + + # fmt_a and fmt_b are in the cache, so no new calls should be made + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_a") + for _ in range(100): + self.parser._generate_pattern_re(fmt="fmt_b") + assert mocked_parser.call_count == 2 + assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") + assert mocked_parser.call_args_list[1] == 
mocker.call(fmt="fmt_b") + + def test_YY_and_YYYY_format_list(self): + + assert self.parser.parse("15/01/19", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( + 2019, 1, 15 + ) + + # Regression test for issue #580 + assert self.parser.parse("15/01/2019", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( + 2019, 1, 15 + ) + + assert ( + self.parser.parse( + "15/01/2019T04:05:06.789120Z", + ["D/M/YYThh:mm:ss.SZ", "D/M/YYYYThh:mm:ss.SZ"], + ) + == datetime(2019, 1, 15, 4, 5, 6, 789120, tzinfo=tz.tzutc()) + ) + + # regression test for issue #447 + def test_timestamp_format_list(self): + # should not match on the "X" token + assert ( + self.parser.parse( + "15 Jul 2000", + ["MM/DD/YYYY", "YYYY-MM-DD", "X", "DD-MMMM-YYYY", "D MMM YYYY"], + ) + == datetime(2000, 7, 15) + ) + + with pytest.raises(ParserError): + self.parser.parse("15 Jul", "X") + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserParse: + def test_parse_list(self, mocker): + + mocker.patch( + "arrow.parser.DateTimeParser._parse_multiformat", + string="str", + formats=["fmt_a", "fmt_b"], + return_value="result", + ) + + result = self.parser.parse("str", ["fmt_a", "fmt_b"]) + assert result == "result" + + def test_parse_unrecognized_token(self, mocker): + + mocker.patch.dict("arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP") + del arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP["YYYY"] + + # need to make another local parser to apply patch changes + _parser = parser.DateTimeParser() + with pytest.raises(parser.ParserError): + _parser.parse("2013-01-01", "YYYY-MM-DD") + + def test_parse_parse_no_match(self): + + with pytest.raises(ParserError): + self.parser.parse("01-01", "YYYY-MM-DD") + + def test_parse_separators(self): + + with pytest.raises(ParserError): + self.parser.parse("1403549231", "YYYY-MM-DD") + + def test_parse_numbers(self): + + self.expected = datetime(2012, 1, 1, 12, 5, 10) + assert ( + self.parser.parse("2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss") + == self.expected + ) + + def 
test_parse_year_two_digit(self): + + self.expected = datetime(1979, 1, 1, 12, 5, 10) + assert ( + self.parser.parse("79-01-01 12:05:10", "YY-MM-DD HH:mm:ss") == self.expected + ) + + def test_parse_timestamp(self): + + tz_utc = tz.tzutc() + int_timestamp = int(time.time()) + self.expected = datetime.fromtimestamp(int_timestamp, tz=tz_utc) + assert self.parser.parse("{:d}".format(int_timestamp), "X") == self.expected + + float_timestamp = time.time() + self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) + assert self.parser.parse("{:f}".format(float_timestamp), "X") == self.expected + + # test handling of ns timestamp (arrow will round to 6 digits regardless) + self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:f}123".format(float_timestamp), "X") == self.expected + ) + + # test ps timestamp (arrow will round to 6 digits regardless) + self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:f}123456".format(float_timestamp), "X") + == self.expected + ) + + # NOTE: negative timestamps cannot be handled by datetime on Window + # Must use timedelta to handle them. 
ref: https://stackoverflow.com/questions/36179914 + if os.name != "nt": + # regression test for issue #662 + negative_int_timestamp = -int_timestamp + self.expected = datetime.fromtimestamp(negative_int_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:d}".format(negative_int_timestamp), "X") + == self.expected + ) + + negative_float_timestamp = -float_timestamp + self.expected = datetime.fromtimestamp(negative_float_timestamp, tz=tz_utc) + assert ( + self.parser.parse("{:f}".format(negative_float_timestamp), "X") + == self.expected + ) + + # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will + # break cases like "15 Jul 2000" and a format list (see issue #447) + with pytest.raises(ParserError): + natural_lang_string = "Meet me at {} at the restaurant.".format( + float_timestamp + ) + self.parser.parse(natural_lang_string, "X") + + with pytest.raises(ParserError): + self.parser.parse("1565982019.", "X") + + with pytest.raises(ParserError): + self.parser.parse(".1565982019", "X") + + def test_parse_expanded_timestamp(self): + # test expanded timestamps that include milliseconds + # and microseconds as multiples rather than decimals + # requested in issue #357 + + tz_utc = tz.tzutc() + timestamp = 1569982581.413132 + timestamp_milli = int(round(timestamp * 1000)) + timestamp_micro = int(round(timestamp * 1000000)) + + # "x" token should parse integer timestamps below MAX_TIMESTAMP normally + self.expected = datetime.fromtimestamp(int(timestamp), tz=tz_utc) + assert self.parser.parse("{:d}".format(int(timestamp)), "x") == self.expected + + self.expected = datetime.fromtimestamp(round(timestamp, 3), tz=tz_utc) + assert self.parser.parse("{:d}".format(timestamp_milli), "x") == self.expected + + self.expected = datetime.fromtimestamp(timestamp, tz=tz_utc) + assert self.parser.parse("{:d}".format(timestamp_micro), "x") == self.expected + + # anything above max µs timestamp should fail + with 
pytest.raises(ValueError): + self.parser.parse("{:d}".format(int(MAX_TIMESTAMP_US) + 1), "x") + + # floats are not allowed with the "x" token + with pytest.raises(ParserMatchError): + self.parser.parse("{:f}".format(timestamp), "x") + + def test_parse_names(self): + + self.expected = datetime(2012, 1, 1) + + assert self.parser.parse("January 1, 2012", "MMMM D, YYYY") == self.expected + assert self.parser.parse("Jan 1, 2012", "MMM D, YYYY") == self.expected + + def test_parse_pm(self): + + self.expected = datetime(1, 1, 1, 13, 0, 0) + assert self.parser.parse("1 pm", "H a") == self.expected + assert self.parser.parse("1 pm", "h a") == self.expected + + self.expected = datetime(1, 1, 1, 1, 0, 0) + assert self.parser.parse("1 am", "H A") == self.expected + assert self.parser.parse("1 am", "h A") == self.expected + + self.expected = datetime(1, 1, 1, 0, 0, 0) + assert self.parser.parse("12 am", "H A") == self.expected + assert self.parser.parse("12 am", "h A") == self.expected + + self.expected = datetime(1, 1, 1, 12, 0, 0) + assert self.parser.parse("12 pm", "H A") == self.expected + assert self.parser.parse("12 pm", "h A") == self.expected + + def test_parse_tz_hours_only(self): + + self.expected = datetime(2025, 10, 17, 5, 30, 10, tzinfo=tz.tzoffset(None, 0)) + parsed = self.parser.parse("2025-10-17 05:30:10+00", "YYYY-MM-DD HH:mm:ssZ") + assert parsed == self.expected + + def test_parse_tz_zz(self): + + self.expected = datetime(2013, 1, 1, tzinfo=tz.tzoffset(None, -7 * 3600)) + assert self.parser.parse("2013-01-01 -07:00", "YYYY-MM-DD ZZ") == self.expected + + @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) + def test_parse_tz_name_zzz(self, full_tz_name): + + self.expected = datetime(2013, 1, 1, tzinfo=tz.gettz(full_tz_name)) + assert ( + self.parser.parse("2013-01-01 {}".format(full_tz_name), "YYYY-MM-DD ZZZ") + == self.expected + ) + + # note that offsets are not timezones + with pytest.raises(ParserError): + self.parser.parse("2013-01-01 
12:30:45.9+1000", "YYYY-MM-DDZZZ") + + with pytest.raises(ParserError): + self.parser.parse("2013-01-01 12:30:45.9+10:00", "YYYY-MM-DDZZZ") + + with pytest.raises(ParserError): + self.parser.parse("2013-01-01 12:30:45.9-10", "YYYY-MM-DDZZZ") + + def test_parse_subsecond(self): + self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) + assert ( + self.parser.parse("2013-01-01 12:30:45.9", "YYYY-MM-DD HH:mm:ss.S") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) + assert ( + self.parser.parse("2013-01-01 12:30:45.98", "YYYY-MM-DD HH:mm:ss.SS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) + assert ( + self.parser.parse("2013-01-01 12:30:45.987", "YYYY-MM-DD HH:mm:ss.SSS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) + assert ( + self.parser.parse("2013-01-01 12:30:45.9876", "YYYY-MM-DD HH:mm:ss.SSSS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) + assert ( + self.parser.parse("2013-01-01 12:30:45.98765", "YYYY-MM-DD HH:mm:ss.SSSSS") + == self.expected + ) + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + assert ( + self.parser.parse( + "2013-01-01 12:30:45.987654", "YYYY-MM-DD HH:mm:ss.SSSSSS" + ) + == self.expected + ) + + def test_parse_subsecond_rounding(self): + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + datetime_format = "YYYY-MM-DD HH:mm:ss.S" + + # round up + string = "2013-01-01 12:30:45.9876539" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # round down + string = "2013-01-01 12:30:45.98765432" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # round half-up + string = "2013-01-01 12:30:45.987653521" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == 
self.expected + + # round half-down + string = "2013-01-01 12:30:45.9876545210" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # overflow (zero out the subseconds and increment the seconds) + # regression tests for issue #636 + def test_parse_subsecond_rounding_overflow(self): + datetime_format = "YYYY-MM-DD HH:mm:ss.S" + + self.expected = datetime(2013, 1, 1, 12, 30, 46) + string = "2013-01-01 12:30:45.9999995" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + self.expected = datetime(2013, 1, 1, 12, 31, 0) + string = "2013-01-01 12:30:59.9999999" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + self.expected = datetime(2013, 1, 2, 0, 0, 0) + string = "2013-01-01 23:59:59.9999999" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # 6 digits should remain unrounded + self.expected = datetime(2013, 1, 1, 12, 30, 45, 999999) + string = "2013-01-01 12:30:45.999999" + assert self.parser.parse(string, datetime_format) == self.expected + assert self.parser.parse_iso(string) == self.expected + + # Regression tests for issue #560 + def test_parse_long_year(self): + with pytest.raises(ParserError): + self.parser.parse("09 January 123456789101112", "DD MMMM YYYY") + + with pytest.raises(ParserError): + self.parser.parse("123456789101112 09 January", "YYYY DD MMMM") + + with pytest.raises(ParserError): + self.parser.parse("68096653015/01/19", "YY/M/DD") + + def test_parse_with_extra_words_at_start_and_end_invalid(self): + input_format_pairs = [ + ("blah2016", "YYYY"), + ("blah2016blah", "YYYY"), + ("2016blah", "YYYY"), + ("2016-05blah", "YYYY-MM"), + ("2016-05-16blah", "YYYY-MM-DD"), + ("2016-05-16T04:05:06.789120blah", "YYYY-MM-DDThh:mm:ss.S"), + 
("2016-05-16T04:05:06.789120ZblahZ", "YYYY-MM-DDThh:mm:ss.SZ"), + ("2016-05-16T04:05:06.789120Zblah", "YYYY-MM-DDThh:mm:ss.SZ"), + ("2016-05-16T04:05:06.789120blahZ", "YYYY-MM-DDThh:mm:ss.SZ"), + ] + + for pair in input_format_pairs: + with pytest.raises(ParserError): + self.parser.parse(pair[0], pair[1]) + + def test_parse_with_extra_words_at_start_and_end_valid(self): + # Spaces surrounding the parsable date are ok because we + # allow the parsing of natural language input. Additionally, a single + # character of specific punctuation before or after the date is okay. + # See docs for full list of valid punctuation. + + assert self.parser.parse("blah 2016 blah", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse("blah 2016", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse("2016 blah", "YYYY") == datetime(2016, 1, 1) + + # test one additional space along with space divider + assert self.parser.parse( + "blah 2016-05-16 04:05:06.789120", "YYYY-MM-DD hh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert self.parser.parse( + "2016-05-16 04:05:06.789120 blah", "YYYY-MM-DD hh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + # test one additional space along with T divider + assert self.parser.parse( + "blah 2016-05-16T04:05:06.789120", "YYYY-MM-DDThh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert self.parser.parse( + "2016-05-16T04:05:06.789120 blah", "YYYY-MM-DDThh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert ( + self.parser.parse( + "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", + "YYYY-MM-DDThh:mm:ss.S", + ) + == datetime(2016, 5, 16, 4, 5, 6, 789120) + ) + + assert ( + self.parser.parse( + "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", + "YYYY-MM-DD hh:mm:ss.S", + ) + == datetime(2016, 5, 16, 4, 5, 6, 789120) + ) + + # regression test for issue #701 + # tests cases of a partial match surrounded by punctuation + # for the list of valid punctuation, see 
documentation + def test_parse_with_punctuation_fences(self): + assert self.parser.parse( + "Meet me at my house on Halloween (2019-31-10)", "YYYY-DD-MM" + ) == datetime(2019, 10, 31) + + assert self.parser.parse( + "Monday, 9. September 2019, 16:15-20:00", "dddd, D. MMMM YYYY" + ) == datetime(2019, 9, 9) + + assert self.parser.parse("A date is 11.11.2011.", "DD.MM.YYYY") == datetime( + 2011, 11, 11 + ) + + with pytest.raises(ParserMatchError): + self.parser.parse("11.11.2011.1 is not a valid date.", "DD.MM.YYYY") + + with pytest.raises(ParserMatchError): + self.parser.parse( + "This date has too many punctuation marks following it (11.11.2011).", + "DD.MM.YYYY", + ) + + def test_parse_with_leading_and_trailing_whitespace(self): + assert self.parser.parse(" 2016", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse("2016 ", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse(" 2016 ", "YYYY") == datetime(2016, 1, 1) + + assert self.parser.parse( + " 2016-05-16 04:05:06.789120 ", "YYYY-MM-DD hh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + assert self.parser.parse( + " 2016-05-16T04:05:06.789120 ", "YYYY-MM-DDThh:mm:ss.S" + ) == datetime(2016, 5, 16, 4, 5, 6, 789120) + + def test_parse_YYYY_DDDD(self): + assert self.parser.parse("1998-136", "YYYY-DDDD") == datetime(1998, 5, 16) + + assert self.parser.parse("1998-006", "YYYY-DDDD") == datetime(1998, 1, 6) + + with pytest.raises(ParserError): + self.parser.parse("1998-456", "YYYY-DDDD") + + def test_parse_YYYY_DDD(self): + assert self.parser.parse("1998-6", "YYYY-DDD") == datetime(1998, 1, 6) + + assert self.parser.parse("1998-136", "YYYY-DDD") == datetime(1998, 5, 16) + + with pytest.raises(ParserError): + self.parser.parse("1998-756", "YYYY-DDD") + + # month cannot be passed with DDD and DDDD tokens + def test_parse_YYYY_MM_DDDD(self): + with pytest.raises(ParserError): + self.parser.parse("2015-01-009", "YYYY-MM-DDDD") + + # year is required with the DDD and DDDD tokens + def 
test_parse_DDD_only(self): + with pytest.raises(ParserError): + self.parser.parse("5", "DDD") + + def test_parse_DDDD_only(self): + with pytest.raises(ParserError): + self.parser.parse("145", "DDDD") + + def test_parse_ddd_and_dddd(self): + fr_parser = parser.DateTimeParser("fr") + + # Day of week should be ignored when a day is passed + # 2019-10-17 is a Thursday, so we know day of week + # is ignored if the same date is outputted + expected = datetime(2019, 10, 17) + assert self.parser.parse("Tue 2019-10-17", "ddd YYYY-MM-DD") == expected + assert fr_parser.parse("mar 2019-10-17", "ddd YYYY-MM-DD") == expected + assert self.parser.parse("Tuesday 2019-10-17", "dddd YYYY-MM-DD") == expected + assert fr_parser.parse("mardi 2019-10-17", "dddd YYYY-MM-DD") == expected + + # Get first Tuesday after epoch + expected = datetime(1970, 1, 6) + assert self.parser.parse("Tue", "ddd") == expected + assert fr_parser.parse("mar", "ddd") == expected + assert self.parser.parse("Tuesday", "dddd") == expected + assert fr_parser.parse("mardi", "dddd") == expected + + # Get first Tuesday in 2020 + expected = datetime(2020, 1, 7) + assert self.parser.parse("Tue 2020", "ddd YYYY") == expected + assert fr_parser.parse("mar 2020", "ddd YYYY") == expected + assert self.parser.parse("Tuesday 2020", "dddd YYYY") == expected + assert fr_parser.parse("mardi 2020", "dddd YYYY") == expected + + # Get first Tuesday in February 2020 + expected = datetime(2020, 2, 4) + assert self.parser.parse("Tue 02 2020", "ddd MM YYYY") == expected + assert fr_parser.parse("mar 02 2020", "ddd MM YYYY") == expected + assert self.parser.parse("Tuesday 02 2020", "dddd MM YYYY") == expected + assert fr_parser.parse("mardi 02 2020", "dddd MM YYYY") == expected + + # Get first Tuesday in February after epoch + expected = datetime(1970, 2, 3) + assert self.parser.parse("Tue 02", "ddd MM") == expected + assert fr_parser.parse("mar 02", "ddd MM") == expected + assert self.parser.parse("Tuesday 02", "dddd MM") == 
expected + assert fr_parser.parse("mardi 02", "dddd MM") == expected + + # Times remain intact + expected = datetime(2020, 2, 4, 10, 25, 54, 123456, tz.tzoffset(None, -3600)) + assert ( + self.parser.parse( + "Tue 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + assert ( + fr_parser.parse( + "mar 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + assert ( + self.parser.parse( + "Tuesday 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + assert ( + fr_parser.parse( + "mardi 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" + ) + == expected + ) + + def test_parse_ddd_and_dddd_ignore_case(self): + # Regression test for issue #851 + expected = datetime(2019, 6, 24) + assert ( + self.parser.parse("MONDAY, June 24, 2019", "dddd, MMMM DD, YYYY") + == expected + ) + + def test_parse_ddd_and_dddd_then_format(self): + # Regression test for issue #446 + arw_formatter = formatter.DateTimeFormatter() + assert arw_formatter.format(self.parser.parse("Mon", "ddd"), "ddd") == "Mon" + assert ( + arw_formatter.format(self.parser.parse("Monday", "dddd"), "dddd") + == "Monday" + ) + assert arw_formatter.format(self.parser.parse("Tue", "ddd"), "ddd") == "Tue" + assert ( + arw_formatter.format(self.parser.parse("Tuesday", "dddd"), "dddd") + == "Tuesday" + ) + assert arw_formatter.format(self.parser.parse("Wed", "ddd"), "ddd") == "Wed" + assert ( + arw_formatter.format(self.parser.parse("Wednesday", "dddd"), "dddd") + == "Wednesday" + ) + assert arw_formatter.format(self.parser.parse("Thu", "ddd"), "ddd") == "Thu" + assert ( + arw_formatter.format(self.parser.parse("Thursday", "dddd"), "dddd") + == "Thursday" + ) + assert arw_formatter.format(self.parser.parse("Fri", "ddd"), "ddd") == "Fri" + assert ( + arw_formatter.format(self.parser.parse("Friday", "dddd"), "dddd") + == "Friday" + ) + assert arw_formatter.format(self.parser.parse("Sat", "ddd"), "ddd") == "Sat" + assert ( + 
arw_formatter.format(self.parser.parse("Saturday", "dddd"), "dddd") + == "Saturday" + ) + assert arw_formatter.format(self.parser.parse("Sun", "ddd"), "ddd") == "Sun" + assert ( + arw_formatter.format(self.parser.parse("Sunday", "dddd"), "dddd") + == "Sunday" + ) + + def test_parse_HH_24(self): + assert self.parser.parse( + "2019-10-30T24:00:00", "YYYY-MM-DDTHH:mm:ss" + ) == datetime(2019, 10, 31, 0, 0, 0, 0) + assert self.parser.parse("2019-10-30T24:00", "YYYY-MM-DDTHH:mm") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse("2019-10-30T24", "YYYY-MM-DDTHH") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse( + "2019-10-30T24:00:00.0", "YYYY-MM-DDTHH:mm:ss.S" + ) == datetime(2019, 10, 31, 0, 0, 0, 0) + assert self.parser.parse( + "2019-10-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" + ) == datetime(2019, 11, 1, 0, 0, 0, 0) + assert self.parser.parse( + "2019-12-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" + ) == datetime(2020, 1, 1, 0, 0, 0, 0) + assert self.parser.parse( + "2019-12-31T23:59:59.9999999", "YYYY-MM-DDTHH:mm:ss.S" + ) == datetime(2020, 1, 1, 0, 0, 0, 0) + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:01:00", "YYYY-MM-DDTHH:mm:ss") + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:00:01", "YYYY-MM-DDTHH:mm:ss") + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:00:00.1", "YYYY-MM-DDTHH:mm:ss.S") + + with pytest.raises(ParserError): + self.parser.parse("2019-12-31T24:00:00.999999", "YYYY-MM-DDTHH:mm:ss.S") + + def test_parse_W(self): + + assert self.parser.parse("2011-W05-4", "W") == datetime(2011, 2, 3) + assert self.parser.parse("2011W054", "W") == datetime(2011, 2, 3) + assert self.parser.parse("2011-W05", "W") == datetime(2011, 1, 31) + assert self.parser.parse("2011W05", "W") == datetime(2011, 1, 31) + assert self.parser.parse("2011-W05-4T14:17:01", "WTHH:mm:ss") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + assert self.parser.parse("2011W054T14:17:01", 
"WTHH:mm:ss") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + assert self.parser.parse("2011-W05T14:17:01", "WTHH:mm:ss") == datetime( + 2011, 1, 31, 14, 17, 1 + ) + assert self.parser.parse("2011W05T141701", "WTHHmmss") == datetime( + 2011, 1, 31, 14, 17, 1 + ) + assert self.parser.parse("2011W054T141701", "WTHHmmss") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + + bad_formats = [ + "201W22", + "1995-W1-4", + "2001-W34-90", + "2001--W34", + "2011-W03--3", + "thstrdjtrsrd676776r65", + "2002-W66-1T14:17:01", + "2002-W23-03T14:17:01", + ] + + for fmt in bad_formats: + with pytest.raises(ParserError): + self.parser.parse(fmt, "W") + + def test_parse_normalize_whitespace(self): + assert self.parser.parse( + "Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True + ) == datetime(2005, 6, 1, 13, 33) + + with pytest.raises(ParserError): + self.parser.parse("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA") + + assert ( + self.parser.parse( + "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", + "YYYY-MM-DD T HH:mm:ss S", + normalize_whitespace=True, + ) + == datetime(2013, 5, 5, 12, 30, 45, 123456) + ) + + with pytest.raises(ParserError): + self.parser.parse( + "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", + "YYYY-MM-DD T HH:mm:ss S", + ) + + assert self.parser.parse( + " \n Jun 1\t 2005\n ", "MMM D YYYY", normalize_whitespace=True + ) == datetime(2005, 6, 1) + + with pytest.raises(ParserError): + self.parser.parse(" \n Jun 1\t 2005\n ", "MMM D YYYY") + + +@pytest.mark.usefixtures("dt_parser_regex") +class TestDateTimeParserRegex: + def test_format_year(self): + + assert self.format_regex.findall("YYYY-YY") == ["YYYY", "YY"] + + def test_format_month(self): + + assert self.format_regex.findall("MMMM-MMM-MM-M") == ["MMMM", "MMM", "MM", "M"] + + def test_format_day(self): + + assert self.format_regex.findall("DDDD-DDD-DD-D") == ["DDDD", "DDD", "DD", "D"] + + def test_format_hour(self): + + assert self.format_regex.findall("HH-H-hh-h") == ["HH", "H", "hh", "h"] + + def 
test_format_minute(self): + + assert self.format_regex.findall("mm-m") == ["mm", "m"] + + def test_format_second(self): + + assert self.format_regex.findall("ss-s") == ["ss", "s"] + + def test_format_subsecond(self): + + assert self.format_regex.findall("SSSSSS-SSSSS-SSSS-SSS-SS-S") == [ + "SSSSSS", + "SSSSS", + "SSSS", + "SSS", + "SS", + "S", + ] + + def test_format_tz(self): + + assert self.format_regex.findall("ZZZ-ZZ-Z") == ["ZZZ", "ZZ", "Z"] + + def test_format_am_pm(self): + + assert self.format_regex.findall("A-a") == ["A", "a"] + + def test_format_timestamp(self): + + assert self.format_regex.findall("X") == ["X"] + + def test_format_timestamp_milli(self): + + assert self.format_regex.findall("x") == ["x"] + + def test_escape(self): + + escape_regex = parser.DateTimeParser._ESCAPE_RE + + assert escape_regex.findall("2018-03-09 8 [h] 40 [hello]") == ["[h]", "[hello]"] + + def test_month_names(self): + p = parser.DateTimeParser("en_us") + + text = "_".join(calendar.month_name[1:]) + + result = p._input_re_map["MMMM"].findall(text) + + assert result == calendar.month_name[1:] + + def test_month_abbreviations(self): + p = parser.DateTimeParser("en_us") + + text = "_".join(calendar.month_abbr[1:]) + + result = p._input_re_map["MMM"].findall(text) + + assert result == calendar.month_abbr[1:] + + def test_digits(self): + + assert parser.DateTimeParser._ONE_OR_TWO_DIGIT_RE.findall("4-56") == ["4", "56"] + assert parser.DateTimeParser._ONE_OR_TWO_OR_THREE_DIGIT_RE.findall( + "4-56-789" + ) == ["4", "56", "789"] + assert parser.DateTimeParser._ONE_OR_MORE_DIGIT_RE.findall( + "4-56-789-1234-12345" + ) == ["4", "56", "789", "1234", "12345"] + assert parser.DateTimeParser._TWO_DIGIT_RE.findall("12-3-45") == ["12", "45"] + assert parser.DateTimeParser._THREE_DIGIT_RE.findall("123-4-56") == ["123"] + assert parser.DateTimeParser._FOUR_DIGIT_RE.findall("1234-56") == ["1234"] + + def test_tz(self): + tz_z_re = parser.DateTimeParser._TZ_Z_RE + assert tz_z_re.findall("-0700") 
== [("-", "07", "00")] + assert tz_z_re.findall("+07") == [("+", "07", "")] + assert tz_z_re.search("15/01/2019T04:05:06.789120Z") is not None + assert tz_z_re.search("15/01/2019T04:05:06.789120") is None + + tz_zz_re = parser.DateTimeParser._TZ_ZZ_RE + assert tz_zz_re.findall("-07:00") == [("-", "07", "00")] + assert tz_zz_re.findall("+07") == [("+", "07", "")] + assert tz_zz_re.search("15/01/2019T04:05:06.789120Z") is not None + assert tz_zz_re.search("15/01/2019T04:05:06.789120") is None + + tz_name_re = parser.DateTimeParser._TZ_NAME_RE + assert tz_name_re.findall("Europe/Warsaw") == ["Europe/Warsaw"] + assert tz_name_re.findall("GMT") == ["GMT"] + + def test_timestamp(self): + timestamp_re = parser.DateTimeParser._TIMESTAMP_RE + assert timestamp_re.findall("1565707550.452729") == ["1565707550.452729"] + assert timestamp_re.findall("-1565707550.452729") == ["-1565707550.452729"] + assert timestamp_re.findall("-1565707550") == ["-1565707550"] + assert timestamp_re.findall("1565707550") == ["1565707550"] + assert timestamp_re.findall("1565707550.") == [] + assert timestamp_re.findall(".1565707550") == [] + + def test_timestamp_milli(self): + timestamp_expanded_re = parser.DateTimeParser._TIMESTAMP_EXPANDED_RE + assert timestamp_expanded_re.findall("-1565707550") == ["-1565707550"] + assert timestamp_expanded_re.findall("1565707550") == ["1565707550"] + assert timestamp_expanded_re.findall("1565707550.452729") == [] + assert timestamp_expanded_re.findall("1565707550.") == [] + assert timestamp_expanded_re.findall(".1565707550") == [] + + def test_time(self): + time_re = parser.DateTimeParser._TIME_RE + time_seperators = [":", ""] + + for sep in time_seperators: + assert time_re.findall("12") == [("12", "", "", "", "")] + assert time_re.findall("12{sep}35".format(sep=sep)) == [ + ("12", "35", "", "", "") + ] + assert time_re.findall("12{sep}35{sep}46".format(sep=sep)) == [ + ("12", "35", "46", "", "") + ] + assert 
time_re.findall("12{sep}35{sep}46.952313".format(sep=sep)) == [ + ("12", "35", "46", ".", "952313") + ] + assert time_re.findall("12{sep}35{sep}46,952313".format(sep=sep)) == [ + ("12", "35", "46", ",", "952313") + ] + + assert time_re.findall("12:") == [] + assert time_re.findall("12:35:46.") == [] + assert time_re.findall("12:35:46,") == [] + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserISO: + def test_YYYY(self): + + assert self.parser.parse_iso("2013") == datetime(2013, 1, 1) + + def test_YYYY_DDDD(self): + assert self.parser.parse_iso("1998-136") == datetime(1998, 5, 16) + + assert self.parser.parse_iso("1998-006") == datetime(1998, 1, 6) + + with pytest.raises(ParserError): + self.parser.parse_iso("1998-456") + + # 2016 is a leap year, so Feb 29 exists (leap day) + assert self.parser.parse_iso("2016-059") == datetime(2016, 2, 28) + assert self.parser.parse_iso("2016-060") == datetime(2016, 2, 29) + assert self.parser.parse_iso("2016-061") == datetime(2016, 3, 1) + + # 2017 is not a leap year, so Feb 29 does not exist + assert self.parser.parse_iso("2017-059") == datetime(2017, 2, 28) + assert self.parser.parse_iso("2017-060") == datetime(2017, 3, 1) + assert self.parser.parse_iso("2017-061") == datetime(2017, 3, 2) + + # Since 2016 is a leap year, the 366th day falls in the same year + assert self.parser.parse_iso("2016-366") == datetime(2016, 12, 31) + + # Since 2017 is not a leap year, the 366th day falls in the next year + assert self.parser.parse_iso("2017-366") == datetime(2018, 1, 1) + + def test_YYYY_DDDD_HH_mm_ssZ(self): + + assert self.parser.parse_iso("2013-036 04:05:06+01:00") == datetime( + 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-036 04:05:06Z") == datetime( + 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc() + ) + + def test_YYYY_MM_DDDD(self): + with pytest.raises(ParserError): + self.parser.parse_iso("2014-05-125") + + def test_YYYY_MM(self): + + for separator in 
DateTimeParser.SEPARATORS: + assert self.parser.parse_iso(separator.join(("2013", "02"))) == datetime( + 2013, 2, 1 + ) + + def test_YYYY_MM_DD(self): + + for separator in DateTimeParser.SEPARATORS: + assert self.parser.parse_iso( + separator.join(("2013", "02", "03")) + ) == datetime(2013, 2, 3) + + def test_YYYY_MM_DDTHH_mmZ(self): + + assert self.parser.parse_iso("2013-02-03T04:05+01:00") == datetime( + 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DDTHH_mm(self): + + assert self.parser.parse_iso("2013-02-03T04:05") == datetime(2013, 2, 3, 4, 5) + + def test_YYYY_MM_DDTHH(self): + + assert self.parser.parse_iso("2013-02-03T04") == datetime(2013, 2, 3, 4) + + def test_YYYY_MM_DDTHHZ(self): + + assert self.parser.parse_iso("2013-02-03T04+01:00") == datetime( + 2013, 2, 3, 4, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DDTHH_mm_ssZ(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DDTHH_mm_ss(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06") == datetime( + 2013, 2, 3, 4, 5, 6 + ) + + def test_YYYY_MM_DD_HH_mmZ(self): + + assert self.parser.parse_iso("2013-02-03 04:05+01:00") == datetime( + 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) + ) + + def test_YYYY_MM_DD_HH_mm(self): + + assert self.parser.parse_iso("2013-02-03 04:05") == datetime(2013, 2, 3, 4, 5) + + def test_YYYY_MM_DD_HH(self): + + assert self.parser.parse_iso("2013-02-03 04") == datetime(2013, 2, 3, 4) + + def test_invalid_time(self): + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03 044") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03 04:05:06.") + + def test_YYYY_MM_DD_HH_mm_ssZ(self): + + assert self.parser.parse_iso("2013-02-03 04:05:06+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) + 
) + + def test_YYYY_MM_DD_HH_mm_ss(self): + + assert self.parser.parse_iso("2013-02-03 04:05:06") == datetime( + 2013, 2, 3, 4, 5, 6 + ) + + def test_YYYY_MM_DDTHH_mm_ss_S(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06.7") == datetime( + 2013, 2, 3, 4, 5, 6, 700000 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78") == datetime( + 2013, 2, 3, 4, 5, 6, 780000 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.789") == datetime( + 2013, 2, 3, 4, 5, 6, 789000 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.7891") == datetime( + 2013, 2, 3, 4, 5, 6, 789100 + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78912") == datetime( + 2013, 2, 3, 4, 5, 6, 789120 + ) + + # ISO 8601:2004(E), ISO, 2004-12-01, 4.2.2.4 ... the decimal fraction + # shall be divided from the integer part by the decimal sign specified + # in ISO 31-0, i.e. the comma [,] or full stop [.]. Of these, the comma + # is the preferred sign. + assert self.parser.parse_iso("2013-02-03T04:05:06,789123678") == datetime( + 2013, 2, 3, 4, 5, 6, 789124 + ) + + # there is no limit on the number of decimal places + assert self.parser.parse_iso("2013-02-03T04:05:06.789123678") == datetime( + 2013, 2, 3, 4, 5, 6, 789124 + ) + + def test_YYYY_MM_DDTHH_mm_ss_SZ(self): + + assert self.parser.parse_iso("2013-02-03T04:05:06.7+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 700000, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 780000, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.789+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 789000, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.7891+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 789100, tzinfo=tz.tzoffset(None, 3600) + ) + + assert self.parser.parse_iso("2013-02-03T04:05:06.78912+01:00") == datetime( + 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzoffset(None, 
3600) + ) + + assert self.parser.parse_iso("2013-02-03 04:05:06.78912Z") == datetime( + 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzutc() + ) + + def test_W(self): + + assert self.parser.parse_iso("2011-W05-4") == datetime(2011, 2, 3) + + assert self.parser.parse_iso("2011-W05-4T14:17:01") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + + assert self.parser.parse_iso("2011W054") == datetime(2011, 2, 3) + + assert self.parser.parse_iso("2011W054T141701") == datetime( + 2011, 2, 3, 14, 17, 1 + ) + + def test_invalid_Z(self): + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912z") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912zz") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912Zz") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912ZZ") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912+Z") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912-Z") + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-02-03T04:05:06.78912 Z") + + def test_parse_subsecond(self): + self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) + assert self.parser.parse_iso("2013-01-01 12:30:45.9") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) + assert self.parser.parse_iso("2013-01-01 12:30:45.98") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) + assert self.parser.parse_iso("2013-01-01 12:30:45.987") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) + assert self.parser.parse_iso("2013-01-01 12:30:45.9876") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) + assert self.parser.parse_iso("2013-01-01 12:30:45.98765") == self.expected + + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + assert self.parser.parse_iso("2013-01-01 12:30:45.987654") 
== self.expected + + # use comma as subsecond separator + self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) + assert self.parser.parse_iso("2013-01-01 12:30:45,987654") == self.expected + + def test_gnu_date(self): + """Regression tests for parsing output from GNU date.""" + # date -Ins + assert self.parser.parse_iso("2016-11-16T09:46:30,895636557-0800") == datetime( + 2016, 11, 16, 9, 46, 30, 895636, tzinfo=tz.tzoffset(None, -3600 * 8) + ) + + # date --rfc-3339=ns + assert self.parser.parse_iso("2016-11-16 09:51:14.682141526-08:00") == datetime( + 2016, 11, 16, 9, 51, 14, 682142, tzinfo=tz.tzoffset(None, -3600 * 8) + ) + + def test_isoformat(self): + + dt = datetime.utcnow() + + assert self.parser.parse_iso(dt.isoformat()) == dt + + def test_parse_iso_normalize_whitespace(self): + assert self.parser.parse_iso( + "2013-036 \t 04:05:06Z", normalize_whitespace=True + ) == datetime(2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc()) + + with pytest.raises(ParserError): + self.parser.parse_iso("2013-036 \t 04:05:06Z") + + assert self.parser.parse_iso( + "\t 2013-05-05T12:30:45.123456 \t \n", normalize_whitespace=True + ) == datetime(2013, 5, 5, 12, 30, 45, 123456) + + with pytest.raises(ParserError): + self.parser.parse_iso("\t 2013-05-05T12:30:45.123456 \t \n") + + def test_parse_iso_with_leading_and_trailing_whitespace(self): + datetime_string = " 2016-11-15T06:37:19.123456" + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = " 2016-11-15T06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = "2016-11-15T06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = "2016-11-15T 06:37:19.123456" + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + # leading whitespace + datetime_string = " 2016-11-15 06:37:19.123456" + with pytest.raises(ParserError): + 
self.parser.parse_iso(datetime_string) + + # trailing whitespace + datetime_string = "2016-11-15 06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + datetime_string = " 2016-11-15 06:37:19.123456 " + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + # two dividing spaces + datetime_string = "2016-11-15 06:37:19.123456" + with pytest.raises(ParserError): + self.parser.parse_iso(datetime_string) + + def test_parse_iso_with_extra_words_at_start_and_end_invalid(self): + test_inputs = [ + "blah2016", + "blah2016blah", + "blah 2016 blah", + "blah 2016", + "2016 blah", + "blah 2016-05-16 04:05:06.789120", + "2016-05-16 04:05:06.789120 blah", + "blah 2016-05-16T04:05:06.789120", + "2016-05-16T04:05:06.789120 blah", + "2016blah", + "2016-05blah", + "2016-05-16blah", + "2016-05-16T04:05:06.789120blah", + "2016-05-16T04:05:06.789120ZblahZ", + "2016-05-16T04:05:06.789120Zblah", + "2016-05-16T04:05:06.789120blahZ", + "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", + "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", + ] + + for ti in test_inputs: + with pytest.raises(ParserError): + self.parser.parse_iso(ti) + + def test_iso8601_basic_format(self): + assert self.parser.parse_iso("20180517") == datetime(2018, 5, 17) + + assert self.parser.parse_iso("20180517T10") == datetime(2018, 5, 17, 10) + + assert self.parser.parse_iso("20180517T105513.843456") == datetime( + 2018, 5, 17, 10, 55, 13, 843456 + ) + + assert self.parser.parse_iso("20180517T105513Z") == datetime( + 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzutc() + ) + + assert self.parser.parse_iso("20180517T105513.843456-0700") == datetime( + 2018, 5, 17, 10, 55, 13, 843456, tzinfo=tz.tzoffset(None, -25200) + ) + + assert self.parser.parse_iso("20180517T105513-0700") == datetime( + 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzoffset(None, -25200) + ) + + assert self.parser.parse_iso("20180517T105513-07") == datetime( + 2018, 5, 17, 10, 55, 13, 
tzinfo=tz.tzoffset(None, -25200) + ) + + # ordinal in basic format: YYYYDDDD + assert self.parser.parse_iso("1998136") == datetime(1998, 5, 16) + + # timezone requires +- seperator + with pytest.raises(ParserError): + self.parser.parse_iso("20180517T1055130700") + + with pytest.raises(ParserError): + self.parser.parse_iso("20180517T10551307") + + # too many digits in date + with pytest.raises(ParserError): + self.parser.parse_iso("201860517T105513Z") + + # too many digits in time + with pytest.raises(ParserError): + self.parser.parse_iso("20180517T1055213Z") + + def test_midnight_end_day(self): + assert self.parser.parse_iso("2019-10-30T24:00:00") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-10-30T24:00") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-10-30T24:00:00.0") == datetime( + 2019, 10, 31, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-10-31T24:00:00") == datetime( + 2019, 11, 1, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-12-31T24:00:00") == datetime( + 2020, 1, 1, 0, 0, 0, 0 + ) + assert self.parser.parse_iso("2019-12-31T23:59:59.9999999") == datetime( + 2020, 1, 1, 0, 0, 0, 0 + ) + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:01:00") + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:00:01") + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:00:00.1") + + with pytest.raises(ParserError): + self.parser.parse_iso("2019-12-31T24:00:00.999999") + + +@pytest.mark.usefixtures("tzinfo_parser") +class TestTzinfoParser: + def test_parse_local(self): + + assert self.parser.parse("local") == tz.tzlocal() + + def test_parse_utc(self): + + assert self.parser.parse("utc") == tz.tzutc() + assert self.parser.parse("UTC") == tz.tzutc() + + def test_parse_iso(self): + + assert self.parser.parse("01:00") == tz.tzoffset(None, 3600) + assert self.parser.parse("11:35") == tz.tzoffset(None, 11 * 3600 + 2100) + assert 
self.parser.parse("+01:00") == tz.tzoffset(None, 3600) + assert self.parser.parse("-01:00") == tz.tzoffset(None, -3600) + + assert self.parser.parse("0100") == tz.tzoffset(None, 3600) + assert self.parser.parse("+0100") == tz.tzoffset(None, 3600) + assert self.parser.parse("-0100") == tz.tzoffset(None, -3600) + + assert self.parser.parse("01") == tz.tzoffset(None, 3600) + assert self.parser.parse("+01") == tz.tzoffset(None, 3600) + assert self.parser.parse("-01") == tz.tzoffset(None, -3600) + + def test_parse_str(self): + + assert self.parser.parse("US/Pacific") == tz.gettz("US/Pacific") + + def test_parse_fails(self): + + with pytest.raises(parser.ParserError): + self.parser.parse("fail") + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserMonthName: + def test_shortmonth_capitalized(self): + + assert self.parser.parse("2013-Jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + def test_shortmonth_allupper(self): + + assert self.parser.parse("2013-JAN-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + def test_shortmonth_alllower(self): + + assert self.parser.parse("2013-jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + def test_month_capitalized(self): + + assert self.parser.parse("2013-January-01", "YYYY-MMMM-DD") == datetime( + 2013, 1, 1 + ) + + def test_month_allupper(self): + + assert self.parser.parse("2013-JANUARY-01", "YYYY-MMMM-DD") == datetime( + 2013, 1, 1 + ) + + def test_month_alllower(self): + + assert self.parser.parse("2013-january-01", "YYYY-MMMM-DD") == datetime( + 2013, 1, 1 + ) + + def test_localized_month_name(self): + parser_ = parser.DateTimeParser("fr_fr") + + assert parser_.parse("2013-Janvier-01", "YYYY-MMMM-DD") == datetime(2013, 1, 1) + + def test_localized_month_abbreviation(self): + parser_ = parser.DateTimeParser("it_it") + + assert parser_.parse("2013-Gen-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserMeridians: + def test_meridians_lowercase(self): + 
assert self.parser.parse("2013-01-01 5am", "YYYY-MM-DD ha") == datetime( + 2013, 1, 1, 5 + ) + + assert self.parser.parse("2013-01-01 5pm", "YYYY-MM-DD ha") == datetime( + 2013, 1, 1, 17 + ) + + def test_meridians_capitalized(self): + assert self.parser.parse("2013-01-01 5AM", "YYYY-MM-DD hA") == datetime( + 2013, 1, 1, 5 + ) + + assert self.parser.parse("2013-01-01 5PM", "YYYY-MM-DD hA") == datetime( + 2013, 1, 1, 17 + ) + + def test_localized_meridians_lowercase(self): + parser_ = parser.DateTimeParser("hu_hu") + assert parser_.parse("2013-01-01 5 de", "YYYY-MM-DD h a") == datetime( + 2013, 1, 1, 5 + ) + + assert parser_.parse("2013-01-01 5 du", "YYYY-MM-DD h a") == datetime( + 2013, 1, 1, 17 + ) + + def test_localized_meridians_capitalized(self): + parser_ = parser.DateTimeParser("hu_hu") + assert parser_.parse("2013-01-01 5 DE", "YYYY-MM-DD h A") == datetime( + 2013, 1, 1, 5 + ) + + assert parser_.parse("2013-01-01 5 DU", "YYYY-MM-DD h A") == datetime( + 2013, 1, 1, 17 + ) + + # regression test for issue #607 + def test_es_meridians(self): + parser_ = parser.DateTimeParser("es") + + assert parser_.parse( + "Junio 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a" + ) == datetime(2019, 6, 30, 20, 0) + + with pytest.raises(ParserError): + parser_.parse( + "Junio 30, 2019 - 08:00 pasdfasdfm", "MMMM DD, YYYY - hh:mm a" + ) + + def test_fr_meridians(self): + parser_ = parser.DateTimeParser("fr") + + # the French locale always uses a 24 hour clock, so it does not support meridians + with pytest.raises(ParserError): + parser_.parse("Janvier 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a") + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserMonthOrdinalDay: + def test_english(self): + parser_ = parser.DateTimeParser("en_us") + + assert parser_.parse("January 1st, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 1 + ) + assert parser_.parse("January 2nd, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 2 + ) + assert parser_.parse("January 3rd, 2013", "MMMM 
Do, YYYY") == datetime( + 2013, 1, 3 + ) + assert parser_.parse("January 4th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 4 + ) + assert parser_.parse("January 11th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 11 + ) + assert parser_.parse("January 12th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 12 + ) + assert parser_.parse("January 13th, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 13 + ) + assert parser_.parse("January 21st, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 21 + ) + assert parser_.parse("January 31st, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 31 + ) + + with pytest.raises(ParserError): + parser_.parse("January 1th, 2013", "MMMM Do, YYYY") + + with pytest.raises(ParserError): + parser_.parse("January 11st, 2013", "MMMM Do, YYYY") + + def test_italian(self): + parser_ = parser.DateTimeParser("it_it") + + assert parser_.parse("Gennaio 1º, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 1 + ) + + def test_spanish(self): + parser_ = parser.DateTimeParser("es_es") + + assert parser_.parse("Enero 1º, 2013", "MMMM Do, YYYY") == datetime(2013, 1, 1) + + def test_french(self): + parser_ = parser.DateTimeParser("fr_fr") + + assert parser_.parse("Janvier 1er, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 1 + ) + + assert parser_.parse("Janvier 2e, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 2 + ) + + assert parser_.parse("Janvier 11e, 2013", "MMMM Do, YYYY") == datetime( + 2013, 1, 11 + ) + + +@pytest.mark.usefixtures("dt_parser") +class TestDateTimeParserSearchDate: + def test_parse_search(self): + + assert self.parser.parse( + "Today is 25 of September of 2003", "DD of MMMM of YYYY" + ) == datetime(2003, 9, 25) + + def test_parse_search_with_numbers(self): + + assert self.parser.parse( + "2000 people met the 2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss" + ) == datetime(2012, 1, 1, 12, 5, 10) + + assert self.parser.parse( + "Call 01-02-03 on 79-01-01 12:05:10", "YY-MM-DD HH:mm:ss" + ) == datetime(1979, 1, 1, 12, 5, 10) + + def 
test_parse_search_with_names(self): + + assert self.parser.parse("June was born in May 1980", "MMMM YYYY") == datetime( + 1980, 5, 1 + ) + + def test_parse_search_locale_with_names(self): + p = parser.DateTimeParser("sv_se") + + assert p.parse("Jan föddes den 31 Dec 1980", "DD MMM YYYY") == datetime( + 1980, 12, 31 + ) + + assert p.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") == datetime( + 1975, 8, 25 + ) + + def test_parse_search_fails(self): + + with pytest.raises(parser.ParserError): + self.parser.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") + + def test_escape(self): + + format = "MMMM D, YYYY [at] h:mma" + assert self.parser.parse( + "Thursday, December 10, 2015 at 5:09pm", format + ) == datetime(2015, 12, 10, 17, 9) + + format = "[MMMM] M D, YYYY [at] h:mma" + assert self.parser.parse("MMMM 12 10, 2015 at 5:09pm", format) == datetime( + 2015, 12, 10, 17, 9 + ) + + format = "[It happened on] MMMM Do [in the year] YYYY [a long time ago]" + assert self.parser.parse( + "It happened on November 25th in the year 1990 a long time ago", format + ) == datetime(1990, 11, 25) + + format = "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]" + assert self.parser.parse( + "It happened on November 25th in the year 1990 a long time ago", format + ) == datetime(1990, 11, 25) + + format = "[I'm][ entirely][ escaped,][ weee!]" + assert self.parser.parse("I'm entirely escaped, weee!", format) == datetime( + 1, 1, 1 + ) + + # Special RegEx characters + format = "MMM DD, YYYY |^${}().*+?<>-& h:mm A" + assert self.parser.parse( + "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM", format + ) == datetime(2017, 12, 31, 2, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py new file mode 100644 index 0000000000..e48b4de066 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py @@ -0,0 +1,81 
@@ +# -*- coding: utf-8 -*- +import time +from datetime import datetime + +import pytest + +from arrow import util + + +class TestUtil: + def test_next_weekday(self): + # Get first Monday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 0) == datetime(1970, 1, 5) + + # Get first Tuesday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 1) == datetime(1970, 1, 6) + + # Get first Wednesday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 2) == datetime(1970, 1, 7) + + # Get first Thursday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 3) == datetime(1970, 1, 1) + + # Get first Friday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 4) == datetime(1970, 1, 2) + + # Get first Saturday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 5) == datetime(1970, 1, 3) + + # Get first Sunday after epoch + assert util.next_weekday(datetime(1970, 1, 1), 6) == datetime(1970, 1, 4) + + # Weekdays are 0-indexed + with pytest.raises(ValueError): + util.next_weekday(datetime(1970, 1, 1), 7) + + with pytest.raises(ValueError): + util.next_weekday(datetime(1970, 1, 1), -1) + + def test_total_seconds(self): + td = datetime(2019, 1, 1) - datetime(2018, 1, 1) + assert util.total_seconds(td) == td.total_seconds() + + def test_is_timestamp(self): + timestamp_float = time.time() + timestamp_int = int(timestamp_float) + + assert util.is_timestamp(timestamp_int) + assert util.is_timestamp(timestamp_float) + assert util.is_timestamp(str(timestamp_int)) + assert util.is_timestamp(str(timestamp_float)) + + assert not util.is_timestamp(True) + assert not util.is_timestamp(False) + + class InvalidTimestamp: + pass + + assert not util.is_timestamp(InvalidTimestamp()) + + full_datetime = "2019-06-23T13:12:42" + assert not util.is_timestamp(full_datetime) + + def test_normalize_timestamp(self): + timestamp = 1591161115.194556 + millisecond_timestamp = 1591161115194 + microsecond_timestamp = 1591161115194556 + + assert 
util.normalize_timestamp(timestamp) == timestamp + assert util.normalize_timestamp(millisecond_timestamp) == 1591161115.194 + assert util.normalize_timestamp(microsecond_timestamp) == 1591161115.194556 + + with pytest.raises(ValueError): + util.normalize_timestamp(3e17) + + def test_iso_gregorian(self): + with pytest.raises(ValueError): + util.iso_to_gregorian(2013, 0, 5) + + with pytest.raises(ValueError): + util.iso_to_gregorian(2013, 8, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py new file mode 100644 index 0000000000..2a048feb3f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +import pytz +from dateutil.zoneinfo import get_zonefile_instance + +from arrow import util + + +def make_full_tz_list(): + dateutil_zones = set(get_zonefile_instance().zones) + pytz_zones = set(pytz.all_timezones) + return dateutil_zones.union(pytz_zones) + + +def assert_datetime_equality(dt1, dt2, within=10): + assert dt1.tzinfo == dt2.tzinfo + assert abs(util.total_seconds(dt1 - dt2)) < within diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini new file mode 100644 index 0000000000..46576b12e3 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini @@ -0,0 +1,53 @@ +[tox] +minversion = 3.18.0 +envlist = py{py3,27,35,36,37,38,39},lint,docs +skip_missing_interpreters = true + +[gh-actions] +python = + pypy3: pypy3 + 2.7: py27 + 3.5: py35 + 3.6: py36 + 3.7: py37 + 3.8: py38 + 3.9: py39 + +[testenv] +deps = -rrequirements.txt +allowlist_externals = pytest +commands = pytest + +[testenv:lint] +basepython = python3 +skip_install = true +deps = pre-commit +commands = + pre-commit install + pre-commit run --all-files --show-diff-on-failure + 
+[testenv:docs] +basepython = python3 +skip_install = true +changedir = docs +deps = + doc8 + sphinx + python-dateutil +allowlist_externals = make +commands = + doc8 index.rst ../README.rst --extension .rst --ignore D001 + make html SPHINXOPTS="-W --keep-going" + +[pytest] +addopts = -v --cov-branch --cov=arrow --cov-fail-under=100 --cov-report=term-missing --cov-report=xml +testpaths = tests + +[isort] +line_length = 88 +multi_line_output = 3 +include_trailing_comma = true + +[flake8] +per-file-ignores = arrow/__init__.py:F401 +ignore = E203,E501,W503 diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/__init__.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/__init__.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py rename to 
openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/configparser/helpers.py diff --git a/openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py rename to openpype/modules/default_modules/ftrack/python2_vendor/backports.functools_lru_cache/backports/functools_lru_cache.py diff --git a/openpype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/builtins/builtins/__init__.py similarity index 100% rename from openpype/modules/ftrack/python2_vendor/builtins/builtins/__init__.py rename to openpype/modules/default_modules/ftrack/python2_vendor/builtins/builtins/__init__.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore new file mode 100644 index 0000000000..be621609ab --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore @@ -0,0 +1,42 @@ +# General +*.py[cod] + +# Packages +*.egg +*.egg-info +dist +build +.eggs/ +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.tox + +# Caches +Thumbs.db + +# Development +.project +.pydevproject +.settings +.idea/ +.history/ +.vscode/ + +# Testing +.cache +test-reports/* +.pytest_cache/* \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python new file mode 100644 index 
0000000000..9dc010d803 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. 
All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. 
Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. 
CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. 
By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt new file mode 100644 index 0000000000..d9a10c0d8e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in new file mode 100644 index 0000000000..3216ee548c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in @@ -0,0 +1,4 @@ +include LICENSE.txt +include README.rst +recursive-include resource *.py +recursive-include doc *.rst *.conf *.py *.png *.css diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst new file mode 100644 index 0000000000..074a35f97c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst @@ -0,0 +1,34 @@ +################# +ftrack Python API +################# + +Python API for ftrack. + +.. important:: + + This is the new Python client for the ftrack API. If you are migrating from + the old client then please read the dedicated `migration guide `_. + +************* +Documentation +************* + +Full documentation, including installation and setup guides, can be found at +http://ftrack-python-api.rtd.ftrack.com/en/stable/ + +********************* +Copyright and license +********************* + +Copyright (c) 2014 ftrack + +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this work except in compliance with the License. You may obtain a copy of the +License in the LICENSE.txt file, or at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed +under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +CONDITIONS OF ANY KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml new file mode 100644 index 0000000000..355f00f752 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml @@ -0,0 +1,24 @@ +# Test configuration for bitbucket pipelines. +options: + max-time: 20 +definitions: + services: + ftrack: + image: + name: ftrackdocker/test-server:latest + username: $DOCKER_HUB_USERNAME + password: $DOCKER_HUB_PASSWORD + email: $DOCKER_HUB_EMAIL +pipelines: + default: + - parallel: + - step: + name: run tests against python 2.7.x + image: python:2.7 + caches: + - pip + services: + - ftrack + script: + - bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' $FTRACK_SERVER)" != "200" ]]; do sleep 1; done' + - python setup.py test \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css new file mode 100644 index 0000000000..3456b0c3c5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css @@ -0,0 +1,16 @@ +@import "css/theme.css"; + +.domain-summary li { + float: left; + min-width: 12em; +} + +.domain-summary ul:before, ul:after { + content: ''; + clear: both; + display:block; +} + +.rst-content table.docutils td:last-child { + white-space: normal; +} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst new file mode 100644 index 0000000000..4e165b0122 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************ +ftrack_api.accessor.base +************************ + +.. automodule:: ftrack_api.accessor.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst new file mode 100644 index 0000000000..f7d9dddf37 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************ +ftrack_api.accessor.disk +************************ + +.. automodule:: ftrack_api.accessor.disk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst new file mode 100644 index 0000000000..0adc23fe2d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************* +ftrack_api.accessor +******************* + +.. automodule:: ftrack_api.accessor + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst new file mode 100644 index 0000000000..62bd7f4165 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst @@ -0,0 +1,8 @@ +.. 
+ :copyright: Copyright (c) 2015 ftrack + +************************ +ftrack_api.accessor.server +************************ + +.. automodule:: ftrack_api.accessor.server diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst new file mode 100644 index 0000000000..9fd8994eb1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.attribute +******************** + +.. automodule:: ftrack_api.attribute diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst new file mode 100644 index 0000000000..cbf9128a5a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +**************** +ftrack_api.cache +**************** + +.. automodule:: ftrack_api.cache diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst new file mode 100644 index 0000000000..607d574cb5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +********************* +ftrack_api.collection +********************* + +.. 
automodule:: ftrack_api.collection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst new file mode 100644 index 0000000000..0bc4ce35f1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +******************************* +ftrack_api.entity.asset_version +******************************* + +.. automodule:: ftrack_api.entity.asset_version diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst new file mode 100644 index 0000000000..f4beedc9a4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +********************** +ftrack_api.entity.base +********************** + +.. automodule:: ftrack_api.entity.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst new file mode 100644 index 0000000000..c9ce0a0cf1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +*************************** +ftrack_api.entity.component +*************************** + +.. 
automodule:: ftrack_api.entity.component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst new file mode 100644 index 0000000000..483c16641b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +************************* +ftrack_api.entity.factory +************************* + +.. automodule:: ftrack_api.entity.factory diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst new file mode 100644 index 0000000000..fce68c0e94 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************** +ftrack_api.entity +***************** + +.. automodule:: ftrack_api.entity + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst new file mode 100644 index 0000000000..9d22a7c378 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************* +ftrack_api.entity.job +********************* + +.. 
automodule:: ftrack_api.entity.job diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst new file mode 100644 index 0000000000..60e006a10c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************** +ftrack_api.entity.location +************************** + +.. automodule:: ftrack_api.entity.location diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst new file mode 100644 index 0000000000..3588e48e5b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************** +ftrack_api.entity.note +********************** + +.. automodule:: ftrack_api.entity.note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst new file mode 100644 index 0000000000..5777ab0b40 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +******************************** +ftrack_api.entity.project_schema +******************************** + +.. 
automodule:: ftrack_api.entity.project_schema diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst new file mode 100644 index 0000000000..0014498b9c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************** +ftrack_api.entity.user +********************** + +.. automodule:: ftrack_api.entity.user diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst new file mode 100644 index 0000000000..2b0ca8d3ed --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +********************* +ftrack_api.event.base +********************* + +.. automodule:: ftrack_api.event.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst new file mode 100644 index 0000000000..f582717060 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +*************************** +ftrack_api.event.expression +*************************** + +.. 
automodule:: ftrack_api.event.expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst new file mode 100644 index 0000000000..36d7a33163 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.event.hub +******************** + +.. automodule:: ftrack_api.event.hub diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst new file mode 100644 index 0000000000..0986e8e2f4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +**************** +ftrack_api.event +**************** + +.. automodule:: ftrack_api.event + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst new file mode 100644 index 0000000000..974f375817 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +*************************** +ftrack_api.event.subscriber +*************************** + +.. 
automodule:: ftrack_api.event.subscriber diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst new file mode 100644 index 0000000000..94a20e3611 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************************** +ftrack_api.event.subscription +***************************** + +.. automodule:: ftrack_api.event.subscription diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst new file mode 100644 index 0000000000..64c3a699d7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.exception +******************** + +.. automodule:: ftrack_api.exception diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst new file mode 100644 index 0000000000..9b8154bdc3 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.formatter +******************** + +.. 
automodule:: ftrack_api.formatter diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst new file mode 100644 index 0000000000..ea3517ca68 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst @@ -0,0 +1,20 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _api_reference: + +************* +API Reference +************* + +ftrack_api +========== + +.. automodule:: ftrack_api + +.. toctree:: + :maxdepth: 1 + :glob: + + */index + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst new file mode 100644 index 0000000000..8223ee72f2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +********************* +ftrack_api.inspection +********************* + +.. automodule:: ftrack_api.inspection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst new file mode 100644 index 0000000000..ecb883d385 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +****************** +ftrack_api.logging +****************** + +.. 
automodule:: ftrack_api.logging diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst new file mode 100644 index 0000000000..b2dff9933d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +******************** +ftrack_api.operation +******************** + +.. automodule:: ftrack_api.operation diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst new file mode 100644 index 0000000000..a4993d94cf --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************** +ftrack_api.plugin +***************** + +.. automodule:: ftrack_api.plugin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst new file mode 100644 index 0000000000..acbd8d237a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +**************** +ftrack_api.query +**************** + +.. 
automodule:: ftrack_api.query diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst new file mode 100644 index 0000000000..09cdad8627 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst @@ -0,0 +1,10 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _api_reference/resource_identifier_transformer.base: + +*********************************************** +ftrack_api.resource_identifier_transformer.base +*********************************************** + +.. automodule:: ftrack_api.resource_identifier_transformer.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst new file mode 100644 index 0000000000..755f052c9d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst @@ -0,0 +1,16 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _api_reference/resource_identifier_transformer: + +****************************************** +ftrack_api.resource_identifier_transformer +****************************************** + +.. automodule:: ftrack_api.resource_identifier_transformer + +.. 
toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst new file mode 100644 index 0000000000..dcce173d1f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +****************** +ftrack_api.session +****************** + +.. automodule:: ftrack_api.session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst new file mode 100644 index 0000000000..55a1cc75d2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +************************* +ftrack_api.structure.base +************************* + +.. automodule:: ftrack_api.structure.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst new file mode 100644 index 0000000000..ade2c7ae88 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +*********************** +ftrack_api.structure.id +*********************** + +.. 
automodule:: ftrack_api.structure.id diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst new file mode 100644 index 0000000000..cbd4545cf7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst @@ -0,0 +1,14 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******************** +ftrack_api.structure +******************** + +.. automodule:: ftrack_api.structure + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst new file mode 100644 index 0000000000..403173e257 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +*************************** +ftrack_api.structure.origin +*************************** + +.. automodule:: ftrack_api.structure.origin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst new file mode 100644 index 0000000000..5c0d88026b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +***************************** +ftrack_api.structure.standard +***************************** + +.. 
automodule:: ftrack_api.structure.standard diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst new file mode 100644 index 0000000000..55dc0125a8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst @@ -0,0 +1,8 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +***************** +ftrack_api.symbol +***************** + +.. automodule:: ftrack_api.symbol diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst new file mode 100644 index 0000000000..bfc5cef401 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst @@ -0,0 +1,175 @@ +.. + :copyright: Copyright (c) 2015 ftrack + + +.. _caching: + +******* +Caching +******* + +The API makes use of caching in order to provide more efficient retrieval of +data by reducing the number of calls to the remote server:: + + # First call to retrieve user performs a request to the server. + user = session.get('User', 'some-user-id') + + # A later call in the same session to retrieve the same user just gets + # the existing instance from the cache without a request to the server. + user = session.get('User', 'some-user-id') + +It also seamlessly merges related data together regardless of how it was +retrieved:: + + >>> timelog = user['timelogs'][0] + >>> with session.auto_populating(False): + >>> print timelog['comment'] + NOT_SET + >>> session.query( + ... 'select comment from Timelog where id is "{0}"' + ... .format(timelog['id']) + ... 
).all() + >>> with session.auto_populating(False): + >>> print timelog['comment'] + 'Some comment' + +By default, each :class:`~ftrack_api.session.Session` is configured with a +simple :class:`~ftrack_api.cache.MemoryCache()` and the cache is lost as soon as +the session expires. + +Configuring a session cache +=========================== + +It is possible to configure the cache that a session uses. An example would be a +persistent auto-populated cache that survives between sessions:: + + import os + import ftrack_api.cache + + # Specify where the file based cache should be stored. + cache_path = os.path.join(tempfile.gettempdir(), 'ftrack_session_cache.dbm') + + + # Define a cache maker that returns a file based cache. Note that a + # function is used because the file based cache should use the session's + # encode and decode methods to serialise the entity data to a format that + # can be written to disk (JSON). + def cache_maker(session): + '''Return cache to use for *session*.''' + return ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=session.encode, + decode=session.decode + ) + + # Create the session using the cache maker. + session = ftrack_api.Session(cache=cache_maker) + +.. note:: + + There can be a performance penalty when using a more complex cache setup. + For example, serialising data and also writing and reading from disk can be + relatively slow operations. + +Regardless of the cache specified, the session will always construct a +:class:`~ftrack_api.cache.LayeredCache` with a +:class:`~ftrack_api.cache.MemoryCache` at the top level and then your cache at +the second level. This is to ensure consistency of instances returned by the +session. 
+ +You can check (or even modify) at any time what cache configuration a session is +using by accessing the `cache` attribute on a +:class:`~ftrack_api.session.Session`:: + + >>> print session.cache + + +Writing a new cache interface +============================= + +If you have a custom cache backend you should be able to integrate it into the +system by writing a cache interface that matches the one defined by +:class:`ftrack_api.cache.Cache`. This typically involves a subclass and +overriding the :meth:`~ftrack_api.cache.Cache.get`, +:meth:`~ftrack_api.cache.Cache.set` and :meth:`~ftrack_api.cache.Cache.remove` +methods. + + +Managing what gets cached +========================= + +The cache system is quite flexible when it comes to controlling what should be +cached. + +Consider you have a layered cache where the bottom layer cache should be +persisted between sessions. In this setup you probably don't want the persisted +cache to hold non-persisted values, such as modified entity values or newly +created entities not yet committed to the server. However, you might want the +top level memory cache to hold onto these values. + +Here is one way to set this up. First define a new proxy cache that is selective +about what it sets:: + + import ftrack_api.inspection + + + class SelectiveCache(ftrack_api.cache.ProxyCache): + '''Proxy cache that won't cache newly created entities.''' + + def set(self, key, value): + '''Set *value* for *key*.''' + if isinstance(value, ftrack_api.entity.base.Entity): + if ( + ftrack_api.inspection.state(value) + is ftrack_api.symbol.CREATED + ): + return + + super(SelectiveCache, self).set(key, value) + +Now use this custom cache to wrap the serialised cache in the setup above: + +.. 
code-block:: python + :emphasize-lines: 3, 9 + + def cache_maker(session): + '''Return cache to use for *session*.''' + return SelectiveCache( + ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=session.encode, + decode=session.decode + ) + ) + +Now to prevent modified attributes also being persisted, tweak the encode +settings for the file cache: + +.. code-block:: python + :emphasize-lines: 1, 9-12 + + import functools + + + def cache_maker(session): + '''Return cache to use for *session*.''' + return SelectiveCache( + ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=functools.partial( + session.encode, + entity_attribute_strategy='persisted_only' + ), + decode=session.decode + ) + ) + +And use the updated cache maker for your session:: + + session = ftrack_api.Session(cache=cache_maker) + +.. note:: + + For some type of attributes that are computed, long term caching is not + recommended and such values will not be encoded with the `persisted_only` + strategy. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py new file mode 100644 index 0000000000..1154472155 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py @@ -0,0 +1,102 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +'''ftrack Python API documentation build configuration file.''' + +import os +import re + +# -- General ------------------------------------------------------------------ + +# Extensions. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.extlinks', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.viewcode', + 'lowdown' +] + + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. 
+project = u'ftrack Python API' +copyright = u'2014, ftrack' + +# Version +with open( + os.path.join( + os.path.dirname(__file__), '..', 'source', + 'ftrack_api', '_version.py' + ) +) as _version_file: + _version = re.match( + r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL + ).group(1) + +version = _version +release = _version + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_template'] + +# A list of prefixes to ignore for module listings. +modindex_common_prefix = [ + 'ftrack_api.' +] + +# -- HTML output -------------------------------------------------------------- + +if not os.environ.get('READTHEDOCS', None) == 'True': + # Only import and set the theme if building locally. + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +html_static_path = ['_static'] +html_style = 'ftrack.css' + +# If True, copy source rst files to output for reference. 
+html_copy_source = True + + +# -- Autodoc ------------------------------------------------------------------ + +autodoc_default_flags = ['members', 'undoc-members', 'inherited-members'] +autodoc_member_order = 'bysource' + + +def autodoc_skip(app, what, name, obj, skip, options): + '''Don't skip __init__ method for autodoc.''' + if name == '__init__': + return False + + return skip + + +# -- Intersphinx -------------------------------------------------------------- + +intersphinx_mapping = { + 'python': ('http://docs.python.org/', None), + 'ftrack': ( + 'http://rtd.ftrack.com/docs/ftrack/en/stable/', None + ) +} + + +# -- Todos --------------------------------------------------------------------- + +todo_include_todos = os.environ.get('FTRACK_DOC_INCLUDE_TODOS', False) == 'True' + + +# -- Setup -------------------------------------------------------------------- + +def setup(app): + app.connect('autodoc-skip-member', autodoc_skip) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf new file mode 100644 index 0000000000..3c927cc1ee --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf @@ -0,0 +1,2 @@ +[html4css1 writer] +field-name-limit:0 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst new file mode 100644 index 0000000000..99019ee44f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst @@ -0,0 +1,56 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. 
_environment_variables: + +********************* +Environment variables +********************* + +The following is a consolidated list of environment variables that this API +can reference: + +.. envvar:: FTRACK_SERVER + + The full url of the ftrack server to connect to. For example + "https://mycompany.ftrackapp.com" + +.. envvar:: FTRACK_API_USER + + The username of the ftrack user to act on behalf of when performing actions + in the system. + + .. note:: + + When this environment variable is not set, the API will typically also + check other standard operating system variables that hold the username + of the current logged in user. To do this it uses + :func:`getpass.getuser`. + +.. envvar:: FTRACK_API_KEY + + The API key to use when performing actions in the system. The API key is + used to determine the permissions that a script has in the system. + +.. envvar:: FTRACK_APIKEY + + For backwards compatibility. See :envvar:`FTRACK_API_KEY`. + +.. envvar:: FTRACK_EVENT_PLUGIN_PATH + + Paths to search recursively for plugins to load and use in a session. + Multiple paths can be specified by separating with the value of + :attr:`os.pathsep` (e.g. ':' or ';'). + +.. envvar:: FTRACK_API_SCHEMA_CACHE_PATH + + Path to a directory that will be used for storing and retrieving a cache of + the entity schemas fetched from the server. + +.. envvar:: http_proxy / https_proxy + + If you need to use a proxy to connect to ftrack you can use the + "standard" :envvar:`http_proxy` and :envvar:`https_proxy`. Please note that they + are lowercase. 
+ + For example "export https_proxy=http://proxy.mycompany.com:8080" \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst new file mode 100644 index 0000000000..0c44a1b68c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst @@ -0,0 +1,137 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _event_list: + +********** +Event list +********** + +The following is a consolidated list of events published directly by this API. + +For some events, a template plugin file is also listed for download +(:guilabel:`Download template plugin`) to help get you started with writing your +own plugin for a particular event. + +.. seealso:: + + * :ref:`handling_events` + * :ref:`ftrack server event list ` + +.. _event_list/ftrack.api.session.construct-entity-type: + +ftrack.api.session.construct-entity-type +======================================== + +:download:`Download template plugin +` + +:ref:`Synchronous `. Published by +the session to retrieve constructed class for specified schema:: + + Event( + topic='ftrack.api.session.construct-entity-type', + data=dict( + schema=schema, + schemas=schemas + ) + ) + +Expects returned data to be:: + + A Python class. + +.. seealso:: :ref:`working_with_entities/entity_types`. + +.. _event_list/ftrack.api.session.configure-location: + +ftrack.api.session.configure-location +===================================== + +:download:`Download template plugin +` + +:ref:`Synchronous `. Published by +the session to allow configuring of location instances:: + + Event( + topic='ftrack.api.session.configure-location', + data=dict( + session=self + ) + ) + +.. seealso:: :ref:`Configuring locations `. + +.. 
_event_list/ftrack.location.component-added: + +ftrack.location.component-added +=============================== + +Published whenever a component is added to a location:: + + Event( + topic='ftrack.location.component-added', + data=dict( + component_id='e2dc0524-b576-11d3-9612-080027331d74', + location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' + ) + ) + +.. _event_list/ftrack.location.component-removed: + +ftrack.location.component-removed +================================= + +Published whenever a component is removed from a location:: + + Event( + topic='ftrack.location.component-removed', + data=dict( + component_id='e2dc0524-b576-11d3-9612-080027331d74', + location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' + ) + ) + +.. _event_list/ftrack.api.session.ready: + +ftrack.api.session.ready +======================== + +:ref:`Synchronous `. Published after +a :class:`~ftrack_api.session.Session` has been initialized and +is ready to be used:: + + Event( + topic='ftrack.api.session.ready', + data=dict( + session=, + ) + ) + +.. warning:: + + Since the event is synchronous and blocking, avoid doing any unnecessary + work as it will slow down session initialization. + +.. seealso:: + + Also see example usage in :download:`example_plugin_using_session.py + `. + + +.. _event_list/ftrack.api.session.reset: + +ftrack.api.session.reset +======================== + +:ref:`Synchronous `. 
Published after +a :class:`~ftrack_api.session.Session` has been reset and is ready to be used +again:: + + Event( + topic='ftrack.api.session.reset', + data=dict( + session=, + ) + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst new file mode 100644 index 0000000000..985eb9bb44 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst @@ -0,0 +1,82 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/assignments_and_allocations: + +**************************************** +Working with assignments and allocations +**************************************** + +.. currentmodule:: ftrack_api.session + +The API exposes `assignments` and `allocations` relationships on objects in +the project hierarchy. You can use these to retrieve the allocated or assigned +resources, which can be either groups or users. + +Allocations can be used to allocate users or groups to a project team, while +assignments are more explicit and are used to assign users to tasks. Both +assignment and allocations are modelled as `Appointment` objects, with a +`type` attribute indicating the type of the appointment.
+ +The following example retrieves all users part of the project team:: + + # Retrieve a project + project = session.query('Project').first() + + # Set to hold all users part of the project team + project_team = set() + + # Add all allocated groups and users + for allocation in project['allocations']: + + # Resource may be either a group or a user + resource = allocation['resource'] + + # If the resource is a group, add its members + if isinstance(resource, session.types['Group']): + for membership in resource['memberships']: + user = membership['user'] + project_team.add(user) + + # The resource is a user, add it. + else: + user = resource + project_team.add(user) + +The next example shows how to assign the current user to a task:: + + # Retrieve a task and the current user + task = session.query('Task').first() + current_user = session.query( + u'User where username is {0}'.format(session.api_user) + ).one() + + # Create a new Appointment of type assignment. + session.create('Appointment', { + 'context': task, + 'resource': current_user, + 'type': 'assignment' + }) + + # Finally, persist the new assignment + session.commit() + +To list all users assigned to a task, see the following example:: + + task = session.query('Task').first() + users = session.query( + 'select first_name, last_name from User ' + 'where assignments any (context_id = "{0}")'.format(task['id']) + ) + for user in users: + print user['first_name'], user['last_name'] + +To list the current user's assigned tasks, see the example below:: + + assigned_tasks = session.query( + 'select link from Task ' + 'where assignments any (resource.username = "{0}")'.format(session.api_user) + ) + for task in assigned_tasks: + print u' / '.join(item['name'] for item in task['link']) + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst new file mode 
100644 index 0000000000..6a39bb20d1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst @@ -0,0 +1,23 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/component: + +*********************** +Working with components +*********************** + +.. currentmodule:: ftrack_api.session + +Components can be created manually or using the provide helper methods on a +:meth:`session ` or existing +:meth:`asset version +`:: + + component = version.create_component('/path/to/file_or_sequence.jpg') + session.commit() + +When a component is created using the helpers it is automatically added to a +location. + +.. seealso:: :ref:`Locations tutorial ` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst new file mode 100644 index 0000000000..033942b442 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst @@ -0,0 +1,94 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/custom_attribute: + +*********************** +Using custom attributes +*********************** + +.. currentmodule:: ftrack_api.session + +Custom attributes can be written and read from entities using the +``custom_attributes`` property. + +The ``custom_attributes`` property provides a similar interface to a dictionary. 
+ +Keys can be printed using the keys method:: + + >>> task['custom_attributes'].keys() + [u'my_text_field'] + +or access keys and values as items:: + + >>> print task['custom_attributes'].items() + [(u'my_text_field', u'some text')] + +Read existing custom attribute values:: + + >>> print task['custom_attributes']['my_text_field'] + 'some text' + +Updating a custom attribute can also be done similarly to a dictionary:: + + task['custom_attributes']['my_text_field'] = 'foo' + +To query for tasks with a custom attribute, ``my_text_field``, you can use the +key from the configuration:: + + for task in session.query( + 'Task where custom_attributes any ' + '(key is "my_text_field" and value is "bar")' + ): + print task['name'] + +Limitations +=========== + +Expression attributes +--------------------- + +Expression attributes are not yet supported and the reported value will +always be the non-evaluated expression. + +Hierarchical attributes +----------------------- + +Hierarchical attributes are not yet fully supported in the API. Hierarchical +attributes support both read and write, but when read they are not calculated +and instead the `raw` value is returned:: + + # The hierarchical attribute `my_attribute` is set on Shot but this will not + # be reflected on the children. Instead the raw value is returned. + print shot['custom_attributes']['my_attribute'] + 'foo' + print task['custom_attributes']['my_attribute'] + None + +To work around this limitation it is possible to use the legacy api for +hierarchical attributes or to manually query the parents for values and use the +first value that is set. + +Validation +========== + +Custom attributes are validated on the ftrack server before being persisted. The +validation will check that the type of the data is correct for the custom +attribute.
+ + * number - :py:class:`int` or :py:class:`float` + * text - :py:class:`str` or :py:class:`unicode` + * enumerator - :py:class:`list` + * boolean - :py:class:`bool` + * date - :py:class:`datetime.datetime` or :py:class:`datetime.date` + +If the value set is not valid a :py:exc:`ftrack_api.exception.ServerError` is +raised with debug information:: + + shot['custom_attributes']['fstart'] = 'test' + + Traceback (most recent call last): + ... + ftrack_api.exception.ServerError: Server reported error: + ValidationError(Custom attribute value for "fstart" must be of type number. + Got "test" of type ) \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst new file mode 100644 index 0000000000..2be01ffe47 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst @@ -0,0 +1,53 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example/encode_media: + +************** +Encoding media +************** + +Media such as images and video can be encoded by the ftrack server to allow +playing it in the ftrack web interface. Media can be encoded using +:meth:`ftrack_api.session.Session.encode_media` which accepts a path to a file +or an existing component in the ftrack.server location. 
+ +Here is an example of how to encode a video and read the output:: + + job = session.encode_media('/PATH/TO/MEDIA') + job_data = json.loads(job['data']) + + print 'Source component id', job_data['source_component_id'] + print 'Keeping original component', job_data['keep_original'] + for output in job_data['output']: + print u'Output component - id: {0}, format: {1}'.format( + output['component_id'], output['format'] + ) + +You can also call the corresponding helper method on an :meth:`asset version +`, to have the +encoded components automatically associated with the version:: + + job = asset_version.encode_media('/PATH/TO/MEDIA') + +It is also possible to get the URL to an encoded component once the job has +finished:: + + job = session.encode_media('/PATH/TO/MEDIA') + + # Wait for job to finish. + + location = session.query('Location where name is "ftrack.server"').one() + for component in job['job_components']: + print location.get_url(component) + +Media can also be an existing component in another location. Before encoding it, +the component needs to be added to the ftrack.server location:: + + location = session.query('Location where name is "ftrack.server"').one() + location.add_component(component) + session.commit() + + job = session.encode_media(component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst new file mode 100644 index 0000000000..43e31484f4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst @@ -0,0 +1,56 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. _example/entity_links: + +****************** +Using entity links +****************** + +A link can be used to represent a dependency or another relation between +two entities in ftrack. 
+ +There are two types of entities that can be linked: + +* Versions can be linked to other asset versions, where the link entity type + is `AssetVersionLink`. +* Objects like Task, Shot or Folder, where the link entity type is + `TypedContextLink`. + +Both `AssetVersion` and `TypedContext` objects have the same relations +`incoming_links` and `outgoing_links`. To list the incoming links to a Shot we +can use the relationship `incoming_links`:: + + for link in shot['incoming_links']: + print link['from'], link['to'] + +In the above example `link['to']` is the shot and `link['from']` could be an +asset build or something else that is linked to the shot. There is an equivalent +`outgoing_links` that can be used to access outgoing links on an object. + +To create a new link between objects or asset versions create a new +`TypedContextLink` or `AssetVersionLink` entity with the from and to properties +set. In this example we will link two asset versions:: + + session.create('AssetVersionLink', { + 'from': from_asset_version, + 'to': to_asset_version + }) + session.commit() + +Using asset version link shortcut +================================= + +Links on asset version can also be created by the use of the `uses_versions` and +`used_in_versions` relations:: + + rig_version['uses_versions'].append(model_version) + session.commit() + +This has the same result as creating the `AssetVersionLink` entity as in the +previous section. 
+ +Which versions are using the model can be listed with:: + + for version in model_version['used_in_versions']: + print '{0} is using {1}'.format(version, model_version) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst new file mode 100644 index 0000000000..4fca37d754 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst @@ -0,0 +1,52 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example: + +************** +Usage examples +************** + +The following examples show how to use the API to accomplish specific tasks +using the default configuration. + +.. note:: + + If you are using a server with a customised configuration you may need to + alter the examples slightly to make them work correctly. + +Most of the examples assume you have the *ftrack_api* package imported and have +already constructed a :class:`Session`:: + + import ftrack_api + + session = ftrack_api.Session() + + +.. toctree:: + + project + component + review_session + metadata + custom_attribute + manage_custom_attribute_configuration + link_attribute + scope + job + note + list + timer + assignments_and_allocations + thumbnail + encode_media + entity_links + web_review + publishing + security_roles + task_template + sync_ldap_users + invite_user + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst new file mode 100644 index 0000000000..342f0ef602 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst @@ -0,0 +1,31 @@ +.. + :copyright: Copyright (c) 2017 ftrack + +.. 
_example/invite_user: + +********************* +Invite user +********************* + +Here we create a new user and send them a invitation through mail + + +Create a new user:: + + user_email = 'artist@mail.vfx-company.com' + + new_user = session.create( + 'User', { + 'username':user_email, + 'email':user_email, + 'is_active':True + } + ) + + session.commit() + + +Invite our new user:: + + new_user.send_invite() + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst new file mode 100644 index 0000000000..296a0f5e17 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst @@ -0,0 +1,97 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/job: + +************* +Managing jobs +************* + +.. currentmodule:: ftrack_api.session + +Jobs can be used to display feedback to users in the ftrack web interface when +performing long running tasks in the API. + +To create a job use :meth:`Session.create`:: + + user = # Get a user from ftrack. + + job = session.create('Job', { + 'user': user, + 'status': 'running' + }) + +The created job will appear as running in the :guilabel:`jobs` menu for the +specified user. To set a description on the job, add a dictionary containing +description as the `data` key: + +.. note:: + + In the current version of the API the dictionary needs to be JSON + serialised. + +.. code-block:: python + + import json + + job = session.create('Job', { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'My custom job description.' + }) + }) + +When the long running task has finished simply set the job as completed and +continue with the next task. + +.. code-block:: python + + job['status'] = 'done' + session.commit() + +Attachments +=========== + +Job attachments are files that are attached to a job. 
In the ftrack web +interface these attachments can be downloaded by clicking on a job in the `Jobs` +menu. + +To get a job's attachments through the API you can use the `job_components` +relation and then use the ftrack server location to get the download URL:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + for job_component in job['job_components']: + print 'Download URL: {0}'.format( + server_location.get_url(job_component['component']) + ) + +To add an attachment to a job you have to add it to the ftrack server location +and create a `jobComponent`:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + # Create component and name it "My file". + component = session.create_component( + '/path/to/file', + data={'name': 'My file'}, + location=server_location + ) + + # Attach the component to the job. + session.create( + 'JobComponent', + {'component_id': component['id'], 'job_id': job['id']} + ) + + session.commit() + +.. note:: + + The ftrack web interface does only support downloading one attachment so + attaching more than one will have limited support in the web interface. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst new file mode 100644 index 0000000000..1dcea842cd --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst @@ -0,0 +1,55 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/link_attribute: + +********************* +Using link attributes +********************* + +The `link` attribute can be used to retreive the ids and names of the parents of +an object. It is particularly useful in cases where the path of an object must +be presented in a UI, but can also be used to speedup certain query patterns. 
+
+You can use the `link` attribute on any entity inheriting from a
+`Context` or `AssetVersion`. Here we use it on the `Task` entity::
+
+    task = session.query(
+        'select link from Task where name is "myTask"'
+    ).first()
+    print task['link']
+
+It can also be used to create a list of parent entities, including the task
+itself::
+
+    entities = []
+    for item in task['link']:
+        entities.append(session.get(item['type'], item['id']))
+
+The `link` attribute is an ordered list of dictionaries containing data
+of the parents and the item itself. Each dictionary contains the following
+entries:
+
+    id
+        The id of the object and can be used to do a :meth:`Session.get`.
+    name
+        The name of the object.
+    type
+        The schema id of the object.
+
+A more advanced use-case is to get the parent names and ids of all timelogs for
+a user::
+
+    for timelog in session.query(
+        'select context.link, start, duration from Timelog '
+        'where user.username is "john.doe"'
+    ):
+        print timelog['context']['link'], timelog['start'], timelog['duration']
+
+The attribute is also available from the `AssetVersion` asset relation::
+
+    for asset_version in session.query(
+        'select link from AssetVersion '
+        'where user.username is "john.doe"'
+    ):
+        print asset_version['link']
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst
new file mode 100644
index 0000000000..155b25f9af
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst
@@ -0,0 +1,46 @@
+..
+    :copyright: Copyright (c) 2015 ftrack
+
+.. _example/list:
+
+***********
+Using lists
+***********
+
+.. currentmodule:: ftrack_api.session
+
+Lists can be used to create a collection of asset versions or objects such as
+tasks. 
It could be a list of items that should be sent to client, be included in
+today's review session or items that belong together in a way that is different
+from the project hierarchy.
+
+There are two types of lists, one for asset versions and one for other objects
+such as tasks.
+
+To create a list use :meth:`Session.create`::
+
+    user = # Get a user from ftrack.
+    project = # Get a project from ftrack.
+    list_category = # Get a list category from ftrack.
+
+    asset_version_list = session.create('AssetVersionList', {
+        'owner': user,
+        'project': project,
+        'category': list_category
+    })
+
+    task_list = session.create('TypedContextList', {
+        'owner': user,
+        'project': project,
+        'category': list_category
+    })
+
+Then add items to the list like this::
+
+    asset_version_list['items'].append(asset_version)
+    task_list['items'].append(task)
+
+And remove items from the list like this::
+
+    asset_version_list['items'].remove(asset_version)
+    task_list['items'].remove(task)
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst
new file mode 100644
index 0000000000..e3d7c4062c
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst
@@ -0,0 +1,320 @@
+..
+    :copyright: Copyright (c) 2017 ftrack
+
+.. _example/manage_custom_attribute_configuration:
+
+****************************************
+Managing custom attribute configurations
+****************************************
+
+From the API it is not only possible to
+:ref:`read and update custom attributes for entities <example/custom_attribute>`,
+but also to manage custom attribute configurations.
+
+Existing custom attribute configurations can be queried as ::
+
+    # Print all existing custom attribute configurations. 
+ print session.query('CustomAttributeConfiguration').all() + +Use :meth:`Session.create` to create a new custom attribute configuration:: + + # Get the custom attribute type. + custom_attribute_type = session.query( + 'CustomAttributeType where name is "text"' + ).one() + + # Create a custom attribute configuration. + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Asset version text attribute', + 'key': 'asset_version_text_attribute', + 'default': 'bar', + 'config': json.dumps({'markdown': False}) + }) + + # Persist it to the ftrack instance. + session.commit() + +.. tip:: + + The example above does not add security roles. This can be done either + from System Settings in the ftrack web application, or by following the + :ref:`example/manage_custom_attribute_configuration/security_roles` example. + +Global or project specific +========================== + +A custom attribute can be global or project specific depending on the +`project_id` attribute:: + + # Create a custom attribute configuration. + session.create('CustomAttributeConfiguration', { + # Set the `project_id` and the custom attribute will only be available + # on `my_project`. + 'project_id': my_project['id'], + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Asset version text attribute', + 'key': 'asset_version_text_attribute', + 'default': 'bar', + 'config': json.dumps({'markdown': False}) + }) + session.commit() + +A project specific custom attribute can be changed to a global:: + + custom_attribute_configuration['project_id'] = None + session.commit() + +Changing a global custom attribute configuration to a project specific is not +allowed. + +Entity types +============ + +Custom attribute configuration entity types are using a legacy notation. A +configuration can have one of the following as `entity_type`: + +:task: + Represents TypedContext (Folder, Shot, Sequence, Task, etc.) 
custom
+    attribute configurations. When setting this as entity_type the
+    object_type_id must be set as well.
+
+    Creating a text custom attribute for Folder::
+
+        custom_attribute_type = session.query(
+            'CustomAttributeType where name is "text"'
+        ).one()
+        object_type = session.query('ObjectType where name is "Folder"').one()
+        session.create('CustomAttributeConfiguration', {
+            'entity_type': 'task',
+            'object_type_id': object_type['id'],
+            'type': custom_attribute_type,
+            'label': 'Foo',
+            'key': 'foo',
+            'default': 'bar',
+        })
+        session.commit()
+
+    Can be associated with a `project_id`.
+
+:show:
+    Represents Projects custom attribute configurations.
+
+    Can be associated with a `project_id`.
+
+:assetversion:
+    Represents AssetVersion custom attribute configurations.
+
+    Can be associated with a `project_id`.
+
+:user:
+    Represents User custom attribute configurations.
+
+    Must be `global` and cannot be associated with a `project_id`.
+
+:list:
+    Represents List custom attribute configurations.
+
+    Can be associated with a `project_id`.
+
+:asset:
+    Represents Asset custom attribute configurations.
+
+    .. note::
+
+        Asset custom attributes have limited support in the ftrack web
+        interface.
+
+    Can be associated with a `project_id`.
+
+It is not possible to change type after a custom attribute configuration has
+been created.
+
+Custom attribute configuration types
+====================================
+
+Custom attributes can be of different data types depending on what type is set
+in the configuration. Some types require an extra json encoded config to be
+set:
+
+:text:
+    A string type custom attribute.
+
+    The `default` value must be either :py:class:`str` or :py:class:`unicode`.
+
+    Can be either presented as raw text or markdown formatted in applications
+    which support it. This is configured through a markdown key::
+
+        # Get the custom attribute type. 
+ custom_attribute_type = session.query( + 'CustomAttributeType where name is "text"' + ).one() + + # Create a custom attribute configuration. + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Asset version text attribute', + 'key': 'asset_version_text_attribute', + 'default': 'bar', + 'config': json.dumps({'markdown': False}) + }) + + # Persist it to the ftrack instance. + session.commit() + +:boolean: + + A boolean type custom attribute. + + The `default` value must be a :py:class:`bool`. + + No config is required. + +:date: + A date type custom attribute. + + The `default` value must be an :term:`arrow` date - e.g. + arrow.Arrow(2017, 2, 8). + + No config is required. + +:enumerator: + An enumerator type custom attribute. + + The `default` value must be a list with either :py:class:`str` or + :py:class:`unicode`. + + The enumerator can either be single or multi select. The config must a json + dump of a dictionary containing `multiSelect` and `data`. Where + `multiSelect` is True or False and data is a list of options. Each option + should be a dictionary containing `value` and `menu`, where `menu` is meant + to be used as label in a user interface. + + Create a custom attribute enumerator:: + + custom_attribute_type = session.query( + 'CustomAttributeType where name is "enumerator"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Enumerator attribute', + 'key': 'enumerator_attribute', + 'default': ['bar'], + 'config': json.dumps({ + 'multiSelect': True, + 'data': json.dumps([ + {'menu': 'Foo', 'value': 'foo'}, + {'menu': 'Bar', 'value': 'bar'} + ]) + }) + }) + session.commit() + +:dynamic enumerator: + + An enumerator type where available options are fetched from remote. Created + in the same way as enumerator but without `data`. 
+ +:number: + + A number custom attribute can be either decimal or integer for presentation. + + This can be configured through the `isdecimal` config option:: + + custom_attribute_type = session.query( + 'CustomAttributeType where name is "number"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Number attribute', + 'key': 'number_attribute', + 'default': 42, + 'config': json.dumps({ + 'isdecimal': True + }) + }) + session.commit() + +Changing default +================ + +It is possible to update the `default` value of a custom attribute +configuration. This will not change the value of any existing custom +attributes:: + + # Change the default value of custom attributes. This will only affect + # newly created entities. + custom_attribute_configuration['default'] = 43 + session.commit() + +.. _example/manage_custom_attribute_configuration/security_roles: + +Security roles +============== + +By default new custom attribute configurations and the entity values are not +readable or writable by any security role. + +This can be configured through the `read_security_roles` and `write_security_roles` +attributes:: + + # Pick random security role. + security_role = session.query('SecurityRole').first() + custom_attribute_type = session.query( + 'CustomAttributeType where name is "date"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Date attribute', + 'key': 'date_attribute', + 'default': arrow.Arrow(2017, 2, 8), + 'write_security_roles': [security_role], + 'read_security_roles': [security_role] + }) + session.commit() + +.. note:: + + Setting the correct security role is important and must be changed to + whatever security role is appropriate for your configuration and intended + purpose. 
+ +Custom attribute groups +======================= + +A custom attribute configuration can be categorized using a +`CustomAttributeGroup`:: + + group = session.query('CustomAttributeGroup').first() + security_role = session.query('SecurityRole').first() + custom_attribute_type = session.query( + 'CustomAttributeType where name is "enumerator"' + ).first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'assetversion', + 'type': custom_attribute_type, + 'label': 'Enumerator attribute', + 'key': 'enumerator_attribute', + 'default': ['bar'], + 'config': json.dumps({ + 'multiSelect': True, + 'data': json.dumps([ + {'menu': 'Foo', 'value': 'foo'}, + {'menu': 'Bar', 'value': 'bar'} + ]) + }), + 'group': group, + 'write_security_roles': [security_role], + 'read_security_roles': [security_role] + }) + session.commit() + +.. seealso:: + + :ref:`example/custom_attribute` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst new file mode 100644 index 0000000000..7b16881017 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst @@ -0,0 +1,43 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/metadata: + +************** +Using metadata +************** + +.. currentmodule:: ftrack_api.session + +Key/value metadata can be written to entities using the metadata property +and also used to query entities. 
+ +The metadata property has a similar interface as a dictionary and keys can be +printed using the keys method:: + + >>> print new_sequence['metadata'].keys() + ['frame_padding', 'focal_length'] + +or items:: + + >>> print new_sequence['metadata'].items() + [('frame_padding': '4'), ('focal_length': '70')] + +Read existing metadata:: + + >>> print new_sequence['metadata']['frame_padding'] + '4' + +Setting metadata can be done in a few ways where that later one will replace +any existing metadata:: + + new_sequence['metadata']['frame_padding'] = '5' + new_sequence['metadata'] = { + 'frame_padding': '4' + } + +Entities can also be queried using metadata:: + + session.query( + 'Sequence where metadata any (key is "frame_padding" and value is "4")' + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst new file mode 100644 index 0000000000..8f8f1bb57d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst @@ -0,0 +1,169 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example/note: + +*********** +Using notes +*********** + +Notes can be written on almost all levels in ftrack. To retrieve notes on an +entity you can either query them or use the relation called `notes`:: + + task = session.query('Task').first() + + # Retrieve notes using notes property. + notes_on_task = task['notes'] + + # Or query them. + notes_on_task = session.query('Note where parent_id is "{}"'.format( + task['id'] + )) + +.. note:: + + It's currently not possible to use the `parent` property when querying + notes or to use the `parent` property on notes:: + + task = session.query('Task').first() + + # This won't work in the current version of the API. 
+ session.query('Note where parent.id is "{}"'.format( + task['id'] + )) + + # Neither will this. + parent_of_note = note['parent'] + +To create new notes you can either use the helper method called +:meth:`~ftrack_api.entity.note.CreateNoteMixin.create_note` on any entity that +can have notes or use :meth:`Session.create` to create them manually:: + + user = session.query('User').first() + + # Create note using the helper method. + note = task.create_note('My new note', author=user) + + # Manually create a note + note = session.create('Note', { + 'content': 'My new note', + 'author': user + }) + + task['notes'].append(note) + +Replying to an existing note can also be done with a helper method or by +using :meth:`Session.create`:: + + # Create using helper method. + first_note_on_task = task['notes'][0] + first_note_on_task.create_reply('My new reply on note', author=user) + + # Create manually + reply = session.create('Note', { + 'content': 'My new note', + 'author': user + }) + + first_note_on_task.replies.append(reply) + +Notes can have labels. Use the label argument to set labels on the +note using the helper method:: + + label = session.query( + 'NoteLabel where name is "External Note"' + ).first() + + note = task.create_note( + 'New note with external category', author=user, labels=[label] + ) + +Or add labels to notes when creating a note manually:: + + label = session.query( + 'NoteLabel where name is "External Note"' + ).first() + + note = session.create('Note', { + 'content': 'New note with external category', + 'author': user + }) + + session.create('NoteLabelLink', { + 'note_id': note['id], + 'label_id': label['id'] + }) + + task['notes'].append(note) + +.. note:: + + Support for labels on notes was added in ftrack server version 4.3. For + older versions of the server, NoteCategory can be used instead. 
+ +To specify a category when creating a note simply pass a `NoteCategory` instance +to the helper method:: + + category = session.query( + 'NoteCategory where name is "External Note"' + ).first() + + note = task.create_note( + 'New note with external category', author=user, category=category + ) + +When writing notes you might want to direct the note to someone. This is done +by adding users as recipients. If a user is added as a recipient the user will +receive notifications and the note will be displayed in their inbox. + +To add recipients pass a list of user or group instances to the helper method:: + + john = session.query('User where username is "john"').one() + animation_group = session.query('Group where name is "Animation"').first() + + note = task.create_note( + 'Note with recipients', author=user, recipients=[john, animation_group] + ) + +Attachments +=========== + +Note attachments are files that are attached to a note. In the ftrack web +interface these attachments appears next to the note and can be downloaded by +the user. + +To get a note's attachments through the API you can use the `note_components` +relation and then use the ftrack server location to get the download URL:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + for note_component in note['note_components']: + print 'Download URL: {0}'.format( + server_location.get_url(note_component['component']) + ) + +To add an attachment to a note you have to add it to the ftrack server location +and create a `NoteComponent`:: + + server_location = session.query( + 'Location where name is "ftrack.server"' + ).one() + + # Create component and name it "My file". + component = session.create_component( + '/path/to/file', + data={'name': 'My file'}, + location=server_location + ) + + # Attach the component to the note. 
+ session.create( + 'NoteComponent', + {'component_id': component['id'], 'note_id': note['id']} + ) + + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst new file mode 100644 index 0000000000..0b4c0879d6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst @@ -0,0 +1,65 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/project: + +********************* +Working with projects +********************* + +.. currentmodule:: ftrack_api.session + +Creating a project +================== + +A project with sequences, shots and tasks can be created in one single +transaction. Tasks need to have a type and status set on creation based on the +project schema:: + + import uuid + + # Create a unique name for the project. + name = 'projectname_{0}'.format(uuid.uuid1().hex) + + # Naively pick the first project schema. For this example to work the + # schema must contain `Shot` and `Sequence` object types. + project_schema = session.query('ProjectSchema').first() + + # Create the project with the chosen schema. + project = session.create('Project', { + 'name': name, + 'full_name': name + '_full', + 'project_schema': project_schema + }) + + # Retrieve default types. + default_shot_status = project_schema.get_statuses('Shot')[0] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + # Create sequences, shots and tasks. 
+ for sequence_number in range(1, 5): + sequence = session.create('Sequence', { + 'name': 'seq_{0}'.format(sequence_number), + 'parent': project + }) + + for shot_number in range(1, 5): + shot = session.create('Shot', { + 'name': '{0}0'.format(shot_number).zfill(3), + 'parent': sequence, + 'status': default_shot_status + }) + + for task_number in range(1, 5): + session.create('Task', { + 'name': 'task_{0}'.format(task_number), + 'parent': shot, + 'status': default_task_status, + 'type': default_task_type + }) + + # Commit all changes to the server. + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst new file mode 100644 index 0000000000..bf1da18ab9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst @@ -0,0 +1,73 @@ +.. + :copyright: Copyright (c) 2016 ftrack + +.. currentmodule:: ftrack_api.session + +.. _example/publishing: + +******************* +Publishing versions +******************* + +To know more about publishing and the concepts around publishing, read the +`ftrack article `_ +about publishing. + +To publish an asset you first need to get the context where the asset should be +published:: + + # Get a task from a given id. + task = session.get('Task', '423ac382-e61d-4802-8914-dce20c92b740') + +And the parent of the task which will be used to publish the asset on:: + + asset_parent = task['parent'] + +Then we create an asset and a version on the asset:: + + asset_type = session.query('AssetType where name is "Geometry"').one() + asset = session.create('Asset', { + 'name': 'My asset', + 'type': asset_type, + 'parent': asset_parent + }) + asset_version = session.create('AssetVersion', { + 'asset': asset, + 'task': task + }) + +.. 
note:: + + The task is not used as the parent of the asset, instead the task is linked + directly to the AssetVersion. + +Then when we have a version where we can create the components:: + + asset_version.create_component( + '/path/to/a/file.mov', location='auto' + ) + asset_version.create_component( + '/path/to/a/another-file.mov', location='auto' + ) + + session.commit() + +This will automatically create a new component and add it to the location which +has been configured as the first in priority. + +Components can also be named and added to a custom location like this:: + + location = session.query('Location where name is "my-location"') + asset_version.create_component( + '/path/to/a/file.mov', + data={ + 'name': 'foobar' + }, + location=location + ) + +.. seealso:: + + * :ref:`example/component` + * :ref:`example/web_review` + * :ref:`example/thumbnail` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst new file mode 100644 index 0000000000..68f7870d1c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst @@ -0,0 +1,87 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _example/review_session: + +********************* +Using review sessions +********************* + +.. currentmodule:: ftrack_api.session + +Client review sessions can either be queried manually or by using a project +instance. + +.. code-block:: python + + review_sessions = session.query( + 'ReviewSession where name is "Weekly review"' + ) + + project_review_sessions = project['review_sessions'] + +To create a new review session on a specific project use :meth:`Session.create`. + +.. 
code-block:: python

+    review_session = session.create('ReviewSession', {
+        'name': 'Weekly review',
+        'description': 'See updates from last week.',
+        'project': project
+    })
+
+To add objects to a review session create them using
+:meth:`Session.create` and reference a review session and an asset version.
+
+.. code-block:: python
+
+    review_session = session.create('ReviewSessionObject', {
+        'name': 'Compositing',
+        'description': 'Fixed shadows.',
+        'version': 'Version 3',
+        'review_session': review_session,
+        'asset_version': asset_version
+    })
+
+To list all objects in a review session.
+
+.. code-block:: python
+
+    review_session_objects = review_session['review_session_objects']
+
+Listing and adding collaborators to review session can be done using
+:meth:`Session.create` and the `review_session_invitees` relation on a
+review session.
+
+.. code-block:: python
+
+    invitee = session.create('ReviewSessionInvitee', {
+        'name': 'John Doe',
+        'email': 'john.doe@example.com',
+        'review_session': review_session
+    })
+
+    session.commit()
+
+    invitees = review_session['review_session_invitees']
+
+To remove a collaborator simply delete the object using
+:meth:`Session.delete`.
+
+.. code-block:: python
+
+    session.delete(invitee)
+
+To send out an invite email to a single collaborator use
+:meth:`Session.send_review_session_invite`.
+
+.. code-block:: python
+
+    session.send_review_session_invite(invitee)
+
+Multiple invitees can have emails sent to them in one batch using
+:meth:`Session.send_review_session_invites`.
+
+.. 
code-block:: python + + session.send_review_session_invites(a_list_of_invitees) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst new file mode 100644 index 0000000000..3be42322ce --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst @@ -0,0 +1,27 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _example/scope: + +************ +Using scopes +************ + +.. currentmodule:: ftrack_api.session + +Entities can be queried based on their scopes:: + + >>> tasks = session.query( + ... 'Task where scopes.name is "London"' + ... ) + +Scopes can be read and modified for entities:: + + >>> scope = session.query( + ... 'Scope where name is "London"' + ... )[0] + ... + ... if scope in task['scopes']: + ... task['scopes'].remove(scope) + ... else: + ... task['scopes'].append(scope) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst new file mode 100644 index 0000000000..4219e3d126 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst @@ -0,0 +1,73 @@ +.. + :copyright: Copyright (c) 2017 ftrack + +.. _example/security_roles: + +********************************* +Working with user security roles +********************************* + +.. currentmodule:: ftrack_api.session + +The API exposes `SecurityRole` and `UserSecurityRole` that can be used to +specify who should have access to certain data on different projects. + +List all available security roles like this:: + + security_roles = session.query( + 'select name from SecurityRole where type is "PROJECT"' + ) + +.. 
note:: + + We only query for project roles since those are the ones we can add to a + user for certain projects. Other types include API and ASSIGNED. Type API + can only be added to global API keys, which is currently not supported via + the api and type ASSIGNED only applies to assigned tasks. + +To get all security roles from a user we can either use relations like this:: + + for user_security_role in user['user_security_roles']: + if user_security_role['is_all_projects']: + result_string = 'all projects' + else: + result_string = ', '.join( + [project['full_name'] for project in user_security_role['projects']] + ) + + print 'User has security role "{0}" which is valid on {1}.'.format( + user_security_role['security_role']['name'], + result_string + ) + +or query them directly like this:: + + user_security_roles = session.query( + 'UserSecurityRole where user.username is "{0}"'.format(session.api_user) + ).all() + +User security roles can also be added to a user for all projects like this:: + + project_manager_role = session.query( + 'SecurityRole where name is "Project Manager"' + ).one() + + session.create('UserSecurityRole', { + 'is_all_projects': True, + 'user': user, + 'security_role': project_manager_role + }) + session.commit() + +or for certain projects only like this:: + + projects = session.query( + 'Project where full_name is "project1" or full_name is "project2"' + ).all()[:] + + session.create('UserSecurityRole', { + 'user': user, + 'security_role': project_manager_role, + 'projects': projects + }) + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst new file mode 100644 index 0000000000..5ea0e47dc6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst @@ -0,0 +1,30 @@ +.. 
+    :copyright: Copyright (c) 2014 ftrack
+
+.. _example/sync_with_ldap:
+
+********************
+Sync users with LDAP
+********************
+
+.. currentmodule:: ftrack_api.session
+
+
+If ftrack is configured to connect to LDAP you may trigger a
+synchronization through the api using the
+:meth:`ftrack_api.session.Session.call`::
+
+    result = session.call([
+        dict(
+            action='delayed_job',
+            job_type='SYNC_USERS_LDAP'
+        )
+    ])
+    job = result[0]['data']
+
+You will get a `ftrack_api.entity.job.Job` instance back which can be used
+to check the success of the job::
+
+    if job.get('status') == 'failed':
+        # The job failed, get the error.
+        logging.error(job.get('data'))
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst
new file mode 100644
index 0000000000..c6161e834a
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst
@@ -0,0 +1,56 @@
+..
+    :copyright: Copyright (c) 2017 ftrack
+
+.. _example/task_template:
+
+***************************
+Working with Task Templates
+***************************
+
+Task templates can help you organize your workflows by building a collection
+of tasks to be applied for specific contexts. They can be applied to all `Context`
+objects for example Project, Sequences, Shots, etc...
+
+Query task templates
+=======================
+
+Retrieve all task templates and their tasks for a project::
+
+    project = session.query('Project').first()
+
+    for task_template in project['project_schema']['task_templates']:
+        print('\ntask template: {0}'.format(
+            task_template['name']
+        ))
+
+        for task_type in [t['task_type'] for t in task_template['items']]:
+            print('\ttask type: {0}'.format(
+                task_type['name']
+            ))
+
+
+
+"Apply" a task template
+=======================
+Create all tasks in a random task template directly under the project::
+
+
+    project = session.query('Project').first()
+
+    task_template = random.choice(
+        project['project_schema']['task_templates']
+    )
+
+    for task_type in [t['task_type'] for t in task_template['items']]:
+        session.create(
+            'Task', {
+                'name': task_type['name'],
+                'type': task_type,
+                'parent': project
+            }
+        )
+
+    session.commit()
+
+
+
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst
new file mode 100644
index 0000000000..64199869a5
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst
@@ -0,0 +1,71 @@
+..
+    :copyright: Copyright (c) 2016 ftrack
+
+.. _example/thumbnail:
+
+***********************
+Working with thumbnails
+***********************
+
+Components can be used as thumbnails on various entities, including
+`Project`, `Task`, `AssetVersion` and `User`. 
To create and set a thumbnail
+you can use the helper method
+:meth:`~ftrack_api.entity.component.CreateThumbnailMixin.create_thumbnail` on
+any entity that can have a thumbnail::
+
+    task = session.get('Task', my_task_id)
+    thumbnail_component = task.create_thumbnail('/path/to/image.jpg')
+
+It is also possible to set an entity thumbnail by setting its `thumbnail`
+relation or `thumbnail_id` attribute to a component you would
+like to use as a thumbnail. For a component to be usable as a thumbnail,
+it should
+
+    1. Be a FileComponent.
+    2. Exist in the *ftrack.server* :term:`location`.
+    3. Be of an appropriate resolution and valid file type.
+
+The following example creates a new component in the server location, and
+uses that as a thumbnail for a task::
+
+    task = session.get('Task', my_task_id)
+    server_location = session.query(
+        'Location where name is "ftrack.server"'
+    ).one()
+
+    thumbnail_component = session.create_component(
+        '/path/to/image.jpg',
+        dict(name='thumbnail'),
+        location=server_location
+    )
+    task['thumbnail'] = thumbnail_component
+    session.commit()
+
+The next example reuses a version's thumbnail for the asset parent thumbnail::
+
+    asset_version = session.get('AssetVersion', my_asset_version_id)
+    asset_parent = asset_version['asset']['parent']
+    asset_parent['thumbnail_id'] = asset_version['thumbnail_id']
+    session.commit()
+
+.. _example/thumbnail/url:
+
+Retrieving thumbnail URL
+========================
+
+To get a URL to a thumbnail, `thumbnail_component`, which can be used
+to download or display the image in an interface, use the following::
+
+    import ftrack_api.symbol
+    server_location = session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID)
+    thumbnail_url = server_location.get_thumbnail_url(thumbnail_component)
+    thumbnail_url_tiny = server_location.get_thumbnail_url(
+        thumbnail_component, size=100
+    )
+    thumbnail_url_large = server_location.get_thumbnail_url(
+        thumbnail_component, size=500
+    )
+
+.. 
seealso::
+
+    :ref:`example/component`
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst
new file mode 100644
index 0000000000..eb86e2f897
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst
@@ -0,0 +1,37 @@
+..
+    :copyright: Copyright (c) 2015 ftrack
+
+.. _example/timer:
+
+************
+Using timers
+************
+
+.. currentmodule:: ftrack_api.session
+
+Timers can be used to track how much time has been spent working on something.
+
+To start a timer for a user::
+
+    user = # Get a user from ftrack.
+    task = # Get a task from ftrack.
+
+    user.start_timer(task)
+
+A timer has now been created for that user and should show up in the ftrack web
+UI.
+
+To stop the currently running timer for a user and create a timelog from it::
+
+    user = # Get a user from ftrack.
+
+    timelog = user.stop_timer()
+
+.. note::
+
+    Starting a timer when a timer is already running will raise an exception.
+    Use the force parameter to automatically stop the running timer first.
+
+    .. code-block:: python
+
+        user.start_timer(task, force=True)
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst
new file mode 100644
index 0000000000..f1dede570f
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst
@@ -0,0 +1,78 @@
+..
+    :copyright: Copyright (c) 2016 ftrack
+
+.. currentmodule:: ftrack_api.session
+
+.. _example/web_review:
+
+*************************
+Publishing for web review
+*************************
+
+Follow the :ref:`example/encode_media` example if you want to
+upload and encode media using ftrack.
+ +If you already have a file encoded in the correct format and want to bypass +the built-in encoding in ftrack, you can create the component manually +and add it to the `ftrack.server` location:: + + # Retrieve or create version. + version = session.query('AssetVersion', 'SOME-ID') + + server_location = session.query('Location where name is "ftrack.server"').one() + filepath = '/path/to/local/file.mp4' + + component = version.create_component( + path=filepath, + data={ + 'name': 'ftrackreview-mp4' + }, + location=server_location + ) + + # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*. + component['metadata']['ftr_meta'] = json.dumps({ + 'frameIn': 0, + 'frameOut': 150, + 'frameRate': 25 + }) + + component.session.commit() + +To publish an image for review the steps are similar:: + + # Retrieve or create version. + version = session.query('AssetVersion', 'SOME-ID') + + server_location = session.query('Location where name is "ftrack.server"').one() + filepath = '/path/to/image.jpg' + + component = version.create_component( + path=filepath, + data={ + 'name': 'ftrackreview-image' + }, + location=server_location + ) + + # Meta data needs to contain *format*. + component['metadata']['ftr_meta'] = json.dumps({ + 'format': 'image' + }) + + component.session.commit() + +Here is a list of components names and how they should be used: + +================== ===================================== +Component name Use +================== ===================================== +ftrackreview-image Images reviewable in the browser +ftrackreview-mp4 H.264/mp4 video reviewable in browser +ftrackreview-webm WebM video reviewable in browser +================== ===================================== + +.. note:: + + Make sure to use the pre-defined component names and set the `ftr_meta` on + the components or review will not work. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst new file mode 100644 index 0000000000..aa5cc77976 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst @@ -0,0 +1,76 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +******** +Glossary +******** + +.. glossary:: + + accessor + An implementation (typically a :term:`Python` plugin) for accessing + a particular type of storage using a specific protocol. + + .. seealso:: :ref:`locations/overview/accessors` + + action + Actions in ftrack provide a standardised way to integrate other tools, + either off-the-shelf or custom built, directly into your ftrack + workflow. + + .. seealso:: :ref:`ftrack:using/actions` + + api + Application programming interface. + + arrow + A Python library that offers a sensible, human-friendly approach to + creating, manipulating, formatting and converting dates, times, and + timestamps. Read more at http://crsmithdev.com/arrow/ + + asset + A container for :term:`asset versions `, typically + representing the output from an artist. For example, 'geometry' + from a modeling artist. Has an :term:`asset type` that categorises the + asset. + + asset type + Category for a particular asset. + + asset version + A specific version of data for an :term:`asset`. Can contain multiple + :term:`components `. + + component + A container to hold any type of data (such as a file or file sequence). + An :term:`asset version` can have any number of components, each with + a specific name. For example, a published version of geometry might + have two components containing the high and low resolution files, with + the component names as 'hires' and 'lowres' respectively. + + PEP-8 + Style guide for :term:`Python` code. 
Read the guide at + https://www.python.org/dev/peps/pep-0008/ + + plugin + :term:`Python` plugins are used by the API to extend it with new + functionality, such as :term:`locations ` or :term:`actions `. + + .. seealso:: :ref:`understanding_sessions/plugins` + + python + A programming language that lets you work more quickly and integrate + your systems more effectively. Often used in creative industries. Visit + the language website at http://www.python.org + + PyPi + :term:`Python` package index. The Python Package Index or PyPI is the + official third-party software repository for the Python programming + language. Visit the website at https://pypi.python.org/pypi + + resource identifier + A string that is stored in ftrack as a reference to a resource (such as + a file) in a specific location. Used by :term:`accessors ` to + determine how to access data. + + .. seealso:: :ref:`locations/overview/resource_identifiers` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst new file mode 100644 index 0000000000..1d378473fa --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst @@ -0,0 +1,315 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _handling_events: + +*************** +Handling events +*************** + +.. currentmodule:: ftrack_api.event + +Events are generated in ftrack when things happen such as a task being updated +or a new version being published. Each :class:`~ftrack_api.session.Session` +automatically connects to the event server and can be used to subscribe to +specific events and perform an action as a result. That action could be updating +another related entity based on a status change or generating folders when a new +shot is created for example. 
+ +The :class:`~hub.EventHub` for each :class:`~ftrack_api.session.Session` is +accessible via :attr:`Session.event_hub +<~ftrack_api.session.Session.event_hub>`. + +.. _handling_events/subscribing: + +Subscribing to events +===================== + +To listen to events, you register a function against a subscription using +:meth:`Session.event_hub.subscribe `. The subscription +uses the :ref:`expression ` syntax and will filter +against each :class:`~base.Event` instance to determine if the registered +function should receive that event. If the subscription matches, the registered +function will be called with the :class:`~base.Event` instance as its sole +argument. The :class:`~base.Event` instance is a mapping like structure and can +be used like a normal dictionary. + +The following example subscribes a function to receive all 'ftrack.update' +events and then print out the entities that were updated:: + + import ftrack_api + + + def my_callback(event): + '''Event callback printing all new or updated entities.''' + for entity in event['data'].get('entities', []): + + # Print data for the entity. + print(entity) + + + # Subscribe to events with the update topic. + session = ftrack_api.Session() + session.event_hub.subscribe('topic=ftrack.update', my_callback) + +At this point, if you run this, your code would exit almost immediately. This +is because the event hub listens for events in a background thread. Typically, +you only want to stay connected whilst using the session, but in some cases you +will want to block and listen for events solely - a dedicated event processor. +To do this, use the :meth:`EventHub.wait ` method:: + + # Wait for events to be received and handled. + session.event_hub.wait() + +You cancel waiting for events by using a system interrupt (:kbd:`Ctrl-C`). +Alternatively, you can specify a *duration* to process events for:: + + # Only wait and process events for 5 seconds. + session.event_hub.wait(duration=5) + +.. 
note:: + + Events are continually received and queued for processing in the background + as soon as the connection to the server is established. As a result you may + see a flurry of activity as soon as you call + :meth:`~hub.EventHub.wait` for the first time. + +.. _handling_events/subscribing/subscriber_information: + +Subscriber information +---------------------- + +When subscribing, you can also specify additional information about your +subscriber. This contextual information can be useful when routing events, +particularly when :ref:`targeting events +`. By default, the +:class:`~hub.EventHub` will set some default information, but it can be +useful to enhance this. To do so, simply pass in *subscriber* as a dictionary of +data to the :meth:`~hub.EventHub.subscribe` method:: + + session.event_hub.subscribe( + 'topic=ftrack.update', + my_callback, + subscriber={ + 'id': 'my-unique-subscriber-id', + 'applicationId': 'maya' + } + ) + +.. _handling_events/subscribing/sending_replies: + +Sending replies +--------------- + +When handling an event it is sometimes useful to be able to send information +back to the source of the event. For example, +:ref:`ftrack:developing/events/list/ftrack.location.request-resolve` would +expect a resolved path to be sent back. + +You can craft a custom reply event if you want, but an easier way is just to +return the appropriate data from your handler. Any non *None* value will be +automatically sent as a reply:: + + def on_event(event): + # Send following data in automatic reply. + return {'success': True, 'message': 'Cool!'} + + session.event_hub.subscribe('topic=test-reply', on_event) + +.. seealso:: + + :ref:`handling_events/publishing/handling_replies` + +.. note:: + + Some events are published :ref:`synchronously + `. In this case, any returned data + is passed back to the publisher directly. + +.. 
_handling_events/subscribing/stopping_events: + +Stopping events +--------------- + +The *event* instance passed to each event handler also provides a method for +stopping the event, :meth:`Event.stop `. + +Once an event has been stopped, no further handlers for that specific event +will be called **locally**. Other handlers in other processes may still be +called. + +Combining this with setting appropriate priorities when subscribing to a topic +allows handlers to prevent lower priority handlers running when desired. + + >>> import ftrack_api + >>> import ftrack_api.event.base + >>> + >>> def callback_a(event): + ... '''Stop the event!''' + ... print('Callback A') + ... event.stop() + >>> + >>> def callback_b(event): + ... '''Never run.''' + ... print('Callback B') + >>> + >>> session = ftrack_api.Session() + >>> session.event_hub.subscribe( + ... 'topic=test-stop-event', callback_a, priority=10 + ... ) + >>> session.event_hub.subscribe( + ... 'topic=test-stop-event', callback_b, priority=20 + ... ) + >>> session.event_hub.publish( + ... ftrack_api.event.base.Event(topic='test-stop-event') + ... ) + >>> session.event_hub.wait(duration=5) + Callback A called. + +.. _handling_events/publishing: + +Publishing events +================= + +So far we have looked at listening to events coming from ftrack. However, you +are also free to publish your own events (or even publish relevant ftrack +events). + +To do this, simply construct an instance of :class:`ftrack_api.event.base.Event` +and pass it to :meth:`EventHub.publish ` via the session:: + + import ftrack_api.event.base + + event = ftrack_api.event.base.Event( + topic='my-company.some-topic', + data={'key': 'value'} + ) + session.event_hub.publish(event) + +The event hub will automatically add some information to your event before it +gets published, including the *source* of the event. By default the event source +is just the event hub, but you can customise this to provide more relevant +information if you want. 
For example, if you were publishing from within Maya:: + + session.event_hub.publish(ftrack_api.event.base.Event( + topic='my-company.some-topic', + data={'key': 'value'}, + source={ + 'applicationId': 'maya' + } + )) + +Remember that all supplied information can be used by subscribers to filter +events so the more accurate the information the better. + +.. _handling_events/publishing/synchronously: + +Publish synchronously +--------------------- + +It is also possible to call :meth:`~hub.EventHub.publish` synchronously by +passing `synchronous=True`. In synchronous mode, only local handlers will be +called. The result from each called handler is collected and all the results +returned together in a list:: + + >>> import ftrack_api + >>> import ftrack_api.event.base + >>> + >>> def callback_a(event): + ... return 'A' + >>> + >>> def callback_b(event): + ... return 'B' + >>> + >>> session = ftrack_api.Session() + >>> session.event_hub.subscribe( + ... 'topic=test-synchronous', callback_a, priority=10 + ... ) + >>> session.event_hub.subscribe( + ... 'topic=test-synchronous', callback_b, priority=20 + ... ) + >>> results = session.event_hub.publish( + ... ftrack_api.event.base.Event(topic='test-synchronous'), + ... synchronous=True + ... ) + >>> print results + ['A', 'B'] + +.. _handling_events/publishing/handling_replies: + +Handling replies +---------------- + +When publishing an event it is also possible to pass a callable that will be +called with any :ref:`reply event ` +received in response to the published event. + +To do so, simply pass in a callable as the *on_reply* parameter:: + + def handle_reply(event): + print 'Got reply', event + + session.event_hub.publish( + ftrack_api.event.base.Event(topic='test-reply'), + on_reply=handle_reply + ) + +.. 
_handling_events/publishing/targeting: + +Targeting events +---------------- + +In addition to subscribers filtering events to receive, it is also possible to +give an event a specific target to help route it to the right subscriber. + +To do this, set the *target* value on the event to an :ref:`expression +`. The expression will filter against registered +:ref:`subscriber information +`. + +For example, if you have many subscribers listening for a event, but only want +one of those subscribers to get the event, you can target the event to the +subscriber using its registered subscriber id:: + + session.event_hub.publish( + ftrack_api.event.base.Event( + topic='my-company.topic', + data={'key': 'value'}, + target='id=my-custom-subscriber-id' + ) + ) + +.. _handling_events/expressions: + +Expressions +=========== + +An expression is used to filter against a data structure, returning whether the +structure fulfils the expression requirements. Expressions are currently used +for subscriptions when :ref:`subscribing to events +` and for targets when :ref:`publishing targeted +events `. + +The form of the expression is loosely groupings of 'key=value' with conjunctions +to join them. + +For example, a common expression for subscriptions is to filter against an event +topic:: + + 'topic=ftrack.location.component-added' + +However, you can also perform more complex filtering, including accessing +nested parameters:: + + 'topic=ftrack.location.component-added and data.locationId=london' + +.. note:: + + If the structure being tested does not have any value for the specified + key reference then it is treated as *not* matching. + +You can also use a single wildcard '*' at the end of any value for matching +multiple values. 
For example, the following would match all events that have a +topic starting with 'ftrack.':: + + 'topic=ftrack.*' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png new file mode 100644 index 0000000000000000000000000000000000000000..7438cb52bebd5dd1c0c5814cd7e1d5f2fdf6a572 GIT binary patch literal 7313 zcmV;C9B$)@P)KOJ#IsdX9YK2_oxysWkE--^>ub$^C=yJ%K%Nmb;2;67`L)eAY4kcqufz9M);<8V8!p z=o?uv&!ZhL4m4L)*EDnoGk5M=e08kfw{zRawV>DKmo+j(-X%mx?aklXJ34-A7N39K ze@&v_bdT*GepTS$Og#Q7dZ2A3&$bdOFRNkcPiuQ=E~c>2)91UHrmhjwBz$)TLRRNP zR|~zM+_`ToU4~HW(JdcWod`9orf&147T29$o58)^S`X+i%A<8<#YDSnTx70uIw!yKJOJ(=j z_ibhR<=Sq>C~i};bUo6;?ouuandaZ8P_5UzeB~e3mw({$&@~8xy5|X}E(ysG)Yu33 z^P8z^w=Q`m##C7lgr%;z)@FS_pP%=BWOp!mA=L-f6`cXLzq$XPX;6iXTY*)o}39>}p6n$tmGpgu~iD$?*G{5_Z4unBilu z{R3c;HHe3GZuSE_?i;+UDDaB`KBEh1ab&xL%>g>FpL*yUXeA1=I*7S=F^gkYe)=^C5*GB&v?!spa8xuMf* zmPG5fz_8}ut|zHpefHCZ^axtf=98)&#TF=C$aJV2A&Y}8G7U0VK62K9sP(*hO=SyT zR9I?eV`KUxTr8xuSV%nUDjYag=0Cak=Aq&`S?z?Aj^G=;P3bpi53r^6e&OpB!vrGa z*`NQ?)zu9)y6)eHot_&LHFS8#pCzZkw+l?Hilf2kSWq1OB06H={iEF2F>I1VAHnqH zp>GIT!5UKGlGTDBD5?z3VQYnV%Wn{}0^eMCC96?$s-{9h z24`5k)U}MvRH1y>n)wn-Rss+vMF+&!rW&CsSJ~9_l&rsYoAQ$JxEXE@QLP!H4|dvhV@zzFEit5UD1(uV%F(A@$o(wL6|ZUd->r zbmBpu!>XXN{(z{YkJ%+gRCPC8S*Q~2_|j}FcV)?ntPeOTtEDFJLi8iCSSJ>QMD?sJ zO*BFSuK70vn`y!n)k**os=<-E+eUTm?Zuh{Yh3E%Pb0Pa}Z-vat3=DLAFQibi8U4Gs`V6VN@w&x^%vX|Fp?m0USUnp?k!k3pgx!>WvAV4voV2?O zD_enq%~g2B>0??fsCH5^re$LdSuA9EHxCW`>!1f!&z|JS5d2mkLVid;rY9q9H7l_GM+q?69=#2NpVjScK5agadSJ$Z|vVmHHoh)~$3Yj)nC^yaP|b3lI~N-js~>WMo9FBO+oPZCFR5L zQs()w`8VKA7KCJ@a9n*WA*snU7^Rc*J3)Sh_~kEuDTMf&i>7Jv4d4I%_YL9OkmpoI zBq-(0d_C8M#O%7XvobX1UwWZ`Ro?VG4wKNfNXUjD&Y8fM&Z{)ShY4^1b6-UP zK~zj07y{P%vKfYPVuFOF-2Dw{fXrf6hiOgdD+oxCGz$7YB4O6n7c^@o0e+@BAgKus zTODj_=(Do+II zZz-Yxn+hPun1n4? 
zudyH>KYsklC!hS&g}CD5k3Tj9ud%{6k#uaH>Cn@~l%y?+o^j6F(3Gs@h9$<#l)Il7 z&W3JW;0Z0DWiJ)uGws`vt3$&c`ZMkrpZY2!D9Al%n0L#ih zBe3VTC_B0=QyJSeMHT_csc;0TXlU}P>B?C@cs84Zxz8yGeZuC>Va$$bI9Wh$rj^Gi zGs(vkYFv(#34Ja|o3jE{MI~SPIE<_QT(xhGZlv793VAWr>5ItBe%8;+n3T-`@6N(H z2~D83h{4;i4tZcaa9ri<*o>FgdWU@U=#f%NdSHGsgwqcUQca(49<3xVOsD1JCdk~- ziq1BfZd3Ch!@_wD6@ym<$^=a1eq@!jgrJnK>(b?hT8QJ;-8uc@sML&%^R2<6=)dd1JXZD41vkL z{w4J!0~6dJiU9gV&kcA6cPlLX_4IllSsy-p`0UxU&pa?c8N%t?irmFq!1{;B7xUZR zk!zg>``QLOq<|KRur>XLipBO8!8R@iMR-qlPA56$&e-87xAO|<3>1kwc2#6Wi-NR< z*i|S<-kTvHASXK{U~02{PDOvPLwe?pD?+mr}+JG3x$euNJ+AxO`^A>M9C@;?dC$q+th~3es|xK7+B_c~*o6f(*;u z_tFx0F0G-{F8e_gmhHKU&UQ$I7DJVK&K;h)<6e*|Bp-o88)~WVO zM3-A}8tzbSAm0X{uYwHr2Adl=FvsAl)vx=d&PN}8Wd09__Q3pP2tHff#7LCWMTlh@ z*)rD&d@gdq$2uVU`E6x=&x~r-kOz2DU55jLt&e z6Qt%{WJ*2B-4Go2LTldVW?VA#I7S43Z|C(Eiq zDvHz!vV1N`JdVOslooaXD+Ql{8KZKYWw3nTjW}saNZJ9PP+sEkf(s(+{ zGj}m;9|6=$!*0e2y5p(2AZe5yLEbNmtgY)Hr9$7Y_RY}&da2UT=>(^YK(FD?oxuw;%hI+4?g(dT#!gOh2E}zv|Yx9kQ%Fr1E9dok)b#+k0d@kGZaFo!=Q$( z6b@Gx)>=VcZqy?`HiuiG(Ww@<*(wP%tkfxAD`}u{EY^Dv`u^PgxSfVkl|Cusd7=on z+P6nX!vk;rI}32_)^W}M&)${&rqLwv`Gbc26UP65%`@^_jyGF=GWzb*Lb)wDht~cM zqh;;f3GseFg833Yc4yfgv>MBYG)REJ5};v(T4=Q5d8?XA(rlW^e5bE2)x)@_y1J^T ze_h>e)b%Gm{hb;>h7vf|za}A)hs~2n{=X@35K$02quodr;xbk21mOTe!_(JO07%QS zo;`bZ@7}#{zy0?1?b|Dy-~s-GLU;uN$HGuWiu{h_NdD{Zdv&Mo{ieUvQKU$b`sth^ zMT*p&I*JvmORgqxFkG4m# zVg<;6D?E-9CjH{(ciRpB%#NE{>Hg~0{wa4;0rJOJrQj<(K;U?o%C2eJr=>HS1sT)4 zQ$m#Aj{BJ7EK>9oAdd|fUj7t(g$D@yK22ll|5!5glrQ9dJOUQ1Ju0R&n7^qbgRZlk zv)aA_FhM1;&+gZ7K!j$nV9i`#U|>slMB=FR5tm+$mWfm|LcG9 zdgIX@gL>W^@LED{GJj)!3cFDOG7P`iei?j)2MSE8&i2pqPn)hdd*x!b+!~b_iA=3w zFTp38&kQI#I?~dk;={!3DA}u9H4(S5JH@iy_TR`%tv<@zhrU}$Ehe7Y)z$OS-r+^Fh z2g5Kf4Ab+Yiq2js#j` zodY{tuwe?=d$hc=Wz`z{D=|t7|4z44mUyX7A+;_{c9}(;JeBl3c%3sJUPs}jLdUIq zhaT_&6Kv zyvdaAuNs$1u<;wN&j66Vb3IgU zBuvTjfwAob)s=NS_LW>GC7)lgeKHt7sHEp3*+UA^*Yr_8w?2iw_$&{F<}Dfq3Xq{V z_o5JZg$D|dup9K47=OZ+!%Oa#)Mzogg$9URV8t&9w2p6EDa4uyLAbX%$q}Z)sK#C3uIN{3Yw(4RP 
zCr2wuU84vo>vmKn*C{kQX%j0}6qVaa==qC#UVRbfphtDJhYZWv=i9+ocz{4~gxshO z$4esFuKb$?0+Pxp9L*q@oA|tMKsl2GGPsFm?gMzpRxxC)8)K2Df4F}Q*P36{WZGmh zg0&t-$i-Ysa2Wz%2p4#SoTJe>%zhTp3?3mbAX)+q zL+}RxQm!*BAfuN58j$+yem>0Wpd&U#lavBvNd6)FBKQgq5SUn<**IcnE}4y9yRl!a zw5a65oxQH#_gYrP=?4H3zF7Qe+aJm%T2PXzc^y2Jp|$AG2IO86>PEBGo>NNU;7AnV zZL^ZkV)kW!I}wosQZ6G}%hth3{R8We^}G9@8nhJ*;ew9?U5hLwQ%m@OS=rs!E9Awb zX*n7S+~n| zP6Q-_bDHV+Q~J0dd@BzXo!lk#w8FT-KK2qc8_%m#)}O(D%RCRhF#rSts=GT+r!}I{ zyz3tDa}Fwcv&4!LT7+kN^8=K@@FJEY*K(Ig!ThDg8I&8PVGRdizT=Z+!f~?4m#j7e=S7vv{hKxSziMc1vdAISSGAEqekW zqnbh3XqHn3iG!*PMJ>O+av`D-&6_TX8yKP=E~2TR-H20SN-{Q!;ct)@Q@9l5Wps@Bb15##~-e!tl!Dul#Wt zpEVCVBDY6ka`xL@HW-1H9zcWNXRPlBY9pB-Yx^EpNIr&h0jtof(biFn9zK{#ExKLN zBxKi@xaNA2-s@wbFZ|gX=^S{ZTy!LyPU7Hmgn`!^t4IezmzqC&Oe#PI5&`nKFl}6k zggAVJ+QPG$;Kxg`S)IqIbXAIHhWpQPqBEr9$8H(JpyancduVa;p*{w{vU^UsPaLQ}hJ9Wv4l;5)} z`)yzSQb)03#R`y$6`xR6cj_ooq%OIdKnDj0|B}%Wn4(CL0wiEhdWDC=q#^~#BcO7* zJn0G#c=bnHqezhgWWW_3|7-8c6=b!QXnzs^z!&HTh>1Jxl#CFscqPJq@8OMzyKxtJ z!+utxgoJ=Z34sD73KX=Zf>uwf2VGBlw>c5RluA{lIv{nx`SUob23#KEI6mZ~I&RqT zZz1Hj-+t@=goifxGw4_vd84ez)4Z3zvGWZQq=-B5*Nbc;vmi9BLHq z%PL!!>EunecVy3p_7qaMZ%1(_B%DQXJ8Aet=DDJz=GLHX)9@kypEoB#KwqbGQu};S zHH31NZF!jeRy{weMdCX!ipZ5bDpDoEWeV4D>kLq=OQP^JlsYRV$0iC7RGuu+JV}@} z{c$a}OU(7J>SgMEp_5`63ShkeqKkH6J<5%c55uchul%3z&<1N`#n_qNjq;9At~5WP zsZrWu%5}fiS>6!F`aC*o>lbM5`w}I2nIQC?Fvmg)oLV_?P_$36)f)zddRZc~_ITizTh{QTS-%L6 zPZMj5tBju-q8j9yk{~i1?k*^5T0LI|K(7N^Kt_T?{l8hieAEezZY@QSCU`n*78_qHtAz!?B;s1n(Hdq@A{Rchg==cN1(~d5% z(w`t7pHbnAm8^|-0F?Bu3_V=mMO2(6vgPU-G%}C(g+=G4N!km>X2gbM32<>u_(}3! zoc3}tyj#59+yr?ZUS^OJr(E{hCO)IDlVwQOy1A~`*(wdMt(2X`oo?pdN^CP64^F0; zc}HSyPQn4^+ZpoVc>esk{}UeC;1)v8+bwuT4h357FU`%Ju>i$L6+&CPgoLMf%RLR@ z!ZO5tLxzQ1{-OxBXEvlu61s%UsI)Ds$4!23Zi1>dEY2?(wL(aexepq}aj|#bc!0G! 
zgOgn~s@D<{EW^`us)9JhFC`?R>f}6FUi(93HUp7~saD&MIyd0ivuFNKcxVG3A#wln(N>goIt9c2?j) zh>5iAD37c;z(HjcshuE-ZU7 zdw?}*^}w+;+2s_>MF)pyDBTU%2>JBsQ|}i8z+fGHb+GdHG)H$-=|z1AcYu9z2#iS~ zbX#2ZE6S<9D5KqiU{AR17(}eEp|K|O6xLuBYRu!nZW7@oAlp2)Gv$~qVVns`k?AC1 z8y%X=#JNLg8Hcjbxbk^(69mvIu^GV4i*nHdL47oM5uehWc?Ad$mD}LHIv+(}BRe)D zmn(Hqy@`~eEmPX)duy@5C}0{~L*Dks@J`*?iNH@gH(?{>lP6ESgaiXGloUeVzdezH zO9eJy;&-CqHxZl*Deq}XJ~^_V7II=Kis+1o06RS>tMr{QYoh}E17!g ztRQ>kUnyHBe`F5qF;xjCr6~>IiE}$PLf&rhSAZzc&OzGo-_U=mANU>r7i9VX2x7LW zopA3@t$7c()2)6tLjLf>4_|%t)u*3+`oRYuy!F;w89-m?KlM%hFKuxAS3|L(XU@xe rgJ69>%In+H&=0rLaU`. + +.. toctree:: + :maxdepth: 1 + + introduction + installing + tutorial + understanding_sessions + working_with_entities + querying + handling_events + caching + locations/index + example/index + api_reference/index + event_list + environment_variables + security_and_authentication + release/index + glossary + +****************** +Indices and tables +****************** + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst new file mode 100644 index 0000000000..5e42621bee --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst @@ -0,0 +1,77 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _installing: + +********** +Installing +********** + +.. highlight:: bash + +Installation is simple with `pip `_:: + + pip install ftrack-python-api + +Building from source +==================== + +You can also build manually from the source for more control. 
First obtain a +copy of the source by either downloading the +`zipball `_ or +cloning the public repository:: + + git clone git@bitbucket.org:ftrack/ftrack-python-api.git + +Then you can build and install the package into your current Python +site-packages folder:: + + python setup.py install + +Alternatively, just build locally and manage yourself:: + + python setup.py build + +Building documentation from source +---------------------------------- + +To build the documentation from source:: + + python setup.py build_sphinx + +Then view in your browser:: + + file:///path/to/ftrack-python-api/build/doc/html/index.html + +Running tests against the source +-------------------------------- + +With a copy of the source it is also possible to run the unit tests:: + + python setup.py test + +Dependencies +============ + +* `ftrack server `_ >= 3.3.11 +* `Python `_ >= 2.7, < 3 +* `Requests `_ >= 2, <3, +* `Arrow `_ >= 0.4.4, < 1, +* `termcolor `_ >= 1.1.0, < 2, +* `pyparsing `_ >= 2.0, < 3, +* `Clique `_ >= 1.2.0, < 2, +* `websocket-client `_ >= 0.40.0, < 1 + +Additional For building +----------------------- + +* `Sphinx `_ >= 1.2.2, < 2 +* `sphinx_rtd_theme `_ >= 0.1.6, < 1 +* `Lowdown `_ >= 0.1.0, < 2 + +Additional For testing +---------------------- + +* `Pytest `_ >= 2.3.5, < 3 +* `pytest-mock `_ >= 0.4, < 1, +* `pytest-catchlog `_ >= 1, <=2 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst new file mode 100644 index 0000000000..63fe980749 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst @@ -0,0 +1,26 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _introduction: + +************ +Introduction +************ + +This API allows developers to write :term:`Python` scripts that talk directly +with an ftrack server. 
The scripts can perform operations against that server
+depending on granted permissions.
+
+With any API it is important to find the right balance between flexibility and
+usefulness. If an API is too low level then everyone ends up writing boilerplate
+code for common problems and usually in a non-uniform way making it harder to
+share scripts with others. It's also harder to get started with such an API.
+Conversely, an API that attempts to be too smart can often become restrictive
+when trying to do more advanced functionality or optimise for performance.
+
+With this API we have tried to strike the right balance between these two,
+providing an API that should be simple to use out-of-the-box, but also expose
+more flexibility and power when needed.
+
+Nothing is perfect though, so please do provide feedback on ways that we can
+continue to improve this API for your specific needs.
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst
new file mode 100644
index 0000000000..97483221aa
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst
@@ -0,0 +1,87 @@
+..
+    :copyright: Copyright (c) 2014 ftrack
+
+.. _locations/configuring:
+
+*********************
+Configuring locations
+*********************
+
+To allow management of data by a location or retrieval of filesystem paths where
+supported, a location instance needs to be configured in a session with an
+:term:`accessor` and :term:`structure`.
+
+.. note::
+
+    The standard builtin locations require no further setup or configuration
+    and it is not necessary to read the rest of this section to use them.
+
+Before continuing, make sure that you are familiar with the general concepts
+of locations by reading the :ref:`locations/overview`.
+
+.. 
_locations/configuring/manually: + +Configuring manually +==================== + +Locations can be configured manually when using a session by retrieving the +location and setting the appropriate attributes:: + + location = session.query('Location where name is "my.location"').one() + location.structure = ftrack_api.structure.id.IdStructure() + location.priority = 50 + +.. _locations/configuring/automatically: + +Configuring automatically +========================= + +Often the configuration of locations should be determined by developers +looking after the core pipeline and so ftrack provides a way for a plugin to +be registered to configure the necessary locations for each session. This can +then be managed centrally if desired. + +The configuration is handled through the standard events system via a topic +*ftrack.api.session.configure-location*. Set up an :ref:`event listener plugin +` as normal with a register function that +accepts a :class:`~ftrack_api.session.Session` instance. Then register a +callback against the relevant topic to configure locations at the appropriate +time:: + + import ftrack_api + import ftrack_api.entity.location + import ftrack_api.accessor.disk + import ftrack_api.structure.id + + + def configure_locations(event): + '''Configure locations for session.''' + session = event['data']['session'] + + # Find location(s) and customise instances. + location = session.query('Location where name is "my.location"').one() + ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.id.IdStructure() + location.priority = 50 + + + def register(session): + '''Register plugin with *session*.''' + session.event_hub.subscribe( + 'topic=ftrack.api.session.configure-location', + configure_locations + ) + +.. 
note:: + + If you expect the plugin to also be evaluated by the legacy API, remember + to :ref:`validate the arguments `. + +So long as the directory containing the plugin exists on your +:envvar:`FTRACK_EVENT_PLUGIN_PATH`, the plugin will run for each session +created and any configured locations will then remain configured for the +duration of that related session. + +Be aware that you can configure many locations in one plugin or have separate +plugins for different locations - the choice is entirely up to you! diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst new file mode 100644 index 0000000000..ac1eaba649 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst @@ -0,0 +1,18 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _developing/locations: + +********* +Locations +********* + +Learn how to access locations using the API and configure your own location +plugins. + +.. toctree:: + :maxdepth: 1 + + overview + tutorial + configuring diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst new file mode 100644 index 0000000000..0a6ec171aa --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst @@ -0,0 +1,143 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _locations/overview: + +******** +Overview +******** + +Locations provides a way to easily track and manage data (files, image sequences +etc.) using ftrack. 
+
+With locations it is possible to see where published data is in the world and
+also to transfer data automatically between different locations, even different
+storage mechanisms, by defining a few simple :term:`Python` plugins. By keeping
+track of the size of the data it also helps manage storage capacity better. In
+addition, the intrinsic links to production information makes assigning work to
+others and transferring only the relevant data much simpler as well as greatly
+reducing the burden on those responsible for archiving finished work.
+
+Concepts
+========
+
+The system is implemented in layers using a few key concepts in order to provide
+a balance between out of the box functionality and custom configuration.
+
+.. _locations/overview/locations:
+
+Locations
+---------
+
+Data locations can be varied in scope and meaning - a facility, a laptop, a
+specific drive. As such, rather than place a hard limit on what can be
+considered a location, ftrack simply requires that a location be identifiable by
+a string and that string be unique to that location.
+
+A global company with facilities in many different parts of the world might
+follow a location naming convention similar to the following:
+
+    * 'ftrack.london.server01'
+    * 'ftrack.london.server02'
+    * 'ftrack.nyc.server01'
+    * 'ftrack.amsterdam.server01'
+    * '..'
+
+Whereas, for a looser setup, the following might suit better:
+
+    * 'bjorns-workstation'
+    * 'fredriks-mobile'
+    * 'martins-laptop'
+    * 'cloud-backup'
+
+Availability
+------------
+
+When tracking data across several locations it is important to be able to
+quickly find out where data is available and where it is not. As such, ftrack
+provides simple mechanisms for retrieving information on the availability of a
+:term:`component` in each location.
+
+For a single file, the availability will be either 0% or 100%. 
For containers, +such as file sequences, each file is tracked separately and the availability of +the container calculated as an overall percentage (e.g. 47%). + +.. _locations/overview/accessors: + +Accessors +--------- + +Due to the flexibility of what can be considered a location, the system must be +able to cope with locations that represent different ways of storing data. For +example, data might be stored on a local hard drive, a cloud service or even in +a database. + +In addition, the method of accessing that storage can change depending on +perspective - local filesystem, FTP, S3 API etc. + +To handle this, ftrack introduces the idea of an :term:`accessor` that provides +access to the data in a standard way. An accessor is implemented in +:term:`Python` following a set interface and can be configured at runtime to +provide relevant access to a location. + +With an accessor configured for a location, it becomes possible to not only +track data, but also manage it through ftrack by using the accessor to add and +remove data from the location. + +At present, ftrack includes a :py:class:`disk accessor +` for local filesystem access. More will be +added over time and developers are encouraged to contribute their own. + +.. _locations/overview/structure: + +Structure +--------- + +Another important consideration for locations is how data should be structured +in the location (folder structure and naming conventions). For example, +different facilities may want to use different folder structures, or different +storage mechanisms may use different paths for the data. + +For this, ftrack supports the use of a :term:`Python` structure plugin. This +plugin is called when adding a :term:`component` to a location in order to +determine the correct structure to use. + +.. note:: + + A structure plugin accepts an ftrack entity as its input and so can be + reused for generating general structures as well. 
For example, an action + callback could be implemented to create the base folder structure for some + selected shots by reusing a structure plugin. + +.. _locations/overview/resource_identifiers: + +Resource identifiers +-------------------- + +When a :term:`component` can be linked to multiple locations it becomes +necessary to store information about the relationship on the link rather than +directly on the :term:`component` itself. The most important information is the +path to the data in that location. + +However, as seen above, not all locations may be filesystem based or accessed +using standard filesystem protocols. For this reason, and to help avoid +confusion, this *path* is referred to as a :term:`resource identifier` and no +limitations are placed on the format. Keep in mind though that accessors use +this information (retrieved from the database) in order to work out how to +access the data, so the format used must be compatible with all the accessors +used for any one location. For this reason, most +:term:`resource identifiers ` should ideally look like +relative filesystem paths. + +.. _locations/overview/resource_identifiers/transformer: + +Transformer +^^^^^^^^^^^ + +To further support custom formats for +:term:`resource identifiers `, it is also possible to +configure a resource identifier transformer plugin which will convert +the identifiers before they are stored centrally and after they are retrieved. + +A possible use case of this might be to store JSON encoded metadata about a path +in the database and convert this to an actual filesystem path on retrieval. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst new file mode 100644 index 0000000000..4c5a6c0f13 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst @@ -0,0 +1,193 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _locations/tutorial: + +******** +Tutorial +******** + +This tutorial is a walkthrough on how you interact with Locations using the +ftrack :term:`API`. Before you read this tutorial, make sure you familiarize +yourself with the location concepts by reading the :ref:`locations/overview`. + +All examples assume you are using Python 2.x, have the :mod:`ftrack_api` +module imported and a :class:`session ` created. + +.. code-block:: python + + import ftrack_api + session = ftrack_api.Session() + +.. _locations/creating-locations: + +Creating locations +================== + +Locations can be created just like any other entity using +:meth:`Session.create `:: + + location = session.create('Location', dict(name='my.location')) + session.commit() + +.. note:: + Location names beginning with ``ftrack.`` are reserved for internal use. Do + not use this prefix for your location names. + +To create a location only if it doesn't already exist use the convenience +method :meth:`Session.ensure `. This will return +either an existing matching location or a newly created one. + +Retrieving locations +==================== + +You can retrieve existing locations using the standard session +:meth:`~ftrack_api.session.Session.get` and +:meth:`~ftrack_api.session.Session.query` methods:: + + # Retrieve location by unique id. + location_by_id = session.get('Location', 'unique-id') + + # Retrieve location by name. 
+ location_by_name = session.query( + 'Location where name is "my.location"' + ).one() + +To retrieve all existing locations use a standard query:: + + all_locations = session.query('Location').all() + for existing_location in all_locations: + print existing_location['name'] + +Configuring locations +===================== + +At this point you have created a custom location "my.location" in the database +and have an instance to reflect that. However, the location cannot be used in +this session to manage data unless it has been configured. To configure a +location for the session, set the appropriate attributes for accessor and +structure:: + + import tempfile + import ftrack_api.accessor.disk + import ftrack_api.structure.id + + # Assign a disk accessor with *temporary* storage + location.accessor = ftrack_api.accessor.disk.DiskAccessor( + prefix=tempfile.mkdtemp() + ) + + # Assign using ID structure. + location.structure = ftrack_api.structure.id.IdStructure() + + # Set a priority which will be used when automatically picking locations. + # Lower number is higher priority. + location.priority = 30 + +To learn more about how to configure locations automatically in a session, see +:ref:`locations/configuring`. + +.. note:: + + If a location is not configured in a session it can still be used as a + standard entity and to find out availability of components + +Using components with locations +=============================== + +The Locations :term:`API` tries to use sane defaults to stay out of your way. 
+When creating :term:`components `, a location is automatically picked +using :meth:`Session.pick_location `:: + + (_, component_path) = tempfile.mkstemp(suffix='.txt') + component_a = session.create_component(path=component_path) + +To override, specify a location explicitly:: + + (_, component_path) = tempfile.mkstemp(suffix='.txt') + component_b = session.create_component( + path=component_path, location=location + ) + +If you set the location to ``None``, the component will only be present in the +special origin location for the duration of the session:: + + (_, component_path) = tempfile.mkstemp(suffix='.txt') + component_c = session.create_component(path=component_path, location=None) + +After creating a :term:`component` in a location, it can be added to another +location by calling :meth:`Location.add_component +` and passing the location to +use as the *source* location:: + + origin_location = session.query( + 'Location where name is "ftrack.origin"' + ).one() + location.add_component(component_c, origin_location) + +To remove a component from a location use :meth:`Location.remove_component +`:: + + location.remove_component(component_b) + +Each location specifies whether to automatically manage data when adding or +removing components. To ensure that a location does not manage data, mixin the +relevant location mixin class before use:: + + import ftrack_api + import ftrack_api.entity.location + + ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + +Accessing paths +=============== + +The locations system is designed to help avoid having to deal with filesystem +paths directly. This is particularly important when you consider that a number +of locations won't provide any direct filesystem access (such as cloud storage). + +However, it is useful to still be able to get a filesystem path from locations +that support them (typically those configured with a +:class:`~ftrack_api.accessor.disk.DiskAccessor`). 
For example, you might need to +pass a filesystem path to another application or perform a copy using a faster +protocol. + +To retrieve the path if available, use :meth:`Location.get_filesystem_path +`:: + + print location.get_filesystem_path(component_c) + +Obtaining component availability +================================ + +Components in locations have a notion of availability. For regular components, +consisting of a single file, the availability would be either 0 if the +component is unavailable or 100 percent if the component is available in the +location. Composite components, like image sequences, have an availability +which is proportional to the amount of child components that have been added to +the location. + +For example, an image sequence might currently be in a state of being +transferred to :data:`test.location`. If half of the images are transferred, it +might be possible to start working with the sequence. To check availability use +the helper :meth:`Session.get_component_availability +` method:: + + print session.get_component_availability(component_c) + +There are also convenience methods on both :meth:`components +` and :meth:`locations +` for +retrieving availability as well:: + + print component_c.get_availability() + print location.get_component_availability(component_c) + +Location events +=============== + +If you want to receive event notifications when components are added to or +removed from locations, you can subscribe to the topics published, +:data:`ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC` or +:data:`ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC` and the callback +you want to be run. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst new file mode 100644 index 0000000000..7a200529ab --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst @@ -0,0 +1,263 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _querying: + +******** +Querying +******** + +.. currentmodule:: ftrack_api.session + +The API provides a simple, but powerful query language in addition to iterating +directly over entity attributes. Using queries can often substantially speed +up your code as well as reduce the amount of code written. + +A query is issued using :meth:`Session.query` and returns a list of matching +entities. The query always has a single *target* entity type that the query +is built against. This means that you cannot currently retrieve back a list of +different entity types in one query, though using :ref:`projections +` does allow retrieving related entities of a different +type in one go. + +The syntax for a query is: + +.. code-block:: none + + select from where + +However, both the selection of projections and criteria are optional. This means +the most basic query is just to fetch all entities of a particular type, such as +all projects in the system:: + + projects = session.query('Project') + +A query always returns a :class:`~ftrack_api.query.QueryResult` instance that +acts like a list with some special behaviour. The main special behaviour is that +the actual query to the server is not issued until you iterate or index into the +query results:: + + for project in projects: + print project['name'] + +You can also explicitly call :meth:`~ftrack_api.query.QueryResult.all` on the +result set:: + + projects = session.query('Project').all() + +.. note:: + + This behaviour exists in order to make way for efficient *paging* and other + optimisations in future. 
+ +.. _querying/criteria: + +Using criteria to narrow results +================================ + +Often you will have some idea of the entities you want to retrieve. In this +case you can optimise your code by not fetching more data than you need. To do +this, add criteria to your query:: + + projects = session.query('Project where status is active') + +Each criteria follows the form: + +.. code-block:: none + + + +You can inspect the entity type or instance to find out which :ref:`attributes +` are available to filter on for a particular +entity type. The list of :ref:`operators ` that can +be applied and the types of values they expect is listed later on. + +.. _querying/criteria/combining: + +Combining criteria +------------------ + +Multiple criteria can be applied in a single expression by joining them with +either ``and`` or ``or``:: + + projects = session.query( + 'Project where status is active and name like "%thrones"' + ) + +You can use parenthesis to control the precedence when compound criteria are +used (by default ``and`` takes precedence):: + + projects = session.query( + 'Project where status is active and ' + '(name like "%thrones" or full_name like "%thrones")' + ) + +.. _querying/criteria/relationships: + +Filtering on relationships +-------------------------- + +Filtering on relationships is also intuitively supported. 
Simply follow the +relationship using a dotted notation:: + + tasks_in_project = session.query( + 'Task where project.id is "{0}"'.format(project['id']) + ) + +This works even for multiple strides across relationships (though do note that +excessive strides can affect performance):: + + tasks_completed_in_project = session.query( + 'Task where project.id is "{0}" and ' + 'status.type.name is "Done"' + .format(project['id']) + ) + +The same works for collections (where each entity in the collection is compared +against the subsequent condition):: + + import arrow + + tasks_with_time_logged_today = session.query( + 'Task where timelogs.start >= "{0}"'.format(arrow.now().floor('day')) + ) + +In the above query, each *Task* that has at least one *Timelog* with a *start* +time greater than the start of today is returned. + +When filtering on relationships, the conjunctions ``has`` and ``any`` can be +used to specify how the criteria should be applied. This becomes important when +querying using multiple conditions on collection relationships. The relationship +condition can be written against the following form:: + + () + +For optimal performance ``has`` should be used for scalar relationships when +multiple conditions are involved. For example, to find notes by a specific +author when only name is known:: + + notes_written_by_jane_doe = session.query( + 'Note where author has (first_name is "Jane" and last_name is "Doe")' + ) + +This query could be written without ``has``, giving the same results:: + + notes_written_by_jane_doe = session.query( + 'Note where author.first_name is "Jane" and author.last_name is "Doe"' + ) + +``any`` should be used for collection relationships. 
For example, to find all +projects that have at least one metadata instance that has `key=some_key` +and `value=some_value` the query would be:: + + projects_where_some_key_is_some_value = session.query( + 'Project where metadata any (key=some_key and value=some_value)' + ) + +If the query was written without ``any``, projects with one metadata matching +*key* and another matching the *value* would be returned. + +``any`` can also be used to query for empty relationship collections:: + + users_without_timelogs = session.query( + 'User where not timelogs any ()' + ) + +.. _querying/criteria/operators: + +Supported operators +------------------- + +This is the list of currently supported operators: + ++--------------+----------------+----------------------------------------------+ +| Operators | Description | Example | ++==============+================+==============================================+ +| = | Exactly equal. | name is "martin" | +| is | | | ++--------------+----------------+----------------------------------------------+ +| != | Not exactly | name is_not "martin" | +| is_not | equal. | | ++--------------+----------------+----------------------------------------------+ +| > | Greater than | start after "2015-06-01" | +| after | exclusive. | | +| greater_than | | | ++--------------+----------------+----------------------------------------------+ +| < | Less than | end before "2015-06-01" | +| before | exclusive. | | +| less_than | | | ++--------------+----------------+----------------------------------------------+ +| >= | Greater than | bid >= 10 | +| | inclusive. | | ++--------------+----------------+----------------------------------------------+ +| <= | Less than | bid <= 10 | +| | inclusive. | | ++--------------+----------------+----------------------------------------------+ +| in | One of. | status.type.name in ("In Progress", "Done") | ++--------------+----------------+----------------------------------------------+ +| not_in | Not one of. 
| status.name not_in ("Omitted", "On Hold") | ++--------------+----------------+----------------------------------------------+ +| like | Matches | name like "%thrones" | +| | pattern. | | ++--------------+----------------+----------------------------------------------+ +| not_like | Does not match | name not_like "%thrones" | +| | pattern. | | ++--------------+----------------+----------------------------------------------+ +| has | Test scalar | author has (first_name is "Jane" and | +| | relationship. | last_name is "Doe") | ++--------------+----------------+----------------------------------------------+ +| any | Test collection| metadata any (key=some_key and | +| | relationship. | value=some_value) | ++--------------+----------------+----------------------------------------------+ + +.. _querying/projections: + +Optimising using projections +============================ + +In :ref:`understanding_sessions` we mentioned :ref:`auto-population +` of attribute values on access. This +meant that when iterating over a lot of entities and attributes a large number +of queries were being sent to the server. Ultimately, this can cause your code +to run slowly:: + + >>> projects = session.query('Project') + >>> for project in projects: + ... print( + ... # Multiple queries issued here for each attribute accessed for + ... # each project in the loop! + ... '{project[full_name]} - {project[status][name]})' + ... .format(project=project) + ... ) + + +Fortunately, there is an easy way to optimise. If you know what attributes you +are interested in ahead of time you can include them in your query string as +*projections* in order to fetch them in one go:: + + >>> projects = session.query( + ... 'select full_name, status.name from Project' + ... ) + >>> for project in projects: + ... print( + ... # No additional queries issued here as the values were already + ... # loaded by the above query! + ... '{project[full_name]} - {project[status][name]})' + ... 
.format(project=project) + ... ) + +Notice how this works for related entities as well. In the example above, we +also fetched the name of each *Status* entity attached to a project in the same +query, which meant that no further queries had to be issued when accessing those +nested attributes. + +.. note:: + + There are no arbitrary limits to the number (or depth) of projections, but + do be aware that excessive projections can ultimately result in poor + performance also. As always, it is about choosing the right tool for the + job. + +You can also customise the +:ref:`working_with_entities/entity_types/default_projections` to use for each +entity type when none are specified in the query string. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst new file mode 100644 index 0000000000..0eef0b7407 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst @@ -0,0 +1,18 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _release: + +*************************** +Release and migration notes +*************************** + +Find out information about what has changed between versions and any important +migration notes to be aware of when switching to a new version. + +.. toctree:: + :maxdepth: 1 + + release_notes + migration + migrating_from_old_api diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst new file mode 100644 index 0000000000..699ccf224a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst @@ -0,0 +1,613 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. 
_release/migrating_from_old_api: + +********************** +Migrating from old API +********************** + +.. currentmodule:: ftrack_api.session + +Why a new API? +============== + +With the introduction of Workflows, ftrack is capable of supporting a greater +diversity of industries. We're enabling teams to closely align the system with +their existing practices and naming conventions, resulting in a tool that feels +more natural and intuitive. The old API was locked to specific workflows, making +it impractical to support this new feature naturally. + +We also wanted this new flexibility to extend to developers, so we set about +redesigning the API to fully leverage the power in the system. And while we had +the wrenches out, we figured why not go that extra mile and build in some of the +features that we see developers having to continually implement in-house across +different companies - features such as caching and support for custom pipeline +extensions. In essence, we decided to build the API that, as pipeline +developers, we had always wanted from our production tracking and asset +management systems. We think we succeeded, and we hope you agree. + +Installing +========== + +Before, you used to download the API package from your ftrack instance. With +each release of the new API we make it available on :term:`PyPi`, and +installing is super simple: + +.. code-block:: none + + pip install ftrack-python-api + +Before installing, it is always good to check the latest +:ref:`release/release_notes` to see which version of the ftrack server is +required. + +.. seealso:: :ref:`installing` + +Overview +======== + +An API needs to be approachable, so we built the new API to feel +intuitive and familiar. 
We bundle all the core functionality into one place – a +session – with consistent methods for interacting with entities in the system:: + + import ftrack_api + session = ftrack_api.Session() + +The session is responsible for loading plugins and communicating with the ftrack +server and allows you to use multiple simultaneous sessions. You will no longer +need to explicitly call :meth:`ftrack.setup` to load plugins. + +The core methods are straightforward: + +Session.create + create a new entity, like a new version. +Session.query + fetch entities from the server using a powerful query language. +Session.delete + delete existing entities. +Session.commit + commit all changes in one efficient call. + +.. note:: + + The new API batches create, update and delete operations by default for + efficiency. To synchronise local changes with the server you need to call + :meth:`Session.commit`. + +In addition all entities in the API now act like simple Python dictionaries, +with some additional helper methods where appropriate. If you know a little +Python (or even if you don't) getting up to speed should be a breeze:: + + >>> print user.keys() + ['first_name', 'last_name', 'email', ...] + >>> print user['email'] + 'old@example.com' + >>> user['email'] = 'new@example.com' + +And of course, relationships between entities are reflected in a natural way as +well:: + + new_timelog = session.create('Timelog', {...}) + task['timelogs'].append(new_timelog) + +.. seealso :: :ref:`tutorial` + +The new API also makes use of caching in order to provide more efficient +retrieval of data by reducing the number of calls to the remote server. + +.. seealso:: :ref:`caching` + +Open source and standard code style +=================================== + +The new API is open source software and developed in public at +`Bitbucket `_. We welcome you +to join us in the development and create pull requests there. + +In the new API, we also follow the standard code style for Python, +:term:`PEP-8`. 
This means that you will now find that methods and variables are +written using ``snake_case`` instead of ``camelCase``, amongst other things. + +Package name +============ + +The new package is named :mod:`ftrack_api`. By using a new package name, we +enable you to use the old API and the new side-by-side in the same process. + +Old API:: + + import ftrack + +New API:: + + import ftrack_api + +Specifying your credentials +=========================== + +The old API used three environment variables to authenticate with your ftrack +instance. While these continue to work as before, you now also have +the option to specify them when initializing the session:: + + >>> import ftrack_api + >>> session = ftrack_api.Session( + ... server_url='https://mycompany.ftrackapp.com', + ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', + ... api_user='martin' + ... ) + +In the examples below, will assume that you have imported the package and +created a session. + +.. seealso:: + + * :ref:`environment_variables` + * :ref:`tutorial` + + +Querying objects +================ + +The old API relied on predefined methods for querying objects and constructors +which enabled you to get an entity by it's id or name. 
+ +Old API:: + + project = ftrack.getProject('dev_tutorial') + task = ftrack.Task('8923b7b3-4bf0-11e5-8811-3c0754289fd3') + user = ftrack.User('jane') + +New API:: + + project = session.query('Project where name is "dev_tutorial"').one() + task = session.get('Task', '8923b7b3-4bf0-11e5-8811-3c0754289fd3') + user = session.query('User where username is "jane"').one() + +While the new API can be a bit more verbose for simple queries, it is much more +powerful and allows you to filter on any field and preload related data:: + + tasks = session.query( + 'select name, parent.name from Task ' + 'where project.full_name is "My Project" ' + 'and status.type.short is "DONE" ' + 'and not timelogs any ()' + ).all() + +The above fetches all tasks for “My Project” that are done but have no timelogs. +It also pre-fetches related information about the tasks parent – all in one +efficient query. + +.. seealso:: :ref:`querying` + +Creating objects +================ + +In the old API, you create objects using specialized methods, such as +:meth:`ftrack.createProject`, :meth:`Project.createSequence` and +:meth:`Task.createShot`. + +In the new API, you can create any object using :meth:`Session.create`. In +addition, there are a few helper methods to reduce the amount of boilerplate +necessary to create certain objects. Don't forget to call :meth:`Session.commit` +once you have issued your create statements to commit your changes. + +As an example, let's look at populating a project with a few entities. 
+ +Old API:: + + project = ftrack.getProject('migration_test') + + # Get default task type and status from project schema + taskType = project.getTaskTypes()[0] + taskStatus = project.getTaskStatuses(taskType)[0] + + sequence = project.createSequence('001') + + # Create five shots with one task each + for shot_number in xrange(10, 60, 10): + shot = sequence.createShot( + '{0:03d}'.format(shot_number) + ) + shot.createTask( + 'Task name', + taskType, + taskStatus + ) + + +New API:: + + project = session.query('Project where name is "migration_test"').one() + + # Get default task type and status from project schema + project_schema = project['project_schema'] + default_shot_status = project_schema.get_statuses('Shot')[0] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + # Create sequence + sequence = session.create('Sequence', { + 'name': '001', + 'parent': project + }) + + # Create five shots with one task each + for shot_number in xrange(10, 60, 10): + shot = session.create('Shot', { + 'name': '{0:03d}'.format(shot_number), + 'parent': sequence, + 'status': default_shot_status + }) + session.create('Task', { + 'name': 'Task name', + 'parent': shot, + 'status': default_task_status, + 'type': default_task_type + }) + + # Commit all changes to the server. + session.commit() + +If you test the example above, one thing you might notice is that the new API +is much more efficient. Thanks to the transaction-based architecture in the new +API only a single call to the server is required to create all the objects. + +.. seealso:: :ref:`working_with_entities/creating` + +Updating objects +================ + +Updating objects in the new API works in a similar way to the old API. Instead +of using the :meth:`set` method on objects, you simply set the key of the +entity to the new value, and call :meth:`Session.commit` to persist the +changes to the database. 
+ +The following example adjusts the duration and comment of a timelog for a +user using the old and new API, respectively. + +Old API:: + + import ftrack + + user = ftrack.User('john') + user.set('email', 'john@example.com') + +New API:: + + import ftrack_api + session = ftrack_api.Session() + + user = session.query('User where username is "john"').one() + user['email'] = 'john@example.com' + session.commit() + +.. seealso:: :ref:`working_with_entities/updating` + + +Date and datetime attributes +============================ + +In the old API, date and datetime attributes where represented using a standard +:mod:`datetime` object. In the new API we have opted to use the :term:`arrow` +library instead. Datetime attributes are represented in the server timezone, +but with the timezone information stripped. + +Old API:: + + >>> import datetime + + >>> task_old_api = ftrack.Task(task_id) + >>> task_old_api.get('startdate') + datetime.datetime(2015, 9, 2, 0, 0) + + >>> # Updating a datetime attribute + >>> task_old_api.set('startdate', datetime.date.today()) + +New API:: + + >>> import arrow + + >>> task_new_api = session.get('Task', task_id) + >>> task_new_api['start_date'] + + + >>> # In the new API, utilize the arrow library when updating a datetime. + >>> task_new_api['start_date'] = arrow.utcnow().floor('day') + >>> session.commit() + +Custom attributes +================= + +In the old API, custom attributes could be retrieved from an entity by using +the methods :meth:`get` and :meth:`set`, like standard attributes. In the new +API, custom attributes can be written and read from entities using the +``custom_attributes`` property, which provides a dictionary-like interface. 
+ +Old API:: + + >>> task_old_api = ftrack.Task(task_id) + >>> task_old_api.get('my_custom_attribute') + + >>> task_old_api.set('my_custom_attribute', 'My new value') + + +New API:: + + >>> task_new_api = session.get('Task', task_id) + >>> task_new_api['custom_attributes']['my_custom_attribute'] + + + >>> task_new_api['custom_attributes']['my_custom_attribute'] = 'My new value' + +For more information on working with custom attributes and existing +limitations, please see: + +.. seealso:: + + :ref:`example/custom_attribute` + + +Using both APIs side-by-side +============================ + +With so many powerful new features and the necessary support for more flexible +workflows, we chose early on to not limit the new API design by necessitating +backwards compatibility. However, we also didn't want to force teams using the +existing API to make a costly all-or-nothing switchover. As such, we have made +the new API capable of coexisting in the same process as the old API:: + + import ftrack + import ftrack_api + +In addition, the old API will continue to be supported for some time, but do +note that it will not support the new `Workflows +`_ and will not have new features back ported +to it. + +In the first example, we obtain a task reference using the old API and +then use the new API to assign a user to it:: + + import ftrack + import ftrack_api + + # Create session for new API, authenticating using envvars. 
+ session = ftrack_api.Session() + + # Obtain task id using old API + shot = ftrack.getShot(['migration_test', '001', '010']) + task = shot.getTasks()[0] + task_id = task.getId() + + user = session.query( + 'User where username is "{0}"'.format(session.api_user) + ).one() + session.create('Appointment', { + 'resource': user, + 'context_id': task_id, + 'type': 'assignment' + }) + +The second example fetches a version using the new API and uploads and sets a +thumbnail using the old API:: + + import arrow + import ftrack + + # fetch a version published today + version = session.query( + 'AssetVersion where date >= "{0}"'.format( + arrow.now().floor('day') + ) + ).first() + + # Create a thumbnail using the old api. + thumbnail_path = '/path/to/thumbnail.jpg' + version_old_api = ftrack.AssetVersion(version['id']) + thumbnail = version_old_api.createThumbnail(thumbnail_path) + + # Also set the same thumbnail on the task linked to the version. + task_old_api = ftrack.Task(version['task_id']) + task_old_api.setThumbnail(thumbnail) + +.. note:: + + It is now possible to set thumbnails using the new API as well, for more + info see :ref:`example/thumbnail`. + +Plugin registration +------------------- + +To make event and location plugin register functions work with both old and new +API the function should be updated to validate the input arguments. For old +plugins the register method should validate that the first input is of type +``ftrack.Registry``, and for the new API it should be of type +:class:`ftrack_api.session.Session`. + +If the input parameter is not validated, a plugin might be mistakenly +registered twice, since both the new and old API will look for plugins the +same directories. + +.. seealso:: + + :ref:`ftrack:release/migration/3.0.29/developer_notes/register_function` + + +Example: publishing a new version +================================= + +In the following example, we look at migrating a script which publishes a new +version with two components. 
+ +Old API:: + + # Query a shot and a task to create the asset against. + shot = ftrack.getShot(['dev_tutorial', '001', '010']) + task = shot.getTasks()[0] + + # Create new asset. + asset = shot.createAsset(name='forest', assetType='geo') + + # Create a new version for the asset. + version = asset.createVersion( + comment='Added more leaves.', + taskid=task.getId() + ) + + # Get the calculated version number. + print version.getVersion() + + # Add some components. + previewPath = '/path/to/forest_preview.mov' + previewComponent = version.createComponent(path=previewPath) + + modelPath = '/path/to/forest_mode.ma' + modelComponent = version.createComponent(name='model', path=modelPath) + + # Publish. + asset.publish() + + # Add thumbnail to version. + thumbnail = version.createThumbnail('/path/to/forest_thumbnail.jpg') + + # Set thumbnail on other objects without duplicating it. + task.setThumbnail(thumbnail) + +New API:: + + # Query a shot and a task to create the asset against. + shot = session.query( + 'Shot where project.name is "dev_tutorial" ' + 'and parent.name is "001" and name is "010"' + ).one() + task = shot['children'][0] + + # Create new asset. + asset_type = session.query('AssetType where short is "geo"').first() + asset = session.create('Asset', { + 'parent': shot, + 'name': 'forest', + 'type': asset_type + }) + + # Create a new version for the asset. + status = session.query('Status where name is "Pending"').one() + version = session.create('AssetVersion', { + 'asset': asset, + 'status': status, + 'comment': 'Added more leaves.', + 'task': task + }) + + # In the new API, the version number is not set until we persist the changes + print 'Version number before commit: {0}'.format(version['version']) + session.commit() + print 'Version number after commit: {0}'.format(version['version']) + + # Add some components. 
+ preview_path = '/path/to/forest_preview.mov' + preview_component = version.create_component(preview_path, location='auto') + + model_path = '/path/to/forest_mode.ma' + model_component = version.create_component(model_path, { + 'name': 'model' + }, location='auto') + + # Publish. Newly created version defaults to being published in the new api, + # but if set to false you can update it by setting the key on the version. + version['is_published'] = True + + # Persist the changes + session.commit() + + # Add thumbnail to version. + thumbnail = version.create_thumbnail( + '/path/to/forest_thumbnail.jpg' + ) + + # Set thumbnail on other objects without duplicating it. + task['thumbnail'] = thumbnail + session.commit() + + +Workarounds for missing convenience methods +=========================================== + +Query object by path +-------------------- + +In the old API, there existed a convenience methods to get an object by +referencing the path (i.e object and parent names). + +Old API:: + + shot = ftrack.getShot(['dev_tutorial', '001', '010']) + +New API:: + + shot = session.query( + 'Shot where project.name is "dev_tutorial" ' + 'and parent.name is "001" and name is "010"' + ) + + +Retrieving an object's parents +------------------------------ + +To retrieve a list of an object's parents, you could call the method +:meth:`getParents` in the old API. Currently, it is not possible to fetch this +in a single call using the new API, so you will have to traverse the ancestors +one-by-one and fetch each object's parent. + +Old API:: + + parents = task.getParents() + +New API:: + + parents = [] + for item in task['link'][:-1]: + parents.append(session.get(item['type'], item['id'])) + +Note that link includes the task itself so `[:-1]` is used to only retreive the +parents. To learn more about the `link` attribute, see +:ref:`Using link attributes example`. 
+ +Limitations in the current version of the API +============================================= + +The new API is still quite young and in active development and there are a few +limitations currently to keep in mind when using it. + +Missing schemas +--------------- + +The following entities are as of the time of writing not currently available +in the new API. Let us know if you depend on any of them. + + * Booking + * Calendar and Calendar Type + * Dependency + * Manager and Manager Type + * Phase + * Role + * Task template + * Temp data + +Action base class +----------------- +There is currently no helper class for creating actions using the new API. We +will add one in the near future. + +In the meantime, it is still possible to create actions without the base class +by listening and responding to the +:ref:`ftrack:developing/events/list/ftrack.action.discover` and +:ref:`ftrack:developing/events/list/ftrack.action.launch` events. + +Legacy location +--------------- + +The ftrack legacy disk locations utilizing the +:class:`InternalResourceIdentifierTransformer` has been deprecated. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst new file mode 100644 index 0000000000..1df2211f96 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst @@ -0,0 +1,98 @@ +.. + :copyright: Copyright (c) 2015 ftrack + +.. _release/migration: + +*************** +Migration notes +*************** + +.. note:: + + Migrating from the old ftrack API? Read the dedicated :ref:`guide + `. + +Migrate to upcoming 2.0.0 +========================= + +.. 
_release/migration/2.0.0/event_hub: + +Default behavior for connecting to event hub +-------------------------------------------- + +The default behavior for the `ftrack_api.Session` class will change +for the argument `auto_connect_event_hub`, the default value will +switch from True to False. In order for code relying on the event hub +to continue functioning as expected you must modify your code +to explicitly set the argument to True or that you manually call +`session.event_hub.connect()`. + +.. note:: + If you rely on the `ftrack.location.component-added` or + `ftrack.location.component-removed` events to further process created + or deleted components remember that your session must be connected + to the event hub for the events to be published. + + +Migrate to 1.0.3 +================ + +.. _release/migration/1.0.3/mutating_dictionary: + +Mutating custom attribute dictionary +------------------------------------ + +Custom attributes can no longer be set by mutating entire dictionary:: + + # This will result in an error. + task['custom_attributes'] = dict(foo='baz', bar=2) + session.commit() + +Instead the individual values should be changed:: + + # This works better. + task['custom_attributes']['foo'] = 'baz' + task['custom_attributes']['bar'] = 2 + session.commit() + +Migrate to 1.0.0 +================ + +.. _release/migration/1.0.0/chunked_transfer: + +Chunked accessor transfers +-------------------------- + +Data transfers between accessors is now buffered using smaller chunks instead of +all data at the same time. Included accessor file representations such as +:class:`ftrack_api.data.File` and :class:`ftrack_api.accessor.server.ServerFile` +are built to handle that. If you have written your own accessor and file +representation you may have to update it to support multiple reads using the +limit parameter and multiple writes. + +Migrate to 0.2.0 +================ + +.. 
_release/migration/0.2.0/new_api_name: + +New API name +------------ + +In this release the API has been renamed from `ftrack` to `ftrack_api`. This is +to allow both the old and new API to co-exist in the same environment without +confusion. + +As such, any scripts using this new API need to be updated to import +`ftrack_api` instead of `ftrack`. For example: + +**Previously**:: + + import ftrack + import ftrack.formatter + ... + +**Now**:: + + import ftrack_api + import ftrack_api.formatter + ... diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst new file mode 100644 index 0000000000..d7978ac0b8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst @@ -0,0 +1,1478 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _release/release_notes: + +************* +Release Notes +************* + +.. currentmodule:: ftrack_api.session + +.. release:: 1.8.2 + :date: 2020-01-14 + + .. change:: fixed + :tag: Test + + test_ensure_entity_with_non_string_data_types test fails due to missing parents. + + .. change:: changed + :tags: session + + Use WeakMethod when registering atexit handler to prevent memory leak. + +.. release:: 1.8.1 + :date: 2019-10-30 + + .. change:: changed + :tags: Location + + Increase chunk size for file operations to 1 Megabyte. + This value can now also be set from the environment variable: + + :envvar:`FTRACK_API_FILE_CHUNK_SIZE` + + .. change:: new + :tag: setup + + Add check for correct python version when installing with pip. + + .. change:: new + :tags: Notes + + Add support for note labels in create_note helper method. + + .. change:: changed + :tags: session + + Ensure errors from server are fully reported with stack trace. + +.. release:: 1.8.0 + :date: 2019-02-21 + + .. 
change:: fixed + :tags: documentation + + Event description component-removed report component-added event signature. + + .. change:: new + :tags: session, attribute + + Add new scalar type `object` to factory. + + .. change:: new + :tags: session, attribute + + Add support for list of `computed` attributes as part of schema + definition. A computed attribute is derived on the server side, and can + be time dependentant and differ between users. As such a computed + attribute is not suitable for long term encoding and will not be encoded + with the `persisted_only` stragey. + + .. change:: changed + + The `delayed_job` method has been deprecated in favour of a direct + `Session.call`. See :ref:`example/sync_with_ldap` for example + usage. + + .. change:: changed + + Private method :meth:`Session._call` has been converted to + a public method, :meth:`Session.call`. + + The private method will continue to work, but a pending deprecation + warning will be issued when used. The private method will be removed + entirely in version 2.0. + + .. change:: changed + :tags: session, events + + Event server connection error is too generic, + the actual error is now reported to users. + +.. release:: 1.7.1 + :date: 2018-11-13 + + .. change:: fixed + :tags: session, events + + Meta events for event hub connect and disconnect does not include + source. + + .. change:: fixed + :tags: session, location + + Missing context argument to + :meth:`ResourceIdentifierTransformer.decode` + in :meth:`Location.get_resource_identifier`. + +.. release:: 1.7.0 + :date: 2018-07-27 + + .. change:: new + :tags: session, events + + Added new events :ref:`event_list/ftrack.api.session.ready` and + :ref:`event_list/ftrack.api.session.reset` which can be used to perform + operations after the session is ready or has been reset, respectively. + + .. change:: changed + + Private method :meth:`Session._entity_reference` has been converted to + a public method, :meth:`Session.entity_reference`. 
+ + The private method will continue to work, but a pending deprecation + warning will be issued when used. The private method will be removed + entirely in version 2.0. + + .. change:: fixed + :tags: session, events + + :meth:`Session.close` raises an exception if event hub was explicitly + connected after session initialization. + +.. release:: 1.6.0 + :date: 2018-05-17 + + .. change:: new + :tags: depreciation, events + + In version 2.0.0 of the `ftrack-python-api` the default behavior for + the :class:`Session` class will change for the argument + *auto_connect_event_hub*, the default value will switch from *True* to + *False*. + + A warning will now be emitted if async events are published or + subscribed to without *auto_connect_event_hub* has not explicitly been + set to *True*. + + .. seealso:: :ref:`release/migration/2.0.0/event_hub`. + + .. change:: fixed + :tags: documentation + + Event payload not same as what is being emitted for + :ref:`event_list/ftrack.location.component-added` and + :ref:`event_list/ftrack.location.component-removed`. + + .. change:: fixed + :tags: events + + Pyparsing is causing random errors in a threaded environment. + +.. release:: 1.5.0 + :date: 2018-04-19 + + .. change:: fixed + :tags: session, cache + + Cached entities not updated correctly when fetched in a nested + query. + +.. release:: 1.4.0 + :date: 2018-02-05 + + .. change:: fixed + :tags: session, cache + + Collection attributes not merged correctly when fetched from + server. + + .. change:: new + :tags: session, user, api key + + New function :meth:`ftrack_api.session.Session.reset_remote` allows + resetting of attributes to their default value. A convenience method + for resetting a users api key utalizing this was also added + :meth:`ftrack_api.entity.user.User.reset_api_key`. + + .. seealso:: :ref:`working_with_entities/resetting` + + .. change:: new + + Add support for sending out invitation emails to users. + See :ref:`example/invite_user` for example usage. 
+ + .. change:: changed + :tags: cache, performance + + Entities fetched from cache are now lazily merged. Improved + performance when dealing with highly populated caches. + +.. release:: 1.3.3 + :date: 2017-11-16 + + + .. change:: new + :tags: users, ldap + + Add support for triggering a synchronization of + users between ldap and ftrack. See :ref:`example/sync_with_ldap` + for example usage. + + .. note:: + + This requires that you run ftrack 3.5.10 or later. + + .. change:: fixed + :tags: metadata + + Not possible to set metadata on creation. + +.. release:: 1.3.2 + :date: 2017-09-18 + + + .. change:: new + :tags: task template + + Added example for managing task templates through the API. See + :ref:`example/task_template` for example usage. + + .. change:: fixed + :tags: custom attributes + + Not possible to set hierarchical custom attributes on an entity that + has not been committed. + + .. change:: fixed + :tags: custom attributes + + Not possible to set custom attributes on an `Asset` that has not been + committed. + + .. change:: fixed + :tags: metadata + + Not possible to set metadata on creation. + +.. release:: 1.3.1 + :date: 2017-07-21 + + .. change:: fixed + :tags: session, events + + Calling disconnect on the event hub is slow. + +.. release:: 1.3.0 + :date: 2017-07-17 + + .. change:: new + :tags: session + + Support using a :class:`Session` as a context manager to aid closing of + session after use:: + + with ftrack_api.Session() as session: + # Perform operations with session. + + .. change:: new + :tags: session + + :meth:`Session.close` automatically called on Python exit if session not + already closed. + + .. change:: new + :tags: session + + Added :meth:`Session.close` to properly close a session's connections to + the server(s) as well as ensure event listeners are properly + unsubscribed. + + .. 
change:: new + + Added :exc:`ftrack_api.exception.ConnectionClosedError` to represent + error caused when trying to access servers over closed connection. + +.. release:: 1.2.0 + :date: 2017-06-16 + + .. change:: changed + :tags: events + + Updated the websocket-client dependency to version >= 0.40.0 to allow + for http proxies. + + .. change:: fixed + :tags: documentation + + The :ref:`example/publishing` example incorrectly stated that a + location would be automatically picked if the *location* keyword + argument was omitted. + +.. release:: 1.1.1 + :date: 2017-04-27 + + .. change:: fixed + :tags: custom attributes + + Cannot use custom attributes for `Asset` in ftrack versions prior to + `3.5.0`. + + .. change:: fixed + :tags: documentation + + The :ref:`example ` + section for managing `text` custom attributes is not correct. + +.. release:: 1.1.0 + :date: 2017-03-08 + + .. change:: new + :tags: server location, thumbnail + + Added method :meth:`get_thumbnail_url() ` + to server location, which can be used to retrieve a thumbnail URL. + See :ref:`example/thumbnail/url` for example usage. + + .. change:: new + :tags: documentation + + Added :ref:`example ` on how to manage entity + links from the API. + + .. change:: new + :tags: documentation + + Added :ref:`example ` on + how to manage custom attribute configurations from the API. + + .. change:: new + :tags: documentation + + Added :ref:`example ` on how to use + `SecurityRole` and `UserSecurityRole` to manage security roles for + users. + + .. change:: new + :tags: documentation + + Added :ref:`examples ` to show how + to list a user's assigned tasks and all users assigned to a task. + + .. change:: changed + :tags: session, plugins + + Added *plugin_arguments* to :class:`Session` to allow passing of + optional keyword arguments to discovered plugin register functions. 
Only + arguments defined in a plugin register function signature are passed so + existing plugin register functions do not need updating if the new + functionality is not desired. + + .. change:: fixed + :tags: documentation + + The :ref:`example/project` example can be confusing since the project + schema may not contain the necessary object types. + + .. change:: fixed + :tags: documentation + + Query tutorial article gives misleading information about the ``has`` + operator. + + .. change:: fixed + :tags: session + + Size is not set on sequence components when using + :meth:`Session.create_component`. + +.. release:: 1.0.4 + :date: 2017-01-13 + + .. change:: fixed + :tags: custom attributes + + Custom attribute values cannot be set on entities that are not + persisted. + + .. change:: fixed + :tags: events + + `username` in published event's source data is set to the operating + system user and not the API user. + +.. release:: 1.0.3 + :date: 2017-01-04 + + .. change:: changed + :tags: session, custom attributes + + Increased performance of custom attributes and better support for + filtering when using a version of ftrack that supports non-sparse + attribute values. + + .. change:: changed + :tags: session, custom attributes + + Custom attributes can no longer be set by mutating entire dictionary. + + .. seealso:: :ref:`release/migration/1.0.3/mutating_dictionary`. + +.. release:: 1.0.2 + :date: 2016-11-17 + + .. change:: changed + :tags: session + + Removed version restriction for higher server versions. + +.. release:: 1.0.1 + :date: 2016-11-11 + + .. change:: fixed + + :meth:`EventHub.publish ` + *on_reply* callback only called for first received reply. It should be + called for all relevant replies received. + +.. release:: 1.0.0 + :date: 2016-10-28 + + .. change:: new + :tags: session + + :meth:`Session.get_upload_metadata` has been added. + + .. 
change:: changed + :tags: locations, backwards-incompatible + + Data transfer between locations using accessors is now chunked to avoid + reading large files into memory. + + .. seealso:: :ref:`release/migration/1.0.0/chunked_transfer`. + + .. change:: changed + :tags: server accessor + + :class:`ftrack_api.accessor.server.ServerFile` has been refactored to + work with large files more efficiently. + + .. change:: changed + :tags: server accessor + + :class:`ftrack_api.accessor.server.ServerFile` has been updated to use + the get_upload_metadata API endpoint instead of + /component/getPutMetadata. + + .. change:: changed + :tags: locations + + :class:`ftrack_api.data.String` is now using a temporary file instead of + StringIO to avoid reading large files into memory. + + .. change:: fixed + :tags: session, locations + + `ftrack.centralized-storage` does not properly validate location + selection during user configuration. + +.. release:: 0.16.0 + :date: 2016-10-18 + + .. change:: new + :tags: session, encode media + + :meth:`Session.encode_media` can now automatically associate the output + with a version by specifying a *version_id* keyword argument. A new + helper method on versions, :meth:`AssetVersion.encode_media + `, can be + used to make versions playable in a browser. A server version of 3.3.32 + or higher is required for it to function properly. + + .. seealso:: :ref:`example/encode_media`. + + .. change:: changed + :tags: session, encode media + + You can now decide if :meth:`Session.encode_media` should keep or + delete the original component by specifying the *keep_original* + keyword argument. + + .. change:: changed + :tags: backwards-incompatible, collection + + Collection mutation now stores collection instance in operations rather + than underlying data structure. + + .. change:: changed + :tags: performance + + Improve performance of commit operations by optimising encoding and + reducing payload sent to server. + + .. 
change:: fixed + :tags: documentation + + Asset parent variable is declared but never used in + :ref:`example/publishing`. + + .. change:: fixed + :tags: documentation + + Documentation of hierarchical attributes and their limitations are + misleading. See :ref:`example/custom_attribute`. + +.. release:: 0.15.5 + :date: 2016-08-12 + + .. change:: new + :tags: documentation + + Added two new examples for :ref:`example/publishing` and + :ref:`example/web_review`. + + .. change:: fixed + :tags: session, availability + + :meth:`Session.get_component_availabilities` ignores passed locations + shortlist and includes all locations in returned availability mapping. + + .. change:: fixed + :tags: documentation + + Source distribution of ftrack-python-api does not include ftrack.css + in the documentation. + +.. release:: 0.15.4 + :date: 2016-07-12 + + .. change:: fixed + :tags: querying + + Custom offset not respected by + :meth:`QueryResult.first `. + + .. change:: changed + :tags: querying + + Using a custom offset with :meth:`QueryResult.one + ` helper method now raises an + exception as an offset is inappropriate when expecting to select a + single item. + + .. change:: fixed + :tags: caching + + :meth:`LayeredCache.remove ` + incorrectly raises :exc:`~exceptions.KeyError` if key only exists in + sub-layer cache. + +.. release:: 0.15.3 + :date: 2016-06-30 + + .. change:: fixed + :tags: session, caching + + A newly created entity now has the correct + :attr:`ftrack_api.symbol.CREATED` state when checked in caching layer. + Previously the state was :attr:`ftrack_api.symbol.NOT_SET`. Note that + this fix causes a change in logic and the stored + :class:`ftrack_api.operation.CreateEntityOperation` might hold data that + has not been fully :meth:`merged `. + + .. change:: fixed + :tags: documentation + + The second example in the assignments article is not working. + + .. 
change:: changed + :tags: session, caching + + A callable cache maker can now return ``None`` to indicate that it could + not create a suitable cache, but :class:`Session` instantiation can + continue safely. + +.. release:: 0.15.2 + :date: 2016-06-02 + + .. change:: new + :tags: documentation + + Added an example on how to work with assignments and allocations + :ref:`example/assignments_and_allocations`. + + .. change:: new + :tags: documentation + + Added :ref:`example/entity_links` article with + examples of how to manage asset version dependencies. + + .. change:: fixed + :tags: performance + + Improve performance of large collection management. + + .. change:: fixed + + Entities are not hashable because + :meth:`ftrack_api.entity.base.Entity.__hash__` raises `TypeError`. + +.. release:: 0.15.1 + :date: 2016-05-02 + + .. change:: fixed + :tags: collection, attribute, performance + + Custom attribute configurations does not cache necessary keys, leading + to performance issues. + + .. change:: fixed + :tags: locations, structure + + Standard structure does not work if version relation is not set on + the `Component`. + +.. release:: 0.15.0 + :date: 2016-04-04 + + .. change:: new + :tags: session, locations + + `ftrack.centralized-storage` not working properly on Windows. + +.. release:: 0.14.0 + :date: 2016-03-14 + + .. change:: changed + :tags: session, locations + + The `ftrack.centralized-storage` configurator now validates that name, + label and description for new locations are filled in. + + .. change:: new + :tags: session, client review + + Added :meth:`Session.send_review_session_invite` and + :meth:`Session.send_review_session_invites` that can be used to inform + review session invitees about a review session. + + .. seealso:: :ref:`Usage guide `. + + .. change:: new + :tags: session, locations + + Added `ftrack.centralized-storage` configurator as a private module. 
It + implements a wizard like interface used to configure a centralised + storage scenario. + + .. change:: new + :tags: session, locations + + `ftrack.centralized-storage` storage scenario is automatically + configured based on information passed from the server with the + `query_server_information` action. + + .. change:: new + :tags: structure + + Added :class:`ftrack_api.structure.standard.StandardStructure` with + hierarchy based resource identifier generation. + + .. change:: new + :tags: documentation + + Added more information to the :ref:`understanding_sessions/plugins` + article. + + .. change:: fixed + + :meth:`~ftrack_api.entity.user.User.start_timer` arguments *comment* + and *name* are ignored. + + .. change:: fixed + + :meth:`~ftrack_api.entity.user.User.stop_timer` calculates the wrong + duration when the server is not running in UTC. + + For the duration to be calculated correctly ftrack server version + >= 3.3.15 is required. + +.. release:: 0.13.0 + :date: 2016-02-10 + + .. change:: new + :tags: component, thumbnail + + Added improved support for handling thumbnails. + + .. seealso:: :ref:`example/thumbnail`. + + .. change:: new + :tags: session, encode media + + Added :meth:`Session.encode_media` that can be used to encode + media to make it playable in a browser. + + .. seealso:: :ref:`example/encode_media`. + + .. change:: fixed + + :meth:`Session.commit` fails when setting a custom attribute on an asset + version that has been created and committed in the same session. + + .. change:: new + :tags: locations + + Added :meth:`ftrack_api.entity.location.Location.get_url` to retrieve a + URL to a component in a location if supported by the + :class:`ftrack_api.accessor.base.Accessor`. + + .. change:: new + :tags: documentation + + Updated :ref:`example/note` and :ref:`example/job` articles with + examples of how to use note and job components. + + .. 
change:: changed + :tags: logging, performance + + Logged messages now evaluated lazily using + :class:`ftrack_api.logging.LazyLogMessage` as optimisation. + + .. change:: changed + :tags: session, events + + Auto connection of event hub for :class:`Session` now takes place in + background to improve session startup time. + + .. change:: changed + :tags: session, events + + Event hub connection timeout is now 60 seconds instead of 10. + + .. change:: changed + :tags: server version + + ftrack server version >= 3.3.11, < 3.4 required. + + .. change:: changed + :tags: querying, performance + + :class:`ftrack_api.query.QueryResult` now pages internally using a + specified page size in order to optimise record retrieval for large + query results. :meth:`Session.query` has also been updated to allow + passing a custom page size at runtime if desired. + + .. change:: changed + :tags: querying, performance + + Increased performance of :meth:`~ftrack_api.query.QueryResult.first` and + :meth:`~ftrack_api.query.QueryResult.one` by using new `limit` syntax. + +.. release:: 0.12.0 + :date: 2015-12-17 + + .. change:: new + :tags: session, widget url + + Added :meth:`ftrack_api.session.Session.get_widget_url` to retrieve an + authenticated URL to info or tasks widgets. + +.. release:: 0.11.0 + :date: 2015-12-04 + + .. change:: new + :tags: documentation + + Updated :ref:`release/migrating_from_old_api` with new link attribute + and added a :ref:`usage example `. + + .. change:: new + :tags: caching, schemas, performance + + Caching of schemas for increased performance. + :meth:`ftrack_api.session.Session` now accepts `schema_cache_path` + argument to specify location of schema cache. If not set it will use a + temporary folder. + +.. release:: 0.10.0 + :date: 2015-11-24 + + .. change:: changed + :tags: tests + + Updated session test to use mocked schemas for encoding tests. + + .. 
change:: fixed + + Documentation specifies Python 2.6 instead of Python 2.7 as minimum + interpreter version. + + .. change:: fixed + + Documentation does not reflect current dependencies. + + .. change:: changed + :tags: session, component, locations, performance + + Improved performance of + :meth:`ftrack_api.entity.location.Location.add_components` by batching + database operations. + + As a result it is no longer possible to determine progress of transfer + for container components in realtime as events will be emitted in batch + at end of operation. + + In addition, it is now the callers responsibility to clean up any + transferred data should an error occur during either data transfer or + database registration. + + .. change:: changed + :tags: exception, locations + + :exc:`ftrack_api.exception.ComponentInLocationError` now accepts either + a single component or multiple components and makes them available as + *components* in its *details* parameter. + + .. change:: changed + :tags: tests + + Updated session test to not fail on the new private link attribute. + + .. change:: changed + :tags: session + + Internal method :py:meth:`_fetch_schemas` has beed renamed to + :py:meth:`Session._load_schemas` and now requires a `schema_cache_path` + argument. + +.. release:: 0.9.0 + :date: 2015-10-30 + + .. change:: new + :tags: caching + + Added :meth:`ftrack_api.cache.Cache.values` as helper for retrieving + all values in cache. + + .. change:: fixed + :tags: session, caching + + :meth:`Session.merge` redundantly attempts to expand entity references + that have already been expanded causing performance degradation. + + .. change:: new + :tags: session + + :meth:`Session.rollback` has been added to support cleanly reverting + session state to last good state following a failed commit. + + .. change:: changed + :tags: events + + Event hub will no longer allow unverified SSL connections. + + .. seealso:: :ref:`security_and_authentication`. + + .. 
change:: changed + :tags: session + + :meth:`Session.reset` no longer resets the connection. It also clears + all local state and re-configures certain aspects that are cache + dependant, such as location plugins. + + .. change:: fixed + :tags: factory + + Debug logging messages using incorrect index for formatting leading to + misleading exception. + +.. release:: 0.8.4 + :date: 2015-10-08 + + .. change:: new + + Added initial support for custom attributes. + + .. seealso:: :ref:`example/custom_attribute`. + + .. change:: new + :tags: collection, attribute + + Added :class:`ftrack_api.collection.CustomAttributeCollectionProxy` and + :class:`ftrack_api.attribute.CustomAttributeCollectionAttribute` to + handle custom attributes. + + .. change:: changed + :tags: collection, attribute + + ``ftrack_api.attribute.MappedCollectionAttribute`` renamed to + :class:`ftrack_api.attribute.KeyValueMappedCollectionAttribute` to more + closely reflect purpose. + + .. change:: changed + :tags: collection + + :class:`ftrack_api.collection.MappedCollectionProxy` has been refactored + as a generic base class with key, value specialisation handled in new + dedicated class + :class:`ftrack_api.collection.KeyValueMappedCollectionProxy`. This is + done to avoid confusion following introduction of new + :class:`ftrack_api.collection.CustomAttributeCollectionProxy` class. + + .. change:: fixed + :tags: events + + The event hub does not always reconnect after computer has come back + from sleep. + +.. release:: 0.8.3 + :date: 2015-09-28 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.2.1, < 3.4 required. + + .. change:: changed + + Updated *ftrack.server* location implementation. A server version of 3.3 + or higher is required for it to function properly. + + .. change:: fixed + + :meth:`ftrack_api.entity.factory.StandardFactory.create` not respecting + *bases* argument. + +.. release:: 0.8.2 + :date: 2015-09-16 + + .. 
change:: fixed + :tags: session + + Wrong file type set on component when publishing image sequence using + :meth:`Session.create_component`. + +.. release:: 0.8.1 + :date: 2015-09-08 + + .. change:: fixed + :tags: session + + :meth:`Session.ensure` not implemented. + +.. release:: 0.8.0 + :date: 2015-08-28 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.2.1, < 3.3 required. + + .. change:: new + + Added lists example. + + .. seealso:: :ref:`example/list`. + + .. change:: new + + Added convenience methods for handling timers + :class:`~ftrack_api.entity.user.User.start_timer` and + :class:`~ftrack_api.entity.user.User.stop_timer`. + + .. change:: changed + + The dynamic API classes Type, Status, Priority and + StatusType have been renamed to Type, Status, Priority and State. + + .. change:: changed + + :meth:`Session.reset` now also clears the top most level cache (by + default a :class:`~ftrack_api.cache.MemoryCache`). + + .. change:: fixed + + Some invalid server url formats not detected. + + .. change:: fixed + + Reply events not encoded correctly causing them to be misinterpreted by + the server. + +.. release:: 0.7.0 + :date: 2015-08-24 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.2, < 3.3 required. + + .. change:: changed + + Removed automatic set of default statusid, priorityid and typeid on + objects as that is now either not mandatory or handled on server. + + .. change:: changed + + Updated :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_statuses` + and :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_types` to + handle custom objects. + +.. release:: 0.6.0 + :date: 2015-08-19 + + .. change:: changed + :tags: server version + + ftrack server version >= 3.1.8, < 3.2 required. + + .. change:: changed + :tags: querying, documentation + + Updated documentation with details on new operators ``has`` and ``any`` + for querying relationships. + + .. 
seealso:: :ref:`querying/criteria/operators` + +.. release:: 0.5.2 + :date: 2015-07-29 + + .. change:: changed + :tags: server version + + ftrack server version 3.1.5 or greater required. + + .. change:: changed + + Server reported errors are now more readable and are no longer sometimes + presented as an HTML page. + +.. release:: 0.5.1 + :date: 2015-07-06 + + .. change:: changed + + Defaults computed by :class:`~ftrack_api.entity.factory.StandardFactory` + are now memoised per session to improve performance. + + .. change:: changed + + :class:`~ftrack_api.cache.Memoiser` now supports a *return_copies* + parameter to control whether deep copies should be returned when a value + was retrieved from the cache. + +.. release:: 0.5.0 + :date: 2015-07-02 + + .. change:: changed + + Now checks for server compatibility and requires an ftrack server + version of 3.1 or greater. + + .. change:: new + + Added convenience methods to :class:`~ftrack_api.query.QueryResult` to + fetch :meth:`~ftrack_api.query.QueryResult.first` or exactly + :meth:`~ftrack_api.query.QueryResult.one` result. + + .. change:: new + :tags: notes + + Added support for handling notes. + + .. seealso:: :ref:`example/note`. + + .. change:: changed + + Collection attributes generate empty collection on first access when no + remote value available. This allows interacting with a collection on a + newly created entity before committing. + + .. change:: fixed + :tags: session + + Ambiguous error raised when :class:`Session` is started with an invalid + user or key. + + .. change:: fixed + :tags: caching, session + + :meth:`Session.merge` fails against + :class:`~ftrack_api.cache.SerialisedCache` when circular reference + encountered due to entity identity not being prioritised in merge. + +.. release:: 0.4.3 + :date: 2015-06-29 + + .. change:: fixed + :tags: plugins, session, entity types + + Entity types not constructed following standard install. 
+ + This is because the discovery of the default plugins is unreliable + across Python installation processes (pip, wheel etc). Instead, the + default plugins have been added as templates to the :ref:`event_list` + documentation and the + :class:`~ftrack_api.entity.factory.StandardFactory` used to create any + missing classes on :class:`Session` startup. + +.. release:: 0.4.2 + :date: 2015-06-26 + + .. change:: fixed + :tags: metadata + + Setting exact same metadata twice can cause + :exc:`~ftrack_api.exception.ImmutableAttributeError` to be incorrectly + raised. + + .. change:: fixed + :tags: session + + Calling :meth:`Session.commit` does not clear locally set attribute + values leading to immutability checks being bypassed in certain cases. + +.. release:: 0.4.1 + :date: 2015-06-25 + + .. change:: fixed + :tags: metadata + + Setting metadata twice in one session causes `KeyError`. + +.. release:: 0.4.0 + :date: 2015-06-22 + + .. change:: changed + :tags: documentation + + Documentation extensively updated. + + .. change:: new + :tags: Client review + + Added support for handling review sessions. + + .. seealso:: :ref:`Usage guide `. + + .. change:: fixed + + Metadata property not working in line with rest of system, particularly + the caching framework. + + .. change:: new + :tags: collection + + Added :class:`ftrack_api.collection.MappedCollectionProxy` class for + providing a dictionary interface to a standard + :class:`ftrack_api.collection.Collection`. + + .. change:: new + :tags: collection, attribute + + Added :class:`ftrack_api.attribute.MappedCollectionAttribute` class for + describing an attribute that should use the + :class:`ftrack_api.collection.MappedCollectionProxy`. + + .. change:: new + + Entities that use composite primary keys are now fully supported in the + session, including for :meth:`Session.get` and :meth:`Session.populate`. + + .. 
change:: change + + Base :class:`ftrack_api.entity.factory.Factory` refactored to separate + out attribute instantiation into dedicated methods to make extending + simpler. + + .. change:: change + :tags: collection, attribute + + :class:`ftrack_api.attribute.DictionaryAttribute` and + :class:`ftrack_api.attribute.DictionaryAttributeCollection` removed. + They have been replaced by the new + :class:`ftrack_api.attribute.MappedCollectionAttribute` and + :class:`ftrack_api.collection.MappedCollectionProxy` respectively. + + .. change:: new + :tags: events + + :class:`Session` now supports an *auto_connect_event_hub* argument to + control whether the built in event hub should connect to the server on + session initialisation. This is useful for when only local events should + be supported or when the connection should be manually controlled. + +.. release:: 0.3.0 + :date: 2015-06-14 + + .. change:: fixed + + Session operations may be applied server side in invalid order resulting + in unexpected error. + + .. change:: fixed + + Creating and deleting an entity in single commit causes error as create + operation never persisted to server. + + Now all operations for the entity are ignored on commit when this case + is detected. + + .. change:: changed + + Internally moved from differential state to operation tracking for + determining session changes when persisting. + + .. change:: new + + ``Session.recorded_operations`` attribute for examining current + pending operations on a :class:`Session`. + + .. change:: new + + :meth:`Session.operation_recording` context manager for suspending + recording operations temporarily. Can also manually control + ``Session.record_operations`` boolean. + + .. change:: new + + Operation classes to track individual operations occurring in session. + + .. change:: new + + Public :meth:`Session.merge` method for merging arbitrary values into + the session manually. + + .. 
change:: changed + + An entity's state is now computed from the operations performed on it + and is no longer manually settable. + + .. change:: changed + + ``Entity.state`` attribute removed. Instead use the new inspection + :func:`ftrack_api.inspection.state`. + + Previously:: + + print entity.state + + Now:: + + import ftrack_api.inspection + print ftrack_api.inspection.state(entity) + + There is also an optimised inspection, + :func:`ftrack_api.inspection.states`. for determining state of many + entities at once. + + .. change:: changed + + Shallow copying a :class:`ftrack_api.symbol.Symbol` instance now + returns same instance. + +.. release:: 0.2.0 + :date: 2015-06-04 + + .. change:: changed + + Changed name of API from `ftrack` to `ftrack_api`. + + .. seealso:: :ref:`release/migration/0.2.0/new_api_name`. + + .. change:: new + :tags: caching + + Configurable caching support in :class:`Session`, including the ability + to use an external persisted cache and new cache implementations. + + .. seealso:: :ref:`caching`. + + .. change:: new + :tags: caching + + :meth:`Session.get` now tries to retrieve matching entity from + configured cache first. + + .. change:: new + :tags: serialisation, caching + + :meth:`Session.encode` supports a new mode *persisted_only* that will + only encode persisted attribute values. + + .. change:: changed + + Session.merge method is now private (:meth:`Session._merge`) until it is + qualified for general usage. + + .. change:: changed + :tags: entity state + + :class:`~ftrack_api.entity.base.Entity` state now managed on the entity + directly rather than stored separately in the :class:`Session`. + + Previously:: + + session.set_state(entity, state) + print session.get_state(entity) + + Now:: + + entity.state = state + print entity.state + + .. change:: changed + :tags: entity state + + Entity states are now :class:`ftrack_api.symbol.Symbol` instances rather + than strings. 
+ + Previously:: + + entity.state = 'created' + + Now:: + + entity.state = ftrack_api.symbol.CREATED + + .. change:: fixed + :tags: entity state + + It is now valid to transition from most entity states to an + :attr:`ftrack_api.symbol.NOT_SET` state. + + .. change:: changed + :tags: caching + + :class:`~ftrack_api.cache.EntityKeyMaker` removed and replaced by + :class:`~ftrack_api.cache.StringKeyMaker`. Entity identity now + computed separately and passed to key maker to allow key maker to work + with non entity instances. + + .. change:: fixed + :tags: entity + + Internal data keys ignored when re/constructing entities reducing + distracting and irrelevant warnings in logs. + + .. change:: fixed + :tags: entity + + :class:`~ftrack_api.entity.base.Entity` equality test raises error when + other is not an entity instance. + + .. change:: changed + :tags: entity, caching + + :meth:`~ftrack_api.entity.base.Entity.merge` now also merges state and + local attributes. In addition, it ensures values being merged have also + been merged into the session and outputs more log messages. + + .. change:: fixed + :tags: inspection + + :func:`ftrack_api.inspection.identity` returns different result for same + entity depending on whether entity type is unicode or string. + + .. change:: fixed + + :func:`ftrack_api.mixin` causes method resolution failure when same + class mixed in multiple times. + + .. change:: changed + + Representations of objects now show plain id rather than converting to + hex. + + .. change:: fixed + :tags: events + + Event hub raises TypeError when listening to ftrack.update events. + + .. change:: fixed + :tags: events + + :meth:`ftrack_api.event.hub.EventHub.subscribe` fails when subscription + argument contains special characters such as `@` or `+`. + + .. change:: fixed + :tags: collection + + :meth:`ftrack_api.collection.Collection` incorrectly modifies entity + state on initialisation. + +.. release:: 0.1.0 + :date: 2015-03-25 + + .. 
change:: changed + + Moved standardised construct entity type logic to core package (as part + of the :class:`~ftrack_api.entity.factory.StandardFactory`) for easier + reuse and extension. + +.. release:: 0.1.0-beta.2 + :date: 2015-03-17 + + .. change:: new + :tags: locations + + Support for ftrack.server location. The corresponding server build is + required for it to function properly. + + .. change:: new + :tags: locations + + Support for managing components in locations has been added. Check out + the :ref:`dedicated tutorial `. + + .. change:: new + + A new inspection API (:mod:`ftrack_api.inspection`) has been added for + extracting useful information from objects in the system, such as the + identity of an entity. + + .. change:: changed + + ``Entity.primary_key`` and ``Entity.identity`` have been removed. + Instead, use the new :func:`ftrack_api.inspection.primary_key` and + :func:`ftrack_api.inspection.identity` functions. This was done to make it + clearer the the extracted information is determined from the current + entity state and modifying the returned object will have no effect on + the entity instance itself. + + .. change:: changed + + :func:`ftrack_api.inspection.primary_key` now returns a mapping of the + attribute names and values that make up the primary key, rather than + the previous behaviour of returning a tuple of just the values. To + emulate previous behaviour do:: + + ftrack_api.inspection.primary_key(entity).values() + + .. change:: changed + + :meth:`Session.encode` now supports different strategies for encoding + entities via the entity_attribute_strategy* keyword argument. This makes + it possible to use this method for general serialisation of entity + instances. + + .. change:: changed + + Encoded referenced entities are now a mapping containing + *__entity_type__* and then each key, value pair that makes up the + entity's primary key. 
For example:: + + { + '__entity_type__': 'User', + 'id': '8b90a444-4e65-11e1-a500-f23c91df25eb' + } + + .. change:: changed + + :meth:`Session.decode` no longer automatically adds decoded entities to + the :class:`Session` cache making it possible to use decode + independently. + + .. change:: new + + Added :meth:`Session.merge` for merging entities recursively into the + session cache. + + .. change:: fixed + + Replacing an entity in a :class:`ftrack_api.collection.Collection` with an + identical entity no longer raises + :exc:`ftrack_api.exception.DuplicateItemInCollectionError`. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py new file mode 100644 index 0000000000..5fda0195a9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py @@ -0,0 +1,24 @@ +# :coding: utf-8 +import logging + +import ftrack_api.session + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + logger = logging.getLogger('com.example.example-plugin') + + # Validate that session is an instance of ftrack_api.Session. If not, + # assume that register is being called from an old or incompatible API and + # return without doing anything. + if not isinstance(session, ftrack_api.session.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0!r} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + # Perform your logic here, such as subscribe to an event. 
+ pass + + logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py new file mode 100644 index 0000000000..dd11136d69 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py @@ -0,0 +1,37 @@ +# :coding: utf-8 +import logging + +import ftrack_api.session + + +def register_with_session_ready(event): + '''Called when session is ready to be used.''' + logger = logging.getLogger('com.example.example-plugin') + logger.debug('Session ready.') + session = event['data']['session'] + + # Session is now ready and can be used to e.g. query objects. + task = session.query('Task').first() + print task['name'] + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + logger = logging.getLogger('com.example.example-plugin') + + # Validate that session is an instance of ftrack_api.Session. If not, + # assume that register is being called from an old or incompatible API and + # return without doing anything. 
+ if not isinstance(session, ftrack_api.session.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0!r} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + session.event_hub.subscribe( + 'topic=ftrack.api.session.ready', + register_with_session_ready + ) + + logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst new file mode 100644 index 0000000000..724afa81a6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst @@ -0,0 +1,38 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _security_and_authentication: + +*************************** +Security and authentication +*************************** + +Self signed SSL certificate +=========================== + +When using a self signed SSL certificate the API may fail to connect if it +cannot verify the SSL certificate. Under the hood the +`requests `_ library is used and it +must be specified where the trusted certificate authority can be found using the +environment variable ``REQUESTS_CA_BUNDLE``. + +.. seealso:: `SSL Cert Verification `_ + +InsecurePlatformWarning +======================= + +When using this API you may sometimes see a warning:: + + InsecurePlatformWarning: A true SSLContext object is not available. This + prevents urllib3 from configuring SSL appropriately and may cause certain + SSL connections to fail. + +If you encounter this warning, it's recommended you upgrade to Python 2.7.9, or +use pyOpenSSL. To use pyOpenSSL simply:: + + pip install pyopenssl ndg-httpsclient pyasn1 + +and the `requests `_ library used by +this API will use pyOpenSSL instead. + +.. 
seealso:: `InsecurePlatformWarning `_ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst new file mode 100644 index 0000000000..73b352eb2f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst @@ -0,0 +1,156 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _tutorial: + +******** +Tutorial +******** + +.. currentmodule:: ftrack_api.session + +This tutorial provides a quick dive into using the API and the broad stroke +concepts involved. + +First make sure the ftrack Python API is :ref:`installed `. + +Then start a Python session and import the ftrack API:: + + >>> import ftrack_api + +The API uses :ref:`sessions ` to manage communication +with an ftrack server. Create a session that connects to your ftrack server +(changing the passed values as appropriate):: + + >>> session = ftrack_api.Session( + ... server_url='https://mycompany.ftrackapp.com', + ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', + ... api_user='martin' + ... ) + +.. note:: + + A session can use :ref:`environment variables + ` to configure itself. + +Now print a list of the available entity types retrieved from the server:: + + >>> print session.types.keys() + [u'TypedContext', u'ObjectType', u'Priority', u'Project', u'Sequence', + u'Shot', u'Task', u'Status', u'Type', u'Timelog', u'User'] + +Now the list of possible entity types is known, :ref:`query ` the +server to retrieve entities of a particular type by using the +:meth:`Session.query` method:: + + >>> projects = session.query('Project') + +Each project retrieved will be an :ref:`entity ` instance +that behaves much like a standard Python dictionary. 
For example, to find out +the available keys for an entity, call the +:meth:`~ftrack_api.entity.Entity.keys` method:: + + >>> print projects[0].keys() + [u'status', u'is_global', u'name', u'end_date', u'context_type', + u'id', u'full_name', u'root', u'start_date'] + +Now, iterate over the retrieved entities and print each ones name:: + + >>> for project in projects: + ... print project['name'] + test + client_review + tdb + man_test + ftrack + bunny + +.. note:: + + Many attributes for retrieved entities are loaded on demand when the + attribute is first accessed. Doing this lots of times in a script can be + inefficient, so it is worth using :ref:`projections ` + in queries or :ref:`pre-populating ` + entities where appropriate. You can also :ref:`customise default projections + ` to help others + pre-load common attributes. + +To narrow a search, add :ref:`criteria ` to the query:: + + >>> active_projects = session.query('Project where status is active') + +Combine criteria for more powerful queries:: + + >>> import arrow + >>> + >>> active_projects_ending_before_next_week = session.query( + ... 'Project where status is active and end_date before "{0}"' + ... .format(arrow.now().replace(weeks=+1)) + ... ) + +Some attributes on an entity will refer to another entity or collection of +entities, such as *children* on a *Project* being a collection of *Context* +entities that have the project as their parent:: + + >>> project = session.query('Project').first() + >>> print project['children'] + + +And on each *Context* there is a corresponding *parent* attribute which is a +link back to the parent:: + + >>> child = project['children'][0] + >>> print child['parent'] is project + True + +These relationships can also be used in the criteria for a query:: + + >>> results = session.query( + ... 'Context where parent.name like "te%"' + ... ) + +To create new entities in the system use :meth:`Session.create`:: + + >>> new_sequence = session.create('Sequence', { + ... 
'name': 'Starlord Reveal' + ... }) + +The created entity is not yet persisted to the server, but it is still possible +to modify it. + + >>> new_sequence['description'] = 'First hero character reveal.' + +The sequence also needs a parent. This can be done in one of two ways: + +* Set the parent attribute on the sequence:: + + >>> new_sequence['parent'] = project + +* Add the sequence to a parent's children attribute:: + + >>> project['children'].append(new_sequence) + +When ready, persist to the server using :meth:`Session.commit`:: + + >>> session.commit() + +When finished with a :class:`Session`, it is important to :meth:`~Session.close` +it in order to release resources and properly unsubscribe any registered event +listeners. It is also possible to use the session as a context manager in order +to have it closed automatically after use:: + + >>> with ftrack_api.Session() as session: + ... print session.query('User').first() + + >>> print session.closed + True + +Once a :class:`Session` is closed, any operations that attempt to use the closed +connection to the ftrack server will fail:: + + >>> session.query('Project').first() + ConnectionClosedError: Connection closed. + +Continue to the next section to start learning more about the API in greater +depth or jump over to the :ref:`usage examples ` if you prefer to learn +by example. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst new file mode 100644 index 0000000000..e3602c4fa9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst @@ -0,0 +1,281 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _understanding_sessions: + +********************** +Understanding sessions +********************** + +.. 
currentmodule:: ftrack_api.session + +All communication with an ftrack server takes place through a :class:`Session`. +This allows more opportunity for configuring the connection, plugins etc. and +also makes it possible to connect to multiple ftrack servers from within the +same Python process. + +.. _understanding_sessions/connection: + +Connection +========== + +A session can be manually configured at runtime to connect to a server with +certain credentials:: + + >>> session = ftrack_api.Session( + ... server_url='https://mycompany.ftrackapp.com', + ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', + ... api_user='martin' + ... ) + +Alternatively, a session can use the following environment variables to +configure itself: + + * :envvar:`FTRACK_SERVER` + * :envvar:`FTRACK_API_USER` + * :envvar:`FTRACK_API_KEY` + +When using environment variables, no server connection arguments need to be +passed manually:: + + >>> session = ftrack_api.Session() + +.. _understanding_sessions/unit_of_work: + +Unit of work +============ + +Each session follows the unit of work pattern. This means that many of the +operations performed using a session will happen locally and only be persisted +to the server at certain times, notably when calling :meth:`Session.commit`. +This approach helps optimise calls to the server and also group related logic +together in a transaction:: + + user = session.create('User', {}) + user['username'] = 'martin' + other_user = session.create('User', {'username': 'bjorn'}) + other_user['email'] = 'bjorn@example.com' + +Behind the scenes a series of :class:`operations +` are recorded reflecting the changes made. You +can take a peek at these operations if desired by examining the +``Session.recorded_operations`` property:: + + >>> for operation in session.recorded_operations: + ... print operation + + + + + +Calling :meth:`Session.commit` persists all recorded operations to the server +and clears the operation log:: + + session.commit() + +.. 
note:: + + The commit call will optimise operations to be as efficient as possible + without breaking logical ordering. For example, a create followed by updates + on the same entity will be compressed into a single create. + +Queries are special and always issued on demand. As a result, a query may return +unexpected results if the relevant local changes have not yet been sent to the +server:: + + >>> user = session.create('User', {'username': 'some_unique_username'}) + >>> query = 'User where username is "{0}"'.format(user['username']) + >>> print len(session.query(query)) + 0 + >>> session.commit() + >>> print len(session.query(query)) + 1 + +Where possible, query results are merged in with existing data transparently +with any local changes preserved:: + + >>> user = session.query('User').first() + >>> user['email'] = 'me@example.com' # Not yet committed to server. + >>> retrieved = session.query( + ... 'User where id is "{0}"'.format(user['id']) + ... ).one() + >>> print retrieved['email'] # Displays locally set value. + 'me@example.com' + >>> print retrieved is user + True + +This is possible due to the smart :ref:`caching` layer in the session. + +.. _understanding_sessions/auto_population: + +Auto-population +=============== + +Another important concept in a session is that of auto-population. By default a +session is configured to auto-populate missing attribute values on access. This +means that the first time you access an attribute on an entity instance a query +will be sent to the server to fetch the value:: + + user = session.query('User').first() + # The next command will issue a request to the server to fetch the + # 'username' value on demand at this is the first time it is accessed. + print user['username'] + +Once a value has been retrieved it is :ref:`cached ` locally in the +session and accessing it again will not issue more server calls:: + + # On second access no server call is made. 
+ print user['username'] + +You can control the auto population behaviour of a session by either changing +the ``Session.auto_populate`` attribute on a session or using the provided +context helper :meth:`Session.auto_populating` to temporarily change the +setting. When turned off you may see a special +:attr:`~ftrack_api.symbol.NOT_SET` symbol that represents a value has not yet +been fetched:: + + >>> with session.auto_populating(False): + ... print user['email'] + NOT_SET + +Whilst convenient for simple scripts, making many requests to the server for +each attribute can slow execution of a script. To support optimisation the API +includes methods for batch fetching attributes. Read about them in +:ref:`querying/projections` and :ref:`working_with_entities/populating`. + +.. _understanding_sessions/entity_types: + +Entity types +============ + +When a session has successfully connected to the server it will automatically +download schema information and :ref:`create appropriate classes +` for use. This is important as different +servers can support different entity types and configurations. + +This information is readily available and useful if you need to check that the +entity types you expect are present. Here's how to print a list of all entity +types registered for use in the current API session:: + + >>> print session.types.keys() + [u'Task', u'Shot', u'TypedContext', u'Sequence', u'Priority', + u'Status', u'Project', u'User', u'Type', u'ObjectType'] + +Each entity type is backed by a :ref:`customisable class +` that further describes the entity type and +the attributes that are available. + +.. hint:: + + If you need to use an :func:`isinstance` check, always go through the + session as the classes are built dynamically:: + + >>> isinstance(entity, session.types['Project']) + +.. 
_understanding_sessions/plugins: + +Configuring plugins +=================== + +Plugins are used by the API to extend it with new functionality, such as +:term:`locations ` or adding convenience methods to +:ref:`understanding_sessions/entity_types`. In addition to new API +functionality, event plugins may also be used for event processing by listening +to :ref:`ftrack update events ` or adding custom functionality to ftrack by registering +:term:`actions `. + + +When starting a new :class:`Session` either pass the *plugins_paths* to search +explicitly or rely on the environment variable +:envvar:`FTRACK_EVENT_PLUGIN_PATH`. As each session is independent of others, +you can configure plugins per session. + +The paths will be searched for :term:`plugins `, python files +which expose a `register` function. These functions will be evaluated and can +be used extend the API with new functionality, such as locations or actions. + +If you do not specify any override then the session will attempt to discover and +use the default plugins. + +Plugins are discovered using :func:`ftrack_api.plugin.discover` with the +session instance passed as the sole positional argument. Most plugins should +take the form of a mount function that then subscribes to specific :ref:`events +` on the session:: + + def configure_locations(event): + '''Configure locations for session.''' + session = event['data']['session'] + # Find location(s) and customise instances. + + def register(session): + '''Register plugin with *session*.''' + session.event_hub.subscribe( + 'topic=ftrack.api.session.configure-location', + configure_locations + ) + +Additional keyword arguments can be passed as *plugin_arguments* to the +:class:`Session` on instantiation. These are passed to the plugin register +function if its signature supports them:: + + # a_plugin.py + def register(session, reticulate_splines=False): + '''Register plugin with *session*.''' + ... 
+ + # main.py + session = ftrack_api.Session( + plugin_arguments={ + 'reticulate_splines': True, + 'some_other_argument': 42 + } + ) + +.. seealso:: + + Lists of events which you can subscribe to in your plugins are available + both for :ref:`synchronous event published by the python API ` + and :ref:`asynchronous events published by the server ` + + +Quick setup +----------- + +1. Create a directory where plugins will be stored. Place any plugins you want +loaded automatically in an API *session* here. + +.. image:: /image/configuring_plugins_directory.png + +2. Configure the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. + + +Detailed setup +-------------- + +Start out by creating a directory on your machine where you will store your +plugins. Download :download:`example_plugin.py ` +and place it in the directory. + +Open up a terminal window, and ensure that plugin is picked up when +instantiating the session and manually setting the *plugin_paths*:: + + >>> # Set up basic logging + >>> import logging + >>> logging.basicConfig() + >>> plugin_logger = logging.getLogger('com.example.example-plugin') + >>> plugin_logger.setLevel(logging.DEBUG) + >>> + >>> # Configure the API, loading plugins in the specified paths. + >>> import ftrack_api + >>> plugin_paths = ['/path/to/plugins'] + >>> session = ftrack_api.Session(plugin_paths=plugin_paths) + +If everything is working as expected, you should see the following in the +output:: + + DEBUG:com.example.example-plugin:Plugin registered + +Instead of specifying the plugin paths when instantiating the session, you can +also specify the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. +To specify multiple directories, use the path separator for your operating +system. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst new file mode 100644 index 0000000000..2d9d26f986 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst @@ -0,0 +1,434 @@ +.. + :copyright: Copyright (c) 2014 ftrack + +.. _working_with_entities: + +********************* +Working with entities +********************* + +.. currentmodule:: ftrack_api.session + +:class:`Entity ` instances are Python dict-like +objects whose keys correspond to attributes for that type in the system. They +may also provide helper methods to perform common operations such as replying to +a note:: + + note = session.query('Note').first() + print note.keys() + print note['content'] + note['content'] = 'A different message!' + reply = note.create_reply(...) + +.. _working_with_entities/attributes: + +Attributes +========== + +Each entity instance is typed according to its underlying entity type on the +server and configured with appropriate attributes. For example, a *task* will be +represented by a *Task* class and have corresponding attributes. You can +:ref:`customise entity classes ` to alter +attribute access or provide your own helper methods. + +To see the available attribute names on an entity use the +:meth:`~ftrack_api.entity.base.Entity.keys` method on the instance:: + + >>> task = session.query('Task').first() + >>> print task.keys() + ['id', 'name', ...] + +If you need more information about the type of attribute, examine the +``attributes`` property on the corresponding class:: + + >>> for attribute in type(task).attributes: + ... print attribute + + + + + + ... 
+ +Notice that there are different types of attribute such as +:class:`~ftrack_api.attribute.ScalarAttribute` for plain values or +:class:`~ftrack_api.attribute.ReferenceAttribute` for relationships. These +different types are reflected in the behaviour on the entity instance when +accessing a particular attribute by key: + + >>> # Scalar + >>> print task['name'] + 'model' + >>> task['name'] = 'comp' + + >>> # Single reference + >>> print task['status'] + + >>> new_status = session.query('Status').first() + >>> task['status'] = new_status + + >>> # Collection + >>> print task['timelogs'] + + >>> print task['timelogs'][:] + [, ...] + >>> new_timelog = session.create('Timelog', {...}) + >>> task['timelogs'].append(new_timelog) + +.. _working_with_entities/attributes/bidirectional: + +Bi-directional relationships +---------------------------- + +Some attributes refer to different sides of a bi-directional relationship. In +the current version of the API bi-directional updates are not propagated +automatically to the other side of the relationship. For example, setting a +*parent* will not update the parent entity's *children* collection locally. +There are plans to support this behaviour better in the future. For now, after +commit, :ref:`populate ` the reverse side +attribute manually. + +.. _working_with_entities/creating: + +Creating entities +================= + +In order to create a new instance of an entity call :meth:`Session.create` +passing in the entity type to create and any initial attribute values:: + + new_user = session.create('User', {'username': 'martin'}) + +If there are any default values that can be set client side then they will be +applied at this point. Typically this will be the unique entity key:: + + >>> print new_user['id'] + 170f02a4-6656-4f15-a5cb-c4dd77ce0540 + +At this point no information has been sent to the server. 
However, you are free +to continue :ref:`updating ` this object +locally until you are ready to persist the changes by calling +:meth:`Session.commit`. + +If you are wondering about what would happen if you accessed an unset attribute +on a newly created entity, go ahead and give it a go:: + + >>> print new_user['first_name'] + NOT_SET + +The session knows that it is a newly created entity that has not yet been +persisted so it doesn't try to fetch any attributes on access even when +``session.auto_populate`` is turned on. + +.. _working_with_entities/updating: + +Updating entities +================= + +Updating an entity is as simple as modifying the values for specific keys on +the dict-like instance and calling :meth:`Session.commit` when ready. The entity +to update can either be a new entity or a retrieved entity:: + + task = session.query('Task').first() + task['bid'] = 8 + +Remember that, for existing entities, accessing an attribute will load it from +the server automatically. If you are interested in just setting values without +first fetching them from the server, turn :ref:`auto-population +` off temporarily:: + + >>> with session.auto_populating(False): + ... task = session.query('Task').first() + ... task['bid'] = 8 + + +.. _working_with_entities/resetting: + +Server side reset of entity attributes or settings. +=========================== + +Some entities support resetting of attributes, for example +to reset a users api key:: + + + session.reset_remote( + 'api_key', entity=session.query('User where username is "test_user"').one() + ) + +.. note:: + Currently the only attribute possible to reset is 'api_key' on + the user entity type. + + +.. _working_with_entities/deleting: + +Deleting entities +================= + +To delete an entity you need an instance of the entity in your session (either +from having created one or retrieving one). 
Then call :meth:`Session.delete` on + the entity and :meth:`Session.commit` when ready:: + + task_to_delete = session.query('Task').first() + session.delete(task_to_delete) + ... + session.commit() + +.. note:: + + Even though the entity is deleted, you will still have access to the local + instance and any local data stored on that instance whilst that instance + remains in memory. + +Keep in mind that some deletions, when propagated to the server, will cause +other entities to be deleted also, so you don't have to worry about deleting an +entire hierarchy manually. For example, deleting a *Task* will also delete all +*Notes* on that task. + +.. _working_with_entities/populating: + +Populating entities +=================== + +When an entity is retrieved via :meth:`Session.query` or :meth:`Session.get` it +will have some attributes prepopulated. The rest are dynamically loaded when +they are accessed. If you need to access many attributes it can be more +efficient to request all those attributes be loaded in one go. One way to do +this is to use a :ref:`projections ` in queries. + +However, if you have entities that have been passed to you from elsewhere you +don't have control over the query that was issued to get those entities. In this +case you can populate those entities in one go using +:meth:`Session.populate` which works exactly like :ref:`projections +` in queries do, but operating against known entities:: + + >>> users = session.query('User') + >>> session.populate(users, 'first_name, last_name') + >>> with session.auto_populating(False): # Turn off for example purpose. + ... for user in users: + ... print 'Name: {0}'.format(user['first_name']) + ... print 'Email: {0}'.format(user['email']) + Name: Martin + Email: NOT_SET + ... + +.. note:: + + You can populate a single or many entities in one call so long as they are + all the same entity type. + +.. 
_working_with_entities/entity_states: + +Entity states +============= + +Operations on entities are :ref:`recorded in the session +` as they happen. At any time you can +inspect an entity to determine its current state from those pending operations. + +To do this, use :func:`ftrack_api.inspection.state`:: + + >>> import ftrack_api.inspection + >>> new_user = session.create('User', {}) + >>> print ftrack_api.inspection.state(new_user) + CREATED + >>> existing_user = session.query('User').first() + >>> print ftrack_api.inspection.state(existing_user) + NOT_SET + >>> existing_user['email'] = 'martin@example.com' + >>> print ftrack_api.inspection.state(existing_user) + MODIFIED + >>> session.delete(new_user) + >>> print ftrack_api.inspection.state(new_user) + DELETED + +.. _working_with_entities/entity_types: + +Customising entity types +======================== + +Each type of entity in the system is represented in the Python client by a +dedicated class. However, because the types of entities can vary these classes +are built on demand using schema information retrieved from the server. + +Many of the default classes provide additional helper methods which are mixed +into the generated class at runtime when a session is started. + +In some cases it can be useful to tailor the custom classes to your own pipeline +workflows. Perhaps you want to add more helper functions, change attribute +access rules or even providing a layer of backwards compatibility for existing +code. The Python client was built with this in mind and makes such +customisations as easy as possible. + +When a :class:`Session` is constructed it fetches schema details from the +connected server and then calls an :class:`Entity factory +` to create classes from those schemas. It +does this by emitting a synchronous event, +*ftrack.api.session.construct-entity-type*, for each schema and expecting a +*class* object to be returned. 
+ +In the default setup, a :download:`construct_entity_type.py +<../resource/plugin/construct_entity_type.py>` plugin is placed on the +:envvar:`FTRACK_EVENT_PLUGIN_PATH`. This plugin will register a trivial subclass +of :class:`ftrack_api.entity.factory.StandardFactory` to create the classes in +response to the construct event. The simplest way to get started is to edit this +default plugin as required. + +.. seealso:: :ref:`understanding_sessions/plugins` + +.. _working_with_entities/entity_types/default_projections: + +Default projections +------------------- + +When a :ref:`query ` is issued without any :ref:`projections +`, the session will automatically add default projections +according to the type of the entity. + +For example, the following shows that for a *User*, only *id* is fetched by +default when no projections added to the query:: + + >>> user = session.query('User').first() + >>> with session.auto_populating(False): # For demonstration purpose only. + ... print user.items() + [ + (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') + (u'username', Symbol(NOT_SET)), + (u'first_name', Symbol(NOT_SET)), + ... + ] + +.. note:: + + These default projections are also used when you access a relationship + attribute using the dictionary key syntax. + +If you want to default to fetching *username* for a *Task* as well then you can +change the default_projections* in your class factory plugin:: + + class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + cls = super(Factory, self).create(schema, bases=bases) + + # Further customise cls before returning. + if schema['id'] == 'User': + cls.default_projections = ['id', 'username'] + + return cls + +Now a projection-less query will also query *username* by default: + +.. 
note:: + + You will need to start a new session to pick up the change you made:: + + session = ftrack_api.Session() + +.. code-block:: python + + >>> user = session.query('User').first() + >>> with session.auto_populating(False): # For demonstration purpose only. + ... print user.items() + [ + (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') + (u'username', u'martin'), + (u'first_name', Symbol(NOT_SET)), + ... + ] + +Note that if any specific projections are applied in a query, those override +the default projections entirely. This allows you to also *reduce* the data +loaded on demand:: + + >>> session = ftrack_api.Session() # Start new session to avoid cache. + >>> user = session.query('select id from User').first() + >>> with session.auto_populating(False): # For demonstration purpose only. + ... print user.items() + [ + (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') + (u'username', Symbol(NOT_SET)), + (u'first_name', Symbol(NOT_SET)), + ... + ] + +.. _working_with_entities/entity_types/helper_methods: + +Helper methods +-------------- + +If you want to add additional helper methods to the constructed classes to +better support your pipeline logic, then you can simply patch the created +classes in your factory, much like with changing the default projections:: + + def get_full_name(self): + '''Return full name for user.''' + return '{0} {1}'.format(self['first_name'], self['last_name']).strip() + + class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + cls = super(Factory, self).create(schema, bases=bases) + + # Further customise cls before returning. + if schema['id'] == 'User': + cls.get_full_name = get_full_name + + return cls + +Now you have a new helper method *get_full_name* on your *User* entities:: + + >>> session = ftrack_api.Session() # New session to pick up changes. 
+ >>> user = session.query('User').first() + >>> print user.get_full_name() + Martin Pengelly-Phillips + +If you'd rather not patch the existing classes, or perhaps have a lot of helpers +to mixin, you can instead inject your own class as the base class. The only +requirement is that it has the base :class:`~ftrack_api.entity.base.Entity` +class in its ancestor classes:: + + import ftrack_api.entity.base + + + class CustomUser(ftrack_api.entity.base.Entity): + '''Represent user.''' + + def get_full_name(self): + '''Return full name for user.''' + return '{0} {1}'.format(self['first_name'], self['last_name']).strip() + + + class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + # Alter base class for constructed class. + if bases is None: + bases = [ftrack_api.entity.base.Entity] + + if schema['id'] == 'User': + bases = [CustomUser] + + cls = super(Factory, self).create(schema, bases=bases) + return cls + +The resulting effect is the same:: + + >>> session = ftrack_api.Session() # New session to pick up changes. + >>> user = session.query('User').first() + >>> print user.get_full_name() + Martin Pengelly-Phillips + +.. note:: + + Your custom class is not the leaf class which will still be a dynamically + generated class. Instead your custom class becomes the base for the leaf + class:: + + >>> print type(user).__mro__ + (, , ...) 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini new file mode 100644 index 0000000000..b1f515ee18 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini @@ -0,0 +1,7 @@ +[pytest] +minversion = 2.4.2 +addopts = -v -k-slow --junitxml=test-reports/junit.xml --cache-clear +norecursedirs = .* _* +python_files = test_*.py +python_functions = test_* +mock_use_standalone_module = true \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py new file mode 100644 index 0000000000..0682a5eeb0 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py @@ -0,0 +1,39 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import logging + +import ftrack_api +import ftrack_api.entity.location +import ftrack_api.accessor.disk + + +def configure_locations(event): + '''Configure locations for session.''' + session = event['data']['session'] + + # Find location(s) and customise instances. + # + # location = session.query('Location where name is "my.location"').one() + # ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) + # location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + + +def register(session): + '''Register plugin with *session*.''' + logger = logging.getLogger('ftrack_plugin:configure_locations.register') + + # Validate that session is an instance of ftrack_api.Session. If not, assume + # that register is being called from an old or incompatible API and return + # without doing anything. 
+ if not isinstance(session, ftrack_api.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + session.event_hub.subscribe( + 'topic=ftrack.api.session.configure-location', + configure_locations + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py new file mode 100644 index 0000000000..45f7841670 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py @@ -0,0 +1,46 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import logging + +import ftrack_api.entity.factory + + +class Factory(ftrack_api.entity.factory.StandardFactory): + '''Entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + # Optionally change bases for class to be generated. + cls = super(Factory, self).create(schema, bases=bases) + + # Further customise cls before returning. + + return cls + + +def register(session): + '''Register plugin with *session*.''' + logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') + + # Validate that session is an instance of ftrack_api.Session. If not, assume + # that register is being called from an old or incompatible API and return + # without doing anything. 
+ if not isinstance(session, ftrack_api.Session): + logger.debug( + 'Not subscribing plugin as passed argument {0!r} is not an ' + 'ftrack_api.Session instance.'.format(session) + ) + return + + factory = Factory() + + def construct_entity_type(event): + '''Return class to represent entity type specified by *event*.''' + schema = event['data']['schema'] + return factory.create(schema) + + session.event_hub.subscribe( + 'topic=ftrack.api.session.construct-entity-type', + construct_entity_type + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg new file mode 100644 index 0000000000..b2ad8fd086 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg @@ -0,0 +1,6 @@ +[build_sphinx] +config-dir = doc +source-dir = doc +build-dir = build/doc +builder = html +all_files = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py new file mode 100644 index 0000000000..da99a572b4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py @@ -0,0 +1,81 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import os +import re + +from setuptools import setup, find_packages +from setuptools.command.test import test as TestCommand + + +ROOT_PATH = os.path.dirname(os.path.realpath(__file__)) +RESOURCE_PATH = os.path.join(ROOT_PATH, 'resource') +SOURCE_PATH = os.path.join(ROOT_PATH, 'source') +README_PATH = os.path.join(ROOT_PATH, 'README.rst') + + +# Read version from source. +with open( + os.path.join(SOURCE_PATH, 'ftrack_api', '_version.py') +) as _version_file: + VERSION = re.match( + r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL + ).group(1) + + +# Custom commands. 
+class PyTest(TestCommand): + '''Pytest command.''' + + def finalize_options(self): + '''Finalize options to be used.''' + TestCommand.finalize_options(self) + self.test_args = [] + self.test_suite = True + + def run_tests(self): + '''Import pytest and run.''' + import pytest + raise SystemExit(pytest.main(self.test_args)) + + +# Call main setup. +setup( + name='ftrack-python-api', + version=VERSION, + description='Python API for ftrack.', + long_description=open(README_PATH).read(), + keywords='ftrack, python, api', + url='https://bitbucket.org/ftrack/ftrack-python-api', + author='ftrack', + author_email='support@ftrack.com', + license='Apache License (2.0)', + packages=find_packages(SOURCE_PATH), + package_dir={ + '': 'source' + }, + setup_requires=[ + 'sphinx >= 1.2.2, < 2', + 'sphinx_rtd_theme >= 0.1.6, < 1', + 'lowdown >= 0.1.0, < 2' + ], + install_requires=[ + 'requests >= 2, <3', + 'arrow >= 0.4.4, < 1', + 'termcolor >= 1.1.0, < 2', + 'pyparsing >= 2.0, < 3', + 'clique >= 1.2.0, < 2', + 'websocket-client >= 0.40.0, < 1' + ], + tests_require=[ + 'pytest >= 2.7, < 3', + 'pytest-mock >= 0.4, < 1', + 'pytest-catchlog >= 1, <=2' + ], + cmdclass={ + 'test': PyTest + }, + zip_safe=False, + python_requires=">=2.7.9, <3.0" + +) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py new file mode 100644 index 0000000000..34833aa0dd --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py @@ -0,0 +1 @@ +from ftrack_api import * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py new file mode 100644 index 0000000000..d8ee30bd8f --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py @@ -0,0 +1,32 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from ._version import __version__ +from .session import Session + + +def mixin(instance, mixin_class, name=None): + '''Mixin *mixin_class* to *instance*. + + *name* can be used to specify new class name. If not specified then one will + be generated. + + ''' + if name is None: + name = '{0}{1}'.format( + instance.__class__.__name__, mixin_class.__name__ + ) + + # Check mixin class not already present in mro in order to avoid consistent + # method resolution failure. + if mixin_class in instance.__class__.mro(): + return + + instance.__class__ = type( + name, + ( + mixin_class, + instance.__class__ + ), + {} + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py new file mode 100644 index 0000000000..fbe14f3277 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py @@ -0,0 +1,656 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + +from __future__ import absolute_import + +import logging +import json +import sys +import os + +import ftrack_api +import ftrack_api.structure.standard as _standard +from ftrack_api.logging import LazyLogMessage as L + + +scenario_name = 'ftrack.centralized-storage' + + +class ConfigureCentralizedStorageScenario(object): + '''Configure a centralized storage scenario.''' + + def __init__(self): + '''Instansiate centralized storage scenario.''' + self.logger = logging.getLogger( + __name__ + '.' 
+ self.__class__.__name__ + ) + + @property + def storage_scenario(self): + '''Return storage scenario setting.''' + return self.session.query( + 'select value from Setting ' + 'where name is "storage_scenario" and group is "STORAGE"' + ).one() + + @property + def existing_centralized_storage_configuration(self): + '''Return existing centralized storage configuration.''' + storage_scenario = self.storage_scenario + + try: + configuration = json.loads(storage_scenario['value']) + except (ValueError, TypeError): + return None + + if not isinstance(configuration, dict): + return None + + if configuration.get('scenario') != scenario_name: + return None + + return configuration.get('data', {}) + + def _get_confirmation_text(self, configuration): + '''Return confirmation text from *configuration*.''' + configure_location = configuration.get('configure_location') + select_location = configuration.get('select_location') + select_mount_point = configuration.get('select_mount_point') + + if configure_location: + location_text = unicode( + 'A new location will be created:\n\n' + '* Label: {location_label}\n' + '* Name: {location_name}\n' + '* Description: {location_description}\n' + ).format(**configure_location) + else: + location = self.session.get( + 'Location', select_location['location_id'] + ) + location_text = ( + u'You have choosen to use an existing location: {0}'.format( + location['label'] + ) + ) + + mount_points_text = unicode( + '* Linux: {linux}\n' + '* OS X: {osx}\n' + '* Windows: {windows}\n\n' + ).format( + linux=select_mount_point.get('linux_mount_point') or '*Not set*', + osx=select_mount_point.get('osx_mount_point') or '*Not set*', + windows=select_mount_point.get('windows_mount_point') or '*Not set*' + ) + + mount_points_not_set = [] + + if not select_mount_point.get('linux_mount_point'): + mount_points_not_set.append('Linux') + + if not select_mount_point.get('osx_mount_point'): + mount_points_not_set.append('OS X') + + if not 
select_mount_point.get('windows_mount_point'): + mount_points_not_set.append('Windows') + + if mount_points_not_set: + mount_points_text += unicode( + 'Please be aware that this location will not be working on ' + '{missing} because the mount points are not set up.' + ).format( + missing=' and '.join(mount_points_not_set) + ) + + text = unicode( + '#Confirm storage setup#\n\n' + 'Almost there! Please take a moment to verify the settings you ' + 'are about to save. You can always come back later and update the ' + 'configuration.\n' + '##Location##\n\n' + '{location}\n' + '##Mount points##\n\n' + '{mount_points}' + ).format( + location=location_text, + mount_points=mount_points_text + ) + + return text + + def configure_scenario(self, event): + '''Configure scenario based on *event* and return form items.''' + steps = ( + 'select_scenario', + 'select_location', + 'configure_location', + 'select_structure', + 'select_mount_point', + 'confirm_summary', + 'save_configuration' + ) + + warning_message = '' + values = event['data'].get('values', {}) + + # Calculate previous step and the next. + previous_step = values.get('step', 'select_scenario') + next_step = steps[steps.index(previous_step) + 1] + state = 'configuring' + + self.logger.info(L( + u'Configuring scenario, previous step: {0}, next step: {1}. ' + u'Values {2!r}.', + previous_step, next_step, values + )) + + if 'configuration' in values: + configuration = values.pop('configuration') + else: + configuration = {} + + if values: + # Update configuration with values from the previous step. + configuration[previous_step] = values + + if previous_step == 'select_location': + values = configuration['select_location'] + if values.get('location_id') != 'create_new_location': + location_exists = self.session.query( + 'Location where id is "{0}"'.format( + values.get('location_id') + ) + ).first() + if not location_exists: + next_step = 'select_location' + warning_message = ( + '**The selected location does not exist. 
Please choose ' + 'one from the dropdown or create a new one.**' + ) + + if next_step == 'select_location': + try: + location_id = ( + self.existing_centralized_storage_configuration['location_id'] + ) + except (KeyError, TypeError): + location_id = None + + options = [{ + 'label': 'Create new location', + 'value': 'create_new_location' + }] + for location in self.session.query( + 'select name, label, description from Location' + ): + if location['name'] not in ( + 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', + 'ftrack.server', 'ftrack.review' + ): + options.append({ + 'label': u'{label} ({name})'.format( + label=location['label'], name=location['name'] + ), + 'description': location['description'], + 'value': location['id'] + }) + + warning = '' + if location_id is not None: + # If there is already a location configured we must make the + # user aware that changing the location may be problematic. + warning = ( + '\n\n**Be careful if you switch to another location ' + 'for an existing storage scenario. Components that have ' + 'already been published to the previous location will be ' + 'made unavailable for common use.**' + ) + default_value = location_id + elif location_id is None and len(options) == 1: + # No location configured and no existing locations to use. + default_value = 'create_new_location' + else: + # There are existing locations to choose from but non of them + # are currently active in the centralized storage scenario. + default_value = None + + items = [{ + 'type': 'label', + 'value': ( + '#Select location#\n' + 'Choose an already existing location or create a new one ' + 'to represent your centralized storage. 
{0}'.format( + warning + ) + ) + }, { + 'type': 'enumerator', + 'label': 'Location', + 'name': 'location_id', + 'value': default_value, + 'data': options + }] + + default_location_name = 'studio.central-storage-location' + default_location_label = 'Studio location' + default_location_description = ( + 'The studio central location where all components are ' + 'stored.' + ) + + if previous_step == 'configure_location': + configure_location = configuration.get( + 'configure_location' + ) + + if configure_location: + try: + existing_location = self.session.query( + u'Location where name is "{0}"'.format( + configure_location.get('location_name') + ) + ).first() + except UnicodeEncodeError: + next_step = 'configure_location' + warning_message += ( + '**The location name contains non-ascii characters. ' + 'Please change the name and try again.**' + ) + values = configuration['select_location'] + else: + if existing_location: + next_step = 'configure_location' + warning_message += ( + u'**There is already a location named {0}. ' + u'Please change the name and try again.**'.format( + configure_location.get('location_name') + ) + ) + values = configuration['select_location'] + + if ( + not configure_location.get('location_name') or + not configure_location.get('location_label') or + not configure_location.get('location_description') + ): + next_step = 'configure_location' + warning_message += ( + '**Location name, label and description cannot ' + 'be empty.**' + ) + values = configuration['select_location'] + + if next_step == 'configure_location': + # Populate form with previous configuration. + default_location_label = configure_location['location_label'] + default_location_name = configure_location['location_name'] + default_location_description = ( + configure_location['location_description'] + ) + + if next_step == 'configure_location': + + if values.get('location_id') == 'create_new_location': + # Add options to create a new location. 
+ items = [{ + 'type': 'label', + 'value': ( + '#Create location#\n' + 'Here you will create a new location to be used ' + 'with your new Storage scenario. For your ' + 'convenience we have already filled in some default ' + 'values. If this is the first time you are configuring ' + 'a storage scenario in ftrack we recommend that you ' + 'stick with these settings.' + ) + }, { + 'label': 'Label', + 'name': 'location_label', + 'value': default_location_label, + 'type': 'text' + }, { + 'label': 'Name', + 'name': 'location_name', + 'value': default_location_name, + 'type': 'text' + }, { + 'label': 'Description', + 'name': 'location_description', + 'value': default_location_description, + 'type': 'text' + }] + + else: + # The user selected an existing location. Move on to next + # step. + next_step = 'select_mount_point' + + if next_step == 'select_structure': + # There is only one structure to choose from, go to next step. + next_step = 'select_mount_point' + # items = [ + # { + # 'type': 'label', + # 'value': ( + # '#Select structure#\n' + # 'Select which structure to use with your location. ' + # 'The structure is used to generate the filesystem ' + # 'path for components that are added to this location.' + # ) + # }, + # { + # 'type': 'enumerator', + # 'label': 'Structure', + # 'name': 'structure_id', + # 'value': 'standard', + # 'data': [{ + # 'label': 'Standard', + # 'value': 'standard', + # 'description': ( + # 'The Standard structure uses the names in your ' + # 'project structure to determine the path.' + # ) + # }] + # } + # ] + + if next_step == 'select_mount_point': + try: + mount_points = ( + self.existing_centralized_storage_configuration['accessor']['mount_points'] + ) + except (KeyError, TypeError): + mount_points = dict() + + items = [ + { + 'value': ( + '#Mount points#\n' + 'Set mount points for your centralized storage ' + 'location. 
For the location to work as expected each ' + 'platform that you intend to use must have the ' + 'corresponding mount point set and the storage must ' + 'be accessible. If not set correctly files will not be ' + 'saved or read.' + ), + 'type': 'label' + }, { + 'type': 'text', + 'label': 'Linux', + 'name': 'linux_mount_point', + 'empty_text': 'E.g. /usr/mnt/MyStorage ...', + 'value': mount_points.get('linux', '') + }, { + 'type': 'text', + 'label': 'OS X', + 'name': 'osx_mount_point', + 'empty_text': 'E.g. /Volumes/MyStorage ...', + 'value': mount_points.get('osx', '') + }, { + 'type': 'text', + 'label': 'Windows', + 'name': 'windows_mount_point', + 'empty_text': 'E.g. \\\\MyStorage ...', + 'value': mount_points.get('windows', '') + } + ] + + if next_step == 'confirm_summary': + items = [{ + 'type': 'label', + 'value': self._get_confirmation_text(configuration) + }] + state = 'confirm' + + if next_step == 'save_configuration': + mount_points = configuration['select_mount_point'] + select_location = configuration['select_location'] + + if select_location['location_id'] == 'create_new_location': + configure_location = configuration['configure_location'] + location = self.session.create( + 'Location', + { + 'name': configure_location['location_name'], + 'label': configure_location['location_label'], + 'description': ( + configure_location['location_description'] + ) + } + ) + + else: + location = self.session.query( + 'Location where id is "{0}"'.format( + select_location['location_id'] + ) + ).one() + + setting_value = json.dumps({ + 'scenario': scenario_name, + 'data': { + 'location_id': location['id'], + 'location_name': location['name'], + 'accessor': { + 'mount_points': { + 'linux': mount_points['linux_mount_point'], + 'osx': mount_points['osx_mount_point'], + 'windows': mount_points['windows_mount_point'] + } + } + } + }) + + self.storage_scenario['value'] = setting_value + self.session.commit() + + # Broadcast an event that storage scenario has been configured. 
+ event = ftrack_api.event.base.Event( + topic='ftrack.storage-scenario.configure-done' + ) + self.session.event_hub.publish(event) + + items = [{ + 'type': 'label', + 'value': ( + '#Done!#\n' + 'Your storage scenario is now configured and ready ' + 'to use. **Note that you may have to restart Connect and ' + 'other applications to start using it.**' + ) + }] + state = 'done' + + if warning_message: + items.insert(0, { + 'type': 'label', + 'value': warning_message + }) + + items.append({ + 'type': 'hidden', + 'value': configuration, + 'name': 'configuration' + }) + items.append({ + 'type': 'hidden', + 'value': next_step, + 'name': 'step' + }) + + return { + 'items': items, + 'state': state + } + + def discover_centralized_scenario(self, event): + '''Return action discover dictionary for *event*.''' + return { + 'id': scenario_name, + 'name': 'Centralized storage scenario', + 'description': ( + '(Recommended) centralized storage scenario where all files ' + 'are kept on a storage that is mounted and available to ' + 'everyone in the studio.' + ) + } + + def register(self, session): + '''Subscribe to events on *session*.''' + self.session = session + + #: TODO: Move these to a separate function. + session.event_hub.subscribe( + unicode( + 'topic=ftrack.storage-scenario.discover ' + 'and source.user.username="{0}"' + ).format( + session.api_user + ), + self.discover_centralized_scenario + ) + session.event_hub.subscribe( + unicode( + 'topic=ftrack.storage-scenario.configure ' + 'and data.scenario_id="{0}" ' + 'and source.user.username="{1}"' + ).format( + scenario_name, + session.api_user + ), + self.configure_scenario + ) + + +class ActivateCentralizedStorageScenario(object): + '''Activate a centralized storage scenario.''' + + def __init__(self): + '''Instansiate centralized storage scenario.''' + self.logger = logging.getLogger( + __name__ + '.' 
+ self.__class__.__name__ + ) + + def activate(self, event): + '''Activate scenario in *event*.''' + storage_scenario = event['data']['storage_scenario'] + + try: + location_data = storage_scenario['data'] + location_name = location_data['location_name'] + location_id = location_data['location_id'] + mount_points = location_data['accessor']['mount_points'] + + except KeyError: + error_message = ( + 'Unable to read storage scenario data.' + ) + self.logger.error(L(error_message)) + raise ftrack_api.exception.LocationError( + 'Unable to configure location based on scenario.' + ) + + else: + location = self.session.create( + 'Location', + data=dict( + name=location_name, + id=location_id + ), + reconstructing=True + ) + + if sys.platform == 'darwin': + prefix = mount_points['osx'] + elif sys.platform == 'linux2': + prefix = mount_points['linux'] + elif sys.platform == 'win32': + prefix = mount_points['windows'] + else: + raise ftrack_api.exception.LocationError( + ( + 'Unable to find accessor prefix for platform {0}.' + ).format(sys.platform) + ) + + location.accessor = ftrack_api.accessor.disk.DiskAccessor( + prefix=prefix + ) + location.structure = _standard.StandardStructure() + location.priority = 1 + self.logger.info(L( + u'Storage scenario activated. Configured {0!r} from ' + u'{1!r}', + location, storage_scenario + )) + + def _verify_startup(self, event): + '''Verify the storage scenario configuration.''' + storage_scenario = event['data']['storage_scenario'] + location_data = storage_scenario['data'] + mount_points = location_data['accessor']['mount_points'] + + prefix = None + if sys.platform == 'darwin': + prefix = mount_points['osx'] + elif sys.platform == 'linux2': + prefix = mount_points['linux'] + elif sys.platform == 'win32': + prefix = mount_points['windows'] + + if not prefix: + return ( + u'The storage scenario has not been configured for your ' + u'operating system. ftrack may not be able to ' + u'store and track files correctly.' 
+ ) + + if not os.path.isdir(prefix): + return ( + unicode( + 'The path {0} does not exist. ftrack may not be able to ' + 'store and track files correctly. \n\nIf the storage is ' + 'newly setup you may want to create necessary folder ' + 'structures. If the storage is a network drive you should ' + 'make sure that it is mounted correctly.' + ).format(prefix) + ) + + def register(self, session): + '''Subscribe to events on *session*.''' + self.session = session + + session.event_hub.subscribe( + ( + 'topic=ftrack.storage-scenario.activate ' + 'and data.storage_scenario.scenario="{0}"'.format( + scenario_name + ) + ), + self.activate + ) + + # Listen to verify startup event from ftrack connect to allow responding + # with a message if something is not working correctly with this + # scenario that the user should be notified about. + self.session.event_hub.subscribe( + ( + 'topic=ftrack.connect.verify-startup ' + 'and data.storage_scenario.scenario="{0}"'.format( + scenario_name + ) + ), + self._verify_startup + ) + +def register(session): + '''Register storage scenario.''' + scenario = ActivateCentralizedStorageScenario() + scenario.register(session) + + +def register_configuration(session): + '''Register storage scenario.''' + scenario = ConfigureCentralizedStorageScenario() + scenario.register(session) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py new file mode 100644 index 0000000000..9f79a1850c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py @@ -0,0 +1,534 @@ +# pragma: no cover +# Module 'ntpath' -- common operations on WinNT/Win95 pathnames +"""Common pathname manipulations, WindowsNT/95 version. + +Instead of importing this module directly, import os and refer to this +module as os.path. 
+""" + +import os +import sys +import stat +import genericpath +import warnings + +from genericpath import * + +__all__ = ["normcase","isabs","join","splitdrive","split","splitext", + "basename","dirname","commonprefix","getsize","getmtime", + "getatime","getctime", "islink","exists","lexists","isdir","isfile", + "ismount","walk","expanduser","expandvars","normpath","abspath", + "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", + "extsep","devnull","realpath","supports_unicode_filenames","relpath"] + +# strings representing various path-related bits and pieces +curdir = '.' +pardir = '..' +extsep = '.' +sep = '\\' +pathsep = ';' +altsep = '/' +defpath = '.;C:\\bin' +if 'ce' in sys.builtin_module_names: + defpath = '\\Windows' +elif 'os2' in sys.builtin_module_names: + # OS/2 w/ VACPP + altsep = '/' +devnull = 'nul' + +# Normalize the case of a pathname and map slashes to backslashes. +# Other normalizations (such as optimizing '../' away) are not done +# (this is done by normpath). + +def normcase(s): + """Normalize case of pathname. + + Makes all characters lowercase and all slashes into backslashes.""" + return s.replace("/", "\\").lower() + + +# Return whether a path is absolute. +# Trivial in Posix, harder on the Mac or MS-DOS. +# For DOS it is absolute if it starts with a slash or backslash (current +# volume), or if a pathname after the volume letter and colon / UNC resource +# starts with a slash or backslash. + +def isabs(s): + """Test whether a path is absolute""" + s = splitdrive(s)[1] + return s != '' and s[:1] in '/\\' + + +# Join two (or more) paths. + +def join(a, *p): + """Join two or more pathname components, inserting "\\" as needed. + If any component is an absolute path, all previous path components + will be discarded.""" + path = a + for b in p: + b_wins = 0 # set to 1 iff b makes path irrelevant + if path == "": + b_wins = 1 + + elif isabs(b): + # This probably wipes out path so far. 
However, it's more + # complicated if path begins with a drive letter: + # 1. join('c:', '/a') == 'c:/a' + # 2. join('c:/', '/a') == 'c:/a' + # But + # 3. join('c:/a', '/b') == '/b' + # 4. join('c:', 'd:/') = 'd:/' + # 5. join('c:/', 'd:/') = 'd:/' + if path[1:2] != ":" or b[1:2] == ":": + # Path doesn't start with a drive letter, or cases 4 and 5. + b_wins = 1 + + # Else path has a drive letter, and b doesn't but is absolute. + elif len(path) > 3 or (len(path) == 3 and + path[-1] not in "/\\"): + # case 3 + b_wins = 1 + + if b_wins: + path = b + else: + # Join, and ensure there's a separator. + assert len(path) > 0 + if path[-1] in "/\\": + if b and b[0] in "/\\": + path += b[1:] + else: + path += b + elif path[-1] == ":": + path += b + elif b: + if b[0] in "/\\": + path += b + else: + path += "\\" + b + else: + # path is not empty and does not end with a backslash, + # but b is empty; since, e.g., split('a/') produces + # ('a', ''), it's best if join() adds a backslash in + # this case. + path += '\\' + + return path + + +# Split a path in a drive specification (a drive letter followed by a +# colon) and the path specification. +# It is always true that drivespec + pathspec == p +def splitdrive(p): + """Split a pathname into drive and path specifiers. Returns a 2-tuple +"(drive,path)"; either part may be empty""" + if p[1:2] == ':': + return p[0:2], p[2:] + return '', p + + +# Parse UNC paths +def splitunc(p): + """Split a pathname into UNC mount point and relative path specifiers. + + Return a 2-tuple (unc, rest); either part may be empty. + If unc is not empty, it has the form '//host/mount' (or similar + using backslashes). unc+rest is always the input path. + Paths containing drive letters never have an UNC part. + """ + if p[1:2] == ':': + return '', p # Drive letter present + firstTwo = p[0:2] + if firstTwo == '//' or firstTwo == '\\\\': + # is a UNC path: + # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter + # \\machine\mountpoint\directories... 
+ # directory ^^^^^^^^^^^^^^^ + normp = normcase(p) + index = normp.find('\\', 2) + if index == -1: + ##raise RuntimeError, 'illegal UNC path: "' + p + '"' + return ("", p) + index = normp.find('\\', index + 1) + if index == -1: + index = len(p) + return p[:index], p[index:] + return '', p + + +# Split a path in head (everything up to the last '/') and tail (the +# rest). After the trailing '/' is stripped, the invariant +# join(head, tail) == p holds. +# The resulting head won't end in '/' unless it is the root. + +def split(p): + """Split a pathname. + + Return tuple (head, tail) where tail is everything after the final slash. + Either part may be empty.""" + + d, p = splitdrive(p) + # set i to index beyond p's last slash + i = len(p) + while i and p[i-1] not in '/\\': + i = i - 1 + head, tail = p[:i], p[i:] # now tail has no slashes + # remove trailing slashes from head, unless it's all slashes + head2 = head + while head2 and head2[-1] in '/\\': + head2 = head2[:-1] + head = head2 or head + return d + head, tail + + +# Split a path in root and extension. +# The extension is everything starting at the last dot in the last +# pathname component; the root is everything before that. +# It is always true that root + ext == p. + +def splitext(p): + return genericpath._splitext(p, sep, altsep, extsep) +splitext.__doc__ = genericpath._splitext.__doc__ + + +# Return the tail (basename) part of a path. + +def basename(p): + """Returns the final component of a pathname""" + return split(p)[1] + + +# Return the head (dirname) part of a path. + +def dirname(p): + """Returns the directory component of a pathname""" + return split(p)[0] + +# Is a path a symbolic link? +# This will always return false on systems where posix.lstat doesn't exist. + +def islink(path): + """Test for symbolic link. + On WindowsNT/95 and OS/2 always returns false + """ + return False + +# alias exists to lexists +lexists = exists + +# Is a path a mount point? 
Either a root (with or without drive letter) +# or an UNC path with at most a / or \ after the mount point. + +def ismount(path): + """Test whether a path is a mount point (defined as root of drive)""" + unc, rest = splitunc(path) + if unc: + return rest in ("", "/", "\\") + p = splitdrive(path)[1] + return len(p) == 1 and p[0] in '/\\' + + +# Directory tree walk. +# For each directory under top (including top itself, but excluding +# '.' and '..'), func(arg, dirname, filenames) is called, where +# dirname is the name of the directory and filenames is the list +# of files (and subdirectories etc.) in the directory. +# The func may modify the filenames list, to implement a filter, +# or to impose a different order of visiting. + +def walk(top, func, arg): + """Directory tree walk with callback function. + + For each directory in the directory tree rooted at top (including top + itself, but excluding '.' and '..'), call func(arg, dirname, fnames). + dirname is the name of the directory, and fnames a list of the names of + the files and subdirectories in dirname (excluding '.' and '..'). func + may modify the fnames list in-place (e.g. via del or slice assignment), + and walk will only recurse into the subdirectories whose names remain in + fnames; this can be used to implement a filter, or to impose a specific + order of visiting. No semantics are defined for, or required of, arg, + beyond that arg is always passed to func. It can be used, e.g., to pass + a filename pattern, or a mutable object designed to accumulate + statistics. Passing None for arg is common.""" + warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", + stacklevel=2) + try: + names = os.listdir(top) + except os.error: + return + func(arg, top, names) + for name in names: + name = join(top, name) + if isdir(name): + walk(name, func, arg) + + +# Expand paths beginning with '~' or '~user'. +# '~' means $HOME; '~user' means that user's home directory. 
+# If the path doesn't begin with '~', or if the user or $HOME is unknown, +# the path is returned unchanged (leaving error reporting to whatever +# function is called with the expanded path as argument). +# See also module 'glob' for expansion of *, ? and [...] in pathnames. +# (A function should also be defined to do full *sh-style environment +# variable expansion.) + +def expanduser(path): + """Expand ~ and ~user constructs. + + If user or $HOME is unknown, do nothing.""" + if path[:1] != '~': + return path + i, n = 1, len(path) + while i < n and path[i] not in '/\\': + i = i + 1 + + if 'HOME' in os.environ: + userhome = os.environ['HOME'] + elif 'USERPROFILE' in os.environ: + userhome = os.environ['USERPROFILE'] + elif not 'HOMEPATH' in os.environ: + return path + else: + try: + drive = os.environ['HOMEDRIVE'] + except KeyError: + drive = '' + userhome = join(drive, os.environ['HOMEPATH']) + + if i != 1: #~user + userhome = join(dirname(userhome), path[1:i]) + + return userhome + path[i:] + + +# Expand paths containing shell variable substitutions. +# The following rules apply: +# - no expansion within single quotes +# - '$$' is translated into '$' +# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% +# - ${varname} is accepted. +# - $varname is accepted. +# - %varname% is accepted. +# - varnames can be made out of letters, digits and the characters '_-' +# (though is not verified in the ${varname} and %varname% cases) +# XXX With COMMAND.COM you can use any characters in a variable name, +# XXX except '^|<>='. + +def expandvars(path): + """Expand shell variables of the forms $var, ${var} and %var%. 
+ + Unknown variables are left unchanged.""" + if '$' not in path and '%' not in path: + return path + import string + varchars = string.ascii_letters + string.digits + '_-' + res = '' + index = 0 + pathlen = len(path) + while index < pathlen: + c = path[index] + if c == '\'': # no expansion within single quotes + path = path[index + 1:] + pathlen = len(path) + try: + index = path.index('\'') + res = res + '\'' + path[:index + 1] + except ValueError: + res = res + path + index = pathlen - 1 + elif c == '%': # variable or '%' + if path[index + 1:index + 2] == '%': + res = res + c + index = index + 1 + else: + path = path[index+1:] + pathlen = len(path) + try: + index = path.index('%') + except ValueError: + res = res + '%' + path + index = pathlen - 1 + else: + var = path[:index] + if var in os.environ: + res = res + os.environ[var] + else: + res = res + '%' + var + '%' + elif c == '$': # variable or '$$' + if path[index + 1:index + 2] == '$': + res = res + c + index = index + 1 + elif path[index + 1:index + 2] == '{': + path = path[index+2:] + pathlen = len(path) + try: + index = path.index('}') + var = path[:index] + if var in os.environ: + res = res + os.environ[var] + else: + res = res + '${' + var + '}' + except ValueError: + res = res + '${' + path + index = pathlen - 1 + else: + var = '' + index = index + 1 + c = path[index:index + 1] + while c != '' and c in varchars: + var = var + c + index = index + 1 + c = path[index:index + 1] + if var in os.environ: + res = res + os.environ[var] + else: + res = res + '$' + var + if c != '': + index = index - 1 + else: + res = res + c + index = index + 1 + return res + + +# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. +# Previously, this function also truncated pathnames to 8+3 format, +# but as this module is called "ntpath", that's obviously wrong! 
+ +def normpath(path): + """Normalize path, eliminating double slashes, etc.""" + # Preserve unicode (if path is unicode) + backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.') + if path.startswith(('\\\\.\\', '\\\\?\\')): + # in the case of paths with these prefixes: + # \\.\ -> device names + # \\?\ -> literal paths + # do not do any normalization, but return the path unchanged + return path + path = path.replace("/", "\\") + prefix, path = splitdrive(path) + # We need to be careful here. If the prefix is empty, and the path starts + # with a backslash, it could either be an absolute path on the current + # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It + # is therefore imperative NOT to collapse multiple backslashes blindly in + # that case. + # The code below preserves multiple backslashes when there is no drive + # letter. This means that the invalid filename \\\a\b is preserved + # unchanged, where a\\\b is normalised to a\b. It's not clear that there + # is any better behaviour for such edge cases. + if prefix == '': + # No drive letter - preserve initial backslashes + while path[:1] == "\\": + prefix = prefix + backslash + path = path[1:] + else: + # We have a drive letter - collapse initial backslashes + if path.startswith("\\"): + prefix = prefix + backslash + path = path.lstrip("\\") + comps = path.split("\\") + i = 0 + while i < len(comps): + if comps[i] in ('.', ''): + del comps[i] + elif comps[i] == '..': + if i > 0 and comps[i-1] != '..': + del comps[i-1:i+1] + i -= 1 + elif i == 0 and prefix.endswith("\\"): + del comps[i] + else: + i += 1 + else: + i += 1 + # If the path is now empty, substitute '.' + if not prefix and not comps: + comps.append(dot) + return prefix + backslash.join(comps) + + +# Return an absolute path. 
+try: + from nt import _getfullpathname + +except ImportError: # not running on Windows - mock up something sensible + def abspath(path): + """Return the absolute version of a path.""" + if not isabs(path): + if isinstance(path, unicode): + cwd = os.getcwdu() + else: + cwd = os.getcwd() + path = join(cwd, path) + return normpath(path) + +else: # use native Windows method on Windows + def abspath(path): + """Return the absolute version of a path.""" + + if path: # Empty path must return current working directory. + try: + path = _getfullpathname(path) + except WindowsError: + pass # Bad path - return unchanged. + elif isinstance(path, unicode): + path = os.getcwdu() + else: + path = os.getcwd() + return normpath(path) + +# realpath is a no-op on systems without islink support +realpath = abspath +# Win9x family and earlier have no Unicode filename support. +supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and + sys.getwindowsversion()[3] >= 2) + +def _abspath_split(path): + abs = abspath(normpath(path)) + prefix, rest = splitunc(abs) + is_unc = bool(prefix) + if not is_unc: + prefix, rest = splitdrive(abs) + return is_unc, prefix, [x for x in rest.split(sep) if x] + +def relpath(path, start=curdir): + """Return a relative version of a path""" + + if not path: + raise ValueError("no path specified") + + start_is_unc, start_prefix, start_list = _abspath_split(start) + path_is_unc, path_prefix, path_list = _abspath_split(path) + + if path_is_unc ^ start_is_unc: + raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" + % (path, start)) + if path_prefix.lower() != start_prefix.lower(): + if path_is_unc: + raise ValueError("path is on UNC root %s, start on UNC root %s" + % (path_prefix, start_prefix)) + else: + raise ValueError("path is on drive %s, start on drive %s" + % (path_prefix, start_prefix)) + # Work out how much of the filepath is shared by start and path. 
+    i = 0
+    for e1, e2 in zip(start_list, path_list):
+        if e1.lower() != e2.lower():
+            break
+        i += 1
+
+    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
+    if not rel_list:
+        return curdir
+    return join(*rel_list)
+
+try:
+    # The genericpath.isdir implementation uses os.stat and checks the mode
+    # attribute to tell whether or not the path is a directory.
+    # This is overkill on Windows - just pass the path to GetFileAttributes
+    # and check the attribute from there.
+    from nt import _isdir as isdir
+except ImportError:
+    # Use genericpath.isdir as imported above.
+    pass
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py
new file mode 100644
index 0000000000..aa1a8c4aba
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py
@@ -0,0 +1 @@
+__version__ = '1.8.2'
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py
new file mode 100644
index 0000000000..69cc6f4b4f
--- /dev/null
+++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py
@@ -0,0 +1,66 @@
+"""
+Yet another backport of WeakMethod for Python 2.7.
+Changes include removing exception chaining and adding args to super() calls.
+
+Copyright (c) 2001-2019 Python Software Foundation. All rights reserved.
+
+Full license available in LICENSE.python.
+"""
+from weakref import ref
+
+
+class WeakMethod(ref):
+    """
+    A custom `weakref.ref` subclass which simulates a weak reference to
+    a bound method, working around the lifetime problem of bound methods.
+ """ + + __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" + + def __new__(cls, meth, callback=None): + try: + obj = meth.__self__ + func = meth.__func__ + except AttributeError: + raise TypeError( + "argument should be a bound method, not {}".format(type(meth)) + ) + + def _cb(arg): + # The self-weakref trick is needed to avoid creating a reference + # cycle. + self = self_wr() + if self._alive: + self._alive = False + if callback is not None: + callback(self) + + self = ref.__new__(cls, obj, _cb) + self._func_ref = ref(func, _cb) + self._meth_type = type(meth) + self._alive = True + self_wr = ref(self) + return self + + def __call__(self): + obj = super(WeakMethod, self).__call__() + func = self._func_ref() + if obj is None or func is None: + return None + return self._meth_type(func, obj) + + def __eq__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is other + return ref.__eq__(self, other) and self._func_ref == other._func_ref + return NotImplemented + + def __ne__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is not other + return ref.__ne__(self, other) or self._func_ref != other._func_ref + return NotImplemented + + __hash__ = ref.__hash__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py new file mode 100644 index 0000000000..1aab07ed77 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py new file mode 100644 index 0000000000..6aa9cf0281 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py @@ -0,0 +1,124 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +import abc + +import ftrack_api.exception + + +class Accessor(object): + '''Provide data access to a location. + + A location represents a specific storage, but access to that storage may + vary. For example, both local filesystem and FTP access may be possible for + the same storage. An accessor implements these different ways of accessing + the same data location. + + As different accessors may access the same location, only part of a data + path that is commonly understood may be stored in the database. The format + of this path should be a contract between the accessors that require access + to the same location and is left as an implementation detail. As such, this + system provides no guarantee that two different accessors can provide access + to the same location, though this is a clear goal. The path stored centrally + is referred to as the **resource identifier** and should be used when + calling any of the accessor methods that accept a *resource_identifier* + argument. + + ''' + + __metaclass__ = abc.ABCMeta + + def __init__(self): + '''Initialise location accessor.''' + super(Accessor, self).__init__() + + @abc.abstractmethod + def list(self, resource_identifier): + '''Return list of entries in *resource_identifier* container. + + Each entry in the returned list should be a valid resource identifier. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist or + :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if + *resource_identifier* is not a container. 
+ + ''' + + @abc.abstractmethod + def exists(self, resource_identifier): + '''Return if *resource_identifier* is valid and exists in location.''' + + @abc.abstractmethod + def is_file(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file.''' + + @abc.abstractmethod + def is_container(self, resource_identifier): + '''Return whether *resource_identifier* refers to a container.''' + + @abc.abstractmethod + def is_sequence(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file sequence.''' + + @abc.abstractmethod + def open(self, resource_identifier, mode='rb'): + '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' + + @abc.abstractmethod + def remove(self, resource_identifier): + '''Remove *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist. + + ''' + + @abc.abstractmethod + def make_container(self, resource_identifier, recursive=True): + '''Make a container at *resource_identifier*. + + If *recursive* is True, also make any intermediate containers. + + Should silently ignore existing containers and not recreate them. + + ''' + + @abc.abstractmethod + def get_container(self, resource_identifier): + '''Return resource_identifier of container for *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` + if container of *resource_identifier* could not be determined. + + ''' + + def remove_container(self, resource_identifier): # pragma: no cover + '''Remove container at *resource_identifier*.''' + return self.remove(resource_identifier) + + def get_filesystem_path(self, resource_identifier): # pragma: no cover + '''Return filesystem path for *resource_identifier*. 
+ + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + filesystem path could not be determined from *resource_identifier* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving filesystem paths is not supported by this accessor. + + ''' + raise ftrack_api.exception.AccessorUnsupportedOperationError( + 'get_filesystem_path', resource_identifier=resource_identifier + ) + + def get_url(self, resource_identifier): + '''Return URL for *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + URL could not be determined from *resource_identifier* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving URL is not supported by this accessor. + + ''' + raise ftrack_api.exception.AccessorUnsupportedOperationError( + 'get_url', resource_identifier=resource_identifier + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py new file mode 100644 index 0000000000..65769603f6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py @@ -0,0 +1,250 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +import os +import sys +import errno +import contextlib + +import ftrack_api._python_ntpath as ntpath +import ftrack_api.accessor.base +import ftrack_api.data +from ftrack_api.exception import ( + AccessorFilesystemPathError, + AccessorUnsupportedOperationError, + AccessorResourceNotFoundError, + AccessorOperationFailedError, + AccessorPermissionDeniedError, + AccessorResourceInvalidError, + AccessorContainerNotEmptyError, + AccessorParentResourceNotFoundError +) + + +class DiskAccessor(ftrack_api.accessor.base.Accessor): + '''Provide disk access to a location. 
+ + Expect resource identifiers to refer to relative filesystem paths. + + ''' + + def __init__(self, prefix, **kw): + '''Initialise location accessor. + + *prefix* specifies the base folder for the disk based structure and + will be prepended to any path. It should be specified in the syntax of + the current OS. + + ''' + if prefix: + prefix = os.path.expanduser(os.path.expandvars(prefix)) + prefix = os.path.abspath(prefix) + self.prefix = prefix + + super(DiskAccessor, self).__init__(**kw) + + def list(self, resource_identifier): + '''Return list of entries in *resource_identifier* container. + + Each entry in the returned list should be a valid resource identifier. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist or + :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if + *resource_identifier* is not a container. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + with error_handler( + operation='list', resource_identifier=resource_identifier + ): + listing = [] + for entry in os.listdir(filesystem_path): + listing.append(os.path.join(resource_identifier, entry)) + + return listing + + def exists(self, resource_identifier): + '''Return if *resource_identifier* is valid and exists in location.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + return os.path.exists(filesystem_path) + + def is_file(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + return os.path.isfile(filesystem_path) + + def is_container(self, resource_identifier): + '''Return whether *resource_identifier* refers to a container.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + return os.path.isdir(filesystem_path) + + def is_sequence(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file sequence.''' + raise 
AccessorUnsupportedOperationError(operation='is_sequence') + + def open(self, resource_identifier, mode='rb'): + '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + with error_handler( + operation='open', resource_identifier=resource_identifier + ): + data = ftrack_api.data.File(filesystem_path, mode) + + return data + + def remove(self, resource_identifier): + '''Remove *resource_identifier*. + + Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if + *resource_identifier* does not exist. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + if self.is_file(resource_identifier): + with error_handler( + operation='remove', resource_identifier=resource_identifier + ): + os.remove(filesystem_path) + + elif self.is_container(resource_identifier): + with error_handler( + operation='remove', resource_identifier=resource_identifier + ): + os.rmdir(filesystem_path) + + else: + raise AccessorResourceNotFoundError( + resource_identifier=resource_identifier + ) + + def make_container(self, resource_identifier, recursive=True): + '''Make a container at *resource_identifier*. + + If *recursive* is True, also make any intermediate containers. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + with error_handler( + operation='makeContainer', resource_identifier=resource_identifier + ): + try: + if recursive: + os.makedirs(filesystem_path) + else: + try: + os.mkdir(filesystem_path) + except OSError as error: + if error.errno == errno.ENOENT: + raise AccessorParentResourceNotFoundError( + resource_identifier=resource_identifier + ) + else: + raise + + except OSError, error: + if error.errno != errno.EEXIST: + raise + + def get_container(self, resource_identifier): + '''Return resource_identifier of container for *resource_identifier*. 
+ + Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if + container of *resource_identifier* could not be determined. + + ''' + filesystem_path = self.get_filesystem_path(resource_identifier) + + container = os.path.dirname(filesystem_path) + + if self.prefix: + if not container.startswith(self.prefix): + raise AccessorParentResourceNotFoundError( + resource_identifier=resource_identifier, + message='Could not determine container for ' + '{resource_identifier} as container falls outside ' + 'of configured prefix.' + ) + + # Convert container filesystem path into resource identifier. + container = container[len(self.prefix):] + if ntpath.isabs(container): + # Ensure that resulting path is relative by stripping any + # leftover prefixed slashes from string. + # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the + # result will be 'foo/bar'. + container = container.lstrip('\\/') + + return container + + def get_filesystem_path(self, resource_identifier): + '''Return filesystem path for *resource_identifier*. + + For example:: + + >>> accessor = DiskAccessor('my.location', '/mountpoint') + >>> print accessor.get_filesystem_path('test.txt') + /mountpoint/test.txt + >>> print accessor.get_filesystem_path('/mountpoint/test.txt') + /mountpoint/test.txt + + Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem + path could not be determined from *resource_identifier*. + + ''' + filesystem_path = resource_identifier + if filesystem_path: + filesystem_path = os.path.normpath(filesystem_path) + + if self.prefix: + if not os.path.isabs(filesystem_path): + filesystem_path = os.path.normpath( + os.path.join(self.prefix, filesystem_path) + ) + + if not filesystem_path.startswith(self.prefix): + raise AccessorFilesystemPathError( + resource_identifier=resource_identifier, + message='Could not determine access path for ' + 'resource_identifier outside of configured prefix: ' + '{resource_identifier}.' 
+ ) + + return filesystem_path + + +@contextlib.contextmanager +def error_handler(**kw): + '''Conform raised OSError/IOError exception to appropriate FTrack error.''' + try: + yield + + except (OSError, IOError) as error: + (exception_type, exception_value, traceback) = sys.exc_info() + kw.setdefault('error', error) + + error_code = getattr(error, 'errno') + if not error_code: + raise AccessorOperationFailedError(**kw), None, traceback + + if error_code == errno.ENOENT: + raise AccessorResourceNotFoundError(**kw), None, traceback + + elif error_code == errno.EPERM: + raise AccessorPermissionDeniedError(**kw), None, traceback + + elif error_code == errno.ENOTEMPTY: + raise AccessorContainerNotEmptyError(**kw), None, traceback + + elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): + raise AccessorResourceInvalidError(**kw), None, traceback + + else: + raise AccessorOperationFailedError(**kw), None, traceback + + except Exception: + raise diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py new file mode 100644 index 0000000000..9c735084d5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py @@ -0,0 +1,240 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import hashlib +import base64 +import json + +import requests + +from .base import Accessor +from ..data import String +import ftrack_api.exception +import ftrack_api.symbol + + +class ServerFile(String): + '''Representation of a server file.''' + + def __init__(self, resource_identifier, session, mode='rb'): + '''Initialise file.''' + self.mode = mode + self.resource_identifier = resource_identifier + self._session = session + self._has_read = False + + super(ServerFile, self).__init__() + + def flush(self): + '''Flush all 
changes.''' + super(ServerFile, self).flush() + + if self.mode == 'wb': + self._write() + + def read(self, limit=None): + '''Read file.''' + if not self._has_read: + self._read() + self._has_read = True + + return super(ServerFile, self).read(limit) + + def _read(self): + '''Read all remote content from key into wrapped_file.''' + position = self.tell() + self.seek(0) + + response = requests.get( + '{0}/component/get'.format(self._session.server_url), + params={ + 'id': self.resource_identifier, + 'username': self._session.api_user, + 'apiKey': self._session.api_key + }, + stream=True + ) + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as error: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to read data: {0}.'.format(error) + ) + + for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): + self.wrapped_file.write(block) + + self.flush() + self.seek(position) + + def _write(self): + '''Write current data to remote key.''' + position = self.tell() + self.seek(0) + + # Retrieve component from cache to construct a filename. + component = self._session.get('FileComponent', self.resource_identifier) + if not component: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Unable to retrieve component with id: {0}.'.format( + self.resource_identifier + ) + ) + + # Construct a name from component name and file_type. + name = component['name'] + if component['file_type']: + name = u'{0}.{1}'.format( + name, + component['file_type'].lstrip('.') + ) + + try: + metadata = self._session.get_upload_metadata( + component_id=self.resource_identifier, + file_name=name, + file_size=self._get_size(), + checksum=self._compute_checksum() + ) + except Exception as error: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to get put metadata: {0}.'.format(error) + ) + + # Ensure at beginning of file before put. + self.seek(0) + + # Put the file based on the metadata. 
+ response = requests.put( + metadata['url'], + data=self.wrapped_file, + headers=metadata['headers'] + ) + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as error: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to put file to server: {0}.'.format(error) + ) + + self.seek(position) + + def _get_size(self): + '''Return size of file in bytes.''' + position = self.tell() + self.seek(0, os.SEEK_END) + length = self.tell() + self.seek(position) + return length + + def _compute_checksum(self): + '''Return checksum for file.''' + fp = self.wrapped_file + buf_size = ftrack_api.symbol.CHUNK_SIZE + hash_obj = hashlib.md5() + spos = fp.tell() + + s = fp.read(buf_size) + while s: + hash_obj.update(s) + s = fp.read(buf_size) + + base64_digest = base64.encodestring(hash_obj.digest()) + if base64_digest[-1] == '\n': + base64_digest = base64_digest[0:-1] + + fp.seek(spos) + return base64_digest + + +class _ServerAccessor(Accessor): + '''Provide server location access.''' + + def __init__(self, session, **kw): + '''Initialise location accessor.''' + super(_ServerAccessor, self).__init__(**kw) + + self._session = session + + def open(self, resource_identifier, mode='rb'): + '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' + return ServerFile(resource_identifier, session=self._session, mode=mode) + + def remove(self, resourceIdentifier): + '''Remove *resourceIdentifier*.''' + response = requests.get( + '{0}/component/remove'.format(self._session.server_url), + params={ + 'id': resourceIdentifier, + 'username': self._session.api_user, + 'apiKey': self._session.api_key + } + ) + if response.status_code != 200: + raise ftrack_api.exception.AccessorOperationFailedError( + 'Failed to remove file.' 
+ ) + + def get_container(self, resource_identifier): + '''Return resource_identifier of container for *resource_identifier*.''' + return None + + def make_container(self, resource_identifier, recursive=True): + '''Make a container at *resource_identifier*.''' + + def list(self, resource_identifier): + '''Return list of entries in *resource_identifier* container.''' + raise NotImplementedError() + + def exists(self, resource_identifier): + '''Return if *resource_identifier* is valid and exists in location.''' + return False + + def is_file(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file.''' + raise NotImplementedError() + + def is_container(self, resource_identifier): + '''Return whether *resource_identifier* refers to a container.''' + raise NotImplementedError() + + def is_sequence(self, resource_identifier): + '''Return whether *resource_identifier* refers to a file sequence.''' + raise NotImplementedError() + + def get_url(self, resource_identifier): + '''Return url for *resource_identifier*.''' + url_string = ( + u'{url}/component/get?id={id}&username={username}' + u'&apiKey={apiKey}' + ) + return url_string.format( + url=self._session.server_url, + id=resource_identifier, + username=self._session.api_user, + apiKey=self._session.api_key + ) + + def get_thumbnail_url(self, resource_identifier, size=None): + '''Return thumbnail url for *resource_identifier*. + + Optionally, specify *size* to constrain the downscaled image to size + x size pixels. 
+ ''' + url_string = ( + u'{url}/component/thumbnail?id={id}&username={username}' + u'&apiKey={apiKey}' + ) + url = url_string.format( + url=self._session.server_url, + id=resource_identifier, + username=self._session.api_user, + apiKey=self._session.api_key + ) + if size: + url += u'&size={0}'.format(size) + + return url diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py new file mode 100644 index 0000000000..719b612f39 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py @@ -0,0 +1,707 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import collections +import copy +import logging +import functools + +import ftrack_api.symbol +import ftrack_api.exception +import ftrack_api.collection +import ftrack_api.inspection +import ftrack_api.operation + +logger = logging.getLogger( + __name__ +) + + +def merge_references(function): + '''Decorator to handle merging of references / collections.''' + + @functools.wraps(function) + def get_value(attribute, entity): + '''Merge the attribute with the local cache.''' + + if attribute.name not in entity._inflated: + # Only merge on first access to avoid + # inflating them multiple times. + + logger.debug( + 'Merging potential new data into attached ' + 'entity for attribute {0}.'.format( + attribute.name + ) + ) + + # Local attributes. 
+ local_value = attribute.get_local_value(entity) + if isinstance( + local_value, + ( + ftrack_api.entity.base.Entity, + ftrack_api.collection.Collection, + ftrack_api.collection.MappedCollectionProxy + ) + ): + logger.debug( + 'Merging local value for attribute {0}.'.format(attribute) + ) + + merged_local_value = entity.session._merge( + local_value, merged=dict() + ) + + if merged_local_value is not local_value: + with entity.session.operation_recording(False): + attribute.set_local_value(entity, merged_local_value) + + # Remote attributes. + remote_value = attribute.get_remote_value(entity) + if isinstance( + remote_value, + ( + ftrack_api.entity.base.Entity, + ftrack_api.collection.Collection, + ftrack_api.collection.MappedCollectionProxy + ) + ): + logger.debug( + 'Merging remote value for attribute {0}.'.format(attribute) + ) + + merged_remote_value = entity.session._merge( + remote_value, merged=dict() + ) + + if merged_remote_value is not remote_value: + attribute.set_remote_value(entity, merged_remote_value) + + entity._inflated.add( + attribute.name + ) + + return function( + attribute, entity + ) + + return get_value + + +class Attributes(object): + '''Collection of properties accessible by name.''' + + def __init__(self, attributes=None): + super(Attributes, self).__init__() + self._data = dict() + if attributes is not None: + for attribute in attributes: + self.add(attribute) + + def add(self, attribute): + '''Add *attribute*.''' + existing = self._data.get(attribute.name, None) + if existing: + raise ftrack_api.exception.NotUniqueError( + 'Attribute with name {0} already added as {1}' + .format(attribute.name, existing) + ) + + self._data[attribute.name] = attribute + + def remove(self, attribute): + '''Remove attribute.''' + self._data.pop(attribute.name) + + def get(self, name): + '''Return attribute by *name*. + + If no attribute matches *name* then return None. 
+ + ''' + return self._data.get(name, None) + + def keys(self): + '''Return list of attribute names.''' + return self._data.keys() + + def __contains__(self, item): + '''Return whether *item* present.''' + if not isinstance(item, Attribute): + return False + + return item.name in self._data + + def __iter__(self): + '''Return iterator over attributes.''' + return self._data.itervalues() + + def __len__(self): + '''Return count of attributes.''' + return len(self._data) + + +class Attribute(object): + '''A name and value pair persisted remotely.''' + + def __init__( + self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True, + computed=False + ): + '''Initialise attribute with *name*. + + *default_value* represents the default value for the attribute. It may + be a callable. It is not used within the attribute when providing + values, but instead exists for other parts of the system to reference. + + If *mutable* is set to False then the local value of the attribute on an + entity can only be set when both the existing local and remote values + are :attr:`ftrack_api.symbol.NOT_SET`. The exception to this is when the + target value is also :attr:`ftrack_api.symbol.NOT_SET`. + + If *computed* is set to True the value is a remote side computed value + and should not be long-term cached. 
+ + ''' + super(Attribute, self).__init__() + self._name = name + self._mutable = mutable + self._computed = computed + self.default_value = default_value + + self._local_key = 'local' + self._remote_key = 'remote' + + def __repr__(self): + '''Return representation of entity.''' + return '<{0}.{1}({2}) object at {3}>'.format( + self.__module__, + self.__class__.__name__, + self.name, + id(self) + ) + + def get_entity_storage(self, entity): + '''Return attribute storage on *entity* creating if missing.''' + storage_key = '_ftrack_attribute_storage' + storage = getattr(entity, storage_key, None) + if storage is None: + storage = collections.defaultdict( + lambda: + { + self._local_key: ftrack_api.symbol.NOT_SET, + self._remote_key: ftrack_api.symbol.NOT_SET + } + ) + setattr(entity, storage_key, storage) + + return storage + + @property + def name(self): + '''Return name.''' + return self._name + + @property + def mutable(self): + '''Return whether attribute is mutable.''' + return self._mutable + + @property + def computed(self): + '''Return whether attribute is computed.''' + return self._computed + + def get_value(self, entity): + '''Return current value for *entity*. + + If a value was set locally then return it, otherwise return last known + remote value. If no remote value yet retrieved, make a request for it + via the session and block until available. + + ''' + value = self.get_local_value(entity) + if value is not ftrack_api.symbol.NOT_SET: + return value + + value = self.get_remote_value(entity) + if value is not ftrack_api.symbol.NOT_SET: + return value + + if not entity.session.auto_populate: + return value + + self.populate_remote_value(entity) + return self.get_remote_value(entity) + + def get_local_value(self, entity): + '''Return locally set value for *entity*.''' + storage = self.get_entity_storage(entity) + return storage[self.name][self._local_key] + + def get_remote_value(self, entity): + '''Return remote value for *entity*. + + .. 
note:: + + Only return locally stored remote value, do not fetch from remote. + + ''' + storage = self.get_entity_storage(entity) + return storage[self.name][self._remote_key] + + def set_local_value(self, entity, value): + '''Set local *value* for *entity*.''' + if ( + not self.mutable + and self.is_set(entity) + and value is not ftrack_api.symbol.NOT_SET + ): + raise ftrack_api.exception.ImmutableAttributeError(self) + + old_value = self.get_local_value(entity) + + storage = self.get_entity_storage(entity) + storage[self.name][self._local_key] = value + + # Record operation. + if entity.session.record_operations: + entity.session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + entity.entity_type, + ftrack_api.inspection.primary_key(entity), + self.name, + old_value, + value + ) + ) + + def set_remote_value(self, entity, value): + '''Set remote *value*. + + .. note:: + + Only set locally stored remote value, do not persist to remote. + + ''' + storage = self.get_entity_storage(entity) + storage[self.name][self._remote_key] = value + + def populate_remote_value(self, entity): + '''Populate remote value for *entity*.''' + entity.session.populate([entity], self.name) + + def is_modified(self, entity): + '''Return whether local value set and differs from remote. + + .. note:: + + Will not fetch remote value so may report True even when values + are the same on the remote. 
+ + ''' + local_value = self.get_local_value(entity) + remote_value = self.get_remote_value(entity) + return ( + local_value is not ftrack_api.symbol.NOT_SET + and local_value != remote_value + ) + + def is_set(self, entity): + '''Return whether a value is set for *entity*.''' + return any([ + self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, + self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET + ]) + + +class ScalarAttribute(Attribute): + '''Represent a scalar value.''' + + def __init__(self, name, data_type, **kw): + '''Initialise property.''' + super(ScalarAttribute, self).__init__(name, **kw) + self.data_type = data_type + + +class ReferenceAttribute(Attribute): + '''Reference another entity.''' + + def __init__(self, name, entity_type, **kw): + '''Initialise property.''' + super(ReferenceAttribute, self).__init__(name, **kw) + self.entity_type = entity_type + + def populate_remote_value(self, entity): + '''Populate remote value for *entity*. + + As attribute references another entity, use that entity's configured + default projections to auto populate useful attributes when loading. + + ''' + reference_entity_type = entity.session.types[self.entity_type] + default_projections = reference_entity_type.default_projections + + projections = [] + if default_projections: + for projection in default_projections: + projections.append('{0}.{1}'.format(self.name, projection)) + else: + projections.append(self.name) + + entity.session.populate([entity], ', '.join(projections)) + + def is_modified(self, entity): + '''Return whether a local value has been set and differs from remote. + + .. note:: + + Will not fetch remote value so may report True even when values + are the same on the remote. 
+ + ''' + local_value = self.get_local_value(entity) + remote_value = self.get_remote_value(entity) + + if local_value is ftrack_api.symbol.NOT_SET: + return False + + if remote_value is ftrack_api.symbol.NOT_SET: + return True + + if ( + ftrack_api.inspection.identity(local_value) + != ftrack_api.inspection.identity(remote_value) + ): + return True + + return False + + + @merge_references + def get_value(self, entity): + return super(ReferenceAttribute, self).get_value( + entity + ) + +class AbstractCollectionAttribute(Attribute): + '''Base class for collection attributes.''' + + #: Collection class used by attribute. + collection_class = None + + @merge_references + def get_value(self, entity): + '''Return current value for *entity*. + + If a value was set locally then return it, otherwise return last known + remote value. If no remote value yet retrieved, make a request for it + via the session and block until available. + + .. note:: + + As value is a collection that is mutable, will transfer a remote + value into the local value on access if no local value currently + set. + + ''' + super(AbstractCollectionAttribute, self).get_value(entity) + + # Conditionally, copy remote value into local value so that it can be + # mutated without side effects. + local_value = self.get_local_value(entity) + remote_value = self.get_remote_value(entity) + if ( + local_value is ftrack_api.symbol.NOT_SET + and isinstance(remote_value, self.collection_class) + ): + try: + with entity.session.operation_recording(False): + self.set_local_value(entity, copy.copy(remote_value)) + except ftrack_api.exception.ImmutableAttributeError: + pass + + value = self.get_local_value(entity) + + # If the local value is still not set then attempt to set it with a + # suitable placeholder collection so that the caller can interact with + # the collection using its normal interface. This is required for a + # newly created entity for example. 
It *could* be done as a simple + # default value, but that would incur cost for every collection even + # when they are not modified before commit. + if value is ftrack_api.symbol.NOT_SET: + try: + with entity.session.operation_recording(False): + self.set_local_value( + entity, + # None should be treated as empty collection. + None + ) + except ftrack_api.exception.ImmutableAttributeError: + pass + + return self.get_local_value(entity) + + def set_local_value(self, entity, value): + '''Set local *value* for *entity*.''' + if value is not ftrack_api.symbol.NOT_SET: + value = self._adapt_to_collection(entity, value) + value.mutable = self.mutable + + super(AbstractCollectionAttribute, self).set_local_value(entity, value) + + def set_remote_value(self, entity, value): + '''Set remote *value*. + + .. note:: + + Only set locally stored remote value, do not persist to remote. + + ''' + if value is not ftrack_api.symbol.NOT_SET: + value = self._adapt_to_collection(entity, value) + value.mutable = False + + super(AbstractCollectionAttribute, self).set_remote_value(entity, value) + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to appropriate collection instance for *entity*. + + .. note:: + + If *value* is None then return a suitable empty collection. + + ''' + raise NotImplementedError() + + +class CollectionAttribute(AbstractCollectionAttribute): + '''Represent a collection of other entities.''' + + #: Collection class used by attribute. 
+ collection_class = ftrack_api.collection.Collection + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to a Collection instance on *entity*.''' + + if not isinstance(value, ftrack_api.collection.Collection): + + if value is None: + value = ftrack_api.collection.Collection(entity, self) + + elif isinstance(value, list): + value = ftrack_api.collection.Collection( + entity, self, data=value + ) + + else: + raise NotImplementedError( + 'Cannot convert {0!r} to collection.'.format(value) + ) + + else: + if value.attribute is not self: + raise ftrack_api.exception.AttributeError( + 'Collection already bound to a different attribute' + ) + + return value + + +class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): + '''Represent a mapped key, value collection of entities.''' + + #: Collection class used by attribute. + collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy + + def __init__( + self, name, creator, key_attribute, value_attribute, **kw + ): + '''Initialise attribute with *name*. + + *creator* should be a function that accepts a dictionary of data and + is used by the referenced collection to create new entities in the + collection. + + *key_attribute* should be the name of the attribute on an entity in + the collection that represents the value for 'key' of the dictionary. + + *value_attribute* should be the name of the attribute on an entity in + the collection that represents the value for 'value' of the dictionary. 
+ + ''' + self.creator = creator + self.key_attribute = key_attribute + self.value_attribute = value_attribute + + super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to an *entity*.''' + if not isinstance( + value, ftrack_api.collection.KeyValueMappedCollectionProxy + ): + + if value is None: + value = ftrack_api.collection.KeyValueMappedCollectionProxy( + ftrack_api.collection.Collection(entity, self), + self.creator, self.key_attribute, + self.value_attribute + ) + + elif isinstance(value, (list, ftrack_api.collection.Collection)): + + if isinstance(value, list): + value = ftrack_api.collection.Collection( + entity, self, data=value + ) + + value = ftrack_api.collection.KeyValueMappedCollectionProxy( + value, self.creator, self.key_attribute, + self.value_attribute + ) + + elif isinstance(value, collections.Mapping): + # Convert mapping. + # TODO: When backend model improves, revisit this logic. + # First get existing value and delete all references. This is + # needed because otherwise they will not be automatically + # removed server side. + # The following should not cause recursion as the internal + # values should be mapped collections already. + current_value = self.get_value(entity) + if not isinstance( + current_value, + ftrack_api.collection.KeyValueMappedCollectionProxy + ): + raise NotImplementedError( + 'Cannot adapt mapping to collection as current value ' + 'type is not a KeyValueMappedCollectionProxy.' + ) + + # Create the new collection using the existing collection as + # basis. Then update through proxy interface to ensure all + # internal operations called consistently (such as entity + # deletion for key removal). 
+ collection = ftrack_api.collection.Collection( + entity, self, data=current_value.collection[:] + ) + collection_proxy = ( + ftrack_api.collection.KeyValueMappedCollectionProxy( + collection, self.creator, + self.key_attribute, self.value_attribute + ) + ) + + # Remove expired keys from collection. + expired_keys = set(current_value.keys()) - set(value.keys()) + for key in expired_keys: + del collection_proxy[key] + + # Set new values for existing keys / add new keys. + for key, value in value.items(): + collection_proxy[key] = value + + value = collection_proxy + + else: + raise NotImplementedError( + 'Cannot convert {0!r} to collection.'.format(value) + ) + else: + if value.attribute is not self: + raise ftrack_api.exception.AttributeError( + 'Collection already bound to a different attribute.' + ) + + return value + + +class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): + '''Represent a mapped custom attribute collection of entities.''' + + #: Collection class used by attribute. + collection_class = ( + ftrack_api.collection.CustomAttributeCollectionProxy + ) + + def _adapt_to_collection(self, entity, value): + '''Adapt *value* to an *entity*.''' + if not isinstance( + value, ftrack_api.collection.CustomAttributeCollectionProxy + ): + + if value is None: + value = ftrack_api.collection.CustomAttributeCollectionProxy( + ftrack_api.collection.Collection(entity, self) + ) + + elif isinstance(value, (list, ftrack_api.collection.Collection)): + + # Why are we creating a new if it is a list? This will cause + # any merge to create a new proxy and collection. + if isinstance(value, list): + value = ftrack_api.collection.Collection( + entity, self, data=value + ) + + value = ftrack_api.collection.CustomAttributeCollectionProxy( + value + ) + + elif isinstance(value, collections.Mapping): + # Convert mapping. + # TODO: When backend model improves, revisit this logic. + # First get existing value and delete all references. 
This is + # needed because otherwise they will not be automatically + # removed server side. + # The following should not cause recursion as the internal + # values should be mapped collections already. + current_value = self.get_value(entity) + if not isinstance( + current_value, + ftrack_api.collection.CustomAttributeCollectionProxy + ): + raise NotImplementedError( + 'Cannot adapt mapping to collection as current value ' + 'type is not a MappedCollectionProxy.' + ) + + # Create the new collection using the existing collection as + # basis. Then update through proxy interface to ensure all + # internal operations called consistently (such as entity + # deletion for key removal). + collection = ftrack_api.collection.Collection( + entity, self, data=current_value.collection[:] + ) + collection_proxy = ( + ftrack_api.collection.CustomAttributeCollectionProxy( + collection + ) + ) + + # Remove expired keys from collection. + expired_keys = set(current_value.keys()) - set(value.keys()) + for key in expired_keys: + del collection_proxy[key] + + # Set new values for existing keys / add new keys. + for key, value in value.items(): + collection_proxy[key] = value + + value = collection_proxy + + else: + raise NotImplementedError( + 'Cannot convert {0!r} to collection.'.format(value) + ) + else: + if value.attribute is not self: + raise ftrack_api.exception.AttributeError( + 'Collection already bound to a different attribute.' + ) + + return value diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py new file mode 100644 index 0000000000..49456dc2d7 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py @@ -0,0 +1,579 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +'''Caching framework. 
+ +Defines a standardised :class:`Cache` interface for storing data against +specific keys. Key generation is also standardised using a :class:`KeyMaker` +interface. + +Combining a Cache and KeyMaker allows for memoisation of function calls with +respect to the arguments used by using a :class:`Memoiser`. + +As a convenience a simple :func:`memoise` decorator is included for quick +memoisation of function using a global cache and standard key maker. + +''' + +import collections +import functools +import abc +import copy +import inspect +import re +import anydbm +import contextlib +try: + import cPickle as pickle +except ImportError: # pragma: no cover + import pickle + +import ftrack_api.inspection +import ftrack_api.symbol + + +class Cache(object): + '''Cache interface. + + Derive from this to define concrete cache implementations. A cache is + centered around the concept of key:value pairings where the key is unique + across the cache. + + ''' + + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + + @abc.abstractmethod + def set(self, key, value): + '''Set *value* for *key*.''' + + @abc.abstractmethod + def remove(self, key): + '''Remove *key* and return stored value. + + Raise :exc:`KeyError` if *key* not found. + + ''' + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + raise NotImplementedError() # pragma: no cover + + def values(self): + '''Return values for current keys.''' + values = [] + for key in self.keys(): + try: + value = self.get(key) + except KeyError: + continue + else: + values.append(value) + + return values + + def clear(self, pattern=None): + '''Remove all keys matching *pattern*. + + *pattern* should be a regular expression string. + + If *pattern* is None then all keys will be removed. 
+ + ''' + if pattern is not None: + pattern = re.compile(pattern) + + for key in self.keys(): + if pattern is not None: + if not pattern.search(key): + continue + + try: + self.remove(key) + except KeyError: + pass + + +class ProxyCache(Cache): + '''Proxy another cache.''' + + def __init__(self, proxied): + '''Initialise cache with *proxied* cache instance.''' + self.proxied = proxied + super(ProxyCache, self).__init__() + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + return self.proxied.get(key) + + def set(self, key, value): + '''Set *value* for *key*.''' + return self.proxied.set(key, value) + + def remove(self, key): + '''Remove *key* and return stored value. + + Raise :exc:`KeyError` if *key* not found. + + ''' + return self.proxied.remove(key) + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + return self.proxied.keys() + + +class LayeredCache(Cache): + '''Layered cache.''' + + def __init__(self, caches): + '''Initialise cache with *caches*.''' + super(LayeredCache, self).__init__() + self.caches = caches + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + Attempt to retrieve from cache layers in turn, starting with shallowest. + If value retrieved, then also set the value in each higher level cache + up from where retrieved. + + ''' + target_caches = [] + value = ftrack_api.symbol.NOT_SET + + for cache in self.caches: + try: + value = cache.get(key) + except KeyError: + target_caches.append(cache) + continue + else: + break + + if value is ftrack_api.symbol.NOT_SET: + raise KeyError(key) + + # Set value on all higher level caches. 
+ for cache in target_caches: + cache.set(key, value) + + return value + + def set(self, key, value): + '''Set *value* for *key*.''' + for cache in self.caches: + cache.set(key, value) + + def remove(self, key): + '''Remove *key*. + + Raise :exc:`KeyError` if *key* not found in any layer. + + ''' + removed = False + for cache in self.caches: + try: + cache.remove(key) + except KeyError: + pass + else: + removed = True + + if not removed: + raise KeyError(key) + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + keys = [] + for cache in self.caches: + keys.extend(cache.keys()) + + return list(set(keys)) + + +class MemoryCache(Cache): + '''Memory based cache.''' + + def __init__(self): + '''Initialise cache.''' + self._cache = {} + super(MemoryCache, self).__init__() + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + return self._cache[key] + + def set(self, key, value): + '''Set *value* for *key*.''' + self._cache[key] = value + + def remove(self, key): + '''Remove *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + del self._cache[key] + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + return self._cache.keys() + + +class FileCache(Cache): + '''File based cache that uses :mod:`anydbm` module. + + .. note:: + + No locking of the underlying file is performed. + + ''' + + def __init__(self, path): + '''Initialise cache at *path*.''' + self.path = path + + # Initialise cache. 
+ cache = anydbm.open(self.path, 'c') + cache.close() + + super(FileCache, self).__init__() + + @contextlib.contextmanager + def _database(self): + '''Yield opened database file.''' + cache = anydbm.open(self.path, 'w') + try: + yield cache + finally: + cache.close() + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + with self._database() as cache: + return cache[key] + + def set(self, key, value): + '''Set *value* for *key*.''' + with self._database() as cache: + cache[key] = value + + def remove(self, key): + '''Remove *key*. + + Raise :exc:`KeyError` if *key* not found. + + ''' + with self._database() as cache: + del cache[key] + + def keys(self): + '''Return list of keys at this current time. + + .. warning:: + + Actual keys may differ from those returned due to timing of access. + + ''' + with self._database() as cache: + return cache.keys() + + +class SerialisedCache(ProxyCache): + '''Proxied cache that stores values as serialised data.''' + + def __init__(self, proxied, encode=None, decode=None): + '''Initialise cache with *encode* and *decode* callables. + + *proxied* is the underlying cache to use for storage. + + ''' + self.encode = encode + self.decode = decode + super(SerialisedCache, self).__init__(proxied) + + def get(self, key): + '''Return value for *key*. + + Raise :exc:`KeyError` if *key* not found. 
+ + ''' + value = super(SerialisedCache, self).get(key) + if self.decode: + value = self.decode(value) + + return value + + def set(self, key, value): + '''Set *value* for *key*.''' + if self.encode: + value = self.encode(value) + + super(SerialisedCache, self).set(key, value) + + +class KeyMaker(object): + '''Generate unique keys.''' + + __metaclass__ = abc.ABCMeta + + def __init__(self): + '''Initialise key maker.''' + super(KeyMaker, self).__init__() + self.item_separator = '' + + def key(self, *items): + '''Return key for *items*.''' + keys = [] + for item in items: + keys.append(self._key(item)) + + return self.item_separator.join(keys) + + @abc.abstractmethod + def _key(self, obj): + '''Return key for *obj*.''' + + +class StringKeyMaker(KeyMaker): + '''Generate string key.''' + + def _key(self, obj): + '''Return key for *obj*.''' + return str(obj) + + +class ObjectKeyMaker(KeyMaker): + '''Generate unique keys for objects.''' + + def __init__(self): + '''Initialise key maker.''' + super(ObjectKeyMaker, self).__init__() + self.item_separator = '\0' + self.mapping_identifier = '\1' + self.mapping_pair_separator = '\2' + self.iterable_identifier = '\3' + self.name_identifier = '\4' + + def _key(self, item): + '''Return key for *item*. + + Returned key will be a pickle like string representing the *item*. This + allows for typically non-hashable objects to be used in key generation + (such as dictionaries). + + If *item* is iterable then each item in it shall also be passed to this + method to ensure correct key generation. + + Special markers are used to distinguish handling of specific cases in + order to ensure uniqueness of key corresponds directly to *item*. + + Example:: + + >>> key_maker = ObjectKeyMaker() + >>> def add(x, y): + ... "Return sum of *x* and *y*." + ... return x + y + ... 
+ >>> key_maker.key(add, (1, 2)) + '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' + >>> key_maker.key(add, (1, 3)) + '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' + + ''' + # TODO: Consider using a more robust and comprehensive solution such as + # dill (https://github.com/uqfoundation/dill). + if isinstance(item, collections.Iterable): + if isinstance(item, basestring): + return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) + + if isinstance(item, collections.Mapping): + contents = self.item_separator.join([ + ( + self._key(key) + + self.mapping_pair_separator + + self._key(value) + ) + for key, value in sorted(item.items()) + ]) + return ( + self.mapping_identifier + + contents + + self.mapping_identifier + ) + + else: + contents = self.item_separator.join([ + self._key(item) for item in item + ]) + return ( + self.iterable_identifier + + contents + + self.iterable_identifier + ) + + elif inspect.ismethod(item): + return ''.join(( + self.name_identifier, + item.__name__, + self.item_separator, + item.im_class.__name__, + self.item_separator, + item.__module__ + )) + + elif inspect.isfunction(item) or inspect.isclass(item): + return ''.join(( + self.name_identifier, + item.__name__, + self.item_separator, + item.__module__ + )) + + elif inspect.isbuiltin(item): + return self.name_identifier + item.__name__ + + else: + return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) + + +class Memoiser(object): + '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. + + Example:: + + >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) + >>> def add(x, y): + ... "Return sum of *x* and *y*." + ... print 'Called' + ... return x + y + ... + >>> memoiser.call(add, (1, 2), {}) + Called + >>> memoiser.call(add, (1, 2), {}) + >>> memoiser.call(add, (1, 3), {}) + Called + + ''' + + def __init__(self, cache=None, key_maker=None, return_copies=True): + '''Initialise with *cache* and *key_maker* to use. 
+ + If *cache* is not specified a default :class:`MemoryCache` will be + used. Similarly, if *key_maker* is not specified a default + :class:`ObjectKeyMaker` will be used. + + If *return_copies* is True then all results returned from the cache will + be deep copies to avoid indirect mutation of cached values. + + ''' + self.cache = cache + if self.cache is None: + self.cache = MemoryCache() + + self.key_maker = key_maker + if self.key_maker is None: + self.key_maker = ObjectKeyMaker() + + self.return_copies = return_copies + super(Memoiser, self).__init__() + + def call(self, function, args=None, kw=None): + '''Call *function* with *args* and *kw* and return result. + + If *function* was previously called with exactly the same arguments + then return cached result if available. + + Store result for call in cache. + + ''' + if args is None: + args = () + + if kw is None: + kw = {} + + # Support arguments being passed as positionals or keywords. + arguments = inspect.getcallargs(function, *args, **kw) + + key = self.key_maker.key(function, arguments) + try: + value = self.cache.get(key) + + except KeyError: + value = function(*args, **kw) + self.cache.set(key, value) + + # If requested, deep copy value to return in order to avoid cached value + # being inadvertently altered by the caller. + if self.return_copies: + value = copy.deepcopy(value) + + return value + + +def memoise_decorator(memoiser): + '''Decorator to memoise function calls using *memoiser*.''' + def outer(function): + + @functools.wraps(function) + def inner(*args, **kw): + return memoiser.call(function, args, kw) + + return inner + + return outer + + +#: Default memoiser. +memoiser = Memoiser() + +#: Default memoise decorator using standard cache and key maker. 
+memoise = memoise_decorator(memoiser) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py new file mode 100644 index 0000000000..91655a7b02 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py @@ -0,0 +1,507 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import logging + +import collections +import copy + +import ftrack_api.exception +import ftrack_api.inspection +import ftrack_api.symbol +import ftrack_api.operation +import ftrack_api.cache +from ftrack_api.logging import LazyLogMessage as L + + +class Collection(collections.MutableSequence): + '''A collection of entities.''' + + def __init__(self, entity, attribute, mutable=True, data=None): + '''Initialise collection.''' + self.entity = entity + self.attribute = attribute + self._data = [] + self._identities = set() + + # Set initial dataset. + # Note: For initialisation, immutability is deferred till after initial + # population as otherwise there would be no public way to initialise an + # immutable collection. The reason self._data is not just set directly + # is to ensure other logic can be applied without special handling. + self.mutable = True + try: + if data is None: + data = [] + + with self.entity.session.operation_recording(False): + self.extend(data) + finally: + self.mutable = mutable + + def _identity_key(self, entity): + '''Return identity key for *entity*.''' + return str(ftrack_api.inspection.identity(entity)) + + def __copy__(self): + '''Return shallow copy. + + .. note:: + + To maintain expectations on usage, the shallow copy will include a + shallow copy of the underlying data store. 
+ + ''' + cls = self.__class__ + copied_instance = cls.__new__(cls) + copied_instance.__dict__.update(self.__dict__) + copied_instance._data = copy.copy(self._data) + copied_instance._identities = copy.copy(self._identities) + + return copied_instance + + def _notify(self, old_value): + '''Notify about modification.''' + # Record operation. + if self.entity.session.record_operations: + self.entity.session.recorded_operations.push( + ftrack_api.operation.UpdateEntityOperation( + self.entity.entity_type, + ftrack_api.inspection.primary_key(self.entity), + self.attribute.name, + old_value, + self + ) + ) + + def insert(self, index, item): + '''Insert *item* at *index*.''' + if not self.mutable: + raise ftrack_api.exception.ImmutableCollectionError(self) + + if item in self: + raise ftrack_api.exception.DuplicateItemInCollectionError( + item, self + ) + + old_value = copy.copy(self) + self._data.insert(index, item) + self._identities.add(self._identity_key(item)) + self._notify(old_value) + + def __contains__(self, value): + '''Return whether *value* present in collection.''' + return self._identity_key(value) in self._identities + + def __getitem__(self, index): + '''Return item at *index*.''' + return self._data[index] + + def __setitem__(self, index, item): + '''Set *item* against *index*.''' + if not self.mutable: + raise ftrack_api.exception.ImmutableCollectionError(self) + + try: + existing_index = self.index(item) + except ValueError: + pass + else: + if index != existing_index: + raise ftrack_api.exception.DuplicateItemInCollectionError( + item, self + ) + + old_value = copy.copy(self) + try: + existing_item = self._data[index] + except IndexError: + pass + else: + self._identities.remove(self._identity_key(existing_item)) + + self._data[index] = item + self._identities.add(self._identity_key(item)) + self._notify(old_value) + + def __delitem__(self, index): + '''Remove item at *index*.''' + if not self.mutable: + raise 
ftrack_api.exception.ImmutableCollectionError(self) + + old_value = copy.copy(self) + item = self._data[index] + del self._data[index] + self._identities.remove(self._identity_key(item)) + self._notify(old_value) + + def __len__(self): + '''Return count of items.''' + return len(self._data) + + def __eq__(self, other): + '''Return whether this collection is equal to *other*.''' + if not isinstance(other, Collection): + return False + + return sorted(self._identities) == sorted(other._identities) + + def __ne__(self, other): + '''Return whether this collection is not equal to *other*.''' + return not self == other + + +class MappedCollectionProxy(collections.MutableMapping): + '''Common base class for mapped collection of entities.''' + + def __init__(self, collection): + '''Initialise proxy for *collection*.''' + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self.collection = collection + super(MappedCollectionProxy, self).__init__() + + def __copy__(self): + '''Return shallow copy. + + .. note:: + + To maintain expectations on usage, the shallow copy will include a + shallow copy of the underlying collection. + + ''' + cls = self.__class__ + copied_instance = cls.__new__(cls) + copied_instance.__dict__.update(self.__dict__) + copied_instance.collection = copy.copy(self.collection) + + return copied_instance + + @property + def mutable(self): + '''Return whether collection is mutable.''' + return self.collection.mutable + + @mutable.setter + def mutable(self, value): + '''Set whether collection is mutable to *value*.''' + self.collection.mutable = value + + @property + def attribute(self): + '''Return attribute bound to.''' + return self.collection.attribute + + @attribute.setter + def attribute(self, value): + '''Set bound attribute to *value*.''' + self.collection.attribute = value + + +class KeyValueMappedCollectionProxy(MappedCollectionProxy): + '''A mapped collection of key, value entities. 
+ + Proxy a standard :class:`Collection` as a mapping where certain attributes + from the entities in the collection are mapped to key, value pairs. + + For example:: + + >>> collection = [Metadata(key='foo', value='bar'), ...] + >>> mapped = KeyValueMappedCollectionProxy( + ... collection, create_metadata, + ... key_attribute='key', value_attribute='value' + ... ) + >>> print mapped['foo'] + 'bar' + >>> mapped['bam'] = 'biz' + >>> print mapped.collection[-1] + Metadata(key='bam', value='biz') + + ''' + + def __init__( + self, collection, creator, key_attribute, value_attribute + ): + '''Initialise collection.''' + self.creator = creator + self.key_attribute = key_attribute + self.value_attribute = value_attribute + super(KeyValueMappedCollectionProxy, self).__init__(collection) + + def _get_entity_by_key(self, key): + '''Return entity instance with matching *key* from collection.''' + for entity in self.collection: + if entity[self.key_attribute] == key: + return entity + + raise KeyError(key) + + def __getitem__(self, key): + '''Return value for *key*.''' + entity = self._get_entity_by_key(key) + return entity[self.value_attribute] + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + try: + entity = self._get_entity_by_key(key) + except KeyError: + data = { + self.key_attribute: key, + self.value_attribute: value + } + entity = self.creator(self, data) + + if ( + ftrack_api.inspection.state(entity) is + ftrack_api.symbol.CREATED + ): + # Persisting this entity will be handled here, record the + # operation. + self.collection.append(entity) + + else: + # The entity is created and persisted separately by the + # creator. Do not record this operation. + with self.collection.entity.session.operation_recording(False): + # Do not record this operation since it will trigger + # redudant and potentially failing operations. 
+ self.collection.append(entity) + + else: + entity[self.value_attribute] = value + + def __delitem__(self, key): + '''Remove and delete *key*. + + .. note:: + + The associated entity will be deleted as well. + + ''' + for index, entity in enumerate(self.collection): + if entity[self.key_attribute] == key: + break + else: + raise KeyError(key) + + del self.collection[index] + entity.session.delete(entity) + + def __iter__(self): + '''Iterate over all keys.''' + keys = set() + for entity in self.collection: + keys.add(entity[self.key_attribute]) + + return iter(keys) + + def __len__(self): + '''Return count of keys.''' + keys = set() + for entity in self.collection: + keys.add(entity[self.key_attribute]) + + return len(keys) + + +class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): + '''Generate key for session.''' + + def _key(self, obj): + '''Return key for *obj*.''' + if isinstance(obj, dict): + session = obj.get('session') + if session is not None: + # Key by session only. + return str(id(session)) + + return str(obj) + + +#: Memoiser for use with callables that should be called once per session. +memoise_session = ftrack_api.cache.memoise_decorator( + ftrack_api.cache.Memoiser( + key_maker=PerSessionDefaultKeyMaker(), return_copies=False + ) +) + + +@memoise_session +def _get_custom_attribute_configurations(session): + '''Return list of custom attribute configurations. + + The configuration objects will have key, project_id, id and object_type_id + populated. 
+ + ''' + return session.query( + 'select key, project_id, id, object_type_id, entity_type from ' + 'CustomAttributeConfiguration' + ).all() + + +class CustomAttributeCollectionProxy(MappedCollectionProxy): + '''A mapped collection of custom attribute value entities.''' + + def __init__( + self, collection + ): + '''Initialise collection.''' + self.key_attribute = 'configuration_id' + self.value_attribute = 'value' + super(CustomAttributeCollectionProxy, self).__init__(collection) + + def _get_entity_configurations(self): + '''Return all configurations for current collection entity.''' + entity = self.collection.entity + entity_type = None + project_id = None + object_type_id = None + + if 'object_type_id' in entity.keys(): + project_id = entity['project_id'] + entity_type = 'task' + object_type_id = entity['object_type_id'] + + if entity.entity_type == 'AssetVersion': + project_id = entity['asset']['parent']['project_id'] + entity_type = 'assetversion' + + if entity.entity_type == 'Asset': + project_id = entity['parent']['project_id'] + entity_type = 'asset' + + if entity.entity_type == 'Project': + project_id = entity['id'] + entity_type = 'show' + + if entity.entity_type == 'User': + entity_type = 'user' + + if entity_type is None: + raise ValueError( + 'Entity {!r} not supported.'.format(entity) + ) + + configurations = [] + for configuration in _get_custom_attribute_configurations( + entity.session + ): + if ( + configuration['entity_type'] == entity_type and + configuration['project_id'] in (project_id, None) and + configuration['object_type_id'] == object_type_id + ): + configurations.append(configuration) + + # Return with global configurations at the end of the list. This is done + # so that global conigurations are shadowed by project specific if the + # configurations list is looped when looking for a matching `key`. 
+ return sorted( + configurations, key=lambda item: item['project_id'] is None + ) + + def _get_keys(self): + '''Return a list of all keys.''' + keys = [] + for configuration in self._get_entity_configurations(): + keys.append(configuration['key']) + + return keys + + def _get_entity_by_key(self, key): + '''Return entity instance with matching *key* from collection.''' + configuration_id = self.get_configuration_id_from_key(key) + for entity in self.collection: + if entity[self.key_attribute] == configuration_id: + return entity + + return None + + def get_configuration_id_from_key(self, key): + '''Return id of configuration with matching *key*. + + Raise :exc:`KeyError` if no configuration with matching *key* found. + + ''' + for configuration in self._get_entity_configurations(): + if key == configuration['key']: + return configuration['id'] + + raise KeyError(key) + + def __getitem__(self, key): + '''Return value for *key*.''' + entity = self._get_entity_by_key(key) + + if entity: + return entity[self.value_attribute] + + for configuration in self._get_entity_configurations(): + if configuration['key'] == key: + return configuration['default'] + + raise KeyError(key) + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + custom_attribute_value = self._get_entity_by_key(key) + + if custom_attribute_value: + custom_attribute_value[self.value_attribute] = value + else: + entity = self.collection.entity + session = entity.session + data = { + self.key_attribute: self.get_configuration_id_from_key(key), + self.value_attribute: value, + 'entity_id': entity['id'] + } + + # Make sure to use the currently active collection. This is + # necessary since a merge might have replaced the current one. + self.collection.entity['custom_attributes'].collection.append( + session.create('CustomAttributeValue', data) + ) + + def __delitem__(self, key): + '''Remove and delete *key*. + + .. note:: + + The associated entity will be deleted as well. 
+ + ''' + custom_attribute_value = self._get_entity_by_key(key) + + if custom_attribute_value: + index = self.collection.index(custom_attribute_value) + del self.collection[index] + + custom_attribute_value.session.delete(custom_attribute_value) + else: + self.logger.warning(L( + 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', + key, self.collection.entity + )) + + def __eq__(self, collection): + '''Return True if *collection* equals proxy collection.''' + if collection is ftrack_api.symbol.NOT_SET: + return False + + return collection.collection == self.collection + + def __iter__(self): + '''Iterate over all keys.''' + keys = self._get_keys() + return iter(keys) + + def __len__(self): + '''Return count of keys.''' + keys = self._get_keys() + return len(keys) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py new file mode 100644 index 0000000000..1802e380c0 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py @@ -0,0 +1,119 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +import os +from abc import ABCMeta, abstractmethod +import tempfile + + +class Data(object): + '''File-like object for manipulating data.''' + + __metaclass__ = ABCMeta + + def __init__(self): + '''Initialise data access.''' + self.closed = False + + @abstractmethod + def read(self, limit=None): + '''Return content from current position up to *limit*.''' + + @abstractmethod + def write(self, content): + '''Write content at current position.''' + + def flush(self): + '''Flush buffers ensuring data written.''' + + def seek(self, offset, whence=os.SEEK_SET): + '''Move internal pointer by *offset*. 
+ + The *whence* argument is optional and defaults to os.SEEK_SET or 0 + (absolute file positioning); other values are os.SEEK_CUR or 1 + (seek relative to the current position) and os.SEEK_END or 2 + (seek relative to the file's end). + + ''' + raise NotImplementedError('Seek not supported.') + + def tell(self): + '''Return current position of internal pointer.''' + raise NotImplementedError('Tell not supported.') + + def close(self): + '''Flush buffers and prevent further access.''' + self.flush() + self.closed = True + + +class FileWrapper(Data): + '''Data wrapper for Python file objects.''' + + def __init__(self, wrapped_file): + '''Initialise access to *wrapped_file*.''' + self.wrapped_file = wrapped_file + self._read_since_last_write = False + super(FileWrapper, self).__init__() + + def read(self, limit=None): + '''Return content from current position up to *limit*.''' + self._read_since_last_write = True + + if limit is None: + limit = -1 + + return self.wrapped_file.read(limit) + + def write(self, content): + '''Write content at current position.''' + if self._read_since_last_write: + # Windows requires a seek before switching from read to write. 
+ self.seek(self.tell()) + + self.wrapped_file.write(content) + self._read_since_last_write = False + + def flush(self): + '''Flush buffers ensuring data written.''' + super(FileWrapper, self).flush() + if hasattr(self.wrapped_file, 'flush'): + self.wrapped_file.flush() + + def seek(self, offset, whence=os.SEEK_SET): + '''Move internal pointer by *offset*.''' + self.wrapped_file.seek(offset, whence) + + def tell(self): + '''Return current position of internal pointer.''' + return self.wrapped_file.tell() + + def close(self): + '''Flush buffers and prevent further access.''' + if not self.closed: + super(FileWrapper, self).close() + if hasattr(self.wrapped_file, 'close'): + self.wrapped_file.close() + + +class File(FileWrapper): + '''Data wrapper accepting filepath.''' + + def __init__(self, path, mode='rb'): + '''Open file at *path* with *mode*.''' + file_object = open(path, mode) + super(File, self).__init__(file_object) + + +class String(FileWrapper): + '''Data wrapper using TemporaryFile instance.''' + + def __init__(self, content=None): + '''Initialise data with *content*.''' + super(String, self).__init__( + tempfile.TemporaryFile() + ) + + if content is not None: + self.wrapped_file.write(content) + self.wrapped_file.seek(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py new file mode 100644 index 0000000000..1d452f2828 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py new file mode 100644 index 0000000000..859d94e436 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py @@ -0,0 +1,91 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class AssetVersion(ftrack_api.entity.base.Entity): + '''Represent asset version.''' + + def create_component( + self, path, data=None, location=None + ): + '''Create a new component from *path* with additional *data* + + .. note:: + + This is a helper method. To create components manually use the + standard :meth:`Session.create` method. + + *path* can be a string representing a filesystem path to the data to + use for the component. The *path* can also be specified as a sequence + string, in which case a sequence component with child components for + each item in the sequence will be created automatically. The accepted + format for a sequence is '{head}{padding}{tail} [{ranges}]'. For + example:: + + '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' + + .. seealso:: + + `Clique documentation `_ + + *data* should be a dictionary of any additional data to construct the + component with (as passed to :meth:`Session.create`). This version is + automatically set as the component's version. + + If *location* is specified then automatically add component to that + location. + + ''' + if data is None: + data = {} + + data.pop('version_id', None) + data['version'] = self + + return self.session.create_component(path, data=data, location=location) + + def encode_media(self, media, keep_original='auto'): + '''Return a new Job that encode *media* to make it playable in browsers. + + *media* can be a path to a file or a FileComponent in the ftrack.server + location. 
+ + The job will encode *media* based on the file type and job data contains + information about encoding in the following format:: + + { + 'output': [{ + 'format': 'video/mp4', + 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' + }, { + 'format': 'image/jpeg', + 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' + }], + 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', + 'keep_original': True + } + + The output components are associated with the job via the job_components + relation. + + An image component will always be generated if possible, and will be + set as the version's thumbnail. + + The new components will automatically be associated with the version. + A server version of 3.3.32 or higher is required for this to function + properly. + + If *media* is a file path, a new source component will be created and + added to the ftrack server location and a call to :meth:`commit` will be + issued. If *media* is a FileComponent, it will be assumed to be in + available in the ftrack.server location. + + If *keep_original* is not set, the original media will be kept if it + is a FileComponent, and deleted if it is a file path. You can specify + True or False to change this behavior. 
+ ''' + return self.session.encode_media( + media, version_id=self['id'], keep_original=keep_original + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py new file mode 100644 index 0000000000..f5a1a3cec3 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py @@ -0,0 +1,402 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import abc +import collections +import logging + +import ftrack_api.symbol +import ftrack_api.attribute +import ftrack_api.inspection +import ftrack_api.exception +import ftrack_api.operation +from ftrack_api.logging import LazyLogMessage as L + + +class DynamicEntityTypeMetaclass(abc.ABCMeta): + '''Custom metaclass to customise representation of dynamic classes. + + .. note:: + + Derive from same metaclass as derived bases to avoid conflicts. + + ''' + def __repr__(self): + '''Return representation of class.''' + return ''.format(self.__name__) + + +class Entity(collections.MutableMapping): + '''Base class for all entities.''' + + __metaclass__ = DynamicEntityTypeMetaclass + + entity_type = 'Entity' + attributes = None + primary_key_attributes = None + default_projections = None + + def __init__(self, session, data=None, reconstructing=False): + '''Initialise entity. + + *session* is an instance of :class:`ftrack_api.session.Session` that + this entity instance is bound to. + + *data* is a mapping of key, value pairs to apply as initial attribute + values. + + *reconstructing* indicates whether this entity is being reconstructed, + such as from a query, and therefore should not have any special creation + logic applied, such as initialising defaults for missing data. 
+ + ''' + super(Entity, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self.session = session + self._inflated = set() + + if data is None: + data = {} + + self.logger.debug(L( + '{0} entity from {1!r}.', + ('Reconstructing' if reconstructing else 'Constructing'), data + )) + + self._ignore_data_keys = ['__entity_type__'] + if not reconstructing: + self._construct(data) + else: + self._reconstruct(data) + + def _construct(self, data): + '''Construct from *data*.''' + # Suspend operation recording so that all modifications can be applied + # in single create operation. In addition, recording a modification + # operation requires a primary key which may not be available yet. + + relational_attributes = dict() + + with self.session.operation_recording(False): + # Set defaults for any unset local attributes. + for attribute in self.__class__.attributes: + if attribute.name not in data: + default_value = attribute.default_value + if callable(default_value): + default_value = default_value(self) + + attribute.set_local_value(self, default_value) + + + # Data represents locally set values. + for key, value in data.items(): + if key in self._ignore_data_keys: + continue + + attribute = self.__class__.attributes.get(key) + if attribute is None: + self.logger.debug(L( + 'Cannot populate {0!r} attribute as no such ' + 'attribute found on entity {1!r}.', key, self + )) + continue + + if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): + relational_attributes.setdefault( + attribute, value + ) + + else: + attribute.set_local_value(self, value) + + # Record create operation. + # Note: As this operation is recorded *before* any Session.merge takes + # place there is the possibility that the operation will hold references + # to outdated data in entity_data. However, this would be unusual in + # that it would mean the same new entity was created twice and only one + # altered. 
Conversely, if this operation were recorded *after* + # Session.merge took place, any cache would not be able to determine + # the status of the entity, which could be important if the cache should + # not store newly created entities that have not yet been persisted. Out + # of these two 'evils' this approach is deemed the lesser at this time. + # A third, more involved, approach to satisfy both might be to record + # the operation with a PENDING entity_data value and then update with + # merged values post merge. + if self.session.record_operations: + entity_data = {} + + # Lower level API used here to avoid including any empty + # collections that are automatically generated on access. + for attribute in self.attributes: + value = attribute.get_local_value(self) + if value is not ftrack_api.symbol.NOT_SET: + entity_data[attribute.name] = value + + self.session.recorded_operations.push( + ftrack_api.operation.CreateEntityOperation( + self.entity_type, + ftrack_api.inspection.primary_key(self), + entity_data + ) + ) + + for attribute, value in relational_attributes.items(): + # Finally we set values for "relational" attributes, we need + # to do this at the end in order to get the create operations + # in the correct order as the newly created attributes might + # contain references to the newly created entity. + + attribute.set_local_value( + self, value + ) + + def _reconstruct(self, data): + '''Reconstruct from *data*.''' + # Data represents remote values. 
+ for key, value in data.items(): + if key in self._ignore_data_keys: + continue + + attribute = self.__class__.attributes.get(key) + if attribute is None: + self.logger.debug(L( + 'Cannot populate {0!r} attribute as no such attribute ' + 'found on entity {1!r}.', key, self + )) + continue + + attribute.set_remote_value(self, value) + + def __repr__(self): + '''Return representation of instance.''' + return ''.format( + self.__class__.__name__, id(self) + ) + + def __str__(self): + '''Return string representation of instance.''' + with self.session.auto_populating(False): + primary_key = ['Unknown'] + try: + primary_key = ftrack_api.inspection.primary_key(self).values() + except KeyError: + pass + + return '<{0}({1})>'.format( + self.__class__.__name__, ', '.join(primary_key) + ) + + def __hash__(self): + '''Return hash representing instance.''' + return hash(str(ftrack_api.inspection.identity(self))) + + def __eq__(self, other): + '''Return whether *other* is equal to this instance. + + .. note:: + + Equality is determined by both instances having the same identity. + Values of attributes are not considered. + + ''' + try: + return ( + ftrack_api.inspection.identity(other) + == ftrack_api.inspection.identity(self) + ) + except (AttributeError, KeyError): + return False + + def __getitem__(self, key): + '''Return attribute value for *key*.''' + attribute = self.__class__.attributes.get(key) + if attribute is None: + raise KeyError(key) + + return attribute.get_value(self) + + def __setitem__(self, key, value): + '''Set attribute *value* for *key*.''' + attribute = self.__class__.attributes.get(key) + if attribute is None: + raise KeyError(key) + + attribute.set_local_value(self, value) + + def __delitem__(self, key): + '''Clear attribute value for *key*. + + .. note:: + + Will not remove the attribute, but instead clear any local value + and revert to the last known server value. 
+ + ''' + attribute = self.__class__.attributes.get(key) + attribute.set_local_value(self, ftrack_api.symbol.NOT_SET) + + def __iter__(self): + '''Iterate over all attributes keys.''' + for attribute in self.__class__.attributes: + yield attribute.name + + def __len__(self): + '''Return count of attributes.''' + return len(self.__class__.attributes) + + def values(self): + '''Return list of values.''' + if self.session.auto_populate: + self._populate_unset_scalar_attributes() + + return super(Entity, self).values() + + def items(self): + '''Return list of tuples of (key, value) pairs. + + .. note:: + + Will fetch all values from the server if not already fetched or set + locally. + + ''' + if self.session.auto_populate: + self._populate_unset_scalar_attributes() + + return super(Entity, self).items() + + def clear(self): + '''Reset all locally modified attribute values.''' + for attribute in self: + del self[attribute] + + def merge(self, entity, merged=None): + '''Merge *entity* attribute values and other data into this entity. + + Only merge values from *entity* that are not + :attr:`ftrack_api.symbol.NOT_SET`. + + Return a list of changes made with each change being a mapping with + the keys: + + * type - Either 'remote_attribute', 'local_attribute' or 'property'. + * name - The name of the attribute / property modified. + * old_value - The previous value. + * new_value - The new merged value. + + ''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' + changes = [] + + # Attributes. + + # Prioritise by type so that scalar values are set first. This should + # guarantee that the attributes making up the identity of the entity + # are merged before merging any collections that may have references to + # this entity. 
+ attributes = collections.deque() + for attribute in entity.attributes: + if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): + attributes.appendleft(attribute) + else: + attributes.append(attribute) + + for other_attribute in attributes: + attribute = self.attributes.get(other_attribute.name) + + # Local attributes. + other_local_value = other_attribute.get_local_value(entity) + if other_local_value is not ftrack_api.symbol.NOT_SET: + local_value = attribute.get_local_value(self) + if local_value != other_local_value: + merged_local_value = self.session.merge( + other_local_value, merged=merged + ) + + attribute.set_local_value(self, merged_local_value) + changes.append({ + 'type': 'local_attribute', + 'name': attribute.name, + 'old_value': local_value, + 'new_value': merged_local_value + }) + log_debug and self.logger.debug( + log_message.format(**changes[-1]) + ) + + # Remote attributes. + other_remote_value = other_attribute.get_remote_value(entity) + if other_remote_value is not ftrack_api.symbol.NOT_SET: + remote_value = attribute.get_remote_value(self) + if remote_value != other_remote_value: + merged_remote_value = self.session.merge( + other_remote_value, merged=merged + ) + + attribute.set_remote_value( + self, merged_remote_value + ) + + changes.append({ + 'type': 'remote_attribute', + 'name': attribute.name, + 'old_value': remote_value, + 'new_value': merged_remote_value + }) + + log_debug and self.logger.debug( + log_message.format(**changes[-1]) + ) + + # We need to handle collections separately since + # they may store a local copy of the remote attribute + # even though it may not be modified. + if not isinstance( + attribute, ftrack_api.attribute.AbstractCollectionAttribute + ): + continue + + local_value = attribute.get_local_value( + self + ) + + # Populated but not modified, update it. 
+ if ( + local_value is not ftrack_api.symbol.NOT_SET and + local_value == remote_value + ): + attribute.set_local_value( + self, merged_remote_value + ) + changes.append({ + 'type': 'local_attribute', + 'name': attribute.name, + 'old_value': local_value, + 'new_value': merged_remote_value + }) + + log_debug and self.logger.debug( + log_message.format(**changes[-1]) + ) + + return changes + + def _populate_unset_scalar_attributes(self): + '''Populate all unset scalar attributes in one query.''' + projections = [] + for attribute in self.attributes: + if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): + if attribute.get_remote_value(self) is ftrack_api.symbol.NOT_SET: + projections.append(attribute.name) + + if projections: + self.session.populate([self], ', '.join(projections)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py new file mode 100644 index 0000000000..9d59c4c051 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py @@ -0,0 +1,74 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class Component(ftrack_api.entity.base.Entity): + '''Represent a component.''' + + def get_availability(self, locations=None): + '''Return availability in *locations*. + + If *locations* is None, all known locations will be checked. + + Return a dictionary of {location_id:percentage_availability} + + ''' + return self.session.get_component_availability( + self, locations=locations + ) + + +class CreateThumbnailMixin(object): + '''Mixin to add create_thumbnail method on entity class.''' + + def create_thumbnail(self, path, data=None): + '''Set entity thumbnail from *path*. 
+ + Creates a thumbnail component using in the ftrack.server location + :meth:`Session.create_component + ` The thumbnail component + will be created using *data* if specified. If no component name is + given, `thumbnail` will be used. + + The file is expected to be of an appropriate size and valid file + type. + + .. note:: + + A :meth:`Session.commit` will be + automatically issued. + + ''' + if data is None: + data = {} + if not data.get('name'): + data['name'] = 'thumbnail' + + thumbnail_component = self.session.create_component( + path, data, location=None + ) + + origin_location = self.session.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + server_location = self.session.get( + 'Location', ftrack_api.symbol.SERVER_LOCATION_ID + ) + server_location.add_component(thumbnail_component, [origin_location]) + + # TODO: This commit can be avoided by reordering the operations in + # this method so that the component is transferred to ftrack.server + # after the thumbnail has been set. + # + # There is currently a bug in the API backend, causing the operations + # to *some* times be ordered wrongly, where the update occurs before + # the component has been created, causing an integrity error. + # + # Once this issue has been resolved, this commit can be removed and + # and the update placed between component creation and registration. 
+ self['thumbnail_id'] = thumbnail_component['id'] + self.session.commit() + + return thumbnail_component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py new file mode 100644 index 0000000000..e925b70f5a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py @@ -0,0 +1,435 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import logging +import uuid +import functools + +import ftrack_api.attribute +import ftrack_api.entity.base +import ftrack_api.entity.location +import ftrack_api.entity.component +import ftrack_api.entity.asset_version +import ftrack_api.entity.project_schema +import ftrack_api.entity.note +import ftrack_api.entity.job +import ftrack_api.entity.user +import ftrack_api.symbol +import ftrack_api.cache +from ftrack_api.logging import LazyLogMessage as L + + +class Factory(object): + '''Entity class factory.''' + + def __init__(self): + '''Initialise factory.''' + super(Factory, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*. + + *bases* should be a list of bases to give the constructed class. If not + specified, default to :class:`ftrack_api.entity.base.Entity`. + + ''' + entity_type = schema['id'] + class_name = entity_type + + class_bases = bases + if class_bases is None: + class_bases = [ftrack_api.entity.base.Entity] + + class_namespace = dict() + + # Build attributes for class. 
+ attributes = ftrack_api.attribute.Attributes() + immutable_properties = schema.get('immutable', []) + computed_properties = schema.get('computed', []) + for name, fragment in schema.get('properties', {}).items(): + mutable = name not in immutable_properties + computed = name in computed_properties + + default = fragment.get('default', ftrack_api.symbol.NOT_SET) + if default == '{uid}': + default = lambda instance: str(uuid.uuid4()) + + data_type = fragment.get('type', ftrack_api.symbol.NOT_SET) + + if data_type is not ftrack_api.symbol.NOT_SET: + + if data_type in ( + 'string', 'boolean', 'integer', 'number', 'variable', + 'object' + ): + # Basic scalar attribute. + if data_type == 'number': + data_type = 'float' + + if data_type == 'string': + data_format = fragment.get('format') + if data_format == 'date-time': + data_type = 'datetime' + + attribute = self.create_scalar_attribute( + class_name, name, mutable, computed, default, data_type + ) + if attribute: + attributes.add(attribute) + + elif data_type == 'array': + attribute = self.create_collection_attribute( + class_name, name, mutable + ) + if attribute: + attributes.add(attribute) + + elif data_type == 'mapped_array': + reference = fragment.get('items', {}).get('$ref') + if not reference: + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that does ' + 'not define a schema reference.', class_name, name + )) + continue + + attribute = self.create_mapped_collection_attribute( + class_name, name, mutable, reference + ) + if attribute: + attributes.add(attribute) + + else: + self.logger.debug(L( + 'Skipping {0}.{1} attribute with unrecognised data ' + 'type {2}', class_name, name, data_type + )) + else: + # Reference attribute. 
+ reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET) + if reference is ftrack_api.symbol.NOT_SET: + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that does ' + 'not define a schema reference.', class_name, name + )) + continue + + attribute = self.create_reference_attribute( + class_name, name, mutable, reference + ) + if attribute: + attributes.add(attribute) + + default_projections = schema.get('default_projections', []) + + # Construct class. + class_namespace['entity_type'] = entity_type + class_namespace['attributes'] = attributes + class_namespace['primary_key_attributes'] = schema['primary_key'][:] + class_namespace['default_projections'] = default_projections + + cls = type( + str(class_name), # type doesn't accept unicode. + tuple(class_bases), + class_namespace + ) + + return cls + + def create_scalar_attribute( + self, class_name, name, mutable, computed, default, data_type + ): + '''Return appropriate scalar attribute instance.''' + return ftrack_api.attribute.ScalarAttribute( + name, data_type=data_type, default_value=default, mutable=mutable, + computed=computed + ) + + def create_reference_attribute(self, class_name, name, mutable, reference): + '''Return appropriate reference attribute instance.''' + return ftrack_api.attribute.ReferenceAttribute( + name, reference, mutable=mutable + ) + + def create_collection_attribute(self, class_name, name, mutable): + '''Return appropriate collection attribute instance.''' + return ftrack_api.attribute.CollectionAttribute( + name, mutable=mutable + ) + + def create_mapped_collection_attribute( + self, class_name, name, mutable, reference + ): + '''Return appropriate mapped collection attribute instance.''' + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that has ' + 'no implementation defined for reference {2}.', + class_name, name, reference + )) + + +class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): + '''Generate key for defaults.''' + + def _key(self, 
obj): + '''Return key for *obj*.''' + if isinstance(obj, dict): + entity = obj.get('entity') + if entity is not None: + # Key by session only. + return str(id(entity.session)) + + return str(obj) + + +#: Memoiser for use with default callables that should only be called once per +# session. +memoise_defaults = ftrack_api.cache.memoise_decorator( + ftrack_api.cache.Memoiser( + key_maker=PerSessionDefaultKeyMaker(), return_copies=False + ) +) + +#: Memoiser for use with callables that should be called once per session. +memoise_session = ftrack_api.cache.memoise_decorator( + ftrack_api.cache.Memoiser( + key_maker=PerSessionDefaultKeyMaker(), return_copies=False + ) +) + + +@memoise_session +def _get_custom_attribute_configurations(session): + '''Return list of custom attribute configurations. + + The configuration objects will have key, project_id, id and object_type_id + populated. + + ''' + return session.query( + 'select key, project_id, id, object_type_id, entity_type, ' + 'is_hierarchical from CustomAttributeConfiguration' + ).all() + + +def _get_entity_configurations(entity): + '''Return all configurations for current collection entity.''' + entity_type = None + project_id = None + object_type_id = None + + if 'object_type_id' in entity.keys(): + project_id = entity['project_id'] + entity_type = 'task' + object_type_id = entity['object_type_id'] + + if entity.entity_type == 'AssetVersion': + project_id = entity['asset']['parent']['project_id'] + entity_type = 'assetversion' + + if entity.entity_type == 'Project': + project_id = entity['id'] + entity_type = 'show' + + if entity.entity_type == 'User': + entity_type = 'user' + + if entity.entity_type == 'Asset': + entity_type = 'asset' + + if entity.entity_type in ('TypedContextList', 'AssetVersionList'): + entity_type = 'list' + + if entity_type is None: + raise ValueError( + 'Entity {!r} not supported.'.format(entity) + ) + + configurations = [] + for configuration in _get_custom_attribute_configurations( + 
entity.session + ): + if ( + configuration['entity_type'] == entity_type and + configuration['project_id'] in (project_id, None) and + configuration['object_type_id'] == object_type_id + ): + # The custom attribute configuration is for the target entity type. + configurations.append(configuration) + elif ( + entity_type in ('asset', 'assetversion', 'show', 'task') and + configuration['project_id'] in (project_id, None) and + configuration['is_hierarchical'] + ): + # The target entity type allows hierarchical attributes. + configurations.append(configuration) + + # Return with global configurations at the end of the list. This is done + # so that global configurations are shadowed by project specific if the + # configurations list is looped when looking for a matching `key`. + return sorted( + configurations, key=lambda item: item['project_id'] is None + ) + + +class StandardFactory(Factory): + '''Standard entity class factory.''' + + def create(self, schema, bases=None): + '''Create and return entity class from *schema*.''' + if not bases: + bases = [] + + extra_bases = [] + # Customise classes. + if schema['id'] == 'ProjectSchema': + extra_bases = [ftrack_api.entity.project_schema.ProjectSchema] + + elif schema['id'] == 'Location': + extra_bases = [ftrack_api.entity.location.Location] + + elif schema['id'] == 'AssetVersion': + extra_bases = [ftrack_api.entity.asset_version.AssetVersion] + + elif schema['id'].endswith('Component'): + extra_bases = [ftrack_api.entity.component.Component] + + elif schema['id'] == 'Note': + extra_bases = [ftrack_api.entity.note.Note] + + elif schema['id'] == 'Job': + extra_bases = [ftrack_api.entity.job.Job] + + elif schema['id'] == 'User': + extra_bases = [ftrack_api.entity.user.User] + + bases = extra_bases + bases + + # If bases does not contain any items, add the base entity class. + if not bases: + bases = [ftrack_api.entity.base.Entity] + + # Add mixins. 
+ if 'notes' in schema.get('properties', {}): + bases.append( + ftrack_api.entity.note.CreateNoteMixin + ) + + if 'thumbnail_id' in schema.get('properties', {}): + bases.append( + ftrack_api.entity.component.CreateThumbnailMixin + ) + + cls = super(StandardFactory, self).create(schema, bases=bases) + + return cls + + def create_mapped_collection_attribute( + self, class_name, name, mutable, reference + ): + '''Return appropriate mapped collection attribute instance.''' + if reference == 'Metadata': + + def create_metadata(proxy, data, reference): + '''Return metadata for *data*.''' + entity = proxy.collection.entity + session = entity.session + data.update({ + 'parent_id': entity['id'], + 'parent_type': entity.entity_type + }) + return session.create(reference, data) + + creator = functools.partial( + create_metadata, reference=reference + ) + key_attribute = 'key' + value_attribute = 'value' + + return ftrack_api.attribute.KeyValueMappedCollectionAttribute( + name, creator, key_attribute, value_attribute, mutable=mutable + ) + + elif reference == 'CustomAttributeValue': + return ( + ftrack_api.attribute.CustomAttributeCollectionAttribute( + name, mutable=mutable + ) + ) + + elif reference.endswith('CustomAttributeValue'): + def creator(proxy, data): + '''Create a custom attribute based on *proxy* and *data*. + + Raise :py:exc:`KeyError` if related entity is already persisted + to the server. The proxy represents dense custom attribute + values and should never create new custom attribute values + through the proxy if entity exists on the remote. + + If the entity is not persisted the usual + CustomAttributeValue items cannot be updated as + the related entity does not exist on remote and values not in + the proxy. Instead a CustomAttributeValue will + be reconstructed and an update operation will be recorded. 
+ + ''' + entity = proxy.collection.entity + if ( + ftrack_api.inspection.state(entity) is not + ftrack_api.symbol.CREATED + ): + raise KeyError( + 'Custom attributes must be created explicitly for the ' + 'given entity type before being set.' + ) + + configuration = None + for candidate in _get_entity_configurations(entity): + if candidate['key'] == data['key']: + configuration = candidate + break + + if configuration is None: + raise ValueError( + u'No valid custom attribute for data {0!r} was found.' + .format(data) + ) + + create_data = dict(data.items()) + create_data['configuration_id'] = configuration['id'] + create_data['entity_id'] = entity['id'] + + session = entity.session + + # Create custom attribute by reconstructing it and update the + # value. This will prevent a create operation to be sent to the + # remote, as create operations for this entity type is not + # allowed. Instead an update operation will be recorded. + value = create_data.pop('value') + item = session.create( + reference, + create_data, + reconstructing=True + ) + + # Record update operation. 
+ item['value'] = value + + return item + + key_attribute = 'key' + value_attribute = 'value' + + return ftrack_api.attribute.KeyValueMappedCollectionAttribute( + name, creator, key_attribute, value_attribute, mutable=mutable + ) + + self.logger.debug(L( + 'Skipping {0}.{1} mapped_array attribute that has no configuration ' + 'for reference {2}.', class_name, name, reference + )) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py new file mode 100644 index 0000000000..ae37922c51 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py @@ -0,0 +1,48 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class Job(ftrack_api.entity.base.Entity): + '''Represent job.''' + + def __init__(self, session, data=None, reconstructing=False): + '''Initialise entity. + + *session* is an instance of :class:`ftrack_api.session.Session` that + this entity instance is bound to. + + *data* is a mapping of key, value pairs to apply as initial attribute + values. + + To set a job `description` visible in the web interface, *data* can + contain a key called `data` which should be a JSON serialised + dictionary containing description:: + + data = { + 'status': 'running', + 'data': json.dumps(dict(description='My job description.')), + ... + } + + Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` + is set to something not equal to "api_job". + + *reconstructing* indicates whether this entity is being reconstructed, + such as from a query, and therefore should not have any special creation + logic applied, such as initialising defaults for missing data. + + ''' + + if not reconstructing: + if data.get('type') not in ('api_job', None): + raise ValueError( + 'Invalid job type "{0}". 
Must be "api_job"'.format( + data.get('type') + ) + ) + + super(Job, self).__init__( + session, data=data, reconstructing=reconstructing + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py new file mode 100644 index 0000000000..707f4fa652 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py @@ -0,0 +1,733 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import collections +import functools + +import ftrack_api.entity.base +import ftrack_api.exception +import ftrack_api.event.base +import ftrack_api.symbol +import ftrack_api.inspection +from ftrack_api.logging import LazyLogMessage as L + + +class Location(ftrack_api.entity.base.Entity): + '''Represent storage for components.''' + + def __init__(self, session, data=None, reconstructing=False): + '''Initialise entity. + + *session* is an instance of :class:`ftrack_api.session.Session` that + this entity instance is bound to. + + *data* is a mapping of key, value pairs to apply as initial attribute + values. + + *reconstructing* indicates whether this entity is being reconstructed, + such as from a query, and therefore should not have any special creation + logic applied, such as initialising defaults for missing data. 
+ + ''' + self.accessor = ftrack_api.symbol.NOT_SET + self.structure = ftrack_api.symbol.NOT_SET + self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET + self.priority = 95 + super(Location, self).__init__( + session, data=data, reconstructing=reconstructing + ) + + def __str__(self): + '''Return string representation of instance.''' + representation = super(Location, self).__str__() + + with self.session.auto_populating(False): + name = self['name'] + if name is not ftrack_api.symbol.NOT_SET: + representation = representation.replace( + '(', '("{0}", '.format(name) + ) + + return representation + + def add_component(self, component, source, recursive=True): + '''Add *component* to location. + + *component* should be a single component instance. + + *source* should be an instance of another location that acts as the + source. + + Raise :exc:`ftrack_api.ComponentInLocationError` if the *component* + already exists in this location. + + Raise :exc:`ftrack_api.LocationError` if managing data and the generated + target structure for the component already exists according to the + accessor. This helps prevent potential data loss by avoiding overwriting + existing data. Note that there is a race condition between the check and + the write so if another process creates data at the same target during + that period it will be overwritten. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the component registration. + + ''' + return self.add_components( + [component], sources=source, recursive=recursive + ) + + def add_components(self, components, sources, recursive=True, _depth=0): + '''Add *components* to location. + + *components* should be a list of component instances. + + *sources* may be either a single source or a list of sources. If a list + then each corresponding index in *sources* will be used for each + *component*. A source should be an instance of another location. 
+ + Raise :exc:`ftrack_api.exception.ComponentInLocationError` if any + component in *components* already exists in this location. In this case, + no changes will be made and no data transferred. + + Raise :exc:`ftrack_api.exception.LocationError` if managing data and the + generated target structure for the component already exists according to + the accessor. This helps prevent potential data loss by avoiding + overwriting existing data. Note that there is a race condition between + the check and the write so if another process creates data at the same + target during that period it will be overwritten. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the components registration. + + .. important:: + + If this location manages data then the *components* data is first + transferred to the target prescribed by the structure plugin, using + the configured accessor. If any component fails to transfer then + :exc:`ftrack_api.exception.LocationError` is raised and none of the + components are registered with the database. In this case it is left + up to the caller to decide and act on manually cleaning up any + transferred data using the 'transferred' detail in the raised error. + + Likewise, after transfer, all components are registered with the + database in a batch call. If any component causes an error then all + components will remain unregistered and + :exc:`ftrack_api.exception.LocationError` will be raised detailing + issues and any transferred data under the 'transferred' detail key. + + ''' + if ( + isinstance(sources, basestring) + or not isinstance(sources, collections.Sequence) + ): + sources = [sources] + + sources_count = len(sources) + if sources_count not in (1, len(components)): + raise ValueError( + 'sources must be either a single source or a sequence of ' + 'sources with indexes corresponding to passed components.' 
+ ) + + if not self.structure: + raise ftrack_api.exception.LocationError( + 'No structure defined for location {location}.', + details=dict(location=self) + ) + + if not components: + # Optimisation: Return early when no components to process, such as + # when called recursively on an empty sequence component. + return + + indent = ' ' * (_depth + 1) + + # Check that components not already added to location. + existing_components = [] + try: + self.get_resource_identifiers(components) + + except ftrack_api.exception.ComponentNotInLocationError as error: + missing_component_ids = [ + missing_component['id'] + for missing_component in error.details['components'] + ] + for component in components: + if component['id'] not in missing_component_ids: + existing_components.append(component) + + else: + existing_components.extend(components) + + if existing_components: + # Some of the components already present in location. + raise ftrack_api.exception.ComponentInLocationError( + existing_components, self + ) + + # Attempt to transfer each component's data to this location. + transferred = [] + + for index, component in enumerate(components): + try: + # Determine appropriate source. + if sources_count == 1: + source = sources[0] + else: + source = sources[index] + + # Add members first for container components. + is_container = 'members' in component.keys() + if is_container and recursive: + self.add_components( + component['members'], source, recursive=recursive, + _depth=(_depth + 1) + ) + + # Add component to this location. + context = self._get_context(component, source) + resource_identifier = self.structure.get_resource_identifier( + component, context + ) + + # Manage data transfer. 
+ self._add_data(component, resource_identifier, source) + + except Exception as error: + raise ftrack_api.exception.LocationError( + 'Failed to transfer component {component} data to location ' + '{location} due to error:\n{indent}{error}\n{indent}' + 'Transferred component data that may require cleanup: ' + '{transferred}', + details=dict( + indent=indent, + component=component, + location=self, + error=error, + transferred=transferred + ) + ) + + else: + transferred.append((component, resource_identifier)) + + # Register all successfully transferred components. + components_to_register = [] + component_resource_identifiers = [] + + try: + for component, resource_identifier in transferred: + if self.resource_identifier_transformer: + # Optionally encode resource identifier before storing. + resource_identifier = ( + self.resource_identifier_transformer.encode( + resource_identifier, + context={'component': component} + ) + ) + + components_to_register.append(component) + component_resource_identifiers.append(resource_identifier) + + # Store component in location information. + self._register_components_in_location( + components, component_resource_identifiers + ) + + except Exception as error: + raise ftrack_api.exception.LocationError( + 'Failed to register components with location {location} due to ' + 'error:\n{indent}{error}\n{indent}Transferred component data ' + 'that may require cleanup: {transferred}', + details=dict( + indent=indent, + location=self, + error=error, + transferred=transferred + ) + ) + + # Publish events. 
+ for component in components_to_register: + + component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + location_id = ftrack_api.inspection.primary_key(self).values()[0] + + self.session.event_hub.publish( + ftrack_api.event.base.Event( + topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, + data=dict( + component_id=component_id, + location_id=location_id + ), + ), + on_error='ignore' + ) + + def _get_context(self, component, source): + '''Return context for *component* and *source*.''' + context = {} + if source: + try: + source_resource_identifier = source.get_resource_identifier( + component + ) + except ftrack_api.exception.ComponentNotInLocationError: + pass + else: + context.update(dict( + source_resource_identifier=source_resource_identifier + )) + + return context + + def _add_data(self, component, resource_identifier, source): + '''Manage transfer of *component* data from *source*. + + *resource_identifier* specifies the identifier to use with this + locations accessor. + + ''' + self.logger.debug(L( + 'Adding data for component {0!r} from source {1!r} to location ' + '{2!r} using resource identifier {3!r}.', + component, resource_identifier, source, self + )) + + # Read data from source and write to this location. + if not source.accessor: + raise ftrack_api.exception.LocationError( + 'No accessor defined for source location {location}.', + details=dict(location=source) + ) + + if not self.accessor: + raise ftrack_api.exception.LocationError( + 'No accessor defined for target location {location}.', + details=dict(location=self) + ) + + is_container = 'members' in component.keys() + if is_container: + # TODO: Improve this check. Possibly introduce an inspection + # such as ftrack_api.inspection.is_sequence_component. + if component.entity_type != 'SequenceComponent': + self.accessor.make_container(resource_identifier) + + else: + # Try to make container of component. 
+ try: + container = self.accessor.get_container( + resource_identifier + ) + + except ftrack_api.exception.AccessorParentResourceNotFoundError: + # Container could not be retrieved from + # resource_identifier. Assume that there is no need to + # make the container. + pass + + else: + # No need for existence check as make_container does not + # recreate existing containers. + self.accessor.make_container(container) + + if self.accessor.exists(resource_identifier): + # Note: There is a race condition here in that the + # data may be added externally between the check for + # existence and the actual write which would still + # result in potential data loss. However, there is no + # good cross platform, cross accessor solution for this + # at present. + raise ftrack_api.exception.LocationError( + 'Cannot add component as data already exists and ' + 'overwriting could result in data loss. Computed ' + 'target resource identifier was: {0}' + .format(resource_identifier) + ) + + # Read and write data. + source_data = source.accessor.open( + source.get_resource_identifier(component), 'rb' + ) + target_data = self.accessor.open(resource_identifier, 'wb') + + # Read/write data in chunks to avoid reading all into memory at the + # same time. + chunked_read = functools.partial( + source_data.read, ftrack_api.symbol.CHUNK_SIZE + ) + for chunk in iter(chunked_read, ''): + target_data.write(chunk) + + target_data.close() + source_data.close() + + def _register_component_in_location(self, component, resource_identifier): + '''Register *component* in location against *resource_identifier*.''' + return self._register_components_in_location( + [component], [resource_identifier] + ) + + def _register_components_in_location( + self, components, resource_identifiers + ): + '''Register *components* in location against *resource_identifiers*. + + Indices of *components* and *resource_identifiers* should align. 
+ + ''' + for component, resource_identifier in zip( + components, resource_identifiers + ): + self.session.create( + 'ComponentLocation', data=dict( + component=component, + location=self, + resource_identifier=resource_identifier + ) + ) + + self.session.commit() + + def remove_component(self, component, recursive=True): + '''Remove *component* from location. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the component deregistration. + + ''' + return self.remove_components([component], recursive=recursive) + + def remove_components(self, components, recursive=True): + '''Remove *components* from location. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the components deregistration. + + ''' + for component in components: + # Check component is in this location + self.get_resource_identifier(component) + + # Remove members first for container components. + is_container = 'members' in component.keys() + if is_container and recursive: + self.remove_components( + component['members'], recursive=recursive + ) + + # Remove data. + self._remove_data(component) + + # Remove metadata. + self._deregister_component_in_location(component) + + # Emit event. 
+ component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + location_id = ftrack_api.inspection.primary_key(self).values()[0] + self.session.event_hub.publish( + ftrack_api.event.base.Event( + topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, + data=dict( + component_id=component_id, + location_id=location_id + ) + ), + on_error='ignore' + ) + + def _remove_data(self, component): + '''Remove data associated with *component*.''' + if not self.accessor: + raise ftrack_api.exception.LocationError( + 'No accessor defined for location {location}.', + details=dict(location=self) + ) + + try: + self.accessor.remove( + self.get_resource_identifier(component) + ) + except ftrack_api.exception.AccessorResourceNotFoundError: + # If accessor does not support detecting sequence paths then an + # AccessorResourceNotFoundError is raised. For now, if the + # component type is 'SequenceComponent' assume success. + if not component.entity_type == 'SequenceComponent': + raise + + def _deregister_component_in_location(self, component): + '''Deregister *component* from location.''' + component_id = ftrack_api.inspection.primary_key(component).values()[0] + location_id = ftrack_api.inspection.primary_key(self).values()[0] + + # TODO: Use session.get for optimisation. + component_location = self.session.query( + 'ComponentLocation where component_id is {0} and location_id is ' + '{1}'.format(component_id, location_id) + )[0] + + self.session.delete(component_location) + + # TODO: Should auto-commit here be optional? + self.session.commit() + + def get_component_availability(self, component): + '''Return availability of *component* in this location as a float.''' + return self.session.get_component_availability( + component, locations=[self] + )[self['id']] + + def get_component_availabilities(self, components): + '''Return availabilities of *components* in this location. + + Return list of float values corresponding to each component. 
+ + ''' + return [ + availability[self['id']] for availability in + self.session.get_component_availabilities( + components, locations=[self] + ) + ] + + def get_resource_identifier(self, component): + '''Return resource identifier for *component*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the + component is not present in this location. + + ''' + return self.get_resource_identifiers([component])[0] + + def get_resource_identifiers(self, components): + '''Return resource identifiers for *components*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any + of the components are not present in this location. + + ''' + resource_identifiers = self._get_resource_identifiers(components) + + # Optionally decode resource identifier. + if self.resource_identifier_transformer: + for index, resource_identifier in enumerate(resource_identifiers): + resource_identifiers[index] = ( + self.resource_identifier_transformer.decode( + resource_identifier, + context={'component': components[index]} + ) + ) + + return resource_identifiers + + def _get_resource_identifiers(self, components): + '''Return resource identifiers for *components*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any + of the components are not present in this location. 
+ + ''' + component_ids_mapping = collections.OrderedDict() + for component in components: + component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + component_ids_mapping[component_id] = component + + component_locations = self.session.query( + 'select component_id, resource_identifier from ComponentLocation ' + 'where location_id is {0} and component_id in ({1})' + .format( + ftrack_api.inspection.primary_key(self).values()[0], + ', '.join(component_ids_mapping.keys()) + ) + ) + + resource_identifiers_map = {} + for component_location in component_locations: + resource_identifiers_map[component_location['component_id']] = ( + component_location['resource_identifier'] + ) + + resource_identifiers = [] + missing = [] + for component_id, component in component_ids_mapping.items(): + if component_id not in resource_identifiers_map: + missing.append(component) + else: + resource_identifiers.append( + resource_identifiers_map[component_id] + ) + + if missing: + raise ftrack_api.exception.ComponentNotInLocationError( + missing, self + ) + + return resource_identifiers + + def get_filesystem_path(self, component): + '''Return filesystem path for *component*.''' + return self.get_filesystem_paths([component])[0] + + def get_filesystem_paths(self, components): + '''Return filesystem paths for *components*.''' + resource_identifiers = self.get_resource_identifiers(components) + + filesystem_paths = [] + for resource_identifier in resource_identifiers: + filesystem_paths.append( + self.accessor.get_filesystem_path(resource_identifier) + ) + + return filesystem_paths + + def get_url(self, component): + '''Return url for *component*. + + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + URL could not be determined from *component* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving URL is not supported by the location's accessor. 
+ ''' + resource_identifier = self.get_resource_identifier(component) + + return self.accessor.get_url(resource_identifier) + + +class MemoryLocationMixin(object): + '''Represent storage for components. + + Unlike a standard location, only store metadata for components in this + location in memory rather than persisting to the database. + + ''' + + @property + def _cache(self): + '''Return cache.''' + try: + cache = self.__cache + except AttributeError: + cache = self.__cache = {} + + return cache + + def _register_component_in_location(self, component, resource_identifier): + '''Register *component* in location with *resource_identifier*.''' + component_id = ftrack_api.inspection.primary_key(component).values()[0] + self._cache[component_id] = resource_identifier + + def _register_components_in_location( + self, components, resource_identifiers + ): + '''Register *components* in location against *resource_identifiers*. + + Indices of *components* and *resource_identifiers* should align. + + ''' + for component, resource_identifier in zip( + components, resource_identifiers + ): + self._register_component_in_location(component, resource_identifier) + + def _deregister_component_in_location(self, component): + '''Deregister *component* in location.''' + component_id = ftrack_api.inspection.primary_key(component).values()[0] + self._cache.pop(component_id) + + def _get_resource_identifiers(self, components): + '''Return resource identifiers for *components*. + + Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any + of the referenced components are not present in this location. 
+ + ''' + resource_identifiers = [] + missing = [] + for component in components: + component_id = ftrack_api.inspection.primary_key( + component + ).values()[0] + resource_identifier = self._cache.get(component_id) + if resource_identifier is None: + missing.append(component) + else: + resource_identifiers.append(resource_identifier) + + if missing: + raise ftrack_api.exception.ComponentNotInLocationError( + missing, self + ) + + return resource_identifiers + + +class UnmanagedLocationMixin(object): + '''Location that does not manage data.''' + + def _add_data(self, component, resource_identifier, source): + '''Manage transfer of *component* data from *source*. + + *resource_identifier* specifies the identifier to use with this + locations accessor. + + Overridden to have no effect. + + ''' + return + + def _remove_data(self, component): + '''Remove data associated with *component*. + + Overridden to have no effect. + + ''' + return + + +class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): + '''Special origin location that expects sources as filepaths.''' + + def _get_context(self, component, source): + '''Return context for *component* and *source*.''' + context = {} + if source: + context.update(dict( + source_resource_identifier=source + )) + + return context + + +class ServerLocationMixin(object): + '''Location representing ftrack server. + + Adds convenience methods to location, specific to ftrack server. + ''' + def get_thumbnail_url(self, component, size=None): + '''Return thumbnail url for *component*. + + Optionally, specify *size* to constrain the downscaled image to size + x size pixels. + + Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if + URL could not be determined from *resource_identifier* or + :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if + retrieving URL is not supported by the location's accessor. 
+ ''' + resource_identifier = self.get_resource_identifier(component) + return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py new file mode 100644 index 0000000000..f5a9403728 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py @@ -0,0 +1,105 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import warnings + +import ftrack_api.entity.base + + +class Note(ftrack_api.entity.base.Entity): + '''Represent a note.''' + + def create_reply( + self, content, author + ): + '''Create a reply with *content* and *author*. + + .. note:: + + This is a helper method. To create replies manually use the + standard :meth:`Session.create` method. + + ''' + reply = self.session.create( + 'Note', { + 'author': author, + 'content': content + } + ) + + self['replies'].append(reply) + + return reply + + +class CreateNoteMixin(object): + '''Mixin to add create_note method on entity class.''' + + def create_note( + self, content, author, recipients=None, category=None, labels=None + ): + '''Create note with *content*, *author*. + + NoteLabels can be set by including *labels*. + + Note category can be set by including *category*. + + *recipients* can be specified as a list of user or group instances. + + ''' + note_label_support = 'NoteLabel' in self.session.types + + if not labels: + labels = [] + + if labels and not note_label_support: + raise ValueError( + 'NoteLabel is not supported by the current server version.' + ) + + if category and labels: + raise ValueError( + 'Both category and labels cannot be set at the same time.' 
+ ) + + if not recipients: + recipients = [] + + data = { + 'content': content, + 'author': author + } + + if category: + if note_label_support: + labels = [category] + warnings.warn( + 'category argument will be removed in an upcoming version, ' + 'please use labels instead.', + PendingDeprecationWarning + ) + else: + data['category_id'] = category['id'] + + note = self.session.create('Note', data) + + self['notes'].append(note) + + for resource in recipients: + recipient = self.session.create('Recipient', { + 'note_id': note['id'], + 'resource_id': resource['id'] + }) + + note['recipients'].append(recipient) + + for label in labels: + self.session.create( + 'NoteLabelLink', + { + 'label_id': label['id'], + 'note_id': note['id'] + } + ) + + return note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py new file mode 100644 index 0000000000..ec6db7c019 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py @@ -0,0 +1,94 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.base + + +class ProjectSchema(ftrack_api.entity.base.Entity): + '''Class representing ProjectSchema.''' + + def get_statuses(self, schema, type_id=None): + '''Return statuses for *schema* and optional *type_id*. + + *type_id* is the id of the Type for a TypedContext and can be used to + get statuses where the workflow has been overridden. + + ''' + # Task has overrides and need to be handled separately. 
+ if schema == 'Task': + if type_id is not None: + overrides = self['_overrides'] + for override in overrides: + if override['type_id'] == type_id: + return override['workflow_schema']['statuses'][:] + + return self['_task_workflow']['statuses'][:] + + elif schema == 'AssetVersion': + return self['_version_workflow']['statuses'][:] + + else: + try: + EntityTypeClass = self.session.types[schema] + except KeyError: + raise ValueError('Schema {0} does not exist.'.format(schema)) + + object_type_id_attribute = EntityTypeClass.attributes.get( + 'object_type_id' + ) + + try: + object_type_id = object_type_id_attribute.default_value + except AttributeError: + raise ValueError( + 'Schema {0} does not have statuses.'.format(schema) + ) + + for _schema in self['_schemas']: + if _schema['type_id'] == object_type_id: + result = self.session.query( + 'select task_status from SchemaStatus ' + 'where schema_id is {0}'.format(_schema['id']) + ) + return [ + schema_type['task_status'] for schema_type in result + ] + + raise ValueError( + 'No valid statuses were found for schema {0}.'.format(schema) + ) + + def get_types(self, schema): + '''Return types for *schema*.''' + # Task need to be handled separately. 
+ if schema == 'Task': + return self['_task_type_schema']['types'][:] + + else: + try: + EntityTypeClass = self.session.types[schema] + except KeyError: + raise ValueError('Schema {0} does not exist.'.format(schema)) + + object_type_id_attribute = EntityTypeClass.attributes.get( + 'object_type_id' + ) + + try: + object_type_id = object_type_id_attribute.default_value + except AttributeError: + raise ValueError( + 'Schema {0} does not have types.'.format(schema) + ) + + for _schema in self['_schemas']: + if _schema['type_id'] == object_type_id: + result = self.session.query( + 'select task_type from SchemaType ' + 'where schema_id is {0}'.format(_schema['id']) + ) + return [schema_type['task_type'] for schema_type in result] + + raise ValueError( + 'No valid types were found for schema {0}.'.format(schema) + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py new file mode 100644 index 0000000000..511ad4ba99 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py @@ -0,0 +1,123 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import arrow + +import ftrack_api.entity.base +import ftrack_api.exception + + +class User(ftrack_api.entity.base.Entity): + '''Represent a user.''' + + def start_timer(self, context=None, comment='', name=None, force=False): + '''Start a timer for *context* and return it. + + *force* can be used to automatically stop an existing timer and create a + timelog for it. If you need to get access to the created timelog, use + :func:`stop_timer` instead. + + *comment* and *name* are optional but will be set on the timer. + + .. 
note:: + + This method will automatically commit the changes and if *force* is + False then it will fail with a + :class:`ftrack_api.exception.NotUniqueError` exception if a + timer is already running. + + ''' + if force: + try: + self.stop_timer() + except ftrack_api.exception.NoResultFoundError: + self.logger.debug('Failed to stop existing timer.') + + timer = self.session.create('Timer', { + 'user': self, + 'context': context, + 'name': name, + 'comment': comment + }) + + # Commit the new timer and try to catch any error that indicate another + # timelog already exists and inform the user about it. + try: + self.session.commit() + except ftrack_api.exception.ServerError as error: + if 'IntegrityError' in str(error): + raise ftrack_api.exception.NotUniqueError( + ('Failed to start a timelog for user with id: {0}, it is ' + 'likely that a timer is already running. Either use ' + 'force=True or stop the timer first.').format(self['id']) + ) + else: + # Reraise the error as it might be something unrelated. + raise + + return timer + + def stop_timer(self): + '''Stop the current timer and return a timelog created from it. + + If a timer is not running, a + :exc:`ftrack_api.exception.NoResultFoundError` exception will be + raised. + + .. note:: + + This method will automatically commit the changes. + + ''' + timer = self.session.query( + 'Timer where user_id = "{0}"'.format(self['id']) + ).one() + + # If the server is running in the same timezone as the local + # timezone, we remove the TZ offset to get the correct duration. + is_timezone_support_enabled = self.session.server_information.get( + 'is_timezone_support_enabled', None + ) + if is_timezone_support_enabled is None: + self.logger.warning( + 'Could not identify if server has timezone support enabled. ' + 'Will assume server is running in UTC.' 
+ ) + is_timezone_support_enabled = True + + if is_timezone_support_enabled: + now = arrow.now() + else: + now = arrow.now().replace(tzinfo='utc') + + delta = now - timer['start'] + duration = delta.days * 24 * 60 * 60 + delta.seconds + + timelog = self.session.create('Timelog', { + 'user_id': timer['user_id'], + 'context_id': timer['context_id'], + 'comment': timer['comment'], + 'start': timer['start'], + 'duration': duration, + 'name': timer['name'] + }) + + self.session.delete(timer) + self.session.commit() + + return timelog + + def send_invite(self): + '''Send a invation email to the user''' + + self.session.send_user_invite( + self + ) + def reset_api_key(self): + '''Reset the users api key.''' + + response = self.session.reset_remote( + 'api_key', entity=self + ) + + return response['api_key'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py new file mode 100644 index 0000000000..1aab07ed77 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py new file mode 100644 index 0000000000..b5fd57da78 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py @@ -0,0 +1,85 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import uuid +import collections + + +class Event(collections.MutableMapping): + '''Represent a single event.''' + + def __init__(self, topic, id=None, data=None, sent=None, + source=None, target='', 
in_reply_to_event=None): + '''Initialise event. + + *topic* is the required topic for the event. It can use a dotted + notation to demarcate groupings. For example, 'ftrack.update'. + + *id* is the unique id for this event instance. It is primarily used when + replying to an event. If not supplied a default uuid based value will + be used. + + *data* refers to event specific data. It should be a mapping structure + and defaults to an empty dictionary if not supplied. + + *sent* is the timestamp the event is sent. It will be set automatically + as send time unless specified here. + + *source* is information about where the event originated. It should be + a mapping and include at least a unique id value under an 'id' key. If + not specified, senders usually populate the value automatically at + publish time. + + *target* can be an expression that targets this event. For example, + a reply event would target the event to the sender of the source event. + The expression will be tested against subscriber information only. + + *in_reply_to_event* is used when replying to an event and should contain + the unique id of the event being replied to. 
+ + ''' + super(Event, self).__init__() + self._data = dict( + id=id or uuid.uuid4().hex, + data=data or {}, + topic=topic, + sent=sent, + source=source or {}, + target=target, + in_reply_to_event=in_reply_to_event + ) + self._stopped = False + + def stop(self): + '''Stop further processing of this event.''' + self._stopped = True + + def is_stopped(self): + '''Return whether event has been stopped.''' + return self._stopped + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}>'.format( + self.__class__.__name__, str(self._data) + ) + + def __getitem__(self, key): + '''Return value for *key*.''' + return self._data[key] + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + self._data[key] = value + + def __delitem__(self, key): + '''Remove *key*.''' + del self._data[key] + + def __iter__(self): + '''Iterate over all keys.''' + return iter(self._data) + + def __len__(self): + '''Return count of keys.''' + return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py new file mode 100644 index 0000000000..0535e4fd5f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py @@ -0,0 +1,282 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from operator import eq, ne, ge, le, gt, lt + +from pyparsing import (Group, Word, CaselessKeyword, Forward, + FollowedBy, Suppress, oneOf, OneOrMore, Optional, + alphanums, quotedString, removeQuotes) + +import ftrack_api.exception + +# Do not enable packrat since it is not thread-safe and will result in parsing +# exceptions in a multi threaded environment. 
+# ParserElement.enablePackrat() + + +class Parser(object): + '''Parse string based expression into :class:`Expression` instance.''' + + def __init__(self): + '''Initialise parser.''' + self._operators = { + '=': eq, + '!=': ne, + '>=': ge, + '<=': le, + '>': gt, + '<': lt + } + self._parser = self._construct_parser() + super(Parser, self).__init__() + + def _construct_parser(self): + '''Construct and return parser.''' + field = Word(alphanums + '_.') + operator = oneOf(self._operators.keys()) + value = Word(alphanums + '-_,./*@+') + quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) + + condition = Group( + field + operator + (quoted_value | value) + )('condition') + + not_ = Optional(Suppress(CaselessKeyword('not')))('not') + and_ = Suppress(CaselessKeyword('and'))('and') + or_ = Suppress(CaselessKeyword('or'))('or') + + expression = Forward() + parenthesis = Suppress('(') + expression + Suppress(')') + previous = condition | parenthesis + + for conjunction in (not_, and_, or_): + current = Forward() + + if conjunction in (and_, or_): + conjunction_expression = ( + FollowedBy(previous + conjunction + previous) + + Group( + previous + OneOrMore(conjunction + previous) + )(conjunction.resultsName) + ) + + elif conjunction in (not_, ): + conjunction_expression = ( + FollowedBy(conjunction.expr + current) + + Group(conjunction + current)(conjunction.resultsName) + ) + + else: # pragma: no cover + raise ValueError('Unrecognised conjunction.') + + current <<= (conjunction_expression | previous) + previous = current + + expression <<= previous + return expression('expression') + + def parse(self, expression): + '''Parse string *expression* into :class:`Expression`. + + Raise :exc:`ftrack_api.exception.ParseError` if *expression* could + not be parsed. 
+ + ''' + result = None + expression = expression.strip() + if expression: + try: + result = self._parser.parseString( + expression, parseAll=True + ) + except Exception as error: + raise ftrack_api.exception.ParseError( + 'Failed to parse: {0}. {1}'.format(expression, error) + ) + + return self._process(result) + + def _process(self, result): + '''Process *result* using appropriate method. + + Method called is determined by the name of the result. + + ''' + method_name = '_process_{0}'.format(result.getName()) + method = getattr(self, method_name) + return method(result) + + def _process_expression(self, result): + '''Process *result* as expression.''' + return self._process(result[0]) + + def _process_not(self, result): + '''Process *result* as NOT operation.''' + return Not(self._process(result[0])) + + def _process_and(self, result): + '''Process *result* as AND operation.''' + return All([self._process(entry) for entry in result]) + + def _process_or(self, result): + '''Process *result* as OR operation.''' + return Any([self._process(entry) for entry in result]) + + def _process_condition(self, result): + '''Process *result* as condition.''' + key, operator, value = result + return Condition(key, self._operators[operator], value) + + def _process_quoted_value(self, result): + '''Process *result* as quoted value.''' + return result + + +class Expression(object): + '''Represent a structured expression to test candidates against.''' + + def __str__(self): + '''Return string representation.''' + return '<{0}>'.format(self.__class__.__name__) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return True + + +class All(Expression): + '''Match candidate that matches all of the specified expressions. + + .. note:: + + If no expressions are supplied then will always match. 
+ + ''' + + def __init__(self, expressions=None): + '''Initialise with list of *expressions* to match against.''' + self._expressions = expressions or [] + super(All, self).__init__() + + def __str__(self): + '''Return string representation.''' + return '<{0} [{1}]>'.format( + self.__class__.__name__, + ' '.join(map(str, self._expressions)) + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return all([ + expression.match(candidate) for expression in self._expressions + ]) + + +class Any(Expression): + '''Match candidate that matches any of the specified expressions. + + .. note:: + + If no expressions are supplied then will never match. + + ''' + + def __init__(self, expressions=None): + '''Initialise with list of *expressions* to match against.''' + self._expressions = expressions or [] + super(Any, self).__init__() + + def __str__(self): + '''Return string representation.''' + return '<{0} [{1}]>'.format( + self.__class__.__name__, + ' '.join(map(str, self._expressions)) + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return any([ + expression.match(candidate) for expression in self._expressions + ]) + + +class Not(Expression): + '''Negate expression.''' + + def __init__(self, expression): + '''Initialise with *expression* to negate.''' + self._expression = expression + super(Not, self).__init__() + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}>'.format( + self.__class__.__name__, + self._expression + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + return not self._expression.match(candidate) + + +class Condition(Expression): + '''Represent condition.''' + + def __init__(self, key, operator, value): + '''Initialise condition. + + *key* is the key to check on the data when matching. It can be a nested + key represented by dots. 
For example, 'data.eventType' would attempt to + match candidate['data']['eventType']. If the candidate is missing any + of the requested keys then the match fails immediately. + + *operator* is the operator function to use to perform the match between + the retrieved candidate value and the conditional *value*. + + If *value* is a string, it can use a wildcard '*' at the end to denote + that any values matching the substring portion are valid when matching + equality only. + + ''' + self._key = key + self._operator = operator + self._value = value + self._wildcard = '*' + self._operatorMapping = { + eq: '=', + ne: '!=', + ge: '>=', + le: '<=', + gt: '>', + lt: '<' + } + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}{2}{3}>'.format( + self.__class__.__name__, + self._key, + self._operatorMapping.get(self._operator, self._operator), + self._value + ) + + def match(self, candidate): + '''Return whether *candidate* satisfies this expression.''' + key_parts = self._key.split('.') + + try: + value = candidate + for keyPart in key_parts: + value = value[keyPart] + except (KeyError, TypeError): + return False + + if ( + self._operator is eq + and isinstance(self._value, basestring) + and self._value[-1] == self._wildcard + ): + return self._value[:-1] in value + else: + return self._operator(value, self._value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py new file mode 100644 index 0000000000..9f4ba80c6e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py @@ -0,0 +1,1091 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2013 ftrack + +from __future__ import absolute_import + +import collections +import urlparse +import threading +import Queue as queue +import logging +import time +import uuid 
+import operator +import functools +import json +import socket +import warnings + +import requests +import requests.exceptions +import websocket + +import ftrack_api.exception +import ftrack_api.event.base +import ftrack_api.event.subscriber +import ftrack_api.event.expression +from ftrack_api.logging import LazyLogMessage as L + + +SocketIoSession = collections.namedtuple('SocketIoSession', [ + 'id', + 'heartbeatTimeout', + 'supportedTransports', +]) + + +ServerDetails = collections.namedtuple('ServerDetails', [ + 'scheme', + 'hostname', + 'port', +]) + + + + +class EventHub(object): + '''Manage routing of events.''' + + _future_signature_warning = ( + 'When constructing your Session object you did not explicitly define ' + 'auto_connect_event_hub as True even though you appear to be publishing ' + 'and / or subscribing to asynchronous events. In version version 2.0 of ' + 'the ftrack-python-api the default behavior will change from True ' + 'to False. Please make sure to update your tools. You can read more at ' + 'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html' + ) + + def __init__(self, server_url, api_user, api_key): + '''Initialise hub, connecting to ftrack *server_url*. + + *api_user* is the user to authenticate as and *api_key* is the API key + to authenticate with. + + ''' + super(EventHub, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self.id = uuid.uuid4().hex + self._connection = None + + self._unique_packet_id = 0 + self._packet_callbacks = {} + self._lock = threading.RLock() + + self._wait_timeout = 4 + + self._subscribers = [] + self._reply_callbacks = {} + self._intentional_disconnect = False + + self._event_queue = queue.Queue() + self._event_namespace = 'ftrack.event' + self._expression_parser = ftrack_api.event.expression.Parser() + + # Default values for auto reconnection timeout on unintentional + # disconnection. Equates to 5 minutes. 
+ self._auto_reconnect_attempts = 30 + self._auto_reconnect_delay = 10 + + self._deprecation_warning_auto_connect = False + + # Mapping of Socket.IO codes to meaning. + self._code_name_mapping = { + '0': 'disconnect', + '1': 'connect', + '2': 'heartbeat', + '3': 'message', + '4': 'json', + '5': 'event', + '6': 'acknowledge', + '7': 'error' + } + self._code_name_mapping.update( + dict((name, code) for code, name in self._code_name_mapping.items()) + ) + + self._server_url = server_url + self._api_user = api_user + self._api_key = api_key + + # Parse server URL and store server details. + url_parse_result = urlparse.urlparse(self._server_url) + if not url_parse_result.scheme: + raise ValueError('Could not determine scheme from server url.') + + if not url_parse_result.hostname: + raise ValueError('Could not determine hostname from server url.') + + self.server = ServerDetails( + url_parse_result.scheme, + url_parse_result.hostname, + url_parse_result.port + ) + + def get_server_url(self): + '''Return URL to server.''' + return '{0}://{1}'.format( + self.server.scheme, self.get_network_location() + ) + + def get_network_location(self): + '''Return network location part of url (hostname with optional port).''' + if self.server.port: + return '{0}:{1}'.format(self.server.hostname, self.server.port) + else: + return self.server.hostname + + @property + def secure(self): + '''Return whether secure connection used.''' + return self.server.scheme == 'https' + + def connect(self): + '''Initialise connection to server. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already + connected or connection fails. + + ''' + + self._deprecation_warning_auto_connect = False + + if self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Already connected.' + ) + + # Reset flag tracking whether disconnection was intentional. + self._intentional_disconnect = False + + try: + # Connect to socket.io server using websocket transport. 
+ session = self._get_socket_io_session() + + if 'websocket' not in session.supportedTransports: + raise ValueError( + 'Server does not support websocket sessions.' + ) + + scheme = 'wss' if self.secure else 'ws' + url = '{0}://{1}/socket.io/1/websocket/{2}'.format( + scheme, self.get_network_location(), session.id + ) + + # timeout is set to 60 seconds to avoid the issue where the socket + # ends up in a bad state where it is reported as connected but the + # connection has been closed. The issue happens often when connected + # to a secure socket and the computer goes to sleep. + # More information on how the timeout works can be found here: + # https://docs.python.org/2/library/socket.html#socket.socket.setblocking + self._connection = websocket.create_connection(url, timeout=60) + + except Exception as error: + error_message = ( + 'Failed to connect to event server at {server_url} with ' + 'error: "{error}".' + ) + + error_details = { + 'error': unicode(error), + 'server_url': self.get_server_url() + } + + self.logger.debug( + L( + error_message, **error_details + ), + exc_info=1 + ) + raise ftrack_api.exception.EventHubConnectionError( + error_message, + details=error_details + ) + + # Start background processing thread. + self._processor_thread = _ProcessorThread(self) + self._processor_thread.start() + + # Subscribe to reply events if not already. Note: Only adding the + # subscriber locally as the following block will notify server of all + # existing subscribers, which would cause the server to report a + # duplicate subscriber error if EventHub.subscribe was called here. + try: + self._add_subscriber( + 'topic=ftrack.meta.reply', + self._handle_reply, + subscriber=dict( + id=self.id + ) + ) + except ftrack_api.exception.NotUniqueError: + pass + + # Now resubscribe any existing stored subscribers. This can happen when + # reconnecting automatically for example. 
+ for subscriber in self._subscribers[:]: + self._notify_server_about_subscriber(subscriber) + + @property + def connected(self): + '''Return if connected.''' + return self._connection is not None and self._connection.connected + + def disconnect(self, unsubscribe=True): + '''Disconnect from server. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not + currently connected. + + If *unsubscribe* is True then unsubscribe all current subscribers + automatically before disconnecting. + + ''' + if not self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Not currently connected.' + ) + + else: + # Set flag to indicate disconnection was intentional. + self._intentional_disconnect = True + + # Set blocking to true on socket to make sure unsubscribe events + # are emitted before closing the connection. + self._connection.sock.setblocking(1) + + # Unsubscribe all subscribers. + if unsubscribe: + for subscriber in self._subscribers[:]: + self.unsubscribe(subscriber.metadata['id']) + + # Now disconnect. + self._connection.close() + self._connection = None + + # Shutdown background processing thread. + self._processor_thread.cancel() + + # Join to it if it is not current thread to help ensure a clean + # shutdown. + if threading.current_thread() != self._processor_thread: + self._processor_thread.join(self._wait_timeout) + + def reconnect(self, attempts=10, delay=5): + '''Reconnect to server. + + Make *attempts* number of attempts with *delay* in seconds between each + attempt. + + .. note:: + + All current subscribers will be automatically resubscribed after + successful reconnection. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if fail to + reconnect. 
+ + ''' + try: + self.disconnect(unsubscribe=False) + except ftrack_api.exception.EventHubConnectionError: + pass + + for attempt in range(attempts): + self.logger.debug(L( + 'Reconnect attempt {0} of {1}', attempt, attempts + )) + + # Silence logging temporarily to avoid lots of failed connection + # related information. + try: + logging.disable(logging.CRITICAL) + + try: + self.connect() + except ftrack_api.exception.EventHubConnectionError: + time.sleep(delay) + else: + break + + finally: + logging.disable(logging.NOTSET) + + if not self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Failed to reconnect to event server at {0} after {1} attempts.' + .format(self.get_server_url(), attempts) + ) + + def wait(self, duration=None): + '''Wait for events and handle as they arrive. + + If *duration* is specified, then only process events until duration is + reached. *duration* is in seconds though float values can be used for + smaller values. + + ''' + started = time.time() + + while True: + try: + event = self._event_queue.get(timeout=0.1) + except queue.Empty: + pass + else: + self._handle(event) + + # Additional special processing of events. + if event['topic'] == 'ftrack.meta.disconnected': + break + + if duration is not None: + if (time.time() - started) > duration: + break + + def get_subscriber_by_identifier(self, identifier): + '''Return subscriber with matching *identifier*. + + Return None if no subscriber with *identifier* found. + + ''' + for subscriber in self._subscribers[:]: + if subscriber.metadata.get('id') == identifier: + return subscriber + + return None + + def subscribe(self, subscription, callback, subscriber=None, priority=100): + '''Register *callback* for *subscription*. + + A *subscription* is a string that can specify in detail which events the + callback should receive. The filtering is applied against each event + object. Nested references are supported using '.' separators. 
+ For example, 'topic=foo and data.eventType=Shot' would match the + following event:: + + + + The *callback* should accept an instance of + :class:`ftrack_api.event.base.Event` as its sole argument. + + Callbacks are called in order of *priority*. The lower the priority + number the sooner it will be called, with 0 being the first. The + default priority is 100. Note that priority only applies against other + callbacks registered with this hub and not as a global priority. + + An earlier callback can prevent processing of subsequent callbacks by + calling :meth:`Event.stop` on the passed `event` before + returning. + + .. warning:: + + Handlers block processing of other received events. For long + running callbacks it is advisable to delegate the main work to + another process or thread. + + A *callback* can be attached to *subscriber* information that details + the subscriber context. A subscriber context will be generated + automatically if not supplied. + + .. note:: + + The subscription will be stored locally, but until the server + receives notification of the subscription it is possible the + callback will not be called. + + Return subscriber identifier. + + Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with + the same identifier already exists. + + ''' + # Add subscriber locally. + subscriber = self._add_subscriber( + subscription, callback, subscriber, priority + ) + + # Notify server now if possible. + try: + self._notify_server_about_subscriber(subscriber) + except ftrack_api.exception.EventHubConnectionError: + self.logger.debug(L( + 'Failed to notify server about new subscriber {0} ' + 'as server not currently reachable.', subscriber.metadata['id'] + )) + + return subscriber.metadata['id'] + + def _add_subscriber( + self, subscription, callback, subscriber=None, priority=100 + ): + '''Add subscriber locally. + + See :meth:`subscribe` for argument descriptions. + + Return :class:`ftrack_api.event.subscriber.Subscriber` instance. 
+ + Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with + the same identifier already exists. + + ''' + if subscriber is None: + subscriber = {} + + subscriber.setdefault('id', uuid.uuid4().hex) + + # Check subscriber not already subscribed. + existing_subscriber = self.get_subscriber_by_identifier( + subscriber['id'] + ) + + if existing_subscriber is not None: + raise ftrack_api.exception.NotUniqueError( + 'Subscriber with identifier {0} already exists.' + .format(subscriber['id']) + ) + + subscriber = ftrack_api.event.subscriber.Subscriber( + subscription=subscription, + callback=callback, + metadata=subscriber, + priority=priority + ) + + self._subscribers.append(subscriber) + + return subscriber + + def _notify_server_about_subscriber(self, subscriber): + '''Notify server of new *subscriber*.''' + subscribe_event = ftrack_api.event.base.Event( + topic='ftrack.meta.subscribe', + data=dict( + subscriber=subscriber.metadata, + subscription=str(subscriber.subscription) + ) + ) + + self._publish( + subscribe_event, + callback=functools.partial(self._on_subscribed, subscriber) + ) + + def _on_subscribed(self, subscriber, response): + '''Handle acknowledgement of subscription.''' + if response.get('success') is False: + self.logger.warning(L( + 'Server failed to subscribe subscriber {0}: {1}', + subscriber.metadata['id'], response.get('message') + )) + + def unsubscribe(self, subscriber_identifier): + '''Unsubscribe subscriber with *subscriber_identifier*. + + .. note:: + + If the server is not reachable then it won't be notified of the + unsubscription. However, the subscriber will be removed locally + regardless. 
+ + ''' + subscriber = self.get_subscriber_by_identifier(subscriber_identifier) + + if subscriber is None: + raise ftrack_api.exception.NotFoundError( + 'Cannot unsubscribe missing subscriber with identifier {0}' + .format(subscriber_identifier) + ) + + self._subscribers.pop(self._subscribers.index(subscriber)) + + # Notify the server if possible. + unsubscribe_event = ftrack_api.event.base.Event( + topic='ftrack.meta.unsubscribe', + data=dict(subscriber=subscriber.metadata) + ) + + try: + self._publish( + unsubscribe_event, + callback=functools.partial(self._on_unsubscribed, subscriber) + ) + except ftrack_api.exception.EventHubConnectionError: + self.logger.debug(L( + 'Failed to notify server to unsubscribe subscriber {0} as ' + 'server not currently reachable.', subscriber.metadata['id'] + )) + + def _on_unsubscribed(self, subscriber, response): + '''Handle acknowledgement of unsubscribing *subscriber*.''' + if response.get('success') is not True: + self.logger.warning(L( + 'Server failed to unsubscribe subscriber {0}: {1}', + subscriber.metadata['id'], response.get('message') + )) + + def _prepare_event(self, event): + '''Prepare *event* for sending.''' + event['source'].setdefault('id', self.id) + event['source'].setdefault('user', { + 'username': self._api_user + }) + + def _prepare_reply_event(self, event, source_event, source=None): + '''Prepare *event* as a reply to another *source_event*. + + Modify *event*, setting appropriate values to target event correctly as + a reply. + + ''' + event['target'] = 'id={0}'.format(source_event['source']['id']) + event['in_reply_to_event'] = source_event['id'] + if source is not None: + event['source'] = source + + def publish( + self, event, synchronous=False, on_reply=None, on_error='raise' + ): + '''Publish *event*. + + If *synchronous* is specified as True then this method will wait and + return a list of results from any called callbacks. + + .. 
note:: + + Currently, if synchronous is True then only locally registered + callbacks will be called and no event will be sent to the server. + This may change in future. + + *on_reply* is an optional callable to call with any reply event that is + received in response to the published *event*. + + .. note:: + + Will not be called when *synchronous* is True. + + If *on_error* is set to 'ignore' then errors raised during publish of + event will be caught by this method and ignored. + + ''' + if self._deprecation_warning_auto_connect and not synchronous: + warnings.warn( + self._future_signature_warning, FutureWarning + ) + + try: + return self._publish( + event, synchronous=synchronous, on_reply=on_reply + ) + except Exception: + if on_error == 'ignore': + pass + else: + raise + + def publish_reply(self, source_event, data, source=None): + '''Publish a reply event to *source_event* with supplied *data*. + + If *source* is specified it will be used for the source value of the + sent event. + + ''' + reply_event = ftrack_api.event.base.Event( + 'ftrack.meta.reply', + data=data + ) + self._prepare_reply_event(reply_event, source_event, source=source) + self.publish(reply_event) + + def _publish(self, event, synchronous=False, callback=None, on_reply=None): + '''Publish *event*. + + If *synchronous* is specified as True then this method will wait and + return a list of results from any called callbacks. + + .. note:: + + Currently, if synchronous is True then only locally registered + callbacks will be called and no event will be sent to the server. + This may change in future. + + A *callback* can also be specified. This callback will be called once + the server acknowledges receipt of the sent event. A default callback + that checks for errors from the server will be used if not specified. + + *on_reply* is an optional callable to call with any reply event that is + received in response to the published *event*. 
Note that there is no + guarantee that a reply will be sent. + + Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not + currently connected. + + ''' + # Prepare event adding any relevant additional information. + self._prepare_event(event) + + if synchronous: + # Bypass emitting event to server and instead call locally + # registered handlers directly, collecting and returning results. + return self._handle(event, synchronous=synchronous) + + if not self.connected: + raise ftrack_api.exception.EventHubConnectionError( + 'Cannot publish event asynchronously as not connected to ' + 'server.' + ) + + # Use standard callback if none specified. + if callback is None: + callback = functools.partial(self._on_published, event) + + # Emit event to central server for asynchronous processing. + try: + # Register on reply callback if specified. + if on_reply is not None: + # TODO: Add cleanup process that runs after a set duration to + # garbage collect old reply callbacks and prevent dictionary + # growing too large. + self._reply_callbacks[event['id']] = on_reply + + try: + self._emit_event_packet( + self._event_namespace, event, callback=callback + ) + except ftrack_api.exception.EventHubConnectionError: + # Connection may have dropped temporarily. Wait a few moments to + # see if background thread reconnects automatically. + time.sleep(15) + + self._emit_event_packet( + self._event_namespace, event, callback=callback + ) + except: + raise + + except Exception: + # Failure to send event should not cause caller to fail. + # TODO: This behaviour is inconsistent with the failing earlier on + # lack of connection and also with the error handling parameter of + # EventHub.publish. Consider refactoring. 
+ self.logger.exception(L('Error sending event {0}.', event)) + + def _on_published(self, event, response): + '''Handle acknowledgement of published event.''' + if response.get('success', False) is False: + self.logger.error(L( + 'Server responded with error while publishing event {0}. ' + 'Error was: {1}', event, response.get('message') + )) + + def _handle(self, event, synchronous=False): + '''Handle *event*. + + If *synchronous* is True, do not send any automatic reply events. + + ''' + # Sort by priority, lower is higher. + # TODO: Use a sorted list to avoid sorting each time in order to improve + # performance. + subscribers = sorted( + self._subscribers, key=operator.attrgetter('priority') + ) + + results = [] + + target = event.get('target', None) + target_expression = None + if target: + try: + target_expression = self._expression_parser.parse(target) + except Exception: + self.logger.exception(L( + 'Cannot handle event as failed to parse event target ' + 'information: {0}', event + )) + return + + for subscriber in subscribers: + # Check if event is targeted to the subscriber. + if ( + target_expression is not None + and not target_expression.match(subscriber.metadata) + ): + continue + + # Check if subscriber interested in the event. + if not subscriber.interested_in(event): + continue + + response = None + + try: + response = subscriber.callback(event) + results.append(response) + except Exception: + self.logger.exception(L( + 'Error calling subscriber {0} for event {1}.', + subscriber, event + )) + + # Automatically publish a non None response as a reply when not in + # synchronous mode. 
+            if not synchronous:
+                if self._deprecation_warning_auto_connect:
+                    warnings.warn(
+                        self._future_signature_warning, FutureWarning
+                    )
+
+                if response is not None:
+                    try:
+                        self.publish_reply(
+                            event, data=response, source=subscriber.metadata
+                        )
+
+                    except Exception:
+                        self.logger.exception(L(
+                            'Error publishing response {0} from subscriber {1} '
+                            'for event {2}.', response, subscriber, event
+                        ))
+
+            # Check whether to continue processing topic event.
+            if event.is_stopped():
+                self.logger.debug(L(
+                    'Subscriber {0} stopped event {1}. Will not process '
+                    'subsequent subscriber callbacks for this event.',
+                    subscriber, event
+                ))
+                break
+
+        return results
+
+    def _handle_reply(self, event):
+        '''Handle reply *event*, passing it to any registered callback.'''
+        callback = self._reply_callbacks.get(event['in_reply_to_event'], None)
+        if callback is not None:
+            callback(event)
+
+    def subscription(self, subscription, callback, subscriber=None,
+                     priority=100):
+        '''Return context manager with *callback* subscribed to *subscription*.
+
+        The subscribed callback will be automatically unsubscribed on exit
+        of the context manager.
+
+        '''
+        return _SubscriptionContext(
+            self, subscription, callback, subscriber=subscriber,
+            priority=priority,
+        )
+
+    # Socket.IO interface.
+    #
+
+    def _get_socket_io_session(self):
+        '''Connect to server and retrieve session information.'''
+        socket_io_url = (
+            '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}'
+        ).format(
+            self.server.scheme,
+            self.get_network_location(),
+            self._api_user,
+            self._api_key
+        )
+        try:
+            response = requests.get(
+                socket_io_url,
+                timeout=60  # 60 seconds timeout to receive errors faster.
+ ) + except requests.exceptions.Timeout as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Timed out connecting to server: {0}.'.format(error) + ) + except requests.exceptions.SSLError as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Failed to negotiate SSL with server: {0}.'.format(error) + ) + except requests.exceptions.ConnectionError as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Failed to connect to server: {0}.'.format(error) + ) + else: + status = response.status_code + if status != 200: + raise ftrack_api.exception.EventHubConnectionError( + 'Received unexpected status code {0}.'.format(status) + ) + + # Parse result and return session information. + parts = response.text.split(':') + return SocketIoSession( + parts[0], + parts[1], + parts[3].split(',') + ) + + def _add_packet_callback(self, callback): + '''Store callback against a new unique packet ID. + + Return the unique packet ID. + + ''' + with self._lock: + self._unique_packet_id += 1 + unique_identifier = self._unique_packet_id + + self._packet_callbacks[unique_identifier] = callback + + return '{0}+'.format(unique_identifier) + + def _pop_packet_callback(self, packet_identifier): + '''Pop and return callback for *packet_identifier*.''' + return self._packet_callbacks.pop(packet_identifier) + + def _emit_event_packet(self, namespace, event, callback): + '''Send *event* packet under *namespace*.''' + data = self._encode( + dict(name=namespace, args=[event]) + ) + self._send_packet( + self._code_name_mapping['event'], data=data, callback=callback + ) + + def _acknowledge_packet(self, packet_identifier, *args): + '''Send acknowledgement of packet with *packet_identifier*.''' + packet_identifier = packet_identifier.rstrip('+') + data = str(packet_identifier) + if args: + data += '+{1}'.format(self._encode(args)) + + self._send_packet(self._code_name_mapping['acknowledge'], data=data) + + def _send_packet(self, code, data='', callback=None): + 
'''Send packet via connection.''' + path = '' + packet_identifier = ( + self._add_packet_callback(callback) if callback else '' + ) + packet_parts = (str(code), packet_identifier, path, data) + packet = ':'.join(packet_parts) + + try: + self._connection.send(packet) + self.logger.debug(L(u'Sent packet: {0}', packet)) + except socket.error as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Failed to send packet: {0}'.format(error) + ) + + def _receive_packet(self): + '''Receive and return packet via connection.''' + try: + packet = self._connection.recv() + except Exception as error: + raise ftrack_api.exception.EventHubConnectionError( + 'Error receiving packet: {0}'.format(error) + ) + + try: + parts = packet.split(':', 3) + except AttributeError: + raise ftrack_api.exception.EventHubPacketError( + 'Received invalid packet {0}'.format(packet) + ) + + code, packet_identifier, path, data = None, None, None, None + + count = len(parts) + if count == 4: + code, packet_identifier, path, data = parts + elif count == 3: + code, packet_identifier, path = parts + elif count == 1: + code = parts[0] + else: + raise ftrack_api.exception.EventHubPacketError( + 'Received invalid packet {0}'.format(packet) + ) + + self.logger.debug(L('Received packet: {0}', packet)) + return code, packet_identifier, path, data + + def _handle_packet(self, code, packet_identifier, path, data): + '''Handle packet received from server.''' + code_name = self._code_name_mapping[code] + + if code_name == 'connect': + self.logger.debug('Connected to event server.') + event = ftrack_api.event.base.Event('ftrack.meta.connected') + self._prepare_event(event) + self._event_queue.put(event) + + elif code_name == 'disconnect': + self.logger.debug('Disconnected from event server.') + if not self._intentional_disconnect: + self.logger.debug( + 'Disconnected unexpectedly. Attempting to reconnect.' 
+ ) + try: + self.reconnect( + attempts=self._auto_reconnect_attempts, + delay=self._auto_reconnect_delay + ) + except ftrack_api.exception.EventHubConnectionError: + self.logger.debug('Failed to reconnect automatically.') + else: + self.logger.debug('Reconnected successfully.') + + if not self.connected: + event = ftrack_api.event.base.Event('ftrack.meta.disconnected') + self._prepare_event(event) + self._event_queue.put(event) + + elif code_name == 'heartbeat': + # Reply with heartbeat. + self._send_packet(self._code_name_mapping['heartbeat']) + + elif code_name == 'message': + self.logger.debug(L('Message received: {0}', data)) + + elif code_name == 'event': + payload = self._decode(data) + args = payload.get('args', []) + + if len(args) == 1: + event_payload = args[0] + if isinstance(event_payload, collections.Mapping): + try: + event = ftrack_api.event.base.Event(**event_payload) + except Exception: + self.logger.exception(L( + 'Failed to convert payload into event: {0}', + event_payload + )) + return + + self._event_queue.put(event) + + elif code_name == 'acknowledge': + parts = data.split('+', 1) + acknowledged_packet_identifier = int(parts[0]) + args = [] + if len(parts) == 2: + args = self._decode(parts[1]) + + try: + callback = self._pop_packet_callback( + acknowledged_packet_identifier + ) + except KeyError: + pass + else: + callback(*args) + + elif code_name == 'error': + self.logger.error(L('Event server reported error: {0}.', data)) + + else: + self.logger.debug(L('{0}: {1}', code_name, data)) + + def _encode(self, data): + '''Return *data* encoded as JSON formatted string.''' + return json.dumps( + data, + default=self._encode_object_hook, + ensure_ascii=False + ) + + def _encode_object_hook(self, item): + '''Return *item* transformed for encoding.''' + if isinstance(item, ftrack_api.event.base.Event): + # Convert to dictionary for encoding. + item = dict(**item) + + if 'in_reply_to_event' in item: + # Convert keys to server convention. 
+ item['inReplyToEvent'] = item.pop('in_reply_to_event') + + return item + + raise TypeError('{0!r} is not JSON serializable'.format(item)) + + def _decode(self, string): + '''Return decoded JSON *string* as Python object.''' + return json.loads(string, object_hook=self._decode_object_hook) + + def _decode_object_hook(self, item): + '''Return *item* transformed.''' + if isinstance(item, collections.Mapping): + if 'inReplyToEvent' in item: + item['in_reply_to_event'] = item.pop('inReplyToEvent') + + return item + + +class _SubscriptionContext(object): + '''Context manager for a one-off subscription.''' + + def __init__(self, hub, subscription, callback, subscriber, priority): + '''Initialise context.''' + self._hub = hub + self._subscription = subscription + self._callback = callback + self._subscriber = subscriber + self._priority = priority + self._subscriberIdentifier = None + + def __enter__(self): + '''Enter context subscribing callback to topic.''' + self._subscriberIdentifier = self._hub.subscribe( + self._subscription, self._callback, subscriber=self._subscriber, + priority=self._priority + ) + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit context unsubscribing callback from topic.''' + self._hub.unsubscribe(self._subscriberIdentifier) + + +class _ProcessorThread(threading.Thread): + '''Process messages from server.''' + + daemon = True + + def __init__(self, client): + '''Initialise thread with Socket.IO *client* instance.''' + super(_ProcessorThread, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' 
+ self.__class__.__name__ + ) + self.client = client + self.done = threading.Event() + + def run(self): + '''Perform work in thread.''' + while not self.done.is_set(): + try: + code, packet_identifier, path, data = self.client._receive_packet() + self.client._handle_packet(code, packet_identifier, path, data) + + except ftrack_api.exception.EventHubPacketError as error: + self.logger.debug(L('Ignoring invalid packet: {0}', error)) + continue + + except ftrack_api.exception.EventHubConnectionError: + self.cancel() + + # Fake a disconnection event in order to trigger reconnection + # when necessary. + self.client._handle_packet('0', '', '', '') + + break + + except Exception as error: + self.logger.debug(L('Aborting processor thread: {0}', error)) + self.cancel() + break + + def cancel(self): + '''Cancel work as soon as possible.''' + self.done.set() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py new file mode 100644 index 0000000000..0d38463aaf --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py @@ -0,0 +1,27 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import ftrack_api.event.subscription + + +class Subscriber(object): + '''Represent event subscriber.''' + + def __init__(self, subscription, callback, metadata, priority): + '''Initialise subscriber.''' + self.subscription = ftrack_api.event.subscription.Subscription( + subscription + ) + self.callback = callback + self.metadata = metadata + self.priority = priority + + def __str__(self): + '''Return string representation.''' + return '<{0} metadata={1} subscription="{2}">'.format( + self.__class__.__name__, self.metadata, self.subscription + ) + + def interested_in(self, event): + '''Return whether subscriber interested in 
*event*.''' + return self.subscription.includes(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py new file mode 100644 index 0000000000..0b208d9977 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py @@ -0,0 +1,23 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import ftrack_api.event.expression + + +class Subscription(object): + '''Represent a subscription.''' + + parser = ftrack_api.event.expression.Parser() + + def __init__(self, subscription): + '''Initialise with *subscription*.''' + self._subscription = subscription + self._expression = self.parser.parse(subscription) + + def __str__(self): + '''Return string representation.''' + return self._subscription + + def includes(self, event): + '''Return whether subscription includes *event*.''' + return self._expression.match(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py new file mode 100644 index 0000000000..8a2eb9bc04 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py @@ -0,0 +1,392 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import sys +import traceback + +import ftrack_api.entity.base + + +class Error(Exception): + '''ftrack specific error.''' + + default_message = 'Unspecified error occurred.' + + def __init__(self, message=None, details=None): + '''Initialise exception with *message*. + + If *message* is None, the class 'default_message' will be used. 
+ + *details* should be a mapping of extra information that can be used in + the message and also to provide more context. + + ''' + if message is None: + message = self.default_message + + self.message = message + self.details = details + if self.details is None: + self.details = {} + + self.traceback = traceback.format_exc() + + def __str__(self): + '''Return string representation.''' + keys = {} + for key, value in self.details.iteritems(): + if isinstance(value, unicode): + value = value.encode(sys.getfilesystemencoding()) + keys[key] = value + + return str(self.message.format(**keys)) + + +class AuthenticationError(Error): + '''Raise when an authentication error occurs.''' + + default_message = 'Authentication error.' + + +class ServerError(Error): + '''Raise when the server reports an error.''' + + default_message = 'Server reported error processing request.' + + +class ServerCompatibilityError(ServerError): + '''Raise when server appears incompatible.''' + + default_message = 'Server incompatible.' + + +class NotFoundError(Error): + '''Raise when something that should exist is not found.''' + + default_message = 'Not found.' + + +class NotUniqueError(Error): + '''Raise when unique value required and duplicate detected.''' + + default_message = 'Non-unique value detected.' + + +class IncorrectResultError(Error): + '''Raise when a result is incorrect.''' + + default_message = 'Incorrect result detected.' + + +class NoResultFoundError(IncorrectResultError): + '''Raise when a result was expected but no result was found.''' + + default_message = 'Expected result, but no result was found.' + + +class MultipleResultsFoundError(IncorrectResultError): + '''Raise when a single result expected, but multiple results found.''' + + default_message = 'Expected single result, but received multiple results.' + + +class EntityTypeError(Error): + '''Raise when an entity type error occurs.''' + + default_message = 'Entity type error.' 
+ + +class UnrecognisedEntityTypeError(EntityTypeError): + '''Raise when an unrecognised entity type detected.''' + + default_message = 'Entity type "{entity_type}" not recognised.' + + def __init__(self, entity_type, **kw): + '''Initialise with *entity_type* that is unrecognised.''' + kw.setdefault('details', {}).update(dict( + entity_type=entity_type + )) + super(UnrecognisedEntityTypeError, self).__init__(**kw) + + +class OperationError(Error): + '''Raise when an operation error occurs.''' + + default_message = 'Operation error.' + + +class InvalidStateError(Error): + '''Raise when an invalid state detected.''' + + default_message = 'Invalid state.' + + +class InvalidStateTransitionError(InvalidStateError): + '''Raise when an invalid state transition detected.''' + + default_message = ( + 'Invalid transition from {current_state!r} to {target_state!r} state ' + 'for entity {entity!r}' + ) + + def __init__(self, current_state, target_state, entity, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + current_state=current_state, + target_state=target_state, + entity=entity + )) + super(InvalidStateTransitionError, self).__init__(**kw) + + +class AttributeError(Error): + '''Raise when an error related to an attribute occurs.''' + + default_message = 'Attribute error.' + + +class ImmutableAttributeError(AttributeError): + '''Raise when modification of immutable attribute attempted.''' + + default_message = ( + 'Cannot modify value of immutable {attribute.name!r} attribute.' + ) + + def __init__(self, attribute, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + attribute=attribute + )) + super(ImmutableAttributeError, self).__init__(**kw) + + +class CollectionError(Error): + '''Raise when an error related to collections occurs.''' + + default_message = 'Collection error.' 
+ + def __init__(self, collection, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + collection=collection + )) + super(CollectionError, self).__init__(**kw) + + +class ImmutableCollectionError(CollectionError): + '''Raise when modification of immutable collection attempted.''' + + default_message = ( + 'Cannot modify value of immutable collection {collection!r}.' + ) + + +class DuplicateItemInCollectionError(CollectionError): + '''Raise when duplicate item in collection detected.''' + + default_message = ( + 'Item {item!r} already exists in collection {collection!r}.' + ) + + def __init__(self, item, collection, **kw): + '''Initialise error.''' + kw.setdefault('details', {}).update(dict( + item=item + )) + super(DuplicateItemInCollectionError, self).__init__(collection, **kw) + + +class ParseError(Error): + '''Raise when a parsing error occurs.''' + + default_message = 'Failed to parse.' + + +class EventHubError(Error): + '''Raise when issues related to event hub occur.''' + + default_message = 'Event hub error occurred.' + + +class EventHubConnectionError(EventHubError): + '''Raise when event hub encounters connection problem.''' + + default_message = 'Event hub is not connected.' + + +class EventHubPacketError(EventHubError): + '''Raise when event hub encounters an issue with a packet.''' + + default_message = 'Invalid packet.' + + +class PermissionDeniedError(Error): + '''Raise when permission is denied.''' + + default_message = 'Permission denied.' + + +class LocationError(Error): + '''Base for errors associated with locations.''' + + default_message = 'Unspecified location error' + + +class ComponentNotInAnyLocationError(LocationError): + '''Raise when component not available in any location.''' + + default_message = 'Component not available in any location.' 
+ + +class ComponentNotInLocationError(LocationError): + '''Raise when component(s) not in location.''' + + default_message = ( + 'Component(s) {formatted_components} not found in location {location}.' + ) + + def __init__(self, components, location, **kw): + '''Initialise with *components* and *location*.''' + if isinstance(components, ftrack_api.entity.base.Entity): + components = [components] + + kw.setdefault('details', {}).update(dict( + components=components, + formatted_components=', '.join( + [str(component) for component in components] + ), + location=location + )) + + super(ComponentNotInLocationError, self).__init__(**kw) + + +class ComponentInLocationError(LocationError): + '''Raise when component(s) already exists in location.''' + + default_message = ( + 'Component(s) {formatted_components} already exist in location ' + '{location}.' + ) + + def __init__(self, components, location, **kw): + '''Initialise with *components* and *location*.''' + if isinstance(components, ftrack_api.entity.base.Entity): + components = [components] + + kw.setdefault('details', {}).update(dict( + components=components, + formatted_components=', '.join( + [str(component) for component in components] + ), + location=location + )) + + super(ComponentInLocationError, self).__init__(**kw) + + +class AccessorError(Error): + '''Base for errors associated with accessors.''' + + default_message = 'Unspecified accessor error' + + +class AccessorOperationFailedError(AccessorError): + '''Base for failed operations on accessors.''' + + default_message = 'Operation {operation} failed: {error}' + + def __init__( + self, operation='', resource_identifier=None, error=None, **kw + ): + kw.setdefault('details', {}).update(dict( + operation=operation, + resource_identifier=resource_identifier, + error=error + )) + super(AccessorOperationFailedError, self).__init__(**kw) + + +class AccessorUnsupportedOperationError(AccessorOperationFailedError): + '''Raise when operation is unsupported.''' + + 
default_message = 'Operation {operation} unsupported.' + + +class AccessorPermissionDeniedError(AccessorOperationFailedError): + '''Raise when permission denied.''' + + default_message = ( + 'Cannot {operation} {resource_identifier}. Permission denied.' + ) + + +class AccessorResourceIdentifierError(AccessorError): + '''Raise when a error related to a resource_identifier occurs.''' + + default_message = 'Resource identifier is invalid: {resource_identifier}.' + + def __init__(self, resource_identifier, **kw): + kw.setdefault('details', {}).update(dict( + resource_identifier=resource_identifier + )) + super(AccessorResourceIdentifierError, self).__init__(**kw) + + +class AccessorFilesystemPathError(AccessorResourceIdentifierError): + '''Raise when a error related to an accessor filesystem path occurs.''' + + default_message = ( + 'Could not determine filesystem path from resource identifier: ' + '{resource_identifier}.' + ) + + +class AccessorResourceError(AccessorError): + '''Base for errors associated with specific resource.''' + + default_message = 'Unspecified resource error: {resource_identifier}' + + def __init__(self, operation='', resource_identifier=None, error=None, + **kw): + kw.setdefault('details', {}).update(dict( + operation=operation, + resource_identifier=resource_identifier + )) + super(AccessorResourceError, self).__init__(**kw) + + +class AccessorResourceNotFoundError(AccessorResourceError): + '''Raise when a required resource is not found.''' + + default_message = 'Resource not found: {resource_identifier}' + + +class AccessorParentResourceNotFoundError(AccessorResourceError): + '''Raise when a parent resource (such as directory) is not found.''' + + default_message = 'Parent resource is missing: {resource_identifier}' + + +class AccessorResourceInvalidError(AccessorResourceError): + '''Raise when a resource is not the right type.''' + + default_message = 'Resource invalid: {resource_identifier}' + + +class 
AccessorContainerNotEmptyError(AccessorResourceError): + '''Raise when container is not empty.''' + + default_message = 'Container is not empty: {resource_identifier}' + + +class StructureError(Error): + '''Base for errors associated with structures.''' + + default_message = 'Unspecified structure error' + + +class ConnectionClosedError(Error): + '''Raise when attempt to use closed connection detected.''' + + default_message = "Connection closed." diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py new file mode 100644 index 0000000000..c282fcc814 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py @@ -0,0 +1,131 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import termcolor + +import ftrack_api.entity.base +import ftrack_api.collection +import ftrack_api.symbol +import ftrack_api.inspection + + +#: Useful filters to pass to :func:`format`.` +FILTER = { + 'ignore_unset': ( + lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET + ) +} + + +def format( + entity, formatters=None, attribute_filter=None, recursive=False, + indent=0, indent_first_line=True, _seen=None +): + '''Return formatted string representing *entity*. + + *formatters* can be used to customise formatting of elements. It should be a + mapping with one or more of the following keys: + + * header - Used to format entity type. + * label - Used to format attribute names. + + Specify an *attribute_filter* to control which attributes to include. By + default all attributes are included. The *attribute_filter* should be a + callable that accepts `(entity, attribute_name, attribute_value)` and + returns True if the attribute should be included in the output. 
For example, + to filter out all unset values:: + + attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] + + If *recursive* is True then recurse into Collections and format each entity + present. + + *indent* specifies the overall indentation in spaces of the formatted text, + whilst *indent_first_line* determines whether to apply that indent to the + first generated line. + + .. warning:: + + Iterates over all *entity* attributes which may cause multiple queries + to the server. Turn off auto populating in the session to prevent this. + + ''' + # Initialise default formatters. + if formatters is None: + formatters = dict() + + formatters.setdefault( + 'header', lambda text: termcolor.colored( + text, 'white', 'on_blue', attrs=['bold'] + ) + ) + formatters.setdefault( + 'label', lambda text: termcolor.colored( + text, 'blue', attrs=['bold'] + ) + ) + + # Determine indents. + spacer = ' ' * indent + if indent_first_line: + first_line_spacer = spacer + else: + first_line_spacer = '' + + # Avoid infinite recursion on circular references. 
+ if _seen is None: + _seen = set() + + identifier = str(ftrack_api.inspection.identity(entity)) + if identifier in _seen: + return ( + first_line_spacer + + formatters['header'](entity.entity_type) + '{...}' + ) + + _seen.add(identifier) + information = list() + + information.append( + first_line_spacer + formatters['header'](entity.entity_type) + ) + for key, value in sorted(entity.items()): + if attribute_filter is not None: + if not attribute_filter(entity, key, value): + continue + + child_indent = indent + len(key) + 3 + + if isinstance(value, ftrack_api.entity.base.Entity): + value = format( + value, + formatters=formatters, + attribute_filter=attribute_filter, + recursive=recursive, + indent=child_indent, + indent_first_line=False, + _seen=_seen.copy() + ) + + if isinstance(value, ftrack_api.collection.Collection): + if recursive: + child_values = [] + for index, child in enumerate(value): + child_value = format( + child, + formatters=formatters, + attribute_filter=attribute_filter, + recursive=recursive, + indent=child_indent, + indent_first_line=index != 0, + _seen=_seen.copy() + ) + child_values.append(child_value) + + value = '\n'.join(child_values) + + information.append( + spacer + u' {0}: {1}'.format(formatters['label'](key), value) + ) + + return '\n'.join(information) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py new file mode 100644 index 0000000000..d8b815200e --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py @@ -0,0 +1,135 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import collections + +import ftrack_api.symbol +import ftrack_api.operation + + +def identity(entity): + '''Return unique identity of *entity*.''' + return ( + str(entity.entity_type), + 
primary_key(entity).values() + ) + + +def primary_key(entity): + '''Return primary key of *entity* as an ordered mapping of {field: value}. + + To get just the primary key values:: + + primary_key(entity).values() + + ''' + primary_key = collections.OrderedDict() + for name in entity.primary_key_attributes: + value = entity[name] + if value is ftrack_api.symbol.NOT_SET: + raise KeyError( + 'Missing required value for primary key attribute "{0}" on ' + 'entity {1!r}.'.format(name, entity) + ) + + primary_key[str(name)] = str(value) + + return primary_key + + +def _state(operation, state): + '''Return state following *operation* against current *state*.''' + if ( + isinstance( + operation, ftrack_api.operation.CreateEntityOperation + ) + and state is ftrack_api.symbol.NOT_SET + ): + state = ftrack_api.symbol.CREATED + + elif ( + isinstance( + operation, ftrack_api.operation.UpdateEntityOperation + ) + and state is ftrack_api.symbol.NOT_SET + ): + state = ftrack_api.symbol.MODIFIED + + elif isinstance( + operation, ftrack_api.operation.DeleteEntityOperation + ): + state = ftrack_api.symbol.DELETED + + return state + + +def state(entity): + '''Return current *entity* state. + + .. seealso:: :func:`ftrack_api.inspection.states`. + + ''' + value = ftrack_api.symbol.NOT_SET + + for operation in entity.session.recorded_operations: + # Determine if operation refers to an entity and whether that entity + # is *entity*. + if ( + isinstance( + operation, + ( + ftrack_api.operation.CreateEntityOperation, + ftrack_api.operation.UpdateEntityOperation, + ftrack_api.operation.DeleteEntityOperation + ) + ) + and operation.entity_type == entity.entity_type + and operation.entity_key == primary_key(entity) + ): + value = _state(operation, value) + + return value + + +def states(entities): + '''Return current states of *entities*. + + An optimised function for determining states of multiple entities in one + go. + + .. note:: + + All *entities* should belong to the same session. + + .. 
seealso:: :func:`ftrack_api.inspection.state`. + + ''' + if not entities: + return [] + + session = entities[0].session + + entities_by_identity = collections.OrderedDict() + for entity in entities: + key = (entity.entity_type, str(primary_key(entity).values())) + entities_by_identity[key] = ftrack_api.symbol.NOT_SET + + for operation in session.recorded_operations: + if ( + isinstance( + operation, + ( + ftrack_api.operation.CreateEntityOperation, + ftrack_api.operation.UpdateEntityOperation, + ftrack_api.operation.DeleteEntityOperation + ) + ) + ): + key = (operation.entity_type, str(operation.entity_key.values())) + if key not in entities_by_identity: + continue + + value = _state(operation, entities_by_identity[key]) + entities_by_identity[key] = value + + return entities_by_identity.values() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py new file mode 100644 index 0000000000..41969c5b2a --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py @@ -0,0 +1,43 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + +import functools +import warnings + + +def deprecation_warning(message): + def decorator(function): + @functools.wraps(function) + def wrapper(*args, **kwargs): + warnings.warn( + message, + PendingDeprecationWarning + ) + return function(*args, **kwargs) + return wrapper + + return decorator + + +class LazyLogMessage(object): + '''A log message that can be evaluated lazily for improved performance. + + Example:: + + # Formatting of string will not occur unless debug logging enabled. 
+ logger.debug(LazyLogMessage( + 'Hello {0}', 'world' + )) + + ''' + + def __init__(self, message, *args, **kwargs): + '''Initialise with *message* format string and arguments.''' + self.message = message + self.args = args + self.kwargs = kwargs + + def __str__(self): + '''Return string representation.''' + return self.message.format(*self.args, **self.kwargs) + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py new file mode 100644 index 0000000000..bb3bb4ee2c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py @@ -0,0 +1,115 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import copy + + +class Operations(object): + '''Stack of operations.''' + + def __init__(self): + '''Initialise stack.''' + self._stack = [] + super(Operations, self).__init__() + + def clear(self): + '''Clear all operations.''' + del self._stack[:] + + def push(self, operation): + '''Push *operation* onto stack.''' + self._stack.append(operation) + + def pop(self): + '''Pop and return most recent operation from stack.''' + return self._stack.pop() + + def __len__(self): + '''Return count of operations.''' + return len(self._stack) + + def __iter__(self): + '''Return iterator over operations.''' + return iter(self._stack) + + +class Operation(object): + '''Represent an operation.''' + + +class CreateEntityOperation(Operation): + '''Represent create entity operation.''' + + def __init__(self, entity_type, entity_key, entity_data): + '''Initialise operation. + + *entity_type* should be the type of entity in string form (as returned + from :attr:`ftrack_api.entity.base.Entity.entity_type`). + + *entity_key* should be the unique key for the entity and should follow + the form returned from :func:`ftrack_api.inspection.primary_key`. 
+ + *entity_data* should be a mapping of the initial data to populate the + entity with when creating. + + .. note:: + + Shallow copies will be made of each value in *entity_data*. + + ''' + super(CreateEntityOperation, self).__init__() + self.entity_type = entity_type + self.entity_key = entity_key + self.entity_data = {} + for key, value in entity_data.items(): + self.entity_data[key] = copy.copy(value) + + +class UpdateEntityOperation(Operation): + '''Represent update entity operation.''' + + def __init__( + self, entity_type, entity_key, attribute_name, old_value, new_value + ): + '''Initialise operation. + + *entity_type* should be the type of entity in string form (as returned + from :attr:`ftrack_api.entity.base.Entity.entity_type`). + + *entity_key* should be the unique key for the entity and should follow + the form returned from :func:`ftrack_api.inspection.primary_key`. + + *attribute_name* should be the string name of the attribute being + modified and *old_value* and *new_value* should reflect the change in + value. + + .. note:: + + Shallow copies will be made of both *old_value* and *new_value*. + + ''' + super(UpdateEntityOperation, self).__init__() + self.entity_type = entity_type + self.entity_key = entity_key + self.attribute_name = attribute_name + self.old_value = copy.copy(old_value) + self.new_value = copy.copy(new_value) + + +class DeleteEntityOperation(Operation): + '''Represent delete entity operation.''' + + def __init__(self, entity_type, entity_key): + '''Initialise operation. + + *entity_type* should be the type of entity in string form (as returned + from :attr:`ftrack_api.entity.base.Entity.entity_type`). + + *entity_key* should be the unique key for the entity and should follow + the form returned from :func:`ftrack_api.inspection.primary_key`. 
+ + ''' + super(DeleteEntityOperation, self).__init__() + self.entity_type = entity_type + self.entity_key = entity_key + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py new file mode 100644 index 0000000000..2c7a9a4500 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py @@ -0,0 +1,121 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import logging +import os +import uuid +import imp +import inspect + + +def discover(paths, positional_arguments=None, keyword_arguments=None): + '''Find and load plugins in search *paths*. + + Each discovered module should implement a register function that accepts + *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs + respectively. + + If a register function does not accept variable arguments, then attempt to + only pass accepted arguments to the function by inspecting its signature. + + ''' + logger = logging.getLogger(__name__ + '.discover') + + if positional_arguments is None: + positional_arguments = [] + + if keyword_arguments is None: + keyword_arguments = {} + + for path in paths: + # Ignore empty paths that could resolve to current directory. 
+ path = path.strip() + if not path: + continue + + for base, directories, filenames in os.walk(path): + for filename in filenames: + name, extension = os.path.splitext(filename) + if extension != '.py': + continue + + module_path = os.path.join(base, filename) + unique_name = uuid.uuid4().hex + + try: + module = imp.load_source(unique_name, module_path) + except Exception as error: + logger.warning( + 'Failed to load plugin from "{0}": {1}' + .format(module_path, error) + ) + continue + + try: + module.register + except AttributeError: + logger.warning( + 'Failed to load plugin that did not define a ' + '"register" function at the module level: {0}' + .format(module_path) + ) + else: + # Attempt to only pass arguments that are accepted by the + # register function. + specification = inspect.getargspec(module.register) + + selected_positional_arguments = positional_arguments + selected_keyword_arguments = keyword_arguments + + if ( + not specification.varargs and + len(positional_arguments) > len(specification.args) + ): + logger.warning( + 'Culling passed arguments to match register ' + 'function signature.' + ) + + selected_positional_arguments = positional_arguments[ + len(specification.args): + ] + selected_keyword_arguments = {} + + elif not specification.keywords: + # Remove arguments that have been passed as positionals. + remainder = specification.args[ + len(positional_arguments): + ] + + # Determine remaining available keyword arguments. + defined_keyword_arguments = [] + if specification.defaults: + defined_keyword_arguments = specification.args[ + -len(specification.defaults): + ] + + remaining_keyword_arguments = set([ + keyword_argument for keyword_argument + in defined_keyword_arguments + if keyword_argument in remainder + ]) + + if not set(keyword_arguments.keys()).issubset( + remaining_keyword_arguments + ): + logger.warning( + 'Culling passed arguments to match register ' + 'function signature.' 
+ ) + selected_keyword_arguments = { + key: value + for key, value in keyword_arguments.items() + if key in remaining_keyword_arguments + } + + module.register( + *selected_positional_arguments, + **selected_keyword_arguments + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py new file mode 100644 index 0000000000..ea101a29d4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py @@ -0,0 +1,202 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import re +import collections + +import ftrack_api.exception + + +class QueryResult(collections.Sequence): + '''Results from a query.''' + + OFFSET_EXPRESSION = re.compile('(?Poffset (?P\d+))') + LIMIT_EXPRESSION = re.compile('(?Plimit (?P\d+))') + + def __init__(self, session, expression, page_size=500): + '''Initialise result set. + + *session* should be an instance of :class:`ftrack_api.session.Session` + that will be used for executing the query *expression*. + + *page_size* should be an integer specifying the maximum number of + records to fetch in one request allowing the results to be fetched + incrementally in a transparent manner for optimal performance. Any + offset or limit specified in *expression* are honoured for final result + set, but intermediate queries may be issued with different offsets and + limits in order to fetch pages. When an embedded limit is smaller than + the given *page_size* it will be used instead and no paging will take + place. + + .. warning:: + + Setting *page_size* to a very large amount may negatively impact + performance of not only the caller, but the server in general. 
+ + ''' + super(QueryResult, self).__init__() + self._session = session + self._results = [] + + ( + self._expression, + self._offset, + self._limit + ) = self._extract_offset_and_limit(expression) + + self._page_size = page_size + if self._limit is not None and self._limit < self._page_size: + # Optimise case where embedded limit is less than fetching a + # single page. + self._page_size = self._limit + + self._next_offset = self._offset + if self._next_offset is None: + # Initialise with zero offset. + self._next_offset = 0 + + def _extract_offset_and_limit(self, expression): + '''Process *expression* extracting offset and limit. + + Return (expression, offset, limit). + + ''' + offset = None + match = self.OFFSET_EXPRESSION.search(expression) + if match: + offset = int(match.group('value')) + expression = ( + expression[:match.start('offset')] + + expression[match.end('offset'):] + ) + + limit = None + match = self.LIMIT_EXPRESSION.search(expression) + if match: + limit = int(match.group('value')) + expression = ( + expression[:match.start('limit')] + + expression[match.end('limit'):] + ) + + return expression.strip(), offset, limit + + def __getitem__(self, index): + '''Return value at *index*.''' + while self._can_fetch_more() and index >= len(self._results): + self._fetch_more() + + return self._results[index] + + def __len__(self): + '''Return number of items.''' + while self._can_fetch_more(): + self._fetch_more() + + return len(self._results) + + def _can_fetch_more(self): + '''Return whether more results are available to fetch.''' + return self._next_offset is not None + + def _fetch_more(self): + '''Fetch next page of results if available.''' + if not self._can_fetch_more(): + return + + expression = '{0} offset {1} limit {2}'.format( + self._expression, self._next_offset, self._page_size + ) + records, metadata = self._session._query(expression) + self._results.extend(records) + + if self._limit is not None and (len(self._results) >= self._limit): + # 
Original limit reached. + self._next_offset = None + del self._results[self._limit:] + else: + # Retrieve next page offset from returned metadata. + self._next_offset = metadata.get('next', {}).get('offset', None) + + def all(self): + '''Fetch and return all data.''' + return list(self) + + def one(self): + '''Return exactly one single result from query by applying a limit. + + Raise :exc:`ValueError` if an existing limit is already present in the + expression. + + Raise :exc:`ValueError` if an existing offset is already present in the + expression as offset is inappropriate when expecting a single item. + + Raise :exc:`~ftrack_api.exception.MultipleResultsFoundError` if more + than one result was available or + :exc:`~ftrack_api.exception.NoResultFoundError` if no results were + available. + + .. note:: + + Both errors subclass + :exc:`~ftrack_api.exception.IncorrectResultError` if you want to + catch only one error type. + + ''' + expression = self._expression + + if self._limit is not None: + raise ValueError( + 'Expression already contains a limit clause.' + ) + + if self._offset is not None: + raise ValueError( + 'Expression contains an offset clause which does not make ' + 'sense when selecting a single item.' + ) + + # Apply custom limit as optimisation. A limit of 2 is used rather than + # 1 so that it is possible to test for multiple matching entries + # case. + expression += ' limit 2' + + results, metadata = self._session._query(expression) + + if not results: + raise ftrack_api.exception.NoResultFoundError() + + if len(results) != 1: + raise ftrack_api.exception.MultipleResultsFoundError() + + return results[0] + + def first(self): + '''Return first matching result from query by applying a limit. + + Raise :exc:`ValueError` if an existing limit is already present in the + expression. + + If no matching result available return None. 
+ + ''' + expression = self._expression + + if self._limit is not None: + raise ValueError( + 'Expression already contains a limit clause.' + ) + + # Apply custom offset if present. + if self._offset is not None: + expression += ' offset {0}'.format(self._offset) + + # Apply custom limit as optimisation. + expression += ' limit 1' + + results, metadata = self._session._query(expression) + + if results: + return results[0] + + return None diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py new file mode 100644 index 0000000000..1aab07ed77 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py new file mode 100644 index 0000000000..ee069b57b6 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py @@ -0,0 +1,50 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + + +class ResourceIdentifierTransformer(object): + '''Transform resource identifiers. + + Provide ability to modify resource identifier before it is stored centrally + (:meth:`encode`), or after it has been retrieved, but before it is used + locally (:meth:`decode`). + + For example, you might want to decompose paths into a set of key, value + pairs to store centrally and then compose a path from those values when + reading back. 
+ + .. note:: + + This is separate from any transformations an + :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted + towards common transformations. + + ''' + + def __init__(self, session): + '''Initialise resource identifier transformer. + + *session* should be the :class:`ftrack_api.session.Session` instance + to use for communication with the server. + + ''' + self.session = session + super(ResourceIdentifierTransformer, self).__init__() + + def encode(self, resource_identifier, context=None): + '''Return encoded *resource_identifier* for storing centrally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return resource_identifier + + def decode(self, resource_identifier, context=None): + '''Return decoded *resource_identifier* for use locally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py new file mode 100644 index 0000000000..1a5da44432 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py @@ -0,0 +1,2515 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from __future__ import absolute_import + +import json +import logging +import collections +import datetime +import os +import getpass +import functools +import itertools +import distutils.version +import hashlib +import tempfile +import threading +import atexit +import warnings + +import requests +import requests.auth +import arrow +import clique + +import ftrack_api +import ftrack_api.exception +import ftrack_api.entity.factory +import ftrack_api.entity.base +import ftrack_api.entity.location +import ftrack_api.cache +import ftrack_api.symbol +import ftrack_api.query +import 
ftrack_api.attribute +import ftrack_api.collection +import ftrack_api.event.hub +import ftrack_api.event.base +import ftrack_api.plugin +import ftrack_api.inspection +import ftrack_api.operation +import ftrack_api.accessor.disk +import ftrack_api.structure.origin +import ftrack_api.structure.entity_id +import ftrack_api.accessor.server +import ftrack_api._centralized_storage_scenario +import ftrack_api.logging +from ftrack_api.logging import LazyLogMessage as L + +try: + from weakref import WeakMethod +except ImportError: + from ftrack_api._weakref import WeakMethod + + +class SessionAuthentication(requests.auth.AuthBase): + '''Attach ftrack session authentication information to requests.''' + + def __init__(self, api_key, api_user): + '''Initialise with *api_key* and *api_user*.''' + self.api_key = api_key + self.api_user = api_user + super(SessionAuthentication, self).__init__() + + def __call__(self, request): + '''Modify *request* to have appropriate headers.''' + request.headers.update({ + 'ftrack-api-key': self.api_key, + 'ftrack-user': self.api_user + }) + return request + + +class Session(object): + '''An isolated session for interaction with an ftrack server.''' + + def __init__( + self, server_url=None, api_key=None, api_user=None, auto_populate=True, + plugin_paths=None, cache=None, cache_key_maker=None, + auto_connect_event_hub=None, schema_cache_path=None, + plugin_arguments=None + ): + '''Initialise session. + + *server_url* should be the URL of the ftrack server to connect to + including any port number. If not specified attempt to look up from + :envvar:`FTRACK_SERVER`. + + *api_key* should be the API key to use for authentication whilst + *api_user* should be the username of the user in ftrack to record + operations against. If not specified, *api_key* should be retrieved + from :envvar:`FTRACK_API_KEY` and *api_user* from + :envvar:`FTRACK_API_USER`. 
+ + If *auto_populate* is True (the default), then accessing entity + attributes will cause them to be automatically fetched from the server + if they are not already. This flag can be changed on the session + directly at any time. + + *plugin_paths* should be a list of paths to search for plugins. If not + specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. + + *cache* should be an instance of a cache that fulfils the + :class:`ftrack_api.cache.Cache` interface and will be used as the cache + for the session. It can also be a callable that will be called with the + session instance as sole argument. The callable should return ``None`` + if a suitable cache could not be configured, but session instantiation + can continue safely. + + .. note:: + + The session will add the specified cache to a pre-configured layered + cache that specifies the top level cache as a + :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary + to construct a separate memory cache for typical behaviour. Working + around this behaviour or removing the memory cache can lead to + unexpected behaviour. + + *cache_key_maker* should be an instance of a key maker that fulfils the + :class:`ftrack_api.cache.KeyMaker` interface and will be used to + generate keys for objects being stored in the *cache*. If not specified, + a :class:`~ftrack_api.cache.StringKeyMaker` will be used. + + If *auto_connect_event_hub* is True then embedded event hub will be + automatically connected to the event server and allow for publishing and + subscribing to **non-local** events. If False, then only publishing and + subscribing to **local** events will be possible until the hub is + manually connected using :meth:`EventHub.connect + `. + + .. note:: + + The event hub connection is performed in a background thread to + improve session startup time. If a registered plugin requires a + connected event hub then it should check the event hub connection + status explicitly. 
Subscribing to events does *not* require a + connected event hub. + + Enable schema caching by setting *schema_cache_path* to a folder path. + If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to + determine the path to store cache in. If the environment variable is + also not specified then a temporary directory will be used. Set to + `False` to disable schema caching entirely. + + *plugin_arguments* should be an optional mapping (dict) of keyword + arguments to pass to plugin register functions upon discovery. If a + discovered plugin has a signature that is incompatible with the passed + arguments, the discovery mechanism will attempt to reduce the passed + arguments to only those that the plugin accepts. Note that a warning + will be logged in this case. + + ''' + super(Session, self).__init__() + self.logger = logging.getLogger( + __name__ + '.' + self.__class__.__name__ + ) + self._closed = False + + if server_url is None: + server_url = os.environ.get('FTRACK_SERVER') + + if not server_url: + raise TypeError( + 'Required "server_url" not specified. Pass as argument or set ' + 'in environment variable FTRACK_SERVER.' + ) + + self._server_url = server_url + + if api_key is None: + api_key = os.environ.get( + 'FTRACK_API_KEY', + # Backwards compatibility + os.environ.get('FTRACK_APIKEY') + ) + + if not api_key: + raise TypeError( + 'Required "api_key" not specified. Pass as argument or set in ' + 'environment variable FTRACK_API_KEY.' + ) + + self._api_key = api_key + + if api_user is None: + api_user = os.environ.get('FTRACK_API_USER') + if not api_user: + try: + api_user = getpass.getuser() + except Exception: + pass + + if not api_user: + raise TypeError( + 'Required "api_user" not specified. Pass as argument, set in ' + 'environment variable FTRACK_API_USER or one of the standard ' + 'environment variables used by Python\'s getpass module.' + ) + + self._api_user = api_user + + # Currently pending operations. 
+ self.recorded_operations = ftrack_api.operation.Operations() + self.record_operations = True + + self.cache_key_maker = cache_key_maker + if self.cache_key_maker is None: + self.cache_key_maker = ftrack_api.cache.StringKeyMaker() + + # Enforce always having a memory cache at top level so that the same + # in-memory instance is returned from session. + self.cache = ftrack_api.cache.LayeredCache([ + ftrack_api.cache.MemoryCache() + ]) + + if cache is not None: + if callable(cache): + cache = cache(self) + + if cache is not None: + self.cache.caches.append(cache) + + self._managed_request = None + self._request = requests.Session() + self._request.auth = SessionAuthentication( + self._api_key, self._api_user + ) + + self.auto_populate = auto_populate + + # Fetch server information and in doing so also check credentials. + self._server_information = self._fetch_server_information() + + # Now check compatibility of server based on retrieved information. + self.check_server_compatibility() + + # Construct event hub and load plugins. + self._event_hub = ftrack_api.event.hub.EventHub( + self._server_url, + self._api_user, + self._api_key, + ) + + self._auto_connect_event_hub_thread = None + if auto_connect_event_hub in (None, True): + # Connect to event hub in background thread so as not to block main + # session usage waiting for event hub connection. + self._auto_connect_event_hub_thread = threading.Thread( + target=self._event_hub.connect + ) + self._auto_connect_event_hub_thread.daemon = True + self._auto_connect_event_hub_thread.start() + + # To help with migration from auto_connect_event_hub default changing + # from True to False. + self._event_hub._deprecation_warning_auto_connect = ( + auto_connect_event_hub is None + ) + + # Register to auto-close session on exit. 
+ atexit.register(WeakMethod(self.close)) + + self._plugin_paths = plugin_paths + if self._plugin_paths is None: + self._plugin_paths = os.environ.get( + 'FTRACK_EVENT_PLUGIN_PATH', '' + ).split(os.pathsep) + + self._discover_plugins(plugin_arguments=plugin_arguments) + + # TODO: Make schemas read-only and non-mutable (or at least without + # rebuilding types)? + if schema_cache_path is not False: + if schema_cache_path is None: + schema_cache_path = os.environ.get( + 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + ) + + schema_cache_path = os.path.join( + schema_cache_path, 'ftrack_api_schema_cache.json' + ) + + self.schemas = self._load_schemas(schema_cache_path) + self.types = self._build_entity_type_classes(self.schemas) + + ftrack_api._centralized_storage_scenario.register(self) + + self._configure_locations() + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.ready', + data=dict( + session=self + ) + ), + synchronous=True + ) + + def __enter__(self): + '''Return session as context manager.''' + return self + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit session context, closing session in process.''' + self.close() + + @property + def _request(self): + '''Return request session. + + Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has + been closed and connection unavailable. 
+ + ''' + if self._managed_request is None: + raise ftrack_api.exception.ConnectionClosedError() + + return self._managed_request + + @_request.setter + def _request(self, value): + '''Set request session to *value*.''' + self._managed_request = value + + @property + def closed(self): + '''Return whether session has been closed.''' + return self._closed + + @property + def server_information(self): + '''Return server information such as server version.''' + return self._server_information.copy() + + @property + def server_url(self): + '''Return server ulr used for session.''' + return self._server_url + + @property + def api_user(self): + '''Return username used for session.''' + return self._api_user + + @property + def api_key(self): + '''Return API key used for session.''' + return self._api_key + + @property + def event_hub(self): + '''Return event hub.''' + return self._event_hub + + @property + def _local_cache(self): + '''Return top level memory cache.''' + return self.cache.caches[0] + + def check_server_compatibility(self): + '''Check compatibility with connected server.''' + server_version = self.server_information.get('version') + if server_version is None: + raise ftrack_api.exception.ServerCompatibilityError( + 'Could not determine server version.' + ) + + # Perform basic version check. + if server_version != 'dev': + min_server_version = '3.3.11' + if ( + distutils.version.LooseVersion(min_server_version) + > distutils.version.LooseVersion(server_version) + ): + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0} incompatible with this version of the ' + 'API which requires a server version >= {1}'.format( + server_version, + min_server_version + ) + ) + + def close(self): + '''Close session. + + Close connections to server. Clear any pending operations and local + cache. + + Use this to ensure that session is cleaned up properly after use. 
+ + ''' + if self.closed: + self.logger.debug('Session already closed.') + return + + self._closed = True + + self.logger.debug('Closing session.') + if self.recorded_operations: + self.logger.warning( + 'Closing session with pending operations not persisted.' + ) + + # Clear pending operations. + self.recorded_operations.clear() + + # Clear top level cache (expected to be enforced memory cache). + self._local_cache.clear() + + # Close connections. + self._request.close() + self._request = None + + try: + self.event_hub.disconnect() + if self._auto_connect_event_hub_thread: + self._auto_connect_event_hub_thread.join() + except ftrack_api.exception.EventHubConnectionError: + pass + + self.logger.debug('Session closed.') + + def reset(self): + '''Reset session clearing local state. + + Clear all pending operations and expunge all entities from session. + + Also clear the local cache. If the cache used by the session is a + :class:`~ftrack_api.cache.LayeredCache` then only clear top level cache. + Otherwise, clear the entire cache. + + Plugins are not rediscovered or reinitialised, but certain plugin events + are re-emitted to properly configure session aspects that are dependant + on cache (such as location plugins). + + .. warning:: + + Previously attached entities are not reset in memory and will retain + their state, but should not be used. Doing so will cause errors. + + ''' + if self.recorded_operations: + self.logger.warning( + 'Resetting session with pending operations not persisted.' + ) + + # Clear pending operations. + self.recorded_operations.clear() + + # Clear top level cache (expected to be enforced memory cache). + self._local_cache.clear() + + # Re-configure certain session aspects that may be dependant on cache. 
+ self._configure_locations() + + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.reset', + data=dict( + session=self + ) + ), + synchronous=True + ) + + def auto_populating(self, auto_populate): + '''Temporarily set auto populate to *auto_populate*. + + The current setting will be restored automatically when done. + + Example:: + + with session.auto_populating(False): + print entity['name'] + + ''' + return AutoPopulatingContext(self, auto_populate) + + def operation_recording(self, record_operations): + '''Temporarily set operation recording to *record_operations*. + + The current setting will be restored automatically when done. + + Example:: + + with session.operation_recording(False): + entity['name'] = 'change_not_recorded' + + ''' + return OperationRecordingContext(self, record_operations) + + @property + def created(self): + '''Return list of newly created entities.''' + entities = self._local_cache.values() + states = ftrack_api.inspection.states(entities) + + return [ + entity for (entity, state) in itertools.izip(entities, states) + if state is ftrack_api.symbol.CREATED + ] + + @property + def modified(self): + '''Return list of locally modified entities.''' + entities = self._local_cache.values() + states = ftrack_api.inspection.states(entities) + + return [ + entity for (entity, state) in itertools.izip(entities, states) + if state is ftrack_api.symbol.MODIFIED + ] + + @property + def deleted(self): + '''Return list of deleted entities.''' + entities = self._local_cache.values() + states = ftrack_api.inspection.states(entities) + + return [ + entity for (entity, state) in itertools.izip(entities, states) + if state is ftrack_api.symbol.DELETED + ] + + def reset_remote(self, reset_type, entity=None): + '''Perform a server side reset. + + *reset_type* is a server side supported reset type, + passing the optional *entity* to perform the option upon. 
+ + Please refer to ftrack documentation for a complete list of + supported server side reset types. + ''' + + payload = { + 'action': 'reset_remote', + 'reset_type': reset_type + } + + if entity is not None: + payload.update({ + 'entity_type': entity.entity_type, + 'entity_key': entity.get('id') + }) + + result = self.call( + [payload] + ) + + return result[0]['data'] + + def create(self, entity_type, data=None, reconstructing=False): + '''Create and return an entity of *entity_type* with initial *data*. + + If specified, *data* should be a dictionary of key, value pairs that + should be used to populate attributes on the entity. + + If *reconstructing* is False then create a new entity setting + appropriate defaults for missing data. If True then reconstruct an + existing entity. + + Constructed entity will be automatically :meth:`merged ` + into the session. + + ''' + entity = self._create(entity_type, data, reconstructing=reconstructing) + entity = self.merge(entity) + return entity + + def _create(self, entity_type, data, reconstructing): + '''Create and return an entity of *entity_type* with initial *data*.''' + try: + EntityTypeClass = self.types[entity_type] + except KeyError: + raise ftrack_api.exception.UnrecognisedEntityTypeError(entity_type) + + return EntityTypeClass(self, data=data, reconstructing=reconstructing) + + def ensure(self, entity_type, data, identifying_keys=None): + '''Retrieve entity of *entity_type* with *data*, creating if necessary. + + *data* should be a dictionary of the same form passed to :meth:`create`. + + By default, check for an entity that has matching *data*. If + *identifying_keys* is specified as a list of keys then only consider the + values from *data* for those keys when searching for existing entity. If + *data* is missing an identifying key then raise :exc:`KeyError`. + + If no *identifying_keys* specified then use all of the keys from the + passed *data*. 
Raise :exc:`ValueError` if no *identifying_keys* can be + determined. + + Each key should be a string. + + .. note:: + + Currently only top level scalars supported. To ensure an entity by + looking at relationships, manually issue the :meth:`query` and + :meth:`create` calls. + + If more than one entity matches the determined filter criteria then + raise :exc:`~ftrack_api.exception.MultipleResultsFoundError`. + + If no matching entity found then create entity using supplied *data*. + + If a matching entity is found, then update it if necessary with *data*. + + .. note:: + + If entity created or updated then a :meth:`commit` will be issued + automatically. If this behaviour is undesired, perform the + :meth:`query` and :meth:`create` calls manually. + + Return retrieved or created entity. + + Example:: + + # First time, a new entity with `username=martin` is created. + entity = session.ensure('User', {'username': 'martin'}) + + # After that, the existing entity is retrieved. + entity = session.ensure('User', {'username': 'martin'}) + + # When existing entity retrieved, entity may also be updated to + # match supplied data. + entity = session.ensure( + 'User', {'username': 'martin', 'email': 'martin@example.com'} + ) + + ''' + if not identifying_keys: + identifying_keys = data.keys() + + self.logger.debug(L( + 'Ensuring entity {0!r} with data {1!r} using identifying keys ' + '{2!r}', entity_type, data, identifying_keys + )) + + if not identifying_keys: + raise ValueError( + 'Could not determine any identifying data to check against ' + 'when ensuring {0!r} with data {1!r}. 
Identifying keys: {2!r}' + .format(entity_type, data, identifying_keys) + ) + + expression = '{0} where'.format(entity_type) + criteria = [] + for identifying_key in identifying_keys: + value = data[identifying_key] + + if isinstance(value, basestring): + value = '"{0}"'.format(value) + + elif isinstance( + value, (arrow.Arrow, datetime.datetime, datetime.date) + ): + # Server does not store microsecond or timezone currently so + # need to strip from query. + # TODO: When datetime handling improved, update this logic. + value = ( + arrow.get(value).naive.replace(microsecond=0).isoformat() + ) + value = '"{0}"'.format(value) + + criteria.append('{0} is {1}'.format(identifying_key, value)) + + expression = '{0} {1}'.format( + expression, ' and '.join(criteria) + ) + + try: + entity = self.query(expression).one() + + except ftrack_api.exception.NoResultFoundError: + self.logger.debug('Creating entity as did not already exist.') + + # Create entity. + entity = self.create(entity_type, data) + self.commit() + + else: + self.logger.debug('Retrieved matching existing entity.') + + # Update entity if required. + updated = False + for key, target_value in data.items(): + if entity[key] != target_value: + entity[key] = target_value + updated = True + + if updated: + self.logger.debug('Updating existing entity to match new data.') + self.commit() + + return entity + + def delete(self, entity): + '''Mark *entity* for deletion.''' + if self.record_operations: + self.recorded_operations.push( + ftrack_api.operation.DeleteEntityOperation( + entity.entity_type, + ftrack_api.inspection.primary_key(entity) + ) + ) + + def get(self, entity_type, entity_key): + '''Return entity of *entity_type* with unique *entity_key*. + + First check for an existing entry in the configured cache, otherwise + issue a query to the server. + + If no matching entity found, return None. 
+ + ''' + self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) + + primary_key_definition = self.types[entity_type].primary_key_attributes + if isinstance(entity_key, basestring): + entity_key = [entity_key] + + if len(entity_key) != len(primary_key_definition): + raise ValueError( + 'Incompatible entity_key {0!r} supplied. Entity type {1} ' + 'expects a primary key composed of {2} values ({3}).' + .format( + entity_key, entity_type, len(primary_key_definition), + ', '.join(primary_key_definition) + ) + ) + + entity = None + try: + entity = self._get(entity_type, entity_key) + + + except KeyError: + + # Query for matching entity. + self.logger.debug( + 'Entity not present in cache. Issuing new query.' + ) + condition = [] + for key, value in zip(primary_key_definition, entity_key): + condition.append('{0} is "{1}"'.format(key, value)) + + expression = '{0} where ({1})'.format( + entity_type, ' and '.join(condition) + ) + + results = self.query(expression).all() + if results: + entity = results[0] + + return entity + + def _get(self, entity_type, entity_key): + '''Return cached entity of *entity_type* with unique *entity_key*. + + Raise :exc:`KeyError` if no such entity in the cache. + + ''' + # Check cache for existing entity emulating + # ftrack_api.inspection.identity result object to pass to key maker. + cache_key = self.cache_key_maker.key( + (str(entity_type), map(str, entity_key)) + ) + self.logger.debug(L( + 'Checking cache for entity with key {0}', cache_key + )) + entity = self.cache.get(cache_key) + self.logger.debug(L( + 'Retrieved existing entity from cache: {0} at {1}', + entity, id(entity) + )) + + return entity + + def query(self, expression, page_size=500): + '''Query against remote data according to *expression*. + + *expression* is not executed directly. Instead return an + :class:`ftrack_api.query.QueryResult` instance that will execute remote + call on access. 
+ + *page_size* specifies the maximum page size that the returned query + result object should be configured with. + + .. seealso:: :ref:`querying` + + ''' + self.logger.debug(L('Query {0!r}', expression)) + + # Add in sensible projections if none specified. Note that this is + # done here rather than on the server to allow local modification of the + # schema setting to include commonly used custom attributes for example. + # TODO: Use a proper parser perhaps? + if not expression.startswith('select'): + entity_type = expression.split(' ', 1)[0] + EntityTypeClass = self.types[entity_type] + projections = EntityTypeClass.default_projections + + expression = 'select {0} from {1}'.format( + ', '.join(projections), + expression + ) + + query_result = ftrack_api.query.QueryResult( + self, expression, page_size=page_size + ) + return query_result + + def _query(self, expression): + '''Execute *query* and return (records, metadata). + + Records will be a list of entities retrieved via the query and metadata + a dictionary of accompanying information about the result set. + + ''' + # TODO: Actually support batching several queries together. + # TODO: Should batches have unique ids to match them up later. + batch = [{ + 'action': 'query', + 'expression': expression + }] + + # TODO: When should this execute? How to handle background=True? + results = self.call(batch) + + # Merge entities into local cache and return merged entities. + data = [] + merged = dict() + for entity in results[0]['data']: + data.append(self._merge_recursive(entity, merged)) + + return data, results[0]['metadata'] + + def merge(self, value, merged=None): + '''Merge *value* into session and return merged value. + + *merged* should be a mapping to record merges during run and should be + used to avoid infinite recursion. If not set will default to a + dictionary. 
+ + ''' + if merged is None: + merged = {} + + with self.operation_recording(False): + return self._merge(value, merged) + + def _merge(self, value, merged): + '''Return merged *value*.''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if isinstance(value, ftrack_api.entity.base.Entity): + log_debug and self.logger.debug( + 'Merging entity into session: {0} at {1}' + .format(value, id(value)) + ) + + return self._merge_entity(value, merged=merged) + + elif isinstance(value, ftrack_api.collection.Collection): + log_debug and self.logger.debug( + 'Merging collection into session: {0!r} at {1}' + .format(value, id(value)) + ) + + merged_collection = [] + for entry in value: + merged_collection.append( + self._merge(entry, merged=merged) + ) + + return merged_collection + + elif isinstance(value, ftrack_api.collection.MappedCollectionProxy): + log_debug and self.logger.debug( + 'Merging mapped collection into session: {0!r} at {1}' + .format(value, id(value)) + ) + + merged_collection = [] + for entry in value.collection: + merged_collection.append( + self._merge(entry, merged=merged) + ) + + return merged_collection + + else: + return value + + def _merge_recursive(self, entity, merged=None): + '''Merge *entity* and all its attributes recursivly.''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + attached = self.merge(entity, merged) + + for attribute in entity.attributes: + # Remote attributes. 
+ remote_value = attribute.get_remote_value(entity) + + if isinstance( + remote_value, + ( + ftrack_api.entity.base.Entity, + ftrack_api.collection.Collection, + ftrack_api.collection.MappedCollectionProxy + ) + ): + log_debug and self.logger.debug( + 'Merging remote value for attribute {0}.'.format(attribute) + ) + + if isinstance(remote_value, ftrack_api.entity.base.Entity): + self._merge_recursive(remote_value, merged=merged) + + elif isinstance( + remote_value, ftrack_api.collection.Collection + ): + for entry in remote_value: + self._merge_recursive(entry, merged=merged) + + elif isinstance( + remote_value, ftrack_api.collection.MappedCollectionProxy + ): + for entry in remote_value.collection: + self._merge_recursive(entry, merged=merged) + + return attached + + def _merge_entity(self, entity, merged=None): + '''Merge *entity* into session returning merged entity. + + Merge is recursive so any references to other entities will also be + merged. + + *entity* will never be modified in place. Ensure that the returned + merged entity instance is used. + + ''' + log_debug = self.logger.isEnabledFor(logging.DEBUG) + + if merged is None: + merged = {} + + with self.auto_populating(False): + entity_key = self.cache_key_maker.key( + ftrack_api.inspection.identity(entity) + ) + + # Check whether this entity has already been processed. + attached_entity = merged.get(entity_key) + if attached_entity is not None: + log_debug and self.logger.debug( + 'Entity already processed for key {0} as {1} at {2}' + .format(entity_key, attached_entity, id(attached_entity)) + ) + + return attached_entity + else: + log_debug and self.logger.debug( + 'Entity not already processed for key {0}.' + .format(entity_key) + ) + + # Check for existing instance of entity in cache. 
+ log_debug and self.logger.debug( + 'Checking for entity in cache with key {0}'.format(entity_key) + ) + try: + attached_entity = self.cache.get(entity_key) + + log_debug and self.logger.debug( + 'Retrieved existing entity from cache: {0} at {1}' + .format(attached_entity, id(attached_entity)) + ) + + except KeyError: + # Construct new minimal instance to store in cache. + attached_entity = self._create( + entity.entity_type, {}, reconstructing=True + ) + + log_debug and self.logger.debug( + 'Entity not present in cache. Constructed new instance: ' + '{0} at {1}'.format(attached_entity, id(attached_entity)) + ) + + # Mark entity as seen to avoid infinite loops. + merged[entity_key] = attached_entity + + changes = attached_entity.merge(entity, merged=merged) + if changes: + self.cache.set(entity_key, attached_entity) + self.logger.debug('Cache updated with merged entity.') + + else: + self.logger.debug( + 'Cache not updated with merged entity as no differences ' + 'detected.' + ) + + return attached_entity + + def populate(self, entities, projections): + '''Populate *entities* with attributes specified by *projections*. + + Any locally set values included in the *projections* will not be + overwritten with the retrieved remote value. If this 'synchronise' + behaviour is required, first clear the relevant values on the entity by + setting them to :attr:`ftrack_api.symbol.NOT_SET`. Deleting the key will + have the same effect:: + + >>> print(user['username']) + martin + >>> del user['username'] + >>> print(user['username']) + Symbol(NOT_SET) + + .. note:: + + Entities that have been created and not yet persisted will be + skipped as they have no remote values to fetch. 
+ + ''' + self.logger.debug(L( + 'Populate {0!r} projections for {1}.', projections, entities + )) + + if not isinstance( + entities, (list, tuple, ftrack_api.query.QueryResult) + ): + entities = [entities] + + # TODO: How to handle a mixed collection of different entity types + # Should probably fail, but need to consider handling hierarchies such + # as User and Group both deriving from Resource. Actually, could just + # proceed and ignore projections that are not present in entity type. + + entities_to_process = [] + + for entity in entities: + if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: + # Created entities that are not yet persisted have no remote + # values. Don't raise an error here as it is reasonable to + # iterate over an entities properties and see that some of them + # are NOT_SET. + self.logger.debug(L( + 'Skipping newly created entity {0!r} for population as no ' + 'data will exist in the remote for this entity yet.', entity + )) + continue + + entities_to_process.append(entity) + + if entities_to_process: + reference_entity = entities_to_process[0] + entity_type = reference_entity.entity_type + query = 'select {0} from {1}'.format(projections, entity_type) + + primary_key_definition = reference_entity.primary_key_attributes + entity_keys = [ + ftrack_api.inspection.primary_key(entity).values() + for entity in entities_to_process + ] + + if len(primary_key_definition) > 1: + # Composite keys require full OR syntax unfortunately. 
+ conditions = [] + for entity_key in entity_keys: + condition = [] + for key, value in zip(primary_key_definition, entity_key): + condition.append('{0} is "{1}"'.format(key, value)) + + conditions.append('({0})'.format('and '.join(condition))) + + query = '{0} where {1}'.format(query, ' or '.join(conditions)) + + else: + primary_key = primary_key_definition[0] + + if len(entity_keys) > 1: + query = '{0} where {1} in ({2})'.format( + query, primary_key, + ','.join([ + str(entity_key[0]) for entity_key in entity_keys + ]) + ) + else: + query = '{0} where {1} is {2}'.format( + query, primary_key, str(entity_keys[0][0]) + ) + + result = self.query(query) + + # Fetch all results now. Doing so will cause them to populate the + # relevant entities in the cache. + result.all() + + # TODO: Should we check that all requested attributes were + # actually populated? If some weren't would we mark that to avoid + # repeated calls or perhaps raise an error? + + # TODO: Make atomic. + def commit(self): + '''Commit all local changes to the server.''' + batch = [] + + with self.auto_populating(False): + for operation in self.recorded_operations: + + # Convert operation to payload. + if isinstance( + operation, ftrack_api.operation.CreateEntityOperation + ): + # At present, data payload requires duplicating entity + # type in data and also ensuring primary key added. + entity_data = { + '__entity_type__': operation.entity_type, + } + entity_data.update(operation.entity_key) + entity_data.update(operation.entity_data) + + payload = OperationPayload({ + 'action': 'create', + 'entity_type': operation.entity_type, + 'entity_key': operation.entity_key.values(), + 'entity_data': entity_data + }) + + elif isinstance( + operation, ftrack_api.operation.UpdateEntityOperation + ): + entity_data = { + # At present, data payload requires duplicating entity + # type. 
+ '__entity_type__': operation.entity_type, + operation.attribute_name: operation.new_value + } + + payload = OperationPayload({ + 'action': 'update', + 'entity_type': operation.entity_type, + 'entity_key': operation.entity_key.values(), + 'entity_data': entity_data + }) + + elif isinstance( + operation, ftrack_api.operation.DeleteEntityOperation + ): + payload = OperationPayload({ + 'action': 'delete', + 'entity_type': operation.entity_type, + 'entity_key': operation.entity_key.values() + }) + + else: + raise ValueError( + 'Cannot commit. Unrecognised operation type {0} ' + 'detected.'.format(type(operation)) + ) + + batch.append(payload) + + # Optimise batch. + # TODO: Might be better to perform these on the operations list instead + # so all operation contextual information available. + + # If entity was created and deleted in one batch then remove all + # payloads for that entity. + created = set() + deleted = set() + + for payload in batch: + if payload['action'] == 'create': + created.add( + (payload['entity_type'], str(payload['entity_key'])) + ) + + elif payload['action'] == 'delete': + deleted.add( + (payload['entity_type'], str(payload['entity_key'])) + ) + + created_then_deleted = deleted.intersection(created) + if created_then_deleted: + optimised_batch = [] + for payload in batch: + entity_type = payload.get('entity_type') + entity_key = str(payload.get('entity_key')) + + if (entity_type, entity_key) in created_then_deleted: + continue + + optimised_batch.append(payload) + + batch = optimised_batch + + # Remove early update operations so that only last operation on + # attribute is applied server side. 
+ updates_map = set() + for payload in reversed(batch): + if payload['action'] in ('update', ): + for key, value in payload['entity_data'].items(): + if key == '__entity_type__': + continue + + identity = ( + payload['entity_type'], str(payload['entity_key']), key + ) + if identity in updates_map: + del payload['entity_data'][key] + else: + updates_map.add(identity) + + # Remove NOT_SET values from entity_data. + for payload in batch: + entity_data = payload.get('entity_data', {}) + for key, value in entity_data.items(): + if value is ftrack_api.symbol.NOT_SET: + del entity_data[key] + + # Remove payloads with redundant entity_data. + optimised_batch = [] + for payload in batch: + entity_data = payload.get('entity_data') + if entity_data is not None: + keys = entity_data.keys() + if not keys or keys == ['__entity_type__']: + continue + + optimised_batch.append(payload) + + batch = optimised_batch + + # Collapse updates that are consecutive into one payload. Also, collapse + # updates that occur immediately after creation into the create payload. + optimised_batch = [] + previous_payload = None + + for payload in batch: + if ( + previous_payload is not None + and payload['action'] == 'update' + and previous_payload['action'] in ('create', 'update') + and previous_payload['entity_type'] == payload['entity_type'] + and previous_payload['entity_key'] == payload['entity_key'] + ): + previous_payload['entity_data'].update(payload['entity_data']) + continue + + else: + optimised_batch.append(payload) + previous_payload = payload + + batch = optimised_batch + + # Process batch. + if batch: + result = self.call(batch) + + # Clear recorded operations. + self.recorded_operations.clear() + + # As optimisation, clear local values which are not primary keys to + # avoid redundant merges when merging references. Note: primary keys + # remain as needed for cache retrieval on new entities. 
+ with self.auto_populating(False): + with self.operation_recording(False): + for entity in self._local_cache.values(): + for attribute in entity: + if attribute not in entity.primary_key_attributes: + del entity[attribute] + + # Process results merging into cache relevant data. + for entry in result: + + if entry['action'] in ('create', 'update'): + # Merge returned entities into local cache. + self.merge(entry['data']) + + elif entry['action'] == 'delete': + # TODO: Detach entity - need identity returned? + # TODO: Expunge entity from cache. + pass + # Clear remaining local state, including local values for primary + # keys on entities that were merged. + with self.auto_populating(False): + with self.operation_recording(False): + for entity in self._local_cache.values(): + entity.clear() + + def rollback(self): + '''Clear all recorded operations and local state. + + Typically this would be used following a failed :meth:`commit` in order + to revert the session to a known good state. + + Newly created entities not yet persisted will be detached from the + session / purged from cache and no longer contribute, but the actual + objects are not deleted from memory. They should no longer be used and + doing so could cause errors. + + ''' + with self.auto_populating(False): + with self.operation_recording(False): + + # Detach all newly created entities and remove from cache. This + # is done because simply clearing the local values of newly + # created entities would result in entities with no identity as + # primary key was local while not persisted. In addition, it + # makes no sense for failed created entities to exist in session + # or cache. 
+ for operation in self.recorded_operations: + if isinstance( + operation, ftrack_api.operation.CreateEntityOperation + ): + entity_key = str(( + str(operation.entity_type), + operation.entity_key.values() + )) + try: + self.cache.remove(entity_key) + except KeyError: + pass + + # Clear locally stored modifications on remaining entities. + for entity in self._local_cache.values(): + entity.clear() + + self.recorded_operations.clear() + + def _fetch_server_information(self): + '''Return server information.''' + result = self.call([{'action': 'query_server_information'}]) + return result[0] + + def _discover_plugins(self, plugin_arguments=None): + '''Find and load plugins in search paths. + + Each discovered module should implement a register function that + accepts this session as first argument. Typically the function should + register appropriate event listeners against the session's event hub. + + def register(session): + session.event_hub.subscribe( + 'topic=ftrack.api.session.construct-entity-type', + construct_entity_type + ) + + *plugin_arguments* should be an optional mapping of keyword arguments + and values to pass to plugin register functions upon discovery. + + ''' + plugin_arguments = plugin_arguments or {} + ftrack_api.plugin.discover( + self._plugin_paths, [self], plugin_arguments + ) + + def _read_schemas_from_cache(self, schema_cache_path): + '''Return schemas and schema hash from *schema_cache_path*. + + *schema_cache_path* should be the path to the file containing the + schemas in JSON format. 
+ + ''' + self.logger.debug(L( + 'Reading schemas from cache {0!r}', schema_cache_path + )) + + if not os.path.exists(schema_cache_path): + self.logger.info(L( + 'Cache file not found at {0!r}.', schema_cache_path + )) + + return [], None + + with open(schema_cache_path, 'r') as schema_file: + schemas = json.load(schema_file) + hash_ = hashlib.md5( + json.dumps(schemas, sort_keys=True) + ).hexdigest() + + return schemas, hash_ + + def _write_schemas_to_cache(self, schemas, schema_cache_path): + '''Write *schemas* to *schema_cache_path*. + + *schema_cache_path* should be a path to a file that the schemas can be + written to in JSON format. + + ''' + self.logger.debug(L( + 'Updating schema cache {0!r} with new schemas.', schema_cache_path + )) + + with open(schema_cache_path, 'w') as local_cache_file: + json.dump(schemas, local_cache_file, indent=4) + + def _load_schemas(self, schema_cache_path): + '''Load schemas. + + First try to load schemas from cache at *schema_cache_path*. If the + cache is not available or the cache appears outdated then load schemas + from server and store fresh copy in cache. + + If *schema_cache_path* is set to `False`, always load schemas from + server bypassing cache. + + ''' + local_schema_hash = None + schemas = [] + + if schema_cache_path: + try: + schemas, local_schema_hash = self._read_schemas_from_cache( + schema_cache_path + ) + except (IOError, TypeError, AttributeError, ValueError): + # Catch any known exceptions when trying to read the local + # schema cache to prevent API from being unusable. + self.logger.exception(L( + 'Schema cache could not be loaded from {0!r}', + schema_cache_path + )) + + # Use `dictionary.get` to retrieve hash to support older version of + # ftrack server not returning a schema hash. + server_hash = self._server_information.get( + 'schema_hash', False + ) + if local_schema_hash != server_hash: + self.logger.debug(L( + 'Loading schemas from server due to hash not matching.' 
+ 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash + )) + schemas = self.call([{'action': 'query_schemas'}])[0] + + if schema_cache_path: + try: + self._write_schemas_to_cache(schemas, schema_cache_path) + except (IOError, TypeError): + self.logger.exception(L( + 'Failed to update schema cache {0!r}.', + schema_cache_path + )) + + else: + self.logger.debug(L( + 'Using cached schemas from {0!r}', schema_cache_path + )) + + return schemas + + def _build_entity_type_classes(self, schemas): + '''Build default entity type classes.''' + fallback_factory = ftrack_api.entity.factory.StandardFactory() + classes = {} + + for schema in schemas: + results = self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.construct-entity-type', + data=dict( + schema=schema, + schemas=schemas + ) + ), + synchronous=True + ) + + results = [result for result in results if result is not None] + + if not results: + self.logger.debug(L( + 'Using default StandardFactory to construct entity type ' + 'class for "{0}"', schema['id'] + )) + entity_type_class = fallback_factory.create(schema) + + elif len(results) > 1: + raise ValueError( + 'Expected single entity type to represent schema "{0}" but ' + 'received {1} entity types instead.' + .format(schema['id'], len(results)) + ) + + else: + entity_type_class = results[0] + + classes[entity_type_class.entity_type] = entity_type_class + + return classes + + def _configure_locations(self): + '''Configure locations.''' + # First configure builtin locations, by injecting them into local cache. + + # Origin. 
+ location = self.create( + 'Location', + data=dict( + name='ftrack.origin', + id=ftrack_api.symbol.ORIGIN_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.OriginLocationMixin, + name='OriginLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + location.priority = 100 + + # Unmanaged. + location = self.create( + 'Location', + data=dict( + name='ftrack.unmanaged', + id=ftrack_api.symbol.UNMANAGED_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.UnmanagedLocationMixin, + name='UnmanagedLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + # location.resource_identifier_transformer = ( + # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) + # ) + location.priority = 90 + + # Review. + location = self.create( + 'Location', + data=dict( + name='ftrack.review', + id=ftrack_api.symbol.REVIEW_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.UnmanagedLocationMixin, + name='UnmanagedLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + location.priority = 110 + + # Server. + location = self.create( + 'Location', + data=dict( + name='ftrack.server', + id=ftrack_api.symbol.SERVER_LOCATION_ID + ), + reconstructing=True + ) + ftrack_api.mixin( + location, ftrack_api.entity.location.ServerLocationMixin, + name='ServerLocation' + ) + location.accessor = ftrack_api.accessor.server._ServerAccessor( + session=self + ) + location.structure = ftrack_api.structure.entity_id.EntityIdStructure() + location.priority = 150 + + # Master location based on server scenario. 
+ storage_scenario = self.server_information.get('storage_scenario') + + if ( + storage_scenario and + storage_scenario.get('scenario') + ): + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.storage-scenario.activate', + data=dict( + storage_scenario=storage_scenario + ) + ), + synchronous=True + ) + + # Next, allow further configuration of locations via events. + self.event_hub.publish( + ftrack_api.event.base.Event( + topic='ftrack.api.session.configure-location', + data=dict( + session=self + ) + ), + synchronous=True + ) + + @ftrack_api.logging.deprecation_warning( + 'Session._call is now available as public method Session.call. The ' + 'private method will be removed in version 2.0.' + ) + def _call(self, data): + '''Make request to server with *data* batch describing the actions. + + .. note:: + + This private method is now available as public method + :meth:`entity_reference`. This alias remains for backwards + compatibility, but will be removed in version 2.0. + + ''' + return self.call(data) + + def call(self, data): + '''Make request to server with *data* batch describing the actions.''' + url = self._server_url + '/api' + headers = { + 'content-type': 'application/json', + 'accept': 'application/json' + } + data = self.encode(data, entity_attribute_strategy='modified_only') + + self.logger.debug(L('Calling server {0} with {1!r}', url, data)) + + response = self._request.post( + url, + headers=headers, + data=data + ) + + self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) + + self.logger.debug(L('Response: {0!r}', response.text)) + try: + result = self.decode(response.text) + + except Exception: + error_message = ( + 'Server reported error in unexpected format. Raw error was: {0}' + .format(response.text) + ) + self.logger.exception(error_message) + raise ftrack_api.exception.ServerError(error_message) + + else: + if 'exception' in result: + # Handle exceptions. 
+ error_message = 'Server reported error: {0}({1})'.format( + result['exception'], result['content'] + ) + self.logger.exception(error_message) + raise ftrack_api.exception.ServerError(error_message) + + return result + + def encode(self, data, entity_attribute_strategy='set_only'): + '''Return *data* encoded as JSON formatted string. + + *entity_attribute_strategy* specifies how entity attributes should be + handled. The following strategies are available: + + * *all* - Encode all attributes, loading any that are currently NOT_SET. + * *set_only* - Encode only attributes that are currently set without + loading any from the remote. + * *modified_only* - Encode only attributes that have been modified + locally. + * *persisted_only* - Encode only remote (persisted) attribute values. + + ''' + entity_attribute_strategies = ( + 'all', 'set_only', 'modified_only', 'persisted_only' + ) + if entity_attribute_strategy not in entity_attribute_strategies: + raise ValueError( + 'Unsupported entity_attribute_strategy "{0}". Must be one of ' + '{1}'.format( + entity_attribute_strategy, + ', '.join(entity_attribute_strategies) + ) + ) + + return json.dumps( + data, + sort_keys=True, + default=functools.partial( + self._encode, + entity_attribute_strategy=entity_attribute_strategy + ) + ) + + def _encode(self, item, entity_attribute_strategy='set_only'): + '''Return JSON encodable version of *item*. + + *entity_attribute_strategy* specifies how entity attributes should be + handled. See :meth:`Session.encode` for available strategies. 
+ + ''' + if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): + return { + '__type__': 'datetime', + 'value': item.isoformat() + } + + if isinstance(item, OperationPayload): + data = dict(item.items()) + if "entity_data" in data: + for key, value in data["entity_data"].items(): + if isinstance(value, ftrack_api.entity.base.Entity): + data["entity_data"][key] = self.entity_reference(value) + + return data + + if isinstance(item, ftrack_api.entity.base.Entity): + data = self.entity_reference(item) + + with self.auto_populating(True): + + for attribute in item.attributes: + value = ftrack_api.symbol.NOT_SET + + if entity_attribute_strategy == 'all': + value = attribute.get_value(item) + + elif entity_attribute_strategy == 'set_only': + if attribute.is_set(item): + value = attribute.get_local_value(item) + if value is ftrack_api.symbol.NOT_SET: + value = attribute.get_remote_value(item) + + elif entity_attribute_strategy == 'modified_only': + if attribute.is_modified(item): + value = attribute.get_local_value(item) + + elif entity_attribute_strategy == 'persisted_only': + if not attribute.computed: + value = attribute.get_remote_value(item) + + if value is not ftrack_api.symbol.NOT_SET: + if isinstance( + attribute, ftrack_api.attribute.ReferenceAttribute + ): + if isinstance(value, ftrack_api.entity.base.Entity): + value = self.entity_reference(value) + + data[attribute.name] = value + + return data + + if isinstance( + item, ftrack_api.collection.MappedCollectionProxy + ): + # Use proxied collection for serialisation. + item = item.collection + + if isinstance(item, ftrack_api.collection.Collection): + data = [] + for entity in item: + data.append(self.entity_reference(entity)) + + return data + + raise TypeError('{0!r} is not JSON serializable'.format(item)) + + def entity_reference(self, entity): + '''Return entity reference that uniquely identifies *entity*. 
+ + Return a mapping containing the __entity_type__ of the entity along with + the key, value pairs that make up it's primary key. + + ''' + reference = { + '__entity_type__': entity.entity_type + } + with self.auto_populating(False): + reference.update(ftrack_api.inspection.primary_key(entity)) + + return reference + + @ftrack_api.logging.deprecation_warning( + 'Session._entity_reference is now available as public method ' + 'Session.entity_reference. The private method will be removed ' + 'in version 2.0.' + ) + def _entity_reference(self, entity): + '''Return entity reference that uniquely identifies *entity*. + + Return a mapping containing the __entity_type__ of the entity along + with the key, value pairs that make up it's primary key. + + .. note:: + + This private method is now available as public method + :meth:`entity_reference`. This alias remains for backwards + compatibility, but will be removed in version 2.0. + + ''' + return self.entity_reference(entity) + + def decode(self, string): + '''Return decoded JSON *string* as Python object.''' + with self.operation_recording(False): + return json.loads(string, object_hook=self._decode) + + def _decode(self, item): + '''Return *item* transformed into appropriate representation.''' + if isinstance(item, collections.Mapping): + if '__type__' in item: + if item['__type__'] == 'datetime': + item = arrow.get(item['value']) + + elif '__entity_type__' in item: + item = self._create( + item['__entity_type__'], item, reconstructing=True + ) + + return item + + def _get_locations(self, filter_inaccessible=True): + '''Helper to returns locations ordered by priority. + + If *filter_inaccessible* is True then only accessible locations will be + included in result. + + ''' + # Optimise this call. + locations = self.query('Location') + + # Filter. + if filter_inaccessible: + locations = filter( + lambda location: location.accessor, + locations + ) + + # Sort by priority. 
+ locations = sorted( + locations, key=lambda location: location.priority + ) + + return locations + + def pick_location(self, component=None): + '''Return suitable location to use. + + If no *component* specified then return highest priority accessible + location. Otherwise, return highest priority accessible location that + *component* is available in. + + Return None if no suitable location could be picked. + + ''' + if component: + return self.pick_locations([component])[0] + + else: + locations = self._get_locations() + if locations: + return locations[0] + else: + return None + + def pick_locations(self, components): + '''Return suitable locations for *components*. + + Return list of locations corresponding to *components* where each + picked location is the highest priority accessible location for that + component. If a component has no location available then its + corresponding entry will be None. + + ''' + candidate_locations = self._get_locations() + availabilities = self.get_component_availabilities( + components, locations=candidate_locations + ) + + locations = [] + for component, availability in zip(components, availabilities): + location = None + + for candidate_location in candidate_locations: + if availability.get(candidate_location['id']) > 0.0: + location = candidate_location + break + + locations.append(location) + + return locations + + def create_component( + self, path, data=None, location='auto' + ): + '''Create a new component from *path* with additional *data* + + .. note:: + + This is a helper method. To create components manually use the + standard :meth:`Session.create` method. + + *path* can be a string representing a filesystem path to the data to + use for the component. The *path* can also be specified as a sequence + string, in which case a sequence component with child components for + each item in the sequence will be created automatically. The accepted + format for a sequence is '{head}{padding}{tail} [{ranges}]'. 
For + example:: + + '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' + + .. seealso:: + + `Clique documentation `_ + + *data* should be a dictionary of any additional data to construct the + component with (as passed to :meth:`Session.create`). + + If *location* is specified then automatically add component to that + location. The default of 'auto' will automatically pick a suitable + location to add the component to if one is available. To not add to any + location specifiy locations as None. + + .. note:: + + A :meth:`Session.commit` may be + automatically issued as part of the components registration in the + location. + ''' + if data is None: + data = {} + + if location == 'auto': + # Check if the component name matches one of the ftrackreview + # specific names. Add the component to the ftrack.review location if + # so. This is used to not break backwards compatibility. + if data.get('name') in ( + 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' + ): + location = self.get( + 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID + ) + + else: + location = self.pick_location() + + try: + collection = clique.parse(path) + + except ValueError: + # Assume is a single file. + if 'size' not in data: + data['size'] = self._get_filesystem_size(path) + + data.setdefault('file_type', os.path.splitext(path)[-1]) + + return self._create_component( + 'FileComponent', path, data, location + ) + + else: + # Calculate size of container and members. 
+ member_sizes = {} + container_size = data.get('size') + + if container_size is not None: + if len(collection.indexes) > 0: + member_size = int( + round(container_size / len(collection.indexes)) + ) + for item in collection: + member_sizes[item] = member_size + + else: + container_size = 0 + for item in collection: + member_sizes[item] = self._get_filesystem_size(item) + container_size += member_sizes[item] + + # Create sequence component + container_path = collection.format('{head}{padding}{tail}') + data.setdefault('padding', collection.padding) + data.setdefault('file_type', os.path.splitext(container_path)[-1]) + data.setdefault('size', container_size) + + container = self._create_component( + 'SequenceComponent', container_path, data, location=None + ) + + # Create member components for sequence. + for member_path in collection: + member_data = { + 'name': collection.match(member_path).group('index'), + 'container': container, + 'size': member_sizes[member_path], + 'file_type': os.path.splitext(member_path)[-1] + } + + component = self._create_component( + 'FileComponent', member_path, member_data, location=None + ) + container['members'].append(component) + + if location: + origin_location = self.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + location.add_component( + container, origin_location, recursive=True + ) + + return container + + def _create_component(self, entity_type, path, data, location): + '''Create and return component. + + See public function :py:func:`createComponent` for argument details. + + ''' + component = self.create(entity_type, data) + + # Add to special origin location so that it is possible to add to other + # locations. 
+ origin_location = self.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + origin_location.add_component(component, path, recursive=False) + + if location: + location.add_component(component, origin_location, recursive=False) + + return component + + def _get_filesystem_size(self, path): + '''Return size from *path*''' + try: + size = os.path.getsize(path) + except OSError: + size = 0 + + return size + + def get_component_availability(self, component, locations=None): + '''Return availability of *component*. + + If *locations* is set then limit result to availability of *component* + in those *locations*. + + Return a dictionary of {location_id:percentage_availability} + + ''' + return self.get_component_availabilities( + [component], locations=locations + )[0] + + def get_component_availabilities(self, components, locations=None): + '''Return availabilities of *components*. + + If *locations* is set then limit result to availabilities of + *components* in those *locations*. + + Return a list of dictionaries of {location_id:percentage_availability}. + The list indexes correspond to those of *components*. + + ''' + availabilities = [] + + if locations is None: + locations = self.query('Location') + + # Separate components into two lists, those that are containers and + # those that are not, so that queries can be optimised. + standard_components = [] + container_components = [] + + for component in components: + if 'members' in component.keys(): + container_components.append(component) + else: + standard_components.append(component) + + # Perform queries. 
+ if standard_components: + self.populate( + standard_components, 'component_locations.location_id' + ) + + if container_components: + self.populate( + container_components, + 'members, component_locations.location_id' + ) + + base_availability = {} + for location in locations: + base_availability[location['id']] = 0.0 + + for component in components: + availability = base_availability.copy() + availabilities.append(availability) + + is_container = 'members' in component.keys() + if is_container and len(component['members']): + member_availabilities = self.get_component_availabilities( + component['members'], locations=locations + ) + multiplier = 1.0 / len(component['members']) + for member, member_availability in zip( + component['members'], member_availabilities + ): + for location_id, ratio in member_availability.items(): + availability[location_id] += ( + ratio * multiplier + ) + else: + for component_location in component['component_locations']: + location_id = component_location['location_id'] + if location_id in availability: + availability[location_id] = 100.0 + + for location_id, percentage in availability.items(): + # Avoid quantization error by rounding percentage and clamping + # to range 0-100. + adjusted_percentage = round(percentage, 9) + adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) + availability[location_id] = adjusted_percentage + + return availabilities + + @ftrack_api.logging.deprecation_warning( + 'Session.delayed_job has been deprecated in favour of session.call. ' + 'Please refer to the release notes for more information.' + ) + def delayed_job(self, job_type): + '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. + + *job_type* should be one of the allowed job types. There is currently + only one remote job type "SYNC_USERS_LDAP". 
+ ''' + if job_type not in (ftrack_api.symbol.JOB_SYNC_USERS_LDAP, ): + raise ValueError( + u'Invalid Job type: {0}.'.format(job_type) + ) + + operation = { + 'action': 'delayed_job', + 'job_type': job_type.name + } + + try: + result = self.call( + [operation] + )[0] + + except ftrack_api.exception.ServerError as error: + raise + + return result['data'] + + def get_widget_url(self, name, entity=None, theme=None): + '''Return an authenticated URL for widget with *name* and given options. + + The returned URL will be authenticated using a token which will expire + after 6 minutes. + + *name* should be the name of the widget to return and should be one of + 'info', 'tasks' or 'tasks_browser'. + + Certain widgets require an entity to be specified. If so, specify it by + setting *entity* to a valid entity instance. + + *theme* sets the theme of the widget and can be either 'light' or 'dark' + (defaulting to 'dark' if an invalid option given). + + ''' + operation = { + 'action': 'get_widget_url', + 'name': name, + 'theme': theme + } + if entity: + operation['entity_type'] = entity.entity_type + operation['entity_key'] = ( + ftrack_api.inspection.primary_key(entity).values() + ) + + try: + result = self.call([operation]) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'get_widget_url\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support "get_widget_url", ' + 'please update server and try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + else: + return result[0]['widget_url'] + + def encode_media(self, media, version_id=None, keep_original='auto'): + '''Return a new Job that encode *media* to make it playable in browsers. + + *media* can be a path to a file or a FileComponent in the ftrack.server + location. 
+ + The job will encode *media* based on the file type and job data contains + information about encoding in the following format:: + + { + 'output': [{ + 'format': 'video/mp4', + 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' + }, { + 'format': 'image/jpeg', + 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' + }], + 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', + 'keep_original': True + } + + The output components are associated with the job via the job_components + relation. + + An image component will always be generated if possible that can be used + as a thumbnail. + + If *media* is a file path, a new source component will be created and + added to the ftrack server location and a call to :meth:`commit` will be + issued. If *media* is a FileComponent, it will be assumed to be in + available in the ftrack.server location. + + If *version_id* is specified, the new components will automatically be + associated with the AssetVersion. Otherwise, the components will not + be associated to a version even if the supplied *media* belongs to one. + A server version of 3.3.32 or higher is required for the version_id + argument to function properly. + + If *keep_original* is not set, the original media will be kept if it + is a FileComponent, and deleted if it is a file path. You can specify + True or False to change this behavior. + ''' + if isinstance(media, basestring): + # Media is a path to a file. + server_location = self.get( + 'Location', ftrack_api.symbol.SERVER_LOCATION_ID + ) + if keep_original == 'auto': + keep_original = False + + component_data = None + if keep_original: + component_data = dict(version_id=version_id) + + component = self.create_component( + path=media, + data=component_data, + location=server_location + ) + + # Auto commit to ensure component exists when sent to server. + self.commit() + + elif ( + hasattr(media, 'entity_type') and + media.entity_type in ('FileComponent',) + ): + # Existing file component. 
+ component = media + if keep_original == 'auto': + keep_original = True + + else: + raise ValueError( + 'Unable to encode media of type: {0}'.format(type(media)) + ) + + operation = { + 'action': 'encode_media', + 'component_id': component['id'], + 'version_id': version_id, + 'keep_original': keep_original + } + + try: + result = self.call([operation]) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'encode_media\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support "encode_media", ' + 'please update server and try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + return self.get('Job', result[0]['job_id']) + + def get_upload_metadata( + self, component_id, file_name, file_size, checksum=None + ): + '''Return URL and headers used to upload data for *component_id*. + + *file_name* and *file_size* should match the components details. + + The returned URL should be requested using HTTP PUT with the specified + headers. + + The *checksum* is used as the Content-MD5 header and should contain + the base64-encoded 128-bit MD5 digest of the message (without the + headers) according to RFC 1864. This can be used as a message integrity + check to verify that the data is the same data that was originally sent. + ''' + operation = { + 'action': 'get_upload_metadata', + 'component_id': component_id, + 'file_name': file_name, + 'file_size': file_size, + 'checksum': checksum + } + + try: + result = self.call([operation]) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. 
+ if 'Invalid action u\'get_upload_metadata\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support ' + '"get_upload_metadata", please update server and try ' + 'again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + return result[0] + + def send_user_invite(self, user): + '''Send a invitation to the provided *user*. + + *user* is a User instance + + ''' + + self.send_user_invites( + [user] + ) + + def send_user_invites(self, users): + '''Send a invitation to the provided *user*. + + *users* is a list of User instances + + ''' + + operations = [] + + for user in users: + operations.append( + { + 'action':'send_user_invite', + 'user_id': user['id'] + } + ) + + try: + self.call(operations) + + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'send_user_invite\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support ' + '"send_user_invite", please update server and ' + 'try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + def send_review_session_invite(self, invitee): + '''Send an invite to a review session to *invitee*. + + *invitee* is a instance of ReviewSessionInvitee. + + .. note:: + + The *invitee* must be committed. + + ''' + self.send_review_session_invites([invitee]) + + def send_review_session_invites(self, invitees): + '''Send an invite to a review session to a list of *invitees*. + + *invitee* is a list of ReviewSessionInvitee objects. + + .. note:: + + All *invitees* must be committed. 
+ + ''' + operations = [] + + for invitee in invitees: + operations.append( + { + 'action': 'send_review_session_invite', + 'review_session_invitee_id': invitee['id'] + } + ) + + try: + self.call(operations) + except ftrack_api.exception.ServerError as error: + # Raise informative error if the action is not supported. + if 'Invalid action u\'send_review_session_invite\'' in error.message: + raise ftrack_api.exception.ServerCompatibilityError( + 'Server version {0!r} does not support ' + '"send_review_session_invite", please update server and ' + 'try again.'.format( + self.server_information.get('version') + ) + ) + else: + raise + + +class AutoPopulatingContext(object): + '''Context manager for temporary change of session auto_populate value.''' + + def __init__(self, session, auto_populate): + '''Initialise context.''' + super(AutoPopulatingContext, self).__init__() + self._session = session + self._auto_populate = auto_populate + self._current_auto_populate = None + + def __enter__(self): + '''Enter context switching to desired auto populate setting.''' + self._current_auto_populate = self._session.auto_populate + self._session.auto_populate = self._auto_populate + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit context resetting auto populate to original setting.''' + self._session.auto_populate = self._current_auto_populate + + +class OperationRecordingContext(object): + '''Context manager for temporary change of session record_operations.''' + + def __init__(self, session, record_operations): + '''Initialise context.''' + super(OperationRecordingContext, self).__init__() + self._session = session + self._record_operations = record_operations + self._current_record_operations = None + + def __enter__(self): + '''Enter context.''' + self._current_record_operations = self._session.record_operations + self._session.record_operations = self._record_operations + + def __exit__(self, exception_type, exception_value, traceback): + '''Exit 
context.''' + self._session.record_operations = self._current_record_operations + + +class OperationPayload(collections.MutableMapping): + '''Represent operation payload.''' + + def __init__(self, *args, **kwargs): + '''Initialise payload.''' + super(OperationPayload, self).__init__() + self._data = dict() + self.update(dict(*args, **kwargs)) + + def __str__(self): + '''Return string representation.''' + return '<{0} {1}>'.format( + self.__class__.__name__, str(self._data) + ) + + def __getitem__(self, key): + '''Return value for *key*.''' + return self._data[key] + + def __setitem__(self, key, value): + '''Set *value* for *key*.''' + self._data[key] = value + + def __delitem__(self, key): + '''Remove *key*.''' + del self._data[key] + + def __iter__(self): + '''Iterate over all keys.''' + return iter(self._data) + + def __len__(self): + '''Return count of keys.''' + return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py new file mode 100644 index 0000000000..1aab07ed77 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py new file mode 100644 index 0000000000..eae3784dc2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py @@ -0,0 +1,38 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from abc import ABCMeta, abstractmethod + + +class Structure(object): + '''Structure plugin interface. 
+ + A structure plugin should compute appropriate paths for data. + + ''' + + __metaclass__ = ABCMeta + + def __init__(self, prefix=''): + '''Initialise structure.''' + self.prefix = prefix + self.path_separator = '/' + super(Structure, self).__init__() + + @abstractmethod + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information. + + ''' + + def _get_sequence_expression(self, sequence): + '''Return a sequence expression for *sequence* component.''' + padding = sequence['padding'] + if padding: + expression = '%0{0}d'.format(padding) + else: + expression = '%d' + + return expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py new file mode 100644 index 0000000000..ae466bf6d9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py @@ -0,0 +1,12 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.structure.base + + +class EntityIdStructure(ftrack_api.structure.base.Structure): + '''Entity id pass-through structure.''' + + def get_resource_identifier(self, entity, context=None): + '''Return a *resourceIdentifier* for supplied *entity*.''' + return entity['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py new file mode 100644 index 0000000000..acc3e21b02 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py @@ -0,0 +1,91 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import 
os + +import ftrack_api.symbol +import ftrack_api.structure.base + + +class IdStructure(ftrack_api.structure.base.Structure): + '''Id based structure supporting Components only. + + A components unique id will be used to form a path to store the data at. + To avoid millions of entries in one directory each id is chunked into four + prefix directories with the remainder used to name the file:: + + /prefix/1/2/3/4/56789 + + If the component has a defined filetype it will be added to the path:: + + /prefix/1/2/3/4/56789.exr + + Components that are children of container components will be placed inside + the id structure of their parent:: + + /prefix/1/2/3/4/56789/355827648d.exr + /prefix/1/2/3/4/56789/ajf24215b5.exr + + However, sequence children will be named using their label as an index and + a common prefix of 'file.':: + + /prefix/1/2/3/4/56789/file.0001.exr + /prefix/1/2/3/4/56789/file.0002.exr + + ''' + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information. + + ''' + if entity.entity_type in ('FileComponent',): + # When in a container, place the file inside a directory named + # after the container. + container = entity['container'] + if container and container is not ftrack_api.symbol.NOT_SET: + path = self.get_resource_identifier(container) + + if container.entity_type in ('SequenceComponent',): + # Label doubles as index for now. + name = 'file.{0}{1}'.format( + entity['name'], entity['file_type'] + ) + parts = [os.path.dirname(path), name] + + else: + # Just place uniquely identified file into directory + name = entity['id'] + entity['file_type'] + parts = [path, name] + + else: + name = entity['id'][4:] + entity['file_type'] + parts = ([self.prefix] + list(entity['id'][:4]) + [name]) + + elif entity.entity_type in ('SequenceComponent',): + name = 'file' + + # Add a sequence identifier. 
+ sequence_expression = self._get_sequence_expression(entity) + name += '.{0}'.format(sequence_expression) + + if ( + entity['file_type'] and + entity['file_type'] is not ftrack_api.symbol.NOT_SET + ): + name += entity['file_type'] + + parts = ([self.prefix] + list(entity['id'][:4]) + + [entity['id'][4:]] + [name]) + + elif entity.entity_type in ('ContainerComponent',): + # Just an id directory + parts = ([self.prefix] + + list(entity['id'][:4]) + [entity['id'][4:]]) + + else: + raise NotImplementedError('Cannot generate path for unsupported ' + 'entity {0}'.format(entity)) + + return self.path_separator.join(parts).strip('/') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py new file mode 100644 index 0000000000..0d4d3a57f5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py @@ -0,0 +1,28 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +from .base import Structure + + +class OriginStructure(Structure): + '''Origin structure that passes through existing resource identifier.''' + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* should be a mapping that includes at least a + 'source_resource_identifier' key that refers to the resource identifier + to pass through. + + ''' + if context is None: + context = {} + + resource_identifier = context.get('source_resource_identifier') + if resource_identifier is None: + raise ValueError( + 'Could not generate resource identifier as no source resource ' + 'identifier found in passed context.' 
+ ) + + return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py new file mode 100644 index 0000000000..0b0602df00 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py @@ -0,0 +1,217 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import re +import unicodedata + +import ftrack_api.symbol +import ftrack_api.structure.base + + +class StandardStructure(ftrack_api.structure.base.Structure): + '''Project hierarchy based structure that only supports Components. + + The resource identifier is generated from the project code, the name + of objects in the project structure, asset name and version number:: + + my_project/folder_a/folder_b/asset_name/v003 + + If the component is a `FileComponent` then the name of the component and the + file type are used as filename in the resource_identifier:: + + my_project/folder_a/folder_b/asset_name/v003/foo.jpg + + If the component is a `SequenceComponent` then a sequence expression, + `%04d`, is used. E.g. a component with the name `foo` yields:: + + my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg + + For the member components their index in the sequence is used:: + + my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg + + The name of the component is added to the resource identifier if the + component is a `ContainerComponent`. E.g. 
a container component with the + name `bar` yields:: + + my_project/folder_a/folder_b/asset_name/v003/bar + + For a member of that container the file name is based on the component name + and file type:: + + my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf + + ''' + + def __init__( + self, project_versions_prefix=None, illegal_character_substitute='_' + ): + '''Initialise structure. + + If *project_versions_prefix* is defined, insert after the project code + for versions published directly under the project:: + + my_project//v001/foo.jpg + + Replace illegal characters with *illegal_character_substitute* if + defined. + + .. note:: + + Nested component containers/sequences are not supported. + + ''' + super(StandardStructure, self).__init__() + self.project_versions_prefix = project_versions_prefix + self.illegal_character_substitute = illegal_character_substitute + + def _get_parts(self, entity): + '''Return resource identifier parts from *entity*.''' + session = entity.session + + version = entity['version'] + + if version is ftrack_api.symbol.NOT_SET and entity['version_id']: + version = session.get('AssetVersion', entity['version_id']) + + error_message = ( + 'Component {0!r} must be attached to a committed ' + 'version and a committed asset with a parent context.'.format( + entity + ) + ) + + if ( + version is ftrack_api.symbol.NOT_SET or + version in session.created + ): + raise ftrack_api.exception.StructureError(error_message) + + link = version['link'] + + if not link: + raise ftrack_api.exception.StructureError(error_message) + + structure_names = [ + item['name'] + for item in link[1:-1] + ] + + project_id = link[0]['id'] + project = session.get('Project', project_id) + asset = version['asset'] + + version_number = self._format_version(version['version']) + + parts = [] + parts.append(project['name']) + + if structure_names: + parts.extend(structure_names) + elif self.project_versions_prefix: + # Add *project_versions_prefix* if configured and the 
version is + # published directly under the project. + parts.append(self.project_versions_prefix) + + parts.append(asset['name']) + parts.append(version_number) + + return [self.sanitise_for_filesystem(part) for part in parts] + + def _format_version(self, number): + '''Return a formatted string representing version *number*.''' + return 'v{0:03d}'.format(number) + + def sanitise_for_filesystem(self, value): + '''Return *value* with illegal filesystem characters replaced. + + An illegal character is one that is not typically valid for filesystem + usage, such as non ascii characters, or can be awkward to use in a + filesystem, such as spaces. Replace these characters with + the character specified by *illegal_character_substitute* on + initialisation. If no character was specified as substitute then return + *value* unmodified. + + ''' + if self.illegal_character_substitute is None: + return value + + if isinstance(value, str): + value = value.decode('utf-8') + + value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') + value = re.sub('[^\w\.-]', self.illegal_character_substitute, value) + return unicode(value.strip().lower()) + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information, but + is unused in this implementation. + + + Raise a :py:exc:`ftrack_api.exeption.StructureError` if *entity* is not + attached to a committed version and a committed asset with a parent + context. + + ''' + if entity.entity_type in ('FileComponent',): + container = entity['container'] + + if container: + # Get resource identifier for container. + container_path = self.get_resource_identifier(container) + + if container.entity_type in ('SequenceComponent',): + # Strip the sequence component expression from the parent + # container and back the correct filename, i.e. + # /sequence/component/sequence_component_name.0012.exr. 
+ name = '{0}.{1}{2}'.format( + container['name'], entity['name'], entity['file_type'] + ) + parts = [ + os.path.dirname(container_path), + self.sanitise_for_filesystem(name) + ] + + else: + # Container is not a sequence component so add it as a + # normal component inside the container. + name = entity['name'] + entity['file_type'] + parts = [ + container_path, self.sanitise_for_filesystem(name) + ] + + else: + # File component does not have a container, construct name from + # component name and file type. + parts = self._get_parts(entity) + name = entity['name'] + entity['file_type'] + parts.append(self.sanitise_for_filesystem(name)) + + elif entity.entity_type in ('SequenceComponent',): + # Create sequence expression for the sequence component and add it + # to the parts. + parts = self._get_parts(entity) + sequence_expression = self._get_sequence_expression(entity) + parts.append( + '{0}.{1}{2}'.format( + self.sanitise_for_filesystem(entity['name']), + sequence_expression, + self.sanitise_for_filesystem(entity['file_type']) + ) + ) + + elif entity.entity_type in ('ContainerComponent',): + # Add the name of the container to the resource identifier parts. 
+ parts = self._get_parts(entity) + parts.append(self.sanitise_for_filesystem(entity['name'])) + + else: + raise NotImplementedError( + 'Cannot generate resource identifier for unsupported ' + 'entity {0!r}'.format(entity) + ) + + return self.path_separator.join(parts) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py new file mode 100644 index 0000000000..f46760f634 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py @@ -0,0 +1,77 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import os + + +class Symbol(object): + '''A constant symbol.''' + + def __init__(self, name, value=True): + '''Initialise symbol with unique *name* and *value*. + + *value* is used for nonzero testing. + + ''' + self.name = name + self.value = value + + def __str__(self): + '''Return string representation.''' + return self.name + + def __repr__(self): + '''Return representation.''' + return '{0}({1})'.format(self.__class__.__name__, self.name) + + def __nonzero__(self): + '''Return whether symbol represents non-zero value.''' + return bool(self.value) + + def __copy__(self): + '''Return shallow copy. + + Overridden to always return same instance. + + ''' + return self + + +#: Symbol representing that no value has been set or loaded. +NOT_SET = Symbol('NOT_SET', False) + +#: Symbol representing created state. +CREATED = Symbol('CREATED') + +#: Symbol representing modified state. +MODIFIED = Symbol('MODIFIED') + +#: Symbol representing deleted state. +DELETED = Symbol('DELETED') + +#: Topic published when component added to a location. +COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' + +#: Topic published when component removed from a location. 
+COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' + +#: Identifier of builtin origin location. +ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' + +#: Identifier of builtin unmanaged location. +UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' + +#: Identifier of builtin review location. +REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' + +#: Identifier of builtin connect location. +CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' + +#: Identifier of builtin server location. +SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' + +#: Chunk size used when working with data, default to 1Mb. +CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 + +#: Symbol representing syncing users with ldap +JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov new file mode 100644 index 0000000000000000000000000000000000000000..db34709c2426d85147e9512b4de3c66c7dd48a00 GIT binary patch literal 17627 zcmchf2S5|e_UMz)dsR9F2*nC0QWObIB?ux`L?JX01O&wbQW81{NV6ayAVn--P{9fW z=_m?Vz)C_>G$I6ugpg$31pV&!{qMW?zWcxL|K6BocV}nM%+8$o%{eD)5C{a??|4M$ z(c@?|T7VD1+avyKI_`Ju;6a!b6nxU(mv^aq{j4ExLm*JDi$#$L1pNG&{>ur>{=0Ll zKTH0jBVK9YQvtrPPAgQfuh(&SE-r`=B9b~02tI@r5uPs*6p-Odp569bbnC|}=hnBp zE>O@wzzYH%IuFoj3INHaL_{2+abfcJN1*_)r+5nZ^kY^{_xr}!b+g^8 zBASH_@`eQ+`1Vobb@{MGgoVc=Lz1XhZ}nQ~kGx*}J2)yNsFUkCpKnw1{G8=zT zdVlP8l6s)BZq>J%tks_TQZ{K-7pWo)2?ZlRKMb2?GPe&0r*z+8<|`PVo-#Y4QmAx` zsV*_$Jh86zsK!)vtMUmWAx>aw7G~DC6hD=pniUfVn1k9WLrl%idm1K*AFMNqxAY5o zrF8132yXTnneKW=(3qOKzO`OYm5j39koUB6E9r%NM7xOKENxg+wCl^z%mr^D3ijJ0 zZdtrQqI&tt>wd(6icrH6Pfw-Rd$t8wl)995-e3DzdxF{dIL3zHw}M&kn^@U zAl$iaIFFg*^9Yk2ld5wXlU*^3(-t-_#j(0XUvHD&u5EkfTOF{zqAp|Dpt?JAZwl4U 
ze@KR=NkjBEF*0pN;`j6QV{dRyxOG%TEnBt9yyfdy=+Ba8o?iY-U8;-tPfA%L4=TT- zO0M^u838vI1S*>z(erckuIXIUsi=D{A#-;|IXx+ndm8=7?a%c4=}F*+b2I9Jd_-vC zxx#iK``t&Q&JVVP3U%ZS70?6oU3Ksx>+6Li^*11yn)Ha+vRYUX$|8{EXrGRzv9ka zTxNuMSN%V`KIzn954Y;59;}cp-0aQn&nrBUl>ZA>X&Xkmtv>a`s-w}a` zG2p;P%M4m6>Z{6pv-}iv;*;#oBGdS@Vm@fM^m<3!u1lzuTefI1vR^n2<5qj@IH+Tn zQ@3pS4sV=Cs_x5NS*J=JH%x(@tk`n7u~gj+!}i>SJNIYs)mAPlJI%k$m_H$oE>-@N zh^=W8w#q_oQVQSOJeE-|9R2)uRMJZdoBPDS-sDq0psrbXF!4?IkI4i(o}F@L(`qZC zHF6?l^9O|T=g>u zln9rHSn#$!rG{vO40ZrCN^;1uu*UngaTWSO`VYsFI7Fy2nPezMt)8cK8_6abGj#Ys(SQgLfv0 z=09u-fMgrfOwG!WDDwj8^Qjy4R3$sQetY-bl}gG#)&d}wfG6A4&P6s1PM%sEX#i>! zM2R}b3tWDB`)(vwm&j1HT@=UXkef=S_T|-Au=5PAyeRo@AJ#KYc;gjnMcl4wWBXb1 zO}wO_zFjgK$Dj5HlN*zYfukwUI<|>*Of#nN{OoA(B`ut;F>ZMu<~=4GJDvk+Y2e_9 zvrNUHA9ZD8QbJF)=CaGnF-1Fam#xO+44clE=)KT4^R{fe*&?G^FW-zhwlsvflUWs- z@%RLZv+Spshm2w5*@Rj$RW9B3m`CLEr7LLrVRGZqkCSol_OKPv-rtcWU+A~}<5Dlp z&w|oXs&RpclUPU8?9`mW`qZoZ+SWf>+tbdr8)2^RUbY-_eipZ4JjZUP59849aAy}z zG4|=XyqsVNwf8%M3Uusb1U_Kcfu zAA=3DOwh)U&G~na`ZcU&C81q~o1ThugN?pKRY+PWzjiYE(R^;DxBW`o{EB63a%Jrw z+vTi&nNxvrzN6!tI@INMF5^xvq`{A_U_gvrUCOLwr{}uwXdCi zL5uKyu)SYZ#Lr{JnG=S>voCwHgEok^pdC<4{R<9@N)b_PGPk*%)a3qRu_CKm} z5x7=#rU@tW%ov8RNxgNA0&V%DAy}wzB4J#kF;{Q@Vo}4u>Tth$StXlzHrR zsNUA|i*Dp=?O%Y!PXu3)$&nwAyU8eyTkG9meX(wufa;K3Qq9_~LMlEp9_A^p9^Y}> zXteo-UCs)KBZ6HHH&rKE)`11t?*Lm`i<2(?+9lVXs-NAFyQgM&Bj0|01kN=|Fe zNG)6)wx%$1Hl57NwkR9ZdFt3&?E>`#Q+&LVWvi-XaNPx;wKSw&5_$zzzDnU)=l^DS9HLlx$k-O`lRvN zp<~38#@LY&hI?RjAPHiL`)I(8)!}0cK!0Oh|AX)TFzYm}I&j1afSHP0jkn+S0x;DO zSRMY#Qe&C$t5HjVsjD~YZk2L;kQsacG&8CLA*_=0kCqUIlG9f;LOLaLjTDE(+~=ZB z8iOHv^M@akY8g^3U?`B6BgJN>2rhm6gZbS*Mgxe2JRZ4SLSOG1X3Vzp67lo!^ZU zDmUD*xEy(;Vh)@HTsgGWiNGe|3*$}c^6Z5j9V?L@*Aib>zLjojxo|iwTrNanwV48t zHW1*W;a^K$lfHsUHQQj9W6*~=qfrXit{j8$oK4tmb_%3=tieM8QPS?$h(i@?J8J}I zbZT7lj=xt7x3Y*m`iL9R=w%_N8e8lhEsG0>+Wt9Jha-y%%S^?P`!Hk;BW?NhS1|!b zUsHW%TW`;{o-O%`1K@4wZW;8!eQU#{s||$}Wpy^GJ`_m%uum}YVCb}$+V5E^#XKVK zxN}`s#%ZW(sUz~8IFavKu69}B4<=+CW^tD1&cC-CFID0{aH=V^@Zd8~gLIm{2=S;+ 
zypIl_93^CuBWb0)K*dBGdYsVSqIxjWzwAkDRwzP$U*6p}?Cgnznh#f>?3j-8zr|Hu zx=Sqnn}wdznkNe1-xr{*(uU=ZS^Rv-ZTiGId)r4cREc^|lcsobPjL0#jaEMCni3L@ z-&X9OeQtjLRiurVmnvz9=a(_Xyc0&p6AC zx~53l$CM)PqQn9T!`x#|SM!(n-DG}jtzby)v;xFrQVanh{Ittl(R*)Cga3yR!IiJy ztYsGYaGFROBG`97^zFSi3bR-33uQ7}cmsHX+K8;YH?(q{*pQlSKmC4#1kt4a^ z>ZTpyaNLuL1BcDTHyO7aVxL{!&G(M#TRed1=G?H&~*Ix>v$g z_Jf}1=7^0y28s7+l(o}R*o{3J>y}GyAAegj$yqBw`L5r$>Lt1RKywe961{{)cq+a{ zk3kiZsA;eG8O~?Q;?#^yTFJZ!KT+bAxb9)^9Yf;X$S5fn>DOO~?fQE_Fe;kS7ISw! z5}TqqLwRw5Sa8bN{Cbt)-u%LckMu5CRFKjtDie7tY;nx)+h&owgcZc+g=f`&6mAJg>zE+aeC#}ZhkK-zU$_!s3uN}c zkYrz5cc%Bd)aTl>78`UYZS6mN5yF!Iw`H$!`3EtSXF9S_2Xd9H`PIJifd^RO! zDEVofN7F5gDV@L~0+pW3q~78qYC7w>3Z9~ja0x*1Nk@!Tf6p5q0!J|8GqNmT=H~a$ z`<`K`xicY8RSvO|*Mlp!9()SLbAw(n3O+3qPaz#LWDHpaj{#+JXjg_tM4-m(Qm@#L zLD8RPz!VC(#`O@1i>EezLJ-D-l=|l;DPWG!A~G(CS{8T#a-CckqddZm^@=h?)FnPW zGQm}$#LP`z1h%QK*H1K%EyX;zCS=Pm=-x}|Y9Ehluw$)>s3p~5lU&4`{ch_|O$t;; zdnGG&3Ma+SJJ?~UIeSWRughyO=fiVCWT97d`rIX`yt4MUN^wgDrXV;EenTI5waOb= z7pxz?M78FEwA$F$!h(e6U(h_kiplz^BC<7iViK{8@jWhhw0K{IRd&K184bmUacQsi z#WjiFzS?O$)`uB{Lt@~9dFYpdS$Sm^r^`NxIX7R5JNnTscNVABSeW8`b{Lb(F2(sD zId>MFvd(#nKlCFV>T%2Im_f{CyWDFzCwPOq^)+u~KX3!O~idwBG z5mZ{4`@J`EU`T;6%s;Kni6}Xa;71TOnoLfj=crDMA`7i0OHzC;0>Hm}%+?mnGu;{= zd=E%zsX)H%ZS|7f9S@gfz})ZgV1z#@%D}okNgGvvQ%Di|EIo0nj>0mYdv849xiAB9 z=kehq%7h0Uv2q4bV`p}AEs_R$GrktiqO8*?ZZr;J1^^0 zq2u5X=^CjLoz(91blvt#{AY7#o_DExy1Hsv`BW@z7nQnKAOjMM=u&6wdQN+-{}tH* zl`yiBVpN+ST?|-kLEV8etZ1};QHysY;uUM_zG3$8(VH6GBZ^iJMP`ZuYSvt6+UoQL zJw4v0ql+nM&6$+7ujW~-IGv8d3qW>LM5^$`eNM6tP7wO05IUHTtu_(kIc{HZrqC73 zJq>-J6VR_Xs0S9-2?>Nsqk5f-26Iy!DuojL&5WHoUy_COZTJ!A_6~%FOej}DRn7k$nfmG^^fFadzNz>@3pndtFyD3@)ZMX*cMI*O?z@Uk<3q;w22v+kf>TC!ymqBFnVsd6S+}kJ zt?a(qk7u4Qv!-gQDO}x{lE88P9i1V(EFNDf`Q${}V{JcdQ({2ZwhzAEWt6Y8`H>R2 zA5kHZk`(7DYkKP{ltAa(J@HIoTR(xrgP8b_r+A%hLi8{BGR z=q{KlmcuC7b=*;m2S}brS)R+bklhBUg)>dn34uN)jjQ2j7P=R)Hu$TbBR0kB-4tq5 zf1GEtuD7s#W^ACKv#NkQfALb?c|zPBVp&yWrlWbdVO*;;TEVR3(Ys{w=NvnAaMk3| z?>;6MotrNoiOX5iaubUP`jo#1pjM5{2bWN5B%9anSof0iUFJZqMQ`4P`@p%T6I?er 
z)h;?L+`f1-*qtV7wDwlp1sZ_S3i{~BiiU3A$L?bR1jwZtyCFVlU3 zxe1kN#g1IjzA(TTLun-n zRIi9hiPNk0ncAK6z(u%i>;AhnRd0AShQ5*D2^~cE-+^wj5y;JdSsRZiL7G~6n`KNA z`KGjv@c9#M`y@9OHyLULi+vNfQ5Ql;Ke^&2C9-KVvC2;`v#F@vRz1N+du40B@jO{2 zrRl{Ead}fqy$+wS;i#h-J=Uj@H&h@_gHMk`vawf@+Y*P4U6j28sy|vW7D=0D3~R{)A^*Y=7Dm3V%Gob3-H* za~eLGBx+j(nnm2iK^E^A8kw?c5_XAAh^M-F=LAE;p8qPCv=j^f6Uje$tWrIgi`qG zGEB3{h3FbS!yJ#o$<5+4`Y;?>dr}|fem?Y* zx1P(3vvt%xo|j^|eYU(n)Zw5G(CWOi61`qT{Sfk5eJA3QRC;&5t7Pj`40%Xev64S4 zlh1E0+Bl>8Ci(S}GW4EwvKCh(slE4u#@rB6Q&{%x2g*+h`0VX*Jzy~f{<3$t1eQP? z+V+Q6I;*>YMm36IPRzgNJ}jp;`8`}NJ!(vh%~R8%ir&4}l(!z)hc)&YU^?=}-Bxe$ zQ71PA3O;d#QCM`$+$Vg`py6Iw2lB-q0!Gyjs-s%U9;n%1&VVyE6ae$j& zfA`#6G2_M;ua^bO%HDgRiqe(*b9r=t5zeU(hZQ3#*vm2k-_P0{6@$q**!m z_2446(;eM{h`P_qI}$a{9lB%`&qm5zSCxrrQs~uC(%^jhSkY-je#=*t^=9fw!*@&Wj~yCZW&J(L1c{`x<8RllJ(?9G zHiX!0jWDZC&3mK52QKk@uR1ZG<@Va;gv0}5=p%~+{2mJV^5@b{C?9Niq8dat< zFD$VSKqj@yb(8+%9Lkh3(@G}G>Pq?~h<^fFHKY3P|Izo*!v3e<;Uczc4vFxE0~R24 z0Ruo12b2!!X9Ht^ZA6nKZh{^FG0wKa zI{T-?w^GO~Fnm;l0pwt1*_z0a1dPP6<%2U^?Kzpv1OUJrFl^!c1^^1j(+i&V04#?< zf6Pdsi-Ns$#Zm%+PHx>>K*s`b5y(5swaNa_V^Ug{5M*Qkpg;Zw+T_?S>%Qp~7IO`_ z1Uy$qZHWj2;gUR$7f@aMrtqFWsF z0I5Jxuk|xfV`X_xgd_9bef9b3uXV9v`{|E0GGdz~EOkM5RNSWO)ATIgS3N~FrTPoa zXDp{m`}GS)hr0$|M;8{ln3aa*dKf(GognZk&I&;&sf0Ne^n6VjbHVEQX)9r`a16Jc zT;+fAGC8RRvG$v!z1SMXftX~%2m1Ua#J-y6>Yf!7(jUZ-(zDM|uCL0XdZli!G)J9! zG1%~I&Qro8&+J(SQo~d$epDd+!eFUwz5TMnQT1Ii0 zyv)&A9r(2h9Ji%T2-8rr7cbAf7++(5_~0f(l>0~D_6$X_zF0B$8Z#`;pOq1KcPH{) z=BNlw#5~V-qIj4}U6E}V9c=XAJ!Zwp#1{9juN=CC2gM(3lKPfHSJ(Dn<2jpYi`7=X zJ0A%~MffceW~bSLX77rkHB9teuwa2P48E<2GdQEC>O|o+kEVQ)0ID|mU_Jby0u^f_ z+{|pO218hYbGo^>ShVaEw_&J9lw03e?@YqO8-^}#roG9aYusu$t4A3CEou0IHqb@u z0t@XNOO`t;l?DvhhiofmWzqn>F2jRE06(u}wnwi9pq0RqcLpk8=v+q#Dcpt(TiPMZ z8S!P7u26M?Cue~HLhI(ivX5LvK$9(|ffR1pa`v*lB-X51n(Ochz}13H%+*AM^O~p! 
z+DlImlz=2>HES*zJA#F?h+5fB0CbH6EWop<$-tRBOc*L=9!gr_p#+kR?C@=JpqT*L zWSBkxBc?K_YzCTLRepLPTN$0Ip#frE}~<87%TkD7;?}RB4J40C0lQ?KL%| z3;+&YP~u9{^JnE_tcwA=nB9@+?FpOp&)(HQ=p(PidWL8)7$Ah>Ra}3G(4yQCwPLP% zn9zI)yU6T*EGDHAp%619a=26=wth>$His}ru56D2c-;miz~!2T7f;j1+S%Lg zMh6mxxzW+10MB82Oh<TZKFG zSb0Ke4rNl$5Y(zydZR`lZyx1+;=qMhpTn#2>d6312bCaKZ#A=dVJ$K4ey>$TSzAEc z##_%raEC;uUw9&SZ2VMjsEERVvT%h*i*^fV&g;~%3o>%m%|D-ai&C7PQrGtBl25fe$SuYVFR7;C=eui(JOPj|6(4v856Vnffy;H1A z-%A4?T`q!Mc2UAGT}!6K!A2+E?DS)SP%>Sx;Nlvq0LOHZ?Ph%k9G-fV;4{kBcA$b_jE z3xGg)v=B-{V-J2Ei{lC>TbxLY1ZjY?SR>trdE)w;on34{*KGW-%h%@%<#x^6&aAOE zw0wW@(#6+ee5zd@;U1DzVnoh2!;NQV0EMgoJjlG8E1wx~2wYA$nKcJkpkM&Nd>Wil zh!Z>*Cj5pWcZuCskcb^20FW|oK_0dnNM%4~0l;F=1}>`TvGMGp`eINOOyJ%obJ#8l zKneC8R0Y_DKz=I;aPR=qW0Fe-?4ufFy?%7g}(<0<%d!F?8m79(mMBJIpo1=I6Uje2p?JD=z; z13=?n>9-#8TFWm00@p*6y7;N~+Z0g_ExdT@sxx;wL|1l@+o(qy_JlG_ApgoF7Wt^x!qC|RzG0w-tMJYz1G19BDWi}ybxF{&4h3?UxX8@0}ki33cwAO;_9Z1(9$zW7i%ja~? zw(MdKfg=N^qx1j;wuuaeoTmwlYKS>*!!OQlg{Wb|d;bOKMwI?D&^bZSrThLP&;bIy zk-Y`LwI7UN=w!NOJq5D2uQO!PTAdpO98nzra{r!jl8|w*F@MK6mtrux6ir1q>AuIA zyd;1o)Lq}cEL*PE_ABBt>ol0UMfoQ&SwrFGGUy$dKb`-^yHOQJsq!%e^K<2{Vd7B@ zsd;fpd_q*AwHkh*ekTrSPfSVGT_hUR$LX`9sCjR$cwCfBo^@=SKR7DVeZwQGnty+X zn}@rti>|mu+n-1~i{8ZbtE+7O;3jNkx>1zDClutUr)<$dR`**c(H%K#^KwPmBL9350& zz0C8?)06t`kuj#{4E;mCZ7#C9yy4);k__pMaxq`dZn%H7ohjihNiz5@aO&q0@}j{u zl$mI0j%uUW|0t6UxSX_#rO7y^N}PL1peh99&5h|vO>O>~IkxACxtUahYzT6IeG??T zz7^NS29a%?K-g@_ zq!Pd=J%(M(n0!V93>qaLnp~#ep~2Ksm_tj zW}2U3)hl*}2X^mQx3tQ4i)>2yhW?`YV?G2C?Czz75Kvr;u23ENQ9b<@nb$q**fetW z_H+~Xi(_w+ct^WcW64(#vnKD&Q85wSU5{j3Cw(79ou2TC=B%cT?Y>PmjK*>n=!D-k z*IE*R9cz$PRD>sf=;zF{H#ZOlz~n$0AQ7NF1>wh_aqD}b>bOI!nG640Sc51_hy52r z1`BX^M)M~8J3@dbBzTb91(op+TaE2N;N>l@l=hHnybAuH8V59ELGGf}$C!jM8vr#M zCb(b%t~_8D19lM^h!X%zcqqB0n0Ev~CJ{FFpRSw`TU-7c*s4(b$Jhe8ro$%+0Et8= zST;@*$kX0)xwMEA+&W%YAvJl}g4O<QLeBp1nKvi%LwiW+FxGp(0vX1Z^iR!XJxl z^OR9T_FL&1>859SlAYL9!tw7P z-fGPIY5ntvudQJ0PtSQ&sKmg8z3ZyF*xjtIky$z;d_&0Qk4-X4=6^#{t>qvH;&yUWw4mJg 
z*M=!FYULhQ4eoc6r_Kl#`hh~exQB8=Hv)+2U@GUM1NBTR2dl5`4L;y6t_uHsLm7}*EYTQ=?e`X95)$d0@>d^GYU-sj zZ}zEKPB%-HGK&dx0&f&xy4+i1FF@RES*MB>>)>B`ti39x>Tt(&0hhDr55p21m0rR$ zY7J4`rmpAyBr>z^#VHDTA=&CiQlqabwgv6#iA>q^aB^}^IQmu?pb+LcgumEcWaxO% z2H;y2aA^1ODCO`UPzqXLfJp&d0Ooph92&91ILF$;J2xEMqvRtsNxGk*+V>nOx+ycVr``h(Rf%o* z@@bReG4ZCD;55m=lHhQ@?6;ewdn)tC z_9^w3PN&jFTHh#f4sU#Y$;mNHTn|k1b6wY~*JYLkT#8P~VB;4c-QzJsnXjn1iMrJT zXJ(sJ_9u?K%RB6|e$?-<@2NYAz-iy*PBMwu;DoiEO-y|?If|e*$Vf12jj%FL_f_*L zr(F}2Z+B0^zJ>3I#VDEBjXgQ^gCx13Vqp+Q@&I@>Zdmp)rnlt&DvKeM;mNk#1^#5Ma7U=|~v~r*&HP^t_<~l=9aX%1QT+>J}9K|kX=Pxc{c-p@a z-ZIntZ1k__1e#-<0he8rB>j|<%axgfF2gVu;1@N;8QNNTt^Wr;Ex@=+D&sSJC~jg_ z5f}zv81U;6&|9hp=fyB*9!sNK9Dq}eG+zQ&IZKEr*r5L%QD8=xicD~m-L_kMeaNCN z`8~EdHXBPDtI=9|PC05f^9%V8kB{5vcfnO?l>dEH2{%P=_QVw`BFt_6!tPY0eTIwK z#yhJKij`~cBKAKSzQ+l?6=1^naUi;BX^>y0VRstRFd|?1Ky8?{mD=hDS2mH`?jBe@ z<(hC+`NLZZv9%@IPn*KV^ZY89zWA&qNZIivnjzYs4ql8AM$OjceZEpV(t%QuA_m>8 zH8pP5bVYe~e^t72=U`~)9xZR%qlJe(Rb$(3xPBDebAuF}%24j#+I0%i5q3Ry!8s#w zcC@8$=z>C{hXB}z0CYG3pukE7;MNXGl40xt z4-fTnVTKyO!@>aiqi_!K0{n}-g7P7+coG(%M#up6Z|9J}^9A8-g9*w6QctO~-FP=1(Q?id`I=HzC88x{hlZwH zOfDg;GM2JPv!dcTCmoI{*o)ar%sWYMcbD3SkDh$dGg)pa87$^1+IOqQuKKSx>T;X`o&XU0JPeo`fAfsupLxg5SmFP50#5xmogWcX24wNMjzUL|pP}MfT+a z@=wK)7LLG00=Jfvq;3e1S@LOur$S6VkO>Qn-*y=MHtjd-7VWTOzwEGogLUAak`BuF zpCjED^Zy&t6^{Ool1>7W4mRi?kgndoMVu#R+6~s>xhDKIuSqGQYtitYR=7Js(aX}iNF_^jWsjWhi{}DfMI@z?PF36 zXL`K1lKzP8FRxww0v+TQSH45zjM3#@jgtpcnL$@G1JFMRXiB#jOE*ZY&eG~Z*504* z7JA^cCM2A3Dz0WnZ6Pzsi@*Tjg|&v>l#Y7g%NQ3W?+22HFx5|+I`V!oL^Sxu4-$WoY2H`z}AWZO=Q4j>{9Vn@_{Q1*L7#{%%o!j+L}x^P|5S zmC2^B2SUV^9gdY}Rx28-e;N6qR{p?ePpaNrdlPC`GlEzq9d#wkO&tD*rft%P4p7I? z?cMcr3S$Pp-aIRKzQ-M@xaAjJdpbH7Z3iB2J8L%^mq0AQSy&sR`38tgw3JC0|Wwd(D!&4+^Zf8 z3Ol}-v)Iz#au;RsSqJ+Dc)@GvVBf$0<%QqR{_Do|{=PxSc!$CMLB~$QUrz7}4Gog! 
zWgwAVO4jh?AbJnX4naE|gc8yAUPpX`4#GQ7l;D6Pez2VU$>7Br;CtId_&W+j-o563 zNcDwMy@CS5eoI9{sY8mtq*@0Bc!ivv4cF}yu^DTah{N0$0)zB z&FkiUmC0?0IuaiDTi)XxZ_>Wg9Y$(h^!R~{dXAci~8!sUR>{qWb;c4hU zONs!%ODmvMTcnefIzS@i_2B-xqP);O#A`)<QTLRpa;dTLTyWj@pBkAyd$FSo* zAv{4a@w%`afl!C9i)nenITMRO=y@R!%MQaKoj@Q~{X`%P;q^7m2*kS0(0ztw%we8| K0|H@L{l5Sv%EZ9{ literal 0 HcmV?d00001 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png new file mode 100644 index 0000000000000000000000000000000000000000..da6ec772092e788b9db8dd7bf98b9d713255bd72 GIT binary patch literal 115 zcmeAS@N?(olHy`uVBq!ia0vp^AT|dF8<0HkD{mW+vhs9s45^rt{Nwxo|NpDSef}SR z)AjRwLB+TFMqi%)*FV4eU*G8XwcaNAPd!`P&HpncCH@&33UInDb7J`WPs~|dc=2+|Ns5_85n>V41r7_ z2Z$G#W^V*)67zI%45?szdvPP9g98K0!D87L`y+K3D$*XGO*?h$S#W)QaTODT0|NsG z0|O(20s{jJLjwbY0MII^pec-jRhCRa3|q)F2jR8_Zn(Wbw&7?&LAE(a!FvT)I8$Oc za(q!@4wBoXsALXGT7oAksu>=jjG1By8QyR)iowr~<@`_ format, for example:: + + /tmp/asfjsfjoj3/%04d.jpg [1-3] + + ''' + items = [] + for index in range(3): + item_path = os.path.join( + temporary_directory, '{0:04d}.jpg'.format(index) + ) + with open(item_path, 'w') as file_descriptor: + file_descriptor.write(uuid.uuid4().hex) + file_descriptor.close() + + items.append(item_path) + + collections, _ = clique.assemble(items) + sequence_path = collections[0].format() + + return sequence_path + + +@pytest.fixture() +def video_path(): + '''Return a path to a video file.''' + video = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + 'fixture', + 'media', + 'colour_wheel.mov' + ) + ) + + return video + + +@pytest.fixture() +def session(): + '''Return session instance.''' + return ftrack_api.Session() + + +@pytest.fixture() +def session_no_autoconnect_hub(): + '''Return session instance not auto connected to hub.''' + return 
ftrack_api.Session(auto_connect_event_hub=False) + + +@pytest.fixture() +def unique_name(): + '''Return a unique name.''' + return 'test-{0}'.format(uuid.uuid4()) + + +@pytest.fixture() +def temporary_path(request): + '''Return temporary path.''' + path = tempfile.mkdtemp() + + def cleanup(): + '''Remove created path.''' + try: + shutil.rmtree(path) + except OSError: + pass + + request.addfinalizer(cleanup) + + return path + + +@pytest.fixture() +def new_user(request, session, unique_name): + '''Return a newly created unique user.''' + entity = session.create('User', {'username': unique_name}) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(entity) + session.commit() + + request.addfinalizer(cleanup) + + return entity + + +@pytest.fixture() +def user(session): + '''Return the same user entity for entire session.''' + # Jenkins user + entity = session.get('User', 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb') + assert entity is not None + + return entity + + +@pytest.fixture() +def project_schema(session): + '''Return project schema.''' + # VFX Scheme + entity = session.get( + 'ProjectSchema', '69cb7f92-4dbf-11e1-9902-f23c91df25eb' + ) + assert entity is not None + return entity + + +@pytest.fixture() +def new_project_tree(request, session, user): + '''Return new project with basic tree.''' + project_schema = session.query('ProjectSchema').first() + default_shot_status = project_schema.get_statuses('Shot')[0] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) + project = session.create('Project', { + 'name': project_name, + 'full_name': project_name + '_full', + 'project_schema': project_schema + }) + + for sequence_number in range(1): + sequence = session.create('Sequence', { + 'name': 'sequence_{0:03d}'.format(sequence_number), + 'parent': project + }) + + for 
shot_number in range(1): + shot = session.create('Shot', { + 'name': 'shot_{0:03d}'.format(shot_number * 10), + 'parent': sequence, + 'status': default_shot_status + }) + + for task_number in range(1): + task = session.create('Task', { + 'name': 'task_{0:03d}'.format(task_number), + 'parent': shot, + 'status': default_task_status, + 'type': default_task_type + }) + + session.create('Appointment', { + 'type': 'assignment', + 'context': task, + 'resource': user + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(project) + session.commit() + + request.addfinalizer(cleanup) + + return project + + +@pytest.fixture() +def new_project(request, session, user): + '''Return new empty project.''' + project_schema = session.query('ProjectSchema').first() + project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) + project = session.create('Project', { + 'name': project_name, + 'full_name': project_name + '_full', + 'project_schema': project_schema + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(project) + session.commit() + + request.addfinalizer(cleanup) + + return project + + +@pytest.fixture() +def project(session): + '''Return same project for entire session.''' + # Test project. 
+ entity = session.get('Project', '5671dcb0-66de-11e1-8e6e-f23c91df25eb') + assert entity is not None + + return entity + + +@pytest.fixture() +def new_task(request, session, unique_name): + '''Return a new task.''' + project = session.query( + 'Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb' + ).one() + project_schema = project['project_schema'] + default_task_type = project_schema.get_types('Task')[0] + default_task_status = project_schema.get_statuses( + 'Task', default_task_type['id'] + )[0] + + task = session.create('Task', { + 'name': unique_name, + 'parent': project, + 'status': default_task_status, + 'type': default_task_type + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(task) + session.commit() + + request.addfinalizer(cleanup) + + return task + + +@pytest.fixture() +def task(session): + '''Return same task for entire session.''' + # Tests/python_api/tasks/t1 + entity = session.get('Task', 'adb4ad6c-7679-11e2-8df2-f23c91df25eb') + assert entity is not None + + return entity + + +@pytest.fixture() +def new_scope(request, session, unique_name): + '''Return a new scope.''' + scope = session.create('Scope', { + 'name': unique_name + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(scope) + session.commit() + + request.addfinalizer(cleanup) + + return scope + + +@pytest.fixture() +def new_job(request, session, unique_name, user): + '''Return a new scope.''' + job = session.create('Job', { + 'type': 'api_job', + 'user': user + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(job) + session.commit() + + request.addfinalizer(cleanup) + + return job + + +@pytest.fixture() +def new_note(request, session, unique_name, new_task, user): + '''Return a new note attached to a task.''' + note = new_task.create_note(unique_name, user) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(note) + 
session.commit() + + request.addfinalizer(cleanup) + + return note + + +@pytest.fixture() +def new_asset_version(request, session): + '''Return a new asset version.''' + asset_version = session.create('AssetVersion', { + 'asset_id': 'dd9a7e2e-c5eb-11e1-9885-f23c91df25eb' + }) + session.commit() + + # Do not cleanup the version as that will sometimes result in a deadlock + # database error. + + return asset_version + + +@pytest.fixture() +def new_component(request, session, temporary_file): + '''Return a new component not in any location except origin.''' + component = session.create_component(temporary_file, location=None) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(component) + session.commit() + + request.addfinalizer(cleanup) + + return component + + +@pytest.fixture() +def new_container_component(request, session, temporary_directory): + '''Return a new container component not in any location except origin.''' + component = session.create('ContainerComponent') + + # Add to special origin location so that it is possible to add to other + # locations. 
+ origin_location = session.get( + 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID + ) + origin_location.add_component( + component, temporary_directory, recursive=False + ) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(component) + session.commit() + + request.addfinalizer(cleanup) + + return component + + +@pytest.fixture() +def new_sequence_component(request, session, temporary_sequence): + '''Return a new sequence component not in any location except origin.''' + component = session.create_component(temporary_sequence, location=None) + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(component) + session.commit() + + request.addfinalizer(cleanup) + + return component + + +@pytest.fixture +def mocked_schemas(): + '''Return a list of mocked schemas.''' + return [{ + 'id': 'Foo', + 'type': 'object', + 'properties': { + 'id': { + 'type': 'string' + }, + 'string': { + 'type': 'string' + }, + 'integer': { + 'type': 'integer' + }, + 'number': { + 'type': 'number' + }, + 'boolean': { + 'type': 'boolean' + }, + 'bars': { + 'type': 'array', + 'items': { + 'ref': '$Bar' + } + }, + 'date': { + 'type': 'string', + 'format': 'date-time' + } + }, + 'immutable': [ + 'id' + ], + 'primary_key': [ + 'id' + ], + 'required': [ + 'id' + ], + 'default_projections': [ + 'id' + ] + }, { + 'id': 'Bar', + 'type': 'object', + 'properties': { + 'id': { + 'type': 'string' + }, + 'name': { + 'type': 'string' + }, + 'computed_value': { + 'type': 'string', + } + }, + 'computed': [ + 'computed_value' + ], + 'immutable': [ + 'id' + ], + 'primary_key': [ + 'id' + ], + 'required': [ + 'id' + ], + 'default_projections': [ + 'id' + ] + }] + + +@pytest.yield_fixture +def mocked_schema_session(mocker, mocked_schemas): + '''Return a session instance with mocked schemas.''' + with mocker.patch.object( + ftrack_api.Session, + '_load_schemas', + return_value=mocked_schemas + ): + # Mock _configure_locations since it will fail if 
no location schemas + # exist. + with mocker.patch.object( + ftrack_api.Session, + '_configure_locations' + ): + patched_session = ftrack_api.Session() + yield patched_session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py new file mode 100644 index 0000000000..bc98f15de2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py new file mode 100644 index 0000000000..78d61a62d1 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py @@ -0,0 +1,54 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack +import json + + +def test_create_component(new_asset_version, temporary_file): + '''Create component on asset version.''' + session = new_asset_version.session + component = new_asset_version.create_component( + temporary_file, location=None + ) + assert component['version'] is new_asset_version + + # Have to delete component before can delete asset version. 
+ session.delete(component) + + +def test_create_component_specifying_different_version( + new_asset_version, temporary_file +): + '''Create component on asset version ignoring specified version.''' + session = new_asset_version.session + component = new_asset_version.create_component( + temporary_file, location=None, + data=dict( + version_id='this-value-should-be-ignored', + version='this-value-should-be-overridden' + ) + ) + assert component['version'] is new_asset_version + + # Have to delete component before can delete asset version. + session.delete(component) + + +def test_encode_media(new_asset_version, video_path): + '''Encode media based on a file path + + Encoded components should be associated with the version. + ''' + session = new_asset_version.session + job = new_asset_version.encode_media(video_path) + assert job.entity_type == 'Job' + + job_data = json.loads(job['data']) + assert 'output' in job_data + assert len(job_data['output']) + assert 'component_id' in job_data['output'][0] + + component_id = job_data['output'][0]['component_id'] + component = session.get('FileComponent', component_id) + + # Component should be associated with the version. 
+ assert component['version_id'] == new_asset_version['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py new file mode 100644 index 0000000000..aff456e238 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py @@ -0,0 +1,14 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + +import pytest + + +def test_hash(project, task, user): + '''Entities can be hashed.''' + test_set = set() + test_set.add(project) + test_set.add(task) + test_set.add(user) + + assert test_set == set((project, task, user)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py new file mode 100644 index 0000000000..347c74a50d --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py @@ -0,0 +1,70 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack +import os + +import pytest + + +def test_get_availability(new_component): + '''Retrieve availability in locations.''' + session = new_component.session + availability = new_component.get_availability() + + # Note: Currently the origin location is also 0.0 as the link is not + # persisted to the server. This may change in future and this test would + # need updating as a result. + assert set(availability.values()) == set([0.0]) + + # Add to a location. + source_location = session.query( + 'Location where name is "ftrack.origin"' + ).one() + + target_location = session.query( + 'Location where name is "ftrack.unmanaged"' + ).one() + + target_location.add_component(new_component, source_location) + + # Recalculate availability. 
+ + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. + del new_component['component_locations'] + + availability = new_component.get_availability() + target_availability = availability.pop(target_location['id']) + assert target_availability == 100.0 + + # All other locations should still be 0. + assert set(availability.values()) == set([0.0]) + +@pytest.fixture() +def image_path(): + '''Return a path to an image file.''' + image_path = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'image.png' + ) + ) + + return image_path + +def test_create_task_thumbnail(task, image_path): + '''Successfully create thumbnail component and set as task thumbnail.''' + component = task.create_thumbnail(image_path) + component.session.commit() + assert component['id'] == task['thumbnail_id'] + + +def test_create_thumbnail_with_data(task, image_path, unique_name): + '''Successfully create thumbnail component with custom data.''' + data = {'name': unique_name} + component = task.create_thumbnail(image_path, data=data) + component.session.commit() + assert component['name'] == unique_name diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py new file mode 100644 index 0000000000..5d5a0baa7c --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py @@ -0,0 +1,25 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.entity.factory + + +class CustomUser(ftrack_api.entity.base.Entity): + '''Represent custom user.''' + + +def test_extend_standard_factory_with_bases(session): + '''Successfully add extra bases to standard factory.''' + standard_factory = 
ftrack_api.entity.factory.StandardFactory() + + schemas = session._load_schemas(False) + user_schema = [ + schema for schema in schemas if schema['id'] == 'User' + ].pop() + + user_class = standard_factory.create(user_schema, bases=[CustomUser]) + session.types[user_class.entity_type] = user_class + + user = session.query('User').first() + + assert CustomUser in type(user).__mro__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py new file mode 100644 index 0000000000..52ddbda0ac --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py @@ -0,0 +1,42 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + + +def test_create_job(session, user): + '''Create job.''' + job = session.create('Job', { + 'user': user + }) + + assert job + session.commit() + assert job['type'] == 'api_job' + + session.delete(job) + session.commit() + + +def test_create_job_with_valid_type(session, user): + '''Create job explicitly specifying valid type.''' + job = session.create('Job', { + 'user': user, + 'type': 'api_job' + }) + + assert job + session.commit() + assert job['type'] == 'api_job' + + session.delete(job) + session.commit() + + +def test_create_job_using_faulty_type(session, user): + '''Fail to create job with faulty type.''' + with pytest.raises(ValueError): + session.create('Job', { + 'user': user, + 'type': 'not-allowed-type' + }) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py new file mode 100644 index 0000000000..5bb90e451f --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py @@ -0,0 +1,516 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import base64 +import filecmp + +import pytest +import requests + +import ftrack_api.exception +import ftrack_api.accessor.disk +import ftrack_api.structure.origin +import ftrack_api.structure.id +import ftrack_api.entity.location +import ftrack_api.resource_identifier_transformer.base as _transformer +import ftrack_api.symbol + + +class Base64ResourceIdentifierTransformer( + _transformer.ResourceIdentifierTransformer +): + '''Resource identifier transformer for test purposes. + + Store resource identifier as base 64 encoded string. + + ''' + + def encode(self, resource_identifier, context=None): + '''Return encoded *resource_identifier* for storing centrally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return base64.encodestring(resource_identifier) + + def decode(self, resource_identifier, context=None): + '''Return decoded *resource_identifier* for use locally. + + A mapping of *context* values may be supplied to guide the + transformation. + + ''' + return base64.decodestring(resource_identifier) + + +@pytest.fixture() +def new_location(request, session, unique_name, temporary_directory): + '''Return new managed location.''' + location = session.create('Location', { + 'name': 'test-location-{}'.format(unique_name) + }) + + location.accessor = ftrack_api.accessor.disk.DiskAccessor( + prefix=os.path.join(temporary_directory, 'location') + ) + location.structure = ftrack_api.structure.id.IdStructure() + location.priority = 10 + + session.commit() + + def cleanup(): + '''Remove created entity.''' + # First auto-remove all components in location. 
+ for location_component in location['location_components']: + session.delete(location_component) + + # At present, need this intermediate commit otherwise server errors + # complaining that location still has components in it. + session.commit() + + session.delete(location) + session.commit() + + request.addfinalizer(cleanup) + + return location + + +@pytest.fixture() +def new_unmanaged_location(request, session, unique_name): + '''Return new unmanaged location.''' + location = session.create('Location', { + 'name': 'test-location-{}'.format(unique_name) + }) + + # TODO: Change to managed and use a temporary directory cleaned up after. + ftrack_api.mixin( + location, ftrack_api.entity.location.UnmanagedLocationMixin, + name='UnmanagedTestLocation' + ) + location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') + location.structure = ftrack_api.structure.origin.OriginStructure() + location.priority = 10 + + session.commit() + + def cleanup(): + '''Remove created entity.''' + # First auto-remove all components in location. + for location_component in location['location_components']: + session.delete(location_component) + + # At present, need this intermediate commit otherwise server errors + # complaining that location still has components in it. 
+ session.commit() + + session.delete(location) + session.commit() + + request.addfinalizer(cleanup) + + return location + + +@pytest.fixture() +def origin_location(session): + '''Return origin location.''' + return session.query('Location where name is "ftrack.origin"').one() + +@pytest.fixture() +def server_location(session): + '''Return server location.''' + return session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) + + +@pytest.fixture() +def server_image_component(request, session, server_location): + image_file = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'image.png' + ) + ) + component = session.create_component( + image_file, location=server_location + ) + + def cleanup(): + server_location.remove_component(component) + request.addfinalizer(cleanup) + + return component + + +@pytest.mark.parametrize('name', [ + 'named', + None +], ids=[ + 'named', + 'unnamed' +]) +def test_string_representation(session, name): + '''Return string representation.''' + location = session.create('Location', {'id': '1'}) + if name: + location['name'] = name + assert str(location) == '' + else: + assert str(location) == '' + + +def test_add_components(new_location, origin_location, session, temporary_file): + '''Add components.''' + component_a = session.create_component( + temporary_file, location=None + ) + component_b = session.create_component( + temporary_file, location=None + ) + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [0.0, 0.0] + ) + + new_location.add_components( + [component_a, component_b], [origin_location, origin_location] + ) + + # Recalculate availability. + + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. 
+ del component_a['component_locations'] + del component_b['component_locations'] + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [100.0, 100.0] + ) + + +def test_add_components_from_single_location( + new_location, origin_location, session, temporary_file +): + '''Add components from single location.''' + component_a = session.create_component( + temporary_file, location=None + ) + component_b = session.create_component( + temporary_file, location=None + ) + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [0.0, 0.0] + ) + + new_location.add_components([component_a, component_b], origin_location) + + # Recalculate availability. + + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. + del component_a['component_locations'] + del component_b['component_locations'] + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [100.0, 100.0] + ) + + +def test_add_components_with_mismatching_sources(new_location, new_component): + '''Fail to add components when sources mismatched.''' + with pytest.raises(ValueError): + new_location.add_components([new_component], []) + + +def test_add_components_with_undefined_structure(new_location, mocker): + '''Fail to add components when location structure undefined.''' + mocker.patch.object(new_location, 'structure', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_components([], []) + + +def test_add_components_already_in_location( + session, temporary_file, new_location, new_component, origin_location +): + '''Fail to add components already in location.''' + new_location.add_component(new_component, origin_location) + + another_new_component = session.create_component( + temporary_file, location=None + ) + + with pytest.raises(ftrack_api.exception.ComponentInLocationError): + 
new_location.add_components( + [another_new_component, new_component], origin_location + ) + + +def test_add_component_when_data_already_exists( + new_location, new_component, origin_location +): + '''Fail to add component when data already exists.''' + # Inject pre-existing data on disk. + resource_identifier = new_location.structure.get_resource_identifier( + new_component + ) + container = new_location.accessor.get_container(resource_identifier) + new_location.accessor.make_container(container) + data = new_location.accessor.open(resource_identifier, 'w') + data.close() + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_component(new_component, origin_location) + + +def test_add_component_missing_source_accessor( + new_location, new_component, origin_location, mocker +): + '''Fail to add component when source is missing accessor.''' + mocker.patch.object(origin_location, 'accessor', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_component(new_component, origin_location) + + +def test_add_component_missing_target_accessor( + new_location, new_component, origin_location, mocker +): + '''Fail to add component when target is missing accessor.''' + mocker.patch.object(new_location, 'accessor', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.add_component(new_component, origin_location) + + +def test_add_container_component( + new_container_component, new_location, origin_location +): + '''Add container component.''' + new_location.add_component(new_container_component, origin_location) + + assert ( + new_location.get_component_availability(new_container_component) + == 100.0 + ) + + +def test_add_sequence_component_recursively( + new_sequence_component, new_location, origin_location +): + '''Add sequence component recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=True + ) + + assert ( + 
new_location.get_component_availability(new_sequence_component) + == 100.0 + ) + + +def test_add_sequence_component_non_recursively( + new_sequence_component, new_location, origin_location +): + '''Add sequence component non recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=False + ) + + assert ( + new_location.get_component_availability(new_sequence_component) + == 0.0 + ) + + +def test_remove_components( + session, new_location, origin_location, temporary_file +): + '''Remove components.''' + component_a = session.create_component( + temporary_file, location=None + ) + component_b = session.create_component( + temporary_file, location=None + ) + + new_location.add_components([component_a, component_b], origin_location) + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [100.0, 100.0] + ) + + new_location.remove_components([ + component_a, component_b + ]) + + # Recalculate availability. + + # Currently have to manually expire the related attribute. This should be + # solved in future by bi-directional relationship updating. 
+ del component_a['component_locations'] + del component_b['component_locations'] + + assert ( + new_location.get_component_availabilities([component_a, component_b]) + == [0.0, 0.0] + ) + + +def test_remove_sequence_component_recursively( + new_sequence_component, new_location, origin_location +): + '''Remove sequence component recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=True + ) + + new_location.remove_component( + new_sequence_component, recursive=True + ) + + assert ( + new_location.get_component_availability(new_sequence_component) + == 0.0 + ) + + +def test_remove_sequence_component_non_recursively( + new_sequence_component, new_location, origin_location +): + '''Remove sequence component non recursively.''' + new_location.add_component( + new_sequence_component, origin_location, recursive=False + ) + + new_location.remove_component( + new_sequence_component, recursive=False + ) + + assert ( + new_location.get_component_availability(new_sequence_component) + == 0.0 + ) + + +def test_remove_component_missing_accessor( + new_location, new_component, origin_location, mocker +): + '''Fail to remove component when location is missing accessor.''' + new_location.add_component(new_component, origin_location) + mocker.patch.object(new_location, 'accessor', None) + + with pytest.raises(ftrack_api.exception.LocationError): + new_location.remove_component(new_component) + + +def test_resource_identifier_transformer( + new_component, new_unmanaged_location, origin_location, mocker +): + '''Transform resource identifier.''' + session = new_unmanaged_location.session + + transformer = Base64ResourceIdentifierTransformer(session) + mocker.patch.object( + new_unmanaged_location, 'resource_identifier_transformer', transformer + ) + + new_unmanaged_location.add_component(new_component, origin_location) + + original_resource_identifier = origin_location.get_resource_identifier( + new_component + ) + assert ( + 
new_component['component_locations'][0]['resource_identifier'] + == base64.encodestring(original_resource_identifier) + ) + + assert ( + new_unmanaged_location.get_resource_identifier(new_component) + == original_resource_identifier + ) + + +def test_get_filesystem_path(new_component, new_location, origin_location): + '''Retrieve filesystem path.''' + new_location.add_component(new_component, origin_location) + resource_identifier = new_location.structure.get_resource_identifier( + new_component + ) + expected = os.path.normpath( + os.path.join(new_location.accessor.prefix, resource_identifier) + ) + assert new_location.get_filesystem_path(new_component) == expected + + +def test_get_context(new_component, new_location, origin_location): + '''Retrieve context for component.''' + resource_identifier = origin_location.get_resource_identifier( + new_component + ) + context = new_location._get_context(new_component, origin_location) + assert context == { + 'source_resource_identifier': resource_identifier + } + + +def test_get_context_for_component_not_in_source(new_component, new_location): + '''Retrieve context for component not in source location.''' + context = new_location._get_context(new_component, new_location) + assert context == {} + + +def test_data_transfer(session, new_location, origin_location): + '''Transfer a real file and make sure it is identical.''' + video_file = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'colour_wheel.mov' + ) + ) + component = session.create_component( + video_file, location=new_location + ) + new_video_file = new_location.get_filesystem_path(component) + + assert filecmp.cmp(video_file, new_video_file) + + +def test_get_thumbnail_url(server_location, server_image_component): + '''Test download a thumbnail image from server location''' + thumbnail_url = server_location.get_thumbnail_url( + server_image_component, + size=10 + ) + assert thumbnail_url + + response = 
requests.get(thumbnail_url) + response.raise_for_status() + + image_file = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + '..', + '..', + 'fixture', + 'media', + 'image-resized-10.png' + ) + ) + expected_image_contents = open(image_file).read() + assert response.content == expected_image_contents diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py new file mode 100644 index 0000000000..3a81fdbe85 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py @@ -0,0 +1,135 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import uuid + +import ftrack_api + + +def test_query_metadata(new_project): + '''Query metadata.''' + session = new_project.session + + metadata_key = uuid.uuid1().hex + metadata_value = uuid.uuid1().hex + new_project['metadata'][metadata_key] = metadata_value + session.commit() + + results = session.query( + 'Project where metadata.key is {0}'.format(metadata_key) + ) + + assert len(results) == 1 + assert new_project['id'] == results[0]['id'] + + results = session.query( + 'Project where metadata.value is {0}'.format(metadata_value) + ) + + assert len(results) == 1 + assert new_project['id'] == results[0]['id'] + + results = session.query( + 'Project where metadata.key is {0} and ' + 'metadata.value is {1}'.format(metadata_key, metadata_value) + ) + + assert len(results) == 1 + assert new_project['id'] == results[0]['id'] + + +def test_set_get_metadata_from_different_sessions(new_project): + '''Get and set metadata using different sessions.''' + session = new_project.session + + metadata_key = uuid.uuid1().hex + metadata_value = uuid.uuid1().hex + new_project['metadata'][metadata_key] = metadata_value + session.commit() + + new_session = ftrack_api.Session() + project = 
new_session.query( + 'Project where id is {0}'.format(new_project['id']) + )[0] + + assert project['metadata'][metadata_key] == metadata_value + + project['metadata'][metadata_key] = uuid.uuid1().hex + + new_session.commit() + + new_session = ftrack_api.Session() + project = new_session.query( + 'Project where id is {0}'.format(project['id']) + )[0] + + assert project['metadata'][metadata_key] != metadata_value + + +def test_get_set_multiple_metadata(new_project): + '''Get and set multiple metadata.''' + session = new_project.session + + new_project['metadata'] = { + 'key1': 'value1', + 'key2': 'value2' + } + session.commit() + + assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) + + new_session = ftrack_api.Session() + retrieved = new_session.query( + 'Project where id is {0}'.format(new_project['id']) + )[0] + + assert set(retrieved['metadata'].keys()) == set(['key1', 'key2']) + + +def test_metadata_parent_type_remains_in_schema_id_format(session, new_project): + '''Metadata parent_type remains in schema id format post commit.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + assert entity['parent_type'] == new_project.entity_type + + +def test_set_metadata_twice(new_project): + '''Set metadata twice in a row.''' + session = new_project.session + + new_project['metadata'] = { + 'key1': 'value1', + 'key2': 'value2' + } + session.commit() + + assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) + + new_project['metadata'] = { + 'key3': 'value3', + 'key4': 'value4' + } + session.commit() + + +def test_set_same_metadata_on_retrieved_entity(new_project): + '''Set same metadata on retrieved entity.''' + session = new_project.session + + new_project['metadata'] = { + 'key1': 'value1' + } + session.commit() + + project = session.get('Project', new_project['id']) + + project['metadata'] = { + 'key1': 'value1' + 
} + session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py new file mode 100644 index 0000000000..5d854eaed4 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py @@ -0,0 +1,67 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api +import ftrack_api.inspection + + +def test_create_reply(session, new_note, user, unique_name): + '''Create reply to a note.''' + reply_text = 'My reply on note' + new_note.create_reply(reply_text, user) + + session.commit() + + assert len(new_note['replies']) == 1 + + assert reply_text == new_note['replies'][0]['content'] + + +def test_create_note_on_entity(session, new_task, user, unique_name): + '''Create note attached to an entity.''' + note = new_task.create_note(unique_name, user) + session.commit() + + session.reset() + retrieved_task = session.get(*ftrack_api.inspection.identity(new_task)) + assert len(retrieved_task['notes']) == 1 + assert ( + ftrack_api.inspection.identity(retrieved_task['notes'][0]) + == ftrack_api.inspection.identity(note) + ) + + +def test_create_note_on_entity_specifying_recipients( + session, new_task, user, unique_name, new_user +): + '''Create note with specified recipients attached to an entity.''' + recipient = new_user + note = new_task.create_note(unique_name, user, recipients=[recipient]) + session.commit() + + session.reset() + retrieved_note = session.get(*ftrack_api.inspection.identity(note)) + + # Note: The calling user is automatically added server side so there will be + # 2 recipients. 
+ assert len(retrieved_note['recipients']) == 2 + specified_recipient_present = False + for entry in retrieved_note['recipients']: + if entry['resource_id'] == recipient['id']: + specified_recipient_present = True + break + + assert specified_recipient_present + + +def test_create_note_on_entity_specifying_category( + session, new_task, user, unique_name +): + '''Create note with specified category attached to an entity.''' + category = session.query('NoteCategory').first() + note = new_task.create_note(unique_name, user, category=category) + session.commit() + + session.reset() + retrieved_note = session.get(*ftrack_api.inspection.identity(note)) + assert retrieved_note['category']['id'] == category['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py new file mode 100644 index 0000000000..10ef485aed --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py @@ -0,0 +1,64 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest + + +@pytest.mark.parametrize('schema, expected', [ + ('Task', [ + 'Not started', 'In progress', 'Awaiting approval', 'Approved' + ]), + ('Shot', [ + 'Normal', 'Omitted', 'On Hold' + ]), + ('AssetVersion', [ + 'Approved', 'Pending' + ]), + ('AssetBuild', [ + 'Normal', 'Omitted', 'On Hold' + ]), + ('Invalid', ValueError) +], ids=[ + 'task', + 'shot', + 'asset version', + 'asset build', + 'invalid' +]) +def test_get_statuses(project_schema, schema, expected): + '''Retrieve statuses for schema and optional type.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + project_schema.get_statuses(schema) + + else: + statuses = project_schema.get_statuses(schema) + status_names = [status['name'] 
for status in statuses] + assert sorted(status_names) == sorted(expected) + + +@pytest.mark.parametrize('schema, expected', [ + ('Task', [ + 'Generic', 'Animation', 'Modeling', 'Previz', 'Lookdev', 'Hair', + 'Cloth', 'FX', 'Lighting', 'Compositing', 'Tracking', 'Rigging', + 'test 1', 'test type 2' + ]), + ('AssetBuild', ['Character', 'Prop', 'Environment', 'Matte Painting']), + ('Invalid', ValueError) +], ids=[ + 'task', + 'asset build', + 'invalid' +]) +def test_get_types(project_schema, schema, expected): + '''Retrieve types for schema.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + project_schema.get_types(schema) + + else: + types = project_schema.get_types(schema) + type_names = [type_['name'] for type_ in types] + assert sorted(type_names) == sorted(expected) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py new file mode 100644 index 0000000000..1a5afe70c9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py @@ -0,0 +1,24 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + + +def test_add_remove_and_query_scopes_for_tasks(session, new_task, new_scope): + '''Add, remove and query scopes for task.''' + query_string = 'Task where scopes.name is {0}'.format(new_scope['name']) + tasks = session.query(query_string) + + assert len(tasks) == 0 + + new_task['scopes'].append(new_scope) + session.commit() + + tasks = session.query(query_string) + + assert len(tasks) == 1 and tasks[0] == new_task + + new_task['scopes'].remove(new_scope) + session.commit() + + tasks = session.query(query_string) + + assert len(tasks) == 0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py new file mode 100644 index 0000000000..4d7e455042 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py @@ -0,0 +1,49 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2016 ftrack + + +def test_force_start_timer(new_user, task): + '''Successfully force starting a timer when another timer is running.''' + first_timer = new_user.start_timer(context=task) + second_timer = new_user.start_timer(context=task, force=True) + + assert first_timer['id'] + assert second_timer['id'] + assert first_timer['id'] != second_timer['id'] + + +def test_timer_creates_timelog(new_user, task, unique_name): + '''Successfully create time log when stopping timer. + + A timer which was immediately stopped should have a duration less than + a minute. + + ''' + comment = 'comment' + unique_name + timer = new_user.start_timer( + context=task, + name=unique_name, + comment=comment + ) + timer_start = timer['start'] + timelog = new_user.stop_timer() + + assert timelog['user_id'] == new_user['id'] + assert timelog['context_id']== task['id'] + assert timelog['name'] == unique_name + assert timelog['comment'] == comment + assert timelog['start'] == timer_start + assert isinstance(timelog['duration'], (int, long, float)) + assert timelog['duration'] < 60 + + +def test_reset_user_api_key(new_user): + '''Test resetting of api keys.''' + + api_keys = list() + for i in range(0, 10): + api_keys.append(new_user.reset_api_key()) + + # make sure all api keys are unique + assert len(set(api_keys)) == 10 + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py new file mode 100644 index 0000000000..bc98f15de2 --- /dev/null +++ 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py new file mode 100644 index 0000000000..09b270a043 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py @@ -0,0 +1,92 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import sys +import time +import logging +import argparse + +import ftrack_api +from ftrack_api.event.base import Event + + +TOPIC = 'test_event_hub_server_heartbeat' +RECEIVED = [] + + +def callback(event): + '''Track received messages.''' + counter = event['data']['counter'] + RECEIVED.append(counter) + print('Received message {0} ({1} in total)'.format(counter, len(RECEIVED))) + + +def main(arguments=None): + '''Publish and receive heartbeat test.''' + parser = argparse.ArgumentParser() + parser.add_argument('mode', choices=['publish', 'subscribe']) + + namespace = parser.parse_args(arguments) + logging.basicConfig(level=logging.INFO) + + session = ftrack_api.Session() + + message_count = 100 + sleep_time_per_message = 1 + + if namespace.mode == 'publish': + max_atempts = 100 + retry_interval = 0.1 + atempt = 0 + while not session.event_hub.connected: + print ( + 'Session is not yet connected to event hub, sleeping for 0.1s' + ) + time.sleep(retry_interval) + + atempt = atempt + 1 + if atempt > max_atempts: + raise Exception( + 'Unable to connect to server within {0} seconds'.format( + max_atempts * retry_interval + ) + ) + + print('Sending {0} messages...'.format(message_count)) + + for counter in range(1, message_count + 1): + session.event_hub.publish( + Event(topic=TOPIC, 
data=dict(counter=counter)) + ) + print('Sent message {0}'.format(counter)) + + if counter < message_count: + time.sleep(sleep_time_per_message) + + elif namespace.mode == 'subscribe': + session.event_hub.subscribe('topic={0}'.format(TOPIC), callback) + session.event_hub.wait( + duration=( + ((message_count - 1) * sleep_time_per_message) + 15 + ) + ) + + if len(RECEIVED) != message_count: + print( + '>> Failed to receive all messages. Dropped {0} <<' + .format(message_count - len(RECEIVED)) + ) + return False + + # Give time to flush all buffers. + time.sleep(5) + + return True + + +if __name__ == '__main__': + result = main(sys.argv[1:]) + if not result: + raise SystemExit(1) + else: + raise SystemExit(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py new file mode 100644 index 0000000000..d9496fe070 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py @@ -0,0 +1,36 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.event.base + + +def test_string_representation(): + '''String representation.''' + event = ftrack_api.event.base.Event('test', id='some-id') + assert str(event) == ( + "" + ) + + +def test_stop(): + '''Set stopped flag on event.''' + event = ftrack_api.event.base.Event('test', id='some-id') + + assert event.is_stopped() is False + + event.stop() + assert event.is_stopped() is True + + +def test_is_stopped(): + '''Report stopped status of event.''' + event = ftrack_api.event.base.Event('test', id='some-id') + + assert event.is_stopped() is False + + event.stop() + assert event.is_stopped() is True + + event.stop() + assert event.is_stopped() is True diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py new file mode 100644 index 0000000000..4cf68b58f0 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py @@ -0,0 +1,174 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import operator +import inspect + +import pytest + +from ftrack_api.event.expression import ( + Expression, All, Any, Not, Condition, Parser +) +from ftrack_api.exception import ParseError + + +@pytest.fixture() +def candidate(): + '''Return common candidate to test expressions against.''' + return { + 'id': 10, + 'name': 'value', + 'change': { + 'name': 'value', + 'new_value': 10 + } + } + + +@pytest.mark.parametrize('expression, expected', [ + pytest.mark.xfail(('', Expression())), + ('invalid', ParseError), + ('key=value nor other=value', ParseError), + ('key=value', Condition('key', operator.eq, 'value')), + ('key="value"', Condition('key', operator.eq, 'value')), + ( + 'a=b and ((c=d or e!=f) and not g.h > 10)', + All([ + Condition('a', operator.eq, 'b'), + All([ + Any([ + Condition('c', operator.eq, 'd'), + Condition('e', operator.ne, 'f') + ]), + Not( + Condition('g.h', operator.gt, 10) + ) + ]) + ]) + ) +], ids=[ + 'empty expression', + 'invalid expression', + 'invalid conjunction', + 'basic condition', + 'basic quoted condition', + 'complex condition' +]) +def test_parser_parse(expression, expected): + '''Parse expression into Expression instances.''' + parser = Parser() + + if inspect.isclass(expected)and issubclass(expected, Exception): + with pytest.raises(expected): + parser.parse(expression) + else: + assert str(parser.parse(expression)) == str(expected) + + +@pytest.mark.parametrize('expression, expected', [ + (Expression(), ''), + (All([Expression(), Expression()]), ' ]>'), + (Any([Expression(), Expression()]), ' ]>'), + (Not(Expression()), '>'), + (Condition('key', '=', 'value'), '') +], 
ids=[ + 'Expression', + 'All', + 'Any', + 'Not', + 'Condition' +]) +def test_string_representation(expression, expected): + '''String representation of expression.''' + assert str(expression) == expected + + +@pytest.mark.parametrize('expression, expected', [ + # Expression + (Expression(), True), + + # All + (All(), True), + (All([Expression(), Expression()]), True), + (All([Expression(), Condition('test', operator.eq, 'value')]), False), + + # Any + (Any(), False), + (Any([Expression(), Condition('test', operator.eq, 'value')]), True), + (Any([ + Condition('test', operator.eq, 'value'), + Condition('other', operator.eq, 'value') + ]), False), + + # Not + (Not(Expression()), False), + (Not(Not(Expression())), True) +], ids=[ + 'Expression-always matches', + + 'All-no expressions always matches', + 'All-all match', + 'All-not all match', + + 'Any-no expressions never matches', + 'Any-some match', + 'Any-none match', + + 'Not-invert positive match', + 'Not-double negative is positive match' +]) +def test_match(expression, candidate, expected): + '''Determine if candidate matches expression.''' + assert expression.match(candidate) is expected + + +def parametrize_test_condition_match(metafunc): + '''Parametrize condition_match tests.''' + identifiers = [] + data = [] + + matrix = { + # Operator, match, no match + operator.eq: { + 'match': 10, 'no-match': 20, + 'wildcard-match': 'valu*', 'wildcard-no-match': 'values*' + }, + operator.ne: {'match': 20, 'no-match': 10}, + operator.ge: {'match': 10, 'no-match': 20}, + operator.le: {'match': 10, 'no-match': 0}, + operator.gt: {'match': 0, 'no-match': 10}, + operator.lt: {'match': 20, 'no-match': 10} + } + + for operator_function, values in matrix.items(): + for value_label, value in values.items(): + if value_label.startswith('wildcard'): + key_options = { + 'plain': 'name', + 'nested': 'change.name' + } + else: + key_options = { + 'plain': 'id', + 'nested': 'change.new_value' + } + + for key_label, key in 
key_options.items(): + identifiers.append('{} operator {} key {}'.format( + operator_function.__name__, key_label, value_label + )) + + data.append(( + key, operator_function, value, + 'no-match' not in value_label + )) + + metafunc.parametrize( + 'key, operator, value, expected', data, ids=identifiers + ) + + +def test_condition_match(key, operator, value, candidate, expected): + '''Determine if candidate matches condition expression.''' + condition = Condition(key, operator, value) + assert condition.match(candidate) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py new file mode 100644 index 0000000000..6f1920dddf --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py @@ -0,0 +1,701 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect +import json +import os +import time +import subprocess +import sys + +import pytest + +import ftrack_api.event.hub +import ftrack_api.event.subscriber +from ftrack_api.event.base import Event +import ftrack_api.exception + + +class MockClass(object): + '''Mock class for testing.''' + + def method(self): + '''Mock method for testing.''' + + +def mockFunction(): + '''Mock function for testing.''' + + +class MockConnection(object): + '''Mock connection for testing.''' + + @property + def connected(self): + '''Return whether connected.''' + return True + + def close(self): + '''Close mock connection.''' + pass + + +def assert_callbacks(hub, callbacks): + '''Assert hub has exactly *callbacks* subscribed.''' + # Subscribers always starts with internal handle_reply subscriber. 
+ subscribers = hub._subscribers[:] + subscribers.pop(0) + + if len(subscribers) != len(callbacks): + raise AssertionError( + 'Number of subscribers ({0}) != number of callbacks ({1})' + .format(len(subscribers), len(callbacks)) + ) + + for index, subscriber in enumerate(subscribers): + if subscriber.callback != callbacks[index]: + raise AssertionError( + 'Callback at {0} != subscriber callback at same index.' + .format(index) + ) + + +@pytest.fixture() +def event_hub(request, session): + '''Return event hub to test against. + + Hub is automatically connected at start of test and disconnected at end. + + ''' + hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + hub.connect() + + def cleanup(): + '''Cleanup.''' + if hub.connected: + hub.disconnect() + + request.addfinalizer(cleanup) + + return hub + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), + ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000') +], ids=[ + 'with port', + 'without port' +]) +def test_get_server_url(server_url, expected): + '''Return server url.''' + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.get_server_url() == expected + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', 'test.ftrackapp.com'), + ('https://test.ftrackapp.com:9000', 'test.ftrackapp.com:9000') +], ids=[ + 'with port', + 'without port' +]) +def test_get_network_location(server_url, expected): + '''Return network location of server url.''' + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.get_network_location() == expected + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', True), + ('http://test.ftrackapp.com', False) +], ids=[ + 'secure', + 'not secure' +]) +def test_secure_property(server_url, expected, mocker): + '''Return 
whether secure connection used.''' + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.secure is expected + + +def test_connected_property(session): + '''Return connected state.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + assert event_hub.connected is False + + event_hub.connect() + assert event_hub.connected is True + + event_hub.disconnect() + assert event_hub.connected is False + + +@pytest.mark.parametrize('server_url, expected', [ + ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), + ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000'), + ('test.ftrackapp.com', ValueError), + ('https://:9000', ValueError), +], ids=[ + 'with port', + 'without port', + 'missing scheme', + 'missing hostname' +]) +def test_initialise_against_server_url(server_url, expected): + '''Initialise against server url.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + else: + event_hub = ftrack_api.event.hub.EventHub( + server_url, 'user', 'key' + ) + assert event_hub.get_server_url() == expected + + +def test_connect(session): + '''Connect.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + event_hub.connect() + + assert event_hub.connected is True + event_hub.disconnect() + + +def test_connect_when_already_connected(event_hub): + '''Fail to connect when already connected''' + assert event_hub.connected is True + + with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: + event_hub.connect() + + assert 'Already connected' in str(error) + + +def test_connect_failure(session, mocker): + '''Fail to connect to server.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + def force_fail(*args, **kwargs): 
+ '''Force connection failure.''' + raise Exception('Forced fail.') + + mocker.patch('websocket.create_connection', force_fail) + with pytest.raises(ftrack_api.exception.EventHubConnectionError): + event_hub.connect() + + +def test_connect_missing_required_transport(session, mocker, caplog): + '''Fail to connect to server that does not provide correct transport.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + original_get_socket_io_session = event_hub._get_socket_io_session + + def _get_socket_io_session(): + '''Patched to return no transports.''' + session = original_get_socket_io_session() + return ftrack_api.event.hub.SocketIoSession( + session[0], session[1], [] + ) + + mocker.patch.object( + event_hub, '_get_socket_io_session', _get_socket_io_session + ) + + with pytest.raises(ftrack_api.exception.EventHubConnectionError): + event_hub.connect() + + logs = caplog.records() + assert ( + 'Server does not support websocket sessions.' 
in str(logs[-1].exc_info) + ) + + +def test_disconnect(event_hub): + '''Disconnect and unsubscribe all subscribers.''' + event_hub.disconnect() + assert len(event_hub._subscribers) == 0 + assert event_hub.connected is False + + +def test_disconnect_without_unsubscribing(event_hub): + '''Disconnect without unsubscribing all subscribers.''' + event_hub.disconnect(unsubscribe=False) + assert len(event_hub._subscribers) > 0 + assert event_hub.connected is False + + +def test_close_connection_from_manually_connected_hub(session_no_autoconnect_hub): + '''Close connection from manually connected hub.''' + session_no_autoconnect_hub.event_hub.connect() + session_no_autoconnect_hub.close() + assert session_no_autoconnect_hub.event_hub.connected is False + + +def test_disconnect_when_not_connected(session): + '''Fail to disconnect when not connected''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: + event_hub.disconnect() + + assert 'Not currently connected' in str(error) + + +def test_reconnect(event_hub): + '''Reconnect successfully.''' + assert event_hub.connected is True + event_hub.reconnect() + assert event_hub.connected is True + + +def test_reconnect_when_not_connected(session): + '''Reconnect successfully even if not already connected.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + assert event_hub.connected is False + + event_hub.reconnect() + assert event_hub.connected is True + + event_hub.disconnect() + + +def test_fail_to_reconnect(session, mocker): + '''Fail to reconnect.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + event_hub.connect() + assert event_hub.connected is True + + def force_fail(*args, **kwargs): + '''Force connection failure.''' + raise Exception('Forced fail.') + + 
mocker.patch('websocket.create_connection', force_fail) + + attempts = 2 + with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: + event_hub.reconnect(attempts=attempts, delay=0.5) + + assert 'Failed to reconnect to event server' in str(error) + assert 'after {} attempts'.format(attempts) in str(error) + + +def test_wait(event_hub): + '''Wait for event and handle as they arrive.''' + called = {'callback': False} + + def callback(event): + called['callback'] = True + + event_hub.subscribe('topic=test-subscribe', callback) + + event_hub.publish(Event(topic='test-subscribe')) + + # Until wait, the event should not have been processed even if received. + time.sleep(1) + assert called == {'callback': False} + + event_hub.wait(2) + assert called == {'callback': True} + + +def test_wait_interrupted_by_disconnect(event_hub): + '''Interrupt wait loop with disconnect event.''' + wait_time = 5 + start = time.time() + + # Inject event directly for test purposes. + event = Event(topic='ftrack.meta.disconnected') + event_hub._event_queue.put(event) + + event_hub.wait(wait_time) + + assert time.time() - start < wait_time + + +@pytest.mark.parametrize('identifier, registered', [ + ('registered-test-subscriber', True), + ('unregistered-test-subscriber', False) +], ids=[ + 'registered', + 'missing' +]) +def test_get_subscriber_by_identifier(event_hub, identifier, registered): + '''Return subscriber by identifier.''' + def callback(event): + pass + + subscriber = { + 'id': 'registered-test-subscriber' + } + + event_hub.subscribe('topic=test-subscribe', callback, subscriber) + retrieved = event_hub.get_subscriber_by_identifier(identifier) + + if registered: + assert isinstance(retrieved, ftrack_api.event.subscriber.Subscriber) + assert retrieved.metadata.get('id') == subscriber['id'] + else: + assert retrieved is None + + +def test_subscribe(event_hub): + '''Subscribe to topics.''' + called = {'a': False, 'b': False} + + def callback_a(event): + called['a'] = True 
+ + def callback_b(event): + called['b'] = True + + event_hub.subscribe('topic=test-subscribe', callback_a) + event_hub.subscribe('topic=test-subscribe-other', callback_b) + + event_hub.publish(Event(topic='test-subscribe')) + event_hub.wait(2) + + assert called == {'a': True, 'b': False} + + +def test_subscribe_before_connected(session): + '''Subscribe to topic before connected.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + called = {'callback': False} + + def callback(event): + called['callback'] = True + + identifier = 'test-subscriber' + event_hub.subscribe( + 'topic=test-subscribe', callback, subscriber={'id': identifier} + ) + assert event_hub.get_subscriber_by_identifier(identifier) is not None + + event_hub.connect() + + try: + event_hub.publish(Event(topic='test-subscribe')) + event_hub.wait(2) + finally: + event_hub.disconnect() + + assert called == {'callback': True} + + +def test_duplicate_subscriber(event_hub): + '''Fail to subscribe same subscriber more than once.''' + subscriber = {'id': 'test-subscriber'} + event_hub.subscribe('topic=test', None, subscriber=subscriber) + + with pytest.raises(ftrack_api.exception.NotUniqueError) as error: + event_hub.subscribe('topic=test', None, subscriber=subscriber) + + assert '{0} already exists'.format(subscriber['id']) in str(error) + + +def test_unsubscribe(event_hub): + '''Unsubscribe a specific callback.''' + def callback_a(event): + pass + + def callback_b(event): + pass + + identifier_a = event_hub.subscribe('topic=test', callback_a) + identifier_b = event_hub.subscribe('topic=test', callback_b) + + assert_callbacks(event_hub, [callback_a, callback_b]) + + event_hub.unsubscribe(identifier_a) + + # Unsubscribe requires confirmation event so wait here to give event a + # chance to process. 
+ time.sleep(5) + + assert_callbacks(event_hub, [callback_b]) + + +def test_unsubscribe_whilst_disconnected(event_hub): + '''Unsubscribe whilst disconnected.''' + identifier = event_hub.subscribe('topic=test', None) + event_hub.disconnect(unsubscribe=False) + + event_hub.unsubscribe(identifier) + assert_callbacks(event_hub, []) + + +def test_unsubscribe_missing_subscriber(event_hub): + '''Fail to unsubscribe a non-subscribed subscriber.''' + identifier = 'non-subscribed-subscriber' + with pytest.raises(ftrack_api.exception.NotFoundError) as error: + event_hub.unsubscribe(identifier) + + assert ( + 'missing subscriber with identifier {}'.format(identifier) + in str(error) + ) + + +@pytest.mark.parametrize('event_data', [ + dict(source=dict(id='1', user=dict(username='auto'))), + dict(source=dict(user=dict(username='auto'))), + dict(source=dict(id='1')), + dict() +], ids=[ + 'pre-prepared', + 'missing id', + 'missing user', + 'no source' +]) +def test_prepare_event(session, event_data): + '''Prepare event.''' + # Replace username `auto` in event data with API user. 
+ try: + if event_data['source']['user']['username'] == 'auto': + event_data['source']['user']['username'] = session.api_user + except KeyError: + pass + + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + event_hub.id = '1' + + event = Event('test', id='event-id', **event_data) + expected = Event( + 'test', id='event-id', source=dict(id='1', user=dict(username=session.api_user)) + ) + event_hub._prepare_event(event) + assert event == expected + + +def test_prepare_reply_event(session): + '''Prepare reply event.''' + event_hub = ftrack_api.event.hub.EventHub( + session.server_url, session.api_user, session.api_key + ) + + source_event = Event('source', source=dict(id='source-id')) + reply_event = Event('reply') + + event_hub._prepare_reply_event(reply_event, source_event) + assert source_event['source']['id'] in reply_event['target'] + assert reply_event['in_reply_to_event'] == source_event['id'] + + event_hub._prepare_reply_event(reply_event, source_event, {'id': 'source'}) + assert reply_event['source'] == {'id': 'source'} + + +def test_publish(event_hub): + '''Publish asynchronous event.''' + called = {'callback': False} + + def callback(event): + called['callback'] = True + + event_hub.subscribe('topic=test-subscribe', callback) + + event_hub.publish(Event(topic='test-subscribe')) + event_hub.wait(2) + + assert called == {'callback': True} + + +def test_publish_raising_error(event_hub): + '''Raise error, when configured, on failed publish.''' + # Note that the event hub currently only fails publish when not connected. + # All other errors are inconsistently swallowed. 
+ event_hub.disconnect() + event = Event(topic='a-topic', data=dict(status='fail')) + + with pytest.raises(Exception): + event_hub.publish(event, on_error='raise') + + +def test_publish_ignoring_error(event_hub): + '''Ignore error, when configured, on failed publish.''' + # Note that the event hub currently only fails publish when not connected. + # All other errors are inconsistently swallowed. + event_hub.disconnect() + event = Event(topic='a-topic', data=dict(status='fail')) + event_hub.publish(event, on_error='ignore') + + +def test_publish_logs_other_errors(event_hub, caplog, mocker): + '''Log publish errors other than connection error.''' + # Mock connection to force error. + mocker.patch.object(event_hub, '_connection', MockConnection()) + + event = Event(topic='a-topic', data=dict(status='fail')) + event_hub.publish(event) + + expected = 'Error sending event {0}.'.format(event) + messages = [record.getMessage().strip() for record in caplog.records()] + assert expected in messages, 'Expected log message missing in output.' 
+ + +def test_synchronous_publish(event_hub): + '''Publish event synchronously and collect results.''' + def callback_a(event): + return 'A' + + def callback_b(event): + return 'B' + + def callback_c(event): + return 'C' + + event_hub.subscribe('topic=test', callback_a, priority=50) + event_hub.subscribe('topic=test', callback_b, priority=60) + event_hub.subscribe('topic=test', callback_c, priority=70) + + results = event_hub.publish(Event(topic='test'), synchronous=True) + assert results == ['A', 'B', 'C'] + + +def test_publish_with_reply(event_hub): + '''Publish asynchronous event with on reply handler.''' + + def replier(event): + '''Replier.''' + return 'Replied' + + event_hub.subscribe('topic=test', replier) + + called = {'callback': None} + + def on_reply(event): + called['callback'] = event['data'] + + event_hub.publish(Event(topic='test'), on_reply=on_reply) + event_hub.wait(2) + + assert called['callback'] == 'Replied' + + +def test_publish_with_multiple_replies(event_hub): + '''Publish asynchronous event and retrieve multiple replies.''' + + def replier_one(event): + '''Replier.''' + return 'One' + + def replier_two(event): + '''Replier.''' + return 'Two' + + event_hub.subscribe('topic=test', replier_one) + event_hub.subscribe('topic=test', replier_two) + + called = {'callback': []} + + def on_reply(event): + called['callback'].append(event['data']) + + event_hub.publish(Event(topic='test'), on_reply=on_reply) + event_hub.wait(2) + + assert sorted(called['callback']) == ['One', 'Two'] + + +@pytest.mark.slow +def test_server_heartbeat_response(): + '''Maintain connection by responding to server heartbeat request.''' + test_script = os.path.join( + os.path.dirname(__file__), 'event_hub_server_heartbeat.py' + ) + + # Start subscriber that will listen for all three messages. + subscriber = subprocess.Popen([sys.executable, test_script, 'subscribe']) + + # Give subscriber time to connect to server. 
+ time.sleep(10) + + # Start publisher to publish three messages. + publisher = subprocess.Popen([sys.executable, test_script, 'publish']) + + publisher.wait() + subscriber.wait() + + assert subscriber.returncode == 0 + + +def test_stop_event(event_hub): + '''Stop processing of subsequent local handlers when stop flag set.''' + called = { + 'a': False, + 'b': False, + 'c': False + } + + def callback_a(event): + called['a'] = True + + def callback_b(event): + called['b'] = True + event.stop() + + def callback_c(event): + called['c'] = True + + event_hub.subscribe('topic=test', callback_a, priority=50) + event_hub.subscribe('topic=test', callback_b, priority=60) + event_hub.subscribe('topic=test', callback_c, priority=70) + + event_hub.publish(Event(topic='test')) + event_hub.wait(2) + + assert called == { + 'a': True, + 'b': True, + 'c': False + } + + +def test_encode(session): + '''Encode event data.''' + encoded = session.event_hub._encode( + dict(name='ftrack.event', args=[Event('test')]) + ) + assert 'inReplyToEvent' in encoded + assert 'in_reply_to_event' not in encoded + + +def test_decode(session): + '''Decode event data.''' + decoded = session.event_hub._decode( + json.dumps({ + 'inReplyToEvent': 'id' + }) + ) + + assert 'in_reply_to_event' in decoded + assert 'inReplyToEvent' not in decoded diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py new file mode 100644 index 0000000000..dc8ac69fd9 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py @@ -0,0 +1,33 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.event.subscriber +from ftrack_api.event.base import Event + + +def test_string_representation(): + '''String representation.''' + subscriber = 
ftrack_api.event.subscriber.Subscriber( + 'topic=test', lambda x: None, {'meta': 'info'}, 100 + ) + + assert str(subscriber) == ( + '' + ) + + +@pytest.mark.parametrize('expression, event, expected', [ + ('topic=test', Event(topic='test'), True), + ('topic=test', Event(topic='other-test'), False) +], ids=[ + 'interested', + 'not interested' +]) +def test_interested_in(expression, event, expected): + '''Determine if subscriber interested in event.''' + subscriber = ftrack_api.event.subscriber.Subscriber( + expression, lambda x: None, {'meta': 'info'}, 100 + ) + assert subscriber.interested_in(event) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py new file mode 100644 index 0000000000..1535309f25 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py @@ -0,0 +1,28 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.event.subscription +from ftrack_api.event.base import Event + + +def test_string_representation(): + '''String representation is subscription expression.''' + expression = 'topic=some-topic' + subscription = ftrack_api.event.subscription.Subscription(expression) + + assert str(subscription) == expression + + +@pytest.mark.parametrize('expression, event, expected', [ + ('topic=test', Event(topic='test'), True), + ('topic=test', Event(topic='other-test'), False) +], ids=[ + 'match', + 'no match' +]) +def test_includes(expression, event, expected): + '''Subscription includes event.''' + subscription = ftrack_api.event.subscription.Subscription(expression) + assert subscription.includes(event) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py new file mode 100644 index 0000000000..bc98f15de2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py new file mode 100644 index 0000000000..51c896f96b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py @@ -0,0 +1,36 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.resource_identifier_transformer.base as _transformer + + +@pytest.fixture() +def transformer(session): + '''Return instance of ResourceIdentifierTransformer.''' + return _transformer.ResourceIdentifierTransformer(session) + + +@pytest.mark.parametrize('resource_identifier, context, expected', [ + ('identifier', None, 'identifier'), + ('identifier', {'user': {'username': 'user'}}, 'identifier') +], ids=[ + 'no context', + 'basic context' +]) +def test_encode(transformer, resource_identifier, context, expected): + '''Encode resource identifier.''' + assert transformer.encode(resource_identifier, context) == expected + + +@pytest.mark.parametrize('resource_identifier, context, expected', [ + ('identifier', None, 'identifier'), + ('identifier', {'user': {'username': 'user'}}, 'identifier') +], ids=[ + 'no context', + 'basic context' +]) +def test_decode(transformer, resource_identifier, context, expected): + '''Encode resource identifier.''' + assert transformer.decode(resource_identifier, context) == 
expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py new file mode 100644 index 0000000000..bc98f15de2 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py @@ -0,0 +1,2 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py new file mode 100644 index 0000000000..dbf91ead20 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py @@ -0,0 +1,31 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.structure.base + + +class Concrete(ftrack_api.structure.base.Structure): + '''Concrete implementation to allow testing non-abstract methods.''' + + def get_resource_identifier(self, entity, context=None): + '''Return a resource identifier for supplied *entity*. + + *context* can be a mapping that supplies additional information. 
+ + ''' + return 'resource_identifier' + + +@pytest.mark.parametrize('sequence, expected', [ + ({'padding': None}, '%d'), + ({'padding': 4}, '%04d') +], ids=[ + 'no padding', + 'padded' +]) +def test_get_sequence_expression(sequence, expected): + '''Get sequence expression from sequence.''' + structure = Concrete() + assert structure._get_sequence_expression(sequence) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py new file mode 100644 index 0000000000..01ccb35ac8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py @@ -0,0 +1,49 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest +import mock + +import ftrack_api +import ftrack_api.structure.entity_id + + +@pytest.fixture(scope='session') +def structure(): + '''Return structure.''' + return ftrack_api.structure.entity_id.EntityIdStructure() + + +# Note: When it is possible to use indirect=True on just a few arguments, the +# called functions here can change to standard fixtures. 
+# https://github.com/pytest-dev/pytest/issues/579 + +def valid_entity(): + '''Return valid entity.''' + session = ftrack_api.Session() + + entity = session.create('FileComponent', { + 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', + 'name': 'file_component', + 'file_type': '.png' + }) + + return entity + + +@pytest.mark.parametrize('entity, context, expected', [ + (valid_entity(), {}, 'f6cd40cb-d1c0-469f-a2d5-10369be8a724'), + (mock.Mock(), {}, Exception) +], ids=[ + 'valid-entity', + 'non-entity' +]) +def test_get_resource_identifier(structure, entity, context, expected): + '''Get resource identifier.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + structure.get_resource_identifier(entity, context) + else: + assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py new file mode 100644 index 0000000000..ef81da2d65 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py @@ -0,0 +1,115 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest + +import ftrack_api +import ftrack_api.structure.id + + +@pytest.fixture(scope='session') +def structure(): + '''Return structure.''' + return ftrack_api.structure.id.IdStructure(prefix='path') + + +# Note: When it is possible to use indirect=True on just a few arguments, the +# called functions here can change to standard fixtures. 
+# https://github.com/pytest-dev/pytest/issues/579 + +def file_component(container=None): + '''Return file component.''' + session = ftrack_api.Session() + + entity = session.create('FileComponent', { + 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', + 'name': '0001', + 'file_type': '.png', + 'container': container + }) + + return entity + + +def sequence_component(padding=0): + '''Return sequence component with *padding*.''' + session = ftrack_api.Session() + + entity = session.create('SequenceComponent', { + 'id': 'ff17edad-2129-483b-8b59-d1a654c8497b', + 'name': 'sequence_component', + 'file_type': '.png', + 'padding': padding + }) + + return entity + + +def container_component(): + '''Return container component.''' + session = ftrack_api.Session() + + entity = session.create('ContainerComponent', { + 'id': '03ab9967-f86c-4b55-8252-cd187d0c244a', + 'name': 'container_component' + }) + + return entity + + +def unsupported_entity(): + '''Return an unsupported entity.''' + session = ftrack_api.Session() + + entity = session.create('User', { + 'username': 'martin' + }) + + return entity + + +@pytest.mark.parametrize('entity, context, expected', [ + ( + file_component(), {}, + 'path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png' + ), + ( + file_component(container_component()), {}, + 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/' + 'f6cd40cb-d1c0-469f-a2d5-10369be8a724.png' + ), + ( + file_component(sequence_component()), {}, + 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png' + ), + ( + sequence_component(padding=0), {}, + 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png' + ), + ( + sequence_component(padding=4), {}, + 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png' + ), + ( + container_component(), {}, + 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a' + ), + (unsupported_entity(), {}, NotImplementedError) +], ids=[ + 'file-component', + 'file-component-in-container', + 'file-component-in-sequence', + 'unpadded-sequence-component', 
+ 'padded-sequence-component', + 'container-component', + 'unsupported-entity' +]) +def test_get_resource_identifier(structure, entity, context, expected): + '''Get resource identifier.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + structure.get_resource_identifier(entity, context) + else: + assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py new file mode 100644 index 0000000000..e294e04a70 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py @@ -0,0 +1,33 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import inspect + +import pytest +import mock + +import ftrack_api.structure.origin + + +@pytest.fixture(scope='session') +def structure(): + '''Return structure.''' + return ftrack_api.structure.origin.OriginStructure() + + +@pytest.mark.parametrize('entity, context, expected', [ + (mock.Mock(), {'source_resource_identifier': 'identifier'}, 'identifier'), + (mock.Mock(), {}, ValueError), + (mock.Mock(), None, ValueError) +], ids=[ + 'valid-context', + 'invalid-context', + 'unspecified-context' +]) +def test_get_resource_identifier(structure, entity, context, expected): + '''Get resource identifier.''' + if inspect.isclass(expected) and issubclass(expected, Exception): + with pytest.raises(expected): + structure.get_resource_identifier(entity, context) + else: + assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py new file 
mode 100644 index 0000000000..dd72f8ec3f --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py @@ -0,0 +1,309 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import uuid + +import pytest + +import ftrack_api +import ftrack_api.structure.standard + + +@pytest.fixture(scope='session') +def new_project(request): + '''Return new empty project.''' + session = ftrack_api.Session() + + project_schema = session.query('ProjectSchema').first() + project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) + project = session.create('Project', { + 'name': project_name, + 'full_name': project_name + '_full', + 'project_schema': project_schema + }) + + session.commit() + + def cleanup(): + '''Remove created entity.''' + session.delete(project) + session.commit() + + request.addfinalizer(cleanup) + + return project + + +def new_container_component(): + '''Return container component.''' + session = ftrack_api.Session() + + entity = session.create('ContainerComponent', { + 'name': 'container_component' + }) + + return entity + + +def new_sequence_component(): + '''Return sequence component.''' + session = ftrack_api.Session() + + entity = session.create_component( + '/tmp/foo/%04d.jpg [1-10]', location=None, data={'name': 'baz'} + ) + + return entity + + +def new_file_component(name='foo', container=None): + '''Return file component with *name* and *container*.''' + if container: + session = container.session + else: + session = ftrack_api.Session() + + entity = session.create('FileComponent', { + 'name': name, + 'file_type': '.png', + 'container': container + }) + + return entity + + +# Reusable fixtures. +file_component = new_file_component() +container_component = new_container_component() +sequence_component = new_sequence_component() + + +# Note: to improve test performance the same project is reused throughout the +# tests. 
This means that all hierarchical names must be unique, otherwise an +# IntegrityError will be raised on the server. + +@pytest.mark.parametrize( + 'component, hierarchy, expected, structure, asset_name', + [ + ( + file_component, + [], + '{project_name}/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + file_component, + [], + '{project_name}/foobar/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure( + project_versions_prefix='foobar' + ), + 'my_new_asset' + ), + ( + file_component, + ['baz1', 'bar'], + '{project_name}/baz1/bar/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + sequence_component, + ['baz2', 'bar'], + '{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + sequence_component['members'][3], + ['baz3', 'bar'], + '{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + container_component, + ['baz4', 'bar'], + '{project_name}/baz4/bar/my_new_asset/v001/container_component', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + new_file_component(container=container_component), + ['baz5', 'bar'], + ( + '{project_name}/baz5/bar/my_new_asset/v001/container_component/' + 'foo.png' + ), + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + file_component, + [u'björn'], + '{project_name}/bjorn/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + file_component, + [u'björn!'], + '{project_name}/bjorn_/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + new_file_component(name=u'fää'), + [], + '{project_name}/my_new_asset/v001/faa.png', + ftrack_api.structure.standard.StandardStructure(), + 
'my_new_asset' + ), + ( + new_file_component(name=u'fo/o'), + [], + '{project_name}/my_new_asset/v001/fo_o.png', + ftrack_api.structure.standard.StandardStructure(), + 'my_new_asset' + ), + ( + file_component, + [], + '{project_name}/aao/v001/foo.png', + ftrack_api.structure.standard.StandardStructure(), + u'åäö' + ), + ( + file_component, + [], + '{project_name}/my_ne____w_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure(), + u'my_ne!!!!w_asset' + ), + ( + file_component, + [u'björn2'], + u'{project_name}/björn2/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure( + illegal_character_substitute=None + ), + 'my_new_asset' + ), + ( + file_component, + [u'bj!rn'], + '{project_name}/bj^rn/my_new_asset/v001/foo.png', + ftrack_api.structure.standard.StandardStructure( + illegal_character_substitute='^' + ), + 'my_new_asset' + ) + ], ids=[ + 'file_component_on_project', + 'file_component_on_project_with_prefix', + 'file_component_with_hierarchy', + 'sequence_component', + 'sequence_component_member', + 'container_component', + 'container_component_member', + 'slugify_non_ascii_hierarchy', + 'slugify_illegal_hierarchy', + 'slugify_non_ascii_component_name', + 'slugify_illegal_component_name', + 'slugify_non_ascii_asset_name', + 'slugify_illegal_asset_name', + 'slugify_none', + 'slugify_other_character' + ] +) +def test_get_resource_identifier( + component, hierarchy, expected, structure, asset_name, new_project +): + '''Get resource identifier.''' + session = component.session + + # Create structure, asset and version. + context_id = new_project['id'] + for name in hierarchy: + context_id = session.create('Folder', { + 'name': name, + 'project_id': new_project['id'], + 'parent_id': context_id + })['id'] + + asset = session.create( + 'Asset', {'name': asset_name, 'context_id': context_id} + ) + version = session.create('AssetVersion', {'asset': asset}) + + # Update component with version. 
+ if component['container']: + component['container']['version'] = version + else: + component['version'] = version + + session.commit() + + assert structure.get_resource_identifier(component) == expected.format( + project_name=new_project['name'] + ) + + +def test_unsupported_entity(user): + '''Fail to get resource identifier for unsupported entity.''' + structure = ftrack_api.structure.standard.StandardStructure() + with pytest.raises(NotImplementedError): + structure.get_resource_identifier(user) + + +def test_component_without_version_relation(new_project): + '''Get an identifer for component without a version relation.''' + session = new_project.session + + asset = session.create( + 'Asset', {'name': 'foo', 'context_id': new_project['id']} + ) + version = session.create('AssetVersion', {'asset': asset}) + + session.commit() + + file_component = new_file_component() + file_component['version_id'] = version['id'] + + structure = ftrack_api.structure.standard.StandardStructure() + structure.get_resource_identifier(file_component) + + +def test_component_without_committed_version_relation(): + '''Fail to get an identifer for component without a committed version.''' + file_component = new_file_component() + session = file_component.session + version = session.create('AssetVersion', {}) + + file_component['version'] = version + + structure = ftrack_api.structure.standard.StandardStructure() + + with pytest.raises(ftrack_api.exception.StructureError): + structure.get_resource_identifier(file_component) + + +@pytest.mark.xfail( + raises=ftrack_api.exception.ServerError, + reason='Due to user permission errors.' 
+) +def test_component_without_committed_asset_relation(): + '''Fail to get an identifer for component without a committed asset.''' + file_component = new_file_component() + session = file_component.session + version = session.create('AssetVersion', {}) + + file_component['version'] = version + + session.commit() + + structure = ftrack_api.structure.standard.StandardStructure() + + with pytest.raises(ftrack_api.exception.StructureError): + structure.get_resource_identifier(file_component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py new file mode 100644 index 0000000000..555adb2d89 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py @@ -0,0 +1,146 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest + +import ftrack_api.attribute +import ftrack_api.exception + + +@pytest.mark.parametrize('attributes', [ + [], + [ftrack_api.attribute.Attribute('test')] +], ids=[ + 'no initial attributes', + 'with initial attributes' +]) +def test_initialise_attributes_collection(attributes): + '''Initialise attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes(attributes) + assert sorted(list(attribute_collection)) == sorted(attributes) + + +def test_add_attribute_to_attributes_collection(): + '''Add valid attribute to attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes() + attribute = ftrack_api.attribute.Attribute('test') + + assert attribute_collection.keys() == [] + attribute_collection.add(attribute) + assert attribute_collection.keys() == ['test'] + + +def test_add_duplicate_attribute_to_attributes_collection(): + '''Fail to add attribute with duplicate name to attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes() + 
attribute = ftrack_api.attribute.Attribute('test') + + attribute_collection.add(attribute) + with pytest.raises(ftrack_api.exception.NotUniqueError): + attribute_collection.add(attribute) + + +def test_remove_attribute_from_attributes_collection(): + '''Remove attribute from attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes() + attribute = ftrack_api.attribute.Attribute('test') + + attribute_collection.add(attribute) + assert len(attribute_collection) == 1 + + attribute_collection.remove(attribute) + assert len(attribute_collection) == 0 + + +def test_remove_missing_attribute_from_attributes_collection(): + '''Fail to remove attribute not present in attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes() + attribute = ftrack_api.attribute.Attribute('test') + + with pytest.raises(KeyError): + attribute_collection.remove(attribute) + + +def test_get_attribute_from_attributes_collection(): + '''Get attribute from attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes() + attribute = ftrack_api.attribute.Attribute('test') + attribute_collection.add(attribute) + + retrieved_attribute = attribute_collection.get('test') + + assert retrieved_attribute is attribute + + +def test_get_missing_attribute_from_attributes_collection(): + '''Get attribute not present in attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes() + assert attribute_collection.get('test') is None + + +@pytest.mark.parametrize('attributes, expected', [ + ([], []), + ([ftrack_api.attribute.Attribute('test')], ['test']) +], ids=[ + 'no initial attributes', + 'with initial attributes' +]) +def test_attribute_collection_keys(attributes, expected): + '''Retrieve keys for attribute collection.''' + attribute_collection = ftrack_api.attribute.Attributes(attributes) + assert sorted(attribute_collection.keys()) == sorted(expected) + + +@pytest.mark.parametrize('attribute, expected', [ + (None, 
False), + (ftrack_api.attribute.Attribute('b'), True), + (ftrack_api.attribute.Attribute('c'), False) +], ids=[ + 'none attribute', + 'present attribute', + 'missing attribute' +]) +def test_attributes_collection_contains(attribute, expected): + '''Check presence in attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes([ + ftrack_api.attribute.Attribute('a'), + ftrack_api.attribute.Attribute('b') + ]) + + assert (attribute in attribute_collection) is expected + + +@pytest.mark.parametrize('attributes, expected', [ + ([], 0), + ([ftrack_api.attribute.Attribute('test')], 1), + ( + [ + ftrack_api.attribute.Attribute('a'), + ftrack_api.attribute.Attribute('b') + ], + 2 + ) +], ids=[ + 'no attributes', + 'single attribute', + 'multiple attributes' +]) +def test_attributes_collection_count(attributes, expected): + '''Count attributes in attributes collection.''' + attribute_collection = ftrack_api.attribute.Attributes(attributes) + assert len(attribute_collection) == expected + + +def test_iterate_over_attributes_collection(): + '''Iterate over attributes collection.''' + attributes = [ + ftrack_api.attribute.Attribute('a'), + ftrack_api.attribute.Attribute('b') + ] + + attribute_collection = ftrack_api.attribute.Attributes(attributes) + for attribute in attribute_collection: + attributes.remove(attribute) + + assert len(attributes) == 0 + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py new file mode 100644 index 0000000000..7915737253 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py @@ -0,0 +1,416 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import uuid +import tempfile + +import pytest + +import ftrack_api.cache + + +@pytest.fixture(params=['proxy', 'layered', 'memory', 'file', 
'serialised']) +def cache(request): + '''Return cache.''' + if request.param == 'proxy': + cache = ftrack_api.cache.ProxyCache( + ftrack_api.cache.MemoryCache() + ) + + elif request.param == 'layered': + cache = ftrack_api.cache.LayeredCache( + [ftrack_api.cache.MemoryCache()] + ) + + elif request.param == 'memory': + cache = ftrack_api.cache.MemoryCache() + + elif request.param == 'file': + cache_path = os.path.join( + tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) + ) + + cache = ftrack_api.cache.FileCache(cache_path) + + def cleanup(): + '''Cleanup.''' + try: + os.remove(cache_path) + except OSError: + # BSD DB (Mac OSX) implementation of the interface will append + # a .db extension. + os.remove(cache_path + '.db') + + request.addfinalizer(cleanup) + + elif request.param == 'serialised': + cache = ftrack_api.cache.SerialisedCache( + ftrack_api.cache.MemoryCache(), + encode=lambda value: value, + decode=lambda value: value + ) + + else: + raise ValueError( + 'Unrecognised cache fixture type {0!r}'.format(request.param) + ) + + return cache + + + +class Class(object): + '''Class for testing.''' + + def method(self, key): + '''Method for testing.''' + + +def function(mutable, x, y=2): + '''Function for testing.''' + mutable['called'] = True + return {'result': x + y} + + +def assert_memoised_call( + memoiser, function, expected, args=None, kw=None, memoised=True +): + '''Assert *function* call via *memoiser* was *memoised*.''' + mapping = {'called': False} + if args is not None: + args = (mapping,) + args + else: + args = (mapping,) + + result = memoiser.call(function, args, kw) + + assert result == expected + assert mapping['called'] is not memoised + + +def test_get(cache): + '''Retrieve item from cache.''' + cache.set('key', 'value') + assert cache.get('key') == 'value' + + +def test_get_missing_key(cache): + '''Fail to retrieve missing item from cache.''' + with pytest.raises(KeyError): + cache.get('key') + + +def test_set(cache): + '''Set item in 
cache.''' + with pytest.raises(KeyError): + cache.get('key') + + cache.set('key', 'value') + assert cache.get('key') == 'value' + + +def test_remove(cache): + '''Remove item from cache.''' + cache.set('key', 'value') + cache.remove('key') + + with pytest.raises(KeyError): + cache.get('key') + + +def test_remove_missing_key(cache): + '''Fail to remove missing key.''' + with pytest.raises(KeyError): + cache.remove('key') + + +def test_keys(cache): + '''Retrieve keys of items in cache.''' + assert cache.keys() == [] + cache.set('a', 'a_value') + cache.set('b', 'b_value') + cache.set('c', 'c_value') + assert sorted(cache.keys()) == sorted(['a', 'b', 'c']) + + +def test_clear(cache): + '''Remove items from cache.''' + cache.set('a', 'a_value') + cache.set('b', 'b_value') + cache.set('c', 'c_value') + + assert cache.keys() + cache.clear() + + assert not cache.keys() + + +def test_clear_using_pattern(cache): + '''Remove items that match pattern from cache.''' + cache.set('matching_key', 'value') + cache.set('another_matching_key', 'value') + cache.set('key_not_matching', 'value') + + assert cache.keys() + cache.clear(pattern='.*matching_key$') + + assert cache.keys() == ['key_not_matching'] + + +def test_clear_encountering_missing_key(cache, mocker): + '''Clear missing key.''' + # Force reporting keys that are not actually valid for test purposes. + mocker.patch.object(cache, 'keys', lambda: ['missing']) + assert cache.keys() == ['missing'] + + # Should not error even though key not valid. + cache.clear() + + # The key was not successfully removed so should still be present. + assert cache.keys() == ['missing'] + + +def test_layered_cache_propagates_value_on_get(): + '''Layered cache propagates value on get.''' + caches = [ + ftrack_api.cache.MemoryCache(), + ftrack_api.cache.MemoryCache(), + ftrack_api.cache.MemoryCache() + ] + + cache = ftrack_api.cache.LayeredCache(caches) + + # Set item on second level cache only. 
+ caches[1].set('key', 'value') + + # Retrieving key via layered cache should propagate it automatically to + # higher level caches only. + assert cache.get('key') == 'value' + assert caches[0].get('key') == 'value' + + with pytest.raises(KeyError): + caches[2].get('key') + + +def test_layered_cache_remove_at_depth(): + '''Remove key that only exists at depth in LayeredCache.''' + caches = [ + ftrack_api.cache.MemoryCache(), + ftrack_api.cache.MemoryCache() + ] + + cache = ftrack_api.cache.LayeredCache(caches) + + # Set item on second level cache only. + caches[1].set('key', 'value') + + # Removing key that only exists at depth should not raise key error. + cache.remove('key') + + # Ensure key was removed. + assert not cache.keys() + + +def test_expand_references(): + '''Test that references are expanded from serialized cache.''' + + cache_path = os.path.join( + tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) + ) + + def make_cache(session, cache_path): + '''Create a serialised file cache.''' + serialized_file_cache = ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=session.encode, + decode=session.decode + ) + + return serialized_file_cache + + # Populate the serialized file cache. + session = ftrack_api.Session( + cache=lambda session, cache_path=cache_path:make_cache( + session, cache_path + ) + ) + + expanded_results = dict() + + query_string = 'select asset.parent from AssetVersion where asset is_not None limit 10' + + for sequence in session.query(query_string): + asset = sequence.get('asset') + + expanded_results.setdefault( + asset.get('id'), asset.get('parent') + ) + + # Fetch the data from cache. + new_session = ftrack_api.Session( + cache=lambda session, cache_path=cache_path:make_cache( + session, cache_path + ) + ) + + + new_session_two = ftrack_api.Session( + cache=lambda session, cache_path=cache_path:make_cache( + session, cache_path + ) + ) + + + # Make sure references are merged. 
+ for sequence in new_session.query(query_string): + asset = sequence.get('asset') + + assert ( + asset.get('parent') == expanded_results[asset.get('id')] + ) + + # Use for fetching directly using get. + assert ( + new_session_two.get(asset.entity_type, asset.get('id')).get('parent') == + expanded_results[asset.get('id')] + ) + + + +@pytest.mark.parametrize('items, key', [ + (({},), '{}'), + (({}, {}), '{}{}') +], ids=[ + 'single object', + 'multiple objects' +]) +def test_string_key_maker_key(items, key): + '''Generate key using string key maker.''' + key_maker = ftrack_api.cache.StringKeyMaker() + assert key_maker.key(*items) == key + + +@pytest.mark.parametrize('items, key', [ + ( + ({},), + '\x01\x01' + ), + ( + ({'a': 'b'}, [1, 2]), + '\x01' + '\x80\x02U\x01a.' '\x02' '\x80\x02U\x01b.' + '\x01' + '\x00' + '\x03' + '\x80\x02K\x01.' '\x00' '\x80\x02K\x02.' + '\x03' + ), + ( + (function,), + '\x04function\x00unit.test_cache' + ), + ( + (Class,), + '\x04Class\x00unit.test_cache' + ), + ( + (Class.method,), + '\x04method\x00Class\x00unit.test_cache' + ), + ( + (callable,), + '\x04callable' + ) +], ids=[ + 'single mapping', + 'multiple objects', + 'function', + 'class', + 'method', + 'builtin' +]) +def test_object_key_maker_key(items, key): + '''Generate key using string key maker.''' + key_maker = ftrack_api.cache.ObjectKeyMaker() + assert key_maker.key(*items) == key + + +def test_memoised_call(): + '''Call memoised function.''' + memoiser = ftrack_api.cache.Memoiser() + + # Initial call should not be memoised so function is executed. + assert_memoised_call( + memoiser, function, args=(1,), expected={'result': 3}, memoised=False + ) + + # Identical call should be memoised so function is not executed again. + assert_memoised_call( + memoiser, function, args=(1,), expected={'result': 3}, memoised=True + ) + + # Differing call is not memoised so function is executed. 
+ assert_memoised_call( + memoiser, function, args=(3,), expected={'result': 5}, memoised=False + ) + + +def test_memoised_call_variations(): + '''Call memoised function with identical arguments using variable format.''' + memoiser = ftrack_api.cache.Memoiser() + expected = {'result': 3} + + # Call function once to ensure is memoised. + assert_memoised_call( + memoiser, function, args=(1,), expected=expected, memoised=False + ) + + # Each of the following calls should equate to the same key and make + # use of the memoised value. + for args, kw in [ + ((), {'x': 1}), + ((), {'x': 1, 'y': 2}), + ((1,), {'y': 2}), + ((1,), {}) + ]: + assert_memoised_call( + memoiser, function, args=args, kw=kw, expected=expected + ) + + # The following calls should all be treated as new variations and so + # not use any memoised value. + assert_memoised_call( + memoiser, function, kw={'x': 2}, expected={'result': 4}, memoised=False + ) + assert_memoised_call( + memoiser, function, kw={'x': 3, 'y': 2}, expected={'result': 5}, + memoised=False + ) + assert_memoised_call( + memoiser, function, args=(4, ), kw={'y': 2}, expected={'result': 6}, + memoised=False + ) + assert_memoised_call( + memoiser, function, args=(5, ), expected={'result': 7}, memoised=False + ) + + +def test_memoised_mutable_return_value(): + '''Avoid side effects for returned mutable arguments when memoising.''' + memoiser = ftrack_api.cache.Memoiser() + arguments = ({'called': False}, 1) + + result_a = memoiser.call(function, arguments) + assert result_a == {'result': 3} + assert arguments[0]['called'] + + # Modify mutable externally and check that stored memoised value is + # unchanged. 
+ del result_a['result'] + + arguments[0]['called'] = False + result_b = memoiser.call(function, arguments) + + assert result_b == {'result': 3} + assert not arguments[0]['called'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py new file mode 100644 index 0000000000..15c3e5cf39 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py @@ -0,0 +1,574 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import copy +import uuid + +import mock +import pytest + +import ftrack_api.collection +import ftrack_api.symbol +import ftrack_api.inspection +import ftrack_api.exception +import ftrack_api.operation + + +def create_mock_entity(session): + '''Return new mock entity for *session*.''' + entity = mock.MagicMock() + entity.session = session + entity.primary_key_attributes = ['id'] + entity['id'] = str(uuid.uuid4()) + return entity + + +@pytest.fixture +def mock_entity(session): + '''Return mock entity.''' + return create_mock_entity(session) + + +@pytest.fixture +def mock_entities(session): + '''Return list of two mock entities.''' + return [ + create_mock_entity(session), + create_mock_entity(session) + ] + + +@pytest.fixture +def mock_attribute(): + '''Return mock attribute.''' + attribute = mock.MagicMock() + attribute.name = 'test' + return attribute + + +def test_collection_initialisation_does_not_modify_entity_state( + mock_entity, mock_attribute, mock_entities +): + '''Initialising collection does not modify entity state.''' + ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + assert ftrack_api.inspection.state(mock_entity) is ftrack_api.symbol.NOT_SET + + +def test_immutable_collection_initialisation( + mock_entity, mock_attribute, mock_entities +): + '''Initialise immutable 
collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities, mutable=False + ) + + assert list(collection) == mock_entities + assert collection.mutable is False + + +def test_collection_shallow_copy( + mock_entity, mock_attribute, mock_entities, session +): + '''Shallow copying collection should avoid indirect mutation.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + with mock_entity.session.operation_recording(False): + collection_copy = copy.copy(collection) + new_entity = create_mock_entity(session) + collection_copy.append(new_entity) + + assert list(collection) == mock_entities + assert list(collection_copy) == mock_entities + [new_entity] + + +def test_collection_insert( + mock_entity, mock_attribute, mock_entities, session +): + '''Insert a value into collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + new_entity = create_mock_entity(session) + collection.insert(0, new_entity) + assert list(collection) == [new_entity] + mock_entities + + +def test_collection_insert_duplicate( + mock_entity, mock_attribute, mock_entities +): + '''Fail to insert a duplicate value into collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): + collection.insert(0, mock_entities[1]) + + +def test_immutable_collection_insert( + mock_entity, mock_attribute, mock_entities, session +): + '''Fail to insert a value into immutable collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities, mutable=False + ) + + with pytest.raises(ftrack_api.exception.ImmutableCollectionError): + collection.insert(0, create_mock_entity(session)) + + +def test_collection_set_item( + mock_entity, mock_attribute, mock_entities, 
session +): + '''Set item at index in collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + new_entity = create_mock_entity(session) + collection[0] = new_entity + assert list(collection) == [new_entity, mock_entities[1]] + + +def test_collection_re_set_item( + mock_entity, mock_attribute, mock_entities +): + '''Re-set value at exact same index in collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + collection[0] = mock_entities[0] + assert list(collection) == mock_entities + + +def test_collection_set_duplicate_item( + mock_entity, mock_attribute, mock_entities +): + '''Fail to set a duplicate value into collection at different index.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): + collection[0] = mock_entities[1] + + +def test_immutable_collection_set_item( + mock_entity, mock_attribute, mock_entities +): + '''Fail to set item at index in immutable collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities, mutable=False + ) + + with pytest.raises(ftrack_api.exception.ImmutableCollectionError): + collection[0] = mock_entities[0] + + +def test_collection_delete_item( + mock_entity, mock_attribute, mock_entities +): + '''Remove item at index from collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + del collection[0] + assert list(collection) == [mock_entities[1]] + + +def test_collection_delete_item_at_invalid_index( + mock_entity, mock_attribute, mock_entities +): + '''Fail to remove item at missing index from immutable collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + + with pytest.raises(IndexError): + 
del collection[4] + + +def test_immutable_collection_delete_item( + mock_entity, mock_attribute, mock_entities +): + '''Fail to remove item at index from immutable collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities, mutable=False + ) + + with pytest.raises(ftrack_api.exception.ImmutableCollectionError): + del collection[0] + + +def test_collection_count( + mock_entity, mock_attribute, mock_entities, session +): + '''Count items in collection.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + assert len(collection) == 2 + + collection.append(create_mock_entity(session)) + assert len(collection) == 3 + + del collection[0] + assert len(collection) == 2 + + +@pytest.mark.parametrize('other, expected', [ + ([], False), + ([1, 2], True), + ([1, 2, 3], False), + ([1], False) +], ids=[ + 'empty', + 'same', + 'additional', + 'missing' +]) +def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected): + '''Determine collection equality against another collection.''' + # Temporarily override determination of entity identity so that it works + # against simple scalar values for purpose of test. + mocker.patch.object( + ftrack_api.inspection, 'identity', lambda entity: str(entity) + ) + + collection_a = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=[1, 2] + ) + + collection_b = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=other + ) + assert (collection_a == collection_b) is expected + + +def test_collection_not_equal_to_non_collection( + mocker, mock_entity, mock_attribute +): + '''Collection not equal to a non-collection.''' + # Temporarily override determination of entity identity so that it works + # against simple scalar values for purpose of test. 
+ mocker.patch.object( + ftrack_api.inspection, 'identity', lambda entity: str(entity) + ) + + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=[1, 2] + ) + + assert (collection != {}) is True + + +def test_collection_notify_on_modification( + mock_entity, mock_attribute, mock_entities, session +): + '''Record UpdateEntityOperation on collection modification.''' + collection = ftrack_api.collection.Collection( + mock_entity, mock_attribute, data=mock_entities + ) + assert len(session.recorded_operations) == 0 + + collection.append(create_mock_entity(session)) + assert len(session.recorded_operations) == 1 + operation = session.recorded_operations.pop() + assert isinstance(operation, ftrack_api.operation.UpdateEntityOperation) + assert operation.new_value == collection + + +def test_mapped_collection_proxy_shallow_copy(new_project, unique_name): + '''Shallow copying mapped collection proxy avoids indirect mutation.''' + metadata = new_project['metadata'] + + with new_project.session.operation_recording(False): + metadata_copy = copy.copy(metadata) + metadata_copy[unique_name] = True + + assert unique_name not in metadata + assert unique_name in metadata_copy + + +def test_mapped_collection_proxy_mutable_property(new_project): + '''Mapped collection mutable property maps to underlying collection.''' + metadata = new_project['metadata'] + + assert metadata.mutable is True + assert metadata.collection.mutable is True + + metadata.mutable = False + assert metadata.collection.mutable is False + + +def test_mapped_collection_proxy_attribute_property( + new_project, mock_attribute +): + '''Mapped collection attribute property maps to underlying collection.''' + metadata = new_project['metadata'] + + assert metadata.attribute is metadata.collection.attribute + + metadata.attribute = mock_attribute + assert metadata.collection.attribute is mock_attribute + + +def test_mapped_collection_proxy_get_item(new_project, unique_name): + '''Retrieve 
item in mapped collection proxy.''' + session = new_project.session + + # Prepare data. + metadata = new_project['metadata'] + value = 'value' + metadata[unique_name] = value + session.commit() + + # Check in clean session retrieval of value. + session.reset() + retrieved = session.get(*ftrack_api.inspection.identity(new_project)) + + assert retrieved is not new_project + assert retrieved['metadata'].keys() == [unique_name] + assert retrieved['metadata'][unique_name] == value + + +def test_mapped_collection_proxy_set_item(new_project, unique_name): + '''Set new item in mapped collection proxy.''' + session = new_project.session + + metadata = new_project['metadata'] + assert unique_name not in metadata + + value = 'value' + metadata[unique_name] = value + assert metadata[unique_name] == value + + # Check change persisted correctly. + session.commit() + session.reset() + retrieved = session.get(*ftrack_api.inspection.identity(new_project)) + + assert retrieved is not new_project + assert retrieved['metadata'].keys() == [unique_name] + assert retrieved['metadata'][unique_name] == value + + +def test_mapped_collection_proxy_update_item(new_project, unique_name): + '''Update existing item in mapped collection proxy.''' + session = new_project.session + + # Prepare a pre-existing value. + metadata = new_project['metadata'] + value = 'value' + metadata[unique_name] = value + session.commit() + + # Set new value. + new_value = 'new_value' + metadata[unique_name] = new_value + + # Confirm change persisted correctly. 
+ session.commit() + session.reset() + retrieved = session.get(*ftrack_api.inspection.identity(new_project)) + + assert retrieved is not new_project + assert retrieved['metadata'].keys() == [unique_name] + assert retrieved['metadata'][unique_name] == new_value + + +def test_mapped_collection_proxy_delete_item(new_project, unique_name): + '''Remove existing item from mapped collection proxy.''' + session = new_project.session + + # Prepare a pre-existing value to remove. + metadata = new_project['metadata'] + value = 'value' + metadata[unique_name] = value + session.commit() + + # Now remove value. + del new_project['metadata'][unique_name] + assert unique_name not in new_project['metadata'] + + # Confirm change persisted correctly. + session.commit() + session.reset() + retrieved = session.get(*ftrack_api.inspection.identity(new_project)) + + assert retrieved is not new_project + assert retrieved['metadata'].keys() == [] + assert unique_name not in retrieved['metadata'] + + +def test_mapped_collection_proxy_delete_missing_item(new_project, unique_name): + '''Fail to remove item for missing key from mapped collection proxy.''' + metadata = new_project['metadata'] + assert unique_name not in metadata + with pytest.raises(KeyError): + del metadata[unique_name] + + +def test_mapped_collection_proxy_iterate_keys(new_project, unique_name): + '''Iterate over keys in mapped collection proxy.''' + metadata = new_project['metadata'] + metadata.update({ + 'a': 'value-a', + 'b': 'value-b', + 'c': 'value-c' + }) + + # Commit here as otherwise cleanup operation will fail because transaction + # will include updating metadata to refer to a deleted entity. 
+ new_project.session.commit() + + iterated = set() + for key in metadata: + iterated.add(key) + + assert iterated == set(['a', 'b', 'c']) + + +def test_mapped_collection_proxy_count(new_project, unique_name): + '''Count items in mapped collection proxy.''' + metadata = new_project['metadata'] + metadata.update({ + 'a': 'value-a', + 'b': 'value-b', + 'c': 'value-c' + }) + + # Commit here as otherwise cleanup operation will fail because transaction + # will include updating metadata to refer to a deleted entity. + new_project.session.commit() + + assert len(metadata) == 3 + + +def test_mapped_collection_on_create(session, unique_name, project): + '''Test that it is possible to set relational attributes on create''' + metadata = { + 'a': 'value-a', + 'b': 'value-b', + 'c': 'value-c' + } + + task_id = session.create( + 'Task', { + 'name': unique_name, + 'parent': project, + 'metadata': metadata, + + } + ).get('id') + + session.commit() + + # Reset the session and check that we have the expected + # values. + session.reset() + + task = session.get( + 'Task', task_id + ) + + for key, value in metadata.items(): + assert value == task['metadata'][key] + + +def test_collection_refresh(new_asset_version, new_component): + '''Test collection reload.''' + session_two = ftrack_api.Session(auto_connect_event_hub=False) + + query_string = 'select components from AssetVersion where id is "{0}"'.format( + new_asset_version.get('id') + ) + + # Fetch the new asset version in a new session. + new_asset_version_two = session_two.query( + query_string + ).one() + + # Modify our asset version + new_asset_version.get('components').append( + new_component + ) + + new_asset_version.session.commit() + + # Query the same asset version again and make sure we get the newly + # populated data. 
+ session_two.query( + query_string + ).all() + + assert ( + new_asset_version.get('components') == new_asset_version_two.get('components') + ) + + # Make a local change to our asset version + new_asset_version_two.get('components').pop() + + # Query the same asset version again and make sure our local changes + # are not overwritten. + + session_two.query( + query_string + ).all() + + assert len(new_asset_version_two.get('components')) == 0 + + +def test_mapped_collection_reload(new_asset_version): + '''Test mapped collection reload.''' + session_two = ftrack_api.Session(auto_connect_event_hub=False) + + query_string = 'select metadata from AssetVersion where id is "{0}"'.format( + new_asset_version.get('id') + ) + + # Fetch the new asset version in a new session. + new_asset_version_two = session_two.query( + query_string + ).one() + + # Modify our asset version + new_asset_version['metadata']['test'] = str(uuid.uuid4()) + + new_asset_version.session.commit() + + # Query the same asset version again and make sure we get the newly + # populated data. + session_two.query( + query_string + ).all() + + assert ( + new_asset_version['metadata']['test'] == new_asset_version_two['metadata']['test'] + ) + + local_data = str(uuid.uuid4()) + + new_asset_version_two['metadata']['test'] = local_data + + # Modify our asset version again + new_asset_version['metadata']['test'] = str(uuid.uuid4()) + + new_asset_version.session.commit() + + # Query the same asset version again and make sure our local changes + # are not overwritten. 
+ session_two.query( + query_string + ).all() + + assert ( + new_asset_version_two['metadata']['test'] == local_data + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py new file mode 100644 index 0000000000..7a9b0fadaa --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py @@ -0,0 +1,251 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import uuid + +import pytest + +import ftrack_api + +@pytest.fixture( + params=[ + 'AssetVersion', 'Shot', 'AssetVersionList', 'TypedContextList', 'User', + 'Asset' + ] +) +def new_entity_and_custom_attribute(request, session): + '''Return tuple with new entity, custom attribute name and value.''' + if request.param == 'AssetVersion': + entity = session.create( + request.param, { + 'asset': session.query('Asset').first() + } + ) + return (entity, 'versiontest', 123) + + elif request.param == 'Shot': + sequence = session.query('Sequence').first() + entity = session.create( + request.param, { + 'parent_id': sequence['id'], + 'project_id': sequence['project_id'], + 'name': str(uuid.uuid1()) + } + ) + return (entity, 'fstart', 1005) + + elif request.param == 'Asset': + shot = session.query('Shot').first() + entity = session.create( + request.param, { + 'context_id': shot['project_id'], + 'name': str(uuid.uuid1()) + } + ) + return (entity, 'htest', 1005) + + elif request.param in ('AssetVersionList', 'TypedContextList'): + entity = session.create( + request.param, { + 'project_id': session.query('Project').first()['id'], + 'category_id': session.query('ListCategory').first()['id'], + 'name': str(uuid.uuid1()) + } + ) + return (entity, 'listbool', True) + + elif request.param == 'User': + entity = session.create( + request.param, { + 'first_name': 'Custom attribute test', 
+ 'last_name': 'Custom attribute test', + 'username': str(uuid.uuid1()) + } + ) + return (entity, 'teststring', 'foo') + + +@pytest.mark.parametrize( + 'entity_type, entity_model_name, custom_attribute_name', + [ + ('Task', 'task', 'customNumber'), + ('AssetVersion', 'assetversion', 'NumberField') + ], + ids=[ + 'task', + 'asset_version' + ] +) +def test_read_set_custom_attribute( + session, entity_type, entity_model_name, custom_attribute_name +): + '''Retrieve custom attribute value set on instance.''' + custom_attribute_value = session.query( + 'CustomAttributeValue where configuration.key is ' + '{custom_attribute_name}' + .format( + custom_attribute_name=custom_attribute_name + ) + ).first() + + entity = session.query( + 'select custom_attributes from {entity_type} where id is ' + '{entity_id}'.format( + entity_type=entity_type, + entity_id=custom_attribute_value['entity_id'], + ) + ).first() + + assert custom_attribute_value + + assert entity['id'] == entity['custom_attributes'].collection.entity['id'] + assert entity is entity['custom_attributes'].collection.entity + assert ( + entity['custom_attributes'][custom_attribute_name] == + custom_attribute_value['value'] + ) + + assert custom_attribute_name in entity['custom_attributes'].keys() + + +@pytest.mark.parametrize( + 'entity_type, custom_attribute_name', + [ + ('Task', 'customNumber'), + ('Shot', 'fstart'), + ( + 'AssetVersion', 'NumberField' + ) + ], + ids=[ + 'task', + 'shot', + 'asset_version' + ] +) +def test_write_set_custom_attribute_value( + session, entity_type, custom_attribute_name +): + '''Overwrite existing instance level custom attribute value.''' + entity = session.query( + 'select custom_attributes from {entity_type} where ' + 'custom_attributes.configuration.key is {custom_attribute_name}'.format( + entity_type=entity_type, + custom_attribute_name=custom_attribute_name + ) + ).first() + + entity['custom_attributes'][custom_attribute_name] = 42 + + assert 
entity['custom_attributes'][custom_attribute_name] == 42 + + session.commit() + + +@pytest.mark.parametrize( + 'entity_type, custom_attribute_name', + [ + ('Task', 'fstart'), + ('Shot', 'Not existing'), + ('AssetVersion', 'fstart') + ], + ids=[ + 'task', + 'shot', + 'asset_version' + ] +) +def test_read_custom_attribute_that_does_not_exist( + session, entity_type, custom_attribute_name +): + '''Fail to read value from a custom attribute that does not exist.''' + entity = session.query( + 'select custom_attributes from {entity_type}'.format( + entity_type=entity_type + ) + ).first() + + with pytest.raises(KeyError): + entity['custom_attributes'][custom_attribute_name] + + +@pytest.mark.parametrize( + 'entity_type, custom_attribute_name', + [ + ('Task', 'fstart'), + ('Shot', 'Not existing'), + ('AssetVersion', 'fstart') + ], + ids=[ + 'task', + 'shot', + 'asset_version' + ] +) +def test_write_custom_attribute_that_does_not_exist( + session, entity_type, custom_attribute_name +): + '''Fail to write a value to a custom attribute that does not exist.''' + entity = session.query( + 'select custom_attributes from {entity_type}'.format( + entity_type=entity_type + ) + ).first() + + with pytest.raises(KeyError): + entity['custom_attributes'][custom_attribute_name] = 42 + + +def test_set_custom_attribute_on_new_but_persisted_version( + session, new_asset_version +): + '''Set custom attribute on new persisted version.''' + new_asset_version['custom_attributes']['versiontest'] = 5 + session.commit() + + +@pytest.mark.xfail( + raises=ftrack_api.exception.ServerError, + reason='Due to user permission errors.' 
+) +def test_batch_create_entity_and_custom_attributes( + new_entity_and_custom_attribute +): + '''Write custom attribute value and entity in the same batch.''' + entity, name, value = new_entity_and_custom_attribute + session = entity.session + entity['custom_attributes'][name] = value + + assert entity['custom_attributes'][name] == value + session.commit() + + assert entity['custom_attributes'][name] == value + + +def test_refresh_custom_attribute(new_asset_version): + '''Test custom attribute refresh.''' + session_two = ftrack_api.Session() + + query_string = 'select custom_attributes from AssetVersion where id is "{0}"'.format( + new_asset_version.get('id') + ) + + asset_version_two = session_two.query( + query_string + ).first() + + new_asset_version['custom_attributes']['versiontest'] = 42 + + new_asset_version.session.commit() + + asset_version_two = session_two.query( + query_string + ).first() + + assert ( + new_asset_version['custom_attributes']['versiontest'] == + asset_version_two['custom_attributes']['versiontest'] + ) + + + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py new file mode 100644 index 0000000000..c53dda9630 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py @@ -0,0 +1,129 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import tempfile + +import pytest + +import ftrack_api.data + + +@pytest.fixture() +def content(): + '''Return initial content.''' + return 'test data' + + +@pytest.fixture(params=['file', 'file_wrapper', 'string']) +def data(request, content): + '''Return cache.''' + + if request.param == 'string': + data_object = ftrack_api.data.String(content) + + elif request.param == 'file': + file_handle, path = tempfile.mkstemp() + file_object = os.fdopen(file_handle, 'r+') + 
file_object.write(content) + file_object.flush() + file_object.close() + + data_object = ftrack_api.data.File(path, 'r+') + + def cleanup(): + '''Cleanup.''' + data_object.close() + os.remove(path) + + request.addfinalizer(cleanup) + + elif request.param == 'file_wrapper': + file_handle, path = tempfile.mkstemp() + file_object = os.fdopen(file_handle, 'r+') + file_object.write(content) + file_object.seek(0) + + data_object = ftrack_api.data.FileWrapper(file_object) + + def cleanup(): + '''Cleanup.''' + data_object.close() + os.remove(path) + + request.addfinalizer(cleanup) + + else: + raise ValueError('Unrecognised parameter: {0}'.format(request.param)) + + return data_object + + +def test_read(data, content): + '''Return content from current position up to *limit*.''' + assert data.read(5) == content[:5] + assert data.read() == content[5:] + + +def test_write(data, content): + '''Write content at current position.''' + assert data.read() == content + data.write('more test data') + data.seek(0) + assert data.read() == content + 'more test data' + + +def test_flush(data): + '''Flush buffers ensuring data written.''' + # TODO: Implement better test than just calling function. 
+ data.flush() + + +def test_seek(data, content): + '''Move internal pointer to *position*.''' + data.seek(5) + assert data.read() == content[5:] + + +def test_tell(data): + '''Return current position of internal pointer.''' + assert data.tell() == 0 + data.seek(5) + assert data.tell() == 5 + + +def test_close(data): + '''Flush buffers and prevent further access.''' + data.close() + with pytest.raises(ValueError) as error: + data.read() + + assert 'I/O operation on closed file' in str(error.value) + + +class Dummy(ftrack_api.data.Data): + '''Dummy string.''' + + def read(self, limit=None): + '''Return content from current position up to *limit*.''' + + def write(self, content): + '''Write content at current position.''' + + +def test_unsupported_tell(): + '''Fail when tell unsupported.''' + data = Dummy() + with pytest.raises(NotImplementedError) as error: + data.tell() + + assert 'Tell not supported' in str(error.value) + + +def test_unsupported_seek(): + '''Fail when seek unsupported.''' + data = Dummy() + with pytest.raises(NotImplementedError) as error: + data.seek(5) + + assert 'Seek not supported' in str(error.value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py new file mode 100644 index 0000000000..ae565cb3f5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py @@ -0,0 +1,70 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import termcolor + +import ftrack_api.formatter + + +def colored(text, *args, **kwargs): + '''Pass through so there are no escape sequences in output.''' + return text + + +def test_format(user, mocker): + '''Return formatted representation of entity.''' + mocker.patch.object(termcolor, 'colored', colored) + + result = ftrack_api.formatter.format(user) + + # Cannot test entire string as 
too variable so check for key text. + assert result.startswith('User\n') + assert ' username: jenkins' in result + assert ' email: ' in result + + +def test_format_using_custom_formatters(user): + '''Return formatted representation of entity using custom formatters.''' + result = ftrack_api.formatter.format( + user, formatters={ + 'header': lambda text: '*{0}*'.format(text), + 'label': lambda text: '-{0}'.format(text) + } + ) + + # Cannot test entire string as too variable so check for key text. + assert result.startswith('*User*\n') + assert ' -username: jenkins' in result + assert ' -email: ' in result + + +def test_format_filtering(new_user, mocker): + '''Return formatted representation using custom filter.''' + mocker.patch.object(termcolor, 'colored', colored) + + with new_user.session.auto_populating(False): + result = ftrack_api.formatter.format( + new_user, + attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] + ) + + # Cannot test entire string as too variable so check for key text. + assert result.startswith('User\n') + assert ' username: {0}'.format(new_user['username']) in result + assert ' email: ' not in result + + +def test_format_recursive(user, mocker): + '''Return formatted recursive representation.''' + mocker.patch.object(termcolor, 'colored', colored) + + user.session.populate(user, 'timelogs.user') + + with user.session.auto_populating(False): + result = ftrack_api.formatter.format(user, recursive=True) + + # Cannot test entire string as too variable so check for key text. 
+ assert result.startswith('User\n') + assert ' username: jenkins' + assert ' timelogs: Timelog' in result + assert ' user: User{...}' in result diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py new file mode 100644 index 0000000000..57b44613a8 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py @@ -0,0 +1,101 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2014 ftrack + +import ftrack_api.inspection +import ftrack_api.symbol + + +def test_identity(user): + '''Retrieve identity of *user*.''' + identity = ftrack_api.inspection.identity(user) + assert identity[0] == 'User' + assert identity[1] == ['d07ae5d0-66e1-11e1-b5e9-f23c91df25eb'] + + +def test_primary_key(user): + '''Retrieve primary key of *user*.''' + primary_key = ftrack_api.inspection.primary_key(user) + assert primary_key == { + 'id': 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb' + } + + +def test_created_entity_state(session, unique_name): + '''Created entity has CREATED state.''' + new_user = session.create('User', {'username': unique_name}) + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED + + # Even after a modification the state should remain as CREATED. 
+ new_user['username'] = 'changed' + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED + + +def test_retrieved_entity_state(user): + '''Retrieved entity has NOT_SET state.''' + assert ftrack_api.inspection.state(user) is ftrack_api.symbol.NOT_SET + + +def test_modified_entity_state(user): + '''Modified entity has MODIFIED state.''' + user['username'] = 'changed' + assert ftrack_api.inspection.state(user) is ftrack_api.symbol.MODIFIED + + +def test_deleted_entity_state(session, user): + '''Deleted entity has DELETED state.''' + session.delete(user) + assert ftrack_api.inspection.state(user) is ftrack_api.symbol.DELETED + + +def test_post_commit_entity_state(session, unique_name): + '''Entity has NOT_SET state post commit.''' + new_user = session.create('User', {'username': unique_name}) + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED + + session.commit() + + assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.NOT_SET + + +def test_states(session, unique_name, user): + '''Determine correct states for multiple entities.''' + # NOT_SET + user_a = session.create('User', {'username': unique_name}) + session.commit() + + # CREATED + user_b = session.create('User', {'username': unique_name}) + user_b['username'] = 'changed' + + # MODIFIED + user_c = user + user_c['username'] = 'changed' + + # DELETED + user_d = session.create('User', {'username': unique_name}) + session.delete(user_d) + + # Assert states. 
+ states = ftrack_api.inspection.states([user_a, user_b, user_c, user_d]) + + assert states == [ + ftrack_api.symbol.NOT_SET, + ftrack_api.symbol.CREATED, + ftrack_api.symbol.MODIFIED, + ftrack_api.symbol.DELETED + ] + + +def test_states_for_no_entities(): + '''Return empty list of states when no entities passed.''' + states = ftrack_api.inspection.states([]) + assert states == [] + + +def test_skip_operations_for_non_inspected_entities(session, unique_name): + '''Skip operations for non inspected entities.''' + user_a = session.create('User', {'username': unique_name + '-1'}) + user_b = session.create('User', {'username': unique_name + '-2'}) + + states = ftrack_api.inspection.states([user_a]) + assert states == [ftrack_api.symbol.CREATED] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py new file mode 100644 index 0000000000..702bfae355 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py @@ -0,0 +1,79 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api.operation + + +def test_operations_initialise(): + '''Initialise empty operations stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + +def test_operations_push(): + '''Push new operation onto stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + operation = ftrack_api.operation.Operation() + operations.push(operation) + assert list(operations)[-1] is operation + + +def test_operations_pop(): + '''Pop and return operation from stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + operations.push(ftrack_api.operation.Operation()) + operations.push(ftrack_api.operation.Operation()) + operation = ftrack_api.operation.Operation() + 
operations.push(operation) + + assert len(operations) == 3 + popped = operations.pop() + assert popped is operation + assert len(operations) == 2 + + +def test_operations_count(): + '''Count operations in stack.''' + operations = ftrack_api.operation.Operations() + assert len(operations) == 0 + + operations.push(ftrack_api.operation.Operation()) + assert len(operations) == 1 + + operations.pop() + assert len(operations) == 0 + + +def test_operations_clear(): + '''Clear operations stack.''' + operations = ftrack_api.operation.Operations() + operations.push(ftrack_api.operation.Operation()) + operations.push(ftrack_api.operation.Operation()) + operations.push(ftrack_api.operation.Operation()) + assert len(operations) == 3 + + operations.clear() + assert len(operations) == 0 + + +def test_operations_iter(): + '''Iterate over operations stack.''' + operations = ftrack_api.operation.Operations() + operation_a = ftrack_api.operation.Operation() + operation_b = ftrack_api.operation.Operation() + operation_c = ftrack_api.operation.Operation() + + operations.push(operation_a) + operations.push(operation_b) + operations.push(operation_c) + + assert len(operations) == 3 + for operation, expected in zip( + operations, [operation_a, operation_b, operation_c] + ): + assert operation is expected + diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py new file mode 100644 index 0000000000..247b496d96 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py @@ -0,0 +1,48 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import ftrack_api + + +class Class(object): + '''Class.''' + + +class Mixin(object): + '''Mixin.''' + + def method(self): + '''Method.''' + return True + + +def test_mixin(): + '''Mixin class to instance.''' + instance_a = Class() + 
instance_b = Class() + + assert not hasattr(instance_a, 'method') + assert not hasattr(instance_b, 'method') + + ftrack_api.mixin(instance_a, Mixin) + + assert hasattr(instance_a, 'method') + assert instance_a.method() is True + assert not hasattr(instance_b, 'method') + + +def test_mixin_same_class_multiple_times(): + '''Mixin class to instance multiple times.''' + instance = Class() + assert not hasattr(instance, 'method') + assert len(instance.__class__.mro()) == 2 + + ftrack_api.mixin(instance, Mixin) + assert hasattr(instance, 'method') + assert instance.method() is True + assert len(instance.__class__.mro()) == 4 + + ftrack_api.mixin(instance, Mixin) + assert hasattr(instance, 'method') + assert instance.method() is True + assert len(instance.__class__.mro()) == 4 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py new file mode 100644 index 0000000000..252c813a9b --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py @@ -0,0 +1,192 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import textwrap +import logging +import re + +import pytest + +import ftrack_api.plugin + + +@pytest.fixture() +def valid_plugin(temporary_path): + '''Return path to directory containing a valid plugin.''' + with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + def register(*args, **kw): + print "Registered", args, kw + ''')) + + return temporary_path + + +@pytest.fixture() +def python_non_plugin(temporary_path): + '''Return path to directory containing Python file that is non plugin.''' + with open(os.path.join(temporary_path, 'non.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + print "Not a plugin" + + def not_called(): + print "Not called" + ''')) + + 
return temporary_path + + +@pytest.fixture() +def non_plugin(temporary_path): + '''Return path to directory containing file that is non plugin.''' + with open(os.path.join(temporary_path, 'non.txt'), 'w') as file_object: + file_object.write('Never seen') + + return temporary_path + + +@pytest.fixture() +def broken_plugin(temporary_path): + '''Return path to directory containing broken plugin.''' + with open(os.path.join(temporary_path, 'broken.py'), 'w') as file_object: + file_object.write('syntax error') + + return temporary_path + + +@pytest.fixture() +def plugin(request, temporary_path): + '''Return path containing a plugin with requested specification.''' + specification = request.param + output = re.sub('(\w+)=\w+', '"\g<1>={}".format(\g<1>)', specification) + output = re.sub('\*args', 'args', output) + output = re.sub('\*\*kwargs', 'sorted(kwargs.items())', output) + + with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: + content = textwrap.dedent(''' + def register({}): + print {} + '''.format(specification, output)) + file_object.write(content) + + return temporary_path + + +def test_discover_empty_paths(capsys): + '''Discover no plugins when paths are empty.''' + ftrack_api.plugin.discover([' ']) + output, error = capsys.readouterr() + assert not output + assert not error + + +def test_discover_valid_plugin(valid_plugin, capsys): + '''Discover valid plugin.''' + ftrack_api.plugin.discover([valid_plugin], (1, 2), {'3': 4}) + output, error = capsys.readouterr() + assert 'Registered (1, 2) {\'3\': 4}' in output + + +def test_discover_python_non_plugin(python_non_plugin, capsys): + '''Discover Python non plugin.''' + ftrack_api.plugin.discover([python_non_plugin]) + output, error = capsys.readouterr() + assert 'Not a plugin' in output + assert 'Not called' not in output + + +def test_discover_non_plugin(non_plugin, capsys): + '''Discover non plugin.''' + ftrack_api.plugin.discover([non_plugin]) + output, error = capsys.readouterr() + 
assert not output + assert not error + + +def test_discover_broken_plugin(broken_plugin, caplog): + '''Discover broken plugin.''' + ftrack_api.plugin.discover([broken_plugin]) + + records = caplog.records() + assert len(records) == 1 + assert records[0].levelno is logging.WARNING + assert 'Failed to load plugin' in records[0].message + + +@pytest.mark.parametrize( + 'plugin, positional, keyword, expected', + [ + ( + 'a, b=False, c=False, d=False', + (1, 2), {'c': True, 'd': True, 'e': True}, + '1 b=2 c=True d=True' + ), + ( + '*args', + (1, 2), {'b': True, 'c': False}, + '(1, 2)' + ), + ( + '**kwargs', + tuple(), {'b': True, 'c': False}, + '[(\'b\', True), (\'c\', False)]' + ), + ( + 'a=False, b=False', + (True,), {'b': True}, + 'a=True b=True' + ), + ( + 'a, c=False, *args', + (1, 2, 3, 4), {}, + '1 c=2 (3, 4)' + ), + ( + 'a, c=False, **kwargs', + tuple(), {'a': 1, 'b': 2, 'c': 3, 'd': 4}, + '1 c=3 [(\'b\', 2), (\'d\', 4)]' + ), + ], + indirect=['plugin'], + ids=[ + 'mixed-explicit', + 'variable-args-only', + 'variable-kwargs-only', + 'keyword-from-positional', + 'trailing-variable-args', + 'trailing-keyword-args' + ] +) +def test_discover_plugin_with_specific_signature( + plugin, positional, keyword, expected, capsys +): + '''Discover plugin passing only supported arguments.''' + ftrack_api.plugin.discover( + [plugin], positional, keyword + ) + output, error = capsys.readouterr() + assert expected in output + + +def test_discover_plugin_varying_signatures(temporary_path, capsys): + '''Discover multiple plugins with varying signatures.''' + with open(os.path.join(temporary_path, 'plugin_a.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + def register(a): + print (a,) + ''')) + + with open(os.path.join(temporary_path, 'plugin_b.py'), 'w') as file_object: + file_object.write(textwrap.dedent(''' + def register(a, b=False): + print (a,), {'b': b} + ''')) + + ftrack_api.plugin.discover( + [temporary_path], (True,), {'b': True} + ) + + output, error = 
capsys.readouterr() + assert '(True,)'in output + assert '(True,) {\'b\': True}' in output diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py new file mode 100644 index 0000000000..f8e3f9dec3 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py @@ -0,0 +1,164 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import math + +import pytest + +import ftrack_api +import ftrack_api.query +import ftrack_api.exception + + +def test_index(session): + '''Index into query result.''' + results = session.query('User') + assert isinstance(results[2], session.types['User']) + + +def test_len(session): + '''Return count of results using len.''' + results = session.query('User where username is jenkins') + assert len(results) == 1 + + +def test_all(session): + '''Return all results using convenience method.''' + results = session.query('User').all() + assert isinstance(results, list) + assert len(results) + + +def test_implicit_iteration(session): + '''Implicitly iterate through query result.''' + results = session.query('User') + assert isinstance(results, ftrack_api.query.QueryResult) + + records = [] + for record in results: + records.append(record) + + assert len(records) == len(results) + + +def test_one(session): + '''Return single result using convenience method.''' + user = session.query('User where username is jenkins').one() + assert user['username'] == 'jenkins' + + +def test_one_fails_for_no_results(session): + '''Fail to fetch single result when no results available.''' + with pytest.raises(ftrack_api.exception.NoResultFoundError): + session.query('User where username is does_not_exist').one() + + +def test_one_fails_for_multiple_results(session): + '''Fail to fetch single result when multiple results available.''' + with 
pytest.raises(ftrack_api.exception.MultipleResultsFoundError): + session.query('User').one() + + +def test_one_with_existing_limit(session): + '''Fail to return single result when existing limit in expression.''' + with pytest.raises(ValueError): + session.query('User where username is jenkins limit 0').one() + + +def test_one_with_existing_offset(session): + '''Fail to return single result when existing offset in expression.''' + with pytest.raises(ValueError): + session.query('User where username is jenkins offset 2').one() + + +def test_one_with_prefetched_data(session): + '''Return single result ignoring prefetched data.''' + query = session.query('User where username is jenkins') + query.all() + + user = query.one() + assert user['username'] == 'jenkins' + + +def test_first(session): + '''Return first result using convenience method.''' + users = session.query('User').all() + + user = session.query('User').first() + assert user == users[0] + + +def test_first_returns_none_when_no_results(session): + '''Return None when no results available.''' + user = session.query('User where username is does_not_exist').first() + assert user is None + + +def test_first_with_existing_limit(session): + '''Fail to return first result when existing limit in expression.''' + with pytest.raises(ValueError): + session.query('User where username is jenkins limit 0').first() + + +def test_first_with_existing_offset(session): + '''Return first result whilst respecting custom offset.''' + users = session.query('User').all() + + user = session.query('User offset 2').first() + assert user == users[2] + + +def test_first_with_prefetched_data(session): + '''Return first result ignoring prefetched data.''' + query = session.query('User where username is jenkins') + query.all() + + user = query.first() + assert user['username'] == 'jenkins' + + +def test_paging(session, mocker): + '''Page through results.''' + mocker.patch.object(session, 'call', wraps=session.call) + + page_size = 5 + 
query = session.query('User limit 50', page_size=page_size) + records = query.all() + + assert session.call.call_count == ( + math.ceil(len(records) / float(page_size)) + ) + + +def test_paging_respects_offset_and_limit(session, mocker): + '''Page through results respecting offset and limit.''' + users = session.query('User').all() + + mocker.patch.object(session, 'call', wraps=session.call) + + page_size = 6 + query = session.query('User offset 2 limit 8', page_size=page_size) + records = query.all() + + assert session.call.call_count == 2 + assert len(records) == 8 + assert records == users[2:10] + + +def test_paging_respects_limit_smaller_than_page_size(session, mocker): + '''Use initial limit when less than page size.''' + mocker.patch.object(session, 'call', wraps=session.call) + + page_size = 100 + query = session.query('User limit 10', page_size=page_size) + records = query.all() + + assert session.call.call_count == 1 + session.call.assert_called_once_with( + [{ + 'action': 'query', + 'expression': 'select id from User offset 0 limit 10' + }] + ) + + assert len(records) == 10 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py new file mode 100644 index 0000000000..5087efcc08 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py @@ -0,0 +1,1519 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import os +import tempfile +import functools +import uuid +import textwrap +import datetime +import json +import random + +import pytest +import mock +import arrow +import requests + +import ftrack_api +import ftrack_api.cache +import ftrack_api.inspection +import ftrack_api.symbol +import ftrack_api.exception +import ftrack_api.session +import ftrack_api.collection + + +@pytest.fixture(params=['memory', 
'persisted']) +def cache(request): + '''Return cache.''' + if request.param == 'memory': + cache = None # There is already a default Memory cache present. + elif request.param == 'persisted': + cache_path = os.path.join( + tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) + ) + + cache = lambda session: ftrack_api.cache.SerialisedCache( + ftrack_api.cache.FileCache(cache_path), + encode=functools.partial( + session.encode, entity_attribute_strategy='persisted_only' + ), + decode=session.decode + ) + + def cleanup(): + '''Cleanup.''' + try: + os.remove(cache_path) + except OSError: + # BSD DB (Mac OSX) implementation of the interface will append + # a .db extension. + os.remove(cache_path + '.db') + + request.addfinalizer(cleanup) + + return cache + + +@pytest.fixture() +def temporary_invalid_schema_cache(request): + '''Return schema cache path to invalid schema cache file.''' + schema_cache_path = os.path.join( + tempfile.gettempdir(), + 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) + ) + + with open(schema_cache_path, 'w') as file_: + file_.write('${invalid json}') + + def cleanup(): + '''Cleanup.''' + os.remove(schema_cache_path) + + request.addfinalizer(cleanup) + + return schema_cache_path + + +@pytest.fixture() +def temporary_valid_schema_cache(request, mocked_schemas): + '''Return schema cache path to valid schema cache file.''' + schema_cache_path = os.path.join( + tempfile.gettempdir(), + 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) + ) + + with open(schema_cache_path, 'w') as file_: + json.dump(mocked_schemas, file_, indent=4) + + def cleanup(): + '''Cleanup.''' + os.remove(schema_cache_path) + + request.addfinalizer(cleanup) + + return schema_cache_path + + +class SelectiveCache(ftrack_api.cache.ProxyCache): + '''Proxy cache that should not cache newly created entities.''' + + def set(self, key, value): + '''Set *value* for *key*.''' + if isinstance(value, ftrack_api.entity.base.Entity): + if ( + 
ftrack_api.inspection.state(value) + is ftrack_api.symbol.CREATED + ): + return + + super(SelectiveCache, self).set(key, value) + + +def test_get_entity(session, user): + '''Retrieve an entity by type and id.''' + matching = session.get(*ftrack_api.inspection.identity(user)) + assert matching == user + + +def test_get_non_existant_entity(session): + '''Retrieve a non-existant entity by type and id.''' + matching = session.get('User', 'non-existant-id') + assert matching is None + + +def test_get_entity_of_invalid_type(session): + '''Fail to retrieve an entity using an invalid type.''' + with pytest.raises(KeyError): + session.get('InvalidType', 'id') + + +def test_create(session): + '''Create entity.''' + user = session.create('User', {'username': 'martin'}) + with session.auto_populating(False): + assert user['id'] is not ftrack_api.symbol.NOT_SET + assert user['username'] == 'martin' + assert user['email'] is ftrack_api.symbol.NOT_SET + + +def test_create_using_only_defaults(session): + '''Create entity using defaults only.''' + user = session.create('User') + with session.auto_populating(False): + assert user['id'] is not ftrack_api.symbol.NOT_SET + assert user['username'] is ftrack_api.symbol.NOT_SET + + +def test_create_using_server_side_defaults(session): + '''Create entity using server side defaults.''' + user = session.create('User') + with session.auto_populating(False): + assert user['id'] is not ftrack_api.symbol.NOT_SET + assert user['username'] is ftrack_api.symbol.NOT_SET + + session.commit() + assert user['username'] is not ftrack_api.symbol.NOT_SET + + +def test_create_overriding_defaults(session): + '''Create entity overriding defaults.''' + uid = str(uuid.uuid4()) + user = session.create('User', {'id': uid}) + with session.auto_populating(False): + assert user['id'] == uid + + +def test_create_with_reference(session): + '''Create entity with a reference to another.''' + status = session.query('Status')[0] + task = session.create('Task', {'status': 
status}) + assert task['status'] is status + + +def test_ensure_new_entity(session, unique_name): + '''Ensure entity, creating first.''' + entity = session.ensure('User', {'username': unique_name}) + assert entity['username'] == unique_name + + +def test_ensure_entity_with_non_string_data_types(session): + '''Ensure entity against non-string data types, creating first.''' + datetime = arrow.get() + + task = session.query('Task').first() + user = session.query( + 'User where username is {}'.format(session.api_user) + ).first() + + first = session.ensure( + 'Timelog', + { + 'start': datetime, + 'duration': 10, + 'user_id': user['id'], + 'context_id': task['id'] + } + ) + + with mock.patch.object(session, 'create') as mocked: + session.ensure( + 'Timelog', + { + 'start': datetime, + 'duration': 10, + 'user_id': user['id'], + 'context_id': task['id'] + } + ) + assert not mocked.called + + assert first['start'] == datetime + assert first['duration'] == 10 + + +def test_ensure_entity_with_identifying_keys(session, unique_name): + '''Ensure entity, checking using keys subset and then creating.''' + entity = session.ensure( + 'User', {'username': unique_name, 'email': 'test@example.com'}, + identifying_keys=['username'] + ) + assert entity['username'] == unique_name + + +def test_ensure_entity_with_invalid_identifying_keys(session, unique_name): + '''Fail to ensure entity when identifying key missing from data.''' + with pytest.raises(KeyError): + session.ensure( + 'User', {'username': unique_name, 'email': 'test@example.com'}, + identifying_keys=['invalid'] + ) + + +def test_ensure_entity_with_missing_identifying_keys(session): + '''Fail to ensure entity when no identifying keys determined.''' + with pytest.raises(ValueError): + session.ensure('User', {}) + + +def test_ensure_existing_entity(session, unique_name): + '''Ensure existing entity.''' + entity = session.ensure('User', {'first_name': unique_name}) + + # Second call should not commit any new entity, just retrieve 
the existing. + with mock.patch.object(session, 'create') as mocked: + retrieved = session.ensure('User', {'first_name': unique_name}) + assert not mocked.called + assert retrieved == entity + + +def test_ensure_update_existing_entity(session, unique_name): + '''Ensure and update existing entity.''' + entity = session.ensure( + 'User', {'first_name': unique_name, 'email': 'anon@example.com'} + ) + assert entity['email'] == 'anon@example.com' + + # Second call should commit updates. + retrieved = session.ensure( + 'User', {'first_name': unique_name, 'email': 'test@example.com'}, + identifying_keys=['first_name'] + ) + assert retrieved == entity + assert retrieved['email'] == 'test@example.com' + + +def test_reconstruct_entity(session): + '''Reconstruct entity.''' + uid = str(uuid.uuid4()) + data = { + 'id': uid, + 'username': 'martin', + 'email': 'martin@example.com' + } + user = session.create('User', data, reconstructing=True) + + for attribute in user.attributes: + # No local attributes should be set. + assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET + + # Only remote attributes that had explicit values should be set. + value = attribute.get_remote_value(user) + if attribute.name in data: + assert value == data[attribute.name] + else: + assert value is ftrack_api.symbol.NOT_SET + + +def test_reconstruct_entity_does_not_apply_defaults(session): + '''Reconstruct entity does not apply defaults.''' + # Note: Use private method to avoid merge which requires id be set. + user = session._create('User', {}, reconstructing=True) + with session.auto_populating(False): + assert user['id'] is ftrack_api.symbol.NOT_SET + + +def test_reconstruct_empty_entity(session): + '''Reconstruct empty entity.''' + # Note: Use private method to avoid merge which requires id be set. + user = session._create('User', {}, reconstructing=True) + + for attribute in user.attributes: + # No local attributes should be set. 
+ assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET + + # No remote attributes should be set. + assert attribute.get_remote_value(user) is ftrack_api.symbol.NOT_SET + + +def test_delete_operation_ordering(session, unique_name): + '''Delete entities in valid order.''' + # Construct entities. + project_schema = session.query('ProjectSchema').first() + project = session.create('Project', { + 'name': unique_name, + 'full_name': unique_name, + 'project_schema': project_schema + }) + + sequence = session.create('Sequence', { + 'name': unique_name, + 'parent': project + }) + + session.commit() + + # Delete in order that should succeed. + session.delete(sequence) + session.delete(project) + + session.commit() + + +def test_create_then_delete_operation_ordering(session, unique_name): + '''Create and delete entity in one transaction.''' + entity = session.create('User', {'username': unique_name}) + session.delete(entity) + session.commit() + + +def test_create_and_modify_to_have_required_attribute(session, unique_name): + '''Create and modify entity to have required attribute in transaction.''' + entity = session.create('Scope', {}) + other = session.create('Scope', {'name': unique_name}) + entity['name'] = '{0}2'.format(unique_name) + session.commit() + + +def test_ignore_in_create_entity_payload_values_set_to_not_set( + mocker, unique_name, session +): + '''Ignore in commit, created entity data set to NOT_SET''' + mocked = mocker.patch.object(session, 'call') + + # Should ignore 'email' attribute in payload. 
+ new_user = session.create( + 'User', {'username': unique_name, 'email': 'test'} + ) + new_user['email'] = ftrack_api.symbol.NOT_SET + session.commit() + payloads = mocked.call_args[0][0] + assert len(payloads) == 1 + + +def test_ignore_operation_that_modifies_attribute_to_not_set( + mocker, session, user +): + '''Ignore in commit, operation that sets attribute value to NOT_SET''' + mocked = mocker.patch.object(session, 'call') + + # Should result in no call to server. + user['email'] = ftrack_api.symbol.NOT_SET + session.commit() + + assert not mocked.called + + +def test_operation_optimisation_on_commit(session, mocker): + '''Optimise operations on commit.''' + mocked = mocker.patch.object(session, 'call') + + user_a = session.create('User', {'username': 'bob'}) + user_a['username'] = 'foo' + user_a['email'] = 'bob@example.com' + + user_b = session.create('User', {'username': 'martin'}) + user_b['email'] = 'martin@ftrack.com' + + user_a['email'] = 'bob@example.com' + user_a['first_name'] = 'Bob' + + user_c = session.create('User', {'username': 'neverexist'}) + user_c['email'] = 'ignore@example.com' + session.delete(user_c) + + user_a_entity_key = ftrack_api.inspection.primary_key(user_a).values() + user_b_entity_key = ftrack_api.inspection.primary_key(user_b).values() + + session.commit() + + # The above operations should have translated into three payloads to call + # (two creates and one update). 
+ payloads = mocked.call_args[0][0] + assert len(payloads) == 3 + + assert payloads[0]['action'] == 'create' + assert payloads[0]['entity_key'] == user_a_entity_key + assert set(payloads[0]['entity_data'].keys()) == set([ + '__entity_type__', 'id', 'resource_type', 'username' + ]) + + assert payloads[1]['action'] == 'create' + assert payloads[1]['entity_key'] == user_b_entity_key + assert set(payloads[1]['entity_data'].keys()) == set([ + '__entity_type__', 'id', 'resource_type', 'username', 'email' + ]) + + assert payloads[2]['action'] == 'update' + assert payloads[2]['entity_key'] == user_a_entity_key + assert set(payloads[2]['entity_data'].keys()) == set([ + '__entity_type__', 'email', 'first_name' + ]) + + +def test_state_collection(session, unique_name, user): + '''Session state collection holds correct entities.''' + # NOT_SET + user_a = session.create('User', {'username': unique_name}) + session.commit() + + # CREATED + user_b = session.create('User', {'username': unique_name}) + user_b['username'] = 'changed' + + # MODIFIED + user_c = user + user_c['username'] = 'changed' + + # DELETED + user_d = session.create('User', {'username': unique_name}) + session.delete(user_d) + + assert session.created == [user_b] + assert session.modified == [user_c] + assert session.deleted == [user_d] + + +def test_get_entity_with_composite_primary_key(session, new_project): + '''Retrieve entity that uses a composite primary key.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + # Avoid cache. 
+ new_session = ftrack_api.Session() + retrieved_entity = new_session.get( + 'Metadata', ftrack_api.inspection.primary_key(entity).values() + ) + + assert retrieved_entity == entity + + +def test_get_entity_with_incomplete_composite_primary_key(session, new_project): + '''Fail to retrieve entity using incomplete composite primary key.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + # Avoid cache. + new_session = ftrack_api.Session() + with pytest.raises(ValueError): + new_session.get( + 'Metadata', ftrack_api.inspection.primary_key(entity).values()[0] + ) + + +def test_populate_entity(session, new_user): + '''Populate entity that uses single primary key.''' + with session.auto_populating(False): + assert new_user['email'] is ftrack_api.symbol.NOT_SET + + session.populate(new_user, 'email') + assert new_user['email'] is not ftrack_api.symbol.NOT_SET + + +def test_populate_entities(session, unique_name): + '''Populate multiple entities that use single primary key.''' + users = [] + for index in range(3): + users.append( + session.create( + 'User', {'username': '{0}-{1}'.format(unique_name, index)} + ) + ) + + session.commit() + + with session.auto_populating(False): + for user in users: + assert user['email'] is ftrack_api.symbol.NOT_SET + + session.populate(users, 'email') + + for user in users: + assert user['email'] is not ftrack_api.symbol.NOT_SET + + +def test_populate_entity_with_composite_primary_key(session, new_project): + '''Populate entity that uses a composite primary key.''' + entity = session.create('Metadata', { + 'key': 'key', 'value': 'value', + 'parent_type': new_project.entity_type, + 'parent_id': new_project['id'] + }) + + session.commit() + + # Avoid cache. 
+ new_session = ftrack_api.Session() + retrieved_entity = new_session.get( + 'Metadata', ftrack_api.inspection.primary_key(entity).values() + ) + + # Manually change already populated remote value so can test it gets reset + # on populate call. + retrieved_entity.attributes.get('value').set_remote_value( + retrieved_entity, 'changed' + ) + + new_session.populate(retrieved_entity, 'value') + assert retrieved_entity['value'] == 'value' + + +@pytest.mark.parametrize('server_information, compatible', [ + ({}, False), + ({'version': '3.3.11'}, True), + ({'version': '3.3.12'}, True), + ({'version': '3.4'}, True), + ({'version': '3.4.1'}, True), + ({'version': '3.5.16'}, True), + ({'version': '3.3.10'}, False) +], ids=[ + 'No information', + 'Valid current version', + 'Valid higher version', + 'Valid higher version', + 'Valid higher version', + 'Valid higher version', + 'Invalid lower version' +]) +def test_check_server_compatibility( + server_information, compatible, session +): + '''Check server compatibility.''' + with mock.patch.dict( + session._server_information, server_information, clear=True + ): + if compatible: + session.check_server_compatibility() + else: + with pytest.raises(ftrack_api.exception.ServerCompatibilityError): + session.check_server_compatibility() + + +def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): + '''Encode entity using "all" entity_attribute_strategy.''' + new_bar = mocked_schema_session.create( + 'Bar', + { + 'name': 'myBar', + 'id': 'bar_unique_id' + } + ) + + new_foo = mocked_schema_session.create( + 'Foo', + { + 'id': 'a_unique_id', + 'string': 'abc', + 'integer': 42, + 'number': 12345678.9, + 'boolean': False, + 'date': arrow.get('2015-11-18 15:24:09'), + 'bars': [new_bar] + } + ) + + encoded = mocked_schema_session.encode( + new_foo, entity_attribute_strategy='all' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Foo", + "bars": [{"__entity_type__": "Bar", "id": "bar_unique_id"}], + 
"boolean": false, + "date": {"__type__": "datetime", "value": "2015-11-18T15:24:09+00:00"}, + "id": "a_unique_id", + "integer": 42, + "number": 12345678.9, + "string": "abc"} + ''').replace('\n', '') + + +def test_encode_entity_using_only_set_attributes_strategy( + mocked_schema_session +): + '''Encode entity using "set_only" entity_attribute_strategy.''' + new_foo = mocked_schema_session.create( + 'Foo', + { + 'id': 'a_unique_id', + 'string': 'abc', + 'integer': 42 + } + ) + + encoded = mocked_schema_session.encode( + new_foo, entity_attribute_strategy='set_only' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Foo", + "id": "a_unique_id", + "integer": 42, + "string": "abc"} + ''').replace('\n', '') + + +def test_encode_computed_attribute_using_persisted_only_attributes_strategy( + mocked_schema_session +): + '''Encode computed attribute, "persisted_only" entity_attribute_strategy.''' + new_bar = mocked_schema_session._create( + 'Bar', + { + 'name': 'myBar', + 'id': 'bar_unique_id', + 'computed_value': 'FOO' + }, + reconstructing=True + ) + + encoded = mocked_schema_session.encode( + new_bar, entity_attribute_strategy='persisted_only' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Bar", + "id": "bar_unique_id", + "name": "myBar"} + ''').replace('\n', '') + + +def test_encode_entity_using_only_modified_attributes_strategy( + mocked_schema_session +): + '''Encode entity using "modified_only" entity_attribute_strategy.''' + new_foo = mocked_schema_session._create( + 'Foo', + { + 'id': 'a_unique_id', + 'string': 'abc', + 'integer': 42 + }, + reconstructing=True + ) + + new_foo['string'] = 'Modified' + + encoded = mocked_schema_session.encode( + new_foo, entity_attribute_strategy='modified_only' + ) + + assert encoded == textwrap.dedent(''' + {"__entity_type__": "Foo", + "id": "a_unique_id", + "string": "Modified"} + ''').replace('\n', '') + + +def test_encode_entity_using_invalid_strategy(session, new_task): + '''Fail to 
encode entity using invalid strategy.''' + with pytest.raises(ValueError): + session.encode(new_task, entity_attribute_strategy='invalid') + + +def test_encode_operation_payload(session): + '''Encode operation payload.''' + sequence_component = session.create_component( + "/path/to/sequence.%d.jpg [1]", location=None + ) + file_component = sequence_component["members"][0] + + encoded = session.encode([ + ftrack_api.session.OperationPayload({ + 'action': 'create', + 'entity_data': { + '__entity_type__': u'FileComponent', + u'container': sequence_component, + 'id': file_component['id'] + }, + 'entity_key': [file_component['id']], + 'entity_type': u'FileComponent' + }), + ftrack_api.session.OperationPayload({ + 'action': 'update', + 'entity_data': { + '__entity_type__': u'SequenceComponent', + u'members': ftrack_api.collection.Collection( + sequence_component, + sequence_component.attributes.get('members'), + data=[file_component] + ) + }, + 'entity_key': [sequence_component['id']], + 'entity_type': u'SequenceComponent' + }) + ]) + + expected = textwrap.dedent(''' + [{{"action": "create", + "entity_data": {{"__entity_type__": "FileComponent", + "container": {{"__entity_type__": "SequenceComponent", + "id": "{0[id]}"}}, + "id": "{1[id]}"}}, + "entity_key": ["{1[id]}"], + "entity_type": "FileComponent"}}, + {{"action": "update", + "entity_data": {{"__entity_type__": "SequenceComponent", + "members": [{{"__entity_type__": "FileComponent", "id": "{1[id]}"}}]}}, + "entity_key": ["{0[id]}"], + "entity_type": "SequenceComponent"}}] + '''.format(sequence_component, file_component)).replace('\n', '') + + assert encoded == expected + + +def test_decode_partial_entity( + session, new_task +): + '''Decode partially encoded entity.''' + encoded = session.encode( + new_task, entity_attribute_strategy='set_only' + ) + + entity = session.decode(encoded) + + assert entity == new_task + assert entity is not new_task + + +def test_reset(mocker): + '''Reset session.''' + plugin_path = 
os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') + ) + session = ftrack_api.Session(plugin_paths=[plugin_path]) + + assert hasattr(session.types.get('User'), 'stub') + location = session.query('Location where name is "test.location"').one() + assert location.accessor is not ftrack_api.symbol.NOT_SET + + mocked_close = mocker.patch.object(session._request, 'close') + mocked_fetch = mocker.patch.object(session, '_load_schemas') + + session.reset() + + # Assert custom entity type maintained. + assert hasattr(session.types.get('User'), 'stub') + + # Assert location plugin re-configured. + location = session.query('Location where name is "test.location"').one() + assert location.accessor is not ftrack_api.symbol.NOT_SET + + # Assert connection not closed and no schema fetch issued. + assert not mocked_close.called + assert not mocked_fetch.called + + +def test_rollback_scalar_attribute_change(session, new_user): + '''Rollback scalar attribute change via session.''' + assert not session.recorded_operations + current_first_name = new_user['first_name'] + + new_user['first_name'] = 'NewName' + assert new_user['first_name'] == 'NewName' + assert session.recorded_operations + + session.rollback() + + assert not session.recorded_operations + assert new_user['first_name'] == current_first_name + + +def test_rollback_collection_attribute_change(session, new_user): + '''Rollback collection attribute change via session.''' + assert not session.recorded_operations + current_timelogs = new_user['timelogs'] + assert list(current_timelogs) == [] + + timelog = session.create('Timelog', {}) + new_user['timelogs'].append(timelog) + assert list(new_user['timelogs']) == [timelog] + assert session.recorded_operations + + session.rollback() + + assert not session.recorded_operations + assert list(new_user['timelogs']) == [] + + +def test_rollback_entity_creation(session): + '''Rollback entity creation via session.''' + assert not 
session.recorded_operations + + new_user = session.create('User') + assert session.recorded_operations + assert new_user in session.created + + session.rollback() + + assert not session.recorded_operations + assert new_user not in session.created + assert new_user not in session._local_cache.values() + + +def test_rollback_entity_deletion(session, new_user): + '''Rollback entity deletion via session.''' + assert not session.recorded_operations + + session.delete(new_user) + assert session.recorded_operations + assert new_user in session.deleted + + session.rollback() + assert not session.recorded_operations + assert new_user not in session.deleted + assert new_user in session._local_cache.values() + + +# Caching +# ------------------------------------------------------------------------------ + + +def test_get_entity_bypassing_cache(session, user, mocker): + '''Retrieve an entity by type and id bypassing cache.''' + mocker.patch.object(session, 'call', wraps=session.call) + + session.cache.remove( + session.cache_key_maker.key(ftrack_api.inspection.identity(user)) + ) + + matching = session.get(*ftrack_api.inspection.identity(user)) + + # Check a different instance returned. + assert matching is not user + + # Check instances have the same identity. + assert matching == user + + # Check cache was bypassed and server was called. + assert session.call.called + + +def test_get_entity_from_cache(cache, task, mocker): + '''Retrieve an entity by type and id from cache.''' + session = ftrack_api.Session(cache=cache) + + # Prepare cache. + session.merge(task) + + # Disable server calls. + mocker.patch.object(session, 'call') + + # Retrieve entity from cache. + entity = session.get(*ftrack_api.inspection.identity(task)) + + assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity == task + assert entity is not task + + # Check that no call was made to server. 
+ assert not session.call.called + + +def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): + '''Retrieve an entity tree from cache.''' + session = ftrack_api.Session(cache=cache) + + # Prepare cache. + # TODO: Maybe cache should be prepopulated for a better check here. + session.query( + 'select children, children.children, children.children.children, ' + 'children.children.children.assignments, ' + 'children.children.children.assignments.resource ' + 'from Project where id is "{0}"' + .format(new_project_tree['id']) + ).one() + + # Disable server calls. + mocker.patch.object(session, 'call') + + # Retrieve entity from cache. + entity = session.get(*ftrack_api.inspection.identity(new_project_tree)) + + assert entity is not None, 'Failed to retrieve entity from cache.' + assert entity == new_project_tree + assert entity is not new_project_tree + + # Check tree. + with session.auto_populating(False): + for sequence in entity['children']: + for shot in sequence['children']: + for task in shot['children']: + assignments = task['assignments'] + for assignment in assignments: + resource = assignment['resource'] + + assert resource is not ftrack_api.symbol.NOT_SET + + # Check that no call was made to server. + assert not session.call.called + + +def test_get_metadata_from_cache(session, mocker, cache, new_task): + '''Retrieve an entity along with its metadata from cache.''' + new_task['metadata']['key'] = 'value' + session.commit() + + fresh_session = ftrack_api.Session(cache=cache) + + # Prepare cache. + fresh_session.query( + 'select metadata.key, metadata.value from ' + 'Task where id is "{0}"' + .format(new_task['id']) + ).all() + + # Disable server calls. + mocker.patch.object(fresh_session, 'call') + + # Retrieve entity from cache. + entity = fresh_session.get(*ftrack_api.inspection.identity(new_task)) + + assert entity is not None, 'Failed to retrieve entity from cache.' 
+ assert entity == new_task + assert entity is not new_task + + # Check metadata cached correctly. + with fresh_session.auto_populating(False): + metadata = entity['metadata'] + assert metadata['key'] == 'value' + + assert not fresh_session.call.called + + +def test_merge_circular_reference(cache, temporary_file): + '''Merge circular reference into cache.''' + session = ftrack_api.Session(cache=cache) + # The following will test the condition as a FileComponent will be created + # with corresponding ComponentLocation. The server will return the file + # component data with the component location embedded. The component + # location will in turn have an embedded reference to the file component. + # If the merge does not prioritise the primary keys of the instance then + # any cache that relies on using the identity of the file component will + # fail. + component = session.create_component(path=temporary_file) + assert component + + +def test_create_with_selective_cache(session): + '''Create entity does not store entity in selective cache.''' + cache = ftrack_api.cache.MemoryCache() + session.cache.caches.append(SelectiveCache(cache)) + try: + user = session.create('User', {'username': 'martin'}) + cache_key = session.cache_key_maker.key( + ftrack_api.inspection.identity(user) + ) + + with pytest.raises(KeyError): + cache.get(cache_key) + + finally: + session.cache.caches.pop() + + +def test_correct_file_type_on_sequence_component(session): + '''Create sequence component with correct file type.''' + path = '/path/to/image/sequence.%04d.dpx [1-10]' + sequence_component = session.create_component(path) + + assert sequence_component['file_type'] == '.dpx' + + +def test_read_schemas_from_cache( + session, temporary_valid_schema_cache +): + '''Read valid content from schema cache.''' + expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' + + schemas, hash_ = session._read_schemas_from_cache( + temporary_valid_schema_cache + ) + + assert expected_hash == hash_ + + +def 
test_fail_to_read_schemas_from_invalid_cache( + session, temporary_invalid_schema_cache +): + '''Fail to read invalid content from schema cache.''' + with pytest.raises(ValueError): + session._read_schemas_from_cache( + temporary_invalid_schema_cache + ) + + +def test_write_schemas_to_cache( + session, temporary_valid_schema_cache +): + '''Write valid content to schema cache.''' + expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' + schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) + + session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) + + schemas, hash_ = session._read_schemas_from_cache( + temporary_valid_schema_cache + ) + + assert expected_hash == hash_ + + +def test_fail_to_write_invalid_schemas_to_cache( + session, temporary_valid_schema_cache +): + '''Fail to write invalid content to schema cache.''' + # Datetime not serialisable by default. + invalid_content = datetime.datetime.now() + + with pytest.raises(TypeError): + session._write_schemas_to_cache( + invalid_content, temporary_valid_schema_cache + ) + + +def test_load_schemas_from_valid_cache( + mocker, session, temporary_valid_schema_cache, mocked_schemas +): + '''Load schemas from cache.''' + expected_schemas = session._load_schemas(temporary_valid_schema_cache) + + mocked = mocker.patch.object(session, 'call') + schemas = session._load_schemas(temporary_valid_schema_cache) + + assert schemas == expected_schemas + assert not mocked.called + + +def test_load_schemas_from_server_when_cache_invalid( + mocker, session, temporary_invalid_schema_cache +): + '''Load schemas from server when cache invalid.''' + mocked = mocker.patch.object(session, 'call', wraps=session.call) + + session._load_schemas(temporary_invalid_schema_cache) + assert mocked.called + + +def test_load_schemas_from_server_when_cache_outdated( + mocker, session, temporary_valid_schema_cache +): + '''Load schemas from server when cache outdated.''' + schemas, _ = 
session._read_schemas_from_cache(temporary_valid_schema_cache) + schemas.append({ + 'id': 'NewTest' + }) + session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) + + mocked = mocker.patch.object(session, 'call', wraps=session.call) + session._load_schemas(temporary_valid_schema_cache) + + assert mocked.called + + +def test_load_schemas_from_server_not_reporting_schema_hash( + mocker, session, temporary_valid_schema_cache +): + '''Load schemas from server when server does not report schema hash.''' + mocked_write = mocker.patch.object( + session, '_write_schemas_to_cache', + wraps=session._write_schemas_to_cache + ) + + server_information = session._server_information.copy() + server_information.pop('schema_hash') + mocker.patch.object( + session, '_server_information', new=server_information + ) + + session._load_schemas(temporary_valid_schema_cache) + + # Cache still written even if hash not reported. + assert mocked_write.called + + mocked = mocker.patch.object(session, 'call', wraps=session.call) + session._load_schemas(temporary_valid_schema_cache) + + # No hash reported by server so cache should have been bypassed. 
+ assert mocked.called + + +def test_load_schemas_bypassing_cache( + mocker, session, temporary_valid_schema_cache +): + '''Load schemas bypassing cache when set to False.''' + with mocker.patch.object(session, 'call', wraps=session.call): + + session._load_schemas(temporary_valid_schema_cache) + assert session.call.call_count == 1 + + session._load_schemas(False) + assert session.call.call_count == 2 + + +def test_get_tasks_widget_url(session): + '''Tasks widget URL returns valid HTTP status.''' + url = session.get_widget_url('tasks') + response = requests.get(url) + response.raise_for_status() + + +def test_get_info_widget_url(session, task): + '''Info widget URL for *task* returns valid HTTP status.''' + url = session.get_widget_url('info', entity=task, theme='light') + response = requests.get(url) + response.raise_for_status() + + +def test_encode_media_from_path(session, video_path): + '''Encode media based on a file path.''' + job = session.encode_media(video_path) + + assert job.entity_type == 'Job' + + job_data = json.loads(job['data']) + assert 'output' in job_data + assert 'source_component_id' in job_data + assert 'keep_original' in job_data and job_data['keep_original'] is False + assert len(job_data['output']) + assert 'component_id' in job_data['output'][0] + assert 'format' in job_data['output'][0] + + +def test_encode_media_from_component(session, video_path): + '''Encode media based on a component.''' + location = session.query('Location where name is "ftrack.server"').one() + component = session.create_component( + video_path, + location=location + ) + session.commit() + + job = session.encode_media(component) + + assert job.entity_type == 'Job' + + job_data = json.loads(job['data']) + assert 'keep_original' in job_data and job_data['keep_original'] is True + + +def test_create_sequence_component_with_size(session, temporary_sequence): + '''Create a sequence component and verify that is has a size.''' + location = session.query('Location where 
name is "ftrack.server"').one() + component = session.create_component( + temporary_sequence + ) + + assert component['size'] > 0 + + +def test_plugin_arguments(mocker): + '''Pass plugin arguments to plugin discovery mechanism.''' + mock = mocker.patch( + 'ftrack_api.plugin.discover' + ) + session = ftrack_api.Session( + plugin_paths=[], plugin_arguments={"test": "value"} + ) + assert mock.called + mock.assert_called_once_with([], [session], {"test": "value"}) + +def test_remote_reset(session, new_user): + '''Reset user api key.''' + key_1 = session.reset_remote( + 'api_key', entity=new_user + ) + + key_2 = session.reset_remote( + 'api_key', entity=new_user + ) + + + assert key_1 != key_2 + + +@pytest.mark.parametrize('attribute', [ + ('id',), + ('email',) + +], ids=[ + 'Fail resetting primary key', + 'Fail resetting attribute without default value', +]) +def test_fail_remote_reset(session, user, attribute): + '''Fail trying to rest invalid attributes.''' + + with pytest.raises(ftrack_api.exception.ServerError): + session.reset_remote( + attribute, user + ) + + +def test_close(session): + '''Close session.''' + assert session.closed is False + session.close() + assert session.closed is True + + +def test_close_already_closed_session(session): + '''Close session that is already closed.''' + session.close() + assert session.closed is True + session.close() + assert session.closed is True + + +def test_server_call_after_close(session): + '''Fail to issue calls to server after session closed.''' + session.close() + assert session.closed is True + + with pytest.raises(ftrack_api.exception.ConnectionClosedError): + session.query('User').first() + + +def test_context_manager(session): + '''Use session as context manager.''' + with session: + assert session.closed is False + + assert session.closed is True + + +def test_delayed_job(session): + '''Test the delayed_job action''' + + with pytest.raises(ValueError): + session.delayed_job( + 'DUMMY_JOB' + ) + + 
+@pytest.mark.skip(reason='No configured ldap server.') +def test_delayed_job_ldap_sync(session): + '''Test the a delayed_job ldap sync action''' + result = session.delayed_job( + ftrack_api.symbol.JOB_SYNC_USERS_LDAP + ) + + assert isinstance( + result, ftrack_api.entity.job.Job + ) + + +def test_query_nested_custom_attributes(session, new_asset_version): + '''Query custom attributes nested and update a value and query again. + + This test will query custom attributes via 2 relations, then update the + value in one API session and read it back in another to verify that it gets + the new value. + + ''' + session_one = session + session_two = ftrack_api.Session( + auto_connect_event_hub=False + ) + + # Read the version via a relation in both sessions. + def get_versions(sessions): + versions = [] + for _session in sessions: + asset = _session.query( + 'select versions.custom_attributes from Asset where id is "{0}"'.format( + new_asset_version.get('asset_id') + ) + ).first() + + for version in asset['versions']: + if version.get('id') == new_asset_version.get('id'): + versions.append(version) + + return versions + + # Get version from both sessions. + versions = get_versions((session_one, session_two)) + + # Read attribute for both sessions. + for version in versions: + version['custom_attributes']['versiontest'] + + # Set attribute on session_one. + versions[0]['custom_attributes']['versiontest'] = random.randint( + 0, 99999 + ) + + session.commit() + + # Read version from server for session_two. + session_two_version = get_versions((session_two, ))[0] + + # Verify that value in session 2 is the same as set and committed in + # session 1. + assert ( + session_two_version['custom_attributes']['versiontest'] == + versions[0]['custom_attributes']['versiontest'] + ) + + +def test_query_nested(session): + '''Query components nested and update a value and query again. 
+ + This test will query components via 2 relations, then update the + value in one API session and read it back in another to verify that it gets + the new value. + + ''' + session_one = session + session_two = ftrack_api.Session( + auto_connect_event_hub=False + ) + + query = ( + 'select versions.components.name from Asset where id is ' + '"12939d0c-6766-11e1-8104-f23c91df25eb"' + ) + + def get_version(session): + '''Return the test version from *session*.''' + asset = session.query(query).first() + asset_version = None + for version in asset['versions']: + if version['version'] == 8: + asset_version = version + break + + return asset_version + + asset_version = get_version(session_one) + asset_version2 = get_version(session_two) + + # This assert is not needed, but reading the collections are to ensure they + # are inflated. + assert ( + asset_version2['components'][0]['name'] == + asset_version['components'][0]['name'] + ) + + asset_version['components'][0]['name'] = str(uuid.uuid4()) + + session.commit() + + asset_version2 = get_version(session_two) + + assert ( + asset_version['components'][0]['name'] == + asset_version2['components'][0]['name'] + ) + + +def test_merge_iterations(session, mocker, project): + '''Ensure merge does not happen to many times when querying.''' + mocker.spy(session, '_merge') + + session.query( + 'select status from Task where project_id is {} limit 10'.format( + project['id'] + ) + ).all() + + assert session._merge.call_count < 75 + + +@pytest.mark.parametrize( + 'get_versions', + [ + lambda component, asset_version, asset: component['version']['asset']['versions'], + lambda component, asset_version, asset: asset_version['asset']['versions'], + lambda component, asset_version, asset: asset['versions'], + ], + ids=[ + 'from_component', + 'from_asset_version', + 'from_asset', + ] +) +def test_query_nested2(session, get_versions): + '''Query version.asset.versions from component and then add new version. 
+ + This test will query versions via multiple relations and ensure a new + version appears when added to a different session and then is queried + again. + + ''' + session_one = session + session_two = ftrack_api.Session( + auto_connect_event_hub=False + ) + + # Get a random component that is linked to a version and asset. + component_id = session_two.query( + 'FileComponent where version.asset_id != None' + ).first()['id'] + + query = ( + 'select version.asset.versions from Component where id is "{}"'.format( + component_id + ) + ) + + component = session_one.query(query).one() + asset_version = component['version'] + asset = component['version']['asset'] + versions = component['version']['asset']['versions'] + length = len(versions) + + session_two.create('AssetVersion', { + 'asset_id': asset['id'] + }) + + session_two.commit() + + component = session_one.query(query).one() + versions = get_versions(component, asset_version, asset) + new_length = len(versions) + + assert length + 1 == new_length + + +def test_session_ready_reset_events(mocker): + '''Session ready and reset events.''' + plugin_path = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') + ) + session = ftrack_api.Session(plugin_paths=[plugin_path]) + + assert session._test_called_events['ftrack.api.session.ready'] is 1 + assert session._test_called_events['ftrack.api.session.reset'] is 0 + + session.reset() + assert session._test_called_events['ftrack.api.session.ready'] is 1 + assert session._test_called_events['ftrack.api.session.reset'] is 1 + + +def test_entity_reference(mocker, session): + '''Return entity reference that uniquely identifies entity.''' + mock_entity = mocker.Mock(entity_type="MockEntityType") + mock_auto_populating = mocker.patch.object(session, "auto_populating") + mock_primary_key = mocker.patch( + "ftrack_api.inspection.primary_key", return_value={"id": "mock-id"} + ) + + reference = session.entity_reference(mock_entity) + + assert reference 
== { + "__entity_type__": "MockEntityType", + "id": "mock-id" + } + + mock_auto_populating.assert_called_once_with(False) + mock_primary_key.assert_called_once_with(mock_entity) + + +def test__entity_reference(mocker, session): + '''Act as alias to entity_reference.''' + mock_entity = mocker.Mock(entity_type="MockEntityType") + mock_entity_reference = mocker.patch.object(session, "entity_reference") + mocker.patch("warnings.warn") + + session._entity_reference(mock_entity) + + mock_entity_reference.assert_called_once_with(mock_entity) + + +def test__entity_reference_issues_deprecation_warning(mocker, session): + '''Issue deprecation warning for usage of _entity_reference.''' + mocker.patch.object(session, "entity_reference") + mock_warn = mocker.patch("warnings.warn") + + session._entity_reference({}) + + mock_warn.assert_called_once_with( + ( + "Session._entity_reference is now available as public method " + "Session.entity_reference. The private method will be removed " + "in version 2.0." 
+ ), + PendingDeprecationWarning + ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py new file mode 100644 index 0000000000..cf8b014ee5 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py @@ -0,0 +1,74 @@ +# :coding: utf-8 +# :copyright: Copyright (c) 2015 ftrack + +import pytest +import ftrack_api.exception + + +def test_manually_create_multiple_timers_with_error(session, new_user): + '''Fail to create a second timer.''' + session.create('Timer', { + 'user': new_user + }) + + session.commit() + + with pytest.raises(ftrack_api.exception.ServerError): + session.create('Timer', { + 'user': new_user + }) + + session.commit() + + session.reset() + + +def test_create_multiple_timers_with_error(session, new_user): + '''Fail to create a second timer.''' + new_user.start_timer() + + with pytest.raises(ftrack_api.exception.NotUniqueError): + new_user.start_timer() + + session.reset() + + +def test_start_and_stop_a_timer(session, new_user, new_task): + '''Start a new timer and stop it to create a timelog.''' + new_user.start_timer(new_task) + + new_user.stop_timer() + + timelog = session.query( + 'Timelog where context_id = "{0}"'.format(new_task['id']) + ).one() + + assert timelog['user_id'] == new_user['id'], 'User id is correct.' + assert timelog['context_id'] == new_task['id'], 'Task id is correct.' + + +def test_start_a_timer_when_timer_is_running(session, new_user, new_task): + '''Start a timer when an existing timer is already running.''' + new_user.start_timer(new_task) + + # Create the second timer without context. + new_user.start_timer(force=True) + + # There should be only one existing timelog for this user. 
+ timelogs = session.query( + 'Timelog where user_id = "{0}"'.format(new_user['id']) + ).all() + assert len(timelogs) == 1, 'One timelog exists.' + + timelog = session.query( + 'Timer where user_id = "{0}"'.format(new_user['id']) + ).one() + + # Make sure running timer has no context. + assert timelog['context_id'] is None, 'Timer does not have a context.' + + +def test_stop_timer_without_timer_running(session, new_user): + '''Stop a timer when no timer is running.''' + with pytest.raises(ftrack_api.exception.NoResultFoundError): + new_user.stop_timer() diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py similarity index 95% rename from openpype/modules/ftrack/scripts/sub_event_processor.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py index 0d94fa7264..51b45eb93b 100644 --- a/openpype/modules/ftrack/scripts/sub_event_processor.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py @@ -4,8 +4,8 @@ import signal import socket import datetime -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py similarity index 98% rename from openpype/modules/ftrack/scripts/sub_event_status.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_status.py index 24b9bfb789..8a2733b635 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py @@ -7,8 +7,8 @@ import socket import datetime import ftrack_api -from 
openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StatusEventHub, TOPIC_STATUS_SERVER, diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py similarity index 96% rename from openpype/modules/ftrack/scripts/sub_event_storer.py rename to openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py index 6e2990ef0b..a8649e0ccc 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py @@ -6,14 +6,14 @@ import socket import pymongo import ftrack_api -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, StorerEventHub, TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT ) -from openpype.modules.ftrack.lib import get_ftrack_event_mongo_info +from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info from openpype.lib import OpenPypeMongoConnection from openpype.api import Logger diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py similarity index 97% rename from openpype/modules/ftrack/scripts/sub_legacy_server.py rename to openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py index ae6aefa908..e3a623c376 100644 --- a/openpype/modules/ftrack/scripts/sub_legacy_server.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_legacy_server.py @@ -7,7 +7,7 @@ import threading import ftrack_api from openpype.api import Logger from openpype.modules 
import ModulesManager -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer log = Logger().get_logger("Event Server Legacy") diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py b/openpype/modules/default_modules/ftrack/scripts/sub_user_server.py similarity index 93% rename from openpype/modules/ftrack/scripts/sub_user_server.py rename to openpype/modules/default_modules/ftrack/scripts/sub_user_server.py index 971a31b703..a3701a0950 100644 --- a/openpype/modules/ftrack/scripts/sub_user_server.py +++ b/openpype/modules/default_modules/ftrack/scripts/sub_user_server.py @@ -2,8 +2,8 @@ import sys import signal import socket -from openpype.modules.ftrack.ftrack_server.ftrack_server import FtrackServer -from openpype.modules.ftrack.ftrack_server.lib import ( +from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer +from openpype_modules.ftrack.ftrack_server.lib import ( SocketSession, SocketBaseEventHub ) diff --git a/openpype/modules/ftrack/tray/__init__.py b/openpype/modules/default_modules/ftrack/tray/__init__.py similarity index 100% rename from openpype/modules/ftrack/tray/__init__.py rename to openpype/modules/default_modules/ftrack/tray/__init__.py diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/default_modules/ftrack/tray/ftrack_tray.py similarity index 100% rename from openpype/modules/ftrack/tray/ftrack_tray.py rename to openpype/modules/default_modules/ftrack/tray/ftrack_tray.py diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/default_modules/ftrack/tray/login_dialog.py similarity index 99% rename from openpype/modules/ftrack/tray/login_dialog.py rename to openpype/modules/default_modules/ftrack/tray/login_dialog.py index cc5689bee5..6384621c8e 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/default_modules/ftrack/tray/login_dialog.py @@ 
-1,7 +1,7 @@ import os import requests from openpype import style -from openpype.modules.ftrack.lib import credentials +from openpype_modules.ftrack.lib import credentials from . import login_tools from openpype import resources from Qt import QtCore, QtGui, QtWidgets diff --git a/openpype/modules/ftrack/tray/login_tools.py b/openpype/modules/default_modules/ftrack/tray/login_tools.py similarity index 100% rename from openpype/modules/ftrack/tray/login_tools.py rename to openpype/modules/default_modules/ftrack/tray/login_tools.py diff --git a/openpype/modules/ftrack/python2_vendor/arrow b/openpype/modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/openpype/modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 9a66e93b48e037aac95b60d1c24281f369dc74ae Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:21:14 +0200 Subject: [PATCH 074/308] define function for modules directory paths --- openpype/modules/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 877c363f61..1895281cb8 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -61,6 +61,14 @@ class _InterfacesClass(_ModuleClass): return self.__attributes__[attr_name] +def module_dirs(): + current_dir = os.path.abspath(os.path.dirname(__file__)) + dirpaths = [ + os.path.join(current_dir, "default_modules") + ] + return dirpaths + + def load_interfaces(force=False): if not force and "openpype_interfaces" in sys.modules: return From 
9cdacdf397a8bdbaf379dda17a9ff50d7e6b463e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:46:56 +0200 Subject: [PATCH 075/308] use modified meta class for interface _OpenPypeInterfaceMeta --- openpype/modules/base.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1895281cb8..fc5a29a345 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -170,7 +170,15 @@ def load_modules(force=False): setattr(openpype_modules, "project_manager_action", project_manager_action) -@six.add_metaclass(ABCMeta) + + +class _OpenPypeInterfaceMeta(ABCMeta): + """OpenPypeInterface meta class to print proper string.""" + def __str__(self): + return "<'OpenPypeInterface.{}'>".format(self.__name__) + + +@six.add_metaclass(_OpenPypeInterfaceMeta) class OpenPypeInterface: """Base class of Interface that can be used as Mixin with abstract parts. From d2fb85b2358cc46a6cf89bada705f0872a0e8fb6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:47:09 +0200 Subject: [PATCH 076/308] added dictionary access to modules --- openpype/modules/base.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index fc5a29a345..662a7b02e1 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -38,6 +38,15 @@ class _ModuleClass(object): def __setattr__(self, attr_name, value): self.__attributes__[attr_name] = value + def __setitem__(self, key, value): + self.__setattr__(key, value) + + def __getitem__(self, key): + return getattr(self, key) + + def get(self, key, default=None): + return self.__attributes__.get(key, default) + def keys(self): return self.__attributes__.keys() From f6d1fd9740446e3fd5c3e23c1293c5ea07ddd768 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:47:43 +0200 Subject: [PATCH 077/308] dynamic loading of modules --- openpype/modules/base.py | 124 
+++++++++++++++++++++++---------------- 1 file changed, 72 insertions(+), 52 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 662a7b02e1..c812bdfc37 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -70,10 +70,15 @@ class _InterfacesClass(_ModuleClass): return self.__attributes__[attr_name] -def module_dirs(): +def get_default_modules_dir(): current_dir = os.path.abspath(os.path.dirname(__file__)) + + return os.path.join(current_dir, "default_modules") + + +def get_module_dirs(): dirpaths = [ - os.path.join(current_dir, "default_modules") + get_default_modules_dir() ] return dirpaths @@ -90,18 +95,24 @@ def load_interfaces(force=False): log = PypeLogger.get_logger("InterfacesLoader") - current_dir = os.path.abspath(os.path.dirname(__file__)) + dirpaths = get_module_dirs() - interface_paths = [ - os.path.join(current_dir, "interfaces.py") - ] + interface_paths = [] + interface_paths.append( + os.path.join(get_default_modules_dir(), "interfaces.py") + ) + for dirpath in dirpaths: + for filename in os.listdir(dirpath): + if filename in ("__pycache__", ): + continue - for filename in os.listdir(current_dir): - full_path = os.path.join(current_dir, filename) - if os.path.isdir(full_path): - interface_paths.append( - os.path.join(full_path, "interfaces.py") - ) + full_path = os.path.join(dirpath, filename) + if not os.path.isdir(full_path): + continue + + interfaces_path = os.path.join(full_path, "interfaces.py") + if os.path.exists(interfaces_path): + interface_paths.append(interfaces_path) # print(interface_paths) for full_path in interface_paths: @@ -131,54 +142,63 @@ def load_interfaces(force=False): def load_modules(force=False): - if not force and "openpype_modules" in sys.modules: + # TODO add thread lock + + # First load interfaces + # - modules must not be imported before interfaces + load_interfaces(force) + + # Key under which will be modules imported in `sys.modules` + modules_key = "openpype_modules" + 
+ # Check if are modules already loaded or no + if not force and modules_key in sys.modules: return - from openpype.lib import modules_from_path - - sys.modules["openpype_modules"] = openpype_modules = _ModuleClass( - "openpype_modules" + # Import helper functions from lib + from openpype.lib import ( + import_filepath, + load_module_from_dirpath ) + # Change `sys.modules` + sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) + log = PypeLogger.get_logger("ModulesLoader") - # TODO import dynamically from defined paths - from . import ( - avalon_apps, - clockify, - deadline, - ftrack, - idle_manager, - log_viewer, - muster, - settings_module, - slack, - sync_server, - timers_manager, - webserver, - launcher_action, - standalonepublish_action, - project_manager_action - ) - setattr(openpype_modules, "avalon_apps", avalon_apps) - setattr(openpype_modules, "clockify", clockify) - setattr(openpype_modules, "deadline", deadline) - setattr(openpype_modules, "ftrack", ftrack) - setattr(openpype_modules, "idle_manager", idle_manager) - setattr(openpype_modules, "log_viewer", log_viewer) - setattr(openpype_modules, "muster", muster) - setattr(openpype_modules, "settings_module", settings_module) - setattr(openpype_modules, "sync_server", sync_server) - setattr(openpype_modules, "slack", slack) - setattr(openpype_modules, "timers_manager", timers_manager) - setattr(openpype_modules, "webserver", webserver) - setattr(openpype_modules, "launcher_action", launcher_action) - setattr( - openpype_modules, "standalonepublish_action", standalonepublish_action - ) - setattr(openpype_modules, "project_manager_action", project_manager_action) + # Look for OpenPype modules in paths defined with `get_module_dirs` + dirpaths = get_module_dirs() + for dirpath in dirpaths: + if not os.path.exists(dirpath): + log.warning(( + "Could not find path when loading OpenPype modules \"{}\"" + ).format(dirpath)) + continue + for filename in os.listdir(dirpath): + # Ignore filenames 
+ if filename in ("__pycache__", ): + continue + + fullpath = os.path.join(dirpath, filename) + basename, ext = os.path.splitext(filename) + + module = None + # TODO add more logic how to define if folder is module or not + # - check manifest and content of manifest + if os.path.isdir(fullpath): + module = load_module_from_dirpath( + dirpath, filename, modules_key + ) + module_name = filename + + elif ext in (".py", ): + module = import_filepath(fullpath) + module_name = basename + + if module is not None: + setattr(openpype_modules, module_name, module) class _OpenPypeInterfaceMeta(ABCMeta): From a0e80dec6091b4e4949112911d506eabeafb1f9a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:48:07 +0200 Subject: [PATCH 078/308] skip collect_modules method --- openpype/modules/base.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c812bdfc37..4dfe382030 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -305,13 +305,10 @@ class ModulesManager: self.initialize_modules() self.connect_modules() - def collect_modules(self): - load_interfaces() - load_modules() - def initialize_modules(self): """Import and initialize modules.""" - self.collect_modules() + # Make sure modules are loaded + load_modules() import openpype_modules From 49c649e36d3e9073d9899a22a11b11a515b91ea8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:48:16 +0200 Subject: [PATCH 079/308] added few docstrings --- openpype/modules/base.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 4dfe382030..f0fb6c91fa 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -18,14 +18,23 @@ from openpype.lib import PypeLogger # Inherit from `object` for Python 2 hosts class _ModuleClass(object): + """Fake module class for storing OpenPype modules. 
+ + Object of this class can be stored to `sys.modules` and used for storing + dynamically imported modules. + """ def __init__(self, name): # Call setattr on super class super(_ModuleClass, self).__setattr__("name", name) + + # Where modules and interfaces are stored super(_ModuleClass, self).__setattr__("__attributes__", dict()) super(_ModuleClass, self).__setattr__("__defaults__", set()) def __getattr__(self, attr_name): if attr_name not in self.__attributes__: + if attr_name in ("__path__"): + return None raise ImportError("No module named {}.{}".format( self.name, attr_name )) @@ -58,6 +67,12 @@ class _ModuleClass(object): class _InterfacesClass(_ModuleClass): + """Fake module class for storing OpenPype interfaces. + + MissingInterface object is returned if interfaces does not exists. + - this is because interfaces must be available even if are missing + implementation + """ def __getattr__(self, attr_name): if attr_name not in self.__attributes__: # Fake Interface if is not missing @@ -213,11 +228,19 @@ class OpenPypeInterface: This is way how OpenPype module or addon can tell that has implementation for specific part or for other module/addon. + + Child classes of OpenPypeInterface may be used as mixin in different + OpenPype modules which means they have to have implemented methods defined + in the interface. By default interface does not have any abstract parts. """ pass class MissingInteface(OpenPypeInterface): + """Class representing missing interface class. + + Used when interface is not available from currently registered paths. 
+ """ pass From 4a5f015f4f939e94f79244eafb32054aa86ae003 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 14:57:04 +0200 Subject: [PATCH 080/308] renamed function 'load_module_from_dirpath' to 'import_module_from_dirpath' --- openpype/lib/__init__.py | 4 ++-- openpype/lib/python_module_tools.py | 10 +++++----- openpype/modules/base.py | 4 ++-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 52a6024feb..9bcd0f7587 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -56,7 +56,7 @@ from .python_module_tools import ( modules_from_path, recursive_bases_from_class, classes_from_module, - load_module_from_dirpath + import_module_from_dirpath ) from .avalon_context import ( @@ -176,7 +176,7 @@ __all__ = [ "modules_from_path", "recursive_bases_from_class", "classes_from_module", - "load_module_from_dirpath", + "import_module_from_dirpath", "CURRENT_DOC_SCHEMAS", "PROJECT_NAME_ALLOWED_SYMBOLS", diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 102ae7e71a..59e7ad9123 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -136,7 +136,7 @@ def classes_from_module(superclass, module): return classes -def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): +def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): full_module_name = "{}.{}".format(dst_module_name, module_name) if full_module_name in sys.modules: return sys.modules[full_module_name] @@ -152,7 +152,7 @@ def _load_module_from_dirpath_py2(dirpath, module_name, dst_module_name): return module -def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): +def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): full_module_name = "{}.{}".format(dst_module_name, module_name) if full_module_name in sys.modules: return sys.modules[full_module_name] @@ -179,13 +179,13 @@ 
def _load_module_from_dirpath_py3(dirpath, module_name, dst_module_name): return module -def load_module_from_dirpath(dirpath, folder_name, dst_module_name): +def import_module_from_dirpath(dirpath, folder_name, dst_module_name): if PY3: - module = _load_module_from_dirpath_py3( + module = _import_module_from_dirpath_py3( dirpath, folder_name, dst_module_name ) else: - module = _load_module_from_dirpath_py2( + module = _import_module_from_dirpath_py2( dirpath, folder_name, dst_module_name ) return module diff --git a/openpype/modules/base.py b/openpype/modules/base.py index f0fb6c91fa..fc53d3b27a 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -173,7 +173,7 @@ def load_modules(force=False): # Import helper functions from lib from openpype.lib import ( import_filepath, - load_module_from_dirpath + import_module_from_dirpath ) # Change `sys.modules` @@ -203,7 +203,7 @@ def load_modules(force=False): # TODO add more logic how to define if folder is module or not # - check manifest and content of manifest if os.path.isdir(fullpath): - module = load_module_from_dirpath( + module = import_module_from_dirpath( dirpath, filename, modules_key ) module_name = filename From 05c6e450f59aae428669e2685d27a9614ab02b88 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 15:09:59 +0200 Subject: [PATCH 081/308] slighlty modified import function --- openpype/lib/python_module_tools.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index 59e7ad9123..c5849225ce 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -137,23 +137,33 @@ def classes_from_module(superclass, module): def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) + if dst_module_name: + full_module_name = "{}.{}".format(dst_module_name, 
module_name) + dst_module = sys.modules[dst_module_name] + else: + full_module_name = module_name + dst_module = None + if full_module_name in sys.modules: return sys.modules[full_module_name] import imp - dst_module = sys.modules[dst_module_name] - fp, pathname, description = imp.find_module(module_name, [dirpath]) module = imp.load_module(full_module_name, fp, pathname, description) - setattr(dst_module, module_name, module) + if dst_module is not None: + setattr(dst_module, module_name, module) return module def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): - full_module_name = "{}.{}".format(dst_module_name, module_name) + if dst_module_name: + full_module_name = "{}.{}".format(dst_module_name, module_name) + dst_module = sys.modules[dst_module_name] + else: + full_module_name = module_name + dst_module = None if full_module_name in sys.modules: return sys.modules[full_module_name] @@ -179,7 +189,7 @@ def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): return module -def import_module_from_dirpath(dirpath, folder_name, dst_module_name): +def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): if PY3: module = _import_module_from_dirpath_py3( dirpath, folder_name, dst_module_name From da4e4e750152c59b0139fe0b05110ba99e53a86c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 15:23:14 +0200 Subject: [PATCH 082/308] added docstrings --- openpype/lib/python_module_tools.py | 39 ++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/openpype/lib/python_module_tools.py b/openpype/lib/python_module_tools.py index c5849225ce..cb5f285ddd 100644 --- a/openpype/lib/python_module_tools.py +++ b/openpype/lib/python_module_tools.py @@ -10,6 +10,15 @@ PY3 = sys.version_info[0] == 3 def import_filepath(filepath, module_name=None): + """Import python file as python module. + + Python 2 and Python 3 compatibility. + + Args: + filepath(str): Path to python file. 
+ module_name(str): Name of loaded module. Only for Python 3. By default + is filled with filename of filepath. + """ if module_name is None: module_name = os.path.splitext(os.path.basename(filepath))[0] @@ -137,6 +146,7 @@ def classes_from_module(superclass, module): def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): + """Import passed dirpath as python module using `imp`.""" if dst_module_name: full_module_name = "{}.{}".format(dst_module_name, module_name) dst_module = sys.modules[dst_module_name] @@ -158,38 +168,65 @@ def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name): def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name): + """Import passed dirpath as python module using Python 3 modules.""" if dst_module_name: full_module_name = "{}.{}".format(dst_module_name, module_name) dst_module = sys.modules[dst_module_name] else: full_module_name = module_name dst_module = None + + # Skip import if is already imported if full_module_name in sys.modules: return sys.modules[full_module_name] import importlib.util from importlib._bootstrap_external import PathFinder - dst_module = sys.modules[dst_module_name] + # Find loader for passed path and name loader = PathFinder.find_module(full_module_name, [dirpath]) + # Load specs of module spec = importlib.util.spec_from_loader( full_module_name, loader, origin=dirpath ) + # Create module based on specs module = importlib.util.module_from_spec(spec) + # Store module to destination module and `sys.modules` + # WARNING this mus be done before module execution if dst_module is not None: setattr(dst_module, module_name, module) sys.modules[full_module_name] = module + # Execute module import loader.exec_module(module) return module def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None): + """Import passed directory as a python module. + + Python 2 and 3 compatible. 
+ + Imported module can be assigned as a child attribute of already loaded + module from `sys.modules` if has support of `setattr`. That is not default + behavior of python modules so parent module must be a custom module with + that ability. + + It is not possible to reimport already cached module. If you need to + reimport module you have to remove it from caches manually. + + Args: + dirpath(str): Parent directory path of loaded folder. + folder_name(str): Folder name which should be imported inside passed + directory. + dst_module_name(str): Parent module name under which can be loaded + module added. + """ if PY3: module = _import_module_from_dirpath_py3( dirpath, folder_name, dst_module_name From 3579a62cb0a9bc422b3738cab1139b313ae8d5a8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 28 Jul 2021 15:26:14 +0200 Subject: [PATCH 083/308] force to load openpype modules on install --- openpype/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/__init__.py b/openpype/__init__.py index a86d2bc2be..70d2871468 100644 --- a/openpype/__init__.py +++ b/openpype/__init__.py @@ -68,6 +68,10 @@ def patched_discover(superclass): def install(): """Install Pype to Avalon.""" from pyblish.lib import MessageHandler + from openpype.modules import load_modules + + # Make sure modules are loaded + load_modules() def modified_emit(obj, record): """Method replacing `emit` in Pyblish's MessageHandler.""" From 52c6bdc0e5669d729402dee9221d3d1a44087109 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 28 Jul 2021 18:01:09 +0200 Subject: [PATCH 084/308] Webpublisher - backend - skip version collect for webpublisher --- openpype/plugins/publish/collect_scene_version.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_scene_version.py b/openpype/plugins/publish/collect_scene_version.py index 669e6752f3..62969858c5 100644 --- a/openpype/plugins/publish/collect_scene_version.py +++ 
b/openpype/plugins/publish/collect_scene_version.py @@ -16,7 +16,8 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): if "standalonepublisher" in context.data.get("host", []): return - if "unreal" in pyblish.api.registered_hosts(): + if "unreal" in pyblish.api.registered_hosts() or \ + "webpublisher" in pyblish.api.registered_hosts(): return assert context.data.get('currentFile'), "Cannot get current file" From f104d601319efcec086d6a3a44a11c37ec74832e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 28 Jul 2021 18:03:02 +0200 Subject: [PATCH 085/308] Webpublisher - backend - updated command Added logging to DB for reports --- openpype/cli.py | 7 +++-- openpype/pype_commands.py | 64 +++++++++++++++++++++++++++++++++++---- 2 files changed, 62 insertions(+), 9 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index e56a572c9c..8dc32b307a 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -146,13 +146,14 @@ def publish(debug, paths, targets): @main.command() -@click.argument("paths", nargs=-1) +@click.argument("path") @click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-h", "--host", help="Host") +@click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets module", default=None, multiple=True) -def remotepublish(debug, project, paths, host, targets=None): +def remotepublish(debug, project, path, host, targets=None, user=None): """Start CLI publishing. Publish collects json from paths provided as an argument. 
@@ -160,7 +161,7 @@ def remotepublish(debug, project, paths, host, targets=None): """ if debug: os.environ['OPENPYPE_DEBUG'] = '3' - PypeCommands.remotepublish(project, list(paths), host, targets=None) + PypeCommands.remotepublish(project, path, host, user, targets=targets) @main.command() diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index d2726fd2a6..24becd2423 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -3,7 +3,7 @@ import os import sys import json -from pathlib import Path +from datetime import datetime from openpype.lib import PypeLogger from openpype.api import get_app_environments_for_context @@ -112,25 +112,30 @@ class PypeCommands: uninstall() @staticmethod - def remotepublish(project, paths, host, targets=None): + def remotepublish(project, batch_path, host, user, targets=None): """Start headless publishing. Publish use json from passed paths argument. Args: - paths (list): Paths to jsons. + project (str): project to publish (only single context is expected + per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) targets (string): What module should be targeted (to choose validator for example) host (string) + user (string): email address for webpublisher Raises: RuntimeError: When there is no path to process. 
""" - if not any(paths): + if not batch_path: raise RuntimeError("No publish paths specified") from openpype import install, uninstall from openpype.api import Logger + from openpype.lib import OpenPypeMongoConnection # Register target and host import pyblish.api @@ -149,20 +154,67 @@ class PypeCommands: for target in targets: pyblish.api.register_target(target) - os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths) + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = host # to trigger proper plugings + os.environ["AVALON_APP_NAME"] = host # to trigger proper plugings + + # this should be more generic + from openpype.hosts.webpublisher.api import install as w_install + w_install() + pyblish.api.register_host(host) log.info("Running publish ...") # Error exit as soon as any error occurs. error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + dbcon = mongo_client[database_name]["webpublishes"] + + _, batch_id = os.path.split(batch_path) + _id = dbcon.insert_one({ + "batch_id": batch_id, + "start_date": datetime.now(), + "user": user, + "status": "in_progress" + }).inserted_id + for result in pyblish.util.publish_iter(): if result["error"]: log.error(error_format.format(**result)) uninstall() + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "finish_date": datetime.now(), + "status": "error", + "msg": error_format.format(**result) + } + } + ) sys.exit(1) + else: + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "progress": result["progress"] + } + } + ) + + dbcon.update_one( + {"_id": _id}, + {"$set": + { + "finish_date": datetime.now(), + "state": "finished_ok", + "progress": 1 + } + } + ) log.info("Publish finished.") uninstall() From a43837ca91983d7251d1bbb8232b302abc29c950 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 28 Jul 2021 18:03:39 +0200 
Subject: [PATCH 086/308] Webpublisher - backend - added collector plugin --- openpype/hosts/webpublisher/api/__init__.py | 36 +++ .../plugins/collect_published_files.py | 159 ---------- .../publish/collect_published_files.py | 292 ++++++++++++++++++ 3 files changed, 328 insertions(+), 159 deletions(-) create mode 100644 openpype/hosts/webpublisher/api/__init__.py delete mode 100644 openpype/hosts/webpublisher/plugins/collect_published_files.py create mode 100644 openpype/hosts/webpublisher/plugins/publish/collect_published_files.py diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py new file mode 100644 index 0000000000..908c9b10be --- /dev/null +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -0,0 +1,36 @@ +import os +import logging + +from avalon import api as avalon +from pyblish import api as pyblish +import openpype.hosts.webpublisher + +log = logging.getLogger("openpype.hosts.webpublisher") + +HOST_DIR = os.path.dirname(os.path.abspath( + openpype.hosts.webpublisher.__file__)) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") + + +def application_launch(): + pass + + +def install(): + print("Installing Pype config...") + + pyblish.register_plugin_path(PUBLISH_PATH) + avalon.register_plugin_path(avalon.Loader, LOAD_PATH) + avalon.register_plugin_path(avalon.Creator, CREATE_PATH) + log.info(PUBLISH_PATH) + + avalon.on("application.launched", application_launch) + +def uninstall(): + pyblish.deregister_plugin_path(PUBLISH_PATH) + avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) + avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) + diff --git a/openpype/hosts/webpublisher/plugins/collect_published_files.py b/openpype/hosts/webpublisher/plugins/collect_published_files.py deleted file mode 100644 index 1cc0dfe83f..0000000000 --- 
a/openpype/hosts/webpublisher/plugins/collect_published_files.py +++ /dev/null @@ -1,159 +0,0 @@ -"""Loads publishing context from json and continues in publish process. - -Requires: - anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) - -Provides: - context, instances -> All data from previous publishing process. -""" - -import os -import json - -import pyblish.api -from avalon import api - - -class CollectPublishedFiles(pyblish.api.ContextPlugin): - """ - This collector will try to find json files in provided - `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. - - """ - # must be really early, context values are only in json file - order = pyblish.api.CollectorOrder - 0.495 - label = "Collect rendered frames" - host = ["webpublisher"] - - _context = None - - def _load_json(self, path): - path = path.strip('\"') - assert os.path.isfile(path), ( - "Path to json file doesn't exist. \"{}\"".format(path) - ) - data = None - with open(path, "r") as json_file: - try: - data = json.load(json_file) - except Exception as exc: - self.log.error( - "Error loading json: " - "{} - Exception: {}".format(path, exc) - ) - return data - - def _fill_staging_dir(self, data_object, anatomy): - staging_dir = data_object.get("stagingDir") - if staging_dir: - data_object["stagingDir"] = anatomy.fill_root(staging_dir) - - def _process_path(self, data, anatomy): - # validate basic necessary data - data_err = "invalid json file - missing data" - required = ["asset", "user", "comment", - "job", "instances", "session", "version"] - assert all(elem in data.keys() for elem in required), data_err - - # set context by first json file - ctx = self._context.data - - ctx["asset"] = ctx.get("asset") or data.get("asset") - ctx["intent"] = ctx.get("intent") or data.get("intent") - ctx["comment"] = ctx.get("comment") or data.get("comment") - ctx["user"] = ctx.get("user") or data.get("user") - ctx["version"] = ctx.get("version") or data.get("version") - - # basic sanity 
check to see if we are working in same context - # if some other json file has different context, bail out. - ctx_err = "inconsistent contexts in json files - %s" - assert ctx.get("asset") == data.get("asset"), ctx_err % "asset" - assert ctx.get("intent") == data.get("intent"), ctx_err % "intent" - assert ctx.get("comment") == data.get("comment"), ctx_err % "comment" - assert ctx.get("user") == data.get("user"), ctx_err % "user" - assert ctx.get("version") == data.get("version"), ctx_err % "version" - - # ftrack credentials are passed as environment variables by Deadline - # to publish job, but Muster doesn't pass them. - if data.get("ftrack") and not os.environ.get("FTRACK_API_USER"): - ftrack = data.get("ftrack") - os.environ["FTRACK_API_USER"] = ftrack["FTRACK_API_USER"] - os.environ["FTRACK_API_KEY"] = ftrack["FTRACK_API_KEY"] - os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"] - - # now we can just add instances from json file and we are done - for instance_data in data.get("instances"): - self.log.info(" - processing instance for {}".format( - instance_data.get("subset"))) - instance = self._context.create_instance( - instance_data.get("subset") - ) - self.log.info("Filling stagingDir...") - - self._fill_staging_dir(instance_data, anatomy) - instance.data.update(instance_data) - - # stash render job id for later validation - instance.data["render_job_id"] = data.get("job").get("_id") - - representations = [] - for repre_data in instance_data.get("representations") or []: - self._fill_staging_dir(repre_data, anatomy) - representations.append(repre_data) - - instance.data["representations"] = representations - - # add audio if in metadata data - if data.get("audio"): - instance.data.update({ - "audio": [{ - "filename": data.get("audio"), - "offset": 0 - }] - }) - self.log.info( - f"Adding audio to instance: {instance.data['audio']}") - - def process(self, context): - self._context = context - - assert os.environ.get("OPENPYPE_PUBLISH_DATA"), ( - "Missing 
`OPENPYPE_PUBLISH_DATA`") - paths = os.environ["OPENPYPE_PUBLISH_DATA"].split(os.pathsep) - - project_name = os.environ.get("AVALON_PROJECT") - if project_name is None: - raise AssertionError( - "Environment `AVALON_PROJECT` was not found." - "Could not set project `root` which may cause issues." - ) - - # TODO root filling should happen after collect Anatomy - self.log.info("Getting root setting for project \"{}\"".format( - project_name - )) - - anatomy = context.data["anatomy"] - self.log.info("anatomy: {}".format(anatomy.roots)) - try: - session_is_set = False - for path in paths: - path = anatomy.fill_root(path) - data = self._load_json(path) - assert data, "failed to load json file" - if not session_is_set: - session_data = data["session"] - remapped = anatomy.roots_obj.path_remapper( - session_data["AVALON_WORKDIR"] - ) - if remapped: - session_data["AVALON_WORKDIR"] = remapped - - self.log.info("Setting session using data from file") - api.Session.update(session_data) - os.environ.update(session_data) - session_is_set = True - self._process_path(data, anatomy) - except Exception as e: - self.log.error(e, exc_info=True) - raise Exception("Error") from e diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py new file mode 100644 index 0000000000..69d30e06e1 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -0,0 +1,292 @@ +"""Loads publishing context from json and continues in publish process. + +Requires: + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + +Provides: + context, instances -> All data from previous publishing process. 
+""" + +import os +import json +import clique + +import pyblish.api +from avalon import api + +FAMILY_SETTING = { # TEMP + "Animation": { + "workfile": { + "is_sequence": False, + "extensions": ["tvp"], + "families": [] + }, + "render": { + "is_sequence": True, + "extensions": [ + "png", "exr", "tiff", "tif" + ], + "families": ["review"] + } + }, + "Compositing": { + "workfile": { + "is_sequence": False, + "extensions": ["aep"], + "families": [] + }, + "render": { + "is_sequence": True, + "extensions": [ + "png", "exr", "tiff", "tif" + ], + "families": ["review"] + } + }, + "Layout": { + "workfile": { + "is_sequence": False, + "extensions": [ + ".psd" + ], + "families": [] + }, + "image": { + "is_sequence": False, + "extensions": [ + "png", + "jpg", + "jpeg", + "tiff", + "tif" + ], + "families": [ + "review" + ] + } + } +} + +class CollectPublishedFiles(pyblish.api.ContextPlugin): + """ + This collector will try to find json files in provided + `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share same context. + + """ + # must be really early, context values are only in json file + order = pyblish.api.CollectorOrder - 0.490 + label = "Collect rendered frames" + host = ["webpublisher"] + + _context = None + + def _load_json(self, path): + path = path.strip('\"') + assert os.path.isfile(path), ( + "Path to json file doesn't exist. 
\"{}\"".format(path) + ) + data = None + with open(path, "r") as json_file: + try: + data = json.load(json_file) + except Exception as exc: + self.log.error( + "Error loading json: " + "{} - Exception: {}".format(path, exc) + ) + return data + + def _fill_staging_dir(self, data_object, anatomy): + staging_dir = data_object.get("stagingDir") + if staging_dir: + data_object["stagingDir"] = anatomy.fill_root(staging_dir) + + def _process_path(self, data): + # validate basic necessary data + data_err = "invalid json file - missing data" + # required = ["asset", "user", "comment", + # "job", "instances", "session", "version"] + # assert all(elem in data.keys() for elem in required), data_err + + # set context by first json file + ctx = self._context.data + + ctx["asset"] = ctx.get("asset") or data.get("asset") + ctx["intent"] = ctx.get("intent") or data.get("intent") + ctx["comment"] = ctx.get("comment") or data.get("comment") + ctx["user"] = ctx.get("user") or data.get("user") + ctx["version"] = ctx.get("version") or data.get("version") + + # basic sanity check to see if we are working in same context + # if some other json file has different context, bail out. 
+ ctx_err = "inconsistent contexts in json files - %s" + assert ctx.get("asset") == data.get("asset"), ctx_err % "asset" + assert ctx.get("intent") == data.get("intent"), ctx_err % "intent" + assert ctx.get("comment") == data.get("comment"), ctx_err % "comment" + assert ctx.get("user") == data.get("user"), ctx_err % "user" + assert ctx.get("version") == data.get("version"), ctx_err % "version" + + # now we can just add instances from json file and we are done + for instance_data in data.get("instances"): + self.log.info(" - processing instance for {}".format( + instance_data.get("subset"))) + instance = self._context.create_instance( + instance_data.get("subset") + ) + self.log.info("Filling stagingDir...") + + self._fill_staging_dir(instance_data, anatomy) + instance.data.update(instance_data) + + # stash render job id for later validation + instance.data["render_job_id"] = data.get("job").get("_id") + + representations = [] + for repre_data in instance_data.get("representations") or []: + self._fill_staging_dir(repre_data, anatomy) + representations.append(repre_data) + + instance.data["representations"] = representations + + # add audio if in metadata data + if data.get("audio"): + instance.data.update({ + "audio": [{ + "filename": data.get("audio"), + "offset": 0 + }] + }) + self.log.info( + f"Adding audio to instance: {instance.data['audio']}") + + def _process_batch(self, dir_url): + task_subfolders = [os.path.join(dir_url, o) + for o in os.listdir(dir_url) + if os.path.isdir(os.path.join(dir_url, o))] + self.log.info("task_sub:: {}".format(task_subfolders)) + for task_dir in task_subfolders: + task_data = self._load_json(os.path.join(task_dir, + "manifest.json")) + self.log.info("task_data:: {}".format(task_data)) + ctx = task_data["context"] + asset = subset = task = task_type = None + + subset = "Main" # temp + if ctx["type"] == "task": + items = ctx["path"].split('/') + asset = items[-2] + os.environ["AVALON_TASK"] = ctx["name"] + task_type = 
ctx["attributes"]["type"] + else: + asset = ctx["name"] + + is_sequence = len(task_data["files"]) > 1 + + instance = self._context.create_instance(subset) + _, extension = os.path.splitext(task_data["files"][0]) + self.log.info("asset:: {}".format(asset)) + family, families = self._get_family(FAMILY_SETTING, # todo + task_type, + is_sequence, + extension.replace(".", '')) + os.environ["AVALON_ASSET"] = asset + instance.data["asset"] = asset + instance.data["subset"] = subset + instance.data["family"] = family + instance.data["families"] = families + # instance.data["version"] = self._get_version(task_data["subset"]) + instance.data["stagingDir"] = task_dir + instance.data["source"] = "webpublisher" + + os.environ["FTRACK_API_USER"] = task_data["user"] + + if is_sequence: + instance.data["representations"] = self._process_sequence( + task_data["files"], task_dir + ) + else: + _, ext = os.path.splittext(task_data["files"][0]) + repre_data = { + "name": ext[1:], + "ext": ext[1:], + "files": task_data["files"], + "stagingDir": task_dir + } + instance.data["representation"] = repre_data + + self.log.info("instance.data:: {}".format(instance.data)) + + def _process_sequence(self, files, task_dir): + """Prepare reprentations for sequence of files.""" + collections, remainder = clique.assemble(files) + assert len(collections) == 1, \ + "Too many collections in {}".format(files) + + frame_start = list(collections[0].indexes)[0] + frame_end = list(collections[0].indexes)[-1] + ext = collections[0].tail + repre_data = { + "frameStart": frame_start, + "frameEnd": frame_end, + "name": ext[1:], + "ext": ext[1:], + "files": files, + "stagingDir": task_dir + } + self.log.info("repre_data.data:: {}".format(repre_data)) + return [repre_data] + + def _get_family(self, settings, task_type, is_sequence, extension): + """Guess family based on input data. 
+ + Args: + settings (dict): configuration per task_type + task_type (str): Animation|Art etc + is_sequence (bool): single file or sequence + extension (str): without '.' + + Returns: + (family, [families]) tuple + AssertionError if not matching family found + """ + task_obj = settings.get(task_type) + assert task_obj, "No family configuration for '{}'".format(task_type) + + found_family = None + for family, content in task_obj.items(): + if is_sequence != content["is_sequence"]: + continue + if extension in content["extensions"]: + found_family = family + break + + msg = "No family found for combination of " +\ + "task_type: {}, is_sequence:{}, extension: {}".format( + task_type, is_sequence, extension) + assert found_family, msg + + return found_family, content["families"] + + def _get_version(self, subset_name): + return 1 + + def process(self, context): + self._context = context + + batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") + + assert batch_dir, ( + "Missing `OPENPYPE_PUBLISH_DATA`") + + assert batch_dir, \ + "Folder {} doesn't exist".format(batch_dir) + + project_name = os.environ.get("AVALON_PROJECT") + if project_name is None: + raise AssertionError( + "Environment `AVALON_PROJECT` was not found." + "Could not set project `root` which may cause issues." 
+ ) + + self._process_batch(batch_dir) + From 824714c2f898a6eb37569d88b875a3137c16d1f9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 28 Jul 2021 18:04:20 +0200 Subject: [PATCH 087/308] Webpublisher - backend - added endpoints for reporting --- openpype/modules/webserver/webserver_cli.py | 126 ++++++++++++++------ 1 file changed, 88 insertions(+), 38 deletions(-) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 00caa24d27..04d0002787 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -8,6 +8,8 @@ from aiohttp.web_response import Response import subprocess from avalon.api import AvalonMongoDB + +from openpype.lib import OpenPypeMongoConnection from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint @@ -32,36 +34,6 @@ class WebpublisherProjectsEndpoint(_RestApiEndpoint): ) -class Node(dict): - """Node element in context tree.""" - - def __init__(self, uid, node_type, name): - self._parent = None # pointer to parent Node - self["type"] = node_type - self["name"] = name - self['id'] = uid # keep reference to id # - self['children'] = [] # collection of pointers to child Nodes - - @property - def parent(self): - return self._parent # simply return the object at the _parent pointer - - @parent.setter - def parent(self, node): - self._parent = node - # add this node to parent's list of children - node['children'].append(self) - - -class TaskNode(Node): - """Special node type only for Tasks.""" - def __init__(self, node_type, name): - self._parent = None - self["type"] = node_type - self["name"] = name - self["attributes"] = {} - - class WebpublisherHiearchyEndpoint(_RestApiEndpoint): """Returns dictionary with context tree from assets.""" async def get(self, project_name) -> Response: @@ -129,21 +101,52 @@ class WebpublisherHiearchyEndpoint(_RestApiEndpoint): ) -class WebpublisherTaskFinishEndpoint(_RestApiEndpoint): +class Node(dict): + 
"""Node element in context tree.""" + + def __init__(self, uid, node_type, name): + self._parent = None # pointer to parent Node + self["type"] = node_type + self["name"] = name + self['id'] = uid # keep reference to id # + self['children'] = [] # collection of pointers to child Nodes + + @property + def parent(self): + return self._parent # simply return the object at the _parent pointer + + @parent.setter + def parent(self, node): + self._parent = node + # add this node to parent's list of children + node['children'].append(self) + + +class TaskNode(Node): + """Special node type only for Tasks.""" + + def __init__(self, node_type, name): + self._parent = None + self["type"] = node_type + self["name"] = name + self["attributes"] = {} + + +class WebpublisherPublishEndpoint(_RestApiEndpoint): """Returns list of project names.""" async def post(self, request) -> Response: output = {} print(request) - json_path = os.path.join(self.resource.upload_dir, - "webpublisher.json") # temp - pull from request + batch_path = os.path.join(self.resource.upload_dir, + request.query["batch_id"]) openpype_app = self.resource.executable args = [ openpype_app, 'remotepublish', - json_path + batch_path ] if not openpype_app or not os.path.exists(openpype_app): @@ -152,7 +155,8 @@ class WebpublisherTaskFinishEndpoint(_RestApiEndpoint): add_args = { "host": "webpublisher", - "project": request.query["project"] + "project": request.query["project"], + "user": request.query["user"] } for key, value in add_args.items(): @@ -169,6 +173,30 @@ class WebpublisherTaskFinishEndpoint(_RestApiEndpoint): ) +class BatchStatusEndpoint(_RestApiEndpoint): + """Returns list of project names.""" + async def get(self, batch_id) -> Response: + output = self.dbcon.find_one({"batch_id": batch_id}) + + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class PublishesStatusEndpoint(_RestApiEndpoint): + """Returns list of project names.""" + async def 
get(self, user) -> Response: + output = self.dbcon.find({"user": user}) + + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + class RestApiResource: def __init__(self, server_manager, executable, upload_dir): self.server_manager = server_manager @@ -195,6 +223,13 @@ class RestApiResource: ).encode("utf-8") +class OpenPypeRestApiResource(RestApiResource): + def __init__(self, ): + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + self.dbcon = mongo_client[database_name]["webpublishes"] + + def run_webserver(*args, **kwargs): from openpype.modules import ModulesManager @@ -219,11 +254,26 @@ def run_webserver(*args, **kwargs): hiearchy_endpoint.dispatch ) - task_finish_endpoint = WebpublisherTaskFinishEndpoint(resource) + webpublisher_publish_endpoint = WebpublisherPublishEndpoint(resource) webserver_module.server_manager.add_route( "POST", - "/api/task_finish", - task_finish_endpoint.dispatch + "/api/webpublish/{batch_id}", + webpublisher_publish_endpoint.dispatch + ) + + openpype_resource = OpenPypeRestApiResource() + batch_status_endpoint = BatchStatusEndpoint(openpype_resource) + webserver_module.server_manager.add_route( + "GET", + "/api/batch_status/{batch_id}", + batch_status_endpoint.dispatch + ) + + user_status_endpoint = PublishesStatusEndpoint(openpype_resource) + webserver_module.server_manager.add_route( + "GET", + "/api/publishes/{user}", + user_status_endpoint.dispatch ) webserver_module.start_server() From bfd2ad65cf2877f245192ba8d28548ae1edc0ad2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 28 Jul 2021 18:10:09 +0200 Subject: [PATCH 088/308] Webpublisher - backend - hound --- openpype/hosts/webpublisher/api/__init__.py | 2 +- .../plugins/publish/collect_published_files.py | 3 +-- openpype/modules/webserver/webserver_cli.py | 3 +-- openpype/pype_commands.py | 9 +++------ 4 files changed, 6 insertions(+), 11 deletions(-) 
diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 908c9b10be..1b6edcf24d 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -29,8 +29,8 @@ def install(): avalon.on("application.launched", application_launch) + def uninstall(): pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) - diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 69d30e06e1..dde9713c7a 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -169,7 +169,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "manifest.json")) self.log.info("task_data:: {}".format(task_data)) ctx = task_data["context"] - asset = subset = task = task_type = None + task_type = None subset = "Main" # temp if ctx["type"] == "task": @@ -289,4 +289,3 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): ) self._process_batch(batch_dir) - diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 04d0002787..484c25c6b3 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -165,7 +165,7 @@ class WebpublisherPublishEndpoint(_RestApiEndpoint): print("args:: {}".format(args)) - exit_code = subprocess.call(args, shell=True) + _exit_code = subprocess.call(args, shell=True) return Response( status=200, body=self.resource.encode(output), @@ -279,4 +279,3 @@ def run_webserver(*args, **kwargs): webserver_module.start_server() while True: time.sleep(0.5) - diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 24becd2423..01fa6b8d33 100644 --- 
a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -191,8 +191,7 @@ class PypeCommands: "finish_date": datetime.now(), "status": "error", "msg": error_format.format(**result) - } - } + }} ) sys.exit(1) else: @@ -201,8 +200,7 @@ class PypeCommands: {"$set": { "progress": result["progress"] - } - } + }} ) dbcon.update_one( @@ -212,8 +210,7 @@ class PypeCommands: "finish_date": datetime.now(), "state": "finished_ok", "progress": 1 - } - } + }} ) log.info("Publish finished.") From 60c0a8adf7656cbf67b4ec528418105c0527f378 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 29 Jul 2021 10:49:58 +0200 Subject: [PATCH 089/308] removed code of submodules --- .../python2_vendor/arrow/.github/FUNDING.yml | 1 - .../.github/ISSUE_TEMPLATE/bug_report.md | 27 - .../.github/ISSUE_TEMPLATE/documentation.md | 17 - .../.github/ISSUE_TEMPLATE/feature_request.md | 17 - .../arrow/.github/pull_request_template.md | 22 - .../workflows/continuous_integration.yml | 123 - .../ftrack/python2_vendor/arrow/.gitignore | 211 - .../arrow/.pre-commit-config.yaml | 41 - .../ftrack/python2_vendor/arrow/CHANGELOG.rst | 598 --- .../ftrack/python2_vendor/arrow/LICENSE | 201 - .../ftrack/python2_vendor/arrow/MANIFEST.in | 3 - .../ftrack/python2_vendor/arrow/Makefile | 44 - .../ftrack/python2_vendor/arrow/README.rst | 133 - .../python2_vendor/arrow/arrow/__init__.py | 18 - .../python2_vendor/arrow/arrow/_version.py | 1 - .../ftrack/python2_vendor/arrow/arrow/api.py | 54 - .../python2_vendor/arrow/arrow/arrow.py | 1584 ------ .../python2_vendor/arrow/arrow/constants.py | 9 - .../python2_vendor/arrow/arrow/factory.py | 301 -- .../python2_vendor/arrow/arrow/formatter.py | 139 - .../python2_vendor/arrow/arrow/locales.py | 4267 ----------------- .../python2_vendor/arrow/arrow/parser.py | 596 --- .../ftrack/python2_vendor/arrow/arrow/util.py | 115 - .../ftrack/python2_vendor/arrow/docs/Makefile | 20 - .../ftrack/python2_vendor/arrow/docs/conf.py | 62 - .../python2_vendor/arrow/docs/index.rst | 566 
--- .../ftrack/python2_vendor/arrow/docs/make.bat | 35 - .../python2_vendor/arrow/docs/releases.rst | 3 - .../python2_vendor/arrow/requirements.txt | 14 - .../ftrack/python2_vendor/arrow/setup.cfg | 2 - .../ftrack/python2_vendor/arrow/setup.py | 50 - .../python2_vendor/arrow/tests/__init__.py | 0 .../python2_vendor/arrow/tests/conftest.py | 76 - .../python2_vendor/arrow/tests/test_api.py | 28 - .../python2_vendor/arrow/tests/test_arrow.py | 2150 --------- .../arrow/tests/test_factory.py | 390 -- .../arrow/tests/test_formatter.py | 282 -- .../arrow/tests/test_locales.py | 1352 ------ .../python2_vendor/arrow/tests/test_parser.py | 1657 ------- .../python2_vendor/arrow/tests/test_util.py | 81 - .../python2_vendor/arrow/tests/utils.py | 16 - .../ftrack/python2_vendor/arrow/tox.ini | 53 - .../ftrack-python-api/.gitignore | 42 - .../ftrack-python-api/LICENSE.python | 254 - .../ftrack-python-api/LICENSE.txt | 176 - .../ftrack-python-api/MANIFEST.in | 4 - .../ftrack-python-api/README.rst | 34 - .../ftrack-python-api/bitbucket-pipelines.yml | 24 - .../ftrack-python-api/doc/_static/ftrack.css | 16 - .../doc/api_reference/accessor/base.rst | 8 - .../doc/api_reference/accessor/disk.rst | 8 - .../doc/api_reference/accessor/index.rst | 14 - .../doc/api_reference/accessor/server.rst | 8 - .../doc/api_reference/attribute.rst | 8 - .../doc/api_reference/cache.rst | 8 - .../doc/api_reference/collection.rst | 8 - .../api_reference/entity/asset_version.rst | 8 - .../doc/api_reference/entity/base.rst | 8 - .../doc/api_reference/entity/component.rst | 8 - .../doc/api_reference/entity/factory.rst | 8 - .../doc/api_reference/entity/index.rst | 14 - .../doc/api_reference/entity/job.rst | 8 - .../doc/api_reference/entity/location.rst | 8 - .../doc/api_reference/entity/note.rst | 8 - .../api_reference/entity/project_schema.rst | 8 - .../doc/api_reference/entity/user.rst | 8 - .../doc/api_reference/event/base.rst | 8 - .../doc/api_reference/event/expression.rst | 8 - 
.../doc/api_reference/event/hub.rst | 8 - .../doc/api_reference/event/index.rst | 14 - .../doc/api_reference/event/subscriber.rst | 8 - .../doc/api_reference/event/subscription.rst | 8 - .../doc/api_reference/exception.rst | 8 - .../doc/api_reference/formatter.rst | 8 - .../doc/api_reference/index.rst | 20 - .../doc/api_reference/inspection.rst | 8 - .../doc/api_reference/logging.rst | 8 - .../doc/api_reference/operation.rst | 8 - .../doc/api_reference/plugin.rst | 8 - .../doc/api_reference/query.rst | 8 - .../resource_identifier_transformer/base.rst | 10 - .../resource_identifier_transformer/index.rst | 16 - .../doc/api_reference/session.rst | 8 - .../doc/api_reference/structure/base.rst | 8 - .../doc/api_reference/structure/id.rst | 8 - .../doc/api_reference/structure/index.rst | 14 - .../doc/api_reference/structure/origin.rst | 8 - .../doc/api_reference/structure/standard.rst | 8 - .../doc/api_reference/symbol.rst | 8 - .../ftrack-python-api/doc/caching.rst | 175 - .../ftrack-python-api/doc/conf.py | 102 - .../ftrack-python-api/doc/docutils.conf | 2 - .../doc/environment_variables.rst | 56 - .../ftrack-python-api/doc/event_list.rst | 137 - .../example/assignments_and_allocations.rst | 82 - .../doc/example/component.rst | 23 - .../doc/example/custom_attribute.rst | 94 - .../doc/example/encode_media.rst | 53 - .../doc/example/entity_links.rst | 56 - .../ftrack-python-api/doc/example/index.rst | 52 - .../doc/example/invite_user.rst | 31 - .../ftrack-python-api/doc/example/job.rst | 97 - .../doc/example/link_attribute.rst | 55 - .../ftrack-python-api/doc/example/list.rst | 46 - .../manage_custom_attribute_configuration.rst | 320 -- .../doc/example/metadata.rst | 43 - .../ftrack-python-api/doc/example/note.rst | 169 - .../ftrack-python-api/doc/example/project.rst | 65 - .../doc/example/publishing.rst | 73 - .../doc/example/review_session.rst | 87 - .../ftrack-python-api/doc/example/scope.rst | 27 - .../doc/example/security_roles.rst | 73 - 
.../doc/example/sync_ldap_users.rst | 30 - .../doc/example/task_template.rst | 56 - .../doc/example/thumbnail.rst | 71 - .../ftrack-python-api/doc/example/timer.rst | 37 - .../doc/example/web_review.rst | 78 - .../ftrack-python-api/doc/glossary.rst | 76 - .../ftrack-python-api/doc/handling_events.rst | 315 -- .../image/configuring_plugins_directory.png | Bin 7313 -> 0 bytes .../ftrack-python-api/doc/index.rst | 42 - .../ftrack-python-api/doc/installing.rst | 77 - .../ftrack-python-api/doc/introduction.rst | 26 - .../doc/locations/configuring.rst | 87 - .../ftrack-python-api/doc/locations/index.rst | 18 - .../doc/locations/overview.rst | 143 - .../doc/locations/tutorial.rst | 193 - .../ftrack-python-api/doc/querying.rst | 263 - .../ftrack-python-api/doc/release/index.rst | 18 - .../doc/release/migrating_from_old_api.rst | 613 --- .../doc/release/migration.rst | 98 - .../doc/release/release_notes.rst | 1478 ------ .../doc/resource/example_plugin.py | 24 - .../doc/resource/example_plugin_safe.py | 0 .../resource/example_plugin_using_session.py | 37 - .../doc/security_and_authentication.rst | 38 - .../ftrack-python-api/doc/tutorial.rst | 156 - .../doc/understanding_sessions.rst | 281 -- .../doc/working_with_entities.rst | 434 -- .../ftrack-python-api/pytest.ini | 7 - .../resource/plugin/configure_locations.py | 39 - .../resource/plugin/construct_entity_type.py | 46 - .../ftrack-python-api/setup.cfg | 6 - .../python2_vendor/ftrack-python-api/setup.py | 81 - .../ftrack-python-api/source/__init__.py | 1 - .../source/ftrack_api/__init__.py | 32 - .../_centralized_storage_scenario.py | 656 --- .../source/ftrack_api/_python_ntpath.py | 534 --- .../source/ftrack_api/_version.py | 1 - .../source/ftrack_api/_weakref.py | 66 - .../source/ftrack_api/accessor/__init__.py | 2 - .../source/ftrack_api/accessor/base.py | 124 - .../source/ftrack_api/accessor/disk.py | 250 - .../source/ftrack_api/accessor/server.py | 240 - .../source/ftrack_api/attribute.py | 707 --- 
.../source/ftrack_api/cache.py | 579 --- .../source/ftrack_api/collection.py | 507 -- .../source/ftrack_api/data.py | 119 - .../source/ftrack_api/entity/__init__.py | 2 - .../source/ftrack_api/entity/asset_version.py | 91 - .../source/ftrack_api/entity/base.py | 402 -- .../source/ftrack_api/entity/component.py | 74 - .../source/ftrack_api/entity/factory.py | 435 -- .../source/ftrack_api/entity/job.py | 48 - .../source/ftrack_api/entity/location.py | 733 --- .../source/ftrack_api/entity/note.py | 105 - .../ftrack_api/entity/project_schema.py | 94 - .../source/ftrack_api/entity/user.py | 123 - .../source/ftrack_api/event/__init__.py | 2 - .../source/ftrack_api/event/base.py | 85 - .../source/ftrack_api/event/expression.py | 282 -- .../source/ftrack_api/event/hub.py | 1091 ----- .../source/ftrack_api/event/subscriber.py | 27 - .../source/ftrack_api/event/subscription.py | 23 - .../source/ftrack_api/exception.py | 392 -- .../source/ftrack_api/formatter.py | 131 - .../source/ftrack_api/inspection.py | 135 - .../source/ftrack_api/logging.py | 43 - .../source/ftrack_api/operation.py | 115 - .../source/ftrack_api/plugin.py | 121 - .../source/ftrack_api/query.py | 202 - .../__init__.py | 2 - .../resource_identifier_transformer/base.py | 50 - .../source/ftrack_api/session.py | 2515 ---------- .../source/ftrack_api/structure/__init__.py | 2 - .../source/ftrack_api/structure/base.py | 38 - .../source/ftrack_api/structure/entity_id.py | 12 - .../source/ftrack_api/structure/id.py | 91 - .../source/ftrack_api/structure/origin.py | 28 - .../source/ftrack_api/structure/standard.py | 217 - .../source/ftrack_api/symbol.py | 77 - .../test/fixture/media/colour_wheel.mov | Bin 17627 -> 0 bytes .../test/fixture/media/image-resized-10.png | Bin 115 -> 0 bytes .../test/fixture/media/image.png | Bin 883 -> 0 bytes .../fixture/plugin/configure_locations.py | 40 - .../fixture/plugin/construct_entity_type.py | 52 - .../fixture/plugin/count_session_event.py | 41 - 
.../ftrack-python-api/test/unit/__init__.py | 2 - .../test/unit/accessor/__init__.py | 2 - .../test/unit/accessor/test_disk.py | 267 -- .../test/unit/accessor/test_server.py | 41 - .../ftrack-python-api/test/unit/conftest.py | 539 --- .../test/unit/entity/__init__.py | 2 - .../test/unit/entity/test_asset_version.py | 54 - .../test/unit/entity/test_base.py | 14 - .../test/unit/entity/test_component.py | 70 - .../test/unit/entity/test_factory.py | 25 - .../test/unit/entity/test_job.py | 42 - .../test/unit/entity/test_location.py | 516 -- .../test/unit/entity/test_metadata.py | 135 - .../test/unit/entity/test_note.py | 67 - .../test/unit/entity/test_project_schema.py | 64 - .../test/unit/entity/test_scopes.py | 24 - .../test/unit/entity/test_user.py | 49 - .../test/unit/event/__init__.py | 2 - .../unit/event/event_hub_server_heartbeat.py | 92 - .../test/unit/event/test_base.py | 36 - .../test/unit/event/test_expression.py | 174 - .../test/unit/event/test_hub.py | 701 --- .../test/unit/event/test_subscriber.py | 33 - .../test/unit/event/test_subscription.py | 28 - .../__init__.py | 2 - .../test_base.py | 36 - .../test/unit/structure/__init__.py | 2 - .../test/unit/structure/test_base.py | 31 - .../test/unit/structure/test_entity_id.py | 49 - .../test/unit/structure/test_id.py | 115 - .../test/unit/structure/test_origin.py | 33 - .../test/unit/structure/test_standard.py | 309 -- .../test/unit/test_attribute.py | 146 - .../ftrack-python-api/test/unit/test_cache.py | 416 -- .../test/unit/test_collection.py | 574 --- .../test/unit/test_custom_attribute.py | 251 - .../ftrack-python-api/test/unit/test_data.py | 129 - .../test/unit/test_formatter.py | 70 - .../test/unit/test_inspection.py | 101 - .../test/unit/test_operation.py | 79 - .../test/unit/test_package.py | 48 - .../test/unit/test_plugin.py | 192 - .../ftrack-python-api/test/unit/test_query.py | 164 - .../test/unit/test_session.py | 1519 ------ .../ftrack-python-api/test/unit/test_timer.py | 74 - 242 files changed, 
42119 deletions(-) delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py delete mode 
100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py delete mode 100644 
openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py delete mode 100644 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml deleted file mode 100644 index 
c3608357a4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/FUNDING.yml +++ /dev/null @@ -1 +0,0 @@ -open_collective: arrow diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index e4e242ee42..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -name: "🐞 Bug Report" -about: Find a bug? Create a report to help us improve. -title: '' -labels: 'bug' -assignees: '' ---- - - - -## Issue Description - - - -## System Info - -- 🖥 **OS name and version**: -- 🐍 **Python version**: -- 🏹 **Arrow version**: diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md deleted file mode 100644 index 753ed0c620..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/documentation.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -name: "📚 Documentation" -about: Find errors or problems in the docs (https://arrow.readthedocs.io)? -title: '' -labels: 'documentation' -assignees: '' ---- - - - -## Issue Description - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index fcab9213f5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -name: "💡 Feature Request" -about: Have an idea for a new feature or improvement? 
-title: '' -labels: 'enhancement' -assignees: '' ---- - - - -## Feature Request - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md deleted file mode 100644 index 0e07c288af..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/pull_request_template.md +++ /dev/null @@ -1,22 +0,0 @@ -## Pull Request Checklist - -Thank you for taking the time to improve Arrow! Before submitting your pull request, please check all *appropriate* boxes: - - -- [ ] 🧪 Added **tests** for changed code. -- [ ] 🛠️ All tests **pass** when run locally (run `tox` or `make test` to find out!). -- [ ] 🧹 All linting checks **pass** when run locally (run `tox -e lint` or `make lint` to find out!). -- [ ] 📚 Updated **documentation** for changed code. -- [ ] ⏩ Code is **up-to-date** with the `master` branch. - -If you have *any* questions about your code changes or any of the points above, please submit your questions along with the pull request and we will try our best to help! 
- -## Description of Changes - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml deleted file mode 100644 index d800f399c6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.github/workflows/continuous_integration.yml +++ /dev/null @@ -1,123 +0,0 @@ -name: tests - -on: - pull_request: # Run on all pull requests - push: # Run only on pushes to master - branches: - - master - schedule: # Run monthly - - cron: "0 0 1 * *" - -jobs: - test: - name: ${{ matrix.os }} (${{ matrix.python-version }}) - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - python-version: ["pypy3", "2.7", "3.5", "3.6", "3.7", "3.8", "3.9-dev"] - os: [ubuntu-latest, macos-latest, windows-latest] - exclude: - # pypy3 randomly fails on Windows builds - - os: windows-latest - python-version: "pypy3" - - steps: - # Check out latest code - - uses: actions/checkout@v2 - - # Configure pip cache - - name: Cache pip (Linux) - uses: actions/cache@v2 - if: startsWith(runner.os, 'Linux') - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Cache pip (macOS) - uses: actions/cache@v2 - if: startsWith(runner.os, 'macOS') - with: - path: ~/Library/Caches/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - - name: Cache pip (Windows) - uses: actions/cache@v2 - if: startsWith(runner.os, 'Windows') - with: - path: ~\AppData\Local\pip\Cache - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - # Set up Python - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - # Install dependencies 
- - name: Install dependencies - run: | - pip install -U pip setuptools wheel - pip install -U tox tox-gh-actions - - # Run tests - - name: Test with tox - run: tox - - # Upload coverage report - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - with: - file: coverage.xml - - lint: - runs-on: ubuntu-latest - - steps: - # Check out latest code - - uses: actions/checkout@v2 - - # Set up Python - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: "3.8" - - # Configure pip cache - - name: Cache pip - uses: actions/cache@v2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - # Configure pre-commit cache - - name: Cache pre-commit - uses: actions/cache@v2 - with: - path: ~/.cache/pre-commit - key: ${{ runner.os }}-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} - restore-keys: | - ${{ runner.os }}-pre-commit- - - # Install dependencies - - name: Install dependencies - run: | - pip install -U pip setuptools wheel - pip install -U tox - - # Lint code - - name: Lint code - run: tox -e lint - - # Lint docs - - name: Lint docs - run: tox -e docs diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore deleted file mode 100644 index 0448d0cf0c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.gitignore +++ /dev/null @@ -1,211 +0,0 @@ -README.rst.new - -# Small entry point file for debugging tasks -test.py - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script 
from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -local/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# Swap -[._]*.s[a-v][a-z] -[._]*.sw[a-p] -[._]s[a-rt-v][a-z] -[._]ss[a-gi-z] -[._]sw[a-p] - -# Session -Session.vim -Sessionx.vim - -# Temporary -.netrwhist -*~ -# Auto-generated tag files -tags -# Persistent undo -[._]*.un~ - -.idea/ -.vscode/ - -# General -.DS_Store -.AppleDouble -.LSOverride - -# Icon must end with two \r -Icon - - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - -*~ - -# temporary files which can be created if a process still has a handle open of a deleted file -.fuse_hidden* - -# KDE directory preferences -.directory - -# Linux trash folder which might appear on any partition or disk 
-.Trash-* - -# .nfs files are created when an open file is removed but is still being accessed -.nfs* - -# Windows thumbnail cache files -Thumbs.db -Thumbs.db:encryptable -ehthumbs.db -ehthumbs_vista.db - -# Dump file -*.stackdump - -# Folder config file -[Dd]esktop.ini - -# Recycle Bin used on file shares -$RECYCLE.BIN/ - -# Windows Installer files -*.cab -*.msi -*.msix -*.msm -*.msp - -# Windows shortcuts -*.lnk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml deleted file mode 100644 index 1f5128595b..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/.pre-commit-config.yaml +++ /dev/null @@ -1,41 +0,0 @@ -default_language_version: - python: python3 -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: fix-encoding-pragma - exclude: ^arrow/_version.py - - id: requirements-txt-fixer - - id: check-ast - - id: check-yaml - - id: check-case-conflict - - id: check-docstring-first - - id: check-merge-conflict - - id: debug-statements - - repo: https://github.com/timothycrosley/isort - rev: 5.4.2 - hooks: - - id: isort - - repo: https://github.com/asottile/pyupgrade - rev: v2.7.2 - hooks: - - id: pyupgrade - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.6.0 - hooks: - - id: python-no-eval - - id: python-check-blanket-noqa - - id: rst-backticks - - repo: https://github.com/psf/black - rev: 20.8b1 - hooks: - - id: black - args: [--safe, --quiet] - - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.3 - hooks: - - id: flake8 - additional_dependencies: [flake8-bugbear] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst deleted file mode 100644 index 0b55a4522c..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/CHANGELOG.rst +++ /dev/null @@ -1,598 +0,0 @@ -Changelog -========= - -0.17.0 (2020-10-2) -------------------- - -- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. This is the last major release to support Python 2.7 and Python 3.5. -- [NEW] Arrow now properly handles imaginary datetimes during DST shifts. For example: - -..code-block:: python - >>> just_before = arrow.get(2013, 3, 31, 1, 55, tzinfo="Europe/Paris") - >>> just_before.shift(minutes=+10) - - -..code-block:: python - >>> before = arrow.get("2018-03-10 23:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") - >>> after = arrow.get("2018-03-11 04:00:00", "YYYY-MM-DD HH:mm:ss", tzinfo="US/Pacific") - >>> result=[(t, t.to("utc")) for t in arrow.Arrow.range("hour", before, after)] - >>> for r in result: - ... print(r) - ... - (, ) - (, ) - (, ) - (, ) - (, ) - -- [NEW] Added ``humanize`` week granularity translation for Tagalog. -- [CHANGE] Calls to the ``timestamp`` property now emit a ``DeprecationWarning``. In a future release, ``timestamp`` will be changed to a method to align with Python's datetime module. If you would like to continue using the property, please change your code to use the ``int_timestamp`` or ``float_timestamp`` properties instead. -- [CHANGE] Expanded and improved Catalan locale. -- [FIX] Fixed a bug that caused ``Arrow.range()`` to incorrectly cut off ranges in certain scenarios when using month, quarter, or year endings. -- [FIX] Fixed a bug that caused day of week token parsing to be case sensitive. -- [INTERNAL] A number of functions were reordered in arrow.py for better organization and grouping of related methods. This change will have no impact on usage. -- [INTERNAL] A minimum tox version is now enforced for compatibility reasons. Contributors must use tox >3.18.0 going forward. 
- -0.16.0 (2020-08-23) -------------------- - -- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.16.x and 0.17.x releases are the last to support Python 2.7 and 3.5. -- [NEW] Implemented `PEP 495 `_ to handle ambiguous datetimes. This is achieved by the addition of the ``fold`` attribute for Arrow objects. For example: - -.. code-block:: python - - >>> before = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm') - - >>> before.fold - 0 - >>> before.ambiguous - True - >>> after = Arrow(2017, 10, 29, 2, 0, tzinfo='Europe/Stockholm', fold=1) - - >>> after = before.replace(fold=1) - - -- [NEW] Added ``normalize_whitespace`` flag to ``arrow.get``. This is useful for parsing log files and/or any files that may contain inconsistent spacing. For example: - -.. code-block:: python - - >>> arrow.get("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True) - - >>> arrow.get("2013-036 \t 04:05:06Z", normalize_whitespace=True) - - -0.15.8 (2020-07-23) -------------------- - -- [WARN] Arrow will **drop support** for Python 2.7 and 3.5 in the upcoming 1.0.0 release. The 0.15.x, 0.16.x, and 0.17.x releases are the last to support Python 2.7 and 3.5. -- [NEW] Added ``humanize`` week granularity translation for Czech. -- [FIX] ``arrow.get`` will now pick sane defaults when weekdays are passed with particular token combinations, see `#446 `_. -- [INTERNAL] Moved arrow to an organization. The repo can now be found `here `_. -- [INTERNAL] Started issuing deprecation warnings for Python 2.7 and 3.5. -- [INTERNAL] Added Python 3.9 to CI pipeline. - -0.15.7 (2020-06-19) -------------------- - -- [NEW] Added a number of built-in format strings. See the `docs `_ for a complete list of supported formats. For example: - -.. code-block:: python - - >>> arw = arrow.utcnow() - >>> arw.format(arrow.FORMAT_COOKIE) - 'Wednesday, 27-May-2020 10:30:35 UTC' - -- [NEW] Arrow is now fully compatible with Python 3.9 and PyPy3. 
-- [NEW] Added Makefile, tox.ini, and requirements.txt files to the distribution bundle. -- [NEW] Added French Canadian and Swahili locales. -- [NEW] Added ``humanize`` week granularity translation for Hebrew, Greek, Macedonian, Swedish, Slovak. -- [FIX] ms and μs timestamps are now normalized in ``arrow.get()``, ``arrow.fromtimestamp()``, and ``arrow.utcfromtimestamp()``. For example: - -.. code-block:: python - - >>> ts = 1591161115194556 - >>> arw = arrow.get(ts) - - >>> arw.timestamp - 1591161115 - -- [FIX] Refactored and updated Macedonian, Hebrew, Korean, and Portuguese locales. - -0.15.6 (2020-04-29) -------------------- - -- [NEW] Added support for parsing and formatting `ISO 8601 week dates `_ via a new token ``W``, for example: - -.. code-block:: python - - >>> arrow.get("2013-W29-6", "W") - - >>> utc=arrow.utcnow() - >>> utc - - >>> utc.format("W") - '2020-W04-4' - -- [NEW] Formatting with ``x`` token (microseconds) is now possible, for example: - -.. code-block:: python - - >>> dt = arrow.utcnow() - >>> dt.format("x") - '1585669870688329' - >>> dt.format("X") - '1585669870' - -- [NEW] Added ``humanize`` week granularity translation for German, Italian, Polish & Taiwanese locales. -- [FIX] Consolidated and simplified German locales. -- [INTERNAL] Moved testing suite from nosetest/Chai to pytest/pytest-mock. -- [INTERNAL] Converted xunit-style setup and teardown functions in tests to pytest fixtures. -- [INTERNAL] Setup Github Actions for CI alongside Travis. -- [INTERNAL] Help support Arrow's future development by donating to the project on `Open Collective `_. - -0.15.5 (2020-01-03) -------------------- - -- [WARN] Python 2 reached EOL on 2020-01-01. arrow will **drop support** for Python 2 in a future release to be decided (see `#739 `_). -- [NEW] Added bounds parameter to ``span_range``, ``interval`` and ``span`` methods. This allows you to include or exclude the start and end values. 
-- [NEW] ``arrow.get()`` can now create arrow objects from a timestamp with a timezone, for example: - -.. code-block:: python - - >>> arrow.get(1367900664, tzinfo=tz.gettz('US/Pacific')) - - -- [NEW] ``humanize`` can now combine multiple levels of granularity, for example: - -.. code-block:: python - - >>> later140 = arrow.utcnow().shift(seconds=+8400) - >>> later140.humanize(granularity="minute") - 'in 139 minutes' - >>> later140.humanize(granularity=["hour", "minute"]) - 'in 2 hours and 19 minutes' - -- [NEW] Added Hong Kong locale (``zh_hk``). -- [NEW] Added ``humanize`` week granularity translation for Dutch. -- [NEW] Numbers are now displayed when using the seconds granularity in ``humanize``. -- [CHANGE] ``range`` now supports both the singular and plural forms of the ``frames`` argument (e.g. day and days). -- [FIX] Improved parsing of strings that contain punctuation. -- [FIX] Improved behaviour of ``humanize`` when singular seconds are involved. - -0.15.4 (2019-11-02) -------------------- - -- [FIX] Fixed an issue that caused package installs to fail on Conda Forge. - -0.15.3 (2019-11-02) -------------------- - -- [NEW] ``factory.get()`` can now create arrow objects from a ISO calendar tuple, for example: - -.. code-block:: python - - >>> arrow.get((2013, 18, 7)) - - -- [NEW] Added a new token ``x`` to allow parsing of integer timestamps with milliseconds and microseconds. -- [NEW] Formatting now supports escaping of characters using the same syntax as parsing, for example: - -.. code-block:: python - - >>> arw = arrow.now() - >>> fmt = "YYYY-MM-DD h [h] m" - >>> arw.format(fmt) - '2019-11-02 3 h 32' - -- [NEW] Added ``humanize`` week granularity translations for Chinese, Spanish and Vietnamese. -- [CHANGE] Added ``ParserError`` to module exports. -- [FIX] Added support for midnight at end of day. See `#703 `_ for details. -- [INTERNAL] Created Travis build for macOS. -- [INTERNAL] Test parsing and formatting against full timezone database. 
- -0.15.2 (2019-09-14) -------------------- - -- [NEW] Added ``humanize`` week granularity translations for Portuguese and Brazilian Portuguese. -- [NEW] Embedded changelog within docs and added release dates to versions. -- [FIX] Fixed a bug that caused test failures on Windows only, see `#668 `_ for details. - -0.15.1 (2019-09-10) -------------------- - -- [NEW] Added ``humanize`` week granularity translations for Japanese. -- [FIX] Fixed a bug that caused Arrow to fail when passed a negative timestamp string. -- [FIX] Fixed a bug that caused Arrow to fail when passed a datetime object with ``tzinfo`` of type ``StaticTzInfo``. - -0.15.0 (2019-09-08) -------------------- - -- [NEW] Added support for DDD and DDDD ordinal date tokens. The following functionality is now possible: ``arrow.get("1998-045")``, ``arrow.get("1998-45", "YYYY-DDD")``, ``arrow.get("1998-045", "YYYY-DDDD")``. -- [NEW] ISO 8601 basic format for dates and times is now supported (e.g. ``YYYYMMDDTHHmmssZ``). -- [NEW] Added ``humanize`` week granularity translations for French, Russian and Swiss German locales. -- [CHANGE] Timestamps of type ``str`` are no longer supported **without a format string** in the ``arrow.get()`` method. This change was made to support the ISO 8601 basic format and to address bugs such as `#447 `_. - -The following will NOT work in v0.15.0: - -.. code-block:: python - - >>> arrow.get("1565358758") - >>> arrow.get("1565358758.123413") - -The following will work in v0.15.0: - -.. code-block:: python - - >>> arrow.get("1565358758", "X") - >>> arrow.get("1565358758.123413", "X") - >>> arrow.get(1565358758) - >>> arrow.get(1565358758.123413) - -- [CHANGE] When a meridian token (a|A) is passed and no meridians are available for the specified locale (e.g. unsupported or untranslated) a ``ParserError`` is raised. -- [CHANGE] The timestamp token (``X``) will now match float timestamps of type ``str``: ``arrow.get(“1565358758.123415”, “X”)``. 
-- [CHANGE] Strings with leading and/or trailing whitespace will no longer be parsed without a format string. Please see `the docs `_ for ways to handle this. -- [FIX] The timestamp token (``X``) will now only match on strings that **strictly contain integers and floats**, preventing incorrect matches. -- [FIX] Most instances of ``arrow.get()`` returning an incorrect ``Arrow`` object from a partial parsing match have been eliminated. The following issue have been addressed: `#91 `_, `#196 `_, `#396 `_, `#434 `_, `#447 `_, `#456 `_, `#519 `_, `#538 `_, `#560 `_. - -0.14.7 (2019-09-04) -------------------- - -- [CHANGE] ``ArrowParseWarning`` will no longer be printed on every call to ``arrow.get()`` with a datetime string. The purpose of the warning was to start a conversation about the upcoming 0.15.0 changes and we appreciate all the feedback that the community has given us! - -0.14.6 (2019-08-28) -------------------- - -- [NEW] Added support for ``week`` granularity in ``Arrow.humanize()``. For example, ``arrow.utcnow().shift(weeks=-1).humanize(granularity="week")`` outputs "a week ago". This change introduced two new untranslated words, ``week`` and ``weeks``, to all locale dictionaries, so locale contributions are welcome! -- [NEW] Fully translated the Brazilian Portugese locale. -- [CHANGE] Updated the Macedonian locale to inherit from a Slavic base. -- [FIX] Fixed a bug that caused ``arrow.get()`` to ignore tzinfo arguments of type string (e.g. ``arrow.get(tzinfo="Europe/Paris")``). -- [FIX] Fixed a bug that occurred when ``arrow.Arrow()`` was instantiated with a ``pytz`` tzinfo object. -- [FIX] Fixed a bug that caused Arrow to fail when passed a sub-second token, that when rounded, had a value greater than 999999 (e.g. ``arrow.get("2015-01-12T01:13:15.9999995")``). Arrow should now accurately propagate the rounding for large sub-second tokens. - -0.14.5 (2019-08-09) -------------------- - -- [NEW] Added Afrikaans locale. 
-- [CHANGE] Removed deprecated ``replace`` shift functionality. Users looking to pass plural properties to the ``replace`` function to shift values should use ``shift`` instead. -- [FIX] Fixed bug that occurred when ``factory.get()`` was passed a locale kwarg. - -0.14.4 (2019-07-30) -------------------- - -- [FIX] Fixed a regression in 0.14.3 that prevented a tzinfo argument of type string to be passed to the ``get()`` function. Functionality such as ``arrow.get("2019072807", "YYYYMMDDHH", tzinfo="UTC")`` should work as normal again. -- [CHANGE] Moved ``backports.functools_lru_cache`` dependency from ``extra_requires`` to ``install_requires`` for ``Python 2.7`` installs to fix `#495 `_. - -0.14.3 (2019-07-28) -------------------- - -- [NEW] Added full support for Python 3.8. -- [CHANGE] Added warnings for upcoming factory.get() parsing changes in 0.15.0. Please see `#612 `_ for full details. -- [FIX] Extensive refactor and update of documentation. -- [FIX] factory.get() can now construct from kwargs. -- [FIX] Added meridians to Spanish Locale. - -0.14.2 (2019-06-06) -------------------- - -- [CHANGE] Travis CI builds now use tox to lint and run tests. -- [FIX] Fixed UnicodeDecodeError on certain locales (#600). - -0.14.1 (2019-06-06) -------------------- - -- [FIX] Fixed ``ImportError: No module named 'dateutil'`` (#598). - -0.14.0 (2019-06-06) -------------------- - -- [NEW] Added provisional support for Python 3.8. -- [CHANGE] Removed support for EOL Python 3.4. -- [FIX] Updated setup.py with modern Python standards. -- [FIX] Upgraded dependencies to latest versions. -- [FIX] Enabled flake8 and black on travis builds. -- [FIX] Formatted code using black and isort. - -0.13.2 (2019-05-30) -------------------- - -- [NEW] Add is_between method. -- [FIX] Improved humanize behaviour for near zero durations (#416). -- [FIX] Correct humanize behaviour with future days (#541). -- [FIX] Documentation updates. -- [FIX] Improvements to German Locale. 
- -0.13.1 (2019-02-17) -------------------- - -- [NEW] Add support for Python 3.7. -- [CHANGE] Remove deprecation decorators for Arrow.range(), Arrow.span_range() and Arrow.interval(), all now return generators, wrap with list() to get old behavior. -- [FIX] Documentation and docstring updates. - -0.13.0 (2019-01-09) -------------------- - -- [NEW] Added support for Python 3.6. -- [CHANGE] Drop support for Python 2.6/3.3. -- [CHANGE] Return generator instead of list for Arrow.range(), Arrow.span_range() and Arrow.interval(). -- [FIX] Make arrow.get() work with str & tzinfo combo. -- [FIX] Make sure special RegEx characters are escaped in format string. -- [NEW] Added support for ZZZ when formatting. -- [FIX] Stop using datetime.utcnow() in internals, use datetime.now(UTC) instead. -- [FIX] Return NotImplemented instead of TypeError in arrow math internals. -- [NEW] Added Estonian Locale. -- [FIX] Small fixes to Greek locale. -- [FIX] TagalogLocale improvements. -- [FIX] Added test requirements to setup. -- [FIX] Improve docs for get, now and utcnow methods. -- [FIX] Correct typo in depreciation warning. - -0.12.1 ------- - -- [FIX] Allow universal wheels to be generated and reliably installed. -- [FIX] Make humanize respect only_distance when granularity argument is also given. 
- -0.12.0 ------- - -- [FIX] Compatibility fix for Python 2.x - -0.11.0 ------- - -- [FIX] Fix grammar of ArabicLocale -- [NEW] Add Nepali Locale -- [FIX] Fix month name + rename AustriaLocale -> AustrianLocale -- [FIX] Fix typo in Basque Locale -- [FIX] Fix grammar in PortugueseBrazilian locale -- [FIX] Remove pip --user-mirrors flag -- [NEW] Add Indonesian Locale - -0.10.0 ------- - -- [FIX] Fix getattr off by one for quarter -- [FIX] Fix negative offset for UTC -- [FIX] Update arrow.py - -0.9.0 ------ - -- [NEW] Remove duplicate code -- [NEW] Support gnu date iso 8601 -- [NEW] Add support for universal wheels -- [NEW] Slovenian locale -- [NEW] Slovak locale -- [NEW] Romanian locale -- [FIX] respect limit even if end is defined range -- [FIX] Separate replace & shift functions -- [NEW] Added tox -- [FIX] Fix supported Python versions in documentation -- [NEW] Azerbaijani locale added, locale issue fixed in Turkish. -- [FIX] Format ParserError's raise message - -0.8.0 ------ - -- [] - -0.7.1 ------ - -- [NEW] Esperanto locale (batisteo) - -0.7.0 ------ - -- [FIX] Parse localized strings #228 (swistakm) -- [FIX] Modify tzinfo parameter in ``get`` api #221 (bottleimp) -- [FIX] Fix Czech locale (PrehistoricTeam) -- [FIX] Raise TypeError when adding/subtracting non-dates (itsmeolivia) -- [FIX] Fix pytz conversion error (Kudo) -- [FIX] Fix overzealous time truncation in span_range (kdeldycke) -- [NEW] Humanize for time duration #232 (ybrs) -- [NEW] Add Thai locale (sipp11) -- [NEW] Adding Belarusian (be) locale (oire) -- [NEW] Search date in strings (beenje) -- [NEW] Note that arrow's tokens differ from strptime's. (offby1) - -0.6.0 ------ - -- [FIX] Added support for Python 3 -- [FIX] Avoid truncating oversized epoch timestamps. Fixes #216. 
-- [FIX] Fixed month abbreviations for Ukrainian -- [FIX] Fix typo timezone -- [FIX] A couple of dialect fixes and two new languages -- [FIX] Spanish locale: ``Miercoles`` should have acute accent -- [Fix] Fix Finnish grammar -- [FIX] Fix typo in 'Arrow.floor' docstring -- [FIX] Use read() utility to open README -- [FIX] span_range for week frame -- [NEW] Add minimal support for fractional seconds longer than six digits. -- [NEW] Adding locale support for Marathi (mr) -- [NEW] Add count argument to span method -- [NEW] Improved docs - -0.5.1 - 0.5.4 -------------- - -- [FIX] test the behavior of simplejson instead of calling for_json directly (tonyseek) -- [FIX] Add Hebrew Locale (doodyparizada) -- [FIX] Update documentation location (andrewelkins) -- [FIX] Update setup.py Development Status level (andrewelkins) -- [FIX] Case insensitive month match (cshowe) - -0.5.0 ------ - -- [NEW] struct_time addition. (mhworth) -- [NEW] Version grep (eirnym) -- [NEW] Default to ISO 8601 format (emonty) -- [NEW] Raise TypeError on comparison (sniekamp) -- [NEW] Adding Macedonian(mk) locale (krisfremen) -- [FIX] Fix for ISO seconds and fractional seconds (sdispater) (andrewelkins) -- [FIX] Use correct Dutch wording for "hours" (wbolster) -- [FIX] Complete the list of english locales (indorilftw) -- [FIX] Change README to reStructuredText (nyuszika7h) -- [FIX] Parse lower-cased 'h' (tamentis) -- [FIX] Slight modifications to Dutch locale (nvie) - -0.4.4 ------ - -- [NEW] Include the docs in the released tarball -- [NEW] Czech localization Czech localization for Arrow -- [NEW] Add fa_ir to locales -- [FIX] Fixes parsing of time strings with a final Z -- [FIX] Fixes ISO parsing and formatting for fractional seconds -- [FIX] test_fromtimestamp sp -- [FIX] some typos fixed -- [FIX] removed an unused import statement -- [FIX] docs table fix -- [FIX] Issue with specify 'X' template and no template at all to arrow.get -- [FIX] Fix "import" typo in docs/index.rst -- [FIX] Fix unit tests 
for zero passed -- [FIX] Update layout.html -- [FIX] In Norwegian and new Norwegian months and weekdays should not be capitalized -- [FIX] Fixed discrepancy between specifying 'X' to arrow.get and specifying no template - -0.4.3 ------ - -- [NEW] Turkish locale (Emre) -- [NEW] Arabic locale (Mosab Ahmad) -- [NEW] Danish locale (Holmars) -- [NEW] Icelandic locale (Holmars) -- [NEW] Hindi locale (Atmb4u) -- [NEW] Malayalam locale (Atmb4u) -- [NEW] Finnish locale (Stormpat) -- [NEW] Portuguese locale (Danielcorreia) -- [NEW] ``h`` and ``hh`` strings are now supported (Averyonghub) -- [FIX] An incorrect inflection in the Polish locale has been fixed (Avalanchy) -- [FIX] ``arrow.get`` now properly handles ``Date`` (Jaapz) -- [FIX] Tests are now declared in ``setup.py`` and the manifest (Pypingou) -- [FIX] ``__version__`` has been added to ``__init__.py`` (Sametmax) -- [FIX] ISO 8601 strings can be parsed without a separator (Ivandiguisto / Root) -- [FIX] Documentation is now more clear regarding some inputs on ``arrow.get`` (Eriktaubeneck) -- [FIX] Some documentation links have been fixed (Vrutsky) -- [FIX] Error messages for parse errors are now more descriptive (Maciej Albin) -- [FIX] The parser now correctly checks for separators in strings (Mschwager) - -0.4.2 ------ - -- [NEW] Factory ``get`` method now accepts a single ``Arrow`` argument. -- [NEW] Tokens SSSS, SSSSS and SSSSSS are supported in parsing. -- [NEW] ``Arrow`` objects have a ``float_timestamp`` property. 
-- [NEW] Vietnamese locale (Iu1nguoi) -- [NEW] Factory ``get`` method now accepts a list of format strings (Dgilland) -- [NEW] A MANIFEST.in file has been added (Pypingou) -- [NEW] Tests can be run directly from ``setup.py`` (Pypingou) -- [FIX] Arrow docs now list 'day of week' format tokens correctly (Rudolphfroger) -- [FIX] Several issues with the Korean locale have been resolved (Yoloseem) -- [FIX] ``humanize`` now correctly returns unicode (Shvechikov) -- [FIX] ``Arrow`` objects now pickle / unpickle correctly (Yoloseem) - -0.4.1 ------ - -- [NEW] Table / explanation of formatting & parsing tokens in docs -- [NEW] Brazilian locale (Augusto2112) -- [NEW] Dutch locale (OrangeTux) -- [NEW] Italian locale (Pertux) -- [NEW] Austrain locale (LeChewbacca) -- [NEW] Tagalog locale (Marksteve) -- [FIX] Corrected spelling and day numbers in German locale (LeChewbacca) -- [FIX] Factory ``get`` method should now handle unicode strings correctly (Bwells) -- [FIX] Midnight and noon should now parse and format correctly (Bwells) - -0.4.0 ------ - -- [NEW] Format-free ISO 8601 parsing in factory ``get`` method -- [NEW] Support for 'week' / 'weeks' in ``span``, ``range``, ``span_range``, ``floor`` and ``ceil`` -- [NEW] Support for 'weeks' in ``replace`` -- [NEW] Norwegian locale (Martinp) -- [NEW] Japanese locale (CortYuming) -- [FIX] Timezones no longer show the wrong sign when formatted (Bean) -- [FIX] Microseconds are parsed correctly from strings (Bsidhom) -- [FIX] Locale day-of-week is no longer off by one (Cynddl) -- [FIX] Corrected plurals of Ukrainian and Russian nouns (Catchagain) -- [CHANGE] Old 0.1 ``arrow`` module method removed -- [CHANGE] Dropped timestamp support in ``range`` and ``span_range`` (never worked correctly) -- [CHANGE] Dropped parsing of single string as tz string in factory ``get`` method (replaced by ISO 8601) - -0.3.5 ------ - -- [NEW] French locale (Cynddl) -- [NEW] Spanish locale (Slapresta) -- [FIX] Ranges handle multiple timezones correctly 
(Ftobia) - -0.3.4 ------ - -- [FIX] Humanize no longer sometimes returns the wrong month delta -- [FIX] ``__format__`` works correctly with no format string - -0.3.3 ------ - -- [NEW] Python 2.6 support -- [NEW] Initial support for locale-based parsing and formatting -- [NEW] ArrowFactory class, now proxied as the module API -- [NEW] ``factory`` api method to obtain a factory for a custom type -- [FIX] Python 3 support and tests completely ironed out - -0.3.2 ------ - -- [NEW] Python 3+ support - -0.3.1 ------ - -- [FIX] The old ``arrow`` module function handles timestamps correctly as it used to - -0.3.0 ------ - -- [NEW] ``Arrow.replace`` method -- [NEW] Accept timestamps, datetimes and Arrows for datetime inputs, where reasonable -- [FIX] ``range`` and ``span_range`` respect end and limit parameters correctly -- [CHANGE] Arrow objects are no longer mutable -- [CHANGE] Plural attribute name semantics altered: single -> absolute, plural -> relative -- [CHANGE] Plural names no longer supported as properties (e.g. ``arrow.utcnow().years``) - -0.2.1 ------ - -- [NEW] Support for localized humanization -- [NEW] English, Russian, Greek, Korean, Chinese locales - -0.2.0 ------ - -- **REWRITE** -- [NEW] Date parsing -- [NEW] Date formatting -- [NEW] ``floor``, ``ceil`` and ``span`` methods -- [NEW] ``datetime`` interface implementation -- [NEW] ``clone`` method -- [NEW] ``get``, ``now`` and ``utcnow`` API methods - -0.1.6 ------ - -- [NEW] Humanized time deltas -- [NEW] ``__eq__`` implemented -- [FIX] Issues with conversions related to daylight savings time resolved -- [CHANGE] ``__str__`` uses ISO formatting - -0.1.5 ------ - -- **Started tracking changes** -- [NEW] Parsing of ISO-formatted time zone offsets (e.g. 
'+02:30', '-05:00') -- [NEW] Resolved some issues with timestamps and delta / Olson time zones diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE deleted file mode 100644 index 2bef500de7..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright 2019 Chris Smith - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in deleted file mode 100644 index d9955ed96a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -include LICENSE CHANGELOG.rst README.rst Makefile requirements.txt tox.ini -recursive-include tests *.py -recursive-include docs *.py *.rst *.bat Makefile diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile deleted file mode 100644 index f294985dc6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -.PHONY: auto test docs clean - -auto: build38 - -build27: PYTHON_VER = python2.7 -build35: PYTHON_VER = python3.5 -build36: PYTHON_VER = python3.6 -build37: PYTHON_VER = python3.7 -build38: PYTHON_VER = python3.8 -build39: PYTHON_VER = python3.9 - -build27 build35 build36 build37 build38 build39: clean - virtualenv venv --python=$(PYTHON_VER) - . venv/bin/activate; \ - pip install -r requirements.txt; \ - pre-commit install - -test: - rm -f .coverage coverage.xml - . venv/bin/activate; pytest - -lint: - . venv/bin/activate; pre-commit run --all-files --show-diff-on-failure - -docs: - rm -rf docs/_build - . 
venv/bin/activate; cd docs; make html - -clean: clean-dist - rm -rf venv .pytest_cache ./**/__pycache__ - rm -f .coverage coverage.xml ./**/*.pyc - -clean-dist: - rm -rf dist build .egg .eggs arrow.egg-info - -build-dist: - . venv/bin/activate; \ - pip install -U setuptools twine wheel; \ - python setup.py sdist bdist_wheel - -upload-dist: - . venv/bin/activate; twine upload dist/* - -publish: test clean-dist build-dist upload-dist clean-dist diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst deleted file mode 100644 index 69f6c50d81..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/README.rst +++ /dev/null @@ -1,133 +0,0 @@ -Arrow: Better dates & times for Python -====================================== - -.. start-inclusion-marker-do-not-remove - -.. image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master - :alt: Build Status - :target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster - -.. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg - :alt: Coverage - :target: https://codecov.io/gh/arrow-py/arrow - -.. image:: https://img.shields.io/pypi/v/arrow.svg - :alt: PyPI Version - :target: https://pypi.python.org/pypi/arrow - -.. image:: https://img.shields.io/pypi/pyversions/arrow.svg - :alt: Supported Python Versions - :target: https://pypi.python.org/pypi/arrow - -.. image:: https://img.shields.io/pypi/l/arrow.svg - :alt: License - :target: https://pypi.python.org/pypi/arrow - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :alt: Code Style: Black - :target: https://github.com/psf/black - - -**Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. 
It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code. - -Arrow is named after the `arrow of time `_ and is heavily inspired by `moment.js `_ and `requests `_. - -Why use Arrow over built-in modules? ------------------------------------- - -Python's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective: - -- Too many modules: datetime, time, calendar, dateutil, pytz and more -- Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc. -- Timezones and timestamp conversions are verbose and unpleasant -- Timezone naivety is the norm -- Gaps in functionality: ISO 8601 parsing, timespans, humanization - -Features --------- - -- Fully-implemented, drop-in replacement for datetime -- Supports Python 2.7, 3.5, 3.6, 3.7, 3.8 and 3.9 -- Timezone-aware and UTC by default -- Provides super-simple creation options for many common input scenarios -- :code:`shift` method with support for relative offsets, including weeks -- Formats and parses strings automatically -- Wide support for ISO 8601 -- Timezone conversion -- Timestamp available as a property -- Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year -- Humanizes and supports a growing list of contributed locales -- Extensible for your own Arrow-derived types - -Quick Start ------------ - -Installation -~~~~~~~~~~~~ - -To install Arrow, use `pip `_ or `pipenv `_: - -.. code-block:: console - - $ pip install -U arrow - -Example Usage -~~~~~~~~~~~~~ - -.. 
code-block:: python - - >>> import arrow - >>> arrow.get('2013-05-11T21:23:58.970460+07:00') - - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc = utc.shift(hours=-1) - >>> utc - - - >>> local = utc.to('US/Pacific') - >>> local - - - >>> local.timestamp - 1368303838 - - >>> local.format() - '2013-05-11 13:23:58 -07:00' - - >>> local.format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-11 13:23:58 -07:00' - - >>> local.humanize() - 'an hour ago' - - >>> local.humanize(locale='ko_kr') - '1시간 전' - -.. end-inclusion-marker-do-not-remove - -Documentation -------------- - -For full documentation, please visit `arrow.readthedocs.io `_. - -Contributing ------------- - -Contributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing: - -#. Find an issue or feature to tackle on the `issue tracker `_. Issues marked with the `"good first issue" label `_ may be a great place to start! -#. Fork `this repository `_ on GitHub and begin making changes in a branch. -#. Add a few tests to ensure that the bug was fixed or the feature works as expected. -#. Run the entire test suite and linting checks by running one of the following commands: :code:`tox` (if you have `tox `_ installed) **OR** :code:`make build38 && make test && make lint` (if you do not have Python 3.8 installed, replace :code:`build38` with the latest Python version on your system). -#. Submit a pull request and await feedback 😃. - -If you have any questions along the way, feel free to ask them `here `_. - -Support Arrow -------------- - -`Open Collective `_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective `_. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py deleted file mode 100644 index 2883527be8..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -from ._version import __version__ -from .api import get, now, utcnow -from .arrow import Arrow -from .factory import ArrowFactory -from .formatter import ( - FORMAT_ATOM, - FORMAT_COOKIE, - FORMAT_RFC822, - FORMAT_RFC850, - FORMAT_RFC1036, - FORMAT_RFC1123, - FORMAT_RFC2822, - FORMAT_RFC3339, - FORMAT_RSS, - FORMAT_W3C, -) -from .parser import ParserError diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py deleted file mode 100644 index fd86b3ee91..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.17.0" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py deleted file mode 100644 index a6b7be3de2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/api.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Provides the default implementation of :class:`ArrowFactory ` -methods for use as a module API. - -""" - -from __future__ import absolute_import - -from arrow.factory import ArrowFactory - -# internal default factory. 
-_factory = ArrowFactory() - - -def get(*args, **kwargs): - """Calls the default :class:`ArrowFactory ` ``get`` method.""" - - return _factory.get(*args, **kwargs) - - -get.__doc__ = _factory.get.__doc__ - - -def utcnow(): - """Calls the default :class:`ArrowFactory ` ``utcnow`` method.""" - - return _factory.utcnow() - - -utcnow.__doc__ = _factory.utcnow.__doc__ - - -def now(tz=None): - """Calls the default :class:`ArrowFactory ` ``now`` method.""" - - return _factory.now(tz) - - -now.__doc__ = _factory.now.__doc__ - - -def factory(type): - """Returns an :class:`.ArrowFactory` for the specified :class:`Arrow ` - or derived type. - - :param type: the type, :class:`Arrow ` or derived. - - """ - - return ArrowFactory(type) - - -__all__ = ["get", "utcnow", "now", "factory"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py deleted file mode 100644 index 4fe9541789..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/arrow.py +++ /dev/null @@ -1,1584 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Provides the :class:`Arrow ` class, an enhanced ``datetime`` -replacement. - -""" - -from __future__ import absolute_import - -import calendar -import sys -import warnings -from datetime import datetime, timedelta -from datetime import tzinfo as dt_tzinfo -from math import trunc - -from dateutil import tz as dateutil_tz -from dateutil.relativedelta import relativedelta - -from arrow import formatter, locales, parser, util - -if sys.version_info[:2] < (3, 6): # pragma: no cover - with warnings.catch_warnings(): - warnings.simplefilter("default", DeprecationWarning) - warnings.warn( - "Arrow will drop support for Python 2.7 and 3.5 in the upcoming v1.0.0 release. Please upgrade to " - "Python 3.6+ to continue receiving updates for Arrow.", - DeprecationWarning, - ) - - -class Arrow(object): - """An :class:`Arrow ` object. 
- - Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing - additional functionality. - - :param year: the calendar year. - :param month: the calendar month. - :param day: the calendar day. - :param hour: (optional) the hour. Defaults to 0. - :param minute: (optional) the minute, Defaults to 0. - :param second: (optional) the second, Defaults to 0. - :param microsecond: (optional) the microsecond. Defaults to 0. - :param tzinfo: (optional) A timezone expression. Defaults to UTC. - :param fold: (optional) 0 or 1, used to disambiguate repeated times. Defaults to 0. - - .. _tz-expr: - - Recognized timezone expressions: - - - A ``tzinfo`` object. - - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. - - A ``str`` in ISO 8601 style, as in '+07:00'. - - A ``str``, one of the following: 'local', 'utc', 'UTC'. - - Usage:: - - >>> import arrow - >>> arrow.Arrow(2013, 5, 5, 12, 30, 45) - - - """ - - resolution = datetime.resolution - - _ATTRS = ["year", "month", "day", "hour", "minute", "second", "microsecond"] - _ATTRS_PLURAL = ["{}s".format(a) for a in _ATTRS] - _MONTHS_PER_QUARTER = 3 - _SECS_PER_MINUTE = float(60) - _SECS_PER_HOUR = float(60 * 60) - _SECS_PER_DAY = float(60 * 60 * 24) - _SECS_PER_WEEK = float(60 * 60 * 24 * 7) - _SECS_PER_MONTH = float(60 * 60 * 24 * 30.5) - _SECS_PER_YEAR = float(60 * 60 * 24 * 365.25) - - def __init__( - self, - year, - month, - day, - hour=0, - minute=0, - second=0, - microsecond=0, - tzinfo=None, - **kwargs - ): - if tzinfo is None: - tzinfo = dateutil_tz.tzutc() - # detect that tzinfo is a pytz object (issue #626) - elif ( - isinstance(tzinfo, dt_tzinfo) - and hasattr(tzinfo, "localize") - and hasattr(tzinfo, "zone") - and tzinfo.zone - ): - tzinfo = parser.TzinfoParser.parse(tzinfo.zone) - elif util.isstr(tzinfo): - tzinfo = parser.TzinfoParser.parse(tzinfo) - - fold = kwargs.get("fold", 0) - - # use enfold here to cover direct arrow.Arrow init on 2.7/3.5 - 
self._datetime = dateutil_tz.enfold( - datetime(year, month, day, hour, minute, second, microsecond, tzinfo), - fold=fold, - ) - - # factories: single object, both original and from datetime. - - @classmethod - def now(cls, tzinfo=None): - """Constructs an :class:`Arrow ` object, representing "now" in the given - timezone. - - :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. - - Usage:: - - >>> arrow.now('Asia/Baku') - - - """ - - if tzinfo is None: - tzinfo = dateutil_tz.tzlocal() - - dt = datetime.now(tzinfo) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def utcnow(cls): - """Constructs an :class:`Arrow ` object, representing "now" in UTC - time. - - Usage:: - - >>> arrow.utcnow() - - - """ - - dt = datetime.now(dateutil_tz.tzutc()) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def fromtimestamp(cls, timestamp, tzinfo=None): - """Constructs an :class:`Arrow ` object from a timestamp, converted to - the given timezone. - - :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. - :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time. 
- """ - - if tzinfo is None: - tzinfo = dateutil_tz.tzlocal() - elif util.isstr(tzinfo): - tzinfo = parser.TzinfoParser.parse(tzinfo) - - if not util.is_timestamp(timestamp): - raise ValueError( - "The provided timestamp '{}' is invalid.".format(timestamp) - ) - - timestamp = util.normalize_timestamp(float(timestamp)) - dt = datetime.fromtimestamp(timestamp, tzinfo) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def utcfromtimestamp(cls, timestamp): - """Constructs an :class:`Arrow ` object from a timestamp, in UTC time. - - :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either. - - """ - - if not util.is_timestamp(timestamp): - raise ValueError( - "The provided timestamp '{}' is invalid.".format(timestamp) - ) - - timestamp = util.normalize_timestamp(float(timestamp)) - dt = datetime.utcfromtimestamp(timestamp) - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dateutil_tz.tzutc(), - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def fromdatetime(cls, dt, tzinfo=None): - """Constructs an :class:`Arrow ` object from a ``datetime`` and - optional replacement timezone. - - :param dt: the ``datetime`` - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to ``dt``'s - timezone, or UTC if naive. 
- - If you only want to replace the timezone of naive datetimes:: - - >>> dt - datetime.datetime(2013, 5, 5, 0, 0, tzinfo=tzutc()) - >>> arrow.Arrow.fromdatetime(dt, dt.tzinfo or 'US/Pacific') - - - """ - - if tzinfo is None: - if dt.tzinfo is None: - tzinfo = dateutil_tz.tzutc() - else: - tzinfo = dt.tzinfo - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - tzinfo, - fold=getattr(dt, "fold", 0), - ) - - @classmethod - def fromdate(cls, date, tzinfo=None): - """Constructs an :class:`Arrow ` object from a ``date`` and optional - replacement timezone. Time values are set to 0. - - :param date: the ``date`` - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to UTC. - """ - - if tzinfo is None: - tzinfo = dateutil_tz.tzutc() - - return cls(date.year, date.month, date.day, tzinfo=tzinfo) - - @classmethod - def strptime(cls, date_str, fmt, tzinfo=None): - """Constructs an :class:`Arrow ` object from a date string and format, - in the style of ``datetime.strptime``. Optionally replaces the parsed timezone. - - :param date_str: the date string. - :param fmt: the format string. - :param tzinfo: (optional) A :ref:`timezone expression `. Defaults to the parsed - timezone if ``fmt`` contains a timezone directive, otherwise UTC. - - Usage:: - - >>> arrow.Arrow.strptime('20-01-2019 15:49:10', '%d-%m-%Y %H:%M:%S') - - - """ - - dt = datetime.strptime(date_str, fmt) - if tzinfo is None: - tzinfo = dt.tzinfo - - return cls( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - tzinfo, - fold=getattr(dt, "fold", 0), - ) - - # factories: ranges and spans - - @classmethod - def range(cls, frame, start, end=None, tz=None, limit=None): - """Returns an iterator of :class:`Arrow ` objects, representing - points in time between two inputs. - - :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). 
- :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param tz: (optional) A :ref:`timezone expression `. Defaults to - ``start``'s timezone, or UTC if ``start`` is naive. - :param limit: (optional) A maximum number of tuples to return. - - **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to - return the entire range. Call with ``limit`` alone to return a maximum # of results from - the start. Call with both to cap a range at a maximum # of results. - - **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before - iterating. As such, either call with naive objects and ``tz``, or aware objects from the - same timezone and no ``tz``. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - Usage:: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print(repr(r)) - ... - - - - - - - **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned iterator:: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 13, 30) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print(repr(r)) - ... 
- - - - """ - - _, frame_relative, relative_steps = cls._get_frames(frame) - - tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) - - start = cls._get_datetime(start).replace(tzinfo=tzinfo) - end, limit = cls._get_iteration_params(end, limit) - end = cls._get_datetime(end).replace(tzinfo=tzinfo) - - current = cls.fromdatetime(start) - original_day = start.day - day_is_clipped = False - i = 0 - - while current <= end and i < limit: - i += 1 - yield current - - values = [getattr(current, f) for f in cls._ATTRS] - current = cls(*values, tzinfo=tzinfo).shift( - **{frame_relative: relative_steps} - ) - - if frame in ["month", "quarter", "year"] and current.day < original_day: - day_is_clipped = True - - if day_is_clipped and not cls._is_last_day_of_month(current): - current = current.replace(day=original_day) - - def span(self, frame, count=1, bounds="[)"): - """Returns two new :class:`Arrow ` objects, representing the timespan - of the :class:`Arrow ` object in a given timeframe. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param count: (optional) the number of frames to span. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in the span. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '[)' is used. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. 
- - Usage:: - - >>> arrow.utcnow() - - - >>> arrow.utcnow().span('hour') - (, ) - - >>> arrow.utcnow().span('day') - (, ) - - >>> arrow.utcnow().span('day', count=2) - (, ) - - >>> arrow.utcnow().span('day', bounds='[]') - (, ) - - """ - - util.validate_bounds(bounds) - - frame_absolute, frame_relative, relative_steps = self._get_frames(frame) - - if frame_absolute == "week": - attr = "day" - elif frame_absolute == "quarter": - attr = "month" - else: - attr = frame_absolute - - index = self._ATTRS.index(attr) - frames = self._ATTRS[: index + 1] - - values = [getattr(self, f) for f in frames] - - for _ in range(3 - len(values)): - values.append(1) - - floor = self.__class__(*values, tzinfo=self.tzinfo) - - if frame_absolute == "week": - floor = floor.shift(days=-(self.isoweekday() - 1)) - elif frame_absolute == "quarter": - floor = floor.shift(months=-((self.month - 1) % 3)) - - ceil = floor.shift(**{frame_relative: count * relative_steps}) - - if bounds[0] == "(": - floor = floor.shift(microseconds=+1) - - if bounds[1] == ")": - ceil = ceil.shift(microseconds=-1) - - return floor, ceil - - def floor(self, frame): - """Returns a new :class:`Arrow ` object, representing the "floor" - of the timespan of the :class:`Arrow ` object in a given timeframe. - Equivalent to the first element in the 2-tuple returned by - :func:`span `. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). - - Usage:: - - >>> arrow.utcnow().floor('hour') - - """ - - return self.span(frame)[0] - - def ceil(self, frame): - """Returns a new :class:`Arrow ` object, representing the "ceiling" - of the timespan of the :class:`Arrow ` object in a given timeframe. - Equivalent to the second element in the 2-tuple returned by - :func:`span `. - - :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...). 
- - Usage:: - - >>> arrow.utcnow().ceil('hour') - - """ - - return self.span(frame)[1] - - @classmethod - def span_range(cls, frame, start, end, tz=None, limit=None, bounds="[)"): - """Returns an iterator of tuples, each :class:`Arrow ` objects, - representing a series of timespans between two inputs. - - :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param tz: (optional) A :ref:`timezone expression `. Defaults to - ``start``'s timezone, or UTC if ``start`` is naive. - :param limit: (optional) A maximum number of tuples to return. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in each span in the range. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '[)' is used. - - **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to - return the entire range. Call with ``limit`` alone to return a maximum # of results from - the start. Call with both to cap a range at a maximum # of results. - - **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before - iterating. As such, either call with naive objects and ``tz``, or aware objects from the - same timezone and no ``tz``. - - Supported frame values: year, quarter, month, week, day, hour, minute, second. - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - **NOTE**: Unlike Python's ``range``, ``end`` will *always* be included in the returned - iterator of timespans. - - Usage: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.span_range('hour', start, end): - ... print(r) - ... 
- (, ) - (, ) - (, ) - (, ) - (, ) - (, ) - - """ - - tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz) - start = cls.fromdatetime(start, tzinfo).span(frame)[0] - _range = cls.range(frame, start, end, tz, limit) - return (r.span(frame, bounds=bounds) for r in _range) - - @classmethod - def interval(cls, frame, start, end, interval=1, tz=None, bounds="[)"): - """Returns an iterator of tuples, each :class:`Arrow ` objects, - representing a series of intervals between two inputs. - - :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...). - :param start: A datetime expression, the start of the range. - :param end: (optional) A datetime expression, the end of the range. - :param interval: (optional) Time interval for the given time frame. - :param tz: (optional) A timezone expression. Defaults to UTC. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in the intervals. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '[)' is used. - - Supported frame values: year, quarter, month, week, day, hour, minute, second - - Recognized datetime expressions: - - - An :class:`Arrow ` object. - - A ``datetime`` object. - - Recognized timezone expressions: - - - A ``tzinfo`` object. - - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'. - - A ``str`` in ISO 8601 style, as in '+07:00'. - - A ``str``, one of the following: 'local', 'utc', 'UTC'. - - Usage: - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.interval('hour', start, end, 2): - ... print r - ... 
- (, ) - (, ) - (, ) - """ - if interval < 1: - raise ValueError("interval has to be a positive integer") - - spanRange = iter(cls.span_range(frame, start, end, tz, bounds=bounds)) - while True: - try: - intvlStart, intvlEnd = next(spanRange) - for _ in range(interval - 1): - _, intvlEnd = next(spanRange) - yield intvlStart, intvlEnd - except StopIteration: - return - - # representations - - def __repr__(self): - return "<{} [{}]>".format(self.__class__.__name__, self.__str__()) - - def __str__(self): - return self._datetime.isoformat() - - def __format__(self, formatstr): - - if len(formatstr) > 0: - return self.format(formatstr) - - return str(self) - - def __hash__(self): - return self._datetime.__hash__() - - # attributes and properties - - def __getattr__(self, name): - - if name == "week": - return self.isocalendar()[1] - - if name == "quarter": - return int((self.month - 1) / self._MONTHS_PER_QUARTER) + 1 - - if not name.startswith("_"): - value = getattr(self._datetime, name, None) - - if value is not None: - return value - - return object.__getattribute__(self, name) - - @property - def tzinfo(self): - """Gets the ``tzinfo`` of the :class:`Arrow ` object. - - Usage:: - - >>> arw=arrow.utcnow() - >>> arw.tzinfo - tzutc() - - """ - - return self._datetime.tzinfo - - @tzinfo.setter - def tzinfo(self, tzinfo): - """ Sets the ``tzinfo`` of the :class:`Arrow ` object. """ - - self._datetime = self._datetime.replace(tzinfo=tzinfo) - - @property - def datetime(self): - """Returns a datetime representation of the :class:`Arrow ` object. - - Usage:: - - >>> arw=arrow.utcnow() - >>> arw.datetime - datetime.datetime(2019, 1, 24, 16, 35, 27, 276649, tzinfo=tzutc()) - - """ - - return self._datetime - - @property - def naive(self): - """Returns a naive datetime representation of the :class:`Arrow ` - object. 
- - Usage:: - - >>> nairobi = arrow.now('Africa/Nairobi') - >>> nairobi - - >>> nairobi.naive - datetime.datetime(2019, 1, 23, 19, 27, 12, 297999) - - """ - - return self._datetime.replace(tzinfo=None) - - @property - def timestamp(self): - """Returns a timestamp representation of the :class:`Arrow ` object, in - UTC time. - - Usage:: - - >>> arrow.utcnow().timestamp - 1548260567 - - """ - - warnings.warn( - "For compatibility with the datetime.timestamp() method this property will be replaced with a method in " - "the 1.0.0 release, please switch to the .int_timestamp property for identical behaviour as soon as " - "possible.", - DeprecationWarning, - ) - return calendar.timegm(self._datetime.utctimetuple()) - - @property - def int_timestamp(self): - """Returns a timestamp representation of the :class:`Arrow ` object, in - UTC time. - - Usage:: - - >>> arrow.utcnow().int_timestamp - 1548260567 - - """ - - return calendar.timegm(self._datetime.utctimetuple()) - - @property - def float_timestamp(self): - """Returns a floating-point representation of the :class:`Arrow ` - object, in UTC time. - - Usage:: - - >>> arrow.utcnow().float_timestamp - 1548260516.830896 - - """ - - # IDEA get rid of this in 1.0.0 and wrap datetime.timestamp() - # Or for compatibility retain this but make it call the timestamp method - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - return self.timestamp + float(self.microsecond) / 1000000 - - @property - def fold(self): - """ Returns the ``fold`` value of the :class:`Arrow ` object. 
""" - - # in python < 3.6 _datetime will be a _DatetimeWithFold if fold=1 and a datetime with no fold attribute - # otherwise, so we need to return zero to cover the latter case - return getattr(self._datetime, "fold", 0) - - @property - def ambiguous(self): - """ Returns a boolean indicating whether the :class:`Arrow ` object is ambiguous.""" - - return dateutil_tz.datetime_ambiguous(self._datetime) - - @property - def imaginary(self): - """Indicates whether the :class: `Arrow ` object exists in the current timezone.""" - - return not dateutil_tz.datetime_exists(self._datetime) - - # mutation and duplication. - - def clone(self): - """Returns a new :class:`Arrow ` object, cloned from the current one. - - Usage: - - >>> arw = arrow.utcnow() - >>> cloned = arw.clone() - - """ - - return self.fromdatetime(self._datetime) - - def replace(self, **kwargs): - """Returns a new :class:`Arrow ` object with attributes updated - according to inputs. - - Use property names to set their value absolutely:: - - >>> import arrow - >>> arw = arrow.utcnow() - >>> arw - - >>> arw.replace(year=2014, month=6) - - - You can also replace the timezone without conversion, using a - :ref:`timezone expression `:: - - >>> arw.replace(tzinfo=tz.tzlocal()) - - - """ - - absolute_kwargs = {} - - for key, value in kwargs.items(): - - if key in self._ATTRS: - absolute_kwargs[key] = value - elif key in ["week", "quarter"]: - raise AttributeError("setting absolute {} is not supported".format(key)) - elif key not in ["tzinfo", "fold"]: - raise AttributeError('unknown attribute: "{}"'.format(key)) - - current = self._datetime.replace(**absolute_kwargs) - - tzinfo = kwargs.get("tzinfo") - - if tzinfo is not None: - tzinfo = self._get_tzinfo(tzinfo) - current = current.replace(tzinfo=tzinfo) - - fold = kwargs.get("fold") - - # TODO revisit this once we drop support for 2.7/3.5 - if fold is not None: - current = dateutil_tz.enfold(current, fold=fold) - - return self.fromdatetime(current) - - def 
shift(self, **kwargs): - """Returns a new :class:`Arrow ` object with attributes updated - according to inputs. - - Use pluralized property names to relatively shift their current value: - - >>> import arrow - >>> arw = arrow.utcnow() - >>> arw - - >>> arw.shift(years=1, months=-1) - - - Day-of-the-week relative shifting can use either Python's weekday numbers - (Monday = 0, Tuesday = 1 .. Sunday = 6) or using dateutil.relativedelta's - day instances (MO, TU .. SU). When using weekday numbers, the returned - date will always be greater than or equal to the starting date. - - Using the above code (which is a Saturday) and asking it to shift to Saturday: - - >>> arw.shift(weekday=5) - - - While asking for a Monday: - - >>> arw.shift(weekday=0) - - - """ - - relative_kwargs = {} - additional_attrs = ["weeks", "quarters", "weekday"] - - for key, value in kwargs.items(): - - if key in self._ATTRS_PLURAL or key in additional_attrs: - relative_kwargs[key] = value - else: - raise AttributeError( - "Invalid shift time frame. Please select one of the following: {}.".format( - ", ".join(self._ATTRS_PLURAL + additional_attrs) - ) - ) - - # core datetime does not support quarters, translate to months. - relative_kwargs.setdefault("months", 0) - relative_kwargs["months"] += ( - relative_kwargs.pop("quarters", 0) * self._MONTHS_PER_QUARTER - ) - - current = self._datetime + relativedelta(**relative_kwargs) - - if not dateutil_tz.datetime_exists(current): - current = dateutil_tz.resolve_imaginary(current) - - return self.fromdatetime(current) - - def to(self, tz): - """Returns a new :class:`Arrow ` object, converted - to the target timezone. - - :param tz: A :ref:`timezone expression `. 
- - Usage:: - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc.to('US/Pacific') - - - >>> utc.to(tz.tzlocal()) - - - >>> utc.to('-07:00') - - - >>> utc.to('local') - - - >>> utc.to('local').to('utc') - - - """ - - if not isinstance(tz, dt_tzinfo): - tz = parser.TzinfoParser.parse(tz) - - dt = self._datetime.astimezone(tz) - - return self.__class__( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second, - dt.microsecond, - dt.tzinfo, - fold=getattr(dt, "fold", 0), - ) - - # string output and formatting - - def format(self, fmt="YYYY-MM-DD HH:mm:ssZZ", locale="en_us"): - """Returns a string representation of the :class:`Arrow ` object, - formatted according to a format string. - - :param fmt: the format string. - - Usage:: - - >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-09 03:56:47 -00:00' - - >>> arrow.utcnow().format('X') - '1368071882' - - >>> arrow.utcnow().format('MMMM DD, YYYY') - 'May 09, 2013' - - >>> arrow.utcnow().format() - '2013-05-09 03:56:47 -00:00' - - """ - - return formatter.DateTimeFormatter(locale).format(self._datetime, fmt) - - def humanize( - self, other=None, locale="en_us", only_distance=False, granularity="auto" - ): - """Returns a localized, humanized representation of a relative difference in time. - - :param other: (optional) an :class:`Arrow ` or ``datetime`` object. - Defaults to now in the current :class:`Arrow ` object's timezone. - :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en_us'. - :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part. - :param granularity: (optional) defines the precision of the output. 
Set it to strings 'second', 'minute', - 'hour', 'day', 'week', 'month' or 'year' or a list of any combination of these strings - - Usage:: - - >>> earlier = arrow.utcnow().shift(hours=-2) - >>> earlier.humanize() - '2 hours ago' - - >>> later = earlier.shift(hours=4) - >>> later.humanize(earlier) - 'in 4 hours' - - """ - - locale_name = locale - locale = locales.get_locale(locale) - - if other is None: - utc = datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc()) - dt = utc.astimezone(self._datetime.tzinfo) - - elif isinstance(other, Arrow): - dt = other._datetime - - elif isinstance(other, datetime): - if other.tzinfo is None: - dt = other.replace(tzinfo=self._datetime.tzinfo) - else: - dt = other.astimezone(self._datetime.tzinfo) - - else: - raise TypeError( - "Invalid 'other' argument of type '{}'. " - "Argument must be of type None, Arrow, or datetime.".format( - type(other).__name__ - ) - ) - - if isinstance(granularity, list) and len(granularity) == 1: - granularity = granularity[0] - - delta = int(round(util.total_seconds(self._datetime - dt))) - sign = -1 if delta < 0 else 1 - diff = abs(delta) - delta = diff - - try: - if granularity == "auto": - if diff < 10: - return locale.describe("now", only_distance=only_distance) - - if diff < 45: - seconds = sign * delta - return locale.describe( - "seconds", seconds, only_distance=only_distance - ) - - elif diff < 90: - return locale.describe("minute", sign, only_distance=only_distance) - elif diff < 2700: - minutes = sign * int(max(delta / 60, 2)) - return locale.describe( - "minutes", minutes, only_distance=only_distance - ) - - elif diff < 5400: - return locale.describe("hour", sign, only_distance=only_distance) - elif diff < 79200: - hours = sign * int(max(delta / 3600, 2)) - return locale.describe("hours", hours, only_distance=only_distance) - - # anything less than 48 hours should be 1 day - elif diff < 172800: - return locale.describe("day", sign, only_distance=only_distance) - elif diff < 554400: - days = 
sign * int(max(delta / 86400, 2)) - return locale.describe("days", days, only_distance=only_distance) - - elif diff < 907200: - return locale.describe("week", sign, only_distance=only_distance) - elif diff < 2419200: - weeks = sign * int(max(delta / 604800, 2)) - return locale.describe("weeks", weeks, only_distance=only_distance) - - elif diff < 3888000: - return locale.describe("month", sign, only_distance=only_distance) - elif diff < 29808000: - self_months = self._datetime.year * 12 + self._datetime.month - other_months = dt.year * 12 + dt.month - - months = sign * int(max(abs(other_months - self_months), 2)) - - return locale.describe( - "months", months, only_distance=only_distance - ) - - elif diff < 47260800: - return locale.describe("year", sign, only_distance=only_distance) - else: - years = sign * int(max(delta / 31536000, 2)) - return locale.describe("years", years, only_distance=only_distance) - - elif util.isstr(granularity): - if granularity == "second": - delta = sign * delta - if abs(delta) < 2: - return locale.describe("now", only_distance=only_distance) - elif granularity == "minute": - delta = sign * delta / self._SECS_PER_MINUTE - elif granularity == "hour": - delta = sign * delta / self._SECS_PER_HOUR - elif granularity == "day": - delta = sign * delta / self._SECS_PER_DAY - elif granularity == "week": - delta = sign * delta / self._SECS_PER_WEEK - elif granularity == "month": - delta = sign * delta / self._SECS_PER_MONTH - elif granularity == "year": - delta = sign * delta / self._SECS_PER_YEAR - else: - raise AttributeError( - "Invalid level of granularity. 
Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'" - ) - - if trunc(abs(delta)) != 1: - granularity += "s" - return locale.describe(granularity, delta, only_distance=only_distance) - - else: - timeframes = [] - if "year" in granularity: - years = sign * delta / self._SECS_PER_YEAR - delta %= self._SECS_PER_YEAR - timeframes.append(["year", years]) - - if "month" in granularity: - months = sign * delta / self._SECS_PER_MONTH - delta %= self._SECS_PER_MONTH - timeframes.append(["month", months]) - - if "week" in granularity: - weeks = sign * delta / self._SECS_PER_WEEK - delta %= self._SECS_PER_WEEK - timeframes.append(["week", weeks]) - - if "day" in granularity: - days = sign * delta / self._SECS_PER_DAY - delta %= self._SECS_PER_DAY - timeframes.append(["day", days]) - - if "hour" in granularity: - hours = sign * delta / self._SECS_PER_HOUR - delta %= self._SECS_PER_HOUR - timeframes.append(["hour", hours]) - - if "minute" in granularity: - minutes = sign * delta / self._SECS_PER_MINUTE - delta %= self._SECS_PER_MINUTE - timeframes.append(["minute", minutes]) - - if "second" in granularity: - seconds = sign * delta - timeframes.append(["second", seconds]) - - if len(timeframes) < len(granularity): - raise AttributeError( - "Invalid level of granularity. " - "Please select between 'second', 'minute', 'hour', 'day', 'week', 'month' or 'year'." - ) - - for tf in timeframes: - # Make granularity plural if the delta is not equal to 1 - if trunc(abs(tf[1])) != 1: - tf[0] += "s" - return locale.describe_multi(timeframes, only_distance=only_distance) - - except KeyError as e: - raise ValueError( - "Humanization of the {} granularity is not currently translated in the '{}' locale. 
" - "Please consider making a contribution to this locale.".format( - e, locale_name - ) - ) - - # query functions - - def is_between(self, start, end, bounds="()"): - """Returns a boolean denoting whether the specified date and time is between - the start and end dates and times. - - :param start: an :class:`Arrow ` object. - :param end: an :class:`Arrow ` object. - :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies - whether to include or exclude the start and end values in the range. '(' excludes - the start, '[' includes the start, ')' excludes the end, and ']' includes the end. - If the bounds are not specified, the default bound '()' is used. - - Usage:: - - >>> start = arrow.get(datetime(2013, 5, 5, 12, 30, 10)) - >>> end = arrow.get(datetime(2013, 5, 5, 12, 30, 36)) - >>> arrow.get(datetime(2013, 5, 5, 12, 30, 27)).is_between(start, end) - True - - >>> start = arrow.get(datetime(2013, 5, 5)) - >>> end = arrow.get(datetime(2013, 5, 8)) - >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[]') - True - - >>> start = arrow.get(datetime(2013, 5, 5)) - >>> end = arrow.get(datetime(2013, 5, 8)) - >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[)') - False - - """ - - util.validate_bounds(bounds) - - if not isinstance(start, Arrow): - raise TypeError( - "Can't parse start date argument type of '{}'".format(type(start)) - ) - - if not isinstance(end, Arrow): - raise TypeError( - "Can't parse end date argument type of '{}'".format(type(end)) - ) - - include_start = bounds[0] == "[" - include_end = bounds[1] == "]" - - target_timestamp = self.float_timestamp - start_timestamp = start.float_timestamp - end_timestamp = end.float_timestamp - - if include_start and include_end: - return ( - target_timestamp >= start_timestamp - and target_timestamp <= end_timestamp - ) - elif include_start and not include_end: - return ( - target_timestamp >= start_timestamp and target_timestamp < end_timestamp - ) - elif not 
include_start and include_end: - return ( - target_timestamp > start_timestamp and target_timestamp <= end_timestamp - ) - else: - return ( - target_timestamp > start_timestamp and target_timestamp < end_timestamp - ) - - # datetime methods - - def date(self): - """Returns a ``date`` object with the same year, month and day. - - Usage:: - - >>> arrow.utcnow().date() - datetime.date(2019, 1, 23) - - """ - - return self._datetime.date() - - def time(self): - """Returns a ``time`` object with the same hour, minute, second, microsecond. - - Usage:: - - >>> arrow.utcnow().time() - datetime.time(12, 15, 34, 68352) - - """ - - return self._datetime.time() - - def timetz(self): - """Returns a ``time`` object with the same hour, minute, second, microsecond and - tzinfo. - - Usage:: - - >>> arrow.utcnow().timetz() - datetime.time(12, 5, 18, 298893, tzinfo=tzutc()) - - """ - - return self._datetime.timetz() - - def astimezone(self, tz): - """Returns a ``datetime`` object, converted to the specified timezone. - - :param tz: a ``tzinfo`` object. - - Usage:: - - >>> pacific=arrow.now('US/Pacific') - >>> nyc=arrow.now('America/New_York').tzinfo - >>> pacific.astimezone(nyc) - datetime.datetime(2019, 1, 20, 10, 24, 22, 328172, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York')) - - """ - - return self._datetime.astimezone(tz) - - def utcoffset(self): - """Returns a ``timedelta`` object representing the whole number of minutes difference from - UTC time. - - Usage:: - - >>> arrow.now('US/Pacific').utcoffset() - datetime.timedelta(-1, 57600) - - """ - - return self._datetime.utcoffset() - - def dst(self): - """Returns the daylight savings time adjustment. - - Usage:: - - >>> arrow.utcnow().dst() - datetime.timedelta(0) - - """ - - return self._datetime.dst() - - def timetuple(self): - """Returns a ``time.struct_time``, in the current timezone. 
- - Usage:: - - >>> arrow.utcnow().timetuple() - time.struct_time(tm_year=2019, tm_mon=1, tm_mday=20, tm_hour=15, tm_min=17, tm_sec=8, tm_wday=6, tm_yday=20, tm_isdst=0) - - """ - - return self._datetime.timetuple() - - def utctimetuple(self): - """Returns a ``time.struct_time``, in UTC time. - - Usage:: - - >>> arrow.utcnow().utctimetuple() - time.struct_time(tm_year=2019, tm_mon=1, tm_mday=19, tm_hour=21, tm_min=41, tm_sec=7, tm_wday=5, tm_yday=19, tm_isdst=0) - - """ - - return self._datetime.utctimetuple() - - def toordinal(self): - """Returns the proleptic Gregorian ordinal of the date. - - Usage:: - - >>> arrow.utcnow().toordinal() - 737078 - - """ - - return self._datetime.toordinal() - - def weekday(self): - """Returns the day of the week as an integer (0-6). - - Usage:: - - >>> arrow.utcnow().weekday() - 5 - - """ - - return self._datetime.weekday() - - def isoweekday(self): - """Returns the ISO day of the week as an integer (1-7). - - Usage:: - - >>> arrow.utcnow().isoweekday() - 6 - - """ - - return self._datetime.isoweekday() - - def isocalendar(self): - """Returns a 3-tuple, (ISO year, ISO week number, ISO weekday). - - Usage:: - - >>> arrow.utcnow().isocalendar() - (2019, 3, 6) - - """ - - return self._datetime.isocalendar() - - def isoformat(self, sep="T"): - """Returns an ISO 8601 formatted representation of the date and time. - - Usage:: - - >>> arrow.utcnow().isoformat() - '2019-01-19T18:30:52.442118+00:00' - - """ - - return self._datetime.isoformat(sep) - - def ctime(self): - """Returns a ctime formatted representation of the date and time. - - Usage:: - - >>> arrow.utcnow().ctime() - 'Sat Jan 19 18:26:50 2019' - - """ - - return self._datetime.ctime() - - def strftime(self, format): - """Formats in the style of ``datetime.strftime``. - - :param format: the format string. 
- - Usage:: - - >>> arrow.utcnow().strftime('%d-%m-%Y %H:%M:%S') - '23-01-2019 12:28:17' - - """ - - return self._datetime.strftime(format) - - def for_json(self): - """Serializes for the ``for_json`` protocol of simplejson. - - Usage:: - - >>> arrow.utcnow().for_json() - '2019-01-19T18:25:36.760079+00:00' - - """ - - return self.isoformat() - - # math - - def __add__(self, other): - - if isinstance(other, (timedelta, relativedelta)): - return self.fromdatetime(self._datetime + other, self._datetime.tzinfo) - - return NotImplemented - - def __radd__(self, other): - return self.__add__(other) - - def __sub__(self, other): - - if isinstance(other, (timedelta, relativedelta)): - return self.fromdatetime(self._datetime - other, self._datetime.tzinfo) - - elif isinstance(other, datetime): - return self._datetime - other - - elif isinstance(other, Arrow): - return self._datetime - other._datetime - - return NotImplemented - - def __rsub__(self, other): - - if isinstance(other, datetime): - return other - self._datetime - - return NotImplemented - - # comparisons - - def __eq__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return False - - return self._datetime == self._get_datetime(other) - - def __ne__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return True - - return not self.__eq__(other) - - def __gt__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime > self._get_datetime(other) - - def __ge__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime >= self._get_datetime(other) - - def __lt__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime < self._get_datetime(other) - - def __le__(self, other): - - if not isinstance(other, (Arrow, datetime)): - return NotImplemented - - return self._datetime <= self._get_datetime(other) - - def __cmp__(self, 
other): - if sys.version_info[0] < 3: # pragma: no cover - if not isinstance(other, (Arrow, datetime)): - raise TypeError( - "can't compare '{}' to '{}'".format(type(self), type(other)) - ) - - # internal methods - - @staticmethod - def _get_tzinfo(tz_expr): - - if tz_expr is None: - return dateutil_tz.tzutc() - if isinstance(tz_expr, dt_tzinfo): - return tz_expr - else: - try: - return parser.TzinfoParser.parse(tz_expr) - except parser.ParserError: - raise ValueError("'{}' not recognized as a timezone".format(tz_expr)) - - @classmethod - def _get_datetime(cls, expr): - """Get datetime object for a specified expression.""" - if isinstance(expr, Arrow): - return expr.datetime - elif isinstance(expr, datetime): - return expr - elif util.is_timestamp(expr): - timestamp = float(expr) - return cls.utcfromtimestamp(timestamp).datetime - else: - raise ValueError( - "'{}' not recognized as a datetime or timestamp.".format(expr) - ) - - @classmethod - def _get_frames(cls, name): - - if name in cls._ATTRS: - return name, "{}s".format(name), 1 - elif name[-1] == "s" and name[:-1] in cls._ATTRS: - return name[:-1], name, 1 - elif name in ["week", "weeks"]: - return "week", "weeks", 1 - elif name in ["quarter", "quarters"]: - return "quarter", "months", 3 - - supported = ", ".join( - [ - "year(s)", - "month(s)", - "day(s)", - "hour(s)", - "minute(s)", - "second(s)", - "microsecond(s)", - "week(s)", - "quarter(s)", - ] - ) - raise AttributeError( - "range/span over frame {} not supported. 
Supported frames: {}".format( - name, supported - ) - ) - - @classmethod - def _get_iteration_params(cls, end, limit): - - if end is None: - - if limit is None: - raise ValueError("one of 'end' or 'limit' is required") - - return cls.max, limit - - else: - if limit is None: - return end, sys.maxsize - return end, limit - - @staticmethod - def _is_last_day_of_month(date): - return date.day == calendar.monthrange(date.year, date.month)[1] - - -Arrow.min = Arrow.fromdatetime(datetime.min) -Arrow.max = Arrow.fromdatetime(datetime.max) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py deleted file mode 100644 index 81e37b26de..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/constants.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- - -# Output of time.mktime(datetime.max.timetuple()) on macOS -# This value must be hardcoded for compatibility with Windows -# Platform-independent max timestamps are hard to form -# https://stackoverflow.com/q/46133223 -MAX_TIMESTAMP = 253402318799.0 -MAX_TIMESTAMP_MS = MAX_TIMESTAMP * 1000 -MAX_TIMESTAMP_US = MAX_TIMESTAMP * 1000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py deleted file mode 100644 index 05933e8151..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/factory.py +++ /dev/null @@ -1,301 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Implements the :class:`ArrowFactory ` class, -providing factory methods for common :class:`Arrow ` -construction scenarios. 
- -""" - -from __future__ import absolute_import - -import calendar -from datetime import date, datetime -from datetime import tzinfo as dt_tzinfo -from time import struct_time - -from dateutil import tz as dateutil_tz - -from arrow import parser -from arrow.arrow import Arrow -from arrow.util import is_timestamp, iso_to_gregorian, isstr - - -class ArrowFactory(object): - """A factory for generating :class:`Arrow ` objects. - - :param type: (optional) the :class:`Arrow `-based class to construct from. - Defaults to :class:`Arrow `. - - """ - - def __init__(self, type=Arrow): - self.type = type - - def get(self, *args, **kwargs): - """Returns an :class:`Arrow ` object based on flexible inputs. - - :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en_us'. - :param tzinfo: (optional) a :ref:`timezone expression ` or tzinfo object. - Replaces the timezone unless using an input form that is explicitly UTC or specifies - the timezone in a positional argument. Defaults to UTC. - :param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize - redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing. - Defaults to false. - - Usage:: - - >>> import arrow - - **No inputs** to get current UTC time:: - - >>> arrow.get() - - - **None** to also get current UTC time:: - - >>> arrow.get(None) - - - **One** :class:`Arrow ` object, to get a copy. 
- - >>> arw = arrow.utcnow() - >>> arrow.get(arw) - - - **One** ``float`` or ``int``, convertible to a floating-point timestamp, to get - that timestamp in UTC:: - - >>> arrow.get(1367992474.293378) - - - >>> arrow.get(1367992474) - - - **One** ISO 8601-formatted ``str``, to parse it:: - - >>> arrow.get('2013-09-29T01:26:43.830580') - - - **One** ISO 8601-formatted ``str``, in basic format, to parse it:: - - >>> arrow.get('20160413T133656.456289') - - - **One** ``tzinfo``, to get the current time **converted** to that timezone:: - - >>> arrow.get(tz.tzlocal()) - - - **One** naive ``datetime``, to get that datetime in UTC:: - - >>> arrow.get(datetime(2013, 5, 5)) - - - **One** aware ``datetime``, to get that datetime:: - - >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal())) - - - **One** naive ``date``, to get that date in UTC:: - - >>> arrow.get(date(2013, 5, 5)) - - - **One** time.struct time:: - - >>> arrow.get(gmtime(0)) - - - **One** iso calendar ``tuple``, to get that week date in UTC:: - - >>> arrow.get((2013, 18, 7)) - - - **Two** arguments, a naive or aware ``datetime``, and a replacement - :ref:`timezone expression `:: - - >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') - - - **Two** arguments, a naive ``date``, and a replacement - :ref:`timezone expression `:: - - >>> arrow.get(date(2013, 5, 5), 'US/Pacific') - - - **Two** arguments, both ``str``, to parse the first according to the format of the second:: - - >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ') - - - **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try:: - - >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss']) - - - **Three or more** arguments, as for the constructor of a ``datetime``:: - - >>> arrow.get(2013, 5, 5, 12, 30, 45) - - - """ - - arg_count = len(args) - locale = kwargs.pop("locale", "en_us") - tz = kwargs.get("tzinfo", None) - normalize_whitespace = kwargs.pop("normalize_whitespace", False) 
- - # if kwargs given, send to constructor unless only tzinfo provided - if len(kwargs) > 1: - arg_count = 3 - - # tzinfo kwarg is not provided - if len(kwargs) == 1 and tz is None: - arg_count = 3 - - # () -> now, @ utc. - if arg_count == 0: - if isstr(tz): - tz = parser.TzinfoParser.parse(tz) - return self.type.now(tz) - - if isinstance(tz, dt_tzinfo): - return self.type.now(tz) - - return self.type.utcnow() - - if arg_count == 1: - arg = args[0] - - # (None) -> now, @ utc. - if arg is None: - return self.type.utcnow() - - # try (int, float) -> from timestamp with tz - elif not isstr(arg) and is_timestamp(arg): - if tz is None: - # set to UTC by default - tz = dateutil_tz.tzutc() - return self.type.fromtimestamp(arg, tzinfo=tz) - - # (Arrow) -> from the object's datetime. - elif isinstance(arg, Arrow): - return self.type.fromdatetime(arg.datetime) - - # (datetime) -> from datetime. - elif isinstance(arg, datetime): - return self.type.fromdatetime(arg) - - # (date) -> from date. - elif isinstance(arg, date): - return self.type.fromdate(arg) - - # (tzinfo) -> now, @ tzinfo. - elif isinstance(arg, dt_tzinfo): - return self.type.now(arg) - - # (str) -> parse. - elif isstr(arg): - dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace) - return self.type.fromdatetime(dt, tz) - - # (struct_time) -> from struct_time - elif isinstance(arg, struct_time): - return self.type.utcfromtimestamp(calendar.timegm(arg)) - - # (iso calendar) -> convert then from date - elif isinstance(arg, tuple) and len(arg) == 3: - dt = iso_to_gregorian(*arg) - return self.type.fromdate(dt) - - else: - raise TypeError( - "Can't parse single argument of type '{}'".format(type(arg)) - ) - - elif arg_count == 2: - - arg_1, arg_2 = args[0], args[1] - - if isinstance(arg_1, datetime): - - # (datetime, tzinfo/str) -> fromdatetime replace tzinfo. 
- if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): - return self.type.fromdatetime(arg_1, arg_2) - else: - raise TypeError( - "Can't parse two arguments of types 'datetime', '{}'".format( - type(arg_2) - ) - ) - - elif isinstance(arg_1, date): - - # (date, tzinfo/str) -> fromdate replace tzinfo. - if isinstance(arg_2, dt_tzinfo) or isstr(arg_2): - return self.type.fromdate(arg_1, tzinfo=arg_2) - else: - raise TypeError( - "Can't parse two arguments of types 'date', '{}'".format( - type(arg_2) - ) - ) - - # (str, format) -> parse. - elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)): - dt = parser.DateTimeParser(locale).parse( - args[0], args[1], normalize_whitespace - ) - return self.type.fromdatetime(dt, tzinfo=tz) - - else: - raise TypeError( - "Can't parse two arguments of types '{}' and '{}'".format( - type(arg_1), type(arg_2) - ) - ) - - # 3+ args -> datetime-like via constructor. - else: - return self.type(*args, **kwargs) - - def utcnow(self): - """Returns an :class:`Arrow ` object, representing "now" in UTC time. - - Usage:: - - >>> import arrow - >>> arrow.utcnow() - - """ - - return self.type.utcnow() - - def now(self, tz=None): - """Returns an :class:`Arrow ` object, representing "now" in the given - timezone. - - :param tz: (optional) A :ref:`timezone expression `. Defaults to local time. 
- - Usage:: - - >>> import arrow - >>> arrow.now() - - - >>> arrow.now('US/Pacific') - - - >>> arrow.now('+02:00') - - - >>> arrow.now('local') - - """ - - if tz is None: - tz = dateutil_tz.tzlocal() - elif not isinstance(tz, dt_tzinfo): - tz = parser.TzinfoParser.parse(tz) - - return self.type.now(tz) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py deleted file mode 100644 index 9f9d7a44da..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/formatter.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, division - -import calendar -import re - -from dateutil import tz as dateutil_tz - -from arrow import locales, util - -FORMAT_ATOM = "YYYY-MM-DD HH:mm:ssZZ" -FORMAT_COOKIE = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ" -FORMAT_RFC822 = "ddd, DD MMM YY HH:mm:ss Z" -FORMAT_RFC850 = "dddd, DD-MMM-YY HH:mm:ss ZZZ" -FORMAT_RFC1036 = "ddd, DD MMM YY HH:mm:ss Z" -FORMAT_RFC1123 = "ddd, DD MMM YYYY HH:mm:ss Z" -FORMAT_RFC2822 = "ddd, DD MMM YYYY HH:mm:ss Z" -FORMAT_RFC3339 = "YYYY-MM-DD HH:mm:ssZZ" -FORMAT_RSS = "ddd, DD MMM YYYY HH:mm:ss Z" -FORMAT_W3C = "YYYY-MM-DD HH:mm:ssZZ" - - -class DateTimeFormatter(object): - - # This pattern matches characters enclosed in square brackets are matched as - # an atomic group. 
For more info on atomic groups and how to they are - # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578 - - _FORMAT_RE = re.compile( - r"(\[(?:(?=(?P[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)" - ) - - def __init__(self, locale="en_us"): - - self.locale = locales.get_locale(locale) - - def format(cls, dt, fmt): - - return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt) - - def _format_token(self, dt, token): - - if token and token.startswith("[") and token.endswith("]"): - return token[1:-1] - - if token == "YYYY": - return self.locale.year_full(dt.year) - if token == "YY": - return self.locale.year_abbreviation(dt.year) - - if token == "MMMM": - return self.locale.month_name(dt.month) - if token == "MMM": - return self.locale.month_abbreviation(dt.month) - if token == "MM": - return "{:02d}".format(dt.month) - if token == "M": - return str(dt.month) - - if token == "DDDD": - return "{:03d}".format(dt.timetuple().tm_yday) - if token == "DDD": - return str(dt.timetuple().tm_yday) - if token == "DD": - return "{:02d}".format(dt.day) - if token == "D": - return str(dt.day) - - if token == "Do": - return self.locale.ordinal_number(dt.day) - - if token == "dddd": - return self.locale.day_name(dt.isoweekday()) - if token == "ddd": - return self.locale.day_abbreviation(dt.isoweekday()) - if token == "d": - return str(dt.isoweekday()) - - if token == "HH": - return "{:02d}".format(dt.hour) - if token == "H": - return str(dt.hour) - if token == "hh": - return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) - if token == "h": - return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)) - - if token == "mm": - return "{:02d}".format(dt.minute) - if token == "m": - return str(dt.minute) - - if token == "ss": - return "{:02d}".format(dt.second) - if token == "s": - return str(dt.second) - - if token == "SSSSSS": - return 
str("{:06d}".format(int(dt.microsecond))) - if token == "SSSSS": - return str("{:05d}".format(int(dt.microsecond / 10))) - if token == "SSSS": - return str("{:04d}".format(int(dt.microsecond / 100))) - if token == "SSS": - return str("{:03d}".format(int(dt.microsecond / 1000))) - if token == "SS": - return str("{:02d}".format(int(dt.microsecond / 10000))) - if token == "S": - return str(int(dt.microsecond / 100000)) - - if token == "X": - # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 - return str(calendar.timegm(dt.utctimetuple())) - - if token == "x": - # TODO: replace with a call to dt.timestamp() when we drop Python 2.7 - ts = calendar.timegm(dt.utctimetuple()) + (dt.microsecond / 1000000) - return str(int(ts * 1000000)) - - if token == "ZZZ": - return dt.tzname() - - if token in ["ZZ", "Z"]: - separator = ":" if token == "ZZ" else "" - tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo - total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60) - - sign = "+" if total_minutes >= 0 else "-" - total_minutes = abs(total_minutes) - hour, minute = divmod(total_minutes, 60) - - return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute) - - if token in ("a", "A"): - return self.locale.meridian(dt.hour, token) - - if token == "W": - year, week, day = dt.isocalendar() - return "{}-W{:02d}-{}".format(year, week, day) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py deleted file mode 100644 index 6833da5a78..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/locales.py +++ /dev/null @@ -1,4267 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import inspect -import sys -from math import trunc - - -def get_locale(name): - """Returns an appropriate :class:`Locale ` - corresponding to an inpute locale name. 
- - :param name: the name of the locale. - - """ - - locale_cls = _locales.get(name.lower()) - - if locale_cls is None: - raise ValueError("Unsupported locale '{}'".format(name)) - - return locale_cls() - - -def get_locale_by_class_name(name): - """Returns an appropriate :class:`Locale ` - corresponding to an locale class name. - - :param name: the name of the locale class. - - """ - locale_cls = globals().get(name) - - if locale_cls is None: - raise ValueError("Unsupported locale '{}'".format(name)) - - return locale_cls() - - -# base locale type. - - -class Locale(object): - """ Represents locale-specific data and functionality. """ - - names = [] - - timeframes = { - "now": "", - "second": "", - "seconds": "", - "minute": "", - "minutes": "", - "hour": "", - "hours": "", - "day": "", - "days": "", - "week": "", - "weeks": "", - "month": "", - "months": "", - "year": "", - "years": "", - } - - meridians = {"am": "", "pm": "", "AM": "", "PM": ""} - - past = None - future = None - and_word = None - - month_names = [] - month_abbreviations = [] - - day_names = [] - day_abbreviations = [] - - ordinal_day_re = r"(\d+)" - - def __init__(self): - - self._month_name_to_ordinal = None - - def describe(self, timeframe, delta=0, only_distance=False): - """Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. - :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - """ - - humanized = self._format_timeframe(timeframe, delta) - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - def describe_multi(self, timeframes, only_distance=False): - """Describes a delta within multiple timeframes in plain language. - - :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
- :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords - """ - - humanized = "" - for index, (timeframe, delta) in enumerate(timeframes): - humanized += self._format_timeframe(timeframe, delta) - if index == len(timeframes) - 2 and self.and_word: - humanized += " " + self.and_word + " " - elif index < len(timeframes) - 1: - humanized += " " - - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - def day_name(self, day): - """Returns the day name for a specified day of the week. - - :param day: the ``int`` day of the week (1-7). - - """ - - return self.day_names[day] - - def day_abbreviation(self, day): - """Returns the day abbreviation for a specified day of the week. - - :param day: the ``int`` day of the week (1-7). - - """ - - return self.day_abbreviations[day] - - def month_name(self, month): - """Returns the month name for a specified month of the year. - - :param month: the ``int`` month of the year (1-12). - - """ - - return self.month_names[month] - - def month_abbreviation(self, month): - """Returns the month abbreviation for a specified month of the year. - - :param month: the ``int`` month of the year (1-12). - - """ - - return self.month_abbreviations[month] - - def month_number(self, name): - """Returns the month number for a month specified by name or abbreviation. - - :param name: the month name or abbreviation. 
- - """ - - if self._month_name_to_ordinal is None: - self._month_name_to_ordinal = self._name_to_ordinal(self.month_names) - self._month_name_to_ordinal.update( - self._name_to_ordinal(self.month_abbreviations) - ) - - return self._month_name_to_ordinal.get(name) - - def year_full(self, year): - """Returns the year for specific locale if available - - :param name: the ``int`` year (4-digit) - """ - return "{:04d}".format(year) - - def year_abbreviation(self, year): - """Returns the year for specific locale if available - - :param name: the ``int`` year (4-digit) - """ - return "{:04d}".format(year)[2:] - - def meridian(self, hour, token): - """Returns the meridian indicator for a specified hour and format token. - - :param hour: the ``int`` hour of the day. - :param token: the format token. - """ - - if token == "a": - return self.meridians["am"] if hour < 12 else self.meridians["pm"] - if token == "A": - return self.meridians["AM"] if hour < 12 else self.meridians["PM"] - - def ordinal_number(self, n): - """Returns the ordinal format of a given integer - - :param n: an integer - """ - return self._ordinal_number(n) - - def _ordinal_number(self, n): - return "{}".format(n) - - def _name_to_ordinal(self, lst): - return dict(map(lambda i: (i[1].lower(), i[0] + 1), enumerate(lst[1:]))) - - def _format_timeframe(self, timeframe, delta): - return self.timeframes[timeframe].format(trunc(abs(delta))) - - def _format_relative(self, humanized, timeframe, delta): - - if timeframe == "now": - return humanized - - direction = self.past if delta < 0 else self.future - - return direction.format(humanized) - - -# base locale type implementations. 
- - -class EnglishLocale(Locale): - - names = [ - "en", - "en_us", - "en_gb", - "en_au", - "en_be", - "en_jp", - "en_za", - "en_ca", - "en_ph", - ] - - past = "{0} ago" - future = "in {0}" - and_word = "and" - - timeframes = { - "now": "just now", - "second": "a second", - "seconds": "{0} seconds", - "minute": "a minute", - "minutes": "{0} minutes", - "hour": "an hour", - "hours": "{0} hours", - "day": "a day", - "days": "{0} days", - "week": "a week", - "weeks": "{0} weeks", - "month": "a month", - "months": "{0} months", - "year": "a year", - "years": "{0} years", - } - - meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} - - month_names = [ - "", - "January", - "February", - "March", - "April", - "May", - "June", - "July", - "August", - "September", - "October", - "November", - "December", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ] - - day_names = [ - "", - "Monday", - "Tuesday", - "Wednesday", - "Thursday", - "Friday", - "Saturday", - "Sunday", - ] - day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] - - ordinal_day_re = r"((?P[2-3]?1(?=st)|[2-3]?2(?=nd)|[2-3]?3(?=rd)|[1-3]?[04-9](?=th)|1[1-3](?=th))(st|nd|rd|th))" - - def _ordinal_number(self, n): - if n % 100 not in (11, 12, 13): - remainder = abs(n) % 10 - if remainder == 1: - return "{}st".format(n) - elif remainder == 2: - return "{}nd".format(n) - elif remainder == 3: - return "{}rd".format(n) - return "{}th".format(n) - - def describe(self, timeframe, delta=0, only_distance=False): - """Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. 
- :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - """ - - humanized = super(EnglishLocale, self).describe(timeframe, delta, only_distance) - if only_distance and timeframe == "now": - humanized = "instantly" - - return humanized - - -class ItalianLocale(Locale): - names = ["it", "it_it"] - past = "{0} fa" - future = "tra {0}" - and_word = "e" - - timeframes = { - "now": "adesso", - "second": "un secondo", - "seconds": "{0} qualche secondo", - "minute": "un minuto", - "minutes": "{0} minuti", - "hour": "un'ora", - "hours": "{0} ore", - "day": "un giorno", - "days": "{0} giorni", - "week": "una settimana,", - "weeks": "{0} settimane", - "month": "un mese", - "months": "{0} mesi", - "year": "un anno", - "years": "{0} anni", - } - - month_names = [ - "", - "gennaio", - "febbraio", - "marzo", - "aprile", - "maggio", - "giugno", - "luglio", - "agosto", - "settembre", - "ottobre", - "novembre", - "dicembre", - ] - month_abbreviations = [ - "", - "gen", - "feb", - "mar", - "apr", - "mag", - "giu", - "lug", - "ago", - "set", - "ott", - "nov", - "dic", - ] - - day_names = [ - "", - "lunedì", - "martedì", - "mercoledì", - "giovedì", - "venerdì", - "sabato", - "domenica", - ] - day_abbreviations = ["", "lun", "mar", "mer", "gio", "ven", "sab", "dom"] - - ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" - - def _ordinal_number(self, n): - return "{}º".format(n) - - -class SpanishLocale(Locale): - names = ["es", "es_es"] - past = "hace {0}" - future = "en {0}" - and_word = "y" - - timeframes = { - "now": "ahora", - "second": "un segundo", - "seconds": "{0} segundos", - "minute": "un minuto", - "minutes": "{0} minutos", - "hour": "una hora", - "hours": "{0} horas", - "day": "un día", - "days": "{0} días", - "week": "una semana", - "weeks": "{0} semanas", - "month": "un mes", - "months": "{0} meses", - "year": "un año", - "years": "{0} años", - } - - meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} - - month_names = [ - "", 
- "enero", - "febrero", - "marzo", - "abril", - "mayo", - "junio", - "julio", - "agosto", - "septiembre", - "octubre", - "noviembre", - "diciembre", - ] - month_abbreviations = [ - "", - "ene", - "feb", - "mar", - "abr", - "may", - "jun", - "jul", - "ago", - "sep", - "oct", - "nov", - "dic", - ] - - day_names = [ - "", - "lunes", - "martes", - "miércoles", - "jueves", - "viernes", - "sábado", - "domingo", - ] - day_abbreviations = ["", "lun", "mar", "mie", "jue", "vie", "sab", "dom"] - - ordinal_day_re = r"((?P[1-3]?[0-9](?=[ºª]))[ºª])" - - def _ordinal_number(self, n): - return "{}º".format(n) - - -class FrenchBaseLocale(Locale): - - past = "il y a {0}" - future = "dans {0}" - and_word = "et" - - timeframes = { - "now": "maintenant", - "second": "une seconde", - "seconds": "{0} quelques secondes", - "minute": "une minute", - "minutes": "{0} minutes", - "hour": "une heure", - "hours": "{0} heures", - "day": "un jour", - "days": "{0} jours", - "week": "une semaine", - "weeks": "{0} semaines", - "month": "un mois", - "months": "{0} mois", - "year": "un an", - "years": "{0} ans", - } - - month_names = [ - "", - "janvier", - "février", - "mars", - "avril", - "mai", - "juin", - "juillet", - "août", - "septembre", - "octobre", - "novembre", - "décembre", - ] - - day_names = [ - "", - "lundi", - "mardi", - "mercredi", - "jeudi", - "vendredi", - "samedi", - "dimanche", - ] - day_abbreviations = ["", "lun", "mar", "mer", "jeu", "ven", "sam", "dim"] - - ordinal_day_re = ( - r"((?P\b1(?=er\b)|[1-3]?[02-9](?=e\b)|[1-3]1(?=e\b))(er|e)\b)" - ) - - def _ordinal_number(self, n): - if abs(n) == 1: - return "{}er".format(n) - return "{}e".format(n) - - -class FrenchLocale(FrenchBaseLocale, Locale): - - names = ["fr", "fr_fr"] - - month_abbreviations = [ - "", - "janv", - "févr", - "mars", - "avr", - "mai", - "juin", - "juil", - "août", - "sept", - "oct", - "nov", - "déc", - ] - - -class FrenchCanadianLocale(FrenchBaseLocale, Locale): - - names = ["fr_ca"] - - month_abbreviations = [ 
- "", - "janv", - "févr", - "mars", - "avr", - "mai", - "juin", - "juill", - "août", - "sept", - "oct", - "nov", - "déc", - ] - - -class GreekLocale(Locale): - - names = ["el", "el_gr"] - - past = "{0} πριν" - future = "σε {0}" - and_word = "και" - - timeframes = { - "now": "τώρα", - "second": "ένα δεύτερο", - "seconds": "{0} δευτερόλεπτα", - "minute": "ένα λεπτό", - "minutes": "{0} λεπτά", - "hour": "μία ώρα", - "hours": "{0} ώρες", - "day": "μία μέρα", - "days": "{0} μέρες", - "month": "ένα μήνα", - "months": "{0} μήνες", - "year": "ένα χρόνο", - "years": "{0} χρόνια", - } - - month_names = [ - "", - "Ιανουαρίου", - "Φεβρουαρίου", - "Μαρτίου", - "Απριλίου", - "Μαΐου", - "Ιουνίου", - "Ιουλίου", - "Αυγούστου", - "Σεπτεμβρίου", - "Οκτωβρίου", - "Νοεμβρίου", - "Δεκεμβρίου", - ] - month_abbreviations = [ - "", - "Ιαν", - "Φεβ", - "Μαρ", - "Απρ", - "Μαϊ", - "Ιον", - "Ιολ", - "Αυγ", - "Σεπ", - "Οκτ", - "Νοε", - "Δεκ", - ] - - day_names = [ - "", - "Δευτέρα", - "Τρίτη", - "Τετάρτη", - "Πέμπτη", - "Παρασκευή", - "Σάββατο", - "Κυριακή", - ] - day_abbreviations = ["", "Δευ", "Τρι", "Τετ", "Πεμ", "Παρ", "Σαβ", "Κυρ"] - - -class JapaneseLocale(Locale): - - names = ["ja", "ja_jp"] - - past = "{0}前" - future = "{0}後" - - timeframes = { - "now": "現在", - "second": "二番目の", - "seconds": "{0}数秒", - "minute": "1分", - "minutes": "{0}分", - "hour": "1時間", - "hours": "{0}時間", - "day": "1日", - "days": "{0}日", - "week": "1週間", - "weeks": "{0}週間", - "month": "1ヶ月", - "months": "{0}ヶ月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "1月", - "2月", - "3月", - "4月", - "5月", - "6月", - "7月", - "8月", - "9月", - "10月", - "11月", - "12月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "月曜日", "火曜日", "水曜日", "木曜日", "金曜日", "土曜日", "日曜日"] - day_abbreviations = ["", "月", "火", "水", "木", "金", "土", "日"] - - -class SwedishLocale(Locale): - - names = ["sv", "sv_se"] - - past = "för {0} 
sen" - future = "om {0}" - and_word = "och" - - timeframes = { - "now": "just nu", - "second": "en sekund", - "seconds": "{0} några sekunder", - "minute": "en minut", - "minutes": "{0} minuter", - "hour": "en timme", - "hours": "{0} timmar", - "day": "en dag", - "days": "{0} dagar", - "week": "en vecka", - "weeks": "{0} veckor", - "month": "en månad", - "months": "{0} månader", - "year": "ett år", - "years": "{0} år", - } - - month_names = [ - "", - "januari", - "februari", - "mars", - "april", - "maj", - "juni", - "juli", - "augusti", - "september", - "oktober", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maj", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "måndag", - "tisdag", - "onsdag", - "torsdag", - "fredag", - "lördag", - "söndag", - ] - day_abbreviations = ["", "mån", "tis", "ons", "tor", "fre", "lör", "sön"] - - -class FinnishLocale(Locale): - - names = ["fi", "fi_fi"] - - # The finnish grammar is very complex, and its hard to convert - # 1-to-1 to something like English. 
- - past = "{0} sitten" - future = "{0} kuluttua" - - timeframes = { - "now": ["juuri nyt", "juuri nyt"], - "second": ["sekunti", "sekunti"], - "seconds": ["{0} muutama sekunti", "{0} muutaman sekunnin"], - "minute": ["minuutti", "minuutin"], - "minutes": ["{0} minuuttia", "{0} minuutin"], - "hour": ["tunti", "tunnin"], - "hours": ["{0} tuntia", "{0} tunnin"], - "day": ["päivä", "päivä"], - "days": ["{0} päivää", "{0} päivän"], - "month": ["kuukausi", "kuukauden"], - "months": ["{0} kuukautta", "{0} kuukauden"], - "year": ["vuosi", "vuoden"], - "years": ["{0} vuotta", "{0} vuoden"], - } - - # Months and days are lowercase in Finnish - month_names = [ - "", - "tammikuu", - "helmikuu", - "maaliskuu", - "huhtikuu", - "toukokuu", - "kesäkuu", - "heinäkuu", - "elokuu", - "syyskuu", - "lokakuu", - "marraskuu", - "joulukuu", - ] - - month_abbreviations = [ - "", - "tammi", - "helmi", - "maalis", - "huhti", - "touko", - "kesä", - "heinä", - "elo", - "syys", - "loka", - "marras", - "joulu", - ] - - day_names = [ - "", - "maanantai", - "tiistai", - "keskiviikko", - "torstai", - "perjantai", - "lauantai", - "sunnuntai", - ] - - day_abbreviations = ["", "ma", "ti", "ke", "to", "pe", "la", "su"] - - def _format_timeframe(self, timeframe, delta): - return ( - self.timeframes[timeframe][0].format(abs(delta)), - self.timeframes[timeframe][1].format(abs(delta)), - ) - - def _format_relative(self, humanized, timeframe, delta): - if timeframe == "now": - return humanized[0] - - direction = self.past if delta < 0 else self.future - which = 0 if delta < 0 else 1 - - return direction.format(humanized[which]) - - def _ordinal_number(self, n): - return "{}.".format(n) - - -class ChineseCNLocale(Locale): - - names = ["zh", "zh_cn"] - - past = "{0}前" - future = "{0}后" - - timeframes = { - "now": "刚才", - "second": "一秒", - "seconds": "{0}秒", - "minute": "1分钟", - "minutes": "{0}分钟", - "hour": "1小时", - "hours": "{0}小时", - "day": "1天", - "days": "{0}天", - "week": "一周", - "weeks": "{0}周", - 
"month": "1个月", - "months": "{0}个月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "一月", - "二月", - "三月", - "四月", - "五月", - "六月", - "七月", - "八月", - "九月", - "十月", - "十一月", - "十二月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] - day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] - - -class ChineseTWLocale(Locale): - - names = ["zh_tw"] - - past = "{0}前" - future = "{0}後" - and_word = "和" - - timeframes = { - "now": "剛才", - "second": "1秒", - "seconds": "{0}秒", - "minute": "1分鐘", - "minutes": "{0}分鐘", - "hour": "1小時", - "hours": "{0}小時", - "day": "1天", - "days": "{0}天", - "week": "1週", - "weeks": "{0}週", - "month": "1個月", - "months": "{0}個月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "1月", - "2月", - "3月", - "4月", - "5月", - "6月", - "7月", - "8月", - "9月", - "10月", - "11月", - "12月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "週一", "週二", "週三", "週四", "週五", "週六", "週日"] - day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] - - -class HongKongLocale(Locale): - - names = ["zh_hk"] - - past = "{0}前" - future = "{0}後" - - timeframes = { - "now": "剛才", - "second": "1秒", - "seconds": "{0}秒", - "minute": "1分鐘", - "minutes": "{0}分鐘", - "hour": "1小時", - "hours": "{0}小時", - "day": "1天", - "days": "{0}天", - "week": "1星期", - "weeks": "{0}星期", - "month": "1個月", - "months": "{0}個月", - "year": "1年", - "years": "{0}年", - } - - month_names = [ - "", - "1月", - "2月", - "3月", - "4月", - "5月", - "6月", - "7月", - "8月", - "9月", - "10月", - "11月", - "12月", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"] - 
day_abbreviations = ["", "一", "二", "三", "四", "五", "六", "日"] - - -class KoreanLocale(Locale): - - names = ["ko", "ko_kr"] - - past = "{0} 전" - future = "{0} 후" - - timeframes = { - "now": "지금", - "second": "1초", - "seconds": "{0}초", - "minute": "1분", - "minutes": "{0}분", - "hour": "한시간", - "hours": "{0}시간", - "day": "하루", - "days": "{0}일", - "week": "1주", - "weeks": "{0}주", - "month": "한달", - "months": "{0}개월", - "year": "1년", - "years": "{0}년", - } - - special_dayframes = { - -3: "그끄제", - -2: "그제", - -1: "어제", - 1: "내일", - 2: "모레", - 3: "글피", - 4: "그글피", - } - - special_yearframes = {-2: "제작년", -1: "작년", 1: "내년", 2: "내후년"} - - month_names = [ - "", - "1월", - "2월", - "3월", - "4월", - "5월", - "6월", - "7월", - "8월", - "9월", - "10월", - "11월", - "12월", - ] - month_abbreviations = [ - "", - " 1", - " 2", - " 3", - " 4", - " 5", - " 6", - " 7", - " 8", - " 9", - "10", - "11", - "12", - ] - - day_names = ["", "월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"] - day_abbreviations = ["", "월", "화", "수", "목", "금", "토", "일"] - - def _ordinal_number(self, n): - ordinals = ["0", "첫", "두", "세", "네", "다섯", "여섯", "일곱", "여덟", "아홉", "열"] - if n < len(ordinals): - return "{}번째".format(ordinals[n]) - return "{}번째".format(n) - - def _format_relative(self, humanized, timeframe, delta): - if timeframe in ("day", "days"): - special = self.special_dayframes.get(delta) - if special: - return special - elif timeframe in ("year", "years"): - special = self.special_yearframes.get(delta) - if special: - return special - - return super(KoreanLocale, self)._format_relative(humanized, timeframe, delta) - - -# derived locale types & implementations. 
-class DutchLocale(Locale): - - names = ["nl", "nl_nl"] - - past = "{0} geleden" - future = "over {0}" - - timeframes = { - "now": "nu", - "second": "een seconde", - "seconds": "{0} seconden", - "minute": "een minuut", - "minutes": "{0} minuten", - "hour": "een uur", - "hours": "{0} uur", - "day": "een dag", - "days": "{0} dagen", - "week": "een week", - "weeks": "{0} weken", - "month": "een maand", - "months": "{0} maanden", - "year": "een jaar", - "years": "{0} jaar", - } - - # In Dutch names of months and days are not starting with a capital letter - # like in the English language. - month_names = [ - "", - "januari", - "februari", - "maart", - "april", - "mei", - "juni", - "juli", - "augustus", - "september", - "oktober", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mrt", - "apr", - "mei", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "maandag", - "dinsdag", - "woensdag", - "donderdag", - "vrijdag", - "zaterdag", - "zondag", - ] - day_abbreviations = ["", "ma", "di", "wo", "do", "vr", "za", "zo"] - - -class SlavicBaseLocale(Locale): - def _format_timeframe(self, timeframe, delta): - - form = self.timeframes[timeframe] - delta = abs(delta) - - if isinstance(form, list): - - if delta % 10 == 1 and delta % 100 != 11: - form = form[0] - elif 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[1] - else: - form = form[2] - - return form.format(delta) - - -class BelarusianLocale(SlavicBaseLocale): - - names = ["be", "be_by"] - - past = "{0} таму" - future = "праз {0}" - - timeframes = { - "now": "зараз", - "second": "секунду", - "seconds": "{0} некалькі секунд", - "minute": "хвіліну", - "minutes": ["{0} хвіліну", "{0} хвіліны", "{0} хвілін"], - "hour": "гадзіну", - "hours": ["{0} гадзіну", "{0} гадзіны", "{0} гадзін"], - "day": "дзень", - "days": ["{0} дзень", "{0} дні", "{0} дзён"], - "month": "месяц", - "months": ["{0} месяц", "{0} месяцы", "{0} 
месяцаў"], - "year": "год", - "years": ["{0} год", "{0} гады", "{0} гадоў"], - } - - month_names = [ - "", - "студзеня", - "лютага", - "сакавіка", - "красавіка", - "траўня", - "чэрвеня", - "ліпеня", - "жніўня", - "верасня", - "кастрычніка", - "лістапада", - "снежня", - ] - month_abbreviations = [ - "", - "студ", - "лют", - "сак", - "крас", - "трав", - "чэрв", - "ліп", - "жнів", - "вер", - "каст", - "ліст", - "снеж", - ] - - day_names = [ - "", - "панядзелак", - "аўторак", - "серада", - "чацвер", - "пятніца", - "субота", - "нядзеля", - ] - day_abbreviations = ["", "пн", "ат", "ср", "чц", "пт", "сб", "нд"] - - -class PolishLocale(SlavicBaseLocale): - - names = ["pl", "pl_pl"] - - past = "{0} temu" - future = "za {0}" - - # The nouns should be in genitive case (Polish: "dopełniacz") - # in order to correctly form `past` & `future` expressions. - timeframes = { - "now": "teraz", - "second": "sekundę", - "seconds": ["{0} sekund", "{0} sekundy", "{0} sekund"], - "minute": "minutę", - "minutes": ["{0} minut", "{0} minuty", "{0} minut"], - "hour": "godzinę", - "hours": ["{0} godzin", "{0} godziny", "{0} godzin"], - "day": "dzień", - "days": "{0} dni", - "week": "tydzień", - "weeks": ["{0} tygodni", "{0} tygodnie", "{0} tygodni"], - "month": "miesiąc", - "months": ["{0} miesięcy", "{0} miesiące", "{0} miesięcy"], - "year": "rok", - "years": ["{0} lat", "{0} lata", "{0} lat"], - } - - month_names = [ - "", - "styczeń", - "luty", - "marzec", - "kwiecień", - "maj", - "czerwiec", - "lipiec", - "sierpień", - "wrzesień", - "październik", - "listopad", - "grudzień", - ] - month_abbreviations = [ - "", - "sty", - "lut", - "mar", - "kwi", - "maj", - "cze", - "lip", - "sie", - "wrz", - "paź", - "lis", - "gru", - ] - - day_names = [ - "", - "poniedziałek", - "wtorek", - "środa", - "czwartek", - "piątek", - "sobota", - "niedziela", - ] - day_abbreviations = ["", "Pn", "Wt", "Śr", "Czw", "Pt", "So", "Nd"] - - -class RussianLocale(SlavicBaseLocale): - - names = ["ru", "ru_ru"] - - past = 
"{0} назад" - future = "через {0}" - - timeframes = { - "now": "сейчас", - "second": "Второй", - "seconds": "{0} несколько секунд", - "minute": "минуту", - "minutes": ["{0} минуту", "{0} минуты", "{0} минут"], - "hour": "час", - "hours": ["{0} час", "{0} часа", "{0} часов"], - "day": "день", - "days": ["{0} день", "{0} дня", "{0} дней"], - "week": "неделю", - "weeks": ["{0} неделю", "{0} недели", "{0} недель"], - "month": "месяц", - "months": ["{0} месяц", "{0} месяца", "{0} месяцев"], - "year": "год", - "years": ["{0} год", "{0} года", "{0} лет"], - } - - month_names = [ - "", - "января", - "февраля", - "марта", - "апреля", - "мая", - "июня", - "июля", - "августа", - "сентября", - "октября", - "ноября", - "декабря", - ] - month_abbreviations = [ - "", - "янв", - "фев", - "мар", - "апр", - "май", - "июн", - "июл", - "авг", - "сен", - "окт", - "ноя", - "дек", - ] - - day_names = [ - "", - "понедельник", - "вторник", - "среда", - "четверг", - "пятница", - "суббота", - "воскресенье", - ] - day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "вс"] - - -class AfrikaansLocale(Locale): - - names = ["af", "af_nl"] - - past = "{0} gelede" - future = "in {0}" - - timeframes = { - "now": "nou", - "second": "n sekonde", - "seconds": "{0} sekondes", - "minute": "minuut", - "minutes": "{0} minute", - "hour": "uur", - "hours": "{0} ure", - "day": "een dag", - "days": "{0} dae", - "month": "een maand", - "months": "{0} maande", - "year": "een jaar", - "years": "{0} jaar", - } - - month_names = [ - "", - "Januarie", - "Februarie", - "Maart", - "April", - "Mei", - "Junie", - "Julie", - "Augustus", - "September", - "Oktober", - "November", - "Desember", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mrt", - "Apr", - "Mei", - "Jun", - "Jul", - "Aug", - "Sep", - "Okt", - "Nov", - "Des", - ] - - day_names = [ - "", - "Maandag", - "Dinsdag", - "Woensdag", - "Donderdag", - "Vrydag", - "Saterdag", - "Sondag", - ] - day_abbreviations = ["", "Ma", "Di", "Wo", "Do", "Vr", 
"Za", "So"] - - -class BulgarianLocale(SlavicBaseLocale): - - names = ["bg", "bg_BG"] - - past = "{0} назад" - future = "напред {0}" - - timeframes = { - "now": "сега", - "second": "секунда", - "seconds": "{0} няколко секунди", - "minute": "минута", - "minutes": ["{0} минута", "{0} минути", "{0} минути"], - "hour": "час", - "hours": ["{0} час", "{0} часа", "{0} часа"], - "day": "ден", - "days": ["{0} ден", "{0} дни", "{0} дни"], - "month": "месец", - "months": ["{0} месец", "{0} месеца", "{0} месеца"], - "year": "година", - "years": ["{0} година", "{0} години", "{0} години"], - } - - month_names = [ - "", - "януари", - "февруари", - "март", - "април", - "май", - "юни", - "юли", - "август", - "септември", - "октомври", - "ноември", - "декември", - ] - month_abbreviations = [ - "", - "ян", - "февр", - "март", - "апр", - "май", - "юни", - "юли", - "авг", - "септ", - "окт", - "ноем", - "дек", - ] - - day_names = [ - "", - "понеделник", - "вторник", - "сряда", - "четвъртък", - "петък", - "събота", - "неделя", - ] - day_abbreviations = ["", "пон", "вт", "ср", "четв", "пет", "съб", "нед"] - - -class UkrainianLocale(SlavicBaseLocale): - - names = ["ua", "uk_ua"] - - past = "{0} тому" - future = "за {0}" - - timeframes = { - "now": "зараз", - "second": "секунда", - "seconds": "{0} кілька секунд", - "minute": "хвилину", - "minutes": ["{0} хвилину", "{0} хвилини", "{0} хвилин"], - "hour": "годину", - "hours": ["{0} годину", "{0} години", "{0} годин"], - "day": "день", - "days": ["{0} день", "{0} дні", "{0} днів"], - "month": "місяць", - "months": ["{0} місяць", "{0} місяці", "{0} місяців"], - "year": "рік", - "years": ["{0} рік", "{0} роки", "{0} років"], - } - - month_names = [ - "", - "січня", - "лютого", - "березня", - "квітня", - "травня", - "червня", - "липня", - "серпня", - "вересня", - "жовтня", - "листопада", - "грудня", - ] - month_abbreviations = [ - "", - "січ", - "лют", - "бер", - "квіт", - "трав", - "черв", - "лип", - "серп", - "вер", - "жовт", - "лист", - 
"груд", - ] - - day_names = [ - "", - "понеділок", - "вівторок", - "середа", - "четвер", - "п’ятниця", - "субота", - "неділя", - ] - day_abbreviations = ["", "пн", "вт", "ср", "чт", "пт", "сб", "нд"] - - -class MacedonianLocale(SlavicBaseLocale): - names = ["mk", "mk_mk"] - - past = "пред {0}" - future = "за {0}" - - timeframes = { - "now": "сега", - "second": "една секунда", - "seconds": ["{0} секунда", "{0} секунди", "{0} секунди"], - "minute": "една минута", - "minutes": ["{0} минута", "{0} минути", "{0} минути"], - "hour": "еден саат", - "hours": ["{0} саат", "{0} саати", "{0} саати"], - "day": "еден ден", - "days": ["{0} ден", "{0} дена", "{0} дена"], - "week": "една недела", - "weeks": ["{0} недела", "{0} недели", "{0} недели"], - "month": "еден месец", - "months": ["{0} месец", "{0} месеци", "{0} месеци"], - "year": "една година", - "years": ["{0} година", "{0} години", "{0} години"], - } - - meridians = {"am": "дп", "pm": "пп", "AM": "претпладне", "PM": "попладне"} - - month_names = [ - "", - "Јануари", - "Февруари", - "Март", - "Април", - "Мај", - "Јуни", - "Јули", - "Август", - "Септември", - "Октомври", - "Ноември", - "Декември", - ] - month_abbreviations = [ - "", - "Јан", - "Фев", - "Мар", - "Апр", - "Мај", - "Јун", - "Јул", - "Авг", - "Септ", - "Окт", - "Ноем", - "Декем", - ] - - day_names = [ - "", - "Понеделник", - "Вторник", - "Среда", - "Четврток", - "Петок", - "Сабота", - "Недела", - ] - day_abbreviations = [ - "", - "Пон", - "Вт", - "Сре", - "Чет", - "Пет", - "Саб", - "Нед", - ] - - -class GermanBaseLocale(Locale): - - past = "vor {0}" - future = "in {0}" - and_word = "und" - - timeframes = { - "now": "gerade eben", - "second": "eine Sekunde", - "seconds": "{0} Sekunden", - "minute": "einer Minute", - "minutes": "{0} Minuten", - "hour": "einer Stunde", - "hours": "{0} Stunden", - "day": "einem Tag", - "days": "{0} Tagen", - "week": "einer Woche", - "weeks": "{0} Wochen", - "month": "einem Monat", - "months": "{0} Monaten", - "year": "einem 
Jahr", - "years": "{0} Jahren", - } - - timeframes_only_distance = timeframes.copy() - timeframes_only_distance["minute"] = "eine Minute" - timeframes_only_distance["hour"] = "eine Stunde" - timeframes_only_distance["day"] = "ein Tag" - timeframes_only_distance["week"] = "eine Woche" - timeframes_only_distance["month"] = "ein Monat" - timeframes_only_distance["year"] = "ein Jahr" - - month_names = [ - "", - "Januar", - "Februar", - "März", - "April", - "Mai", - "Juni", - "Juli", - "August", - "September", - "Oktober", - "November", - "Dezember", - ] - - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mär", - "Apr", - "Mai", - "Jun", - "Jul", - "Aug", - "Sep", - "Okt", - "Nov", - "Dez", - ] - - day_names = [ - "", - "Montag", - "Dienstag", - "Mittwoch", - "Donnerstag", - "Freitag", - "Samstag", - "Sonntag", - ] - - day_abbreviations = ["", "Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"] - - def _ordinal_number(self, n): - return "{}.".format(n) - - def describe(self, timeframe, delta=0, only_distance=False): - """Describes a delta within a timeframe in plain language. - - :param timeframe: a string representing a timeframe. - :param delta: a quantity representing a delta in a timeframe. 
- :param only_distance: return only distance eg: "11 seconds" without "in" or "ago" keywords - """ - - if not only_distance: - return super(GermanBaseLocale, self).describe( - timeframe, delta, only_distance - ) - - # German uses a different case without 'in' or 'ago' - humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta))) - - return humanized - - -class GermanLocale(GermanBaseLocale, Locale): - - names = ["de", "de_de"] - - -class SwissLocale(GermanBaseLocale, Locale): - - names = ["de_ch"] - - -class AustrianLocale(GermanBaseLocale, Locale): - - names = ["de_at"] - - month_names = [ - "", - "Jänner", - "Februar", - "März", - "April", - "Mai", - "Juni", - "Juli", - "August", - "September", - "Oktober", - "November", - "Dezember", - ] - - -class NorwegianLocale(Locale): - - names = ["nb", "nb_no"] - - past = "for {0} siden" - future = "om {0}" - - timeframes = { - "now": "nå nettopp", - "second": "et sekund", - "seconds": "{0} noen sekunder", - "minute": "ett minutt", - "minutes": "{0} minutter", - "hour": "en time", - "hours": "{0} timer", - "day": "en dag", - "days": "{0} dager", - "month": "en måned", - "months": "{0} måneder", - "year": "ett år", - "years": "{0} år", - } - - month_names = [ - "", - "januar", - "februar", - "mars", - "april", - "mai", - "juni", - "juli", - "august", - "september", - "oktober", - "november", - "desember", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "mai", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "des", - ] - - day_names = [ - "", - "mandag", - "tirsdag", - "onsdag", - "torsdag", - "fredag", - "lørdag", - "søndag", - ] - day_abbreviations = ["", "ma", "ti", "on", "to", "fr", "lø", "sø"] - - -class NewNorwegianLocale(Locale): - - names = ["nn", "nn_no"] - - past = "for {0} sidan" - future = "om {0}" - - timeframes = { - "now": "no nettopp", - "second": "et sekund", - "seconds": "{0} nokre sekund", - "minute": "ett minutt", - "minutes": "{0} minutt", - "hour": 
"ein time", - "hours": "{0} timar", - "day": "ein dag", - "days": "{0} dagar", - "month": "en månad", - "months": "{0} månader", - "year": "eit år", - "years": "{0} år", - } - - month_names = [ - "", - "januar", - "februar", - "mars", - "april", - "mai", - "juni", - "juli", - "august", - "september", - "oktober", - "november", - "desember", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "mai", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "des", - ] - - day_names = [ - "", - "måndag", - "tysdag", - "onsdag", - "torsdag", - "fredag", - "laurdag", - "sundag", - ] - day_abbreviations = ["", "må", "ty", "on", "to", "fr", "la", "su"] - - -class PortugueseLocale(Locale): - names = ["pt", "pt_pt"] - - past = "há {0}" - future = "em {0}" - and_word = "e" - - timeframes = { - "now": "agora", - "second": "um segundo", - "seconds": "{0} segundos", - "minute": "um minuto", - "minutes": "{0} minutos", - "hour": "uma hora", - "hours": "{0} horas", - "day": "um dia", - "days": "{0} dias", - "week": "uma semana", - "weeks": "{0} semanas", - "month": "um mês", - "months": "{0} meses", - "year": "um ano", - "years": "{0} anos", - } - - month_names = [ - "", - "Janeiro", - "Fevereiro", - "Março", - "Abril", - "Maio", - "Junho", - "Julho", - "Agosto", - "Setembro", - "Outubro", - "Novembro", - "Dezembro", - ] - month_abbreviations = [ - "", - "Jan", - "Fev", - "Mar", - "Abr", - "Mai", - "Jun", - "Jul", - "Ago", - "Set", - "Out", - "Nov", - "Dez", - ] - - day_names = [ - "", - "Segunda-feira", - "Terça-feira", - "Quarta-feira", - "Quinta-feira", - "Sexta-feira", - "Sábado", - "Domingo", - ] - day_abbreviations = ["", "Seg", "Ter", "Qua", "Qui", "Sex", "Sab", "Dom"] - - -class BrazilianPortugueseLocale(PortugueseLocale): - names = ["pt_br"] - - past = "faz {0}" - - -class TagalogLocale(Locale): - - names = ["tl", "tl_ph"] - - past = "nakaraang {0}" - future = "{0} mula ngayon" - - timeframes = { - "now": "ngayon lang", - "second": "isang segundo", - 
"seconds": "{0} segundo", - "minute": "isang minuto", - "minutes": "{0} minuto", - "hour": "isang oras", - "hours": "{0} oras", - "day": "isang araw", - "days": "{0} araw", - "week": "isang linggo", - "weeks": "{0} linggo", - "month": "isang buwan", - "months": "{0} buwan", - "year": "isang taon", - "years": "{0} taon", - } - - month_names = [ - "", - "Enero", - "Pebrero", - "Marso", - "Abril", - "Mayo", - "Hunyo", - "Hulyo", - "Agosto", - "Setyembre", - "Oktubre", - "Nobyembre", - "Disyembre", - ] - month_abbreviations = [ - "", - "Ene", - "Peb", - "Mar", - "Abr", - "May", - "Hun", - "Hul", - "Ago", - "Set", - "Okt", - "Nob", - "Dis", - ] - - day_names = [ - "", - "Lunes", - "Martes", - "Miyerkules", - "Huwebes", - "Biyernes", - "Sabado", - "Linggo", - ] - day_abbreviations = ["", "Lun", "Mar", "Miy", "Huw", "Biy", "Sab", "Lin"] - - meridians = {"am": "nu", "pm": "nh", "AM": "ng umaga", "PM": "ng hapon"} - - def _ordinal_number(self, n): - return "ika-{}".format(n) - - -class VietnameseLocale(Locale): - - names = ["vi", "vi_vn"] - - past = "{0} trước" - future = "{0} nữa" - - timeframes = { - "now": "hiện tại", - "second": "một giây", - "seconds": "{0} giây", - "minute": "một phút", - "minutes": "{0} phút", - "hour": "một giờ", - "hours": "{0} giờ", - "day": "một ngày", - "days": "{0} ngày", - "week": "một tuần", - "weeks": "{0} tuần", - "month": "một tháng", - "months": "{0} tháng", - "year": "một năm", - "years": "{0} năm", - } - - month_names = [ - "", - "Tháng Một", - "Tháng Hai", - "Tháng Ba", - "Tháng Tư", - "Tháng Năm", - "Tháng Sáu", - "Tháng Bảy", - "Tháng Tám", - "Tháng Chín", - "Tháng Mười", - "Tháng Mười Một", - "Tháng Mười Hai", - ] - month_abbreviations = [ - "", - "Tháng 1", - "Tháng 2", - "Tháng 3", - "Tháng 4", - "Tháng 5", - "Tháng 6", - "Tháng 7", - "Tháng 8", - "Tháng 9", - "Tháng 10", - "Tháng 11", - "Tháng 12", - ] - - day_names = [ - "", - "Thứ Hai", - "Thứ Ba", - "Thứ Tư", - "Thứ Năm", - "Thứ Sáu", - "Thứ Bảy", - "Chủ Nhật", - ] - 
day_abbreviations = ["", "Thứ 2", "Thứ 3", "Thứ 4", "Thứ 5", "Thứ 6", "Thứ 7", "CN"] - - -class TurkishLocale(Locale): - - names = ["tr", "tr_tr"] - - past = "{0} önce" - future = "{0} sonra" - - timeframes = { - "now": "şimdi", - "second": "bir saniye", - "seconds": "{0} saniye", - "minute": "bir dakika", - "minutes": "{0} dakika", - "hour": "bir saat", - "hours": "{0} saat", - "day": "bir gün", - "days": "{0} gün", - "month": "bir ay", - "months": "{0} ay", - "year": "yıl", - "years": "{0} yıl", - } - - month_names = [ - "", - "Ocak", - "Şubat", - "Mart", - "Nisan", - "Mayıs", - "Haziran", - "Temmuz", - "Ağustos", - "Eylül", - "Ekim", - "Kasım", - "Aralık", - ] - month_abbreviations = [ - "", - "Oca", - "Şub", - "Mar", - "Nis", - "May", - "Haz", - "Tem", - "Ağu", - "Eyl", - "Eki", - "Kas", - "Ara", - ] - - day_names = [ - "", - "Pazartesi", - "Salı", - "Çarşamba", - "Perşembe", - "Cuma", - "Cumartesi", - "Pazar", - ] - day_abbreviations = ["", "Pzt", "Sal", "Çar", "Per", "Cum", "Cmt", "Paz"] - - -class AzerbaijaniLocale(Locale): - - names = ["az", "az_az"] - - past = "{0} əvvəl" - future = "{0} sonra" - - timeframes = { - "now": "indi", - "second": "saniyə", - "seconds": "{0} saniyə", - "minute": "bir dəqiqə", - "minutes": "{0} dəqiqə", - "hour": "bir saat", - "hours": "{0} saat", - "day": "bir gün", - "days": "{0} gün", - "month": "bir ay", - "months": "{0} ay", - "year": "il", - "years": "{0} il", - } - - month_names = [ - "", - "Yanvar", - "Fevral", - "Mart", - "Aprel", - "May", - "İyun", - "İyul", - "Avqust", - "Sentyabr", - "Oktyabr", - "Noyabr", - "Dekabr", - ] - month_abbreviations = [ - "", - "Yan", - "Fev", - "Mar", - "Apr", - "May", - "İyn", - "İyl", - "Avq", - "Sen", - "Okt", - "Noy", - "Dek", - ] - - day_names = [ - "", - "Bazar ertəsi", - "Çərşənbə axşamı", - "Çərşənbə", - "Cümə axşamı", - "Cümə", - "Şənbə", - "Bazar", - ] - day_abbreviations = ["", "Ber", "Çax", "Çər", "Cax", "Cüm", "Şnb", "Bzr"] - - -class ArabicLocale(Locale): - names = [ - "ar", 
- "ar_ae", - "ar_bh", - "ar_dj", - "ar_eg", - "ar_eh", - "ar_er", - "ar_km", - "ar_kw", - "ar_ly", - "ar_om", - "ar_qa", - "ar_sa", - "ar_sd", - "ar_so", - "ar_ss", - "ar_td", - "ar_ye", - ] - - past = "منذ {0}" - future = "خلال {0}" - - timeframes = { - "now": "الآن", - "second": "ثانية", - "seconds": {"double": "ثانيتين", "ten": "{0} ثوان", "higher": "{0} ثانية"}, - "minute": "دقيقة", - "minutes": {"double": "دقيقتين", "ten": "{0} دقائق", "higher": "{0} دقيقة"}, - "hour": "ساعة", - "hours": {"double": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"}, - "day": "يوم", - "days": {"double": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"}, - "month": "شهر", - "months": {"double": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"}, - "year": "سنة", - "years": {"double": "سنتين", "ten": "{0} سنوات", "higher": "{0} سنة"}, - } - - month_names = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "مايو", - "يونيو", - "يوليو", - "أغسطس", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - month_abbreviations = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "مايو", - "يونيو", - "يوليو", - "أغسطس", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - - day_names = [ - "", - "الإثنين", - "الثلاثاء", - "الأربعاء", - "الخميس", - "الجمعة", - "السبت", - "الأحد", - ] - day_abbreviations = ["", "إثنين", "ثلاثاء", "أربعاء", "خميس", "جمعة", "سبت", "أحد"] - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - delta = abs(delta) - if isinstance(form, dict): - if delta == 2: - form = form["double"] - elif delta > 2 and delta <= 10: - form = form["ten"] - else: - form = form["higher"] - - return form.format(delta) - - -class LevantArabicLocale(ArabicLocale): - names = ["ar_iq", "ar_jo", "ar_lb", "ar_ps", "ar_sy"] - month_names = [ - "", - "كانون الثاني", - "شباط", - "آذار", - "نيسان", - "أيار", - "حزيران", - "تموز", - "آب", - "أيلول", - "تشرين الأول", - "تشرين الثاني", - "كانون الأول", - ] - month_abbreviations = [ - "", - "كانون الثاني", - 
"شباط", - "آذار", - "نيسان", - "أيار", - "حزيران", - "تموز", - "آب", - "أيلول", - "تشرين الأول", - "تشرين الثاني", - "كانون الأول", - ] - - -class AlgeriaTunisiaArabicLocale(ArabicLocale): - names = ["ar_tn", "ar_dz"] - month_names = [ - "", - "جانفي", - "فيفري", - "مارس", - "أفريل", - "ماي", - "جوان", - "جويلية", - "أوت", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - month_abbreviations = [ - "", - "جانفي", - "فيفري", - "مارس", - "أفريل", - "ماي", - "جوان", - "جويلية", - "أوت", - "سبتمبر", - "أكتوبر", - "نوفمبر", - "ديسمبر", - ] - - -class MauritaniaArabicLocale(ArabicLocale): - names = ["ar_mr"] - month_names = [ - "", - "يناير", - "فبراير", - "مارس", - "إبريل", - "مايو", - "يونيو", - "يوليو", - "أغشت", - "شتمبر", - "أكتوبر", - "نوفمبر", - "دجمبر", - ] - month_abbreviations = [ - "", - "يناير", - "فبراير", - "مارس", - "إبريل", - "مايو", - "يونيو", - "يوليو", - "أغشت", - "شتمبر", - "أكتوبر", - "نوفمبر", - "دجمبر", - ] - - -class MoroccoArabicLocale(ArabicLocale): - names = ["ar_ma"] - month_names = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "ماي", - "يونيو", - "يوليوز", - "غشت", - "شتنبر", - "أكتوبر", - "نونبر", - "دجنبر", - ] - month_abbreviations = [ - "", - "يناير", - "فبراير", - "مارس", - "أبريل", - "ماي", - "يونيو", - "يوليوز", - "غشت", - "شتنبر", - "أكتوبر", - "نونبر", - "دجنبر", - ] - - -class IcelandicLocale(Locale): - def _format_timeframe(self, timeframe, delta): - - timeframe = self.timeframes[timeframe] - if delta < 0: - timeframe = timeframe[0] - elif delta > 0: - timeframe = timeframe[1] - - return timeframe.format(abs(delta)) - - names = ["is", "is_is"] - - past = "fyrir {0} síðan" - future = "eftir {0}" - - timeframes = { - "now": "rétt í þessu", - "second": ("sekúndu", "sekúndu"), - "seconds": ("{0} nokkrum sekúndum", "nokkrar sekúndur"), - "minute": ("einni mínútu", "eina mínútu"), - "minutes": ("{0} mínútum", "{0} mínútur"), - "hour": ("einum tíma", "einn tíma"), - "hours": ("{0} tímum", "{0} tíma"), - "day": ("einum degi", 
"einn dag"), - "days": ("{0} dögum", "{0} daga"), - "month": ("einum mánuði", "einn mánuð"), - "months": ("{0} mánuðum", "{0} mánuði"), - "year": ("einu ári", "eitt ár"), - "years": ("{0} árum", "{0} ár"), - } - - meridians = {"am": "f.h.", "pm": "e.h.", "AM": "f.h.", "PM": "e.h."} - - month_names = [ - "", - "janúar", - "febrúar", - "mars", - "apríl", - "maí", - "júní", - "júlí", - "ágúst", - "september", - "október", - "nóvember", - "desember", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maí", - "jún", - "júl", - "ágú", - "sep", - "okt", - "nóv", - "des", - ] - - day_names = [ - "", - "mánudagur", - "þriðjudagur", - "miðvikudagur", - "fimmtudagur", - "föstudagur", - "laugardagur", - "sunnudagur", - ] - day_abbreviations = ["", "mán", "þri", "mið", "fim", "fös", "lau", "sun"] - - -class DanishLocale(Locale): - - names = ["da", "da_dk"] - - past = "for {0} siden" - future = "efter {0}" - and_word = "og" - - timeframes = { - "now": "lige nu", - "second": "et sekund", - "seconds": "{0} et par sekunder", - "minute": "et minut", - "minutes": "{0} minutter", - "hour": "en time", - "hours": "{0} timer", - "day": "en dag", - "days": "{0} dage", - "month": "en måned", - "months": "{0} måneder", - "year": "et år", - "years": "{0} år", - } - - month_names = [ - "", - "januar", - "februar", - "marts", - "april", - "maj", - "juni", - "juli", - "august", - "september", - "oktober", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maj", - "jun", - "jul", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "mandag", - "tirsdag", - "onsdag", - "torsdag", - "fredag", - "lørdag", - "søndag", - ] - day_abbreviations = ["", "man", "tir", "ons", "tor", "fre", "lør", "søn"] - - -class MalayalamLocale(Locale): - - names = ["ml"] - - past = "{0} മുമ്പ്" - future = "{0} ശേഷം" - - timeframes = { - "now": "ഇപ്പോൾ", - "second": "ഒരു നിമിഷം", - "seconds": "{0} സെക്കന്റ്‌", - "minute": 
"ഒരു മിനിറ്റ്", - "minutes": "{0} മിനിറ്റ്", - "hour": "ഒരു മണിക്കൂർ", - "hours": "{0} മണിക്കൂർ", - "day": "ഒരു ദിവസം ", - "days": "{0} ദിവസം ", - "month": "ഒരു മാസം ", - "months": "{0} മാസം ", - "year": "ഒരു വർഷം ", - "years": "{0} വർഷം ", - } - - meridians = { - "am": "രാവിലെ", - "pm": "ഉച്ചക്ക് ശേഷം", - "AM": "രാവിലെ", - "PM": "ഉച്ചക്ക് ശേഷം", - } - - month_names = [ - "", - "ജനുവരി", - "ഫെബ്രുവരി", - "മാർച്ച്‌", - "ഏപ്രിൽ ", - "മെയ്‌ ", - "ജൂണ്‍", - "ജൂലൈ", - "ഓഗസ്റ്റ്‌", - "സെപ്റ്റംബർ", - "ഒക്ടോബർ", - "നവംബർ", - "ഡിസംബർ", - ] - month_abbreviations = [ - "", - "ജനു", - "ഫെബ് ", - "മാർ", - "ഏപ്രിൽ", - "മേയ്", - "ജൂണ്‍", - "ജൂലൈ", - "ഓഗസ്റ", - "സെപ്റ്റ", - "ഒക്ടോ", - "നവം", - "ഡിസം", - ] - - day_names = ["", "തിങ്കള്‍", "ചൊവ്വ", "ബുധന്‍", "വ്യാഴം", "വെള്ളി", "ശനി", "ഞായര്‍"] - day_abbreviations = [ - "", - "തിങ്കള്‍", - "ചൊവ്വ", - "ബുധന്‍", - "വ്യാഴം", - "വെള്ളി", - "ശനി", - "ഞായര്‍", - ] - - -class HindiLocale(Locale): - - names = ["hi"] - - past = "{0} पहले" - future = "{0} बाद" - - timeframes = { - "now": "अभी", - "second": "एक पल", - "seconds": "{0} सेकंड्", - "minute": "एक मिनट ", - "minutes": "{0} मिनट ", - "hour": "एक घंटा", - "hours": "{0} घंटे", - "day": "एक दिन", - "days": "{0} दिन", - "month": "एक माह ", - "months": "{0} महीने ", - "year": "एक वर्ष ", - "years": "{0} साल ", - } - - meridians = {"am": "सुबह", "pm": "शाम", "AM": "सुबह", "PM": "शाम"} - - month_names = [ - "", - "जनवरी", - "फरवरी", - "मार्च", - "अप्रैल ", - "मई", - "जून", - "जुलाई", - "अगस्त", - "सितंबर", - "अक्टूबर", - "नवंबर", - "दिसंबर", - ] - month_abbreviations = [ - "", - "जन", - "फ़र", - "मार्च", - "अप्रै", - "मई", - "जून", - "जुलाई", - "आग", - "सित", - "अकत", - "नवे", - "दिस", - ] - - day_names = [ - "", - "सोमवार", - "मंगलवार", - "बुधवार", - "गुरुवार", - "शुक्रवार", - "शनिवार", - "रविवार", - ] - day_abbreviations = ["", "सोम", "मंगल", "बुध", "गुरुवार", "शुक्र", "शनि", "रवि"] - - -class CzechLocale(Locale): - names = ["cs", "cs_cz"] - - timeframes = { - "now": "Teď", - "second": 
{"past": "vteřina", "future": "vteřina", "zero": "vteřina"}, - "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekund"]}, - "minute": {"past": "minutou", "future": "minutu", "zero": "{0} minut"}, - "minutes": {"past": "{0} minutami", "future": ["{0} minuty", "{0} minut"]}, - "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodin"}, - "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodin"]}, - "day": {"past": "dnem", "future": "den", "zero": "{0} dnů"}, - "days": {"past": "{0} dny", "future": ["{0} dny", "{0} dnů"]}, - "week": {"past": "týdnem", "future": "týden", "zero": "{0} týdnů"}, - "weeks": {"past": "{0} týdny", "future": ["{0} týdny", "{0} týdnů"]}, - "month": {"past": "měsícem", "future": "měsíc", "zero": "{0} měsíců"}, - "months": {"past": "{0} měsíci", "future": ["{0} měsíce", "{0} měsíců"]}, - "year": {"past": "rokem", "future": "rok", "zero": "{0} let"}, - "years": {"past": "{0} lety", "future": ["{0} roky", "{0} let"]}, - } - - past = "Před {0}" - future = "Za {0}" - - month_names = [ - "", - "leden", - "únor", - "březen", - "duben", - "květen", - "červen", - "červenec", - "srpen", - "září", - "říjen", - "listopad", - "prosinec", - ] - month_abbreviations = [ - "", - "led", - "úno", - "bře", - "dub", - "kvě", - "čvn", - "čvc", - "srp", - "zář", - "říj", - "lis", - "pro", - ] - - day_names = [ - "", - "pondělí", - "úterý", - "středa", - "čtvrtek", - "pátek", - "sobota", - "neděle", - ] - day_abbreviations = ["", "po", "út", "st", "čt", "pá", "so", "ne"] - - def _format_timeframe(self, timeframe, delta): - """Czech aware time frame format function, takes into account - the differences between past and future forms.""" - form = self.timeframes[timeframe] - if isinstance(form, dict): - if delta == 0: - form = form["zero"] # And *never* use 0 in the singular! 
- elif delta > 0: - form = form["future"] - else: - form = form["past"] - delta = abs(delta) - - if isinstance(form, list): - if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[0] - else: - form = form[1] - - return form.format(delta) - - -class SlovakLocale(Locale): - names = ["sk", "sk_sk"] - - timeframes = { - "now": "Teraz", - "second": {"past": "sekundou", "future": "sekundu", "zero": "{0} sekúnd"}, - "seconds": {"past": "{0} sekundami", "future": ["{0} sekundy", "{0} sekúnd"]}, - "minute": {"past": "minútou", "future": "minútu", "zero": "{0} minút"}, - "minutes": {"past": "{0} minútami", "future": ["{0} minúty", "{0} minút"]}, - "hour": {"past": "hodinou", "future": "hodinu", "zero": "{0} hodín"}, - "hours": {"past": "{0} hodinami", "future": ["{0} hodiny", "{0} hodín"]}, - "day": {"past": "dňom", "future": "deň", "zero": "{0} dní"}, - "days": {"past": "{0} dňami", "future": ["{0} dni", "{0} dní"]}, - "week": {"past": "týždňom", "future": "týždeň", "zero": "{0} týždňov"}, - "weeks": {"past": "{0} týždňami", "future": ["{0} týždne", "{0} týždňov"]}, - "month": {"past": "mesiacom", "future": "mesiac", "zero": "{0} mesiacov"}, - "months": {"past": "{0} mesiacmi", "future": ["{0} mesiace", "{0} mesiacov"]}, - "year": {"past": "rokom", "future": "rok", "zero": "{0} rokov"}, - "years": {"past": "{0} rokmi", "future": ["{0} roky", "{0} rokov"]}, - } - - past = "Pred {0}" - future = "O {0}" - and_word = "a" - - month_names = [ - "", - "január", - "február", - "marec", - "apríl", - "máj", - "jún", - "júl", - "august", - "september", - "október", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "máj", - "jún", - "júl", - "aug", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "pondelok", - "utorok", - "streda", - "štvrtok", - "piatok", - "sobota", - "nedeľa", - ] - day_abbreviations = ["", "po", "ut", "st", "št", "pi", "so", "ne"] - - def _format_timeframe(self, 
timeframe, delta): - """Slovak aware time frame format function, takes into account - the differences between past and future forms.""" - form = self.timeframes[timeframe] - if isinstance(form, dict): - if delta == 0: - form = form["zero"] # And *never* use 0 in the singular! - elif delta > 0: - form = form["future"] - else: - form = form["past"] - delta = abs(delta) - - if isinstance(form, list): - if 2 <= delta % 10 <= 4 and (delta % 100 < 10 or delta % 100 >= 20): - form = form[0] - else: - form = form[1] - - return form.format(delta) - - -class FarsiLocale(Locale): - - names = ["fa", "fa_ir"] - - past = "{0} قبل" - future = "در {0}" - - timeframes = { - "now": "اکنون", - "second": "یک لحظه", - "seconds": "{0} ثانیه", - "minute": "یک دقیقه", - "minutes": "{0} دقیقه", - "hour": "یک ساعت", - "hours": "{0} ساعت", - "day": "یک روز", - "days": "{0} روز", - "month": "یک ماه", - "months": "{0} ماه", - "year": "یک سال", - "years": "{0} سال", - } - - meridians = { - "am": "قبل از ظهر", - "pm": "بعد از ظهر", - "AM": "قبل از ظهر", - "PM": "بعد از ظهر", - } - - month_names = [ - "", - "January", - "February", - "March", - "April", - "May", - "June", - "July", - "August", - "September", - "October", - "November", - "December", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ] - - day_names = [ - "", - "دو شنبه", - "سه شنبه", - "چهارشنبه", - "پنجشنبه", - "جمعه", - "شنبه", - "یکشنبه", - ] - day_abbreviations = ["", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] - - -class HebrewLocale(Locale): - - names = ["he", "he_IL"] - - past = "לפני {0}" - future = "בעוד {0}" - and_word = "ו" - - timeframes = { - "now": "הרגע", - "second": "שנייה", - "seconds": "{0} שניות", - "minute": "דקה", - "minutes": "{0} דקות", - "hour": "שעה", - "hours": "{0} שעות", - "2-hours": "שעתיים", - "day": "יום", - "days": "{0} ימים", - "2-days": "יומיים", - "week": "שבוע", - "weeks": "{0} שבועות", - 
"2-weeks": "שבועיים", - "month": "חודש", - "months": "{0} חודשים", - "2-months": "חודשיים", - "year": "שנה", - "years": "{0} שנים", - "2-years": "שנתיים", - } - - meridians = { - "am": 'לפנ"צ', - "pm": 'אחר"צ', - "AM": "לפני הצהריים", - "PM": "אחרי הצהריים", - } - - month_names = [ - "", - "ינואר", - "פברואר", - "מרץ", - "אפריל", - "מאי", - "יוני", - "יולי", - "אוגוסט", - "ספטמבר", - "אוקטובר", - "נובמבר", - "דצמבר", - ] - month_abbreviations = [ - "", - "ינו׳", - "פבר׳", - "מרץ", - "אפר׳", - "מאי", - "יוני", - "יולי", - "אוג׳", - "ספט׳", - "אוק׳", - "נוב׳", - "דצמ׳", - ] - - day_names = ["", "שני", "שלישי", "רביעי", "חמישי", "שישי", "שבת", "ראשון"] - day_abbreviations = ["", "ב׳", "ג׳", "ד׳", "ה׳", "ו׳", "ש׳", "א׳"] - - def _format_timeframe(self, timeframe, delta): - """Hebrew couple of aware""" - couple = "2-{}".format(timeframe) - single = timeframe.rstrip("s") - if abs(delta) == 2 and couple in self.timeframes: - key = couple - elif abs(delta) == 1 and single in self.timeframes: - key = single - else: - key = timeframe - - return self.timeframes[key].format(trunc(abs(delta))) - - def describe_multi(self, timeframes, only_distance=False): - """Describes a delta within multiple timeframes in plain language. - In Hebrew, the and word behaves a bit differently. - - :param timeframes: a list of string, quantity pairs each representing a timeframe and delta. 
- :param only_distance: return only distance eg: "2 hours and 11 seconds" without "in" or "ago" keywords - """ - - humanized = "" - for index, (timeframe, delta) in enumerate(timeframes): - last_humanized = self._format_timeframe(timeframe, delta) - if index == 0: - humanized = last_humanized - elif index == len(timeframes) - 1: # Must have at least 2 items - humanized += " " + self.and_word - if last_humanized[0].isdecimal(): - humanized += "־" - humanized += last_humanized - else: # Don't add for the last one - humanized += ", " + last_humanized - - if not only_distance: - humanized = self._format_relative(humanized, timeframe, delta) - - return humanized - - -class MarathiLocale(Locale): - - names = ["mr"] - - past = "{0} आधी" - future = "{0} नंतर" - - timeframes = { - "now": "सद्य", - "second": "एक सेकंद", - "seconds": "{0} सेकंद", - "minute": "एक मिनिट ", - "minutes": "{0} मिनिट ", - "hour": "एक तास", - "hours": "{0} तास", - "day": "एक दिवस", - "days": "{0} दिवस", - "month": "एक महिना ", - "months": "{0} महिने ", - "year": "एक वर्ष ", - "years": "{0} वर्ष ", - } - - meridians = {"am": "सकाळ", "pm": "संध्याकाळ", "AM": "सकाळ", "PM": "संध्याकाळ"} - - month_names = [ - "", - "जानेवारी", - "फेब्रुवारी", - "मार्च", - "एप्रिल", - "मे", - "जून", - "जुलै", - "अॉगस्ट", - "सप्टेंबर", - "अॉक्टोबर", - "नोव्हेंबर", - "डिसेंबर", - ] - month_abbreviations = [ - "", - "जान", - "फेब्रु", - "मार्च", - "एप्रि", - "मे", - "जून", - "जुलै", - "अॉग", - "सप्टें", - "अॉक्टो", - "नोव्हें", - "डिसें", - ] - - day_names = [ - "", - "सोमवार", - "मंगळवार", - "बुधवार", - "गुरुवार", - "शुक्रवार", - "शनिवार", - "रविवार", - ] - day_abbreviations = ["", "सोम", "मंगळ", "बुध", "गुरु", "शुक्र", "शनि", "रवि"] - - -def _map_locales(): - - locales = {} - - for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): - if issubclass(cls, Locale): # pragma: no branch - for name in cls.names: - locales[name.lower()] = cls - - return locales - - -class CatalanLocale(Locale): - names = ["ca", 
"ca_es", "ca_ad", "ca_fr", "ca_it"] - past = "Fa {0}" - future = "En {0}" - and_word = "i" - - timeframes = { - "now": "Ara mateix", - "second": "un segon", - "seconds": "{0} segons", - "minute": "1 minut", - "minutes": "{0} minuts", - "hour": "una hora", - "hours": "{0} hores", - "day": "un dia", - "days": "{0} dies", - "month": "un mes", - "months": "{0} mesos", - "year": "un any", - "years": "{0} anys", - } - - month_names = [ - "", - "gener", - "febrer", - "març", - "abril", - "maig", - "juny", - "juliol", - "agost", - "setembre", - "octubre", - "novembre", - "desembre", - ] - month_abbreviations = [ - "", - "gen.", - "febr.", - "març", - "abr.", - "maig", - "juny", - "jul.", - "ag.", - "set.", - "oct.", - "nov.", - "des.", - ] - day_names = [ - "", - "dilluns", - "dimarts", - "dimecres", - "dijous", - "divendres", - "dissabte", - "diumenge", - ] - day_abbreviations = [ - "", - "dl.", - "dt.", - "dc.", - "dj.", - "dv.", - "ds.", - "dg.", - ] - - -class BasqueLocale(Locale): - names = ["eu", "eu_eu"] - past = "duela {0}" - future = "{0}" # I don't know what's the right phrase in Basque for the future. 
- - timeframes = { - "now": "Orain", - "second": "segundo bat", - "seconds": "{0} segundu", - "minute": "minutu bat", - "minutes": "{0} minutu", - "hour": "ordu bat", - "hours": "{0} ordu", - "day": "egun bat", - "days": "{0} egun", - "month": "hilabete bat", - "months": "{0} hilabet", - "year": "urte bat", - "years": "{0} urte", - } - - month_names = [ - "", - "urtarrilak", - "otsailak", - "martxoak", - "apirilak", - "maiatzak", - "ekainak", - "uztailak", - "abuztuak", - "irailak", - "urriak", - "azaroak", - "abenduak", - ] - month_abbreviations = [ - "", - "urt", - "ots", - "mar", - "api", - "mai", - "eka", - "uzt", - "abu", - "ira", - "urr", - "aza", - "abe", - ] - day_names = [ - "", - "astelehena", - "asteartea", - "asteazkena", - "osteguna", - "ostirala", - "larunbata", - "igandea", - ] - day_abbreviations = ["", "al", "ar", "az", "og", "ol", "lr", "ig"] - - -class HungarianLocale(Locale): - - names = ["hu", "hu_hu"] - - past = "{0} ezelőtt" - future = "{0} múlva" - - timeframes = { - "now": "éppen most", - "second": {"past": "egy második", "future": "egy második"}, - "seconds": {"past": "{0} másodpercekkel", "future": "{0} pár másodperc"}, - "minute": {"past": "egy perccel", "future": "egy perc"}, - "minutes": {"past": "{0} perccel", "future": "{0} perc"}, - "hour": {"past": "egy órával", "future": "egy óra"}, - "hours": {"past": "{0} órával", "future": "{0} óra"}, - "day": {"past": "egy nappal", "future": "egy nap"}, - "days": {"past": "{0} nappal", "future": "{0} nap"}, - "month": {"past": "egy hónappal", "future": "egy hónap"}, - "months": {"past": "{0} hónappal", "future": "{0} hónap"}, - "year": {"past": "egy évvel", "future": "egy év"}, - "years": {"past": "{0} évvel", "future": "{0} év"}, - } - - month_names = [ - "", - "január", - "február", - "március", - "április", - "május", - "június", - "július", - "augusztus", - "szeptember", - "október", - "november", - "december", - ] - month_abbreviations = [ - "", - "jan", - "febr", - "márc", - "ápr", - 
"máj", - "jún", - "júl", - "aug", - "szept", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "hétfő", - "kedd", - "szerda", - "csütörtök", - "péntek", - "szombat", - "vasárnap", - ] - day_abbreviations = ["", "hét", "kedd", "szer", "csüt", "pént", "szom", "vas"] - - meridians = {"am": "de", "pm": "du", "AM": "DE", "PM": "DU"} - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - - if isinstance(form, dict): - if delta > 0: - form = form["future"] - else: - form = form["past"] - - return form.format(abs(delta)) - - -class EsperantoLocale(Locale): - names = ["eo", "eo_xx"] - past = "antaŭ {0}" - future = "post {0}" - - timeframes = { - "now": "nun", - "second": "sekundo", - "seconds": "{0} kelkaj sekundoj", - "minute": "unu minuto", - "minutes": "{0} minutoj", - "hour": "un horo", - "hours": "{0} horoj", - "day": "unu tago", - "days": "{0} tagoj", - "month": "unu monato", - "months": "{0} monatoj", - "year": "unu jaro", - "years": "{0} jaroj", - } - - month_names = [ - "", - "januaro", - "februaro", - "marto", - "aprilo", - "majo", - "junio", - "julio", - "aŭgusto", - "septembro", - "oktobro", - "novembro", - "decembro", - ] - month_abbreviations = [ - "", - "jan", - "feb", - "mar", - "apr", - "maj", - "jun", - "jul", - "aŭg", - "sep", - "okt", - "nov", - "dec", - ] - - day_names = [ - "", - "lundo", - "mardo", - "merkredo", - "ĵaŭdo", - "vendredo", - "sabato", - "dimanĉo", - ] - day_abbreviations = ["", "lun", "mar", "mer", "ĵaŭ", "ven", "sab", "dim"] - - meridians = {"am": "atm", "pm": "ptm", "AM": "ATM", "PM": "PTM"} - - ordinal_day_re = r"((?P[1-3]?[0-9](?=a))a)" - - def _ordinal_number(self, n): - return "{}a".format(n) - - -class ThaiLocale(Locale): - - names = ["th", "th_th"] - - past = "{0}{1}ที่ผ่านมา" - future = "ในอีก{1}{0}" - - timeframes = { - "now": "ขณะนี้", - "second": "วินาที", - "seconds": "{0} ไม่กี่วินาที", - "minute": "1 นาที", - "minutes": "{0} นาที", - "hour": "1 ชั่วโมง", - "hours": "{0} ชั่วโมง", - 
"day": "1 วัน", - "days": "{0} วัน", - "month": "1 เดือน", - "months": "{0} เดือน", - "year": "1 ปี", - "years": "{0} ปี", - } - - month_names = [ - "", - "มกราคม", - "กุมภาพันธ์", - "มีนาคม", - "เมษายน", - "พฤษภาคม", - "มิถุนายน", - "กรกฎาคม", - "สิงหาคม", - "กันยายน", - "ตุลาคม", - "พฤศจิกายน", - "ธันวาคม", - ] - month_abbreviations = [ - "", - "ม.ค.", - "ก.พ.", - "มี.ค.", - "เม.ย.", - "พ.ค.", - "มิ.ย.", - "ก.ค.", - "ส.ค.", - "ก.ย.", - "ต.ค.", - "พ.ย.", - "ธ.ค.", - ] - - day_names = ["", "จันทร์", "อังคาร", "พุธ", "พฤหัสบดี", "ศุกร์", "เสาร์", "อาทิตย์"] - day_abbreviations = ["", "จ", "อ", "พ", "พฤ", "ศ", "ส", "อา"] - - meridians = {"am": "am", "pm": "pm", "AM": "AM", "PM": "PM"} - - BE_OFFSET = 543 - - def year_full(self, year): - """Thai always use Buddhist Era (BE) which is CE + 543""" - year += self.BE_OFFSET - return "{:04d}".format(year) - - def year_abbreviation(self, year): - """Thai always use Buddhist Era (BE) which is CE + 543""" - year += self.BE_OFFSET - return "{:04d}".format(year)[2:] - - def _format_relative(self, humanized, timeframe, delta): - """Thai normally doesn't have any space between words""" - if timeframe == "now": - return humanized - space = "" if timeframe == "seconds" else " " - direction = self.past if delta < 0 else self.future - - return direction.format(humanized, space) - - -class BengaliLocale(Locale): - - names = ["bn", "bn_bd", "bn_in"] - - past = "{0} আগে" - future = "{0} পরে" - - timeframes = { - "now": "এখন", - "second": "একটি দ্বিতীয়", - "seconds": "{0} সেকেন্ড", - "minute": "এক মিনিট", - "minutes": "{0} মিনিট", - "hour": "এক ঘণ্টা", - "hours": "{0} ঘণ্টা", - "day": "এক দিন", - "days": "{0} দিন", - "month": "এক মাস", - "months": "{0} মাস ", - "year": "এক বছর", - "years": "{0} বছর", - } - - meridians = {"am": "সকাল", "pm": "বিকাল", "AM": "সকাল", "PM": "বিকাল"} - - month_names = [ - "", - "জানুয়ারি", - "ফেব্রুয়ারি", - "মার্চ", - "এপ্রিল", - "মে", - "জুন", - "জুলাই", - "আগস্ট", - "সেপ্টেম্বর", - "অক্টোবর", - "নভেম্বর", 
- "ডিসেম্বর", - ] - month_abbreviations = [ - "", - "জানু", - "ফেব", - "মার্চ", - "এপ্রি", - "মে", - "জুন", - "জুল", - "অগা", - "সেপ্ট", - "অক্টো", - "নভে", - "ডিসে", - ] - - day_names = [ - "", - "সোমবার", - "মঙ্গলবার", - "বুধবার", - "বৃহস্পতিবার", - "শুক্রবার", - "শনিবার", - "রবিবার", - ] - day_abbreviations = ["", "সোম", "মঙ্গল", "বুধ", "বৃহঃ", "শুক্র", "শনি", "রবি"] - - def _ordinal_number(self, n): - if n > 10 or n == 0: - return "{}তম".format(n) - if n in [1, 5, 7, 8, 9, 10]: - return "{}ম".format(n) - if n in [2, 3]: - return "{}য়".format(n) - if n == 4: - return "{}র্থ".format(n) - if n == 6: - return "{}ষ্ঠ".format(n) - - -class RomanshLocale(Locale): - - names = ["rm", "rm_ch"] - - past = "avant {0}" - future = "en {0}" - - timeframes = { - "now": "en quest mument", - "second": "in secunda", - "seconds": "{0} secundas", - "minute": "ina minuta", - "minutes": "{0} minutas", - "hour": "in'ura", - "hours": "{0} ura", - "day": "in di", - "days": "{0} dis", - "month": "in mais", - "months": "{0} mais", - "year": "in onn", - "years": "{0} onns", - } - - month_names = [ - "", - "schaner", - "favrer", - "mars", - "avrigl", - "matg", - "zercladur", - "fanadur", - "avust", - "settember", - "october", - "november", - "december", - ] - - month_abbreviations = [ - "", - "schan", - "fav", - "mars", - "avr", - "matg", - "zer", - "fan", - "avu", - "set", - "oct", - "nov", - "dec", - ] - - day_names = [ - "", - "glindesdi", - "mardi", - "mesemna", - "gievgia", - "venderdi", - "sonda", - "dumengia", - ] - - day_abbreviations = ["", "gli", "ma", "me", "gie", "ve", "so", "du"] - - -class RomanianLocale(Locale): - names = ["ro", "ro_ro"] - - past = "{0} în urmă" - future = "peste {0}" - and_word = "și" - - timeframes = { - "now": "acum", - "second": "o secunda", - "seconds": "{0} câteva secunde", - "minute": "un minut", - "minutes": "{0} minute", - "hour": "o oră", - "hours": "{0} ore", - "day": "o zi", - "days": "{0} zile", - "month": "o lună", - "months": "{0} luni", - 
"year": "un an", - "years": "{0} ani", - } - - month_names = [ - "", - "ianuarie", - "februarie", - "martie", - "aprilie", - "mai", - "iunie", - "iulie", - "august", - "septembrie", - "octombrie", - "noiembrie", - "decembrie", - ] - month_abbreviations = [ - "", - "ian", - "febr", - "mart", - "apr", - "mai", - "iun", - "iul", - "aug", - "sept", - "oct", - "nov", - "dec", - ] - - day_names = [ - "", - "luni", - "marți", - "miercuri", - "joi", - "vineri", - "sâmbătă", - "duminică", - ] - day_abbreviations = ["", "Lun", "Mar", "Mie", "Joi", "Vin", "Sâm", "Dum"] - - -class SlovenianLocale(Locale): - names = ["sl", "sl_si"] - - past = "pred {0}" - future = "čez {0}" - and_word = "in" - - timeframes = { - "now": "zdaj", - "second": "sekundo", - "seconds": "{0} sekund", - "minute": "minuta", - "minutes": "{0} minutami", - "hour": "uro", - "hours": "{0} ur", - "day": "dan", - "days": "{0} dni", - "month": "mesec", - "months": "{0} mesecev", - "year": "leto", - "years": "{0} let", - } - - meridians = {"am": "", "pm": "", "AM": "", "PM": ""} - - month_names = [ - "", - "Januar", - "Februar", - "Marec", - "April", - "Maj", - "Junij", - "Julij", - "Avgust", - "September", - "Oktober", - "November", - "December", - ] - - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "Maj", - "Jun", - "Jul", - "Avg", - "Sep", - "Okt", - "Nov", - "Dec", - ] - - day_names = [ - "", - "Ponedeljek", - "Torek", - "Sreda", - "Četrtek", - "Petek", - "Sobota", - "Nedelja", - ] - - day_abbreviations = ["", "Pon", "Tor", "Sre", "Čet", "Pet", "Sob", "Ned"] - - -class IndonesianLocale(Locale): - - names = ["id", "id_id"] - - past = "{0} yang lalu" - future = "dalam {0}" - and_word = "dan" - - timeframes = { - "now": "baru saja", - "second": "1 sebentar", - "seconds": "{0} detik", - "minute": "1 menit", - "minutes": "{0} menit", - "hour": "1 jam", - "hours": "{0} jam", - "day": "1 hari", - "days": "{0} hari", - "month": "1 bulan", - "months": "{0} bulan", - "year": "1 tahun", - "years": 
"{0} tahun", - } - - meridians = {"am": "", "pm": "", "AM": "", "PM": ""} - - month_names = [ - "", - "Januari", - "Februari", - "Maret", - "April", - "Mei", - "Juni", - "Juli", - "Agustus", - "September", - "Oktober", - "November", - "Desember", - ] - - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mar", - "Apr", - "Mei", - "Jun", - "Jul", - "Ags", - "Sept", - "Okt", - "Nov", - "Des", - ] - - day_names = ["", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu", "Minggu"] - - day_abbreviations = [ - "", - "Senin", - "Selasa", - "Rabu", - "Kamis", - "Jumat", - "Sabtu", - "Minggu", - ] - - -class NepaliLocale(Locale): - names = ["ne", "ne_np"] - - past = "{0} पहिले" - future = "{0} पछी" - - timeframes = { - "now": "अहिले", - "second": "एक सेकेन्ड", - "seconds": "{0} सेकण्ड", - "minute": "मिनेट", - "minutes": "{0} मिनेट", - "hour": "एक घण्टा", - "hours": "{0} घण्टा", - "day": "एक दिन", - "days": "{0} दिन", - "month": "एक महिना", - "months": "{0} महिना", - "year": "एक बर्ष", - "years": "बर्ष", - } - - meridians = {"am": "पूर्वाह्न", "pm": "अपरान्ह", "AM": "पूर्वाह्न", "PM": "अपरान्ह"} - - month_names = [ - "", - "जनवरी", - "फेब्रुअरी", - "मार्च", - "एप्रील", - "मे", - "जुन", - "जुलाई", - "अगष्ट", - "सेप्टेम्बर", - "अक्टोबर", - "नोवेम्बर", - "डिसेम्बर", - ] - month_abbreviations = [ - "", - "जन", - "फेब", - "मार्च", - "एप्रील", - "मे", - "जुन", - "जुलाई", - "अग", - "सेप", - "अक्ट", - "नोव", - "डिस", - ] - - day_names = [ - "", - "सोमवार", - "मंगलवार", - "बुधवार", - "बिहिवार", - "शुक्रवार", - "शनिवार", - "आइतवार", - ] - - day_abbreviations = ["", "सोम", "मंगल", "बुध", "बिहि", "शुक्र", "शनि", "आइत"] - - -class EstonianLocale(Locale): - names = ["ee", "et"] - - past = "{0} tagasi" - future = "{0} pärast" - and_word = "ja" - - timeframes = { - "now": {"past": "just nüüd", "future": "just nüüd"}, - "second": {"past": "üks sekund", "future": "ühe sekundi"}, - "seconds": {"past": "{0} sekundit", "future": "{0} sekundi"}, - "minute": {"past": "üks minut", "future": "ühe 
minuti"}, - "minutes": {"past": "{0} minutit", "future": "{0} minuti"}, - "hour": {"past": "tund aega", "future": "tunni aja"}, - "hours": {"past": "{0} tundi", "future": "{0} tunni"}, - "day": {"past": "üks päev", "future": "ühe päeva"}, - "days": {"past": "{0} päeva", "future": "{0} päeva"}, - "month": {"past": "üks kuu", "future": "ühe kuu"}, - "months": {"past": "{0} kuud", "future": "{0} kuu"}, - "year": {"past": "üks aasta", "future": "ühe aasta"}, - "years": {"past": "{0} aastat", "future": "{0} aasta"}, - } - - month_names = [ - "", - "Jaanuar", - "Veebruar", - "Märts", - "Aprill", - "Mai", - "Juuni", - "Juuli", - "August", - "September", - "Oktoober", - "November", - "Detsember", - ] - month_abbreviations = [ - "", - "Jan", - "Veb", - "Mär", - "Apr", - "Mai", - "Jun", - "Jul", - "Aug", - "Sep", - "Okt", - "Nov", - "Dets", - ] - - day_names = [ - "", - "Esmaspäev", - "Teisipäev", - "Kolmapäev", - "Neljapäev", - "Reede", - "Laupäev", - "Pühapäev", - ] - day_abbreviations = ["", "Esm", "Teis", "Kolm", "Nelj", "Re", "Lau", "Püh"] - - def _format_timeframe(self, timeframe, delta): - form = self.timeframes[timeframe] - if delta > 0: - form = form["future"] - else: - form = form["past"] - return form.format(abs(delta)) - - -class SwahiliLocale(Locale): - - names = [ - "sw", - "sw_ke", - "sw_tz", - ] - - past = "{0} iliyopita" - future = "muda wa {0}" - and_word = "na" - - timeframes = { - "now": "sasa hivi", - "second": "sekunde", - "seconds": "sekunde {0}", - "minute": "dakika moja", - "minutes": "dakika {0}", - "hour": "saa moja", - "hours": "saa {0}", - "day": "siku moja", - "days": "siku {0}", - "week": "wiki moja", - "weeks": "wiki {0}", - "month": "mwezi moja", - "months": "miezi {0}", - "year": "mwaka moja", - "years": "miaka {0}", - } - - meridians = {"am": "asu", "pm": "mch", "AM": "ASU", "PM": "MCH"} - - month_names = [ - "", - "Januari", - "Februari", - "Machi", - "Aprili", - "Mei", - "Juni", - "Julai", - "Agosti", - "Septemba", - "Oktoba", - 
"Novemba", - "Desemba", - ] - month_abbreviations = [ - "", - "Jan", - "Feb", - "Mac", - "Apr", - "Mei", - "Jun", - "Jul", - "Ago", - "Sep", - "Okt", - "Nov", - "Des", - ] - - day_names = [ - "", - "Jumatatu", - "Jumanne", - "Jumatano", - "Alhamisi", - "Ijumaa", - "Jumamosi", - "Jumapili", - ] - day_abbreviations = [ - "", - "Jumatatu", - "Jumanne", - "Jumatano", - "Alhamisi", - "Ijumaa", - "Jumamosi", - "Jumapili", - ] - - -_locales = _map_locales() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py deleted file mode 100644 index 243fd1721c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/parser.py +++ /dev/null @@ -1,596 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import re -from datetime import datetime, timedelta - -from dateutil import tz - -from arrow import locales -from arrow.util import iso_to_gregorian, next_weekday, normalize_timestamp - -try: - from functools import lru_cache -except ImportError: # pragma: no cover - from backports.functools_lru_cache import lru_cache # pragma: no cover - - -class ParserError(ValueError): - pass - - -# Allows for ParserErrors to be propagated from _build_datetime() -# when day_of_year errors occur. -# Before this, the ParserErrors were caught by the try/except in -# _parse_multiformat() and the appropriate error message was not -# transmitted to the user. 
-class ParserMatchError(ParserError): - pass - - -class DateTimeParser(object): - - _FORMAT_RE = re.compile( - r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)" - ) - _ESCAPE_RE = re.compile(r"\[[^\[\]]*\]") - - _ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}") - _ONE_OR_TWO_OR_THREE_DIGIT_RE = re.compile(r"\d{1,3}") - _ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+") - _TWO_DIGIT_RE = re.compile(r"\d{2}") - _THREE_DIGIT_RE = re.compile(r"\d{3}") - _FOUR_DIGIT_RE = re.compile(r"\d{4}") - _TZ_Z_RE = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z") - _TZ_ZZ_RE = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z") - _TZ_NAME_RE = re.compile(r"\w[\w+\-/]+") - # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will - # break cases like "15 Jul 2000" and a format list (see issue #447) - _TIMESTAMP_RE = re.compile(r"^\-?\d+\.?\d+$") - _TIMESTAMP_EXPANDED_RE = re.compile(r"^\-?\d+$") - _TIME_RE = re.compile(r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$") - _WEEK_DATE_RE = re.compile(r"(?P\d{4})[\-]?W(?P\d{2})[\-]?(?P\d)?") - - _BASE_INPUT_RE_MAP = { - "YYYY": _FOUR_DIGIT_RE, - "YY": _TWO_DIGIT_RE, - "MM": _TWO_DIGIT_RE, - "M": _ONE_OR_TWO_DIGIT_RE, - "DDDD": _THREE_DIGIT_RE, - "DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE, - "DD": _TWO_DIGIT_RE, - "D": _ONE_OR_TWO_DIGIT_RE, - "HH": _TWO_DIGIT_RE, - "H": _ONE_OR_TWO_DIGIT_RE, - "hh": _TWO_DIGIT_RE, - "h": _ONE_OR_TWO_DIGIT_RE, - "mm": _TWO_DIGIT_RE, - "m": _ONE_OR_TWO_DIGIT_RE, - "ss": _TWO_DIGIT_RE, - "s": _ONE_OR_TWO_DIGIT_RE, - "X": _TIMESTAMP_RE, - "x": _TIMESTAMP_EXPANDED_RE, - "ZZZ": _TZ_NAME_RE, - "ZZ": _TZ_ZZ_RE, - "Z": _TZ_Z_RE, - "S": _ONE_OR_MORE_DIGIT_RE, - "W": _WEEK_DATE_RE, - } - - SEPARATORS = ["-", "/", "."] - - def __init__(self, locale="en_us", cache_size=0): - - self.locale = locales.get_locale(locale) - self._input_re_map = self._BASE_INPUT_RE_MAP.copy() - self._input_re_map.update( - { - "MMMM": self._generate_choice_re( - 
self.locale.month_names[1:], re.IGNORECASE - ), - "MMM": self._generate_choice_re( - self.locale.month_abbreviations[1:], re.IGNORECASE - ), - "Do": re.compile(self.locale.ordinal_day_re), - "dddd": self._generate_choice_re( - self.locale.day_names[1:], re.IGNORECASE - ), - "ddd": self._generate_choice_re( - self.locale.day_abbreviations[1:], re.IGNORECASE - ), - "d": re.compile(r"[1-7]"), - "a": self._generate_choice_re( - (self.locale.meridians["am"], self.locale.meridians["pm"]) - ), - # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to - # ensure backwards compatibility of this token - "A": self._generate_choice_re(self.locale.meridians.values()), - } - ) - if cache_size > 0: - self._generate_pattern_re = lru_cache(maxsize=cache_size)( - self._generate_pattern_re - ) - - # TODO: since we support more than ISO 8601, we should rename this function - # IDEA: break into multiple functions - def parse_iso(self, datetime_string, normalize_whitespace=False): - - if normalize_whitespace: - datetime_string = re.sub(r"\s+", " ", datetime_string.strip()) - - has_space_divider = " " in datetime_string - has_t_divider = "T" in datetime_string - - num_spaces = datetime_string.count(" ") - if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0: - raise ParserError( - "Expected an ISO 8601-like string, but was given '{}'. 
Try passing in a format string to resolve this.".format( - datetime_string - ) - ) - - has_time = has_space_divider or has_t_divider - has_tz = False - - # date formats (ISO 8601 and others) to test against - # NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used) - formats = [ - "YYYY-MM-DD", - "YYYY-M-DD", - "YYYY-M-D", - "YYYY/MM/DD", - "YYYY/M/DD", - "YYYY/M/D", - "YYYY.MM.DD", - "YYYY.M.DD", - "YYYY.M.D", - "YYYYMMDD", - "YYYY-DDDD", - "YYYYDDDD", - "YYYY-MM", - "YYYY/MM", - "YYYY.MM", - "YYYY", - "W", - ] - - if has_time: - - if has_space_divider: - date_string, time_string = datetime_string.split(" ", 1) - else: - date_string, time_string = datetime_string.split("T", 1) - - time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE) - - time_components = self._TIME_RE.match(time_parts[0]) - - if time_components is None: - raise ParserError( - "Invalid time component provided. Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format." 
- ) - - ( - hours, - minutes, - seconds, - subseconds_sep, - subseconds, - ) = time_components.groups() - - has_tz = len(time_parts) == 2 - has_minutes = minutes is not None - has_seconds = seconds is not None - has_subseconds = subseconds is not None - - is_basic_time_format = ":" not in time_parts[0] - tz_format = "Z" - - # use 'ZZ' token instead since tz offset is present in non-basic format - if has_tz and ":" in time_parts[1]: - tz_format = "ZZ" - - time_sep = "" if is_basic_time_format else ":" - - if has_subseconds: - time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format( - time_sep=time_sep, subseconds_sep=subseconds_sep - ) - elif has_seconds: - time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep) - elif has_minutes: - time_string = "HH{time_sep}mm".format(time_sep=time_sep) - else: - time_string = "HH" - - if has_space_divider: - formats = ["{} {}".format(f, time_string) for f in formats] - else: - formats = ["{}T{}".format(f, time_string) for f in formats] - - if has_time and has_tz: - # Add "Z" or "ZZ" to the format strings to indicate to - # _parse_token() that a timezone needs to be parsed - formats = ["{}{}".format(f, tz_format) for f in formats] - - return self._parse_multiformat(datetime_string, formats) - - def parse(self, datetime_string, fmt, normalize_whitespace=False): - - if normalize_whitespace: - datetime_string = re.sub(r"\s+", " ", datetime_string) - - if isinstance(fmt, list): - return self._parse_multiformat(datetime_string, fmt) - - fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt) - - match = fmt_pattern_re.search(datetime_string) - - if match is None: - raise ParserMatchError( - "Failed to match '{}' when parsing '{}'".format(fmt, datetime_string) - ) - - parts = {} - for token in fmt_tokens: - if token == "Do": - value = match.group("value") - elif token == "W": - value = (match.group("year"), match.group("week"), match.group("day")) - else: - value = match.group(token) - 
self._parse_token(token, value, parts) - - return self._build_datetime(parts) - - def _generate_pattern_re(self, fmt): - - # fmt is a string of tokens like 'YYYY-MM-DD' - # we construct a new string by replacing each - # token by its pattern: - # 'YYYY-MM-DD' -> '(?P\d{4})-(?P\d{2})-(?P
\d{2})' - tokens = [] - offset = 0 - - # Escape all special RegEx chars - escaped_fmt = re.escape(fmt) - - # Extract the bracketed expressions to be reinserted later. - escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt) - - # Any number of S is the same as one. - # TODO: allow users to specify the number of digits to parse - escaped_fmt = re.sub(r"S+", "S", escaped_fmt) - - escaped_data = re.findall(self._ESCAPE_RE, fmt) - - fmt_pattern = escaped_fmt - - for m in self._FORMAT_RE.finditer(escaped_fmt): - token = m.group(0) - try: - input_re = self._input_re_map[token] - except KeyError: - raise ParserError("Unrecognized token '{}'".format(token)) - input_pattern = "(?P<{}>{})".format(token, input_re.pattern) - tokens.append(token) - # a pattern doesn't have the same length as the token - # it replaces! We keep the difference in the offset variable. - # This works because the string is scanned left-to-right and matches - # are returned in the order found by finditer. - fmt_pattern = ( - fmt_pattern[: m.start() + offset] - + input_pattern - + fmt_pattern[m.end() + offset :] - ) - offset += len(input_pattern) - (m.end() - m.start()) - - final_fmt_pattern = "" - split_fmt = fmt_pattern.split(r"\#") - - # Due to the way Python splits, 'split_fmt' will always be longer - for i in range(len(split_fmt)): - final_fmt_pattern += split_fmt[i] - if i < len(escaped_data): - final_fmt_pattern += escaped_data[i][1:-1] - - # Wrap final_fmt_pattern in a custom word boundary to strictly - # match the formatting pattern and filter out date and time formats - # that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah, - # blah1998-09-12blah. The custom word boundary matches every character - # that is not a whitespace character to allow for searching for a date - # and time string in a natural language sentence. Therefore, searching - # for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will - # work properly. 
- # Certain punctuation before or after the target pattern such as - # "1998-09-12," is permitted. For the full list of valid punctuation, - # see the documentation. - - starting_word_boundary = ( - r"(?\s])" # This is the list of punctuation that is ok before the pattern (i.e. "It can't not be these characters before the pattern") - r"(\b|^)" # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a negative number through i.e. before epoch numbers - ) - ending_word_boundary = ( - r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks can appear after the pattern at most 1 time - r"(?!\S))" # Don't allow any non-whitespace character after the punctuation - ) - bounded_fmt_pattern = r"{}{}{}".format( - starting_word_boundary, final_fmt_pattern, ending_word_boundary - ) - - return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE) - - def _parse_token(self, token, value, parts): - - if token == "YYYY": - parts["year"] = int(value) - - elif token == "YY": - value = int(value) - parts["year"] = 1900 + value if value > 68 else 2000 + value - - elif token in ["MMMM", "MMM"]: - parts["month"] = self.locale.month_number(value.lower()) - - elif token in ["MM", "M"]: - parts["month"] = int(value) - - elif token in ["DDDD", "DDD"]: - parts["day_of_year"] = int(value) - - elif token in ["DD", "D"]: - parts["day"] = int(value) - - elif token == "Do": - parts["day"] = int(value) - - elif token == "dddd": - # locale day names are 1-indexed - day_of_week = [x.lower() for x in self.locale.day_names].index( - value.lower() - ) - parts["day_of_week"] = day_of_week - 1 - - elif token == "ddd": - # locale day abbreviations are 1-indexed - day_of_week = [x.lower() for x in self.locale.day_abbreviations].index( - value.lower() - ) - parts["day_of_week"] = day_of_week - 1 - - elif token.upper() in ["HH", "H"]: - parts["hour"] = int(value) - - elif token in ["mm", "m"]: - 
parts["minute"] = int(value) - - elif token in ["ss", "s"]: - parts["second"] = int(value) - - elif token == "S": - # We have the *most significant* digits of an arbitrary-precision integer. - # We want the six most significant digits as an integer, rounded. - # IDEA: add nanosecond support somehow? Need datetime support for it first. - value = value.ljust(7, str("0")) - - # floating-point (IEEE-754) defaults to half-to-even rounding - seventh_digit = int(value[6]) - if seventh_digit == 5: - rounding = int(value[5]) % 2 - elif seventh_digit > 5: - rounding = 1 - else: - rounding = 0 - - parts["microsecond"] = int(value[:6]) + rounding - - elif token == "X": - parts["timestamp"] = float(value) - - elif token == "x": - parts["expanded_timestamp"] = int(value) - - elif token in ["ZZZ", "ZZ", "Z"]: - parts["tzinfo"] = TzinfoParser.parse(value) - - elif token in ["a", "A"]: - if value in (self.locale.meridians["am"], self.locale.meridians["AM"]): - parts["am_pm"] = "am" - elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]): - parts["am_pm"] = "pm" - - elif token == "W": - parts["weekdate"] = value - - @staticmethod - def _build_datetime(parts): - - weekdate = parts.get("weekdate") - - if weekdate is not None: - # we can use strptime (%G, %V, %u) in python 3.6 but these tokens aren't available before that - year, week = int(weekdate[0]), int(weekdate[1]) - - if weekdate[2] is not None: - day = int(weekdate[2]) - else: - # day not given, default to 1 - day = 1 - - dt = iso_to_gregorian(year, week, day) - parts["year"] = dt.year - parts["month"] = dt.month - parts["day"] = dt.day - - timestamp = parts.get("timestamp") - - if timestamp is not None: - return datetime.fromtimestamp(timestamp, tz=tz.tzutc()) - - expanded_timestamp = parts.get("expanded_timestamp") - - if expanded_timestamp is not None: - return datetime.fromtimestamp( - normalize_timestamp(expanded_timestamp), - tz=tz.tzutc(), - ) - - day_of_year = parts.get("day_of_year") - - if 
day_of_year is not None: - year = parts.get("year") - month = parts.get("month") - if year is None: - raise ParserError( - "Year component is required with the DDD and DDDD tokens." - ) - - if month is not None: - raise ParserError( - "Month component is not allowed with the DDD and DDDD tokens." - ) - - date_string = "{}-{}".format(year, day_of_year) - try: - dt = datetime.strptime(date_string, "%Y-%j") - except ValueError: - raise ParserError( - "The provided day of year '{}' is invalid.".format(day_of_year) - ) - - parts["year"] = dt.year - parts["month"] = dt.month - parts["day"] = dt.day - - day_of_week = parts.get("day_of_week") - day = parts.get("day") - - # If day is passed, ignore day of week - if day_of_week is not None and day is None: - year = parts.get("year", 1970) - month = parts.get("month", 1) - day = 1 - - # dddd => first day of week after epoch - # dddd YYYY => first day of week in specified year - # dddd MM YYYY => first day of week in specified year and month - # dddd MM => first day after epoch in specified month - next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week) - parts["year"] = next_weekday_dt.year - parts["month"] = next_weekday_dt.month - parts["day"] = next_weekday_dt.day - - am_pm = parts.get("am_pm") - hour = parts.get("hour", 0) - - if am_pm == "pm" and hour < 12: - hour += 12 - elif am_pm == "am" and hour == 12: - hour = 0 - - # Support for midnight at the end of day - if hour == 24: - if parts.get("minute", 0) != 0: - raise ParserError("Midnight at the end of day must not contain minutes") - if parts.get("second", 0) != 0: - raise ParserError("Midnight at the end of day must not contain seconds") - if parts.get("microsecond", 0) != 0: - raise ParserError( - "Midnight at the end of day must not contain microseconds" - ) - hour = 0 - day_increment = 1 - else: - day_increment = 0 - - # account for rounding up to 1000000 - microsecond = parts.get("microsecond", 0) - if microsecond == 1000000: - microsecond = 0 - 
second_increment = 1 - else: - second_increment = 0 - - increment = timedelta(days=day_increment, seconds=second_increment) - - return ( - datetime( - year=parts.get("year", 1), - month=parts.get("month", 1), - day=parts.get("day", 1), - hour=hour, - minute=parts.get("minute", 0), - second=parts.get("second", 0), - microsecond=microsecond, - tzinfo=parts.get("tzinfo"), - ) - + increment - ) - - def _parse_multiformat(self, string, formats): - - _datetime = None - - for fmt in formats: - try: - _datetime = self.parse(string, fmt) - break - except ParserMatchError: - pass - - if _datetime is None: - raise ParserError( - "Could not match input '{}' to any of the following formats: {}".format( - string, ", ".join(formats) - ) - ) - - return _datetime - - # generates a capture group of choices separated by an OR operator - @staticmethod - def _generate_choice_re(choices, flags=0): - return re.compile(r"({})".format("|".join(choices)), flags=flags) - - -class TzinfoParser(object): - _TZINFO_RE = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$") - - @classmethod - def parse(cls, tzinfo_string): - - tzinfo = None - - if tzinfo_string == "local": - tzinfo = tz.tzlocal() - - elif tzinfo_string in ["utc", "UTC", "Z"]: - tzinfo = tz.tzutc() - - else: - - iso_match = cls._TZINFO_RE.match(tzinfo_string) - - if iso_match: - sign, hours, minutes = iso_match.groups() - if minutes is None: - minutes = 0 - seconds = int(hours) * 3600 + int(minutes) * 60 - - if sign == "-": - seconds *= -1 - - tzinfo = tz.tzoffset(None, seconds) - - else: - tzinfo = tz.gettz(tzinfo_string) - - if tzinfo is None: - raise ParserError( - 'Could not parse timezone expression "{}"'.format(tzinfo_string) - ) - - return tzinfo diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py deleted file mode 100644 index acce8878df..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/arrow/util.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -import datetime -import numbers - -from dateutil.rrule import WEEKLY, rrule - -from arrow.constants import MAX_TIMESTAMP, MAX_TIMESTAMP_MS, MAX_TIMESTAMP_US - - -def next_weekday(start_date, weekday): - """Get next weekday from the specified start date. - - :param start_date: Datetime object representing the start date. - :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday). - :return: Datetime object corresponding to the next weekday after start_date. - - Usage:: - - # Get first Monday after epoch - >>> next_weekday(datetime(1970, 1, 1), 0) - 1970-01-05 00:00:00 - - # Get first Thursday after epoch - >>> next_weekday(datetime(1970, 1, 1), 3) - 1970-01-01 00:00:00 - - # Get first Sunday after epoch - >>> next_weekday(datetime(1970, 1, 1), 6) - 1970-01-04 00:00:00 - """ - if weekday < 0 or weekday > 6: - raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).") - return rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0] - - -def total_seconds(td): - """Get total seconds for timedelta.""" - return td.total_seconds() - - -def is_timestamp(value): - """Check if value is a valid timestamp.""" - if isinstance(value, bool): - return False - if not ( - isinstance(value, numbers.Integral) - or isinstance(value, float) - or isinstance(value, str) - ): - return False - try: - float(value) - return True - except ValueError: - return False - - -def normalize_timestamp(timestamp): - """Normalize millisecond and microsecond timestamps into normal timestamps.""" - if timestamp > MAX_TIMESTAMP: - if timestamp < MAX_TIMESTAMP_MS: - timestamp /= 1e3 - elif timestamp < MAX_TIMESTAMP_US: - timestamp /= 1e6 - else: - raise ValueError( - "The specified timestamp '{}' is too large.".format(timestamp) - ) - return timestamp - - -# Credit to 
https://stackoverflow.com/a/1700069 -def iso_to_gregorian(iso_year, iso_week, iso_day): - """Converts an ISO week date tuple into a datetime object.""" - - if not 1 <= iso_week <= 53: - raise ValueError("ISO Calendar week value must be between 1-53.") - - if not 1 <= iso_day <= 7: - raise ValueError("ISO Calendar day value must be between 1-7") - - # The first week of the year always contains 4 Jan. - fourth_jan = datetime.date(iso_year, 1, 4) - delta = datetime.timedelta(fourth_jan.isoweekday() - 1) - year_start = fourth_jan - delta - gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) - - return gregorian - - -def validate_bounds(bounds): - if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]": - raise ValueError( - 'Invalid bounds. Please select between "()", "(]", "[)", or "[]".' - ) - - -# Python 2.7 / 3.0+ definitions for isstr function. - -try: # pragma: no cover - basestring - - def isstr(s): - return isinstance(s, basestring) # noqa: F821 - - -except NameError: # pragma: no cover - - def isstr(s): - return isinstance(s, str) - - -__all__ = ["next_weekday", "total_seconds", "is_timestamp", "isstr", "iso_to_gregorian"] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile deleted file mode 100644 index d4bb2cbb9e..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". 
-help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py deleted file mode 100644 index aaf3c50822..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/conf.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- - -# -- Path setup -------------------------------------------------------------- - -import io -import os -import sys - -sys.path.insert(0, os.path.abspath("..")) - -about = {} -with io.open("../arrow/_version.py", "r", encoding="utf-8") as f: - exec(f.read(), about) - -# -- Project information ----------------------------------------------------- - -project = u"Arrow 🏹" -copyright = "2020, Chris Smith" -author = "Chris Smith" - -release = about["__version__"] - -# -- General configuration --------------------------------------------------- - -extensions = ["sphinx.ext.autodoc"] - -templates_path = [] - -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -master_doc = "index" -source_suffix = ".rst" -pygments_style = "sphinx" - -language = None - -# -- Options for HTML output ------------------------------------------------- - -html_theme = "alabaster" -html_theme_path = [] -html_static_path = [] - -html_show_sourcelink = False -html_show_sphinx = False -html_show_copyright = True - -# https://alabaster.readthedocs.io/en/latest/customization.html -html_theme_options = { - "description": "Arrow is a sensible and human-friendly approach to dates, times and timestamps.", - "github_user": "arrow-py", - "github_repo": "arrow", - "github_banner": True, - "show_related": False, - 
"show_powered_by": False, - "github_button": True, - "github_type": "star", - "github_count": "true", # must be a string -} - -html_sidebars = { - "**": ["about.html", "localtoc.html", "relations.html", "searchbox.html"] -} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst deleted file mode 100644 index e2830b04f3..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/index.rst +++ /dev/null @@ -1,566 +0,0 @@ -Arrow: Better dates & times for Python -====================================== - -Release v\ |release| (`Installation`_) (`Changelog `_) - -.. include:: ../README.rst - :start-after: start-inclusion-marker-do-not-remove - :end-before: end-inclusion-marker-do-not-remove - -User's Guide ------------- - -Creation -~~~~~~~~ - -Get 'now' easily: - -.. code-block:: python - - >>> arrow.utcnow() - - - >>> arrow.now() - - - >>> arrow.now('US/Pacific') - - -Create from timestamps (:code:`int` or :code:`float`): - -.. code-block:: python - - >>> arrow.get(1367900664) - - - >>> arrow.get(1367900664.152325) - - -Use a naive or timezone-aware datetime, or flexibly specify a timezone: - -.. code-block:: python - - >>> arrow.get(datetime.utcnow()) - - - >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific') - - - >>> from dateutil import tz - >>> arrow.get(datetime(2013, 5, 5), tz.gettz('US/Pacific')) - - - >>> arrow.get(datetime.now(tz.gettz('US/Pacific'))) - - -Parse from a string: - -.. code-block:: python - - >>> arrow.get('2013-05-05 12:30:45', 'YYYY-MM-DD HH:mm:ss') - - -Search a date in a string: - -.. code-block:: python - - >>> arrow.get('June was born in May 1980', 'MMMM YYYY') - - -Some ISO 8601 compliant strings are recognized and parsed without a format string: - - >>> arrow.get('2013-09-30T15:34:00.000-07:00') - - -Arrow objects can be instantiated directly too, with the same arguments as a datetime: - -.. 
code-block:: python - - >>> arrow.get(2013, 5, 5) - - - >>> arrow.Arrow(2013, 5, 5) - - -Properties -~~~~~~~~~~ - -Get a datetime or timestamp representation: - -.. code-block:: python - - >>> a = arrow.utcnow() - >>> a.datetime - datetime.datetime(2013, 5, 7, 4, 38, 15, 447644, tzinfo=tzutc()) - - >>> a.timestamp - 1367901495 - -Get a naive datetime, and tzinfo: - -.. code-block:: python - - >>> a.naive - datetime.datetime(2013, 5, 7, 4, 38, 15, 447644) - - >>> a.tzinfo - tzutc() - -Get any datetime value: - -.. code-block:: python - - >>> a.year - 2013 - -Call datetime functions that return properties: - -.. code-block:: python - - >>> a.date() - datetime.date(2013, 5, 7) - - >>> a.time() - datetime.time(4, 38, 15, 447644) - -Replace & Shift -~~~~~~~~~~~~~~~ - -Get a new :class:`Arrow ` object, with altered attributes, just as you would with a datetime: - -.. code-block:: python - - >>> arw = arrow.utcnow() - >>> arw - - - >>> arw.replace(hour=4, minute=40) - - -Or, get one with attributes shifted forward or backward: - -.. code-block:: python - - >>> arw.shift(weeks=+3) - - -Even replace the timezone without altering other attributes: - -.. code-block:: python - - >>> arw.replace(tzinfo='US/Pacific') - - -Move between the earlier and later moments of an ambiguous time: - -.. code-block:: python - - >>> paris_transition = arrow.Arrow(2019, 10, 27, 2, tzinfo="Europe/Paris", fold=0) - >>> paris_transition - - >>> paris_transition.ambiguous - True - >>> paris_transition.replace(fold=1) - - -Format -~~~~~~ - -.. code-block:: python - - >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ') - '2013-05-07 05:23:16 -00:00' - -Convert -~~~~~~~ - -Convert from UTC to other timezones by name or tzinfo: - -.. code-block:: python - - >>> utc = arrow.utcnow() - >>> utc - - - >>> utc.to('US/Pacific') - - - >>> utc.to(tz.gettz('US/Pacific')) - - -Or using shorthand: - -.. 
code-block:: python - - >>> utc.to('local') - - - >>> utc.to('local').to('utc') - - - -Humanize -~~~~~~~~ - -Humanize relative to now: - -.. code-block:: python - - >>> past = arrow.utcnow().shift(hours=-1) - >>> past.humanize() - 'an hour ago' - -Or another Arrow, or datetime: - -.. code-block:: python - - >>> present = arrow.utcnow() - >>> future = present.shift(hours=2) - >>> future.humanize(present) - 'in 2 hours' - -Indicate time as relative or include only the distance - -.. code-block:: python - - >>> present = arrow.utcnow() - >>> future = present.shift(hours=2) - >>> future.humanize(present) - 'in 2 hours' - >>> future.humanize(present, only_distance=True) - '2 hours' - - -Indicate a specific time granularity (or multiple): - -.. code-block:: python - - >>> present = arrow.utcnow() - >>> future = present.shift(minutes=66) - >>> future.humanize(present, granularity="minute") - 'in 66 minutes' - >>> future.humanize(present, granularity=["hour", "minute"]) - 'in an hour and 6 minutes' - >>> present.humanize(future, granularity=["hour", "minute"]) - 'an hour and 6 minutes ago' - >>> future.humanize(present, only_distance=True, granularity=["hour", "minute"]) - 'an hour and 6 minutes' - -Support for a growing number of locales (see ``locales.py`` for supported languages): - -.. code-block:: python - - - >>> future = arrow.utcnow().shift(hours=1) - >>> future.humanize(a, locale='ru') - 'через 2 час(а,ов)' - - -Ranges & Spans -~~~~~~~~~~~~~~ - -Get the time span of any unit: - -.. code-block:: python - - >>> arrow.utcnow().span('hour') - (, ) - -Or just get the floor and ceiling: - -.. code-block:: python - - >>> arrow.utcnow().floor('hour') - - - >>> arrow.utcnow().ceil('hour') - - -You can also get a range of time spans: - -.. code-block:: python - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.span_range('hour', start, end): - ... print r - ... 
- (, ) - (, ) - (, ) - (, ) - (, ) - -Or just iterate over a range of time: - -.. code-block:: python - - >>> start = datetime(2013, 5, 5, 12, 30) - >>> end = datetime(2013, 5, 5, 17, 15) - >>> for r in arrow.Arrow.range('hour', start, end): - ... print repr(r) - ... - - - - - - -.. toctree:: - :maxdepth: 2 - -Factories -~~~~~~~~~ - -Use factories to harness Arrow's module API for a custom Arrow-derived type. First, derive your type: - -.. code-block:: python - - >>> class CustomArrow(arrow.Arrow): - ... - ... def days_till_xmas(self): - ... - ... xmas = arrow.Arrow(self.year, 12, 25) - ... - ... if self > xmas: - ... xmas = xmas.shift(years=1) - ... - ... return (xmas - self).days - - -Then get and use a factory for it: - -.. code-block:: python - - >>> factory = arrow.ArrowFactory(CustomArrow) - >>> custom = factory.utcnow() - >>> custom - >>> - - >>> custom.days_till_xmas() - >>> 211 - -Supported Tokens -~~~~~~~~~~~~~~~~ - -Use the following tokens for parsing and formatting. Note that they are **not** the same as the tokens for `strptime `_: - -+--------------------------------+--------------+-------------------------------------------+ -| |Token |Output | -+================================+==============+===========================================+ -|**Year** |YYYY |2000, 2001, 2002 ... 2012, 2013 | -+--------------------------------+--------------+-------------------------------------------+ -| |YY |00, 01, 02 ... 12, 13 | -+--------------------------------+--------------+-------------------------------------------+ -|**Month** |MMMM |January, February, March ... [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |MMM |Jan, Feb, Mar ... [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |MM |01, 02, 03 ... 11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -| |M |1, 2, 3 ... 
11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -|**Day of Year** |DDDD |001, 002, 003 ... 364, 365 | -+--------------------------------+--------------+-------------------------------------------+ -| |DDD |1, 2, 3 ... 364, 365 | -+--------------------------------+--------------+-------------------------------------------+ -|**Day of Month** |DD |01, 02, 03 ... 30, 31 | -+--------------------------------+--------------+-------------------------------------------+ -| |D |1, 2, 3 ... 30, 31 | -+--------------------------------+--------------+-------------------------------------------+ -| |Do |1st, 2nd, 3rd ... 30th, 31st | -+--------------------------------+--------------+-------------------------------------------+ -|**Day of Week** |dddd |Monday, Tuesday, Wednesday ... [#t2]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |ddd |Mon, Tue, Wed ... [#t2]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |d |1, 2, 3 ... 6, 7 | -+--------------------------------+--------------+-------------------------------------------+ -|**ISO week date** |W |2011-W05-4, 2019-W17 | -+--------------------------------+--------------+-------------------------------------------+ -|**Hour** |HH |00, 01, 02 ... 23, 24 | -+--------------------------------+--------------+-------------------------------------------+ -| |H |0, 1, 2 ... 23, 24 | -+--------------------------------+--------------+-------------------------------------------+ -| |hh |01, 02, 03 ... 11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -| |h |1, 2, 3 ... 
11, 12 | -+--------------------------------+--------------+-------------------------------------------+ -|**AM / PM** |A |AM, PM, am, pm [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |a |am, pm [#t1]_ | -+--------------------------------+--------------+-------------------------------------------+ -|**Minute** |mm |00, 01, 02 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -| |m |0, 1, 2 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -|**Second** |ss |00, 01, 02 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -| |s |0, 1, 2 ... 58, 59 | -+--------------------------------+--------------+-------------------------------------------+ -|**Sub-second** |S... |0, 02, 003, 000006, 123123123123... [#t3]_ | -+--------------------------------+--------------+-------------------------------------------+ -|**Timezone** |ZZZ |Asia/Baku, Europe/Warsaw, GMT ... [#t4]_ | -+--------------------------------+--------------+-------------------------------------------+ -| |ZZ |-07:00, -06:00 ... +06:00, +07:00, +08, Z | -+--------------------------------+--------------+-------------------------------------------+ -| |Z |-0700, -0600 ... +0600, +0700, +08, Z | -+--------------------------------+--------------+-------------------------------------------+ -|**Seconds Timestamp** |X |1381685817, 1381685817.915482 ... [#t5]_ | -+--------------------------------+--------------+-------------------------------------------+ -|**ms or µs Timestamp** |x |1569980330813, 1569980330813221 | -+--------------------------------+--------------+-------------------------------------------+ - -.. rubric:: Footnotes - -.. [#t1] localization support for parsing and formatting -.. [#t2] localization support only for formatting -.. 
[#t3] the result is truncated to microseconds, with `half-to-even rounding `_. -.. [#t4] timezone names from `tz database `_ provided via dateutil package, note that abbreviations such as MST, PDT, BRST are unlikely to parse due to ambiguity. Use the full IANA zone name instead (Asia/Shanghai, Europe/London, America/Chicago etc). -.. [#t5] this token cannot be used for parsing timestamps out of natural language strings due to compatibility reasons - -Built-in Formats -++++++++++++++++ - -There are several formatting standards that are provided as built-in tokens. - -.. code-block:: python - - >>> arw = arrow.utcnow() - >>> arw.format(arrow.FORMAT_ATOM) - '2020-05-27 10:30:35+00:00' - >>> arw.format(arrow.FORMAT_COOKIE) - 'Wednesday, 27-May-2020 10:30:35 UTC' - >>> arw.format(arrow.FORMAT_RSS) - 'Wed, 27 May 2020 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC822) - 'Wed, 27 May 20 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC850) - 'Wednesday, 27-May-20 10:30:35 UTC' - >>> arw.format(arrow.FORMAT_RFC1036) - 'Wed, 27 May 20 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC1123) - 'Wed, 27 May 2020 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC2822) - 'Wed, 27 May 2020 10:30:35 +0000' - >>> arw.format(arrow.FORMAT_RFC3339) - '2020-05-27 10:30:35+00:00' - >>> arw.format(arrow.FORMAT_W3C) - '2020-05-27 10:30:35+00:00' - -Escaping Formats -~~~~~~~~~~~~~~~~ - -Tokens, phrases, and regular expressions in a format string can be escaped when parsing and formatting by enclosing them within square brackets. - -Tokens & Phrases -++++++++++++++++ - -Any `token `_ or phrase can be escaped as follows: - -.. 
code-block:: python - - >>> fmt = "YYYY-MM-DD h [h] m" - >>> arw = arrow.get("2018-03-09 8 h 40", fmt) - - >>> arw.format(fmt) - '2018-03-09 8 h 40' - - >>> fmt = "YYYY-MM-DD h [hello] m" - >>> arw = arrow.get("2018-03-09 8 hello 40", fmt) - - >>> arw.format(fmt) - '2018-03-09 8 hello 40' - - >>> fmt = "YYYY-MM-DD h [hello world] m" - >>> arw = arrow.get("2018-03-09 8 hello world 40", fmt) - - >>> arw.format(fmt) - '2018-03-09 8 hello world 40' - -This can be useful for parsing dates in different locales such as French, in which it is common to format time strings as "8 h 40" rather than "8:40". - -Regular Expressions -+++++++++++++++++++ - -You can also escape regular expressions by enclosing them within square brackets. In the following example, we are using the regular expression :code:`\s+` to match any number of whitespace characters that separate the tokens. This is useful if you do not know the number of spaces between tokens ahead of time (e.g. in log files). - -.. code-block:: python - - >>> fmt = r"ddd[\s+]MMM[\s+]DD[\s+]HH:mm:ss[\s+]YYYY" - >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) - - - >>> arrow.get("Mon \tSep 08 16:41:45 2014", fmt) - - - >>> arrow.get("Mon Sep 08 16:41:45 2014", fmt) - - -Punctuation -~~~~~~~~~~~ - -Date and time formats may be fenced on either side by one punctuation character from the following list: ``, . ; : ? ! " \` ' [ ] { } ( ) < >`` - -.. 
code-block:: python - - >>> arrow.get("Cool date: 2019-10-31T09:12:45.123456+04:30.", "YYYY-MM-DDTHH:mm:ss.SZZ") - - - >>> arrow.get("Tomorrow (2019-10-31) is Halloween!", "YYYY-MM-DD") - - - >>> arrow.get("Halloween is on 2019.10.31.", "YYYY.MM.DD") - - - >>> arrow.get("It's Halloween tomorrow (2019-10-31)!", "YYYY-MM-DD") - # Raises exception because there are multiple punctuation marks following the date - -Redundant Whitespace -~~~~~~~~~~~~~~~~~~~~ - -Redundant whitespace characters (spaces, tabs, and newlines) can be normalized automatically by passing in the ``normalize_whitespace`` flag to ``arrow.get``: - -.. code-block:: python - - >>> arrow.get('\t \n 2013-05-05T12:30:45.123456 \t \n', normalize_whitespace=True) - - - >>> arrow.get('2013-05-05 T \n 12:30:45\t123456', 'YYYY-MM-DD T HH:mm:ss S', normalize_whitespace=True) - - -API Guide ---------- - -arrow.arrow -~~~~~~~~~~~ - -.. automodule:: arrow.arrow - :members: - -arrow.factory -~~~~~~~~~~~~~ - -.. automodule:: arrow.factory - :members: - -arrow.api -~~~~~~~~~ - -.. automodule:: arrow.api - :members: - -arrow.locale -~~~~~~~~~~~~ - -.. automodule:: arrow.locales - :members: - :undoc-members: - -Release History ---------------- - -.. toctree:: - :maxdepth: 2 - - releases diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat deleted file mode 100644 index 922152e96a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst deleted file mode 100644 index 22e1e59c8c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/docs/releases.rst +++ /dev/null @@ -1,3 +0,0 @@ -.. _releases: - -.. include:: ../CHANGELOG.rst diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt deleted file mode 100644 index df565d8384..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -backports.functools_lru_cache==1.6.1; python_version == "2.7" -dateparser==0.7.* -pre-commit==1.21.*; python_version <= "3.5" -pre-commit==2.6.*; python_version >= "3.6" -pytest==4.6.*; python_version == "2.7" -pytest==6.0.*; python_version >= "3.5" -pytest-cov==2.10.* -pytest-mock==2.0.*; python_version == "2.7" -pytest-mock==3.2.*; python_version >= "3.5" -python-dateutil==2.8.* -pytz==2019.* -simplejson==3.17.* -sphinx==1.8.*; python_version == "2.7" -sphinx==3.2.*; python_version >= "3.5" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg deleted file mode 100644 index 2a9acf13da..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -universal = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py deleted file mode 100644 index dc4f0e77d5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/setup.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -import io - -from setuptools import setup - -with io.open("README.rst", "r", encoding="utf-8") as f: - readme = f.read() - -about = {} -with io.open("arrow/_version.py", "r", encoding="utf-8") as f: - exec(f.read(), about) - -setup( - name="arrow", - version=about["__version__"], - description="Better dates & times for Python", - long_description=readme, - long_description_content_type="text/x-rst", - url="https://arrow.readthedocs.io", - author="Chris Smith", - author_email="crsmithdev@gmail.com", - license="Apache 2.0", - packages=["arrow"], - zip_safe=False, - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", - install_requires=[ - "python-dateutil>=2.7.0", - "backports.functools_lru_cache>=1.2.1;python_version=='2.7'", - ], - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Topic :: Software Development :: Libraries :: Python Modules", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - ], - keywords="arrow date time datetime timestamp timezone humanize", - project_urls={ - "Repository": "https://github.com/arrow-py/arrow", - "Bug Reports": "https://github.com/arrow-py/arrow/issues", - 
"Documentation": "https://arrow.readthedocs.io", - }, -) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py deleted file mode 100644 index 5bc8a4af2e..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/conftest.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -from datetime import datetime - -import pytest -from dateutil import tz as dateutil_tz - -from arrow import arrow, factory, formatter, locales, parser - - -@pytest.fixture(scope="class") -def time_utcnow(request): - request.cls.arrow = arrow.Arrow.utcnow() - - -@pytest.fixture(scope="class") -def time_2013_01_01(request): - request.cls.now = arrow.Arrow.utcnow() - request.cls.arrow = arrow.Arrow(2013, 1, 1) - request.cls.datetime = datetime(2013, 1, 1) - - -@pytest.fixture(scope="class") -def time_2013_02_03(request): - request.cls.arrow = arrow.Arrow(2013, 2, 3, 12, 30, 45, 1) - - -@pytest.fixture(scope="class") -def time_2013_02_15(request): - request.cls.datetime = datetime(2013, 2, 15, 3, 41, 22, 8923) - request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) - - -@pytest.fixture(scope="class") -def time_1975_12_25(request): - request.cls.datetime = datetime( - 1975, 12, 25, 14, 15, 16, tzinfo=dateutil_tz.gettz("America/New_York") - ) - request.cls.arrow = arrow.Arrow.fromdatetime(request.cls.datetime) - - -@pytest.fixture(scope="class") -def arrow_formatter(request): - request.cls.formatter = formatter.DateTimeFormatter() - - -@pytest.fixture(scope="class") -def arrow_factory(request): - request.cls.factory = factory.ArrowFactory() - - -@pytest.fixture(scope="class") -def lang_locales(request): - 
request.cls.locales = locales._locales - - -@pytest.fixture(scope="class") -def lang_locale(request): - # As locale test classes are prefixed with Test, we are dynamically getting the locale by the test class name. - # TestEnglishLocale -> EnglishLocale - name = request.cls.__name__[4:] - request.cls.locale = locales.get_locale_by_class_name(name) - - -@pytest.fixture(scope="class") -def dt_parser(request): - request.cls.parser = parser.DateTimeParser() - - -@pytest.fixture(scope="class") -def dt_parser_regex(request): - request.cls.format_regex = parser.DateTimeParser._FORMAT_RE - - -@pytest.fixture(scope="class") -def tzinfo_parser(request): - request.cls.parser = parser.TzinfoParser() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py deleted file mode 100644 index 9b19a27cd9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_api.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -import arrow - - -class TestModule: - def test_get(self, mocker): - mocker.patch("arrow.api._factory.get", return_value="result") - - assert arrow.api.get() == "result" - - def test_utcnow(self, mocker): - mocker.patch("arrow.api._factory.utcnow", return_value="utcnow") - - assert arrow.api.utcnow() == "utcnow" - - def test_now(self, mocker): - mocker.patch("arrow.api._factory.now", tz="tz", return_value="now") - - assert arrow.api.now("tz") == "now" - - def test_factory(self): - class MockCustomArrowClass(arrow.Arrow): - pass - - result = arrow.api.factory(MockCustomArrowClass) - - assert isinstance(result, arrow.factory.ArrowFactory) - assert isinstance(result.utcnow(), MockCustomArrowClass) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py deleted file mode 100644 index b0bd20a5e3..0000000000 
--- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_arrow.py +++ /dev/null @@ -1,2150 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals - -import calendar -import pickle -import sys -import time -from datetime import date, datetime, timedelta - -import dateutil -import pytest -import pytz -import simplejson as json -from dateutil import tz -from dateutil.relativedelta import FR, MO, SA, SU, TH, TU, WE - -from arrow import arrow - -from .utils import assert_datetime_equality - - -class TestTestArrowInit: - def test_init_bad_input(self): - - with pytest.raises(TypeError): - arrow.Arrow(2013) - - with pytest.raises(TypeError): - arrow.Arrow(2013, 2) - - with pytest.raises(ValueError): - arrow.Arrow(2013, 2, 2, 12, 30, 45, 9999999) - - def test_init(self): - - result = arrow.Arrow(2013, 2, 2) - self.expected = datetime(2013, 2, 2, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12) - self.expected = datetime(2013, 2, 2, 12, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12, 30) - self.expected = datetime(2013, 2, 2, 12, 30, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12, 30, 45) - self.expected = datetime(2013, 2, 2, 12, 30, 45, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow(2013, 2, 2, 12, 30, 45, 999999) - self.expected = datetime(2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.tzutc()) - assert result._datetime == self.expected - - result = arrow.Arrow( - 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") - ) - self.expected = datetime( - 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") - ) - assert result._datetime == self.expected - - # regression tests for issue #626 - def test_init_pytz_timezone(self): - - result = arrow.Arrow( - 2013, 2, 2, 12, 30, 45, 999999, 
tzinfo=pytz.timezone("Europe/Paris") - ) - self.expected = datetime( - 2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris") - ) - assert result._datetime == self.expected - assert_datetime_equality(result._datetime, self.expected, 1) - - def test_init_with_fold(self): - before = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") - after = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1) - - assert hasattr(before, "fold") - assert hasattr(after, "fold") - - # PEP-495 requires the comparisons below to be true - assert before == after - assert before.utcoffset() != after.utcoffset() - - -class TestTestArrowFactory: - def test_now(self): - - result = arrow.Arrow.now() - - assert_datetime_equality( - result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) - ) - - def test_utcnow(self): - - result = arrow.Arrow.utcnow() - - assert_datetime_equality( - result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - assert result.fold == 0 - - def test_fromtimestamp(self): - - timestamp = time.time() - - result = arrow.Arrow.fromtimestamp(timestamp) - assert_datetime_equality( - result._datetime, datetime.now().replace(tzinfo=tz.tzlocal()) - ) - - result = arrow.Arrow.fromtimestamp(timestamp, tzinfo=tz.gettz("Europe/Paris")) - assert_datetime_equality( - result._datetime, - datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), - ) - - result = arrow.Arrow.fromtimestamp(timestamp, tzinfo="Europe/Paris") - assert_datetime_equality( - result._datetime, - datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")), - ) - - with pytest.raises(ValueError): - arrow.Arrow.fromtimestamp("invalid timestamp") - - def test_utcfromtimestamp(self): - - timestamp = time.time() - - result = arrow.Arrow.utcfromtimestamp(timestamp) - assert_datetime_equality( - result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - with pytest.raises(ValueError): - arrow.Arrow.utcfromtimestamp("invalid timestamp") - - def 
test_fromdatetime(self): - - dt = datetime(2013, 2, 3, 12, 30, 45, 1) - - result = arrow.Arrow.fromdatetime(dt) - - assert result._datetime == dt.replace(tzinfo=tz.tzutc()) - - def test_fromdatetime_dt_tzinfo(self): - - dt = datetime(2013, 2, 3, 12, 30, 45, 1, tzinfo=tz.gettz("US/Pacific")) - - result = arrow.Arrow.fromdatetime(dt) - - assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) - - def test_fromdatetime_tzinfo_arg(self): - - dt = datetime(2013, 2, 3, 12, 30, 45, 1) - - result = arrow.Arrow.fromdatetime(dt, tz.gettz("US/Pacific")) - - assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific")) - - def test_fromdate(self): - - dt = date(2013, 2, 3) - - result = arrow.Arrow.fromdate(dt, tz.gettz("US/Pacific")) - - assert result._datetime == datetime(2013, 2, 3, tzinfo=tz.gettz("US/Pacific")) - - def test_strptime(self): - - formatted = datetime(2013, 2, 3, 12, 30, 45).strftime("%Y-%m-%d %H:%M:%S") - - result = arrow.Arrow.strptime(formatted, "%Y-%m-%d %H:%M:%S") - assert result._datetime == datetime(2013, 2, 3, 12, 30, 45, tzinfo=tz.tzutc()) - - result = arrow.Arrow.strptime( - formatted, "%Y-%m-%d %H:%M:%S", tzinfo=tz.gettz("Europe/Paris") - ) - assert result._datetime == datetime( - 2013, 2, 3, 12, 30, 45, tzinfo=tz.gettz("Europe/Paris") - ) - - -@pytest.mark.usefixtures("time_2013_02_03") -class TestTestArrowRepresentation: - def test_repr(self): - - result = self.arrow.__repr__() - - assert result == "".format(self.arrow._datetime.isoformat()) - - def test_str(self): - - result = self.arrow.__str__() - - assert result == self.arrow._datetime.isoformat() - - def test_hash(self): - - result = self.arrow.__hash__() - - assert result == self.arrow._datetime.__hash__() - - def test_format(self): - - result = "{:YYYY-MM-DD}".format(self.arrow) - - assert result == "2013-02-03" - - def test_bare_format(self): - - result = self.arrow.format() - - assert result == "2013-02-03 12:30:45+00:00" - - def test_format_no_format_string(self): - - 
result = "{}".format(self.arrow) - - assert result == str(self.arrow) - - def test_clone(self): - - result = self.arrow.clone() - - assert result is not self.arrow - assert result._datetime == self.arrow._datetime - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowAttribute: - def test_getattr_base(self): - - with pytest.raises(AttributeError): - self.arrow.prop - - def test_getattr_week(self): - - assert self.arrow.week == 1 - - def test_getattr_quarter(self): - # start dates - q1 = arrow.Arrow(2013, 1, 1) - q2 = arrow.Arrow(2013, 4, 1) - q3 = arrow.Arrow(2013, 8, 1) - q4 = arrow.Arrow(2013, 10, 1) - assert q1.quarter == 1 - assert q2.quarter == 2 - assert q3.quarter == 3 - assert q4.quarter == 4 - - # end dates - q1 = arrow.Arrow(2013, 3, 31) - q2 = arrow.Arrow(2013, 6, 30) - q3 = arrow.Arrow(2013, 9, 30) - q4 = arrow.Arrow(2013, 12, 31) - assert q1.quarter == 1 - assert q2.quarter == 2 - assert q3.quarter == 3 - assert q4.quarter == 4 - - def test_getattr_dt_value(self): - - assert self.arrow.year == 2013 - - def test_tzinfo(self): - - self.arrow.tzinfo = tz.gettz("PST") - assert self.arrow.tzinfo == tz.gettz("PST") - - def test_naive(self): - - assert self.arrow.naive == self.arrow._datetime.replace(tzinfo=None) - - def test_timestamp(self): - - assert self.arrow.timestamp == calendar.timegm( - self.arrow._datetime.utctimetuple() - ) - - with pytest.warns(DeprecationWarning): - self.arrow.timestamp - - def test_int_timestamp(self): - - assert self.arrow.int_timestamp == calendar.timegm( - self.arrow._datetime.utctimetuple() - ) - - def test_float_timestamp(self): - - result = self.arrow.float_timestamp - self.arrow.timestamp - - assert result == self.arrow.microsecond - - def test_getattr_fold(self): - - # UTC is always unambiguous - assert self.now.fold == 0 - - ambiguous_dt = arrow.Arrow( - 2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1 - ) - assert ambiguous_dt.fold == 1 - - with pytest.raises(AttributeError): - ambiguous_dt.fold = 0 - - 
def test_getattr_ambiguous(self): - - assert not self.now.ambiguous - - ambiguous_dt = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm") - - assert ambiguous_dt.ambiguous - - def test_getattr_imaginary(self): - - assert not self.now.imaginary - - imaginary_dt = arrow.Arrow(2013, 3, 31, 2, 30, tzinfo="Europe/Paris") - - assert imaginary_dt.imaginary - - -@pytest.mark.usefixtures("time_utcnow") -class TestArrowComparison: - def test_eq(self): - - assert self.arrow == self.arrow - assert self.arrow == self.arrow.datetime - assert not (self.arrow == "abc") - - def test_ne(self): - - assert not (self.arrow != self.arrow) - assert not (self.arrow != self.arrow.datetime) - assert self.arrow != "abc" - - def test_gt(self): - - arrow_cmp = self.arrow.shift(minutes=1) - - assert not (self.arrow > self.arrow) - assert not (self.arrow > self.arrow.datetime) - - with pytest.raises(TypeError): - self.arrow > "abc" - - assert self.arrow < arrow_cmp - assert self.arrow < arrow_cmp.datetime - - def test_ge(self): - - with pytest.raises(TypeError): - self.arrow >= "abc" - - assert self.arrow >= self.arrow - assert self.arrow >= self.arrow.datetime - - def test_lt(self): - - arrow_cmp = self.arrow.shift(minutes=1) - - assert not (self.arrow < self.arrow) - assert not (self.arrow < self.arrow.datetime) - - with pytest.raises(TypeError): - self.arrow < "abc" - - assert self.arrow < arrow_cmp - assert self.arrow < arrow_cmp.datetime - - def test_le(self): - - with pytest.raises(TypeError): - self.arrow <= "abc" - - assert self.arrow <= self.arrow - assert self.arrow <= self.arrow.datetime - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowMath: - def test_add_timedelta(self): - - result = self.arrow.__add__(timedelta(days=1)) - - assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc()) - - def test_add_other(self): - - with pytest.raises(TypeError): - self.arrow + 1 - - def test_radd(self): - - result = self.arrow.__radd__(timedelta(days=1)) - - assert 
result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc()) - - def test_sub_timedelta(self): - - result = self.arrow.__sub__(timedelta(days=1)) - - assert result._datetime == datetime(2012, 12, 31, tzinfo=tz.tzutc()) - - def test_sub_datetime(self): - - result = self.arrow.__sub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) - - assert result == timedelta(days=11) - - def test_sub_arrow(self): - - result = self.arrow.__sub__(arrow.Arrow(2012, 12, 21, tzinfo=tz.tzutc())) - - assert result == timedelta(days=11) - - def test_sub_other(self): - - with pytest.raises(TypeError): - self.arrow - object() - - def test_rsub_datetime(self): - - result = self.arrow.__rsub__(datetime(2012, 12, 21, tzinfo=tz.tzutc())) - - assert result == timedelta(days=-11) - - def test_rsub_other(self): - - with pytest.raises(TypeError): - timedelta(days=1) - self.arrow - - -@pytest.mark.usefixtures("time_utcnow") -class TestArrowDatetimeInterface: - def test_date(self): - - result = self.arrow.date() - - assert result == self.arrow._datetime.date() - - def test_time(self): - - result = self.arrow.time() - - assert result == self.arrow._datetime.time() - - def test_timetz(self): - - result = self.arrow.timetz() - - assert result == self.arrow._datetime.timetz() - - def test_astimezone(self): - - other_tz = tz.gettz("US/Pacific") - - result = self.arrow.astimezone(other_tz) - - assert result == self.arrow._datetime.astimezone(other_tz) - - def test_utcoffset(self): - - result = self.arrow.utcoffset() - - assert result == self.arrow._datetime.utcoffset() - - def test_dst(self): - - result = self.arrow.dst() - - assert result == self.arrow._datetime.dst() - - def test_timetuple(self): - - result = self.arrow.timetuple() - - assert result == self.arrow._datetime.timetuple() - - def test_utctimetuple(self): - - result = self.arrow.utctimetuple() - - assert result == self.arrow._datetime.utctimetuple() - - def test_toordinal(self): - - result = self.arrow.toordinal() - - assert result == 
self.arrow._datetime.toordinal() - - def test_weekday(self): - - result = self.arrow.weekday() - - assert result == self.arrow._datetime.weekday() - - def test_isoweekday(self): - - result = self.arrow.isoweekday() - - assert result == self.arrow._datetime.isoweekday() - - def test_isocalendar(self): - - result = self.arrow.isocalendar() - - assert result == self.arrow._datetime.isocalendar() - - def test_isoformat(self): - - result = self.arrow.isoformat() - - assert result == self.arrow._datetime.isoformat() - - def test_simplejson(self): - - result = json.dumps({"v": self.arrow.for_json()}, for_json=True) - - assert json.loads(result)["v"] == self.arrow._datetime.isoformat() - - def test_ctime(self): - - result = self.arrow.ctime() - - assert result == self.arrow._datetime.ctime() - - def test_strftime(self): - - result = self.arrow.strftime("%Y") - - assert result == self.arrow._datetime.strftime("%Y") - - -class TestArrowFalsePositiveDst: - """These tests relate to issues #376 and #551. - The key points in both issues are that arrow will assign a UTC timezone if none is provided and - .to() will change other attributes to be correct whereas .replace() only changes the specified attribute. 
- - Issue 376 - >>> arrow.get('2016-11-06').to('America/New_York').ceil('day') - < Arrow [2016-11-05T23:59:59.999999-04:00] > - - Issue 551 - >>> just_before = arrow.get('2018-11-04T01:59:59.999999') - >>> just_before - 2018-11-04T01:59:59.999999+00:00 - >>> just_after = just_before.shift(microseconds=1) - >>> just_after - 2018-11-04T02:00:00+00:00 - >>> just_before_eastern = just_before.replace(tzinfo='US/Eastern') - >>> just_before_eastern - 2018-11-04T01:59:59.999999-04:00 - >>> just_after_eastern = just_after.replace(tzinfo='US/Eastern') - >>> just_after_eastern - 2018-11-04T02:00:00-05:00 - """ - - def test_dst(self): - self.before_1 = arrow.Arrow( - 2016, 11, 6, 3, 59, tzinfo=tz.gettz("America/New_York") - ) - self.before_2 = arrow.Arrow(2016, 11, 6, tzinfo=tz.gettz("America/New_York")) - self.after_1 = arrow.Arrow(2016, 11, 6, 4, tzinfo=tz.gettz("America/New_York")) - self.after_2 = arrow.Arrow( - 2016, 11, 6, 23, 59, tzinfo=tz.gettz("America/New_York") - ) - self.before_3 = arrow.Arrow( - 2018, 11, 4, 3, 59, tzinfo=tz.gettz("America/New_York") - ) - self.before_4 = arrow.Arrow(2018, 11, 4, tzinfo=tz.gettz("America/New_York")) - self.after_3 = arrow.Arrow(2018, 11, 4, 4, tzinfo=tz.gettz("America/New_York")) - self.after_4 = arrow.Arrow( - 2018, 11, 4, 23, 59, tzinfo=tz.gettz("America/New_York") - ) - assert self.before_1.day == self.before_2.day - assert self.after_1.day == self.after_2.day - assert self.before_3.day == self.before_4.day - assert self.after_3.day == self.after_4.day - - -class TestArrowConversion: - def test_to(self): - - dt_from = datetime.now() - arrow_from = arrow.Arrow.fromdatetime(dt_from, tz.gettz("US/Pacific")) - - self.expected = dt_from.replace(tzinfo=tz.gettz("US/Pacific")).astimezone( - tz.tzutc() - ) - - assert arrow_from.to("UTC").datetime == self.expected - assert arrow_from.to(tz.tzutc()).datetime == self.expected - - # issue #368 - def test_to_pacific_then_utc(self): - result = arrow.Arrow(2018, 11, 4, 1, 
tzinfo="-08:00").to("US/Pacific").to("UTC") - assert result == arrow.Arrow(2018, 11, 4, 9) - - # issue #368 - def test_to_amsterdam_then_utc(self): - result = arrow.Arrow(2016, 10, 30).to("Europe/Amsterdam") - assert result.utcoffset() == timedelta(seconds=7200) - - # regression test for #690 - def test_to_israel_same_offset(self): - - result = arrow.Arrow(2019, 10, 27, 2, 21, 1, tzinfo="+03:00").to("Israel") - expected = arrow.Arrow(2019, 10, 27, 1, 21, 1, tzinfo="Israel") - - assert result == expected - assert result.utcoffset() != expected.utcoffset() - - # issue 315 - def test_anchorage_dst(self): - before = arrow.Arrow(2016, 3, 13, 1, 59, tzinfo="America/Anchorage") - after = arrow.Arrow(2016, 3, 13, 2, 1, tzinfo="America/Anchorage") - - assert before.utcoffset() != after.utcoffset() - - # issue 476 - def test_chicago_fall(self): - - result = arrow.Arrow(2017, 11, 5, 2, 1, tzinfo="-05:00").to("America/Chicago") - expected = arrow.Arrow(2017, 11, 5, 1, 1, tzinfo="America/Chicago") - - assert result == expected - assert result.utcoffset() != expected.utcoffset() - - def test_toronto_gap(self): - - before = arrow.Arrow(2011, 3, 13, 6, 30, tzinfo="UTC").to("America/Toronto") - after = arrow.Arrow(2011, 3, 13, 7, 30, tzinfo="UTC").to("America/Toronto") - - assert before.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 1, 30) - assert after.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 3, 30) - - assert before.utcoffset() != after.utcoffset() - - def test_sydney_gap(self): - - before = arrow.Arrow(2012, 10, 6, 15, 30, tzinfo="UTC").to("Australia/Sydney") - after = arrow.Arrow(2012, 10, 6, 16, 30, tzinfo="UTC").to("Australia/Sydney") - - assert before.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 1, 30) - assert after.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 3, 30) - - assert before.utcoffset() != after.utcoffset() - - -class TestArrowPickling: - def test_pickle_and_unpickle(self): - - dt = arrow.Arrow.utcnow() - - pickled = 
pickle.dumps(dt) - - unpickled = pickle.loads(pickled) - - assert unpickled == dt - - -class TestArrowReplace: - def test_not_attr(self): - - with pytest.raises(AttributeError): - arrow.Arrow.utcnow().replace(abc=1) - - def test_replace(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - assert arw.replace(year=2012) == arrow.Arrow(2012, 5, 5, 12, 30, 45) - assert arw.replace(month=1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) - assert arw.replace(day=1) == arrow.Arrow(2013, 5, 1, 12, 30, 45) - assert arw.replace(hour=1) == arrow.Arrow(2013, 5, 5, 1, 30, 45) - assert arw.replace(minute=1) == arrow.Arrow(2013, 5, 5, 12, 1, 45) - assert arw.replace(second=1) == arrow.Arrow(2013, 5, 5, 12, 30, 1) - - def test_replace_tzinfo(self): - - arw = arrow.Arrow.utcnow().to("US/Eastern") - - result = arw.replace(tzinfo=tz.gettz("US/Pacific")) - - assert result == arw.datetime.replace(tzinfo=tz.gettz("US/Pacific")) - - def test_replace_fold(self): - - before = arrow.Arrow(2017, 11, 5, 1, tzinfo="America/New_York") - after = before.replace(fold=1) - - assert before.fold == 0 - assert after.fold == 1 - assert before == after - assert before.utcoffset() != after.utcoffset() - - def test_replace_fold_and_other(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - assert arw.replace(fold=1, minute=50) == arrow.Arrow(2013, 5, 5, 12, 50, 45) - assert arw.replace(minute=50, fold=1) == arrow.Arrow(2013, 5, 5, 12, 50, 45) - - def test_replace_week(self): - - with pytest.raises(AttributeError): - arrow.Arrow.utcnow().replace(week=1) - - def test_replace_quarter(self): - - with pytest.raises(AttributeError): - arrow.Arrow.utcnow().replace(quarter=1) - - def test_replace_quarter_and_fold(self): - with pytest.raises(AttributeError): - arrow.utcnow().replace(fold=1, quarter=1) - - with pytest.raises(AttributeError): - arrow.utcnow().replace(quarter=1, fold=1) - - def test_replace_other_kwargs(self): - - with pytest.raises(AttributeError): - arrow.utcnow().replace(abc="def") - - -class 
TestArrowShift: - def test_not_attr(self): - - now = arrow.Arrow.utcnow() - - with pytest.raises(AttributeError): - now.shift(abc=1) - - with pytest.raises(AttributeError): - now.shift(week=1) - - def test_shift(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - assert arw.shift(years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45) - assert arw.shift(quarters=1) == arrow.Arrow(2013, 8, 5, 12, 30, 45) - assert arw.shift(quarters=1, months=1) == arrow.Arrow(2013, 9, 5, 12, 30, 45) - assert arw.shift(months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45) - assert arw.shift(weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45) - assert arw.shift(days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45) - assert arw.shift(hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45) - assert arw.shift(minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45) - assert arw.shift(seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46) - assert arw.shift(microseconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 45, 1) - - # Remember: Python's weekday 0 is Monday - assert arw.shift(weekday=0) == arrow.Arrow(2013, 5, 6, 12, 30, 45) - assert arw.shift(weekday=1) == arrow.Arrow(2013, 5, 7, 12, 30, 45) - assert arw.shift(weekday=2) == arrow.Arrow(2013, 5, 8, 12, 30, 45) - assert arw.shift(weekday=3) == arrow.Arrow(2013, 5, 9, 12, 30, 45) - assert arw.shift(weekday=4) == arrow.Arrow(2013, 5, 10, 12, 30, 45) - assert arw.shift(weekday=5) == arrow.Arrow(2013, 5, 11, 12, 30, 45) - assert arw.shift(weekday=6) == arw - - with pytest.raises(IndexError): - arw.shift(weekday=7) - - # Use dateutil.relativedelta's convenient day instances - assert arw.shift(weekday=MO) == arrow.Arrow(2013, 5, 6, 12, 30, 45) - assert arw.shift(weekday=MO(0)) == arrow.Arrow(2013, 5, 6, 12, 30, 45) - assert arw.shift(weekday=MO(1)) == arrow.Arrow(2013, 5, 6, 12, 30, 45) - assert arw.shift(weekday=MO(2)) == arrow.Arrow(2013, 5, 13, 12, 30, 45) - assert arw.shift(weekday=TU) == arrow.Arrow(2013, 5, 7, 12, 30, 45) - assert arw.shift(weekday=TU(0)) == arrow.Arrow(2013, 
5, 7, 12, 30, 45) - assert arw.shift(weekday=TU(1)) == arrow.Arrow(2013, 5, 7, 12, 30, 45) - assert arw.shift(weekday=TU(2)) == arrow.Arrow(2013, 5, 14, 12, 30, 45) - assert arw.shift(weekday=WE) == arrow.Arrow(2013, 5, 8, 12, 30, 45) - assert arw.shift(weekday=WE(0)) == arrow.Arrow(2013, 5, 8, 12, 30, 45) - assert arw.shift(weekday=WE(1)) == arrow.Arrow(2013, 5, 8, 12, 30, 45) - assert arw.shift(weekday=WE(2)) == arrow.Arrow(2013, 5, 15, 12, 30, 45) - assert arw.shift(weekday=TH) == arrow.Arrow(2013, 5, 9, 12, 30, 45) - assert arw.shift(weekday=TH(0)) == arrow.Arrow(2013, 5, 9, 12, 30, 45) - assert arw.shift(weekday=TH(1)) == arrow.Arrow(2013, 5, 9, 12, 30, 45) - assert arw.shift(weekday=TH(2)) == arrow.Arrow(2013, 5, 16, 12, 30, 45) - assert arw.shift(weekday=FR) == arrow.Arrow(2013, 5, 10, 12, 30, 45) - assert arw.shift(weekday=FR(0)) == arrow.Arrow(2013, 5, 10, 12, 30, 45) - assert arw.shift(weekday=FR(1)) == arrow.Arrow(2013, 5, 10, 12, 30, 45) - assert arw.shift(weekday=FR(2)) == arrow.Arrow(2013, 5, 17, 12, 30, 45) - assert arw.shift(weekday=SA) == arrow.Arrow(2013, 5, 11, 12, 30, 45) - assert arw.shift(weekday=SA(0)) == arrow.Arrow(2013, 5, 11, 12, 30, 45) - assert arw.shift(weekday=SA(1)) == arrow.Arrow(2013, 5, 11, 12, 30, 45) - assert arw.shift(weekday=SA(2)) == arrow.Arrow(2013, 5, 18, 12, 30, 45) - assert arw.shift(weekday=SU) == arw - assert arw.shift(weekday=SU(0)) == arw - assert arw.shift(weekday=SU(1)) == arw - assert arw.shift(weekday=SU(2)) == arrow.Arrow(2013, 5, 12, 12, 30, 45) - - def test_shift_negative(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - assert arw.shift(years=-1) == arrow.Arrow(2012, 5, 5, 12, 30, 45) - assert arw.shift(quarters=-1) == arrow.Arrow(2013, 2, 5, 12, 30, 45) - assert arw.shift(quarters=-1, months=-1) == arrow.Arrow(2013, 1, 5, 12, 30, 45) - assert arw.shift(months=-1) == arrow.Arrow(2013, 4, 5, 12, 30, 45) - assert arw.shift(weeks=-1) == arrow.Arrow(2013, 4, 28, 12, 30, 45) - assert arw.shift(days=-1) == 
arrow.Arrow(2013, 5, 4, 12, 30, 45) - assert arw.shift(hours=-1) == arrow.Arrow(2013, 5, 5, 11, 30, 45) - assert arw.shift(minutes=-1) == arrow.Arrow(2013, 5, 5, 12, 29, 45) - assert arw.shift(seconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44) - assert arw.shift(microseconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44, 999999) - - # Not sure how practical these negative weekdays are - assert arw.shift(weekday=-1) == arw.shift(weekday=SU) - assert arw.shift(weekday=-2) == arw.shift(weekday=SA) - assert arw.shift(weekday=-3) == arw.shift(weekday=FR) - assert arw.shift(weekday=-4) == arw.shift(weekday=TH) - assert arw.shift(weekday=-5) == arw.shift(weekday=WE) - assert arw.shift(weekday=-6) == arw.shift(weekday=TU) - assert arw.shift(weekday=-7) == arw.shift(weekday=MO) - - with pytest.raises(IndexError): - arw.shift(weekday=-8) - - assert arw.shift(weekday=MO(-1)) == arrow.Arrow(2013, 4, 29, 12, 30, 45) - assert arw.shift(weekday=TU(-1)) == arrow.Arrow(2013, 4, 30, 12, 30, 45) - assert arw.shift(weekday=WE(-1)) == arrow.Arrow(2013, 5, 1, 12, 30, 45) - assert arw.shift(weekday=TH(-1)) == arrow.Arrow(2013, 5, 2, 12, 30, 45) - assert arw.shift(weekday=FR(-1)) == arrow.Arrow(2013, 5, 3, 12, 30, 45) - assert arw.shift(weekday=SA(-1)) == arrow.Arrow(2013, 5, 4, 12, 30, 45) - assert arw.shift(weekday=SU(-1)) == arw - assert arw.shift(weekday=SU(-2)) == arrow.Arrow(2013, 4, 28, 12, 30, 45) - - def test_shift_quarters_bug(self): - - arw = arrow.Arrow(2013, 5, 5, 12, 30, 45) - - # The value of the last-read argument was used instead of the ``quarters`` argument. - # Recall that the keyword argument dict, like all dicts, is unordered, so only certain - # combinations of arguments would exhibit this. 
- assert arw.shift(quarters=0, years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45) - assert arw.shift(quarters=0, months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45) - assert arw.shift(quarters=0, weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45) - assert arw.shift(quarters=0, days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45) - assert arw.shift(quarters=0, hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45) - assert arw.shift(quarters=0, minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45) - assert arw.shift(quarters=0, seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46) - assert arw.shift(quarters=0, microseconds=1) == arrow.Arrow( - 2013, 5, 5, 12, 30, 45, 1 - ) - - def test_shift_positive_imaginary(self): - - # Avoid shifting into imaginary datetimes, take into account DST and other timezone changes. - - new_york = arrow.Arrow(2017, 3, 12, 1, 30, tzinfo="America/New_York") - assert new_york.shift(hours=+1) == arrow.Arrow( - 2017, 3, 12, 3, 30, tzinfo="America/New_York" - ) - - # pendulum example - paris = arrow.Arrow(2013, 3, 31, 1, 50, tzinfo="Europe/Paris") - assert paris.shift(minutes=+20) == arrow.Arrow( - 2013, 3, 31, 3, 10, tzinfo="Europe/Paris" - ) - - canberra = arrow.Arrow(2018, 10, 7, 1, 30, tzinfo="Australia/Canberra") - assert canberra.shift(hours=+1) == arrow.Arrow( - 2018, 10, 7, 3, 30, tzinfo="Australia/Canberra" - ) - - kiev = arrow.Arrow(2018, 3, 25, 2, 30, tzinfo="Europe/Kiev") - assert kiev.shift(hours=+1) == arrow.Arrow( - 2018, 3, 25, 4, 30, tzinfo="Europe/Kiev" - ) - - # Edge case, the entire day of 2011-12-30 is imaginary in this zone! 
- apia = arrow.Arrow(2011, 12, 29, 23, tzinfo="Pacific/Apia") - assert apia.shift(hours=+2) == arrow.Arrow( - 2011, 12, 31, 1, tzinfo="Pacific/Apia" - ) - - def test_shift_negative_imaginary(self): - - new_york = arrow.Arrow(2011, 3, 13, 3, 30, tzinfo="America/New_York") - assert new_york.shift(hours=-1) == arrow.Arrow( - 2011, 3, 13, 3, 30, tzinfo="America/New_York" - ) - assert new_york.shift(hours=-2) == arrow.Arrow( - 2011, 3, 13, 1, 30, tzinfo="America/New_York" - ) - - london = arrow.Arrow(2019, 3, 31, 2, tzinfo="Europe/London") - assert london.shift(hours=-1) == arrow.Arrow( - 2019, 3, 31, 2, tzinfo="Europe/London" - ) - assert london.shift(hours=-2) == arrow.Arrow( - 2019, 3, 31, 0, tzinfo="Europe/London" - ) - - # edge case, crossing the international dateline - apia = arrow.Arrow(2011, 12, 31, 1, tzinfo="Pacific/Apia") - assert apia.shift(hours=-2) == arrow.Arrow( - 2011, 12, 31, 23, tzinfo="Pacific/Apia" - ) - - @pytest.mark.skipif( - dateutil.__version__ < "2.7.1", reason="old tz database (2018d needed)" - ) - def test_shift_kiritimati(self): - # corrected 2018d tz database release, will fail in earlier versions - - kiritimati = arrow.Arrow(1994, 12, 30, 12, 30, tzinfo="Pacific/Kiritimati") - assert kiritimati.shift(days=+1) == arrow.Arrow( - 1995, 1, 1, 12, 30, tzinfo="Pacific/Kiritimati" - ) - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="unsupported before python 3.6" - ) - def shift_imaginary_seconds(self): - # offset has a seconds component - monrovia = arrow.Arrow(1972, 1, 6, 23, tzinfo="Africa/Monrovia") - assert monrovia.shift(hours=+1, minutes=+30) == arrow.Arrow( - 1972, 1, 7, 1, 14, 30, tzinfo="Africa/Monrovia" - ) - - -class TestArrowRange: - def test_year(self): - - result = list( - arrow.Arrow.range( - "year", datetime(2013, 1, 2, 3, 4, 5), datetime(2016, 4, 5, 6, 7, 8) - ) - ) - - assert result == [ - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2014, 1, 2, 3, 4, 5), - arrow.Arrow(2015, 1, 2, 3, 4, 5), - arrow.Arrow(2016, 
1, 2, 3, 4, 5), - ] - - def test_quarter(self): - - result = list( - arrow.Arrow.range( - "quarter", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9) - ) - ) - - assert result == [ - arrow.Arrow(2013, 2, 3, 4, 5, 6), - arrow.Arrow(2013, 5, 3, 4, 5, 6), - ] - - def test_month(self): - - result = list( - arrow.Arrow.range( - "month", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9) - ) - ) - - assert result == [ - arrow.Arrow(2013, 2, 3, 4, 5, 6), - arrow.Arrow(2013, 3, 3, 4, 5, 6), - arrow.Arrow(2013, 4, 3, 4, 5, 6), - arrow.Arrow(2013, 5, 3, 4, 5, 6), - ] - - def test_week(self): - - result = list( - arrow.Arrow.range( - "week", datetime(2013, 9, 1, 2, 3, 4), datetime(2013, 10, 1, 2, 3, 4) - ) - ) - - assert result == [ - arrow.Arrow(2013, 9, 1, 2, 3, 4), - arrow.Arrow(2013, 9, 8, 2, 3, 4), - arrow.Arrow(2013, 9, 15, 2, 3, 4), - arrow.Arrow(2013, 9, 22, 2, 3, 4), - arrow.Arrow(2013, 9, 29, 2, 3, 4), - ] - - def test_day(self): - - result = list( - arrow.Arrow.range( - "day", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 5, 6, 7, 8) - ) - ) - - assert result == [ - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2013, 1, 3, 3, 4, 5), - arrow.Arrow(2013, 1, 4, 3, 4, 5), - arrow.Arrow(2013, 1, 5, 3, 4, 5), - ] - - def test_hour(self): - - result = list( - arrow.Arrow.range( - "hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 6, 7, 8) - ) - ) - - assert result == [ - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2013, 1, 2, 4, 4, 5), - arrow.Arrow(2013, 1, 2, 5, 4, 5), - arrow.Arrow(2013, 1, 2, 6, 4, 5), - ] - - result = list( - arrow.Arrow.range( - "hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 5) - ) - ) - - assert result == [arrow.Arrow(2013, 1, 2, 3, 4, 5)] - - def test_minute(self): - - result = list( - arrow.Arrow.range( - "minute", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 7, 8) - ) - ) - - assert result == [ - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2013, 1, 2, 3, 5, 5), - 
arrow.Arrow(2013, 1, 2, 3, 6, 5), - arrow.Arrow(2013, 1, 2, 3, 7, 5), - ] - - def test_second(self): - - result = list( - arrow.Arrow.range( - "second", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 8) - ) - ) - - assert result == [ - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2013, 1, 2, 3, 4, 6), - arrow.Arrow(2013, 1, 2, 3, 4, 7), - arrow.Arrow(2013, 1, 2, 3, 4, 8), - ] - - def test_arrow(self): - - result = list( - arrow.Arrow.range( - "day", - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2013, 1, 5, 6, 7, 8), - ) - ) - - assert result == [ - arrow.Arrow(2013, 1, 2, 3, 4, 5), - arrow.Arrow(2013, 1, 3, 3, 4, 5), - arrow.Arrow(2013, 1, 4, 3, 4, 5), - arrow.Arrow(2013, 1, 5, 3, 4, 5), - ] - - def test_naive_tz(self): - - result = arrow.Arrow.range( - "year", datetime(2013, 1, 2, 3), datetime(2016, 4, 5, 6), "US/Pacific" - ) - - for r in result: - assert r.tzinfo == tz.gettz("US/Pacific") - - def test_aware_same_tz(self): - - result = arrow.Arrow.range( - "day", - arrow.Arrow(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")), - arrow.Arrow(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")), - ) - - for r in result: - assert r.tzinfo == tz.gettz("US/Pacific") - - def test_aware_different_tz(self): - - result = arrow.Arrow.range( - "day", - datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")), - datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")), - ) - - for r in result: - assert r.tzinfo == tz.gettz("US/Eastern") - - def test_aware_tz(self): - - result = arrow.Arrow.range( - "day", - datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")), - datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")), - tz=tz.gettz("US/Central"), - ) - - for r in result: - assert r.tzinfo == tz.gettz("US/Central") - - def test_imaginary(self): - # issue #72, avoid duplication in utc column - - before = arrow.Arrow(2018, 3, 10, 23, tzinfo="US/Pacific") - after = arrow.Arrow(2018, 3, 11, 4, tzinfo="US/Pacific") - - pacific_range = [t for t in arrow.Arrow.range("hour", before, after)] - 
utc_range = [t.to("utc") for t in arrow.Arrow.range("hour", before, after)] - - assert len(pacific_range) == len(set(pacific_range)) - assert len(utc_range) == len(set(utc_range)) - - def test_unsupported(self): - - with pytest.raises(AttributeError): - next(arrow.Arrow.range("abc", datetime.utcnow(), datetime.utcnow())) - - def test_range_over_months_ending_on_different_days(self): - # regression test for issue #842 - result = list(arrow.Arrow.range("month", datetime(2015, 1, 31), limit=4)) - assert result == [ - arrow.Arrow(2015, 1, 31), - arrow.Arrow(2015, 2, 28), - arrow.Arrow(2015, 3, 31), - arrow.Arrow(2015, 4, 30), - ] - - result = list(arrow.Arrow.range("month", datetime(2015, 1, 30), limit=3)) - assert result == [ - arrow.Arrow(2015, 1, 30), - arrow.Arrow(2015, 2, 28), - arrow.Arrow(2015, 3, 30), - ] - - result = list(arrow.Arrow.range("month", datetime(2015, 2, 28), limit=3)) - assert result == [ - arrow.Arrow(2015, 2, 28), - arrow.Arrow(2015, 3, 28), - arrow.Arrow(2015, 4, 28), - ] - - result = list(arrow.Arrow.range("month", datetime(2015, 3, 31), limit=3)) - assert result == [ - arrow.Arrow(2015, 3, 31), - arrow.Arrow(2015, 4, 30), - arrow.Arrow(2015, 5, 31), - ] - - def test_range_over_quarter_months_ending_on_different_days(self): - result = list(arrow.Arrow.range("quarter", datetime(2014, 11, 30), limit=3)) - assert result == [ - arrow.Arrow(2014, 11, 30), - arrow.Arrow(2015, 2, 28), - arrow.Arrow(2015, 5, 30), - ] - - def test_range_over_year_maintains_end_date_across_leap_year(self): - result = list(arrow.Arrow.range("year", datetime(2012, 2, 29), limit=5)) - assert result == [ - arrow.Arrow(2012, 2, 29), - arrow.Arrow(2013, 2, 28), - arrow.Arrow(2014, 2, 28), - arrow.Arrow(2015, 2, 28), - arrow.Arrow(2016, 2, 29), - ] - - -class TestArrowSpanRange: - def test_year(self): - - result = list( - arrow.Arrow.span_range("year", datetime(2013, 2, 1), datetime(2016, 3, 31)) - ) - - assert result == [ - ( - arrow.Arrow(2013, 1, 1), - arrow.Arrow(2013, 12, 
31, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2014, 1, 1), - arrow.Arrow(2014, 12, 31, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2015, 1, 1), - arrow.Arrow(2015, 12, 31, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2016, 1, 1), - arrow.Arrow(2016, 12, 31, 23, 59, 59, 999999), - ), - ] - - def test_quarter(self): - - result = list( - arrow.Arrow.span_range( - "quarter", datetime(2013, 2, 2), datetime(2013, 5, 15) - ) - ) - - assert result == [ - (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)), - (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 6, 30, 23, 59, 59, 999999)), - ] - - def test_month(self): - - result = list( - arrow.Arrow.span_range("month", datetime(2013, 1, 2), datetime(2013, 4, 15)) - ) - - assert result == [ - (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 1, 31, 23, 59, 59, 999999)), - (arrow.Arrow(2013, 2, 1), arrow.Arrow(2013, 2, 28, 23, 59, 59, 999999)), - (arrow.Arrow(2013, 3, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)), - (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 4, 30, 23, 59, 59, 999999)), - ] - - def test_week(self): - - result = list( - arrow.Arrow.span_range("week", datetime(2013, 2, 2), datetime(2013, 2, 28)) - ) - - assert result == [ - (arrow.Arrow(2013, 1, 28), arrow.Arrow(2013, 2, 3, 23, 59, 59, 999999)), - (arrow.Arrow(2013, 2, 4), arrow.Arrow(2013, 2, 10, 23, 59, 59, 999999)), - ( - arrow.Arrow(2013, 2, 11), - arrow.Arrow(2013, 2, 17, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 2, 18), - arrow.Arrow(2013, 2, 24, 23, 59, 59, 999999), - ), - (arrow.Arrow(2013, 2, 25), arrow.Arrow(2013, 3, 3, 23, 59, 59, 999999)), - ] - - def test_day(self): - - result = list( - arrow.Arrow.span_range( - "day", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12) - ) - ) - - assert result == [ - ( - arrow.Arrow(2013, 1, 1, 0), - arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 2, 0), - arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 3, 0), - arrow.Arrow(2013, 1, 3, 
23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 4, 0), - arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999), - ), - ] - - def test_days(self): - - result = list( - arrow.Arrow.span_range( - "days", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12) - ) - ) - - assert result == [ - ( - arrow.Arrow(2013, 1, 1, 0), - arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 2, 0), - arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 3, 0), - arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 4, 0), - arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999), - ), - ] - - def test_hour(self): - - result = list( - arrow.Arrow.span_range( - "hour", datetime(2013, 1, 1, 0, 30), datetime(2013, 1, 1, 3, 30) - ) - ) - - assert result == [ - ( - arrow.Arrow(2013, 1, 1, 0), - arrow.Arrow(2013, 1, 1, 0, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 1), - arrow.Arrow(2013, 1, 1, 1, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 2), - arrow.Arrow(2013, 1, 1, 2, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 3), - arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999), - ), - ] - - result = list( - arrow.Arrow.span_range( - "hour", datetime(2013, 1, 1, 3, 30), datetime(2013, 1, 1, 3, 30) - ) - ) - - assert result == [ - (arrow.Arrow(2013, 1, 1, 3), arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999)) - ] - - def test_minute(self): - - result = list( - arrow.Arrow.span_range( - "minute", datetime(2013, 1, 1, 0, 0, 30), datetime(2013, 1, 1, 0, 3, 30) - ) - ) - - assert result == [ - ( - arrow.Arrow(2013, 1, 1, 0, 0), - arrow.Arrow(2013, 1, 1, 0, 0, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 0, 1), - arrow.Arrow(2013, 1, 1, 0, 1, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 0, 2), - arrow.Arrow(2013, 1, 1, 0, 2, 59, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 0, 3), - arrow.Arrow(2013, 1, 1, 0, 3, 59, 999999), - ), - ] - - def test_second(self): - - result = list( - arrow.Arrow.span_range( - "second", datetime(2013, 1, 1), 
datetime(2013, 1, 1, 0, 0, 3) - ) - ) - - assert result == [ - ( - arrow.Arrow(2013, 1, 1, 0, 0, 0), - arrow.Arrow(2013, 1, 1, 0, 0, 0, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 0, 0, 1), - arrow.Arrow(2013, 1, 1, 0, 0, 1, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 0, 0, 2), - arrow.Arrow(2013, 1, 1, 0, 0, 2, 999999), - ), - ( - arrow.Arrow(2013, 1, 1, 0, 0, 3), - arrow.Arrow(2013, 1, 1, 0, 0, 3, 999999), - ), - ] - - def test_naive_tz(self): - - tzinfo = tz.gettz("US/Pacific") - - result = arrow.Arrow.span_range( - "hour", datetime(2013, 1, 1, 0), datetime(2013, 1, 1, 3, 59), "US/Pacific" - ) - - for f, c in result: - assert f.tzinfo == tzinfo - assert c.tzinfo == tzinfo - - def test_aware_same_tz(self): - - tzinfo = tz.gettz("US/Pacific") - - result = arrow.Arrow.span_range( - "hour", - datetime(2013, 1, 1, 0, tzinfo=tzinfo), - datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo), - ) - - for f, c in result: - assert f.tzinfo == tzinfo - assert c.tzinfo == tzinfo - - def test_aware_different_tz(self): - - tzinfo1 = tz.gettz("US/Pacific") - tzinfo2 = tz.gettz("US/Eastern") - - result = arrow.Arrow.span_range( - "hour", - datetime(2013, 1, 1, 0, tzinfo=tzinfo1), - datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo2), - ) - - for f, c in result: - assert f.tzinfo == tzinfo1 - assert c.tzinfo == tzinfo1 - - def test_aware_tz(self): - - result = arrow.Arrow.span_range( - "hour", - datetime(2013, 1, 1, 0, tzinfo=tz.gettz("US/Eastern")), - datetime(2013, 1, 1, 2, 59, tzinfo=tz.gettz("US/Eastern")), - tz="US/Central", - ) - - for f, c in result: - assert f.tzinfo == tz.gettz("US/Central") - assert c.tzinfo == tz.gettz("US/Central") - - def test_bounds_param_is_passed(self): - - result = list( - arrow.Arrow.span_range( - "quarter", datetime(2013, 2, 2), datetime(2013, 5, 15), bounds="[]" - ) - ) - - assert result == [ - (arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 4, 1)), - (arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 7, 1)), - ] - - -class TestArrowInterval: - def 
test_incorrect_input(self): - with pytest.raises(ValueError): - list( - arrow.Arrow.interval( - "month", datetime(2013, 1, 2), datetime(2013, 4, 15), 0 - ) - ) - - def test_correct(self): - result = list( - arrow.Arrow.interval( - "hour", datetime(2013, 5, 5, 12, 30), datetime(2013, 5, 5, 17, 15), 2 - ) - ) - - assert result == [ - ( - arrow.Arrow(2013, 5, 5, 12), - arrow.Arrow(2013, 5, 5, 13, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 5, 5, 14), - arrow.Arrow(2013, 5, 5, 15, 59, 59, 999999), - ), - ( - arrow.Arrow(2013, 5, 5, 16), - arrow.Arrow(2013, 5, 5, 17, 59, 59, 999999), - ), - ] - - def test_bounds_param_is_passed(self): - result = list( - arrow.Arrow.interval( - "hour", - datetime(2013, 5, 5, 12, 30), - datetime(2013, 5, 5, 17, 15), - 2, - bounds="[]", - ) - ) - - assert result == [ - (arrow.Arrow(2013, 5, 5, 12), arrow.Arrow(2013, 5, 5, 14)), - (arrow.Arrow(2013, 5, 5, 14), arrow.Arrow(2013, 5, 5, 16)), - (arrow.Arrow(2013, 5, 5, 16), arrow.Arrow(2013, 5, 5, 18)), - ] - - -@pytest.mark.usefixtures("time_2013_02_15") -class TestArrowSpan: - def test_span_attribute(self): - - with pytest.raises(AttributeError): - self.arrow.span("span") - - def test_span_year(self): - - floor, ceil = self.arrow.span("year") - - assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 12, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_quarter(self): - - floor, ceil = self.arrow.span("quarter") - - assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 3, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_quarter_count(self): - - floor, ceil = self.arrow.span("quarter", 2) - - assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 6, 30, 23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_year_count(self): - - floor, ceil = self.arrow.span("year", 2) - - assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2014, 12, 31, 
23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_month(self): - - floor, ceil = self.arrow.span("month") - - assert floor == datetime(2013, 2, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 28, 23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_week(self): - - floor, ceil = self.arrow.span("week") - - assert floor == datetime(2013, 2, 11, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 17, 23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_day(self): - - floor, ceil = self.arrow.span("day") - - assert floor == datetime(2013, 2, 15, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 23, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_hour(self): - - floor, ceil = self.arrow.span("hour") - - assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_minute(self): - - floor, ceil = self.arrow.span("minute") - - assert floor == datetime(2013, 2, 15, 3, 41, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 3, 41, 59, 999999, tzinfo=tz.tzutc()) - - def test_span_second(self): - - floor, ceil = self.arrow.span("second") - - assert floor == datetime(2013, 2, 15, 3, 41, 22, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 3, 41, 22, 999999, tzinfo=tz.tzutc()) - - def test_span_microsecond(self): - - floor, ceil = self.arrow.span("microsecond") - - assert floor == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc()) - - def test_floor(self): - - floor, ceil = self.arrow.span("month") - - assert floor == self.arrow.floor("month") - assert ceil == self.arrow.ceil("month") - - def test_span_inclusive_inclusive(self): - - floor, ceil = self.arrow.span("hour", bounds="[]") - - assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc()) - - def test_span_exclusive_inclusive(self): - - 
floor, ceil = self.arrow.span("hour", bounds="(]") - - assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc()) - - def test_span_exclusive_exclusive(self): - - floor, ceil = self.arrow.span("hour", bounds="()") - - assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc()) - assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc()) - - def test_bounds_are_validated(self): - - with pytest.raises(ValueError): - floor, ceil = self.arrow.span("hour", bounds="][") - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowHumanize: - def test_granularity(self): - - assert self.now.humanize(granularity="second") == "just now" - - later1 = self.now.shift(seconds=1) - assert self.now.humanize(later1, granularity="second") == "just now" - assert later1.humanize(self.now, granularity="second") == "just now" - assert self.now.humanize(later1, granularity="minute") == "0 minutes ago" - assert later1.humanize(self.now, granularity="minute") == "in 0 minutes" - - later100 = self.now.shift(seconds=100) - assert self.now.humanize(later100, granularity="second") == "100 seconds ago" - assert later100.humanize(self.now, granularity="second") == "in 100 seconds" - assert self.now.humanize(later100, granularity="minute") == "a minute ago" - assert later100.humanize(self.now, granularity="minute") == "in a minute" - assert self.now.humanize(later100, granularity="hour") == "0 hours ago" - assert later100.humanize(self.now, granularity="hour") == "in 0 hours" - - later4000 = self.now.shift(seconds=4000) - assert self.now.humanize(later4000, granularity="minute") == "66 minutes ago" - assert later4000.humanize(self.now, granularity="minute") == "in 66 minutes" - assert self.now.humanize(later4000, granularity="hour") == "an hour ago" - assert later4000.humanize(self.now, granularity="hour") == "in an hour" - assert self.now.humanize(later4000, granularity="day") == "0 days ago" - 
assert later4000.humanize(self.now, granularity="day") == "in 0 days" - - later105 = self.now.shift(seconds=10 ** 5) - assert self.now.humanize(later105, granularity="hour") == "27 hours ago" - assert later105.humanize(self.now, granularity="hour") == "in 27 hours" - assert self.now.humanize(later105, granularity="day") == "a day ago" - assert later105.humanize(self.now, granularity="day") == "in a day" - assert self.now.humanize(later105, granularity="week") == "0 weeks ago" - assert later105.humanize(self.now, granularity="week") == "in 0 weeks" - assert self.now.humanize(later105, granularity="month") == "0 months ago" - assert later105.humanize(self.now, granularity="month") == "in 0 months" - assert self.now.humanize(later105, granularity=["month"]) == "0 months ago" - assert later105.humanize(self.now, granularity=["month"]) == "in 0 months" - - later106 = self.now.shift(seconds=3 * 10 ** 6) - assert self.now.humanize(later106, granularity="day") == "34 days ago" - assert later106.humanize(self.now, granularity="day") == "in 34 days" - assert self.now.humanize(later106, granularity="week") == "4 weeks ago" - assert later106.humanize(self.now, granularity="week") == "in 4 weeks" - assert self.now.humanize(later106, granularity="month") == "a month ago" - assert later106.humanize(self.now, granularity="month") == "in a month" - assert self.now.humanize(later106, granularity="year") == "0 years ago" - assert later106.humanize(self.now, granularity="year") == "in 0 years" - - later506 = self.now.shift(seconds=50 * 10 ** 6) - assert self.now.humanize(later506, granularity="week") == "82 weeks ago" - assert later506.humanize(self.now, granularity="week") == "in 82 weeks" - assert self.now.humanize(later506, granularity="month") == "18 months ago" - assert later506.humanize(self.now, granularity="month") == "in 18 months" - assert self.now.humanize(later506, granularity="year") == "a year ago" - assert later506.humanize(self.now, granularity="year") == "in a year" - 
- later108 = self.now.shift(seconds=10 ** 8) - assert self.now.humanize(later108, granularity="year") == "3 years ago" - assert later108.humanize(self.now, granularity="year") == "in 3 years" - - later108onlydistance = self.now.shift(seconds=10 ** 8) - assert ( - self.now.humanize( - later108onlydistance, only_distance=True, granularity="year" - ) - == "3 years" - ) - assert ( - later108onlydistance.humanize( - self.now, only_distance=True, granularity="year" - ) - == "3 years" - ) - - with pytest.raises(AttributeError): - self.now.humanize(later108, granularity="years") - - def test_multiple_granularity(self): - assert self.now.humanize(granularity="second") == "just now" - assert self.now.humanize(granularity=["second"]) == "just now" - assert ( - self.now.humanize(granularity=["year", "month", "day", "hour", "second"]) - == "in 0 years 0 months 0 days 0 hours and 0 seconds" - ) - - later4000 = self.now.shift(seconds=4000) - assert ( - later4000.humanize(self.now, granularity=["hour", "minute"]) - == "in an hour and 6 minutes" - ) - assert ( - self.now.humanize(later4000, granularity=["hour", "minute"]) - == "an hour and 6 minutes ago" - ) - assert ( - later4000.humanize( - self.now, granularity=["hour", "minute"], only_distance=True - ) - == "an hour and 6 minutes" - ) - assert ( - later4000.humanize(self.now, granularity=["day", "hour", "minute"]) - == "in 0 days an hour and 6 minutes" - ) - assert ( - self.now.humanize(later4000, granularity=["day", "hour", "minute"]) - == "0 days an hour and 6 minutes ago" - ) - - later105 = self.now.shift(seconds=10 ** 5) - assert ( - self.now.humanize(later105, granularity=["hour", "day", "minute"]) - == "a day 3 hours and 46 minutes ago" - ) - with pytest.raises(AttributeError): - self.now.humanize(later105, granularity=["error", "second"]) - - later108onlydistance = self.now.shift(seconds=10 ** 8) - assert ( - self.now.humanize( - later108onlydistance, only_distance=True, granularity=["year"] - ) - == "3 years" - ) - 
assert ( - self.now.humanize( - later108onlydistance, only_distance=True, granularity=["month", "week"] - ) - == "37 months and 4 weeks" - ) - assert ( - self.now.humanize( - later108onlydistance, only_distance=True, granularity=["year", "second"] - ) - == "3 years and 5327200 seconds" - ) - - one_min_one_sec_ago = self.now.shift(minutes=-1, seconds=-1) - assert ( - one_min_one_sec_ago.humanize(self.now, granularity=["minute", "second"]) - == "a minute and a second ago" - ) - - one_min_two_secs_ago = self.now.shift(minutes=-1, seconds=-2) - assert ( - one_min_two_secs_ago.humanize(self.now, granularity=["minute", "second"]) - == "a minute and 2 seconds ago" - ) - - def test_seconds(self): - - later = self.now.shift(seconds=10) - - # regression test for issue #727 - assert self.now.humanize(later) == "10 seconds ago" - assert later.humanize(self.now) == "in 10 seconds" - - assert self.now.humanize(later, only_distance=True) == "10 seconds" - assert later.humanize(self.now, only_distance=True) == "10 seconds" - - def test_minute(self): - - later = self.now.shift(minutes=1) - - assert self.now.humanize(later) == "a minute ago" - assert later.humanize(self.now) == "in a minute" - - assert self.now.humanize(later, only_distance=True) == "a minute" - assert later.humanize(self.now, only_distance=True) == "a minute" - - def test_minutes(self): - - later = self.now.shift(minutes=2) - - assert self.now.humanize(later) == "2 minutes ago" - assert later.humanize(self.now) == "in 2 minutes" - - assert self.now.humanize(later, only_distance=True) == "2 minutes" - assert later.humanize(self.now, only_distance=True) == "2 minutes" - - def test_hour(self): - - later = self.now.shift(hours=1) - - assert self.now.humanize(later) == "an hour ago" - assert later.humanize(self.now) == "in an hour" - - assert self.now.humanize(later, only_distance=True) == "an hour" - assert later.humanize(self.now, only_distance=True) == "an hour" - - def test_hours(self): - - later = 
self.now.shift(hours=2) - - assert self.now.humanize(later) == "2 hours ago" - assert later.humanize(self.now) == "in 2 hours" - - assert self.now.humanize(later, only_distance=True) == "2 hours" - assert later.humanize(self.now, only_distance=True) == "2 hours" - - def test_day(self): - - later = self.now.shift(days=1) - - assert self.now.humanize(later) == "a day ago" - assert later.humanize(self.now) == "in a day" - - # regression test for issue #697 - less_than_48_hours = self.now.shift( - days=1, hours=23, seconds=59, microseconds=999999 - ) - assert self.now.humanize(less_than_48_hours) == "a day ago" - assert less_than_48_hours.humanize(self.now) == "in a day" - - less_than_48_hours_date = less_than_48_hours._datetime.date() - with pytest.raises(TypeError): - # humanize other argument does not take raw datetime.date objects - self.now.humanize(less_than_48_hours_date) - - # convert from date to arrow object - less_than_48_hours_date = arrow.Arrow.fromdate(less_than_48_hours_date) - assert self.now.humanize(less_than_48_hours_date) == "a day ago" - assert less_than_48_hours_date.humanize(self.now) == "in a day" - - assert self.now.humanize(later, only_distance=True) == "a day" - assert later.humanize(self.now, only_distance=True) == "a day" - - def test_days(self): - - later = self.now.shift(days=2) - - assert self.now.humanize(later) == "2 days ago" - assert later.humanize(self.now) == "in 2 days" - - assert self.now.humanize(later, only_distance=True) == "2 days" - assert later.humanize(self.now, only_distance=True) == "2 days" - - # Regression tests for humanize bug referenced in issue 541 - later = self.now.shift(days=3) - assert later.humanize(self.now) == "in 3 days" - - later = self.now.shift(days=3, seconds=1) - assert later.humanize(self.now) == "in 3 days" - - later = self.now.shift(days=4) - assert later.humanize(self.now) == "in 4 days" - - def test_week(self): - - later = self.now.shift(weeks=1) - - assert self.now.humanize(later) == "a week ago" 
- assert later.humanize(self.now) == "in a week" - - assert self.now.humanize(later, only_distance=True) == "a week" - assert later.humanize(self.now, only_distance=True) == "a week" - - def test_weeks(self): - - later = self.now.shift(weeks=2) - - assert self.now.humanize(later) == "2 weeks ago" - assert later.humanize(self.now) == "in 2 weeks" - - assert self.now.humanize(later, only_distance=True) == "2 weeks" - assert later.humanize(self.now, only_distance=True) == "2 weeks" - - def test_month(self): - - later = self.now.shift(months=1) - - assert self.now.humanize(later) == "a month ago" - assert later.humanize(self.now) == "in a month" - - assert self.now.humanize(later, only_distance=True) == "a month" - assert later.humanize(self.now, only_distance=True) == "a month" - - def test_months(self): - - later = self.now.shift(months=2) - earlier = self.now.shift(months=-2) - - assert earlier.humanize(self.now) == "2 months ago" - assert later.humanize(self.now) == "in 2 months" - - assert self.now.humanize(later, only_distance=True) == "2 months" - assert later.humanize(self.now, only_distance=True) == "2 months" - - def test_year(self): - - later = self.now.shift(years=1) - - assert self.now.humanize(later) == "a year ago" - assert later.humanize(self.now) == "in a year" - - assert self.now.humanize(later, only_distance=True) == "a year" - assert later.humanize(self.now, only_distance=True) == "a year" - - def test_years(self): - - later = self.now.shift(years=2) - - assert self.now.humanize(later) == "2 years ago" - assert later.humanize(self.now) == "in 2 years" - - assert self.now.humanize(later, only_distance=True) == "2 years" - assert later.humanize(self.now, only_distance=True) == "2 years" - - arw = arrow.Arrow(2014, 7, 2) - - result = arw.humanize(self.datetime) - - assert result == "in 2 years" - - def test_arrow(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - result = arw.humanize(arrow.Arrow.fromdatetime(self.datetime)) - - assert result 
== "just now" - - def test_datetime_tzinfo(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - result = arw.humanize(self.datetime.replace(tzinfo=tz.tzutc())) - - assert result == "just now" - - def test_other(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - with pytest.raises(TypeError): - arw.humanize(object()) - - def test_invalid_locale(self): - - arw = arrow.Arrow.fromdatetime(self.datetime) - - with pytest.raises(ValueError): - arw.humanize(locale="klingon") - - def test_none(self): - - arw = arrow.Arrow.utcnow() - - result = arw.humanize() - - assert result == "just now" - - result = arw.humanize(None) - - assert result == "just now" - - def test_untranslated_granularity(self, mocker): - - arw = arrow.Arrow.utcnow() - later = arw.shift(weeks=1) - - # simulate an untranslated timeframe key - mocker.patch.dict("arrow.locales.EnglishLocale.timeframes") - del arrow.locales.EnglishLocale.timeframes["week"] - with pytest.raises(ValueError): - arw.humanize(later, granularity="week") - - -@pytest.mark.usefixtures("time_2013_01_01") -class TestArrowHumanizeTestsWithLocale: - def test_now(self): - - arw = arrow.Arrow(2013, 1, 1, 0, 0, 0) - - result = arw.humanize(self.datetime, locale="ru") - - assert result == "сейчас" - - def test_seconds(self): - arw = arrow.Arrow(2013, 1, 1, 0, 0, 44) - - result = arw.humanize(self.datetime, locale="ru") - - assert result == "через 44 несколько секунд" - - def test_years(self): - - arw = arrow.Arrow(2011, 7, 2) - - result = arw.humanize(self.datetime, locale="ru") - - assert result == "2 года назад" - - -class TestArrowIsBetween: - def test_start_before_end(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - result = target.is_between(start, end) - assert not result - - def test_exclusive_exclusive_bounds(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 27)) - 
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 10)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 36)) - result = target.is_between(start, end, "()") - assert result - result = target.is_between(start, end) - assert result - - def test_exclusive_exclusive_bounds_same_date(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - result = target.is_between(start, end, "()") - assert not result - - def test_inclusive_exclusive_bounds(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 4)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 6)) - result = target.is_between(start, end, "[)") - assert not result - - def test_exclusive_inclusive_bounds(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - result = target.is_between(start, end, "(]") - assert result - - def test_inclusive_inclusive_bounds_same_date(self): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - result = target.is_between(start, end, "[]") - assert result - - def test_type_error_exception(self): - with pytest.raises(TypeError): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = datetime(2013, 5, 5) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) - target.is_between(start, end) - - with pytest.raises(TypeError): - target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - end = datetime(2013, 5, 8) - target.is_between(start, end) - - with pytest.raises(TypeError): - target.is_between(None, None) - - def test_value_error_exception(self): - target = 
arrow.Arrow.fromdatetime(datetime(2013, 5, 7)) - start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5)) - end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8)) - with pytest.raises(ValueError): - target.is_between(start, end, "][") - with pytest.raises(ValueError): - target.is_between(start, end, "") - with pytest.raises(ValueError): - target.is_between(start, end, "]") - with pytest.raises(ValueError): - target.is_between(start, end, "[") - with pytest.raises(ValueError): - target.is_between(start, end, "hello") - - -class TestArrowUtil: - def test_get_datetime(self): - - get_datetime = arrow.Arrow._get_datetime - - arw = arrow.Arrow.utcnow() - dt = datetime.utcnow() - timestamp = time.time() - - assert get_datetime(arw) == arw.datetime - assert get_datetime(dt) == dt - assert ( - get_datetime(timestamp) == arrow.Arrow.utcfromtimestamp(timestamp).datetime - ) - - with pytest.raises(ValueError) as raise_ctx: - get_datetime("abc") - assert "not recognized as a datetime or timestamp" in str(raise_ctx.value) - - def test_get_tzinfo(self): - - get_tzinfo = arrow.Arrow._get_tzinfo - - with pytest.raises(ValueError) as raise_ctx: - get_tzinfo("abc") - assert "not recognized as a timezone" in str(raise_ctx.value) - - def test_get_iteration_params(self): - - assert arrow.Arrow._get_iteration_params("end", None) == ("end", sys.maxsize) - assert arrow.Arrow._get_iteration_params(None, 100) == (arrow.Arrow.max, 100) - assert arrow.Arrow._get_iteration_params(100, 120) == (100, 120) - - with pytest.raises(ValueError): - arrow.Arrow._get_iteration_params(None, None) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py deleted file mode 100644 index 2b8df5168f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_factory.py +++ /dev/null @@ -1,390 +0,0 @@ -# -*- coding: utf-8 -*- -import time -from datetime import 
date, datetime - -import pytest -from dateutil import tz - -from arrow.parser import ParserError - -from .utils import assert_datetime_equality - - -@pytest.mark.usefixtures("arrow_factory") -class TestGet: - def test_no_args(self): - - assert_datetime_equality( - self.factory.get(), datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - def test_timestamp_one_arg_no_arg(self): - - no_arg = self.factory.get(1406430900).timestamp - one_arg = self.factory.get("1406430900", "X").timestamp - - assert no_arg == one_arg - - def test_one_arg_none(self): - - assert_datetime_equality( - self.factory.get(None), datetime.utcnow().replace(tzinfo=tz.tzutc()) - ) - - def test_struct_time(self): - - assert_datetime_equality( - self.factory.get(time.gmtime()), - datetime.utcnow().replace(tzinfo=tz.tzutc()), - ) - - def test_one_arg_timestamp(self): - - int_timestamp = int(time.time()) - timestamp_dt = datetime.utcfromtimestamp(int_timestamp).replace( - tzinfo=tz.tzutc() - ) - - assert self.factory.get(int_timestamp) == timestamp_dt - - with pytest.raises(ParserError): - self.factory.get(str(int_timestamp)) - - float_timestamp = time.time() - timestamp_dt = datetime.utcfromtimestamp(float_timestamp).replace( - tzinfo=tz.tzutc() - ) - - assert self.factory.get(float_timestamp) == timestamp_dt - - with pytest.raises(ParserError): - self.factory.get(str(float_timestamp)) - - # Regression test for issue #216 - # Python 3 raises OverflowError, Python 2 raises ValueError - timestamp = 99999999999999999999999999.99999999999999999999999999 - with pytest.raises((OverflowError, ValueError)): - self.factory.get(timestamp) - - def test_one_arg_expanded_timestamp(self): - - millisecond_timestamp = 1591328104308 - microsecond_timestamp = 1591328104308505 - - # Regression test for issue #796 - assert self.factory.get(millisecond_timestamp) == datetime.utcfromtimestamp( - 1591328104.308 - ).replace(tzinfo=tz.tzutc()) - assert self.factory.get(microsecond_timestamp) == datetime.utcfromtimestamp( - 
1591328104.308505 - ).replace(tzinfo=tz.tzutc()) - - def test_one_arg_timestamp_with_tzinfo(self): - - timestamp = time.time() - timestamp_dt = datetime.fromtimestamp(timestamp, tz=tz.tzutc()).astimezone( - tz.gettz("US/Pacific") - ) - timezone = tz.gettz("US/Pacific") - - assert_datetime_equality( - self.factory.get(timestamp, tzinfo=timezone), timestamp_dt - ) - - def test_one_arg_arrow(self): - - arw = self.factory.utcnow() - result = self.factory.get(arw) - - assert arw == result - - def test_one_arg_datetime(self): - - dt = datetime.utcnow().replace(tzinfo=tz.tzutc()) - - assert self.factory.get(dt) == dt - - def test_one_arg_date(self): - - d = date.today() - dt = datetime(d.year, d.month, d.day, tzinfo=tz.tzutc()) - - assert self.factory.get(d) == dt - - def test_one_arg_tzinfo(self): - - self.expected = ( - datetime.utcnow() - .replace(tzinfo=tz.tzutc()) - .astimezone(tz.gettz("US/Pacific")) - ) - - assert_datetime_equality( - self.factory.get(tz.gettz("US/Pacific")), self.expected - ) - - # regression test for issue #658 - def test_one_arg_dateparser_datetime(self): - dateparser = pytest.importorskip("dateparser") - expected = datetime(1990, 1, 1).replace(tzinfo=tz.tzutc()) - # dateparser outputs: datetime.datetime(1990, 1, 1, 0, 0, tzinfo=) - parsed_date = dateparser.parse("1990-01-01T00:00:00+00:00") - dt_output = self.factory.get(parsed_date)._datetime.replace(tzinfo=tz.tzutc()) - assert dt_output == expected - - def test_kwarg_tzinfo(self): - - self.expected = ( - datetime.utcnow() - .replace(tzinfo=tz.tzutc()) - .astimezone(tz.gettz("US/Pacific")) - ) - - assert_datetime_equality( - self.factory.get(tzinfo=tz.gettz("US/Pacific")), self.expected - ) - - def test_kwarg_tzinfo_string(self): - - self.expected = ( - datetime.utcnow() - .replace(tzinfo=tz.tzutc()) - .astimezone(tz.gettz("US/Pacific")) - ) - - assert_datetime_equality(self.factory.get(tzinfo="US/Pacific"), self.expected) - - with pytest.raises(ParserError): - 
self.factory.get(tzinfo="US/PacificInvalidTzinfo") - - def test_kwarg_normalize_whitespace(self): - result = self.factory.get( - "Jun 1 2005 1:33PM", - "MMM D YYYY H:mmA", - tzinfo=tz.tzutc(), - normalize_whitespace=True, - ) - assert result._datetime == datetime(2005, 6, 1, 13, 33, tzinfo=tz.tzutc()) - - result = self.factory.get( - "\t 2013-05-05T12:30:45.123456 \t \n", - tzinfo=tz.tzutc(), - normalize_whitespace=True, - ) - assert result._datetime == datetime( - 2013, 5, 5, 12, 30, 45, 123456, tzinfo=tz.tzutc() - ) - - def test_one_arg_iso_str(self): - - dt = datetime.utcnow() - - assert_datetime_equality( - self.factory.get(dt.isoformat()), dt.replace(tzinfo=tz.tzutc()) - ) - - def test_one_arg_iso_calendar(self): - - pairs = [ - (datetime(2004, 1, 4), (2004, 1, 7)), - (datetime(2008, 12, 30), (2009, 1, 2)), - (datetime(2010, 1, 2), (2009, 53, 6)), - (datetime(2000, 2, 29), (2000, 9, 2)), - (datetime(2005, 1, 1), (2004, 53, 6)), - (datetime(2010, 1, 4), (2010, 1, 1)), - (datetime(2010, 1, 3), (2009, 53, 7)), - (datetime(2003, 12, 29), (2004, 1, 1)), - ] - - for pair in pairs: - dt, iso = pair - assert self.factory.get(iso) == self.factory.get(dt) - - with pytest.raises(TypeError): - self.factory.get((2014, 7, 1, 4)) - - with pytest.raises(TypeError): - self.factory.get((2014, 7)) - - with pytest.raises(ValueError): - self.factory.get((2014, 70, 1)) - - with pytest.raises(ValueError): - self.factory.get((2014, 7, 10)) - - def test_one_arg_other(self): - - with pytest.raises(TypeError): - self.factory.get(object()) - - def test_one_arg_bool(self): - - with pytest.raises(TypeError): - self.factory.get(False) - - with pytest.raises(TypeError): - self.factory.get(True) - - def test_two_args_datetime_tzinfo(self): - - result = self.factory.get(datetime(2013, 1, 1), tz.gettz("US/Pacific")) - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_datetime_tz_str(self): - - result = self.factory.get(datetime(2013, 1, 1), 
"US/Pacific") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_date_tzinfo(self): - - result = self.factory.get(date(2013, 1, 1), tz.gettz("US/Pacific")) - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_date_tz_str(self): - - result = self.factory.get(date(2013, 1, 1), "US/Pacific") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - - def test_two_args_datetime_other(self): - - with pytest.raises(TypeError): - self.factory.get(datetime.utcnow(), object()) - - def test_two_args_date_other(self): - - with pytest.raises(TypeError): - self.factory.get(date.today(), object()) - - def test_two_args_str_str(self): - - result = self.factory.get("2013-01-01", "YYYY-MM-DD") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_two_args_str_tzinfo(self): - - result = self.factory.get("2013-01-01", tzinfo=tz.gettz("US/Pacific")) - - assert_datetime_equality( - result._datetime, datetime(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")) - ) - - def test_two_args_twitter_format(self): - - # format returned by twitter API for created_at: - twitter_date = "Fri Apr 08 21:08:54 +0000 2016" - result = self.factory.get(twitter_date, "ddd MMM DD HH:mm:ss Z YYYY") - - assert result._datetime == datetime(2016, 4, 8, 21, 8, 54, tzinfo=tz.tzutc()) - - def test_two_args_str_list(self): - - result = self.factory.get("2013-01-01", ["MM/DD/YYYY", "YYYY-MM-DD"]) - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_two_args_unicode_unicode(self): - - result = self.factory.get(u"2013-01-01", u"YYYY-MM-DD") - - assert result._datetime == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_two_args_other(self): - - with pytest.raises(TypeError): - self.factory.get(object(), object()) - - def test_three_args_with_tzinfo(self): - - timefmt = "YYYYMMDD" - d = "20150514" - - assert self.factory.get(d, 
timefmt, tzinfo=tz.tzlocal()) == datetime( - 2015, 5, 14, tzinfo=tz.tzlocal() - ) - - def test_three_args(self): - - assert self.factory.get(2013, 1, 1) == datetime(2013, 1, 1, tzinfo=tz.tzutc()) - - def test_full_kwargs(self): - - assert ( - self.factory.get( - year=2016, - month=7, - day=14, - hour=7, - minute=16, - second=45, - microsecond=631092, - ) - == datetime(2016, 7, 14, 7, 16, 45, 631092, tzinfo=tz.tzutc()) - ) - - def test_three_kwargs(self): - - assert self.factory.get(year=2016, month=7, day=14) == datetime( - 2016, 7, 14, 0, 0, tzinfo=tz.tzutc() - ) - - def test_tzinfo_string_kwargs(self): - result = self.factory.get("2019072807", "YYYYMMDDHH", tzinfo="UTC") - assert result._datetime == datetime(2019, 7, 28, 7, 0, 0, 0, tzinfo=tz.tzutc()) - - def test_insufficient_kwargs(self): - - with pytest.raises(TypeError): - self.factory.get(year=2016) - - with pytest.raises(TypeError): - self.factory.get(year=2016, month=7) - - def test_locale(self): - result = self.factory.get("2010", "YYYY", locale="ja") - assert result._datetime == datetime(2010, 1, 1, 0, 0, 0, 0, tzinfo=tz.tzutc()) - - # regression test for issue #701 - result = self.factory.get( - "Montag, 9. September 2019, 16:15-20:00", "dddd, D. 
MMMM YYYY", locale="de" - ) - assert result._datetime == datetime(2019, 9, 9, 0, 0, 0, 0, tzinfo=tz.tzutc()) - - def test_locale_kwarg_only(self): - res = self.factory.get(locale="ja") - assert res.tzinfo == tz.tzutc() - - def test_locale_with_tzinfo(self): - res = self.factory.get(locale="ja", tzinfo=tz.gettz("Asia/Tokyo")) - assert res.tzinfo == tz.gettz("Asia/Tokyo") - - -@pytest.mark.usefixtures("arrow_factory") -class TestUtcNow: - def test_utcnow(self): - - assert_datetime_equality( - self.factory.utcnow()._datetime, - datetime.utcnow().replace(tzinfo=tz.tzutc()), - ) - - -@pytest.mark.usefixtures("arrow_factory") -class TestNow: - def test_no_tz(self): - - assert_datetime_equality(self.factory.now(), datetime.now(tz.tzlocal())) - - def test_tzinfo(self): - - assert_datetime_equality( - self.factory.now(tz.gettz("EST")), datetime.now(tz.gettz("EST")) - ) - - def test_tz_str(self): - - assert_datetime_equality(self.factory.now("EST"), datetime.now(tz.gettz("EST"))) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py deleted file mode 100644 index e97aeb5dcc..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_formatter.py +++ /dev/null @@ -1,282 +0,0 @@ -# -*- coding: utf-8 -*- -from datetime import datetime - -import pytest -import pytz -from dateutil import tz as dateutil_tz - -from arrow import ( - FORMAT_ATOM, - FORMAT_COOKIE, - FORMAT_RFC822, - FORMAT_RFC850, - FORMAT_RFC1036, - FORMAT_RFC1123, - FORMAT_RFC2822, - FORMAT_RFC3339, - FORMAT_RSS, - FORMAT_W3C, -) - -from .utils import make_full_tz_list - - -@pytest.mark.usefixtures("arrow_formatter") -class TestFormatterFormatToken: - def test_format(self): - - dt = datetime(2013, 2, 5, 12, 32, 51) - - result = self.formatter.format(dt, "MM-DD-YYYY hh:mm:ss a") - - assert result == "02-05-2013 12:32:51 pm" - - def test_year(self): - - dt = 
datetime(2013, 1, 1) - assert self.formatter._format_token(dt, "YYYY") == "2013" - assert self.formatter._format_token(dt, "YY") == "13" - - def test_month(self): - - dt = datetime(2013, 1, 1) - assert self.formatter._format_token(dt, "MMMM") == "January" - assert self.formatter._format_token(dt, "MMM") == "Jan" - assert self.formatter._format_token(dt, "MM") == "01" - assert self.formatter._format_token(dt, "M") == "1" - - def test_day(self): - - dt = datetime(2013, 2, 1) - assert self.formatter._format_token(dt, "DDDD") == "032" - assert self.formatter._format_token(dt, "DDD") == "32" - assert self.formatter._format_token(dt, "DD") == "01" - assert self.formatter._format_token(dt, "D") == "1" - assert self.formatter._format_token(dt, "Do") == "1st" - - assert self.formatter._format_token(dt, "dddd") == "Friday" - assert self.formatter._format_token(dt, "ddd") == "Fri" - assert self.formatter._format_token(dt, "d") == "5" - - def test_hour(self): - - dt = datetime(2013, 1, 1, 2) - assert self.formatter._format_token(dt, "HH") == "02" - assert self.formatter._format_token(dt, "H") == "2" - - dt = datetime(2013, 1, 1, 13) - assert self.formatter._format_token(dt, "HH") == "13" - assert self.formatter._format_token(dt, "H") == "13" - - dt = datetime(2013, 1, 1, 2) - assert self.formatter._format_token(dt, "hh") == "02" - assert self.formatter._format_token(dt, "h") == "2" - - dt = datetime(2013, 1, 1, 13) - assert self.formatter._format_token(dt, "hh") == "01" - assert self.formatter._format_token(dt, "h") == "1" - - # test that 12-hour time converts to '12' at midnight - dt = datetime(2013, 1, 1, 0) - assert self.formatter._format_token(dt, "hh") == "12" - assert self.formatter._format_token(dt, "h") == "12" - - def test_minute(self): - - dt = datetime(2013, 1, 1, 0, 1) - assert self.formatter._format_token(dt, "mm") == "01" - assert self.formatter._format_token(dt, "m") == "1" - - def test_second(self): - - dt = datetime(2013, 1, 1, 0, 0, 1) - assert 
self.formatter._format_token(dt, "ss") == "01" - assert self.formatter._format_token(dt, "s") == "1" - - def test_sub_second(self): - - dt = datetime(2013, 1, 1, 0, 0, 0, 123456) - assert self.formatter._format_token(dt, "SSSSSS") == "123456" - assert self.formatter._format_token(dt, "SSSSS") == "12345" - assert self.formatter._format_token(dt, "SSSS") == "1234" - assert self.formatter._format_token(dt, "SSS") == "123" - assert self.formatter._format_token(dt, "SS") == "12" - assert self.formatter._format_token(dt, "S") == "1" - - dt = datetime(2013, 1, 1, 0, 0, 0, 2000) - assert self.formatter._format_token(dt, "SSSSSS") == "002000" - assert self.formatter._format_token(dt, "SSSSS") == "00200" - assert self.formatter._format_token(dt, "SSSS") == "0020" - assert self.formatter._format_token(dt, "SSS") == "002" - assert self.formatter._format_token(dt, "SS") == "00" - assert self.formatter._format_token(dt, "S") == "0" - - def test_timestamp(self): - - timestamp = 1588437009.8952794 - dt = datetime.utcfromtimestamp(timestamp) - expected = str(int(timestamp)) - assert self.formatter._format_token(dt, "X") == expected - - # Must round because time.time() may return a float with greater - # than 6 digits of precision - expected = str(int(timestamp * 1000000)) - assert self.formatter._format_token(dt, "x") == expected - - def test_timezone(self): - - dt = datetime.utcnow().replace(tzinfo=dateutil_tz.gettz("US/Pacific")) - - result = self.formatter._format_token(dt, "ZZ") - assert result == "-07:00" or result == "-08:00" - - result = self.formatter._format_token(dt, "Z") - assert result == "-0700" or result == "-0800" - - @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) - def test_timezone_formatter(self, full_tz_name): - - # This test will fail if we use "now" as date as soon as we change from/to DST - dt = datetime(1986, 2, 14, tzinfo=pytz.timezone("UTC")).replace( - tzinfo=dateutil_tz.gettz(full_tz_name) - ) - abbreviation = dt.tzname() - - result = 
self.formatter._format_token(dt, "ZZZ") - assert result == abbreviation - - def test_am_pm(self): - - dt = datetime(2012, 1, 1, 11) - assert self.formatter._format_token(dt, "a") == "am" - assert self.formatter._format_token(dt, "A") == "AM" - - dt = datetime(2012, 1, 1, 13) - assert self.formatter._format_token(dt, "a") == "pm" - assert self.formatter._format_token(dt, "A") == "PM" - - def test_week(self): - dt = datetime(2017, 5, 19) - assert self.formatter._format_token(dt, "W") == "2017-W20-5" - - # make sure week is zero padded when needed - dt_early = datetime(2011, 1, 20) - assert self.formatter._format_token(dt_early, "W") == "2011-W03-4" - - def test_nonsense(self): - dt = datetime(2012, 1, 1, 11) - assert self.formatter._format_token(dt, None) is None - assert self.formatter._format_token(dt, "NONSENSE") is None - - def test_escape(self): - - assert ( - self.formatter.format( - datetime(2015, 12, 10, 17, 9), "MMMM D, YYYY [at] h:mma" - ) - == "December 10, 2015 at 5:09pm" - ) - - assert ( - self.formatter.format( - datetime(2015, 12, 10, 17, 9), "[MMMM] M D, YYYY [at] h:mma" - ) - == "MMMM 12 10, 2015 at 5:09pm" - ) - - assert ( - self.formatter.format( - datetime(1990, 11, 25), - "[It happened on] MMMM Do [in the year] YYYY [a long time ago]", - ) - == "It happened on November 25th in the year 1990 a long time ago" - ) - - assert ( - self.formatter.format( - datetime(1990, 11, 25), - "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]", - ) - == "It happened on November 25th in the year 1990 a long time ago" - ) - - assert ( - self.formatter.format( - datetime(1, 1, 1), "[I'm][ entirely][ escaped,][ weee!]" - ) - == "I'm entirely escaped, weee!" 
- ) - - # Special RegEx characters - assert ( - self.formatter.format( - datetime(2017, 12, 31, 2, 0), "MMM DD, YYYY |^${}().*+?<>-& h:mm A" - ) - == "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM" - ) - - # Escaping is atomic: brackets inside brackets are treated literally - assert self.formatter.format(datetime(1, 1, 1), "[[[ ]]") == "[[ ]" - - -@pytest.mark.usefixtures("arrow_formatter", "time_1975_12_25") -class TestFormatterBuiltinFormats: - def test_atom(self): - assert ( - self.formatter.format(self.datetime, FORMAT_ATOM) - == "1975-12-25 14:15:16-05:00" - ) - - def test_cookie(self): - assert ( - self.formatter.format(self.datetime, FORMAT_COOKIE) - == "Thursday, 25-Dec-1975 14:15:16 EST" - ) - - def test_rfc_822(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC822) - == "Thu, 25 Dec 75 14:15:16 -0500" - ) - - def test_rfc_850(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC850) - == "Thursday, 25-Dec-75 14:15:16 EST" - ) - - def test_rfc_1036(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC1036) - == "Thu, 25 Dec 75 14:15:16 -0500" - ) - - def test_rfc_1123(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC1123) - == "Thu, 25 Dec 1975 14:15:16 -0500" - ) - - def test_rfc_2822(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC2822) - == "Thu, 25 Dec 1975 14:15:16 -0500" - ) - - def test_rfc3339(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RFC3339) - == "1975-12-25 14:15:16-05:00" - ) - - def test_rss(self): - assert ( - self.formatter.format(self.datetime, FORMAT_RSS) - == "Thu, 25 Dec 1975 14:15:16 -0500" - ) - - def test_w3c(self): - assert ( - self.formatter.format(self.datetime, FORMAT_W3C) - == "1975-12-25 14:15:16-05:00" - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py deleted file mode 100644 index 
006ccdd5ba..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_locales.py +++ /dev/null @@ -1,1352 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import pytest - -from arrow import arrow, locales - - -@pytest.mark.usefixtures("lang_locales") -class TestLocaleValidation: - """Validate locales to ensure that translations are valid and complete""" - - def test_locale_validation(self): - - for _, locale_cls in self.locales.items(): - # 7 days + 1 spacer to allow for 1-indexing of months - assert len(locale_cls.day_names) == 8 - assert locale_cls.day_names[0] == "" - # ensure that all string from index 1 onward are valid (not blank or None) - assert all(locale_cls.day_names[1:]) - - assert len(locale_cls.day_abbreviations) == 8 - assert locale_cls.day_abbreviations[0] == "" - assert all(locale_cls.day_abbreviations[1:]) - - # 12 months + 1 spacer to allow for 1-indexing of months - assert len(locale_cls.month_names) == 13 - assert locale_cls.month_names[0] == "" - assert all(locale_cls.month_names[1:]) - - assert len(locale_cls.month_abbreviations) == 13 - assert locale_cls.month_abbreviations[0] == "" - assert all(locale_cls.month_abbreviations[1:]) - - assert len(locale_cls.names) > 0 - assert locale_cls.past is not None - assert locale_cls.future is not None - - -class TestModule: - def test_get_locale(self, mocker): - mock_locale = mocker.Mock() - mock_locale_cls = mocker.Mock() - mock_locale_cls.return_value = mock_locale - - with pytest.raises(ValueError): - arrow.locales.get_locale("locale_name") - - cls_dict = arrow.locales._locales - mocker.patch.dict(cls_dict, {"locale_name": mock_locale_cls}) - - result = arrow.locales.get_locale("locale_name") - - assert result == mock_locale - - def test_get_locale_by_class_name(self, mocker): - mock_locale_cls = mocker.Mock() - mock_locale_obj = mock_locale_cls.return_value = mocker.Mock() - - globals_fn = mocker.Mock() - globals_fn.return_value = 
{"NonExistentLocale": mock_locale_cls} - - with pytest.raises(ValueError): - arrow.locales.get_locale_by_class_name("NonExistentLocale") - - mocker.patch.object(locales, "globals", globals_fn) - result = arrow.locales.get_locale_by_class_name("NonExistentLocale") - - mock_locale_cls.assert_called_once_with() - assert result == mock_locale_obj - - def test_locales(self): - - assert len(locales._locales) > 0 - - -@pytest.mark.usefixtures("lang_locale") -class TestEnglishLocale: - def test_describe(self): - assert self.locale.describe("now", only_distance=True) == "instantly" - assert self.locale.describe("now", only_distance=False) == "just now" - - def test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 hours" - assert self.locale._format_timeframe("hour", 0) == "an hour" - - def test_format_relative_now(self): - - result = self.locale._format_relative("just now", "now", 0) - - assert result == "just now" - - def test_format_relative_past(self): - - result = self.locale._format_relative("an hour", "hour", 1) - - assert result == "in an hour" - - def test_format_relative_future(self): - - result = self.locale._format_relative("an hour", "hour", -1) - - assert result == "an hour ago" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(0) == "0th" - assert self.locale.ordinal_number(1) == "1st" - assert self.locale.ordinal_number(2) == "2nd" - assert self.locale.ordinal_number(3) == "3rd" - assert self.locale.ordinal_number(4) == "4th" - assert self.locale.ordinal_number(10) == "10th" - assert self.locale.ordinal_number(11) == "11th" - assert self.locale.ordinal_number(12) == "12th" - assert self.locale.ordinal_number(13) == "13th" - assert self.locale.ordinal_number(14) == "14th" - assert self.locale.ordinal_number(21) == "21st" - assert self.locale.ordinal_number(22) == "22nd" - assert self.locale.ordinal_number(23) == "23rd" - assert self.locale.ordinal_number(24) == "24th" - - assert 
self.locale.ordinal_number(100) == "100th" - assert self.locale.ordinal_number(101) == "101st" - assert self.locale.ordinal_number(102) == "102nd" - assert self.locale.ordinal_number(103) == "103rd" - assert self.locale.ordinal_number(104) == "104th" - assert self.locale.ordinal_number(110) == "110th" - assert self.locale.ordinal_number(111) == "111th" - assert self.locale.ordinal_number(112) == "112th" - assert self.locale.ordinal_number(113) == "113th" - assert self.locale.ordinal_number(114) == "114th" - assert self.locale.ordinal_number(121) == "121st" - assert self.locale.ordinal_number(122) == "122nd" - assert self.locale.ordinal_number(123) == "123rd" - assert self.locale.ordinal_number(124) == "124th" - - def test_meridian_invalid_token(self): - assert self.locale.meridian(7, None) is None - assert self.locale.meridian(7, "B") is None - assert self.locale.meridian(7, "NONSENSE") is None - - -@pytest.mark.usefixtures("lang_locale") -class TestItalianLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1º" - - -@pytest.mark.usefixtures("lang_locale") -class TestSpanishLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1º" - - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "ahora" - assert self.locale._format_timeframe("seconds", 1) == "1 segundos" - assert self.locale._format_timeframe("seconds", 3) == "3 segundos" - assert self.locale._format_timeframe("seconds", 30) == "30 segundos" - assert self.locale._format_timeframe("minute", 1) == "un minuto" - assert self.locale._format_timeframe("minutes", 4) == "4 minutos" - assert self.locale._format_timeframe("minutes", 40) == "40 minutos" - assert self.locale._format_timeframe("hour", 1) == "una hora" - assert self.locale._format_timeframe("hours", 5) == "5 horas" - assert self.locale._format_timeframe("hours", 23) == "23 horas" - assert self.locale._format_timeframe("day", 1) == "un día" - assert 
self.locale._format_timeframe("days", 6) == "6 días" - assert self.locale._format_timeframe("days", 12) == "12 días" - assert self.locale._format_timeframe("week", 1) == "una semana" - assert self.locale._format_timeframe("weeks", 2) == "2 semanas" - assert self.locale._format_timeframe("weeks", 3) == "3 semanas" - assert self.locale._format_timeframe("month", 1) == "un mes" - assert self.locale._format_timeframe("months", 7) == "7 meses" - assert self.locale._format_timeframe("months", 11) == "11 meses" - assert self.locale._format_timeframe("year", 1) == "un año" - assert self.locale._format_timeframe("years", 8) == "8 años" - assert self.locale._format_timeframe("years", 12) == "12 años" - - assert self.locale._format_timeframe("now", 0) == "ahora" - assert self.locale._format_timeframe("seconds", -1) == "1 segundos" - assert self.locale._format_timeframe("seconds", -9) == "9 segundos" - assert self.locale._format_timeframe("seconds", -12) == "12 segundos" - assert self.locale._format_timeframe("minute", -1) == "un minuto" - assert self.locale._format_timeframe("minutes", -2) == "2 minutos" - assert self.locale._format_timeframe("minutes", -10) == "10 minutos" - assert self.locale._format_timeframe("hour", -1) == "una hora" - assert self.locale._format_timeframe("hours", -3) == "3 horas" - assert self.locale._format_timeframe("hours", -11) == "11 horas" - assert self.locale._format_timeframe("day", -1) == "un día" - assert self.locale._format_timeframe("days", -2) == "2 días" - assert self.locale._format_timeframe("days", -12) == "12 días" - assert self.locale._format_timeframe("week", -1) == "una semana" - assert self.locale._format_timeframe("weeks", -2) == "2 semanas" - assert self.locale._format_timeframe("weeks", -3) == "3 semanas" - assert self.locale._format_timeframe("month", -1) == "un mes" - assert self.locale._format_timeframe("months", -3) == "3 meses" - assert self.locale._format_timeframe("months", -13) == "13 meses" - assert 
self.locale._format_timeframe("year", -1) == "un año" - assert self.locale._format_timeframe("years", -4) == "4 años" - assert self.locale._format_timeframe("years", -14) == "14 años" - - -@pytest.mark.usefixtures("lang_locale") -class TestFrenchLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1er" - assert self.locale.ordinal_number(2) == "2e" - - def test_month_abbreviation(self): - assert "juil" in self.locale.month_abbreviations - - -@pytest.mark.usefixtures("lang_locale") -class TestFrenchCanadianLocale: - def test_month_abbreviation(self): - assert "juill" in self.locale.month_abbreviations - - -@pytest.mark.usefixtures("lang_locale") -class TestRussianLocale: - def test_plurals2(self): - assert self.locale._format_timeframe("hours", 0) == "0 часов" - assert self.locale._format_timeframe("hours", 1) == "1 час" - assert self.locale._format_timeframe("hours", 2) == "2 часа" - assert self.locale._format_timeframe("hours", 4) == "4 часа" - assert self.locale._format_timeframe("hours", 5) == "5 часов" - assert self.locale._format_timeframe("hours", 21) == "21 час" - assert self.locale._format_timeframe("hours", 22) == "22 часа" - assert self.locale._format_timeframe("hours", 25) == "25 часов" - - # feminine grammatical gender should be tested separately - assert self.locale._format_timeframe("minutes", 0) == "0 минут" - assert self.locale._format_timeframe("minutes", 1) == "1 минуту" - assert self.locale._format_timeframe("minutes", 2) == "2 минуты" - assert self.locale._format_timeframe("minutes", 4) == "4 минуты" - assert self.locale._format_timeframe("minutes", 5) == "5 минут" - assert self.locale._format_timeframe("minutes", 21) == "21 минуту" - assert self.locale._format_timeframe("minutes", 22) == "22 минуты" - assert self.locale._format_timeframe("minutes", 25) == "25 минут" - - -@pytest.mark.usefixtures("lang_locale") -class TestPolishLocale: - def test_plurals(self): - - assert self.locale._format_timeframe("seconds", 0) 
== "0 sekund" - assert self.locale._format_timeframe("second", 1) == "sekundę" - assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" - assert self.locale._format_timeframe("seconds", 5) == "5 sekund" - assert self.locale._format_timeframe("seconds", 21) == "21 sekund" - assert self.locale._format_timeframe("seconds", 22) == "22 sekundy" - assert self.locale._format_timeframe("seconds", 25) == "25 sekund" - - assert self.locale._format_timeframe("minutes", 0) == "0 minut" - assert self.locale._format_timeframe("minute", 1) == "minutę" - assert self.locale._format_timeframe("minutes", 2) == "2 minuty" - assert self.locale._format_timeframe("minutes", 5) == "5 minut" - assert self.locale._format_timeframe("minutes", 21) == "21 minut" - assert self.locale._format_timeframe("minutes", 22) == "22 minuty" - assert self.locale._format_timeframe("minutes", 25) == "25 minut" - - assert self.locale._format_timeframe("hours", 0) == "0 godzin" - assert self.locale._format_timeframe("hour", 1) == "godzinę" - assert self.locale._format_timeframe("hours", 2) == "2 godziny" - assert self.locale._format_timeframe("hours", 5) == "5 godzin" - assert self.locale._format_timeframe("hours", 21) == "21 godzin" - assert self.locale._format_timeframe("hours", 22) == "22 godziny" - assert self.locale._format_timeframe("hours", 25) == "25 godzin" - - assert self.locale._format_timeframe("weeks", 0) == "0 tygodni" - assert self.locale._format_timeframe("week", 1) == "tydzień" - assert self.locale._format_timeframe("weeks", 2) == "2 tygodnie" - assert self.locale._format_timeframe("weeks", 5) == "5 tygodni" - assert self.locale._format_timeframe("weeks", 21) == "21 tygodni" - assert self.locale._format_timeframe("weeks", 22) == "22 tygodnie" - assert self.locale._format_timeframe("weeks", 25) == "25 tygodni" - - assert self.locale._format_timeframe("months", 0) == "0 miesięcy" - assert self.locale._format_timeframe("month", 1) == "miesiąc" - assert 
self.locale._format_timeframe("months", 2) == "2 miesiące" - assert self.locale._format_timeframe("months", 5) == "5 miesięcy" - assert self.locale._format_timeframe("months", 21) == "21 miesięcy" - assert self.locale._format_timeframe("months", 22) == "22 miesiące" - assert self.locale._format_timeframe("months", 25) == "25 miesięcy" - - assert self.locale._format_timeframe("years", 0) == "0 lat" - assert self.locale._format_timeframe("year", 1) == "rok" - assert self.locale._format_timeframe("years", 2) == "2 lata" - assert self.locale._format_timeframe("years", 5) == "5 lat" - assert self.locale._format_timeframe("years", 21) == "21 lat" - assert self.locale._format_timeframe("years", 22) == "22 lata" - assert self.locale._format_timeframe("years", 25) == "25 lat" - - -@pytest.mark.usefixtures("lang_locale") -class TestIcelandicLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("minute", -1) == "einni mínútu" - assert self.locale._format_timeframe("minute", 1) == "eina mínútu" - - assert self.locale._format_timeframe("hours", -2) == "2 tímum" - assert self.locale._format_timeframe("hours", 2) == "2 tíma" - assert self.locale._format_timeframe("now", 0) == "rétt í þessu" - - -@pytest.mark.usefixtures("lang_locale") -class TestMalayalamLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 മണിക്കൂർ" - assert self.locale._format_timeframe("hour", 0) == "ഒരു മണിക്കൂർ" - - def test_format_relative_now(self): - - result = self.locale._format_relative("ഇപ്പോൾ", "now", 0) - - assert result == "ഇപ്പോൾ" - - def test_format_relative_past(self): - - result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", 1) - assert result == "ഒരു മണിക്കൂർ ശേഷം" - - def test_format_relative_future(self): - - result = self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", -1) - assert result == "ഒരു മണിക്കൂർ മുമ്പ്" - - -@pytest.mark.usefixtures("lang_locale") -class TestHindiLocale: - def 
test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 घंटे" - assert self.locale._format_timeframe("hour", 0) == "एक घंटा" - - def test_format_relative_now(self): - - result = self.locale._format_relative("अभी", "now", 0) - assert result == "अभी" - - def test_format_relative_past(self): - - result = self.locale._format_relative("एक घंटा", "hour", 1) - assert result == "एक घंटा बाद" - - def test_format_relative_future(self): - - result = self.locale._format_relative("एक घंटा", "hour", -1) - assert result == "एक घंटा पहले" - - -@pytest.mark.usefixtures("lang_locale") -class TestCzechLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("hours", 2) == "2 hodiny" - assert self.locale._format_timeframe("hours", 5) == "5 hodin" - assert self.locale._format_timeframe("hour", 0) == "0 hodin" - assert self.locale._format_timeframe("hours", -2) == "2 hodinami" - assert self.locale._format_timeframe("hours", -5) == "5 hodinami" - assert self.locale._format_timeframe("now", 0) == "Teď" - - assert self.locale._format_timeframe("weeks", 2) == "2 týdny" - assert self.locale._format_timeframe("weeks", 5) == "5 týdnů" - assert self.locale._format_timeframe("week", 0) == "0 týdnů" - assert self.locale._format_timeframe("weeks", -2) == "2 týdny" - assert self.locale._format_timeframe("weeks", -5) == "5 týdny" - - def test_format_relative_now(self): - - result = self.locale._format_relative("Teď", "now", 0) - assert result == "Teď" - - def test_format_relative_future(self): - - result = self.locale._format_relative("hodinu", "hour", 1) - assert result == "Za hodinu" - - def test_format_relative_past(self): - - result = self.locale._format_relative("hodinou", "hour", -1) - assert result == "Před hodinou" - - -@pytest.mark.usefixtures("lang_locale") -class TestSlovakLocale: - def test_format_timeframe(self): - - assert self.locale._format_timeframe("seconds", -5) == "5 sekundami" - assert 
self.locale._format_timeframe("seconds", -2) == "2 sekundami" - assert self.locale._format_timeframe("second", -1) == "sekundou" - assert self.locale._format_timeframe("second", 0) == "0 sekúnd" - assert self.locale._format_timeframe("second", 1) == "sekundu" - assert self.locale._format_timeframe("seconds", 2) == "2 sekundy" - assert self.locale._format_timeframe("seconds", 5) == "5 sekúnd" - - assert self.locale._format_timeframe("minutes", -5) == "5 minútami" - assert self.locale._format_timeframe("minutes", -2) == "2 minútami" - assert self.locale._format_timeframe("minute", -1) == "minútou" - assert self.locale._format_timeframe("minute", 0) == "0 minút" - assert self.locale._format_timeframe("minute", 1) == "minútu" - assert self.locale._format_timeframe("minutes", 2) == "2 minúty" - assert self.locale._format_timeframe("minutes", 5) == "5 minút" - - assert self.locale._format_timeframe("hours", -5) == "5 hodinami" - assert self.locale._format_timeframe("hours", -2) == "2 hodinami" - assert self.locale._format_timeframe("hour", -1) == "hodinou" - assert self.locale._format_timeframe("hour", 0) == "0 hodín" - assert self.locale._format_timeframe("hour", 1) == "hodinu" - assert self.locale._format_timeframe("hours", 2) == "2 hodiny" - assert self.locale._format_timeframe("hours", 5) == "5 hodín" - - assert self.locale._format_timeframe("days", -5) == "5 dňami" - assert self.locale._format_timeframe("days", -2) == "2 dňami" - assert self.locale._format_timeframe("day", -1) == "dňom" - assert self.locale._format_timeframe("day", 0) == "0 dní" - assert self.locale._format_timeframe("day", 1) == "deň" - assert self.locale._format_timeframe("days", 2) == "2 dni" - assert self.locale._format_timeframe("days", 5) == "5 dní" - - assert self.locale._format_timeframe("weeks", -5) == "5 týždňami" - assert self.locale._format_timeframe("weeks", -2) == "2 týždňami" - assert self.locale._format_timeframe("week", -1) == "týždňom" - assert self.locale._format_timeframe("week", 
0) == "0 týždňov" - assert self.locale._format_timeframe("week", 1) == "týždeň" - assert self.locale._format_timeframe("weeks", 2) == "2 týždne" - assert self.locale._format_timeframe("weeks", 5) == "5 týždňov" - - assert self.locale._format_timeframe("months", -5) == "5 mesiacmi" - assert self.locale._format_timeframe("months", -2) == "2 mesiacmi" - assert self.locale._format_timeframe("month", -1) == "mesiacom" - assert self.locale._format_timeframe("month", 0) == "0 mesiacov" - assert self.locale._format_timeframe("month", 1) == "mesiac" - assert self.locale._format_timeframe("months", 2) == "2 mesiace" - assert self.locale._format_timeframe("months", 5) == "5 mesiacov" - - assert self.locale._format_timeframe("years", -5) == "5 rokmi" - assert self.locale._format_timeframe("years", -2) == "2 rokmi" - assert self.locale._format_timeframe("year", -1) == "rokom" - assert self.locale._format_timeframe("year", 0) == "0 rokov" - assert self.locale._format_timeframe("year", 1) == "rok" - assert self.locale._format_timeframe("years", 2) == "2 roky" - assert self.locale._format_timeframe("years", 5) == "5 rokov" - - assert self.locale._format_timeframe("now", 0) == "Teraz" - - def test_format_relative_now(self): - - result = self.locale._format_relative("Teraz", "now", 0) - assert result == "Teraz" - - def test_format_relative_future(self): - - result = self.locale._format_relative("hodinu", "hour", 1) - assert result == "O hodinu" - - def test_format_relative_past(self): - - result = self.locale._format_relative("hodinou", "hour", -1) - assert result == "Pred hodinou" - - -@pytest.mark.usefixtures("lang_locale") -class TestBulgarianLocale: - def test_plurals2(self): - assert self.locale._format_timeframe("hours", 0) == "0 часа" - assert self.locale._format_timeframe("hours", 1) == "1 час" - assert self.locale._format_timeframe("hours", 2) == "2 часа" - assert self.locale._format_timeframe("hours", 4) == "4 часа" - assert self.locale._format_timeframe("hours", 5) == "5 
часа" - assert self.locale._format_timeframe("hours", 21) == "21 час" - assert self.locale._format_timeframe("hours", 22) == "22 часа" - assert self.locale._format_timeframe("hours", 25) == "25 часа" - - # feminine grammatical gender should be tested separately - assert self.locale._format_timeframe("minutes", 0) == "0 минути" - assert self.locale._format_timeframe("minutes", 1) == "1 минута" - assert self.locale._format_timeframe("minutes", 2) == "2 минути" - assert self.locale._format_timeframe("minutes", 4) == "4 минути" - assert self.locale._format_timeframe("minutes", 5) == "5 минути" - assert self.locale._format_timeframe("minutes", 21) == "21 минута" - assert self.locale._format_timeframe("minutes", 22) == "22 минути" - assert self.locale._format_timeframe("minutes", 25) == "25 минути" - - -@pytest.mark.usefixtures("lang_locale") -class TestMacedonianLocale: - def test_singles_mk(self): - assert self.locale._format_timeframe("second", 1) == "една секунда" - assert self.locale._format_timeframe("minute", 1) == "една минута" - assert self.locale._format_timeframe("hour", 1) == "еден саат" - assert self.locale._format_timeframe("day", 1) == "еден ден" - assert self.locale._format_timeframe("week", 1) == "една недела" - assert self.locale._format_timeframe("month", 1) == "еден месец" - assert self.locale._format_timeframe("year", 1) == "една година" - - def test_meridians_mk(self): - assert self.locale.meridian(7, "A") == "претпладне" - assert self.locale.meridian(18, "A") == "попладне" - assert self.locale.meridian(10, "a") == "дп" - assert self.locale.meridian(22, "a") == "пп" - - def test_describe_mk(self): - assert self.locale.describe("second", only_distance=True) == "една секунда" - assert self.locale.describe("second", only_distance=False) == "за една секунда" - assert self.locale.describe("minute", only_distance=True) == "една минута" - assert self.locale.describe("minute", only_distance=False) == "за една минута" - assert self.locale.describe("hour", 
only_distance=True) == "еден саат" - assert self.locale.describe("hour", only_distance=False) == "за еден саат" - assert self.locale.describe("day", only_distance=True) == "еден ден" - assert self.locale.describe("day", only_distance=False) == "за еден ден" - assert self.locale.describe("week", only_distance=True) == "една недела" - assert self.locale.describe("week", only_distance=False) == "за една недела" - assert self.locale.describe("month", only_distance=True) == "еден месец" - assert self.locale.describe("month", only_distance=False) == "за еден месец" - assert self.locale.describe("year", only_distance=True) == "една година" - assert self.locale.describe("year", only_distance=False) == "за една година" - - def test_relative_mk(self): - # time - assert self.locale._format_relative("сега", "now", 0) == "сега" - assert self.locale._format_relative("1 секунда", "seconds", 1) == "за 1 секунда" - assert self.locale._format_relative("1 минута", "minutes", 1) == "за 1 минута" - assert self.locale._format_relative("1 саат", "hours", 1) == "за 1 саат" - assert self.locale._format_relative("1 ден", "days", 1) == "за 1 ден" - assert self.locale._format_relative("1 недела", "weeks", 1) == "за 1 недела" - assert self.locale._format_relative("1 месец", "months", 1) == "за 1 месец" - assert self.locale._format_relative("1 година", "years", 1) == "за 1 година" - assert ( - self.locale._format_relative("1 секунда", "seconds", -1) == "пред 1 секунда" - ) - assert ( - self.locale._format_relative("1 минута", "minutes", -1) == "пред 1 минута" - ) - assert self.locale._format_relative("1 саат", "hours", -1) == "пред 1 саат" - assert self.locale._format_relative("1 ден", "days", -1) == "пред 1 ден" - assert self.locale._format_relative("1 недела", "weeks", -1) == "пред 1 недела" - assert self.locale._format_relative("1 месец", "months", -1) == "пред 1 месец" - assert self.locale._format_relative("1 година", "years", -1) == "пред 1 година" - - def test_plurals_mk(self): - # 
Seconds - assert self.locale._format_timeframe("seconds", 0) == "0 секунди" - assert self.locale._format_timeframe("seconds", 1) == "1 секунда" - assert self.locale._format_timeframe("seconds", 2) == "2 секунди" - assert self.locale._format_timeframe("seconds", 4) == "4 секунди" - assert self.locale._format_timeframe("seconds", 5) == "5 секунди" - assert self.locale._format_timeframe("seconds", 21) == "21 секунда" - assert self.locale._format_timeframe("seconds", 22) == "22 секунди" - assert self.locale._format_timeframe("seconds", 25) == "25 секунди" - - # Minutes - assert self.locale._format_timeframe("minutes", 0) == "0 минути" - assert self.locale._format_timeframe("minutes", 1) == "1 минута" - assert self.locale._format_timeframe("minutes", 2) == "2 минути" - assert self.locale._format_timeframe("minutes", 4) == "4 минути" - assert self.locale._format_timeframe("minutes", 5) == "5 минути" - assert self.locale._format_timeframe("minutes", 21) == "21 минута" - assert self.locale._format_timeframe("minutes", 22) == "22 минути" - assert self.locale._format_timeframe("minutes", 25) == "25 минути" - - # Hours - assert self.locale._format_timeframe("hours", 0) == "0 саати" - assert self.locale._format_timeframe("hours", 1) == "1 саат" - assert self.locale._format_timeframe("hours", 2) == "2 саати" - assert self.locale._format_timeframe("hours", 4) == "4 саати" - assert self.locale._format_timeframe("hours", 5) == "5 саати" - assert self.locale._format_timeframe("hours", 21) == "21 саат" - assert self.locale._format_timeframe("hours", 22) == "22 саати" - assert self.locale._format_timeframe("hours", 25) == "25 саати" - - # Days - assert self.locale._format_timeframe("days", 0) == "0 дена" - assert self.locale._format_timeframe("days", 1) == "1 ден" - assert self.locale._format_timeframe("days", 2) == "2 дена" - assert self.locale._format_timeframe("days", 3) == "3 дена" - assert self.locale._format_timeframe("days", 21) == "21 ден" - - # Weeks - assert 
self.locale._format_timeframe("weeks", 0) == "0 недели" - assert self.locale._format_timeframe("weeks", 1) == "1 недела" - assert self.locale._format_timeframe("weeks", 2) == "2 недели" - assert self.locale._format_timeframe("weeks", 4) == "4 недели" - assert self.locale._format_timeframe("weeks", 5) == "5 недели" - assert self.locale._format_timeframe("weeks", 21) == "21 недела" - assert self.locale._format_timeframe("weeks", 22) == "22 недели" - assert self.locale._format_timeframe("weeks", 25) == "25 недели" - - # Months - assert self.locale._format_timeframe("months", 0) == "0 месеци" - assert self.locale._format_timeframe("months", 1) == "1 месец" - assert self.locale._format_timeframe("months", 2) == "2 месеци" - assert self.locale._format_timeframe("months", 4) == "4 месеци" - assert self.locale._format_timeframe("months", 5) == "5 месеци" - assert self.locale._format_timeframe("months", 21) == "21 месец" - assert self.locale._format_timeframe("months", 22) == "22 месеци" - assert self.locale._format_timeframe("months", 25) == "25 месеци" - - # Years - assert self.locale._format_timeframe("years", 1) == "1 година" - assert self.locale._format_timeframe("years", 2) == "2 години" - assert self.locale._format_timeframe("years", 5) == "5 години" - - def test_multi_describe_mk(self): - describe = self.locale.describe_multi - - fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] - assert describe(fulltest) == "за 5 години 1 недела 1 саат 6 минути" - seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] - assert describe(seconds4000_0days) == "за 0 дена 1 саат 6 минути" - seconds4000 = [("hours", 1), ("minutes", 6)] - assert describe(seconds4000) == "за 1 саат 6 минути" - assert describe(seconds4000, only_distance=True) == "1 саат 6 минути" - seconds3700 = [("hours", 1), ("minutes", 1)] - assert describe(seconds3700) == "за 1 саат 1 минута" - seconds300_0hours = [("hours", 0), ("minutes", 5)] - assert describe(seconds300_0hours) == "за 0 
саати 5 минути" - seconds300 = [("minutes", 5)] - assert describe(seconds300) == "за 5 минути" - seconds60 = [("minutes", 1)] - assert describe(seconds60) == "за 1 минута" - assert describe(seconds60, only_distance=True) == "1 минута" - seconds60 = [("seconds", 1)] - assert describe(seconds60) == "за 1 секунда" - assert describe(seconds60, only_distance=True) == "1 секунда" - - -@pytest.mark.usefixtures("time_2013_01_01") -@pytest.mark.usefixtures("lang_locale") -class TestHebrewLocale: - def test_couple_of_timeframe(self): - assert self.locale._format_timeframe("days", 1) == "יום" - assert self.locale._format_timeframe("days", 2) == "יומיים" - assert self.locale._format_timeframe("days", 3) == "3 ימים" - - assert self.locale._format_timeframe("hours", 1) == "שעה" - assert self.locale._format_timeframe("hours", 2) == "שעתיים" - assert self.locale._format_timeframe("hours", 3) == "3 שעות" - - assert self.locale._format_timeframe("week", 1) == "שבוע" - assert self.locale._format_timeframe("weeks", 2) == "שבועיים" - assert self.locale._format_timeframe("weeks", 3) == "3 שבועות" - - assert self.locale._format_timeframe("months", 1) == "חודש" - assert self.locale._format_timeframe("months", 2) == "חודשיים" - assert self.locale._format_timeframe("months", 4) == "4 חודשים" - - assert self.locale._format_timeframe("years", 1) == "שנה" - assert self.locale._format_timeframe("years", 2) == "שנתיים" - assert self.locale._format_timeframe("years", 5) == "5 שנים" - - def test_describe_multi(self): - describe = self.locale.describe_multi - - fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] - assert describe(fulltest) == "בעוד 5 שנים, שבוע, שעה ו־6 דקות" - seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] - assert describe(seconds4000_0days) == "בעוד 0 ימים, שעה ו־6 דקות" - seconds4000 = [("hours", 1), ("minutes", 6)] - assert describe(seconds4000) == "בעוד שעה ו־6 דקות" - assert describe(seconds4000, only_distance=True) == "שעה ו־6 דקות" - 
seconds3700 = [("hours", 1), ("minutes", 1)] - assert describe(seconds3700) == "בעוד שעה ודקה" - seconds300_0hours = [("hours", 0), ("minutes", 5)] - assert describe(seconds300_0hours) == "בעוד 0 שעות ו־5 דקות" - seconds300 = [("minutes", 5)] - assert describe(seconds300) == "בעוד 5 דקות" - seconds60 = [("minutes", 1)] - assert describe(seconds60) == "בעוד דקה" - assert describe(seconds60, only_distance=True) == "דקה" - - -@pytest.mark.usefixtures("lang_locale") -class TestMarathiLocale: - def test_dateCoreFunctionality(self): - dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) - assert self.locale.month_name(dt.month) == "एप्रिल" - assert self.locale.month_abbreviation(dt.month) == "एप्रि" - assert self.locale.day_name(dt.isoweekday()) == "शनिवार" - assert self.locale.day_abbreviation(dt.isoweekday()) == "शनि" - - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == "2 तास" - assert self.locale._format_timeframe("hour", 0) == "एक तास" - - def test_format_relative_now(self): - result = self.locale._format_relative("सद्य", "now", 0) - assert result == "सद्य" - - def test_format_relative_past(self): - result = self.locale._format_relative("एक तास", "hour", 1) - assert result == "एक तास नंतर" - - def test_format_relative_future(self): - result = self.locale._format_relative("एक तास", "hour", -1) - assert result == "एक तास आधी" - - # Not currently implemented - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1" - - -@pytest.mark.usefixtures("lang_locale") -class TestFinnishLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == ("2 tuntia", "2 tunnin") - assert self.locale._format_timeframe("hour", 0) == ("tunti", "tunnin") - - def test_format_relative_now(self): - result = self.locale._format_relative(["juuri nyt", "juuri nyt"], "now", 0) - assert result == "juuri nyt" - - def test_format_relative_past(self): - result = self.locale._format_relative(["tunti", "tunnin"], "hour", 
1) - assert result == "tunnin kuluttua" - - def test_format_relative_future(self): - result = self.locale._format_relative(["tunti", "tunnin"], "hour", -1) - assert result == "tunti sitten" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1." - - -@pytest.mark.usefixtures("lang_locale") -class TestGermanLocale: - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1." - - def test_define(self): - assert self.locale.describe("minute", only_distance=True) == "eine Minute" - assert self.locale.describe("minute", only_distance=False) == "in einer Minute" - assert self.locale.describe("hour", only_distance=True) == "eine Stunde" - assert self.locale.describe("hour", only_distance=False) == "in einer Stunde" - assert self.locale.describe("day", only_distance=True) == "ein Tag" - assert self.locale.describe("day", only_distance=False) == "in einem Tag" - assert self.locale.describe("week", only_distance=True) == "eine Woche" - assert self.locale.describe("week", only_distance=False) == "in einer Woche" - assert self.locale.describe("month", only_distance=True) == "ein Monat" - assert self.locale.describe("month", only_distance=False) == "in einem Monat" - assert self.locale.describe("year", only_distance=True) == "ein Jahr" - assert self.locale.describe("year", only_distance=False) == "in einem Jahr" - - def test_weekday(self): - dt = arrow.Arrow(2015, 4, 11, 17, 30, 00) - assert self.locale.day_name(dt.isoweekday()) == "Samstag" - assert self.locale.day_abbreviation(dt.isoweekday()) == "Sa" - - -@pytest.mark.usefixtures("lang_locale") -class TestHungarianLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == "2 óra" - assert self.locale._format_timeframe("hour", 0) == "egy órával" - assert self.locale._format_timeframe("hours", -2) == "2 órával" - assert self.locale._format_timeframe("now", 0) == "éppen most" - - -@pytest.mark.usefixtures("lang_locale") -class 
TestEsperantoLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 2) == "2 horoj" - assert self.locale._format_timeframe("hour", 0) == "un horo" - assert self.locale._format_timeframe("hours", -2) == "2 horoj" - assert self.locale._format_timeframe("now", 0) == "nun" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(1) == "1a" - - -@pytest.mark.usefixtures("lang_locale") -class TestThaiLocale: - def test_year_full(self): - assert self.locale.year_full(2015) == "2558" - - def test_year_abbreviation(self): - assert self.locale.year_abbreviation(2015) == "58" - - def test_format_relative_now(self): - result = self.locale._format_relative("ขณะนี้", "now", 0) - assert result == "ขณะนี้" - - def test_format_relative_past(self): - result = self.locale._format_relative("1 ชั่วโมง", "hour", 1) - assert result == "ในอีก 1 ชั่วโมง" - result = self.locale._format_relative("{0} ชั่วโมง", "hours", 2) - assert result == "ในอีก {0} ชั่วโมง" - result = self.locale._format_relative("ไม่กี่วินาที", "seconds", 42) - assert result == "ในอีกไม่กี่วินาที" - - def test_format_relative_future(self): - result = self.locale._format_relative("1 ชั่วโมง", "hour", -1) - assert result == "1 ชั่วโมง ที่ผ่านมา" - - -@pytest.mark.usefixtures("lang_locale") -class TestBengaliLocale: - def test_ordinal_number(self): - assert self.locale._ordinal_number(0) == "0তম" - assert self.locale._ordinal_number(1) == "1ম" - assert self.locale._ordinal_number(3) == "3য়" - assert self.locale._ordinal_number(4) == "4র্থ" - assert self.locale._ordinal_number(5) == "5ম" - assert self.locale._ordinal_number(6) == "6ষ্ঠ" - assert self.locale._ordinal_number(10) == "10ম" - assert self.locale._ordinal_number(11) == "11তম" - assert self.locale._ordinal_number(42) == "42তম" - assert self.locale._ordinal_number(-1) is None - - -@pytest.mark.usefixtures("lang_locale") -class TestRomanianLocale: - def test_timeframes(self): - - assert 
self.locale._format_timeframe("hours", 2) == "2 ore" - assert self.locale._format_timeframe("months", 2) == "2 luni" - - assert self.locale._format_timeframe("days", 2) == "2 zile" - assert self.locale._format_timeframe("years", 2) == "2 ani" - - assert self.locale._format_timeframe("hours", 3) == "3 ore" - assert self.locale._format_timeframe("months", 4) == "4 luni" - assert self.locale._format_timeframe("days", 3) == "3 zile" - assert self.locale._format_timeframe("years", 5) == "5 ani" - - def test_relative_timeframes(self): - assert self.locale._format_relative("acum", "now", 0) == "acum" - assert self.locale._format_relative("o oră", "hour", 1) == "peste o oră" - assert self.locale._format_relative("o oră", "hour", -1) == "o oră în urmă" - assert self.locale._format_relative("un minut", "minute", 1) == "peste un minut" - assert ( - self.locale._format_relative("un minut", "minute", -1) == "un minut în urmă" - ) - assert ( - self.locale._format_relative("câteva secunde", "seconds", -1) - == "câteva secunde în urmă" - ) - assert ( - self.locale._format_relative("câteva secunde", "seconds", 1) - == "peste câteva secunde" - ) - assert self.locale._format_relative("o zi", "day", -1) == "o zi în urmă" - assert self.locale._format_relative("o zi", "day", 1) == "peste o zi" - - -@pytest.mark.usefixtures("lang_locale") -class TestArabicLocale: - def test_timeframes(self): - - # single - assert self.locale._format_timeframe("minute", 1) == "دقيقة" - assert self.locale._format_timeframe("hour", 1) == "ساعة" - assert self.locale._format_timeframe("day", 1) == "يوم" - assert self.locale._format_timeframe("month", 1) == "شهر" - assert self.locale._format_timeframe("year", 1) == "سنة" - - # double - assert self.locale._format_timeframe("minutes", 2) == "دقيقتين" - assert self.locale._format_timeframe("hours", 2) == "ساعتين" - assert self.locale._format_timeframe("days", 2) == "يومين" - assert self.locale._format_timeframe("months", 2) == "شهرين" - assert 
self.locale._format_timeframe("years", 2) == "سنتين" - - # up to ten - assert self.locale._format_timeframe("minutes", 3) == "3 دقائق" - assert self.locale._format_timeframe("hours", 4) == "4 ساعات" - assert self.locale._format_timeframe("days", 5) == "5 أيام" - assert self.locale._format_timeframe("months", 6) == "6 أشهر" - assert self.locale._format_timeframe("years", 10) == "10 سنوات" - - # more than ten - assert self.locale._format_timeframe("minutes", 11) == "11 دقيقة" - assert self.locale._format_timeframe("hours", 19) == "19 ساعة" - assert self.locale._format_timeframe("months", 24) == "24 شهر" - assert self.locale._format_timeframe("days", 50) == "50 يوم" - assert self.locale._format_timeframe("years", 115) == "115 سنة" - - -@pytest.mark.usefixtures("lang_locale") -class TestNepaliLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("hours", 3) == "3 घण्टा" - assert self.locale._format_timeframe("hour", 0) == "एक घण्टा" - - def test_format_relative_now(self): - result = self.locale._format_relative("अहिले", "now", 0) - assert result == "अहिले" - - def test_format_relative_future(self): - result = self.locale._format_relative("एक घण्टा", "hour", 1) - assert result == "एक घण्टा पछी" - - def test_format_relative_past(self): - result = self.locale._format_relative("एक घण्टा", "hour", -1) - assert result == "एक घण्टा पहिले" - - -@pytest.mark.usefixtures("lang_locale") -class TestIndonesianLocale: - def test_timeframes(self): - assert self.locale._format_timeframe("hours", 2) == "2 jam" - assert self.locale._format_timeframe("months", 2) == "2 bulan" - - assert self.locale._format_timeframe("days", 2) == "2 hari" - assert self.locale._format_timeframe("years", 2) == "2 tahun" - - assert self.locale._format_timeframe("hours", 3) == "3 jam" - assert self.locale._format_timeframe("months", 4) == "4 bulan" - assert self.locale._format_timeframe("days", 3) == "3 hari" - assert self.locale._format_timeframe("years", 5) == "5 tahun" - - def 
test_format_relative_now(self): - assert self.locale._format_relative("baru saja", "now", 0) == "baru saja" - - def test_format_relative_past(self): - assert self.locale._format_relative("1 jam", "hour", 1) == "dalam 1 jam" - assert self.locale._format_relative("1 detik", "seconds", 1) == "dalam 1 detik" - - def test_format_relative_future(self): - assert self.locale._format_relative("1 jam", "hour", -1) == "1 jam yang lalu" - - -@pytest.mark.usefixtures("lang_locale") -class TestTagalogLocale: - def test_singles_tl(self): - assert self.locale._format_timeframe("second", 1) == "isang segundo" - assert self.locale._format_timeframe("minute", 1) == "isang minuto" - assert self.locale._format_timeframe("hour", 1) == "isang oras" - assert self.locale._format_timeframe("day", 1) == "isang araw" - assert self.locale._format_timeframe("week", 1) == "isang linggo" - assert self.locale._format_timeframe("month", 1) == "isang buwan" - assert self.locale._format_timeframe("year", 1) == "isang taon" - - def test_meridians_tl(self): - assert self.locale.meridian(7, "A") == "ng umaga" - assert self.locale.meridian(18, "A") == "ng hapon" - assert self.locale.meridian(10, "a") == "nu" - assert self.locale.meridian(22, "a") == "nh" - - def test_describe_tl(self): - assert self.locale.describe("second", only_distance=True) == "isang segundo" - assert ( - self.locale.describe("second", only_distance=False) - == "isang segundo mula ngayon" - ) - assert self.locale.describe("minute", only_distance=True) == "isang minuto" - assert ( - self.locale.describe("minute", only_distance=False) - == "isang minuto mula ngayon" - ) - assert self.locale.describe("hour", only_distance=True) == "isang oras" - assert ( - self.locale.describe("hour", only_distance=False) - == "isang oras mula ngayon" - ) - assert self.locale.describe("day", only_distance=True) == "isang araw" - assert ( - self.locale.describe("day", only_distance=False) == "isang araw mula ngayon" - ) - assert 
self.locale.describe("week", only_distance=True) == "isang linggo" - assert ( - self.locale.describe("week", only_distance=False) - == "isang linggo mula ngayon" - ) - assert self.locale.describe("month", only_distance=True) == "isang buwan" - assert ( - self.locale.describe("month", only_distance=False) - == "isang buwan mula ngayon" - ) - assert self.locale.describe("year", only_distance=True) == "isang taon" - assert ( - self.locale.describe("year", only_distance=False) - == "isang taon mula ngayon" - ) - - def test_relative_tl(self): - # time - assert self.locale._format_relative("ngayon", "now", 0) == "ngayon" - assert ( - self.locale._format_relative("1 segundo", "seconds", 1) - == "1 segundo mula ngayon" - ) - assert ( - self.locale._format_relative("1 minuto", "minutes", 1) - == "1 minuto mula ngayon" - ) - assert ( - self.locale._format_relative("1 oras", "hours", 1) == "1 oras mula ngayon" - ) - assert self.locale._format_relative("1 araw", "days", 1) == "1 araw mula ngayon" - assert ( - self.locale._format_relative("1 linggo", "weeks", 1) - == "1 linggo mula ngayon" - ) - assert ( - self.locale._format_relative("1 buwan", "months", 1) - == "1 buwan mula ngayon" - ) - assert ( - self.locale._format_relative("1 taon", "years", 1) == "1 taon mula ngayon" - ) - assert ( - self.locale._format_relative("1 segundo", "seconds", -1) - == "nakaraang 1 segundo" - ) - assert ( - self.locale._format_relative("1 minuto", "minutes", -1) - == "nakaraang 1 minuto" - ) - assert self.locale._format_relative("1 oras", "hours", -1) == "nakaraang 1 oras" - assert self.locale._format_relative("1 araw", "days", -1) == "nakaraang 1 araw" - assert ( - self.locale._format_relative("1 linggo", "weeks", -1) - == "nakaraang 1 linggo" - ) - assert ( - self.locale._format_relative("1 buwan", "months", -1) == "nakaraang 1 buwan" - ) - assert self.locale._format_relative("1 taon", "years", -1) == "nakaraang 1 taon" - - def test_plurals_tl(self): - # Seconds - assert 
self.locale._format_timeframe("seconds", 0) == "0 segundo" - assert self.locale._format_timeframe("seconds", 1) == "1 segundo" - assert self.locale._format_timeframe("seconds", 2) == "2 segundo" - assert self.locale._format_timeframe("seconds", 4) == "4 segundo" - assert self.locale._format_timeframe("seconds", 5) == "5 segundo" - assert self.locale._format_timeframe("seconds", 21) == "21 segundo" - assert self.locale._format_timeframe("seconds", 22) == "22 segundo" - assert self.locale._format_timeframe("seconds", 25) == "25 segundo" - - # Minutes - assert self.locale._format_timeframe("minutes", 0) == "0 minuto" - assert self.locale._format_timeframe("minutes", 1) == "1 minuto" - assert self.locale._format_timeframe("minutes", 2) == "2 minuto" - assert self.locale._format_timeframe("minutes", 4) == "4 minuto" - assert self.locale._format_timeframe("minutes", 5) == "5 minuto" - assert self.locale._format_timeframe("minutes", 21) == "21 minuto" - assert self.locale._format_timeframe("minutes", 22) == "22 minuto" - assert self.locale._format_timeframe("minutes", 25) == "25 minuto" - - # Hours - assert self.locale._format_timeframe("hours", 0) == "0 oras" - assert self.locale._format_timeframe("hours", 1) == "1 oras" - assert self.locale._format_timeframe("hours", 2) == "2 oras" - assert self.locale._format_timeframe("hours", 4) == "4 oras" - assert self.locale._format_timeframe("hours", 5) == "5 oras" - assert self.locale._format_timeframe("hours", 21) == "21 oras" - assert self.locale._format_timeframe("hours", 22) == "22 oras" - assert self.locale._format_timeframe("hours", 25) == "25 oras" - - # Days - assert self.locale._format_timeframe("days", 0) == "0 araw" - assert self.locale._format_timeframe("days", 1) == "1 araw" - assert self.locale._format_timeframe("days", 2) == "2 araw" - assert self.locale._format_timeframe("days", 3) == "3 araw" - assert self.locale._format_timeframe("days", 21) == "21 araw" - - # Weeks - assert 
self.locale._format_timeframe("weeks", 0) == "0 linggo" - assert self.locale._format_timeframe("weeks", 1) == "1 linggo" - assert self.locale._format_timeframe("weeks", 2) == "2 linggo" - assert self.locale._format_timeframe("weeks", 4) == "4 linggo" - assert self.locale._format_timeframe("weeks", 5) == "5 linggo" - assert self.locale._format_timeframe("weeks", 21) == "21 linggo" - assert self.locale._format_timeframe("weeks", 22) == "22 linggo" - assert self.locale._format_timeframe("weeks", 25) == "25 linggo" - - # Months - assert self.locale._format_timeframe("months", 0) == "0 buwan" - assert self.locale._format_timeframe("months", 1) == "1 buwan" - assert self.locale._format_timeframe("months", 2) == "2 buwan" - assert self.locale._format_timeframe("months", 4) == "4 buwan" - assert self.locale._format_timeframe("months", 5) == "5 buwan" - assert self.locale._format_timeframe("months", 21) == "21 buwan" - assert self.locale._format_timeframe("months", 22) == "22 buwan" - assert self.locale._format_timeframe("months", 25) == "25 buwan" - - # Years - assert self.locale._format_timeframe("years", 1) == "1 taon" - assert self.locale._format_timeframe("years", 2) == "2 taon" - assert self.locale._format_timeframe("years", 5) == "5 taon" - - def test_multi_describe_tl(self): - describe = self.locale.describe_multi - - fulltest = [("years", 5), ("weeks", 1), ("hours", 1), ("minutes", 6)] - assert describe(fulltest) == "5 taon 1 linggo 1 oras 6 minuto mula ngayon" - seconds4000_0days = [("days", 0), ("hours", 1), ("minutes", 6)] - assert describe(seconds4000_0days) == "0 araw 1 oras 6 minuto mula ngayon" - seconds4000 = [("hours", 1), ("minutes", 6)] - assert describe(seconds4000) == "1 oras 6 minuto mula ngayon" - assert describe(seconds4000, only_distance=True) == "1 oras 6 minuto" - seconds3700 = [("hours", 1), ("minutes", 1)] - assert describe(seconds3700) == "1 oras 1 minuto mula ngayon" - seconds300_0hours = [("hours", 0), ("minutes", 5)] - assert 
describe(seconds300_0hours) == "0 oras 5 minuto mula ngayon" - seconds300 = [("minutes", 5)] - assert describe(seconds300) == "5 minuto mula ngayon" - seconds60 = [("minutes", 1)] - assert describe(seconds60) == "1 minuto mula ngayon" - assert describe(seconds60, only_distance=True) == "1 minuto" - seconds60 = [("seconds", 1)] - assert describe(seconds60) == "1 segundo mula ngayon" - assert describe(seconds60, only_distance=True) == "1 segundo" - - def test_ordinal_number_tl(self): - assert self.locale.ordinal_number(0) == "ika-0" - assert self.locale.ordinal_number(1) == "ika-1" - assert self.locale.ordinal_number(2) == "ika-2" - assert self.locale.ordinal_number(3) == "ika-3" - assert self.locale.ordinal_number(10) == "ika-10" - assert self.locale.ordinal_number(23) == "ika-23" - assert self.locale.ordinal_number(100) == "ika-100" - assert self.locale.ordinal_number(103) == "ika-103" - assert self.locale.ordinal_number(114) == "ika-114" - - -@pytest.mark.usefixtures("lang_locale") -class TestEstonianLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "just nüüd" - assert self.locale._format_timeframe("second", 1) == "ühe sekundi" - assert self.locale._format_timeframe("seconds", 3) == "3 sekundi" - assert self.locale._format_timeframe("seconds", 30) == "30 sekundi" - assert self.locale._format_timeframe("minute", 1) == "ühe minuti" - assert self.locale._format_timeframe("minutes", 4) == "4 minuti" - assert self.locale._format_timeframe("minutes", 40) == "40 minuti" - assert self.locale._format_timeframe("hour", 1) == "tunni aja" - assert self.locale._format_timeframe("hours", 5) == "5 tunni" - assert self.locale._format_timeframe("hours", 23) == "23 tunni" - assert self.locale._format_timeframe("day", 1) == "ühe päeva" - assert self.locale._format_timeframe("days", 6) == "6 päeva" - assert self.locale._format_timeframe("days", 12) == "12 päeva" - assert self.locale._format_timeframe("month", 1) == "ühe kuu" - assert 
self.locale._format_timeframe("months", 7) == "7 kuu" - assert self.locale._format_timeframe("months", 11) == "11 kuu" - assert self.locale._format_timeframe("year", 1) == "ühe aasta" - assert self.locale._format_timeframe("years", 8) == "8 aasta" - assert self.locale._format_timeframe("years", 12) == "12 aasta" - - assert self.locale._format_timeframe("now", 0) == "just nüüd" - assert self.locale._format_timeframe("second", -1) == "üks sekund" - assert self.locale._format_timeframe("seconds", -9) == "9 sekundit" - assert self.locale._format_timeframe("seconds", -12) == "12 sekundit" - assert self.locale._format_timeframe("minute", -1) == "üks minut" - assert self.locale._format_timeframe("minutes", -2) == "2 minutit" - assert self.locale._format_timeframe("minutes", -10) == "10 minutit" - assert self.locale._format_timeframe("hour", -1) == "tund aega" - assert self.locale._format_timeframe("hours", -3) == "3 tundi" - assert self.locale._format_timeframe("hours", -11) == "11 tundi" - assert self.locale._format_timeframe("day", -1) == "üks päev" - assert self.locale._format_timeframe("days", -2) == "2 päeva" - assert self.locale._format_timeframe("days", -12) == "12 päeva" - assert self.locale._format_timeframe("month", -1) == "üks kuu" - assert self.locale._format_timeframe("months", -3) == "3 kuud" - assert self.locale._format_timeframe("months", -13) == "13 kuud" - assert self.locale._format_timeframe("year", -1) == "üks aasta" - assert self.locale._format_timeframe("years", -4) == "4 aastat" - assert self.locale._format_timeframe("years", -14) == "14 aastat" - - -@pytest.mark.usefixtures("lang_locale") -class TestPortugueseLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "agora" - assert self.locale._format_timeframe("second", 1) == "um segundo" - assert self.locale._format_timeframe("seconds", 30) == "30 segundos" - assert self.locale._format_timeframe("minute", 1) == "um minuto" - assert 
self.locale._format_timeframe("minutes", 40) == "40 minutos" - assert self.locale._format_timeframe("hour", 1) == "uma hora" - assert self.locale._format_timeframe("hours", 23) == "23 horas" - assert self.locale._format_timeframe("day", 1) == "um dia" - assert self.locale._format_timeframe("days", 12) == "12 dias" - assert self.locale._format_timeframe("month", 1) == "um mês" - assert self.locale._format_timeframe("months", 11) == "11 meses" - assert self.locale._format_timeframe("year", 1) == "um ano" - assert self.locale._format_timeframe("years", 12) == "12 anos" - - -@pytest.mark.usefixtures("lang_locale") -class TestBrazilianPortugueseLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "agora" - assert self.locale._format_timeframe("second", 1) == "um segundo" - assert self.locale._format_timeframe("seconds", 30) == "30 segundos" - assert self.locale._format_timeframe("minute", 1) == "um minuto" - assert self.locale._format_timeframe("minutes", 40) == "40 minutos" - assert self.locale._format_timeframe("hour", 1) == "uma hora" - assert self.locale._format_timeframe("hours", 23) == "23 horas" - assert self.locale._format_timeframe("day", 1) == "um dia" - assert self.locale._format_timeframe("days", 12) == "12 dias" - assert self.locale._format_timeframe("month", 1) == "um mês" - assert self.locale._format_timeframe("months", 11) == "11 meses" - assert self.locale._format_timeframe("year", 1) == "um ano" - assert self.locale._format_timeframe("years", 12) == "12 anos" - assert self.locale._format_relative("uma hora", "hour", -1) == "faz uma hora" - - -@pytest.mark.usefixtures("lang_locale") -class TestHongKongLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "剛才" - assert self.locale._format_timeframe("second", 1) == "1秒" - assert self.locale._format_timeframe("seconds", 30) == "30秒" - assert self.locale._format_timeframe("minute", 1) == "1分鐘" - assert 
self.locale._format_timeframe("minutes", 40) == "40分鐘" - assert self.locale._format_timeframe("hour", 1) == "1小時" - assert self.locale._format_timeframe("hours", 23) == "23小時" - assert self.locale._format_timeframe("day", 1) == "1天" - assert self.locale._format_timeframe("days", 12) == "12天" - assert self.locale._format_timeframe("week", 1) == "1星期" - assert self.locale._format_timeframe("weeks", 38) == "38星期" - assert self.locale._format_timeframe("month", 1) == "1個月" - assert self.locale._format_timeframe("months", 11) == "11個月" - assert self.locale._format_timeframe("year", 1) == "1年" - assert self.locale._format_timeframe("years", 12) == "12年" - - -@pytest.mark.usefixtures("lang_locale") -class TestChineseTWLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "剛才" - assert self.locale._format_timeframe("second", 1) == "1秒" - assert self.locale._format_timeframe("seconds", 30) == "30秒" - assert self.locale._format_timeframe("minute", 1) == "1分鐘" - assert self.locale._format_timeframe("minutes", 40) == "40分鐘" - assert self.locale._format_timeframe("hour", 1) == "1小時" - assert self.locale._format_timeframe("hours", 23) == "23小時" - assert self.locale._format_timeframe("day", 1) == "1天" - assert self.locale._format_timeframe("days", 12) == "12天" - assert self.locale._format_timeframe("week", 1) == "1週" - assert self.locale._format_timeframe("weeks", 38) == "38週" - assert self.locale._format_timeframe("month", 1) == "1個月" - assert self.locale._format_timeframe("months", 11) == "11個月" - assert self.locale._format_timeframe("year", 1) == "1年" - assert self.locale._format_timeframe("years", 12) == "12年" - - -@pytest.mark.usefixtures("lang_locale") -class TestSwahiliLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "sasa hivi" - assert self.locale._format_timeframe("second", 1) == "sekunde" - assert self.locale._format_timeframe("seconds", 3) == "sekunde 3" - assert 
self.locale._format_timeframe("seconds", 30) == "sekunde 30" - assert self.locale._format_timeframe("minute", 1) == "dakika moja" - assert self.locale._format_timeframe("minutes", 4) == "dakika 4" - assert self.locale._format_timeframe("minutes", 40) == "dakika 40" - assert self.locale._format_timeframe("hour", 1) == "saa moja" - assert self.locale._format_timeframe("hours", 5) == "saa 5" - assert self.locale._format_timeframe("hours", 23) == "saa 23" - assert self.locale._format_timeframe("day", 1) == "siku moja" - assert self.locale._format_timeframe("days", 6) == "siku 6" - assert self.locale._format_timeframe("days", 12) == "siku 12" - assert self.locale._format_timeframe("month", 1) == "mwezi moja" - assert self.locale._format_timeframe("months", 7) == "miezi 7" - assert self.locale._format_timeframe("week", 1) == "wiki moja" - assert self.locale._format_timeframe("weeks", 2) == "wiki 2" - assert self.locale._format_timeframe("months", 11) == "miezi 11" - assert self.locale._format_timeframe("year", 1) == "mwaka moja" - assert self.locale._format_timeframe("years", 8) == "miaka 8" - assert self.locale._format_timeframe("years", 12) == "miaka 12" - - def test_format_relative_now(self): - result = self.locale._format_relative("sasa hivi", "now", 0) - assert result == "sasa hivi" - - def test_format_relative_past(self): - result = self.locale._format_relative("saa moja", "hour", 1) - assert result == "muda wa saa moja" - - def test_format_relative_future(self): - result = self.locale._format_relative("saa moja", "hour", -1) - assert result == "saa moja iliyopita" - - -@pytest.mark.usefixtures("lang_locale") -class TestKoreanLocale: - def test_format_timeframe(self): - assert self.locale._format_timeframe("now", 0) == "지금" - assert self.locale._format_timeframe("second", 1) == "1초" - assert self.locale._format_timeframe("seconds", 2) == "2초" - assert self.locale._format_timeframe("minute", 1) == "1분" - assert self.locale._format_timeframe("minutes", 2) == "2분" - 
assert self.locale._format_timeframe("hour", 1) == "한시간" - assert self.locale._format_timeframe("hours", 2) == "2시간" - assert self.locale._format_timeframe("day", 1) == "하루" - assert self.locale._format_timeframe("days", 2) == "2일" - assert self.locale._format_timeframe("week", 1) == "1주" - assert self.locale._format_timeframe("weeks", 2) == "2주" - assert self.locale._format_timeframe("month", 1) == "한달" - assert self.locale._format_timeframe("months", 2) == "2개월" - assert self.locale._format_timeframe("year", 1) == "1년" - assert self.locale._format_timeframe("years", 2) == "2년" - - def test_format_relative(self): - assert self.locale._format_relative("지금", "now", 0) == "지금" - - assert self.locale._format_relative("1초", "second", 1) == "1초 후" - assert self.locale._format_relative("2초", "seconds", 2) == "2초 후" - assert self.locale._format_relative("1분", "minute", 1) == "1분 후" - assert self.locale._format_relative("2분", "minutes", 2) == "2분 후" - assert self.locale._format_relative("한시간", "hour", 1) == "한시간 후" - assert self.locale._format_relative("2시간", "hours", 2) == "2시간 후" - assert self.locale._format_relative("하루", "day", 1) == "내일" - assert self.locale._format_relative("2일", "days", 2) == "모레" - assert self.locale._format_relative("3일", "days", 3) == "글피" - assert self.locale._format_relative("4일", "days", 4) == "그글피" - assert self.locale._format_relative("5일", "days", 5) == "5일 후" - assert self.locale._format_relative("1주", "week", 1) == "1주 후" - assert self.locale._format_relative("2주", "weeks", 2) == "2주 후" - assert self.locale._format_relative("한달", "month", 1) == "한달 후" - assert self.locale._format_relative("2개월", "months", 2) == "2개월 후" - assert self.locale._format_relative("1년", "year", 1) == "내년" - assert self.locale._format_relative("2년", "years", 2) == "내후년" - assert self.locale._format_relative("3년", "years", 3) == "3년 후" - - assert self.locale._format_relative("1초", "second", -1) == "1초 전" - assert self.locale._format_relative("2초", "seconds", -2) == 
"2초 전" - assert self.locale._format_relative("1분", "minute", -1) == "1분 전" - assert self.locale._format_relative("2분", "minutes", -2) == "2분 전" - assert self.locale._format_relative("한시간", "hour", -1) == "한시간 전" - assert self.locale._format_relative("2시간", "hours", -2) == "2시간 전" - assert self.locale._format_relative("하루", "day", -1) == "어제" - assert self.locale._format_relative("2일", "days", -2) == "그제" - assert self.locale._format_relative("3일", "days", -3) == "그끄제" - assert self.locale._format_relative("4일", "days", -4) == "4일 전" - assert self.locale._format_relative("1주", "week", -1) == "1주 전" - assert self.locale._format_relative("2주", "weeks", -2) == "2주 전" - assert self.locale._format_relative("한달", "month", -1) == "한달 전" - assert self.locale._format_relative("2개월", "months", -2) == "2개월 전" - assert self.locale._format_relative("1년", "year", -1) == "작년" - assert self.locale._format_relative("2년", "years", -2) == "제작년" - assert self.locale._format_relative("3년", "years", -3) == "3년 전" - - def test_ordinal_number(self): - assert self.locale.ordinal_number(0) == "0번째" - assert self.locale.ordinal_number(1) == "첫번째" - assert self.locale.ordinal_number(2) == "두번째" - assert self.locale.ordinal_number(3) == "세번째" - assert self.locale.ordinal_number(4) == "네번째" - assert self.locale.ordinal_number(5) == "다섯번째" - assert self.locale.ordinal_number(6) == "여섯번째" - assert self.locale.ordinal_number(7) == "일곱번째" - assert self.locale.ordinal_number(8) == "여덟번째" - assert self.locale.ordinal_number(9) == "아홉번째" - assert self.locale.ordinal_number(10) == "열번째" - assert self.locale.ordinal_number(11) == "11번째" - assert self.locale.ordinal_number(12) == "12번째" - assert self.locale.ordinal_number(100) == "100번째" diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py deleted file mode 100644 index 9fb4e68f3c..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_parser.py +++ /dev/null @@ -1,1657 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import calendar -import os -import time -from datetime import datetime - -import pytest -from dateutil import tz - -import arrow -from arrow import formatter, parser -from arrow.constants import MAX_TIMESTAMP_US -from arrow.parser import DateTimeParser, ParserError, ParserMatchError - -from .utils import make_full_tz_list - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParser: - def test_parse_multiformat(self, mocker): - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_a", - side_effect=parser.ParserError, - ) - - with pytest.raises(parser.ParserError): - self.parser._parse_multiformat("str", ["fmt_a"]) - - mock_datetime = mocker.Mock() - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_b", - return_value=mock_datetime, - ) - - result = self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) - assert result == mock_datetime - - def test_parse_multiformat_all_fail(self, mocker): - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_a", - side_effect=parser.ParserError, - ) - - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_b", - side_effect=parser.ParserError, - ) - - with pytest.raises(parser.ParserError): - self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) - - def test_parse_multiformat_unself_expected_fail(self, mocker): - class UnselfExpectedError(Exception): - pass - - mocker.patch( - "arrow.parser.DateTimeParser.parse", - string="str", - fmt="fmt_a", - side_effect=UnselfExpectedError, - ) - - with pytest.raises(UnselfExpectedError): - self.parser._parse_multiformat("str", ["fmt_a", "fmt_b"]) - - def test_parse_token_nonsense(self): - parts = {} - self.parser._parse_token("NONSENSE", "1900", parts) - assert parts == {} - - def 
test_parse_token_invalid_meridians(self): - parts = {} - self.parser._parse_token("A", "a..m", parts) - assert parts == {} - self.parser._parse_token("a", "p..m", parts) - assert parts == {} - - def test_parser_no_caching(self, mocker): - - mocked_parser = mocker.patch( - "arrow.parser.DateTimeParser._generate_pattern_re", fmt="fmt_a" - ) - self.parser = parser.DateTimeParser(cache_size=0) - for _ in range(100): - self.parser._generate_pattern_re("fmt_a") - assert mocked_parser.call_count == 100 - - def test_parser_1_line_caching(self, mocker): - mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") - self.parser = parser.DateTimeParser(cache_size=1) - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - assert mocked_parser.call_count == 1 - assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_b") - assert mocked_parser.call_count == 2 - assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - assert mocked_parser.call_count == 3 - assert mocked_parser.call_args_list[2] == mocker.call(fmt="fmt_a") - - def test_parser_multiple_line_caching(self, mocker): - mocked_parser = mocker.patch("arrow.parser.DateTimeParser._generate_pattern_re") - self.parser = parser.DateTimeParser(cache_size=2) - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - assert mocked_parser.call_count == 1 - assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") - - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_b") - assert mocked_parser.call_count == 2 - assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") - - # fmt_a and fmt_b are in the cache, so no new calls should be made - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_a") - for _ in range(100): - self.parser._generate_pattern_re(fmt="fmt_b") - 
assert mocked_parser.call_count == 2 - assert mocked_parser.call_args_list[0] == mocker.call(fmt="fmt_a") - assert mocked_parser.call_args_list[1] == mocker.call(fmt="fmt_b") - - def test_YY_and_YYYY_format_list(self): - - assert self.parser.parse("15/01/19", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( - 2019, 1, 15 - ) - - # Regression test for issue #580 - assert self.parser.parse("15/01/2019", ["DD/MM/YY", "DD/MM/YYYY"]) == datetime( - 2019, 1, 15 - ) - - assert ( - self.parser.parse( - "15/01/2019T04:05:06.789120Z", - ["D/M/YYThh:mm:ss.SZ", "D/M/YYYYThh:mm:ss.SZ"], - ) - == datetime(2019, 1, 15, 4, 5, 6, 789120, tzinfo=tz.tzutc()) - ) - - # regression test for issue #447 - def test_timestamp_format_list(self): - # should not match on the "X" token - assert ( - self.parser.parse( - "15 Jul 2000", - ["MM/DD/YYYY", "YYYY-MM-DD", "X", "DD-MMMM-YYYY", "D MMM YYYY"], - ) - == datetime(2000, 7, 15) - ) - - with pytest.raises(ParserError): - self.parser.parse("15 Jul", "X") - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserParse: - def test_parse_list(self, mocker): - - mocker.patch( - "arrow.parser.DateTimeParser._parse_multiformat", - string="str", - formats=["fmt_a", "fmt_b"], - return_value="result", - ) - - result = self.parser.parse("str", ["fmt_a", "fmt_b"]) - assert result == "result" - - def test_parse_unrecognized_token(self, mocker): - - mocker.patch.dict("arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP") - del arrow.parser.DateTimeParser._BASE_INPUT_RE_MAP["YYYY"] - - # need to make another local parser to apply patch changes - _parser = parser.DateTimeParser() - with pytest.raises(parser.ParserError): - _parser.parse("2013-01-01", "YYYY-MM-DD") - - def test_parse_parse_no_match(self): - - with pytest.raises(ParserError): - self.parser.parse("01-01", "YYYY-MM-DD") - - def test_parse_separators(self): - - with pytest.raises(ParserError): - self.parser.parse("1403549231", "YYYY-MM-DD") - - def test_parse_numbers(self): - - self.expected = 
datetime(2012, 1, 1, 12, 5, 10) - assert ( - self.parser.parse("2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss") - == self.expected - ) - - def test_parse_year_two_digit(self): - - self.expected = datetime(1979, 1, 1, 12, 5, 10) - assert ( - self.parser.parse("79-01-01 12:05:10", "YY-MM-DD HH:mm:ss") == self.expected - ) - - def test_parse_timestamp(self): - - tz_utc = tz.tzutc() - int_timestamp = int(time.time()) - self.expected = datetime.fromtimestamp(int_timestamp, tz=tz_utc) - assert self.parser.parse("{:d}".format(int_timestamp), "X") == self.expected - - float_timestamp = time.time() - self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) - assert self.parser.parse("{:f}".format(float_timestamp), "X") == self.expected - - # test handling of ns timestamp (arrow will round to 6 digits regardless) - self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:f}123".format(float_timestamp), "X") == self.expected - ) - - # test ps timestamp (arrow will round to 6 digits regardless) - self.expected = datetime.fromtimestamp(float_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:f}123456".format(float_timestamp), "X") - == self.expected - ) - - # NOTE: negative timestamps cannot be handled by datetime on Window - # Must use timedelta to handle them. 
ref: https://stackoverflow.com/questions/36179914 - if os.name != "nt": - # regression test for issue #662 - negative_int_timestamp = -int_timestamp - self.expected = datetime.fromtimestamp(negative_int_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:d}".format(negative_int_timestamp), "X") - == self.expected - ) - - negative_float_timestamp = -float_timestamp - self.expected = datetime.fromtimestamp(negative_float_timestamp, tz=tz_utc) - assert ( - self.parser.parse("{:f}".format(negative_float_timestamp), "X") - == self.expected - ) - - # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will - # break cases like "15 Jul 2000" and a format list (see issue #447) - with pytest.raises(ParserError): - natural_lang_string = "Meet me at {} at the restaurant.".format( - float_timestamp - ) - self.parser.parse(natural_lang_string, "X") - - with pytest.raises(ParserError): - self.parser.parse("1565982019.", "X") - - with pytest.raises(ParserError): - self.parser.parse(".1565982019", "X") - - def test_parse_expanded_timestamp(self): - # test expanded timestamps that include milliseconds - # and microseconds as multiples rather than decimals - # requested in issue #357 - - tz_utc = tz.tzutc() - timestamp = 1569982581.413132 - timestamp_milli = int(round(timestamp * 1000)) - timestamp_micro = int(round(timestamp * 1000000)) - - # "x" token should parse integer timestamps below MAX_TIMESTAMP normally - self.expected = datetime.fromtimestamp(int(timestamp), tz=tz_utc) - assert self.parser.parse("{:d}".format(int(timestamp)), "x") == self.expected - - self.expected = datetime.fromtimestamp(round(timestamp, 3), tz=tz_utc) - assert self.parser.parse("{:d}".format(timestamp_milli), "x") == self.expected - - self.expected = datetime.fromtimestamp(timestamp, tz=tz_utc) - assert self.parser.parse("{:d}".format(timestamp_micro), "x") == self.expected - - # anything above max µs timestamp should fail - with 
pytest.raises(ValueError): - self.parser.parse("{:d}".format(int(MAX_TIMESTAMP_US) + 1), "x") - - # floats are not allowed with the "x" token - with pytest.raises(ParserMatchError): - self.parser.parse("{:f}".format(timestamp), "x") - - def test_parse_names(self): - - self.expected = datetime(2012, 1, 1) - - assert self.parser.parse("January 1, 2012", "MMMM D, YYYY") == self.expected - assert self.parser.parse("Jan 1, 2012", "MMM D, YYYY") == self.expected - - def test_parse_pm(self): - - self.expected = datetime(1, 1, 1, 13, 0, 0) - assert self.parser.parse("1 pm", "H a") == self.expected - assert self.parser.parse("1 pm", "h a") == self.expected - - self.expected = datetime(1, 1, 1, 1, 0, 0) - assert self.parser.parse("1 am", "H A") == self.expected - assert self.parser.parse("1 am", "h A") == self.expected - - self.expected = datetime(1, 1, 1, 0, 0, 0) - assert self.parser.parse("12 am", "H A") == self.expected - assert self.parser.parse("12 am", "h A") == self.expected - - self.expected = datetime(1, 1, 1, 12, 0, 0) - assert self.parser.parse("12 pm", "H A") == self.expected - assert self.parser.parse("12 pm", "h A") == self.expected - - def test_parse_tz_hours_only(self): - - self.expected = datetime(2025, 10, 17, 5, 30, 10, tzinfo=tz.tzoffset(None, 0)) - parsed = self.parser.parse("2025-10-17 05:30:10+00", "YYYY-MM-DD HH:mm:ssZ") - assert parsed == self.expected - - def test_parse_tz_zz(self): - - self.expected = datetime(2013, 1, 1, tzinfo=tz.tzoffset(None, -7 * 3600)) - assert self.parser.parse("2013-01-01 -07:00", "YYYY-MM-DD ZZ") == self.expected - - @pytest.mark.parametrize("full_tz_name", make_full_tz_list()) - def test_parse_tz_name_zzz(self, full_tz_name): - - self.expected = datetime(2013, 1, 1, tzinfo=tz.gettz(full_tz_name)) - assert ( - self.parser.parse("2013-01-01 {}".format(full_tz_name), "YYYY-MM-DD ZZZ") - == self.expected - ) - - # note that offsets are not timezones - with pytest.raises(ParserError): - self.parser.parse("2013-01-01 
12:30:45.9+1000", "YYYY-MM-DDZZZ") - - with pytest.raises(ParserError): - self.parser.parse("2013-01-01 12:30:45.9+10:00", "YYYY-MM-DDZZZ") - - with pytest.raises(ParserError): - self.parser.parse("2013-01-01 12:30:45.9-10", "YYYY-MM-DDZZZ") - - def test_parse_subsecond(self): - self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) - assert ( - self.parser.parse("2013-01-01 12:30:45.9", "YYYY-MM-DD HH:mm:ss.S") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) - assert ( - self.parser.parse("2013-01-01 12:30:45.98", "YYYY-MM-DD HH:mm:ss.SS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) - assert ( - self.parser.parse("2013-01-01 12:30:45.987", "YYYY-MM-DD HH:mm:ss.SSS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) - assert ( - self.parser.parse("2013-01-01 12:30:45.9876", "YYYY-MM-DD HH:mm:ss.SSSS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) - assert ( - self.parser.parse("2013-01-01 12:30:45.98765", "YYYY-MM-DD HH:mm:ss.SSSSS") - == self.expected - ) - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - assert ( - self.parser.parse( - "2013-01-01 12:30:45.987654", "YYYY-MM-DD HH:mm:ss.SSSSSS" - ) - == self.expected - ) - - def test_parse_subsecond_rounding(self): - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - datetime_format = "YYYY-MM-DD HH:mm:ss.S" - - # round up - string = "2013-01-01 12:30:45.9876539" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # round down - string = "2013-01-01 12:30:45.98765432" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # round half-up - string = "2013-01-01 12:30:45.987653521" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == 
self.expected - - # round half-down - string = "2013-01-01 12:30:45.9876545210" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # overflow (zero out the subseconds and increment the seconds) - # regression tests for issue #636 - def test_parse_subsecond_rounding_overflow(self): - datetime_format = "YYYY-MM-DD HH:mm:ss.S" - - self.expected = datetime(2013, 1, 1, 12, 30, 46) - string = "2013-01-01 12:30:45.9999995" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - self.expected = datetime(2013, 1, 1, 12, 31, 0) - string = "2013-01-01 12:30:59.9999999" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - self.expected = datetime(2013, 1, 2, 0, 0, 0) - string = "2013-01-01 23:59:59.9999999" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # 6 digits should remain unrounded - self.expected = datetime(2013, 1, 1, 12, 30, 45, 999999) - string = "2013-01-01 12:30:45.999999" - assert self.parser.parse(string, datetime_format) == self.expected - assert self.parser.parse_iso(string) == self.expected - - # Regression tests for issue #560 - def test_parse_long_year(self): - with pytest.raises(ParserError): - self.parser.parse("09 January 123456789101112", "DD MMMM YYYY") - - with pytest.raises(ParserError): - self.parser.parse("123456789101112 09 January", "YYYY DD MMMM") - - with pytest.raises(ParserError): - self.parser.parse("68096653015/01/19", "YY/M/DD") - - def test_parse_with_extra_words_at_start_and_end_invalid(self): - input_format_pairs = [ - ("blah2016", "YYYY"), - ("blah2016blah", "YYYY"), - ("2016blah", "YYYY"), - ("2016-05blah", "YYYY-MM"), - ("2016-05-16blah", "YYYY-MM-DD"), - ("2016-05-16T04:05:06.789120blah", "YYYY-MM-DDThh:mm:ss.S"), - 
("2016-05-16T04:05:06.789120ZblahZ", "YYYY-MM-DDThh:mm:ss.SZ"), - ("2016-05-16T04:05:06.789120Zblah", "YYYY-MM-DDThh:mm:ss.SZ"), - ("2016-05-16T04:05:06.789120blahZ", "YYYY-MM-DDThh:mm:ss.SZ"), - ] - - for pair in input_format_pairs: - with pytest.raises(ParserError): - self.parser.parse(pair[0], pair[1]) - - def test_parse_with_extra_words_at_start_and_end_valid(self): - # Spaces surrounding the parsable date are ok because we - # allow the parsing of natural language input. Additionally, a single - # character of specific punctuation before or after the date is okay. - # See docs for full list of valid punctuation. - - assert self.parser.parse("blah 2016 blah", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse("blah 2016", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse("2016 blah", "YYYY") == datetime(2016, 1, 1) - - # test one additional space along with space divider - assert self.parser.parse( - "blah 2016-05-16 04:05:06.789120", "YYYY-MM-DD hh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert self.parser.parse( - "2016-05-16 04:05:06.789120 blah", "YYYY-MM-DD hh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - # test one additional space along with T divider - assert self.parser.parse( - "blah 2016-05-16T04:05:06.789120", "YYYY-MM-DDThh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert self.parser.parse( - "2016-05-16T04:05:06.789120 blah", "YYYY-MM-DDThh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert ( - self.parser.parse( - "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", - "YYYY-MM-DDThh:mm:ss.S", - ) - == datetime(2016, 5, 16, 4, 5, 6, 789120) - ) - - assert ( - self.parser.parse( - "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", - "YYYY-MM-DD hh:mm:ss.S", - ) - == datetime(2016, 5, 16, 4, 5, 6, 789120) - ) - - # regression test for issue #701 - # tests cases of a partial match surrounded by punctuation - # for the list of valid punctuation, see 
documentation - def test_parse_with_punctuation_fences(self): - assert self.parser.parse( - "Meet me at my house on Halloween (2019-31-10)", "YYYY-DD-MM" - ) == datetime(2019, 10, 31) - - assert self.parser.parse( - "Monday, 9. September 2019, 16:15-20:00", "dddd, D. MMMM YYYY" - ) == datetime(2019, 9, 9) - - assert self.parser.parse("A date is 11.11.2011.", "DD.MM.YYYY") == datetime( - 2011, 11, 11 - ) - - with pytest.raises(ParserMatchError): - self.parser.parse("11.11.2011.1 is not a valid date.", "DD.MM.YYYY") - - with pytest.raises(ParserMatchError): - self.parser.parse( - "This date has too many punctuation marks following it (11.11.2011).", - "DD.MM.YYYY", - ) - - def test_parse_with_leading_and_trailing_whitespace(self): - assert self.parser.parse(" 2016", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse("2016 ", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse(" 2016 ", "YYYY") == datetime(2016, 1, 1) - - assert self.parser.parse( - " 2016-05-16 04:05:06.789120 ", "YYYY-MM-DD hh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - assert self.parser.parse( - " 2016-05-16T04:05:06.789120 ", "YYYY-MM-DDThh:mm:ss.S" - ) == datetime(2016, 5, 16, 4, 5, 6, 789120) - - def test_parse_YYYY_DDDD(self): - assert self.parser.parse("1998-136", "YYYY-DDDD") == datetime(1998, 5, 16) - - assert self.parser.parse("1998-006", "YYYY-DDDD") == datetime(1998, 1, 6) - - with pytest.raises(ParserError): - self.parser.parse("1998-456", "YYYY-DDDD") - - def test_parse_YYYY_DDD(self): - assert self.parser.parse("1998-6", "YYYY-DDD") == datetime(1998, 1, 6) - - assert self.parser.parse("1998-136", "YYYY-DDD") == datetime(1998, 5, 16) - - with pytest.raises(ParserError): - self.parser.parse("1998-756", "YYYY-DDD") - - # month cannot be passed with DDD and DDDD tokens - def test_parse_YYYY_MM_DDDD(self): - with pytest.raises(ParserError): - self.parser.parse("2015-01-009", "YYYY-MM-DDDD") - - # year is required with the DDD and DDDD tokens - def 
test_parse_DDD_only(self): - with pytest.raises(ParserError): - self.parser.parse("5", "DDD") - - def test_parse_DDDD_only(self): - with pytest.raises(ParserError): - self.parser.parse("145", "DDDD") - - def test_parse_ddd_and_dddd(self): - fr_parser = parser.DateTimeParser("fr") - - # Day of week should be ignored when a day is passed - # 2019-10-17 is a Thursday, so we know day of week - # is ignored if the same date is outputted - expected = datetime(2019, 10, 17) - assert self.parser.parse("Tue 2019-10-17", "ddd YYYY-MM-DD") == expected - assert fr_parser.parse("mar 2019-10-17", "ddd YYYY-MM-DD") == expected - assert self.parser.parse("Tuesday 2019-10-17", "dddd YYYY-MM-DD") == expected - assert fr_parser.parse("mardi 2019-10-17", "dddd YYYY-MM-DD") == expected - - # Get first Tuesday after epoch - expected = datetime(1970, 1, 6) - assert self.parser.parse("Tue", "ddd") == expected - assert fr_parser.parse("mar", "ddd") == expected - assert self.parser.parse("Tuesday", "dddd") == expected - assert fr_parser.parse("mardi", "dddd") == expected - - # Get first Tuesday in 2020 - expected = datetime(2020, 1, 7) - assert self.parser.parse("Tue 2020", "ddd YYYY") == expected - assert fr_parser.parse("mar 2020", "ddd YYYY") == expected - assert self.parser.parse("Tuesday 2020", "dddd YYYY") == expected - assert fr_parser.parse("mardi 2020", "dddd YYYY") == expected - - # Get first Tuesday in February 2020 - expected = datetime(2020, 2, 4) - assert self.parser.parse("Tue 02 2020", "ddd MM YYYY") == expected - assert fr_parser.parse("mar 02 2020", "ddd MM YYYY") == expected - assert self.parser.parse("Tuesday 02 2020", "dddd MM YYYY") == expected - assert fr_parser.parse("mardi 02 2020", "dddd MM YYYY") == expected - - # Get first Tuesday in February after epoch - expected = datetime(1970, 2, 3) - assert self.parser.parse("Tue 02", "ddd MM") == expected - assert fr_parser.parse("mar 02", "ddd MM") == expected - assert self.parser.parse("Tuesday 02", "dddd MM") == 
expected - assert fr_parser.parse("mardi 02", "dddd MM") == expected - - # Times remain intact - expected = datetime(2020, 2, 4, 10, 25, 54, 123456, tz.tzoffset(None, -3600)) - assert ( - self.parser.parse( - "Tue 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - assert ( - fr_parser.parse( - "mar 02 2020 10:25:54.123456-01:00", "ddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - assert ( - self.parser.parse( - "Tuesday 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - assert ( - fr_parser.parse( - "mardi 02 2020 10:25:54.123456-01:00", "dddd MM YYYY HH:mm:ss.SZZ" - ) - == expected - ) - - def test_parse_ddd_and_dddd_ignore_case(self): - # Regression test for issue #851 - expected = datetime(2019, 6, 24) - assert ( - self.parser.parse("MONDAY, June 24, 2019", "dddd, MMMM DD, YYYY") - == expected - ) - - def test_parse_ddd_and_dddd_then_format(self): - # Regression test for issue #446 - arw_formatter = formatter.DateTimeFormatter() - assert arw_formatter.format(self.parser.parse("Mon", "ddd"), "ddd") == "Mon" - assert ( - arw_formatter.format(self.parser.parse("Monday", "dddd"), "dddd") - == "Monday" - ) - assert arw_formatter.format(self.parser.parse("Tue", "ddd"), "ddd") == "Tue" - assert ( - arw_formatter.format(self.parser.parse("Tuesday", "dddd"), "dddd") - == "Tuesday" - ) - assert arw_formatter.format(self.parser.parse("Wed", "ddd"), "ddd") == "Wed" - assert ( - arw_formatter.format(self.parser.parse("Wednesday", "dddd"), "dddd") - == "Wednesday" - ) - assert arw_formatter.format(self.parser.parse("Thu", "ddd"), "ddd") == "Thu" - assert ( - arw_formatter.format(self.parser.parse("Thursday", "dddd"), "dddd") - == "Thursday" - ) - assert arw_formatter.format(self.parser.parse("Fri", "ddd"), "ddd") == "Fri" - assert ( - arw_formatter.format(self.parser.parse("Friday", "dddd"), "dddd") - == "Friday" - ) - assert arw_formatter.format(self.parser.parse("Sat", "ddd"), "ddd") == "Sat" - assert ( - 
arw_formatter.format(self.parser.parse("Saturday", "dddd"), "dddd") - == "Saturday" - ) - assert arw_formatter.format(self.parser.parse("Sun", "ddd"), "ddd") == "Sun" - assert ( - arw_formatter.format(self.parser.parse("Sunday", "dddd"), "dddd") - == "Sunday" - ) - - def test_parse_HH_24(self): - assert self.parser.parse( - "2019-10-30T24:00:00", "YYYY-MM-DDTHH:mm:ss" - ) == datetime(2019, 10, 31, 0, 0, 0, 0) - assert self.parser.parse("2019-10-30T24:00", "YYYY-MM-DDTHH:mm") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse("2019-10-30T24", "YYYY-MM-DDTHH") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse( - "2019-10-30T24:00:00.0", "YYYY-MM-DDTHH:mm:ss.S" - ) == datetime(2019, 10, 31, 0, 0, 0, 0) - assert self.parser.parse( - "2019-10-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" - ) == datetime(2019, 11, 1, 0, 0, 0, 0) - assert self.parser.parse( - "2019-12-31T24:00:00", "YYYY-MM-DDTHH:mm:ss" - ) == datetime(2020, 1, 1, 0, 0, 0, 0) - assert self.parser.parse( - "2019-12-31T23:59:59.9999999", "YYYY-MM-DDTHH:mm:ss.S" - ) == datetime(2020, 1, 1, 0, 0, 0, 0) - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:01:00", "YYYY-MM-DDTHH:mm:ss") - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:00:01", "YYYY-MM-DDTHH:mm:ss") - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:00:00.1", "YYYY-MM-DDTHH:mm:ss.S") - - with pytest.raises(ParserError): - self.parser.parse("2019-12-31T24:00:00.999999", "YYYY-MM-DDTHH:mm:ss.S") - - def test_parse_W(self): - - assert self.parser.parse("2011-W05-4", "W") == datetime(2011, 2, 3) - assert self.parser.parse("2011W054", "W") == datetime(2011, 2, 3) - assert self.parser.parse("2011-W05", "W") == datetime(2011, 1, 31) - assert self.parser.parse("2011W05", "W") == datetime(2011, 1, 31) - assert self.parser.parse("2011-W05-4T14:17:01", "WTHH:mm:ss") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - assert self.parser.parse("2011W054T14:17:01", 
"WTHH:mm:ss") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - assert self.parser.parse("2011-W05T14:17:01", "WTHH:mm:ss") == datetime( - 2011, 1, 31, 14, 17, 1 - ) - assert self.parser.parse("2011W05T141701", "WTHHmmss") == datetime( - 2011, 1, 31, 14, 17, 1 - ) - assert self.parser.parse("2011W054T141701", "WTHHmmss") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - - bad_formats = [ - "201W22", - "1995-W1-4", - "2001-W34-90", - "2001--W34", - "2011-W03--3", - "thstrdjtrsrd676776r65", - "2002-W66-1T14:17:01", - "2002-W23-03T14:17:01", - ] - - for fmt in bad_formats: - with pytest.raises(ParserError): - self.parser.parse(fmt, "W") - - def test_parse_normalize_whitespace(self): - assert self.parser.parse( - "Jun 1 2005 1:33PM", "MMM D YYYY H:mmA", normalize_whitespace=True - ) == datetime(2005, 6, 1, 13, 33) - - with pytest.raises(ParserError): - self.parser.parse("Jun 1 2005 1:33PM", "MMM D YYYY H:mmA") - - assert ( - self.parser.parse( - "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", - "YYYY-MM-DD T HH:mm:ss S", - normalize_whitespace=True, - ) - == datetime(2013, 5, 5, 12, 30, 45, 123456) - ) - - with pytest.raises(ParserError): - self.parser.parse( - "\t 2013-05-05 T \n 12:30:45\t123456 \t \n", - "YYYY-MM-DD T HH:mm:ss S", - ) - - assert self.parser.parse( - " \n Jun 1\t 2005\n ", "MMM D YYYY", normalize_whitespace=True - ) == datetime(2005, 6, 1) - - with pytest.raises(ParserError): - self.parser.parse(" \n Jun 1\t 2005\n ", "MMM D YYYY") - - -@pytest.mark.usefixtures("dt_parser_regex") -class TestDateTimeParserRegex: - def test_format_year(self): - - assert self.format_regex.findall("YYYY-YY") == ["YYYY", "YY"] - - def test_format_month(self): - - assert self.format_regex.findall("MMMM-MMM-MM-M") == ["MMMM", "MMM", "MM", "M"] - - def test_format_day(self): - - assert self.format_regex.findall("DDDD-DDD-DD-D") == ["DDDD", "DDD", "DD", "D"] - - def test_format_hour(self): - - assert self.format_regex.findall("HH-H-hh-h") == ["HH", "H", "hh", "h"] - - def 
test_format_minute(self): - - assert self.format_regex.findall("mm-m") == ["mm", "m"] - - def test_format_second(self): - - assert self.format_regex.findall("ss-s") == ["ss", "s"] - - def test_format_subsecond(self): - - assert self.format_regex.findall("SSSSSS-SSSSS-SSSS-SSS-SS-S") == [ - "SSSSSS", - "SSSSS", - "SSSS", - "SSS", - "SS", - "S", - ] - - def test_format_tz(self): - - assert self.format_regex.findall("ZZZ-ZZ-Z") == ["ZZZ", "ZZ", "Z"] - - def test_format_am_pm(self): - - assert self.format_regex.findall("A-a") == ["A", "a"] - - def test_format_timestamp(self): - - assert self.format_regex.findall("X") == ["X"] - - def test_format_timestamp_milli(self): - - assert self.format_regex.findall("x") == ["x"] - - def test_escape(self): - - escape_regex = parser.DateTimeParser._ESCAPE_RE - - assert escape_regex.findall("2018-03-09 8 [h] 40 [hello]") == ["[h]", "[hello]"] - - def test_month_names(self): - p = parser.DateTimeParser("en_us") - - text = "_".join(calendar.month_name[1:]) - - result = p._input_re_map["MMMM"].findall(text) - - assert result == calendar.month_name[1:] - - def test_month_abbreviations(self): - p = parser.DateTimeParser("en_us") - - text = "_".join(calendar.month_abbr[1:]) - - result = p._input_re_map["MMM"].findall(text) - - assert result == calendar.month_abbr[1:] - - def test_digits(self): - - assert parser.DateTimeParser._ONE_OR_TWO_DIGIT_RE.findall("4-56") == ["4", "56"] - assert parser.DateTimeParser._ONE_OR_TWO_OR_THREE_DIGIT_RE.findall( - "4-56-789" - ) == ["4", "56", "789"] - assert parser.DateTimeParser._ONE_OR_MORE_DIGIT_RE.findall( - "4-56-789-1234-12345" - ) == ["4", "56", "789", "1234", "12345"] - assert parser.DateTimeParser._TWO_DIGIT_RE.findall("12-3-45") == ["12", "45"] - assert parser.DateTimeParser._THREE_DIGIT_RE.findall("123-4-56") == ["123"] - assert parser.DateTimeParser._FOUR_DIGIT_RE.findall("1234-56") == ["1234"] - - def test_tz(self): - tz_z_re = parser.DateTimeParser._TZ_Z_RE - assert tz_z_re.findall("-0700") 
== [("-", "07", "00")] - assert tz_z_re.findall("+07") == [("+", "07", "")] - assert tz_z_re.search("15/01/2019T04:05:06.789120Z") is not None - assert tz_z_re.search("15/01/2019T04:05:06.789120") is None - - tz_zz_re = parser.DateTimeParser._TZ_ZZ_RE - assert tz_zz_re.findall("-07:00") == [("-", "07", "00")] - assert tz_zz_re.findall("+07") == [("+", "07", "")] - assert tz_zz_re.search("15/01/2019T04:05:06.789120Z") is not None - assert tz_zz_re.search("15/01/2019T04:05:06.789120") is None - - tz_name_re = parser.DateTimeParser._TZ_NAME_RE - assert tz_name_re.findall("Europe/Warsaw") == ["Europe/Warsaw"] - assert tz_name_re.findall("GMT") == ["GMT"] - - def test_timestamp(self): - timestamp_re = parser.DateTimeParser._TIMESTAMP_RE - assert timestamp_re.findall("1565707550.452729") == ["1565707550.452729"] - assert timestamp_re.findall("-1565707550.452729") == ["-1565707550.452729"] - assert timestamp_re.findall("-1565707550") == ["-1565707550"] - assert timestamp_re.findall("1565707550") == ["1565707550"] - assert timestamp_re.findall("1565707550.") == [] - assert timestamp_re.findall(".1565707550") == [] - - def test_timestamp_milli(self): - timestamp_expanded_re = parser.DateTimeParser._TIMESTAMP_EXPANDED_RE - assert timestamp_expanded_re.findall("-1565707550") == ["-1565707550"] - assert timestamp_expanded_re.findall("1565707550") == ["1565707550"] - assert timestamp_expanded_re.findall("1565707550.452729") == [] - assert timestamp_expanded_re.findall("1565707550.") == [] - assert timestamp_expanded_re.findall(".1565707550") == [] - - def test_time(self): - time_re = parser.DateTimeParser._TIME_RE - time_seperators = [":", ""] - - for sep in time_seperators: - assert time_re.findall("12") == [("12", "", "", "", "")] - assert time_re.findall("12{sep}35".format(sep=sep)) == [ - ("12", "35", "", "", "") - ] - assert time_re.findall("12{sep}35{sep}46".format(sep=sep)) == [ - ("12", "35", "46", "", "") - ] - assert 
time_re.findall("12{sep}35{sep}46.952313".format(sep=sep)) == [ - ("12", "35", "46", ".", "952313") - ] - assert time_re.findall("12{sep}35{sep}46,952313".format(sep=sep)) == [ - ("12", "35", "46", ",", "952313") - ] - - assert time_re.findall("12:") == [] - assert time_re.findall("12:35:46.") == [] - assert time_re.findall("12:35:46,") == [] - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserISO: - def test_YYYY(self): - - assert self.parser.parse_iso("2013") == datetime(2013, 1, 1) - - def test_YYYY_DDDD(self): - assert self.parser.parse_iso("1998-136") == datetime(1998, 5, 16) - - assert self.parser.parse_iso("1998-006") == datetime(1998, 1, 6) - - with pytest.raises(ParserError): - self.parser.parse_iso("1998-456") - - # 2016 is a leap year, so Feb 29 exists (leap day) - assert self.parser.parse_iso("2016-059") == datetime(2016, 2, 28) - assert self.parser.parse_iso("2016-060") == datetime(2016, 2, 29) - assert self.parser.parse_iso("2016-061") == datetime(2016, 3, 1) - - # 2017 is not a leap year, so Feb 29 does not exist - assert self.parser.parse_iso("2017-059") == datetime(2017, 2, 28) - assert self.parser.parse_iso("2017-060") == datetime(2017, 3, 1) - assert self.parser.parse_iso("2017-061") == datetime(2017, 3, 2) - - # Since 2016 is a leap year, the 366th day falls in the same year - assert self.parser.parse_iso("2016-366") == datetime(2016, 12, 31) - - # Since 2017 is not a leap year, the 366th day falls in the next year - assert self.parser.parse_iso("2017-366") == datetime(2018, 1, 1) - - def test_YYYY_DDDD_HH_mm_ssZ(self): - - assert self.parser.parse_iso("2013-036 04:05:06+01:00") == datetime( - 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-036 04:05:06Z") == datetime( - 2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc() - ) - - def test_YYYY_MM_DDDD(self): - with pytest.raises(ParserError): - self.parser.parse_iso("2014-05-125") - - def test_YYYY_MM(self): - - for separator in 
DateTimeParser.SEPARATORS: - assert self.parser.parse_iso(separator.join(("2013", "02"))) == datetime( - 2013, 2, 1 - ) - - def test_YYYY_MM_DD(self): - - for separator in DateTimeParser.SEPARATORS: - assert self.parser.parse_iso( - separator.join(("2013", "02", "03")) - ) == datetime(2013, 2, 3) - - def test_YYYY_MM_DDTHH_mmZ(self): - - assert self.parser.parse_iso("2013-02-03T04:05+01:00") == datetime( - 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DDTHH_mm(self): - - assert self.parser.parse_iso("2013-02-03T04:05") == datetime(2013, 2, 3, 4, 5) - - def test_YYYY_MM_DDTHH(self): - - assert self.parser.parse_iso("2013-02-03T04") == datetime(2013, 2, 3, 4) - - def test_YYYY_MM_DDTHHZ(self): - - assert self.parser.parse_iso("2013-02-03T04+01:00") == datetime( - 2013, 2, 3, 4, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DDTHH_mm_ssZ(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DDTHH_mm_ss(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06") == datetime( - 2013, 2, 3, 4, 5, 6 - ) - - def test_YYYY_MM_DD_HH_mmZ(self): - - assert self.parser.parse_iso("2013-02-03 04:05+01:00") == datetime( - 2013, 2, 3, 4, 5, tzinfo=tz.tzoffset(None, 3600) - ) - - def test_YYYY_MM_DD_HH_mm(self): - - assert self.parser.parse_iso("2013-02-03 04:05") == datetime(2013, 2, 3, 4, 5) - - def test_YYYY_MM_DD_HH(self): - - assert self.parser.parse_iso("2013-02-03 04") == datetime(2013, 2, 3, 4) - - def test_invalid_time(self): - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03 044") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03 04:05:06.") - - def test_YYYY_MM_DD_HH_mm_ssZ(self): - - assert self.parser.parse_iso("2013-02-03 04:05:06+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, tzinfo=tz.tzoffset(None, 3600) - 
) - - def test_YYYY_MM_DD_HH_mm_ss(self): - - assert self.parser.parse_iso("2013-02-03 04:05:06") == datetime( - 2013, 2, 3, 4, 5, 6 - ) - - def test_YYYY_MM_DDTHH_mm_ss_S(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06.7") == datetime( - 2013, 2, 3, 4, 5, 6, 700000 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78") == datetime( - 2013, 2, 3, 4, 5, 6, 780000 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.789") == datetime( - 2013, 2, 3, 4, 5, 6, 789000 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.7891") == datetime( - 2013, 2, 3, 4, 5, 6, 789100 - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78912") == datetime( - 2013, 2, 3, 4, 5, 6, 789120 - ) - - # ISO 8601:2004(E), ISO, 2004-12-01, 4.2.2.4 ... the decimal fraction - # shall be divided from the integer part by the decimal sign specified - # in ISO 31-0, i.e. the comma [,] or full stop [.]. Of these, the comma - # is the preferred sign. - assert self.parser.parse_iso("2013-02-03T04:05:06,789123678") == datetime( - 2013, 2, 3, 4, 5, 6, 789124 - ) - - # there is no limit on the number of decimal places - assert self.parser.parse_iso("2013-02-03T04:05:06.789123678") == datetime( - 2013, 2, 3, 4, 5, 6, 789124 - ) - - def test_YYYY_MM_DDTHH_mm_ss_SZ(self): - - assert self.parser.parse_iso("2013-02-03T04:05:06.7+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 700000, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 780000, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.789+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 789000, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.7891+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 789100, tzinfo=tz.tzoffset(None, 3600) - ) - - assert self.parser.parse_iso("2013-02-03T04:05:06.78912+01:00") == datetime( - 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzoffset(None, 
3600) - ) - - assert self.parser.parse_iso("2013-02-03 04:05:06.78912Z") == datetime( - 2013, 2, 3, 4, 5, 6, 789120, tzinfo=tz.tzutc() - ) - - def test_W(self): - - assert self.parser.parse_iso("2011-W05-4") == datetime(2011, 2, 3) - - assert self.parser.parse_iso("2011-W05-4T14:17:01") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - - assert self.parser.parse_iso("2011W054") == datetime(2011, 2, 3) - - assert self.parser.parse_iso("2011W054T141701") == datetime( - 2011, 2, 3, 14, 17, 1 - ) - - def test_invalid_Z(self): - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912z") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912zz") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912Zz") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912ZZ") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912+Z") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912-Z") - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-02-03T04:05:06.78912 Z") - - def test_parse_subsecond(self): - self.expected = datetime(2013, 1, 1, 12, 30, 45, 900000) - assert self.parser.parse_iso("2013-01-01 12:30:45.9") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 980000) - assert self.parser.parse_iso("2013-01-01 12:30:45.98") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987000) - assert self.parser.parse_iso("2013-01-01 12:30:45.987") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987600) - assert self.parser.parse_iso("2013-01-01 12:30:45.9876") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987650) - assert self.parser.parse_iso("2013-01-01 12:30:45.98765") == self.expected - - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - assert self.parser.parse_iso("2013-01-01 12:30:45.987654") 
== self.expected - - # use comma as subsecond separator - self.expected = datetime(2013, 1, 1, 12, 30, 45, 987654) - assert self.parser.parse_iso("2013-01-01 12:30:45,987654") == self.expected - - def test_gnu_date(self): - """Regression tests for parsing output from GNU date.""" - # date -Ins - assert self.parser.parse_iso("2016-11-16T09:46:30,895636557-0800") == datetime( - 2016, 11, 16, 9, 46, 30, 895636, tzinfo=tz.tzoffset(None, -3600 * 8) - ) - - # date --rfc-3339=ns - assert self.parser.parse_iso("2016-11-16 09:51:14.682141526-08:00") == datetime( - 2016, 11, 16, 9, 51, 14, 682142, tzinfo=tz.tzoffset(None, -3600 * 8) - ) - - def test_isoformat(self): - - dt = datetime.utcnow() - - assert self.parser.parse_iso(dt.isoformat()) == dt - - def test_parse_iso_normalize_whitespace(self): - assert self.parser.parse_iso( - "2013-036 \t 04:05:06Z", normalize_whitespace=True - ) == datetime(2013, 2, 5, 4, 5, 6, tzinfo=tz.tzutc()) - - with pytest.raises(ParserError): - self.parser.parse_iso("2013-036 \t 04:05:06Z") - - assert self.parser.parse_iso( - "\t 2013-05-05T12:30:45.123456 \t \n", normalize_whitespace=True - ) == datetime(2013, 5, 5, 12, 30, 45, 123456) - - with pytest.raises(ParserError): - self.parser.parse_iso("\t 2013-05-05T12:30:45.123456 \t \n") - - def test_parse_iso_with_leading_and_trailing_whitespace(self): - datetime_string = " 2016-11-15T06:37:19.123456" - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = " 2016-11-15T06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = "2016-11-15T06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = "2016-11-15T 06:37:19.123456" - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - # leading whitespace - datetime_string = " 2016-11-15 06:37:19.123456" - with pytest.raises(ParserError): - 
self.parser.parse_iso(datetime_string) - - # trailing whitespace - datetime_string = "2016-11-15 06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - datetime_string = " 2016-11-15 06:37:19.123456 " - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - # two dividing spaces - datetime_string = "2016-11-15 06:37:19.123456" - with pytest.raises(ParserError): - self.parser.parse_iso(datetime_string) - - def test_parse_iso_with_extra_words_at_start_and_end_invalid(self): - test_inputs = [ - "blah2016", - "blah2016blah", - "blah 2016 blah", - "blah 2016", - "2016 blah", - "blah 2016-05-16 04:05:06.789120", - "2016-05-16 04:05:06.789120 blah", - "blah 2016-05-16T04:05:06.789120", - "2016-05-16T04:05:06.789120 blah", - "2016blah", - "2016-05blah", - "2016-05-16blah", - "2016-05-16T04:05:06.789120blah", - "2016-05-16T04:05:06.789120ZblahZ", - "2016-05-16T04:05:06.789120Zblah", - "2016-05-16T04:05:06.789120blahZ", - "Meet me at 2016-05-16T04:05:06.789120 at the restaurant.", - "Meet me at 2016-05-16 04:05:06.789120 at the restaurant.", - ] - - for ti in test_inputs: - with pytest.raises(ParserError): - self.parser.parse_iso(ti) - - def test_iso8601_basic_format(self): - assert self.parser.parse_iso("20180517") == datetime(2018, 5, 17) - - assert self.parser.parse_iso("20180517T10") == datetime(2018, 5, 17, 10) - - assert self.parser.parse_iso("20180517T105513.843456") == datetime( - 2018, 5, 17, 10, 55, 13, 843456 - ) - - assert self.parser.parse_iso("20180517T105513Z") == datetime( - 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzutc() - ) - - assert self.parser.parse_iso("20180517T105513.843456-0700") == datetime( - 2018, 5, 17, 10, 55, 13, 843456, tzinfo=tz.tzoffset(None, -25200) - ) - - assert self.parser.parse_iso("20180517T105513-0700") == datetime( - 2018, 5, 17, 10, 55, 13, tzinfo=tz.tzoffset(None, -25200) - ) - - assert self.parser.parse_iso("20180517T105513-07") == datetime( - 2018, 5, 17, 10, 55, 13, 
tzinfo=tz.tzoffset(None, -25200) - ) - - # ordinal in basic format: YYYYDDDD - assert self.parser.parse_iso("1998136") == datetime(1998, 5, 16) - - # timezone requires +- seperator - with pytest.raises(ParserError): - self.parser.parse_iso("20180517T1055130700") - - with pytest.raises(ParserError): - self.parser.parse_iso("20180517T10551307") - - # too many digits in date - with pytest.raises(ParserError): - self.parser.parse_iso("201860517T105513Z") - - # too many digits in time - with pytest.raises(ParserError): - self.parser.parse_iso("20180517T1055213Z") - - def test_midnight_end_day(self): - assert self.parser.parse_iso("2019-10-30T24:00:00") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-10-30T24:00") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-10-30T24:00:00.0") == datetime( - 2019, 10, 31, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-10-31T24:00:00") == datetime( - 2019, 11, 1, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-12-31T24:00:00") == datetime( - 2020, 1, 1, 0, 0, 0, 0 - ) - assert self.parser.parse_iso("2019-12-31T23:59:59.9999999") == datetime( - 2020, 1, 1, 0, 0, 0, 0 - ) - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:01:00") - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:00:01") - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:00:00.1") - - with pytest.raises(ParserError): - self.parser.parse_iso("2019-12-31T24:00:00.999999") - - -@pytest.mark.usefixtures("tzinfo_parser") -class TestTzinfoParser: - def test_parse_local(self): - - assert self.parser.parse("local") == tz.tzlocal() - - def test_parse_utc(self): - - assert self.parser.parse("utc") == tz.tzutc() - assert self.parser.parse("UTC") == tz.tzutc() - - def test_parse_iso(self): - - assert self.parser.parse("01:00") == tz.tzoffset(None, 3600) - assert self.parser.parse("11:35") == tz.tzoffset(None, 11 * 3600 + 2100) - assert 
self.parser.parse("+01:00") == tz.tzoffset(None, 3600) - assert self.parser.parse("-01:00") == tz.tzoffset(None, -3600) - - assert self.parser.parse("0100") == tz.tzoffset(None, 3600) - assert self.parser.parse("+0100") == tz.tzoffset(None, 3600) - assert self.parser.parse("-0100") == tz.tzoffset(None, -3600) - - assert self.parser.parse("01") == tz.tzoffset(None, 3600) - assert self.parser.parse("+01") == tz.tzoffset(None, 3600) - assert self.parser.parse("-01") == tz.tzoffset(None, -3600) - - def test_parse_str(self): - - assert self.parser.parse("US/Pacific") == tz.gettz("US/Pacific") - - def test_parse_fails(self): - - with pytest.raises(parser.ParserError): - self.parser.parse("fail") - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserMonthName: - def test_shortmonth_capitalized(self): - - assert self.parser.parse("2013-Jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - def test_shortmonth_allupper(self): - - assert self.parser.parse("2013-JAN-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - def test_shortmonth_alllower(self): - - assert self.parser.parse("2013-jan-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - def test_month_capitalized(self): - - assert self.parser.parse("2013-January-01", "YYYY-MMMM-DD") == datetime( - 2013, 1, 1 - ) - - def test_month_allupper(self): - - assert self.parser.parse("2013-JANUARY-01", "YYYY-MMMM-DD") == datetime( - 2013, 1, 1 - ) - - def test_month_alllower(self): - - assert self.parser.parse("2013-january-01", "YYYY-MMMM-DD") == datetime( - 2013, 1, 1 - ) - - def test_localized_month_name(self): - parser_ = parser.DateTimeParser("fr_fr") - - assert parser_.parse("2013-Janvier-01", "YYYY-MMMM-DD") == datetime(2013, 1, 1) - - def test_localized_month_abbreviation(self): - parser_ = parser.DateTimeParser("it_it") - - assert parser_.parse("2013-Gen-01", "YYYY-MMM-DD") == datetime(2013, 1, 1) - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserMeridians: - def test_meridians_lowercase(self): - 
assert self.parser.parse("2013-01-01 5am", "YYYY-MM-DD ha") == datetime( - 2013, 1, 1, 5 - ) - - assert self.parser.parse("2013-01-01 5pm", "YYYY-MM-DD ha") == datetime( - 2013, 1, 1, 17 - ) - - def test_meridians_capitalized(self): - assert self.parser.parse("2013-01-01 5AM", "YYYY-MM-DD hA") == datetime( - 2013, 1, 1, 5 - ) - - assert self.parser.parse("2013-01-01 5PM", "YYYY-MM-DD hA") == datetime( - 2013, 1, 1, 17 - ) - - def test_localized_meridians_lowercase(self): - parser_ = parser.DateTimeParser("hu_hu") - assert parser_.parse("2013-01-01 5 de", "YYYY-MM-DD h a") == datetime( - 2013, 1, 1, 5 - ) - - assert parser_.parse("2013-01-01 5 du", "YYYY-MM-DD h a") == datetime( - 2013, 1, 1, 17 - ) - - def test_localized_meridians_capitalized(self): - parser_ = parser.DateTimeParser("hu_hu") - assert parser_.parse("2013-01-01 5 DE", "YYYY-MM-DD h A") == datetime( - 2013, 1, 1, 5 - ) - - assert parser_.parse("2013-01-01 5 DU", "YYYY-MM-DD h A") == datetime( - 2013, 1, 1, 17 - ) - - # regression test for issue #607 - def test_es_meridians(self): - parser_ = parser.DateTimeParser("es") - - assert parser_.parse( - "Junio 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a" - ) == datetime(2019, 6, 30, 20, 0) - - with pytest.raises(ParserError): - parser_.parse( - "Junio 30, 2019 - 08:00 pasdfasdfm", "MMMM DD, YYYY - hh:mm a" - ) - - def test_fr_meridians(self): - parser_ = parser.DateTimeParser("fr") - - # the French locale always uses a 24 hour clock, so it does not support meridians - with pytest.raises(ParserError): - parser_.parse("Janvier 30, 2019 - 08:00 pm", "MMMM DD, YYYY - hh:mm a") - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserMonthOrdinalDay: - def test_english(self): - parser_ = parser.DateTimeParser("en_us") - - assert parser_.parse("January 1st, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 1 - ) - assert parser_.parse("January 2nd, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 2 - ) - assert parser_.parse("January 3rd, 2013", "MMMM 
Do, YYYY") == datetime( - 2013, 1, 3 - ) - assert parser_.parse("January 4th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 4 - ) - assert parser_.parse("January 11th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 11 - ) - assert parser_.parse("January 12th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 12 - ) - assert parser_.parse("January 13th, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 13 - ) - assert parser_.parse("January 21st, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 21 - ) - assert parser_.parse("January 31st, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 31 - ) - - with pytest.raises(ParserError): - parser_.parse("January 1th, 2013", "MMMM Do, YYYY") - - with pytest.raises(ParserError): - parser_.parse("January 11st, 2013", "MMMM Do, YYYY") - - def test_italian(self): - parser_ = parser.DateTimeParser("it_it") - - assert parser_.parse("Gennaio 1º, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 1 - ) - - def test_spanish(self): - parser_ = parser.DateTimeParser("es_es") - - assert parser_.parse("Enero 1º, 2013", "MMMM Do, YYYY") == datetime(2013, 1, 1) - - def test_french(self): - parser_ = parser.DateTimeParser("fr_fr") - - assert parser_.parse("Janvier 1er, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 1 - ) - - assert parser_.parse("Janvier 2e, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 2 - ) - - assert parser_.parse("Janvier 11e, 2013", "MMMM Do, YYYY") == datetime( - 2013, 1, 11 - ) - - -@pytest.mark.usefixtures("dt_parser") -class TestDateTimeParserSearchDate: - def test_parse_search(self): - - assert self.parser.parse( - "Today is 25 of September of 2003", "DD of MMMM of YYYY" - ) == datetime(2003, 9, 25) - - def test_parse_search_with_numbers(self): - - assert self.parser.parse( - "2000 people met the 2012-01-01 12:05:10", "YYYY-MM-DD HH:mm:ss" - ) == datetime(2012, 1, 1, 12, 5, 10) - - assert self.parser.parse( - "Call 01-02-03 on 79-01-01 12:05:10", "YY-MM-DD HH:mm:ss" - ) == datetime(1979, 1, 1, 12, 5, 10) - - def 
test_parse_search_with_names(self): - - assert self.parser.parse("June was born in May 1980", "MMMM YYYY") == datetime( - 1980, 5, 1 - ) - - def test_parse_search_locale_with_names(self): - p = parser.DateTimeParser("sv_se") - - assert p.parse("Jan föddes den 31 Dec 1980", "DD MMM YYYY") == datetime( - 1980, 12, 31 - ) - - assert p.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") == datetime( - 1975, 8, 25 - ) - - def test_parse_search_fails(self): - - with pytest.raises(parser.ParserError): - self.parser.parse("Jag föddes den 25 Augusti 1975", "DD MMMM YYYY") - - def test_escape(self): - - format = "MMMM D, YYYY [at] h:mma" - assert self.parser.parse( - "Thursday, December 10, 2015 at 5:09pm", format - ) == datetime(2015, 12, 10, 17, 9) - - format = "[MMMM] M D, YYYY [at] h:mma" - assert self.parser.parse("MMMM 12 10, 2015 at 5:09pm", format) == datetime( - 2015, 12, 10, 17, 9 - ) - - format = "[It happened on] MMMM Do [in the year] YYYY [a long time ago]" - assert self.parser.parse( - "It happened on November 25th in the year 1990 a long time ago", format - ) == datetime(1990, 11, 25) - - format = "[It happened on] MMMM Do [in the][ year] YYYY [a long time ago]" - assert self.parser.parse( - "It happened on November 25th in the year 1990 a long time ago", format - ) == datetime(1990, 11, 25) - - format = "[I'm][ entirely][ escaped,][ weee!]" - assert self.parser.parse("I'm entirely escaped, weee!", format) == datetime( - 1, 1, 1 - ) - - # Special RegEx characters - format = "MMM DD, YYYY |^${}().*+?<>-& h:mm A" - assert self.parser.parse( - "Dec 31, 2017 |^${}().*+?<>-& 2:00 AM", format - ) == datetime(2017, 12, 31, 2, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py deleted file mode 100644 index e48b4de066..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/test_util.py +++ /dev/null @@ -1,81 
+0,0 @@ -# -*- coding: utf-8 -*- -import time -from datetime import datetime - -import pytest - -from arrow import util - - -class TestUtil: - def test_next_weekday(self): - # Get first Monday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 0) == datetime(1970, 1, 5) - - # Get first Tuesday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 1) == datetime(1970, 1, 6) - - # Get first Wednesday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 2) == datetime(1970, 1, 7) - - # Get first Thursday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 3) == datetime(1970, 1, 1) - - # Get first Friday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 4) == datetime(1970, 1, 2) - - # Get first Saturday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 5) == datetime(1970, 1, 3) - - # Get first Sunday after epoch - assert util.next_weekday(datetime(1970, 1, 1), 6) == datetime(1970, 1, 4) - - # Weekdays are 0-indexed - with pytest.raises(ValueError): - util.next_weekday(datetime(1970, 1, 1), 7) - - with pytest.raises(ValueError): - util.next_weekday(datetime(1970, 1, 1), -1) - - def test_total_seconds(self): - td = datetime(2019, 1, 1) - datetime(2018, 1, 1) - assert util.total_seconds(td) == td.total_seconds() - - def test_is_timestamp(self): - timestamp_float = time.time() - timestamp_int = int(timestamp_float) - - assert util.is_timestamp(timestamp_int) - assert util.is_timestamp(timestamp_float) - assert util.is_timestamp(str(timestamp_int)) - assert util.is_timestamp(str(timestamp_float)) - - assert not util.is_timestamp(True) - assert not util.is_timestamp(False) - - class InvalidTimestamp: - pass - - assert not util.is_timestamp(InvalidTimestamp()) - - full_datetime = "2019-06-23T13:12:42" - assert not util.is_timestamp(full_datetime) - - def test_normalize_timestamp(self): - timestamp = 1591161115.194556 - millisecond_timestamp = 1591161115194 - microsecond_timestamp = 1591161115194556 - - assert 
util.normalize_timestamp(timestamp) == timestamp - assert util.normalize_timestamp(millisecond_timestamp) == 1591161115.194 - assert util.normalize_timestamp(microsecond_timestamp) == 1591161115.194556 - - with pytest.raises(ValueError): - util.normalize_timestamp(3e17) - - def test_iso_gregorian(self): - with pytest.raises(ValueError): - util.iso_to_gregorian(2013, 0, 5) - - with pytest.raises(ValueError): - util.iso_to_gregorian(2013, 8, 0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py deleted file mode 100644 index 2a048feb3f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tests/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -import pytz -from dateutil.zoneinfo import get_zonefile_instance - -from arrow import util - - -def make_full_tz_list(): - dateutil_zones = set(get_zonefile_instance().zones) - pytz_zones = set(pytz.all_timezones) - return dateutil_zones.union(pytz_zones) - - -def assert_datetime_equality(dt1, dt2, within=10): - assert dt1.tzinfo == dt2.tzinfo - assert abs(util.total_seconds(dt1 - dt2)) < within diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini b/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini deleted file mode 100644 index 46576b12e3..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/arrow/tox.ini +++ /dev/null @@ -1,53 +0,0 @@ -[tox] -minversion = 3.18.0 -envlist = py{py3,27,35,36,37,38,39},lint,docs -skip_missing_interpreters = true - -[gh-actions] -python = - pypy3: pypy3 - 2.7: py27 - 3.5: py35 - 3.6: py36 - 3.7: py37 - 3.8: py38 - 3.9: py39 - -[testenv] -deps = -rrequirements.txt -allowlist_externals = pytest -commands = pytest - -[testenv:lint] -basepython = python3 -skip_install = true -deps = pre-commit -commands = - pre-commit install - pre-commit run --all-files --show-diff-on-failure - 
-[testenv:docs] -basepython = python3 -skip_install = true -changedir = docs -deps = - doc8 - sphinx - python-dateutil -allowlist_externals = make -commands = - doc8 index.rst ../README.rst --extension .rst --ignore D001 - make html SPHINXOPTS="-W --keep-going" - -[pytest] -addopts = -v --cov-branch --cov=arrow --cov-fail-under=100 --cov-report=term-missing --cov-report=xml -testpaths = tests - -[isort] -line_length = 88 -multi_line_output = 3 -include_trailing_comma = true - -[flake8] -per-file-ignores = arrow/__init__.py:F401 -ignore = E203,E501,W503 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore deleted file mode 100644 index be621609ab..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/.gitignore +++ /dev/null @@ -1,42 +0,0 @@ -# General -*.py[cod] - -# Packages -*.egg -*.egg-info -dist -build -.eggs/ -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 -__pycache__ - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.tox - -# Caches -Thumbs.db - -# Development -.project -.pydevproject -.settings -.idea/ -.history/ -.vscode/ - -# Testing -.cache -test-reports/* -.pytest_cache/* \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python deleted file mode 100644 index 9dc010d803..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.python +++ /dev/null @@ -1,254 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. 
Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations, which became -Zope Corporation. In 2001, the Python Software Foundation (PSF, see -https://www.python.org/psf/) was formed, a non-profit organization -created specifically to own Python-related Intellectual Property. -Zope Corporation was a sponsoring member of the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. 
According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. 
Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. 
By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. 
CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. 
By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt deleted file mode 100644 index d9a10c0d8e..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/LICENSE.txt +++ /dev/null @@ -1,176 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in deleted file mode 100644 index 3216ee548c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include LICENSE.txt -include README.rst -recursive-include resource *.py -recursive-include doc *.rst *.conf *.py *.png *.css diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst deleted file mode 100644 index 074a35f97c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/README.rst +++ /dev/null @@ -1,34 +0,0 @@ -################# -ftrack Python API -################# - -Python API for ftrack. - -.. important:: - - This is the new Python client for the ftrack API. If you are migrating from - the old client then please read the dedicated `migration guide `_. - -************* -Documentation -************* - -Full documentation, including installation and setup guides, can be found at -http://ftrack-python-api.rtd.ftrack.com/en/stable/ - -********************* -Copyright and license -********************* - -Copyright (c) 2014 ftrack - -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this work except in compliance with the License. You may obtain a copy of the -License in the LICENSE.txt file, or at: - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software distributed -under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml deleted file mode 100644 index 355f00f752..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml +++ /dev/null @@ -1,24 +0,0 @@ -# Test configuration for bitbucket pipelines. -options: - max-time: 20 -definitions: - services: - ftrack: - image: - name: ftrackdocker/test-server:latest - username: $DOCKER_HUB_USERNAME - password: $DOCKER_HUB_PASSWORD - email: $DOCKER_HUB_EMAIL -pipelines: - default: - - parallel: - - step: - name: run tests against python 2.7.x - image: python:2.7 - caches: - - pip - services: - - ftrack - script: - - bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' $FTRACK_SERVER)" != "200" ]]; do sleep 1; done' - - python setup.py test \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css deleted file mode 100644 index 3456b0c3c5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css +++ /dev/null @@ -1,16 +0,0 @@ -@import "css/theme.css"; - -.domain-summary li { - float: left; - min-width: 12em; -} - -.domain-summary ul:before, ul:after { - content: ''; - clear: both; - display:block; -} - -.rst-content table.docutils td:last-child { - white-space: normal; -} diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst deleted file mode 100644 index 4e165b0122..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.base -************************ - -.. automodule:: ftrack_api.accessor.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst deleted file mode 100644 index f7d9dddf37..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.disk -************************ - -.. automodule:: ftrack_api.accessor.disk diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst deleted file mode 100644 index 0adc23fe2d..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************* -ftrack_api.accessor -******************* - -.. automodule:: ftrack_api.accessor - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst deleted file mode 100644 index 62bd7f4165..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
- :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.server -************************ - -.. automodule:: ftrack_api.accessor.server diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst deleted file mode 100644 index 9fd8994eb1..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.attribute -******************** - -.. automodule:: ftrack_api.attribute diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst deleted file mode 100644 index cbf9128a5a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.cache -**************** - -.. automodule:: ftrack_api.cache diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst deleted file mode 100644 index 607d574cb5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************* -ftrack_api.collection -********************* - -.. 
automodule:: ftrack_api.collection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst deleted file mode 100644 index 0bc4ce35f1..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************************* -ftrack_api.entity.asset_version -******************************* - -.. automodule:: ftrack_api.entity.asset_version diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst deleted file mode 100644 index f4beedc9a4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************** -ftrack_api.entity.base -********************** - -.. automodule:: ftrack_api.entity.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst deleted file mode 100644 index c9ce0a0cf1..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*************************** -ftrack_api.entity.component -*************************** - -.. 
automodule:: ftrack_api.entity.component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst deleted file mode 100644 index 483c16641b..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -************************* -ftrack_api.entity.factory -************************* - -.. automodule:: ftrack_api.entity.factory diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst deleted file mode 100644 index fce68c0e94..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.entity -***************** - -.. automodule:: ftrack_api.entity - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst deleted file mode 100644 index 9d22a7c378..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************* -ftrack_api.entity.job -********************* - -.. 
automodule:: ftrack_api.entity.job diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst deleted file mode 100644 index 60e006a10c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************** -ftrack_api.entity.location -************************** - -.. automodule:: ftrack_api.entity.location diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst deleted file mode 100644 index 3588e48e5b..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************** -ftrack_api.entity.note -********************** - -.. automodule:: ftrack_api.entity.note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst deleted file mode 100644 index 5777ab0b40..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************************** -ftrack_api.entity.project_schema -******************************** - -.. 
automodule:: ftrack_api.entity.project_schema diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst deleted file mode 100644 index 0014498b9c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************** -ftrack_api.entity.user -********************** - -.. automodule:: ftrack_api.entity.user diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst deleted file mode 100644 index 2b0ca8d3ed..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************* -ftrack_api.event.base -********************* - -.. automodule:: ftrack_api.event.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst deleted file mode 100644 index f582717060..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -*************************** -ftrack_api.event.expression -*************************** - -.. 
automodule:: ftrack_api.event.expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst deleted file mode 100644 index 36d7a33163..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.event.hub -******************** - -.. automodule:: ftrack_api.event.hub diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst deleted file mode 100644 index 0986e8e2f4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.event -**************** - -.. automodule:: ftrack_api.event - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst deleted file mode 100644 index 974f375817..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -*************************** -ftrack_api.event.subscriber -*************************** - -.. 
automodule:: ftrack_api.event.subscriber diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst deleted file mode 100644 index 94a20e3611..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************************** -ftrack_api.event.subscription -***************************** - -.. automodule:: ftrack_api.event.subscription diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst deleted file mode 100644 index 64c3a699d7..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.exception -******************** - -.. automodule:: ftrack_api.exception diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst deleted file mode 100644 index 9b8154bdc3..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.formatter -******************** - -.. 
automodule:: ftrack_api.formatter diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst deleted file mode 100644 index ea3517ca68..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _api_reference: - -************* -API Reference -************* - -ftrack_api -========== - -.. automodule:: ftrack_api - -.. toctree:: - :maxdepth: 1 - :glob: - - */index - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst deleted file mode 100644 index 8223ee72f2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************* -ftrack_api.inspection -********************* - -.. automodule:: ftrack_api.inspection diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst deleted file mode 100644 index ecb883d385..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -****************** -ftrack_api.logging -****************** - -.. 
automodule:: ftrack_api.logging diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst deleted file mode 100644 index b2dff9933d..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************** -ftrack_api.operation -******************** - -.. automodule:: ftrack_api.operation diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst deleted file mode 100644 index a4993d94cf..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.plugin -***************** - -.. automodule:: ftrack_api.plugin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst deleted file mode 100644 index acbd8d237a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.query -**************** - -.. 
automodule:: ftrack_api.query diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst deleted file mode 100644 index 09cdad8627..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _api_reference/resource_identifier_transformer.base: - -*********************************************** -ftrack_api.resource_identifier_transformer.base -*********************************************** - -.. automodule:: ftrack_api.resource_identifier_transformer.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst deleted file mode 100644 index 755f052c9d..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _api_reference/resource_identifier_transformer: - -****************************************** -ftrack_api.resource_identifier_transformer -****************************************** - -.. automodule:: ftrack_api.resource_identifier_transformer - -.. 
toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst deleted file mode 100644 index dcce173d1f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -****************** -ftrack_api.session -****************** - -.. automodule:: ftrack_api.session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst deleted file mode 100644 index 55a1cc75d2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************* -ftrack_api.structure.base -************************* - -.. automodule:: ftrack_api.structure.base diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst deleted file mode 100644 index ade2c7ae88..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*********************** -ftrack_api.structure.id -*********************** - -.. 
automodule:: ftrack_api.structure.id diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst deleted file mode 100644 index cbd4545cf7..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.structure -******************** - -.. automodule:: ftrack_api.structure - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst deleted file mode 100644 index 403173e257..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*************************** -ftrack_api.structure.origin -*************************** - -.. automodule:: ftrack_api.structure.origin diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst deleted file mode 100644 index 5c0d88026b..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -***************************** -ftrack_api.structure.standard -***************************** - -.. 
automodule:: ftrack_api.structure.standard diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst deleted file mode 100644 index 55dc0125a8..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.symbol -***************** - -.. automodule:: ftrack_api.symbol diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst deleted file mode 100644 index bfc5cef401..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/caching.rst +++ /dev/null @@ -1,175 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - - -.. _caching: - -******* -Caching -******* - -The API makes use of caching in order to provide more efficient retrieval of -data by reducing the number of calls to the remote server:: - - # First call to retrieve user performs a request to the server. - user = session.get('User', 'some-user-id') - - # A later call in the same session to retrieve the same user just gets - # the existing instance from the cache without a request to the server. - user = session.get('User', 'some-user-id') - -It also seamlessly merges related data together regardless of how it was -retrieved:: - - >>> timelog = user['timelogs'][0] - >>> with session.auto_populating(False): - >>> print timelog['comment'] - NOT_SET - >>> session.query( - ... 'select comment from Timelog where id is "{0}"' - ... .format(timelog['id']) - ... 
).all() - >>> with session.auto_populating(False): - >>> print timelog['comment'] - 'Some comment' - -By default, each :class:`~ftrack_api.session.Session` is configured with a -simple :class:`~ftrack_api.cache.MemoryCache()` and the cache is lost as soon as -the session expires. - -Configuring a session cache -=========================== - -It is possible to configure the cache that a session uses. An example would be a -persistent auto-populated cache that survives between sessions:: - - import os - import ftrack_api.cache - - # Specify where the file based cache should be stored. - cache_path = os.path.join(tempfile.gettempdir(), 'ftrack_session_cache.dbm') - - - # Define a cache maker that returns a file based cache. Note that a - # function is used because the file based cache should use the session's - # encode and decode methods to serialise the entity data to a format that - # can be written to disk (JSON). - def cache_maker(session): - '''Return cache to use for *session*.''' - return ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - - # Create the session using the cache maker. - session = ftrack_api.Session(cache=cache_maker) - -.. note:: - - There can be a performance penalty when using a more complex cache setup. - For example, serialising data and also writing and reading from disk can be - relatively slow operations. - -Regardless of the cache specified, the session will always construct a -:class:`~ftrack_api.cache.LayeredCache` with a -:class:`~ftrack_api.cache.MemoryCache` at the top level and then your cache at -the second level. This is to ensure consistency of instances returned by the -session. 
- -You can check (or even modify) at any time what cache configuration a session is -using by accessing the `cache` attribute on a -:class:`~ftrack_api.session.Session`:: - - >>> print session.cache - - -Writing a new cache interface -============================= - -If you have a custom cache backend you should be able to integrate it into the -system by writing a cache interface that matches the one defined by -:class:`ftrack_api.cache.Cache`. This typically involves a subclass and -overriding the :meth:`~ftrack_api.cache.Cache.get`, -:meth:`~ftrack_api.cache.Cache.set` and :meth:`~ftrack_api.cache.Cache.remove` -methods. - - -Managing what gets cached -========================= - -The cache system is quite flexible when it comes to controlling what should be -cached. - -Consider you have a layered cache where the bottom layer cache should be -persisted between sessions. In this setup you probably don't want the persisted -cache to hold non-persisted values, such as modified entity values or newly -created entities not yet committed to the server. However, you might want the -top level memory cache to hold onto these values. - -Here is one way to set this up. First define a new proxy cache that is selective -about what it sets:: - - import ftrack_api.inspection - - - class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that won't cache newly created entities.''' - - def set(self, key, value): - '''Set *value* for *key*.''' - if isinstance(value, ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): - return - - super(SelectiveCache, self).set(key, value) - -Now use this custom cache to wrap the serialised cache in the setup above: - -.. 
code-block:: python - :emphasize-lines: 3, 9 - - def cache_maker(session): - '''Return cache to use for *session*.''' - return SelectiveCache( - ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - ) - -Now to prevent modified attributes also being persisted, tweak the encode -settings for the file cache: - -.. code-block:: python - :emphasize-lines: 1, 9-12 - - import functools - - - def cache_maker(session): - '''Return cache to use for *session*.''' - return SelectiveCache( - ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=functools.partial( - session.encode, - entity_attribute_strategy='persisted_only' - ), - decode=session.decode - ) - ) - -And use the updated cache maker for your session:: - - session = ftrack_api.Session(cache=cache_maker) - -.. note:: - - For some type of attributes that are computed, long term caching is not - recommended and such values will not be encoded with the `persisted_only` - strategy. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py deleted file mode 100644 index 1154472155..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/conf.py +++ /dev/null @@ -1,102 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''ftrack Python API documentation build configuration file.''' - -import os -import re - -# -- General ------------------------------------------------------------------ - -# Extensions. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.extlinks', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', - 'lowdown' -] - - -# The suffix of source filenames. -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'ftrack Python API' -copyright = u'2014, ftrack' - -# Version -with open( - os.path.join( - os.path.dirname(__file__), '..', 'source', - 'ftrack_api', '_version.py' - ) -) as _version_file: - _version = re.match( - r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL - ).group(1) - -version = _version -release = _version - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_template'] - -# A list of prefixes to ignore for module listings. -modindex_common_prefix = [ - 'ftrack_api.' -] - -# -- HTML output -------------------------------------------------------------- - -if not os.environ.get('READTHEDOCS', None) == 'True': - # Only import and set the theme if building locally. - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -html_static_path = ['_static'] -html_style = 'ftrack.css' - -# If True, copy source rst files to output for reference. 
-html_copy_source = True - - -# -- Autodoc ------------------------------------------------------------------ - -autodoc_default_flags = ['members', 'undoc-members', 'inherited-members'] -autodoc_member_order = 'bysource' - - -def autodoc_skip(app, what, name, obj, skip, options): - '''Don't skip __init__ method for autodoc.''' - if name == '__init__': - return False - - return skip - - -# -- Intersphinx -------------------------------------------------------------- - -intersphinx_mapping = { - 'python': ('http://docs.python.org/', None), - 'ftrack': ( - 'http://rtd.ftrack.com/docs/ftrack/en/stable/', None - ) -} - - -# -- Todos --------------------------------------------------------------------- - -todo_include_todos = os.environ.get('FTRACK_DOC_INCLUDE_TODOS', False) == 'True' - - -# -- Setup -------------------------------------------------------------------- - -def setup(app): - app.connect('autodoc-skip-member', autodoc_skip) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf deleted file mode 100644 index 3c927cc1ee..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf +++ /dev/null @@ -1,2 +0,0 @@ -[html4css1 writer] -field-name-limit:0 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst deleted file mode 100644 index 99019ee44f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. 
_environment_variables: - -********************* -Environment variables -********************* - -The following is a consolidated list of environment variables that this API -can reference: - -.. envvar:: FTRACK_SERVER - - The full url of the ftrack server to connect to. For example - "https://mycompany.ftrackapp.com" - -.. envvar:: FTRACK_API_USER - - The username of the ftrack user to act on behalf of when performing actions - in the system. - - .. note:: - - When this environment variable is not set, the API will typically also - check other standard operating system variables that hold the username - of the current logged in user. To do this it uses - :func:`getpass.getuser`. - -.. envvar:: FTRACK_API_KEY - - The API key to use when performing actions in the system. The API key is - used to determine the permissions that a script has in the system. - -.. envvar:: FTRACK_APIKEY - - For backwards compatibility. See :envvar:`FTRACK_API_KEY`. - -.. envvar:: FTRACK_EVENT_PLUGIN_PATH - - Paths to search recursively for plugins to load and use in a session. - Multiple paths can be specified by separating with the value of - :attr:`os.pathsep` (e.g. ':' or ';'). - -.. envvar:: FTRACK_API_SCHEMA_CACHE_PATH - - Path to a directory that will be used for storing and retrieving a cache of - the entity schemas fetched from the server. - -.. envvar:: http_proxy / https_proxy - - If you need to use a proxy to connect to ftrack you can use the - "standard" :envvar:`http_proxy` and :envvar:`https_proxy`. Please note that they - are lowercase. 
- - For example "export https_proxy=http://proxy.mycompany.com:8080" \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst deleted file mode 100644 index 0c44a1b68c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst +++ /dev/null @@ -1,137 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _event_list: - -********** -Event list -********** - -The following is a consolidated list of events published directly by this API. - -For some events, a template plugin file is also listed for download -(:guilabel:`Download template plugin`) to help get you started with writing your -own plugin for a particular event. - -.. seealso:: - - * :ref:`handling_events` - * :ref:`ftrack server event list ` - -.. _event_list/ftrack.api.session.construct-entity-type: - -ftrack.api.session.construct-entity-type -======================================== - -:download:`Download template plugin -` - -:ref:`Synchronous `. Published by -the session to retrieve constructed class for specified schema:: - - Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ) - -Expects returned data to be:: - - A Python class. - -.. seealso:: :ref:`working_with_entities/entity_types`. - -.. _event_list/ftrack.api.session.configure-location: - -ftrack.api.session.configure-location -===================================== - -:download:`Download template plugin -` - -:ref:`Synchronous `. Published by -the session to allow configuring of location instances:: - - Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ) - -.. seealso:: :ref:`Configuring locations `. - -.. 
_event_list/ftrack.location.component-added: - -ftrack.location.component-added -=============================== - -Published whenever a component is added to a location:: - - Event( - topic='ftrack.location.component-added', - data=dict( - component_id='e2dc0524-b576-11d3-9612-080027331d74', - location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' - ) - ) - -.. _event_list/ftrack.location.component-removed: - -ftrack.location.component-removed -================================= - -Published whenever a component is removed from a location:: - - Event( - topic='ftrack.location.component-removed', - data=dict( - component_id='e2dc0524-b576-11d3-9612-080027331d74', - location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' - ) - ) - -.. _event_list/ftrack.api.session.ready: - -ftrack.api.session.ready -======================== - -:ref:`Synchronous `. Published after -a :class:`~ftrack_api.session.Session` has been initialized and -is ready to be used:: - - Event( - topic='ftrack.api.session.ready', - data=dict( - session=, - ) - ) - -.. warning:: - - Since the event is synchronous and blocking, avoid doing any unnecessary - work as it will slow down session initialization. - -.. seealso:: - - Also see example usage in :download:`example_plugin_using_session.py - `. - - -.. _event_list/ftrack.api.session.reset: - -ftrack.api.session.reset -======================== - -:ref:`Synchronous `. 
Published after -a :class:`~ftrack_api.session.Session` has been reset and is ready to be used -again:: - - Event( - topic='ftrack.api.session.reset', - data=dict( - session=, - ) - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst deleted file mode 100644 index 985eb9bb44..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst +++ /dev/null @@ -1,82 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/assignments_and_allocations: - -**************************************** -Working with assignments and allocations -**************************************** - -.. currentmodule:: ftrack_api.session - -The API exposes `assignments` and `allocations` relationships on objects in -the project hierarchy. You can use these to retrieve the allocated or assigned -resources, which can be either groups or users. - -Allocations can be used to allocate users or groups to a project team, while -assignments are more explicit and is used to assign users to tasks. Both -assignment and allocations are modelled as `Appointment` objects, with a -`type` attribute indicating the type of the appoinment. 
- -The following example retrieves all users part of the project team:: - - # Retrieve a project - project = session.query('Project').first() - - # Set to hold all users part of the project team - project_team = set() - - # Add all allocated groups and users - for allocation in project['allocations']: - - # Resource may be either a group or a user - resource = allocation['resource'] - - # If the resource is a group, add its members - if isinstance(resource, session.types['Group']): - for membership in resource['memberships']: - user = membership['user'] - project_team.add(user) - - # The resource is a user, add it. - else: - user = resource - project_team.add(user) - -The next example shows how to assign the current user to a task:: - - # Retrieve a task and the current user - task = session.query('Task').first() - current_user = session.query( - u'User where username is {0}'.format(session.api_user) - ).one() - - # Create a new Appointment of type assignment. - session.create('Appointment', { - 'context': task, - 'resource': current_user, - 'type': 'assignment' - }) - - # Finally, persist the new assignment - session.commit() - -To list all users assigned to a task, see the following example:: - - task = session.query('Task').first() - users = session.query( - 'select first_name, last_name from User ' - 'where assignments any (context_id = "{0}")'.format(task['id']) - ) - for user in users: - print user['first_name'], user['last_name'] - -To list the current user's assigned tasks, see the example below:: - - assigned_tasks = session.query( - 'select link from Task ' - 'where assignments any (resource.username = "{0}")'.format(session.api_user) - ) - for task in assigned_tasks: - print u' / '.join(item['name'] for item in task['link']) - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst deleted file mode 
100644 index 6a39bb20d1..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/component: - -*********************** -Working with components -*********************** - -.. currentmodule:: ftrack_api.session - -Components can be created manually or using the provide helper methods on a -:meth:`session ` or existing -:meth:`asset version -`:: - - component = version.create_component('/path/to/file_or_sequence.jpg') - session.commit() - -When a component is created using the helpers it is automatically added to a -location. - -.. seealso:: :ref:`Locations tutorial ` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst deleted file mode 100644 index 033942b442..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst +++ /dev/null @@ -1,94 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/custom_attribute: - -*********************** -Using custom attributes -*********************** - -.. currentmodule:: ftrack_api.session - -Custom attributes can be written and read from entities using the -``custom_attributes`` property. - -The ``custom_attributes`` property provides a similar interface to a dictionary. 
- -Keys can be printed using the keys method:: - - >>> task['custom_attributes'].keys() - [u'my_text_field'] - -or access keys and values as items:: - - >>> print task['custom_attributes'].items() - [(u'my_text_field', u'some text')] - -Read existing custom attribute values:: - - >>> print task['custom_attributes']['my_text_field'] - 'some text' - -Updating a custom attributes can also be done similar to a dictionary:: - - task['custom_attributes']['my_text_field'] = 'foo' - -To query for tasks with a custom attribute, ``my_text_field``, you can use the -key from the configuration:: - - for task in session.query( - 'Task where custom_attributes any ' - '(key is "my_text_field" and value is "bar")' - ): - print task['name'] - -Limitations -=========== - -Expression attributes ---------------------- - -Expression attributes are not yet supported and the reported value will -always be the non-evaluated expression. - -Hierarchical attributes ------------------------ - -Hierarchical attributes are not yet fully supported in the API. Hierarchical -attributes support both read and write, but when read they are not calculated -and instead the `raw` value is returned:: - - # The hierarchical attribute `my_attribute` is set on Shot but this will not - # be reflected on the children. Instead the raw value is returned. - print shot['custom_attributes']['my_attribute'] - 'foo' - print task['custom_attributes']['my_attribute'] - None - -To work around this limitation it is possible to use the legacy api for -hierarchical attributes or to manually query the parents for values and use the -first value that is set. - -Validation -========== - -Custom attributes are validated on the ftrack server before persisted. The -validation will check that the type of the data is correct for the custom -attribute. 
- - * number - :py:class:`int` or :py:class:`float` - * text - :py:class:`str` or :py:class:`unicode` - * enumerator - :py:class:`list` - * boolean - :py:class:`bool` - * date - :py:class:`datetime.datetime` or :py:class:`datetime.date` - -If the value set is not valid a :py:exc:`ftrack_api.exception.ServerError` is -raised with debug information:: - - shot['custom_attributes']['fstart'] = 'test' - - Traceback (most recent call last): - ... - ftrack_api.exception.ServerError: Server reported error: - ValidationError(Custom attribute value for "fstart" must be of type number. - Got "test" of type ) \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst deleted file mode 100644 index 2be01ffe47..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst +++ /dev/null @@ -1,53 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/encode_media: - -************** -Encoding media -************** - -Media such as images and video can be encoded by the ftrack server to allow -playing it in the ftrack web interface. Media can be encoded using -:meth:`ftrack_api.session.Session.encode_media` which accepts a path to a file -or an existing component in the ftrack.server location. 
- -Here is an example of how to encode a video and read the output:: - - job = session.encode_media('/PATH/TO/MEDIA') - job_data = json.loads(job['data']) - - print 'Source component id', job_data['source_component_id'] - print 'Keeping original component', job_data['keep_original'] - for output in job_data['output']: - print u'Output component - id: {0}, format: {1}'.format( - output['component_id'], output['format'] - ) - -You can also call the corresponding helper method on an :meth:`asset version -`, to have the -encoded components automatically associated with the version:: - - job = asset_version.encode_media('/PATH/TO/MEDIA') - -It is also possible to get the URL to an encoded component once the job has -finished:: - - job = session.encode_media('/PATH/TO/MEDIA') - - # Wait for job to finish. - - location = session.query('Location where name is "ftrack.server"').one() - for component in job['job_components']: - print location.get_url(component) - -Media can also be an existing component in another location. Before encoding it, -the component needs to be added to the ftrack.server location:: - - location = session.query('Location where name is "ftrack.server"').one() - location.add_component(component) - session.commit() - - job = session.encode_media(component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst deleted file mode 100644 index 43e31484f4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. _example/entity_links: - -****************** -Using entity links -****************** - -A link can be used to represent a dependency or another relation between -two entities in ftrack. 
- -There are two types of entities that can be linked: - -* Versions can be linked to other asset versions, where the link entity type - is `AssetVersionLink`. -* Objects like Task, Shot or Folder, where the link entity type is - `TypedContextLink`. - -Both `AssetVersion` and `TypedContext` objects have the same relations -`incoming_links` and `outgoing_links`. To list the incoming links to a Shot we -can use the relationship `incoming_links`:: - - for link in shot['incoming_links']: - print link['from'], link['to'] - -In the above example `link['to']` is the shot and `link['from']` could be an -asset build or something else that is linked to the shot. There is an equivalent -`outgoing_links` that can be used to access outgoing links on an object. - -To create a new link between objects or asset versions create a new -`TypedContextLink` or `AssetVersionLink` entity with the from and to properties -set. In this example we will link two asset versions:: - - session.create('AssetVersionLink', { - 'from': from_asset_version, - 'to': to_asset_version - }) - session.commit() - -Using asset version link shortcut -================================= - -Links on asset version can also be created by the use of the `uses_versions` and -`used_in_versions` relations:: - - rig_version['uses_versions'].append(model_version) - session.commit() - -This has the same result as creating the `AssetVersionLink` entity as in the -previous section. 
- -Which versions are using the model can be listed with:: - - for version in model_version['used_in_versions']: - print '{0} is using {1}'.format(version, model_version) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst deleted file mode 100644 index 4fca37d754..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst +++ /dev/null @@ -1,52 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example: - -************** -Usage examples -************** - -The following examples show how to use the API to accomplish specific tasks -using the default configuration. - -.. note:: - - If you are using a server with a customised configuration you may need to - alter the examples slightly to make them work correctly. - -Most of the examples assume you have the *ftrack_api* package imported and have -already constructed a :class:`Session`:: - - import ftrack_api - - session = ftrack_api.Session() - - -.. toctree:: - - project - component - review_session - metadata - custom_attribute - manage_custom_attribute_configuration - link_attribute - scope - job - note - list - timer - assignments_and_allocations - thumbnail - encode_media - entity_links - web_review - publishing - security_roles - task_template - sync_ldap_users - invite_user - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst deleted file mode 100644 index 342f0ef602..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. 
_example/invite_user: - -********************* -Invite user -********************* - -Here we create a new user and send them a invitation through mail - - -Create a new user:: - - user_email = 'artist@mail.vfx-company.com' - - new_user = session.create( - 'User', { - 'username':user_email, - 'email':user_email, - 'is_active':True - } - ) - - session.commit() - - -Invite our new user:: - - new_user.send_invite() - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst deleted file mode 100644 index 296a0f5e17..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst +++ /dev/null @@ -1,97 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/job: - -************* -Managing jobs -************* - -.. currentmodule:: ftrack_api.session - -Jobs can be used to display feedback to users in the ftrack web interface when -performing long running tasks in the API. - -To create a job use :meth:`Session.create`:: - - user = # Get a user from ftrack. - - job = session.create('Job', { - 'user': user, - 'status': 'running' - }) - -The created job will appear as running in the :guilabel:`jobs` menu for the -specified user. To set a description on the job, add a dictionary containing -description as the `data` key: - -.. note:: - - In the current version of the API the dictionary needs to be JSON - serialised. - -.. code-block:: python - - import json - - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'My custom job description.' - }) - }) - -When the long running task has finished simply set the job as completed and -continue with the next task. - -.. code-block:: python - - job['status'] = 'done' - session.commit() - -Attachments -=========== - -Job attachments are files that are attached to a job. 
In the ftrack web -interface these attachments can be downloaded by clicking on a job in the `Jobs` -menu. - -To get a job's attachments through the API you can use the `job_components` -relation and then use the ftrack server location to get the download URL:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - for job_component in job['job_components']: - print 'Download URL: {0}'.format( - server_location.get_url(job_component['component']) - ) - -To add an attachment to a job you have to add it to the ftrack server location -and create a `jobComponent`:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - # Create component and name it "My file". - component = session.create_component( - '/path/to/file', - data={'name': 'My file'}, - location=server_location - ) - - # Attach the component to the job. - session.create( - 'JobComponent', - {'component_id': component['id'], 'job_id': job['id']} - ) - - session.commit() - -.. note:: - - The ftrack web interface does only support downloading one attachment so - attaching more than one will have limited support in the web interface. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst deleted file mode 100644 index 1dcea842cd..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst +++ /dev/null @@ -1,55 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/link_attribute: - -********************* -Using link attributes -********************* - -The `link` attribute can be used to retreive the ids and names of the parents of -an object. It is particularly useful in cases where the path of an object must -be presented in a UI, but can also be used to speedup certain query patterns. 
- -You can use the `link` attribute on any entity inheriting from a -`Context` or `AssetVersion`. Here we use it on the `Task` entity:: - - task = session.query( - 'select link from Task where name is "myTask"' - ).first() - print task['link'] - -It can also be used create a list of parent entities, including the task -itself:: - - entities = [] - for item in task['link']: - entities.append(session.get(item['type'], item['id'])) - -The `link` attribute is an ordered list of dictionaries containting data -of the parents and the item itself. Each dictionary contains the following -entries: - - id - The id of the object and can be used to do a :meth:`Session.get`. - name - The name of the object. - type - The schema id of the object. - -A more advanced use-case is to get the parent names and ids of all timelogs for -a user:: - - for timelog in session.query( - 'select context.link, start, duration from Timelog ' - 'where user.username is "john.doe"' - ): - print timelog['context']['link'], timelog['start'], timelog['duration'] - -The attribute is also available from the `AssetVersion` asset relation:: - - for asset_version in session.query( - 'select link from AssetVersion ' - 'where user.username is "john.doe"' - ): - print asset_version['link'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst deleted file mode 100644 index 155b25f9af..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst +++ /dev/null @@ -1,46 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/list: - -*********** -Using lists -*********** - -.. currentmodule:: ftrack_api.session - -Lists can be used to create a collection of asset versions or objects such as -tasks. 
It could be a list of items that should be sent to client, be included in -todays review session or items that belong together in way that is different -from the project hierarchy. - -There are two types of lists, one for asset versions and one for other objects -such as tasks. - -To create a list use :meth:`Session.create`:: - - user = # Get a user from ftrack. - project = # Get a project from ftrack. - list_category = # Get a list category from ftrack. - - asset_version_list = session.create('AssetVersionList', { - 'owner': user, - 'project': project, - 'category': list_category - }) - - task_list = session.create('TypedContextList', { - 'owner': user, - 'project': project, - 'category': list_category - }) - -Then add items to the list like this:: - - asset_version_list['items'].append(asset_version) - task_list['items'].append(task) - -And remove items from the list like this:: - - asset_version_list['items'].remove(asset_version) - task_list['items'].remove(task) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst deleted file mode 100644 index e3d7c4062c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst +++ /dev/null @@ -1,320 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/manage_custom_attribute_configuration: - -**************************************** -Managing custom attribute configurations -**************************************** - -From the API it is not only possible to -:ref:`read and update custom attributes for entities `, -but also managing custom attribute configurations. - -Existing custom attribute configurations can be queried as :: - - # Print all existing custom attribute configurations. 
- print session.query('CustomAttributeConfiguration').all() - -Use :meth:`Session.create` to create a new custom attribute configuration:: - - # Get the custom attribute type. - custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - - # Persist it to the ftrack instance. - session.commit() - -.. tip:: - - The example above does not add security roles. This can be done either - from System Settings in the ftrack web application, or by following the - :ref:`example/manage_custom_attribute_configuration/security_roles` example. - -Global or project specific -========================== - -A custom attribute can be global or project specific depending on the -`project_id` attribute:: - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - # Set the `project_id` and the custom attribute will only be available - # on `my_project`. - 'project_id': my_project['id'], - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - session.commit() - -A project specific custom attribute can be changed to a global:: - - custom_attribute_configuration['project_id'] = None - session.commit() - -Changing a global custom attribute configuration to a project specific is not -allowed. - -Entity types -============ - -Custom attribute configuration entity types are using a legacy notation. A -configuration can have one of the following as `entity_type`: - -:task: - Represents TypedContext (Folder, Shot, Sequence, Task, etc.) 
custom - attribute configurations. When setting this as entity_type the - object_type_id must be set as well. - - Creating a text custom attribute for Folder:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - object_type = session.query('ObjectType where name is "Folder"').one() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'task', - 'object_type_id': object_type['id'], - 'type': custom_attribute_type, - 'label': 'Foo', - 'key': 'foo', - 'default': 'bar', - }) - session.commit() - - Can be associated with a `project_id`. - -:show: - Represents Projects custom attribute configurations. - - Can be associated with a `project_id`. - -:assetversion: - Represents AssetVersion custom attribute configurations. - - Can be associated with a `project_id`. - -:user: - Represents User custom attribute configurations. - - Must be `global` and cannot be associated with a `project_id`. - -:list: - Represents List custom attribute configurations. - - Can be associated with a `project_id`. - -:asset: - Represents Asset custom attribute configurations. - - .. note:: - - Asset custom attributes have limited support in the ftrack web - interface. - - Can be associated with a `project_id`. - -It is not possible to change type after a custom attribute configuration has -been created. - -Custom attribute configuration types -==================================== - -Custom attributes can be of different data types depending on what type is set -in the configuration. Some types requires an extra json encoded config to be -set: - -:text: - A sting type custom attribute. - - The `default` value must be either :py:class:`str` or :py:class:`unicode`. - - Can be either presented as raw text or markdown formatted in applicaitons - which support it. This is configured through a markwdown key:: - - # Get the custom attribute type. 
- custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - - # Persist it to the ftrack instance. - session.commit() - -:boolean: - - A boolean type custom attribute. - - The `default` value must be a :py:class:`bool`. - - No config is required. - -:date: - A date type custom attribute. - - The `default` value must be an :term:`arrow` date - e.g. - arrow.Arrow(2017, 2, 8). - - No config is required. - -:enumerator: - An enumerator type custom attribute. - - The `default` value must be a list with either :py:class:`str` or - :py:class:`unicode`. - - The enumerator can either be single or multi select. The config must a json - dump of a dictionary containing `multiSelect` and `data`. Where - `multiSelect` is True or False and data is a list of options. Each option - should be a dictionary containing `value` and `menu`, where `menu` is meant - to be used as label in a user interface. - - Create a custom attribute enumerator:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "enumerator"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Enumerator attribute', - 'key': 'enumerator_attribute', - 'default': ['bar'], - 'config': json.dumps({ - 'multiSelect': True, - 'data': json.dumps([ - {'menu': 'Foo', 'value': 'foo'}, - {'menu': 'Bar', 'value': 'bar'} - ]) - }) - }) - session.commit() - -:dynamic enumerator: - - An enumerator type where available options are fetched from remote. Created - in the same way as enumerator but without `data`. 
- -:number: - - A number custom attribute can be either decimal or integer for presentation. - - This can be configured through the `isdecimal` config option:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "number"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Number attribute', - 'key': 'number_attribute', - 'default': 42, - 'config': json.dumps({ - 'isdecimal': True - }) - }) - session.commit() - -Changing default -================ - -It is possible to update the `default` value of a custom attribute -configuration. This will not change the value of any existing custom -attributes:: - - # Change the default value of custom attributes. This will only affect - # newly created entities. - custom_attribute_configuration['default'] = 43 - session.commit() - -.. _example/manage_custom_attribute_configuration/security_roles: - -Security roles -============== - -By default new custom attribute configurations and the entity values are not -readable or writable by any security role. - -This can be configured through the `read_security_roles` and `write_security_roles` -attributes:: - - # Pick random security role. - security_role = session.query('SecurityRole').first() - custom_attribute_type = session.query( - 'CustomAttributeType where name is "date"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Date attribute', - 'key': 'date_attribute', - 'default': arrow.Arrow(2017, 2, 8), - 'write_security_roles': [security_role], - 'read_security_roles': [security_role] - }) - session.commit() - -.. note:: - - Setting the correct security role is important and must be changed to - whatever security role is appropriate for your configuration and intended - purpose. 
- -Custom attribute groups -======================= - -A custom attribute configuration can be categorized using a -`CustomAttributeGroup`:: - - group = session.query('CustomAttributeGroup').first() - security_role = session.query('SecurityRole').first() - custom_attribute_type = session.query( - 'CustomAttributeType where name is "enumerator"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Enumerator attribute', - 'key': 'enumerator_attribute', - 'default': ['bar'], - 'config': json.dumps({ - 'multiSelect': True, - 'data': json.dumps([ - {'menu': 'Foo', 'value': 'foo'}, - {'menu': 'Bar', 'value': 'bar'} - ]) - }), - 'group': group, - 'write_security_roles': [security_role], - 'read_security_roles': [security_role] - }) - session.commit() - -.. seealso:: - - :ref:`example/custom_attribute` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst deleted file mode 100644 index 7b16881017..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst +++ /dev/null @@ -1,43 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/metadata: - -************** -Using metadata -************** - -.. currentmodule:: ftrack_api.session - -Key/value metadata can be written to entities using the metadata property -and also used to query entities. 
- -The metadata property has a similar interface as a dictionary and keys can be -printed using the keys method:: - - >>> print new_sequence['metadata'].keys() - ['frame_padding', 'focal_length'] - -or items:: - - >>> print new_sequence['metadata'].items() - [('frame_padding': '4'), ('focal_length': '70')] - -Read existing metadata:: - - >>> print new_sequence['metadata']['frame_padding'] - '4' - -Setting metadata can be done in a few ways where that later one will replace -any existing metadata:: - - new_sequence['metadata']['frame_padding'] = '5' - new_sequence['metadata'] = { - 'frame_padding': '4' - } - -Entities can also be queried using metadata:: - - session.query( - 'Sequence where metadata any (key is "frame_padding" and value is "4")' - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst deleted file mode 100644 index 8f8f1bb57d..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst +++ /dev/null @@ -1,169 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/note: - -*********** -Using notes -*********** - -Notes can be written on almost all levels in ftrack. To retrieve notes on an -entity you can either query them or use the relation called `notes`:: - - task = session.query('Task').first() - - # Retrieve notes using notes property. - notes_on_task = task['notes'] - - # Or query them. - notes_on_task = session.query('Note where parent_id is "{}"'.format( - task['id'] - )) - -.. note:: - - It's currently not possible to use the `parent` property when querying - notes or to use the `parent` property on notes:: - - task = session.query('Task').first() - - # This won't work in the current version of the API. 
- session.query('Note where parent.id is "{}"'.format( - task['id'] - )) - - # Neither will this. - parent_of_note = note['parent'] - -To create new notes you can either use the helper method called -:meth:`~ftrack_api.entity.note.CreateNoteMixin.create_note` on any entity that -can have notes or use :meth:`Session.create` to create them manually:: - - user = session.query('User').first() - - # Create note using the helper method. - note = task.create_note('My new note', author=user) - - # Manually create a note - note = session.create('Note', { - 'content': 'My new note', - 'author': user - }) - - task['notes'].append(note) - -Replying to an existing note can also be done with a helper method or by -using :meth:`Session.create`:: - - # Create using helper method. - first_note_on_task = task['notes'][0] - first_note_on_task.create_reply('My new reply on note', author=user) - - # Create manually - reply = session.create('Note', { - 'content': 'My new note', - 'author': user - }) - - first_note_on_task.replies.append(reply) - -Notes can have labels. Use the label argument to set labels on the -note using the helper method:: - - label = session.query( - 'NoteLabel where name is "External Note"' - ).first() - - note = task.create_note( - 'New note with external category', author=user, labels=[label] - ) - -Or add labels to notes when creating a note manually:: - - label = session.query( - 'NoteLabel where name is "External Note"' - ).first() - - note = session.create('Note', { - 'content': 'New note with external category', - 'author': user - }) - - session.create('NoteLabelLink', { - 'note_id': note['id], - 'label_id': label['id'] - }) - - task['notes'].append(note) - -.. note:: - - Support for labels on notes was added in ftrack server version 4.3. For - older versions of the server, NoteCategory can be used instead. 
- -To specify a category when creating a note simply pass a `NoteCategory` instance -to the helper method:: - - category = session.query( - 'NoteCategory where name is "External Note"' - ).first() - - note = task.create_note( - 'New note with external category', author=user, category=category - ) - -When writing notes you might want to direct the note to someone. This is done -by adding users as recipients. If a user is added as a recipient the user will -receive notifications and the note will be displayed in their inbox. - -To add recipients pass a list of user or group instances to the helper method:: - - john = session.query('User where username is "john"').one() - animation_group = session.query('Group where name is "Animation"').first() - - note = task.create_note( - 'Note with recipients', author=user, recipients=[john, animation_group] - ) - -Attachments -=========== - -Note attachments are files that are attached to a note. In the ftrack web -interface these attachments appears next to the note and can be downloaded by -the user. - -To get a note's attachments through the API you can use the `note_components` -relation and then use the ftrack server location to get the download URL:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - for note_component in note['note_components']: - print 'Download URL: {0}'.format( - server_location.get_url(note_component['component']) - ) - -To add an attachment to a note you have to add it to the ftrack server location -and create a `NoteComponent`:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - # Create component and name it "My file". - component = session.create_component( - '/path/to/file', - data={'name': 'My file'}, - location=server_location - ) - - # Attach the component to the note. 
- session.create( - 'NoteComponent', - {'component_id': component['id'], 'note_id': note['id']} - ) - - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst deleted file mode 100644 index 0b4c0879d6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst +++ /dev/null @@ -1,65 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/project: - -********************* -Working with projects -********************* - -.. currentmodule:: ftrack_api.session - -Creating a project -================== - -A project with sequences, shots and tasks can be created in one single -transaction. Tasks need to have a type and status set on creation based on the -project schema:: - - import uuid - - # Create a unique name for the project. - name = 'projectname_{0}'.format(uuid.uuid1().hex) - - # Naively pick the first project schema. For this example to work the - # schema must contain `Shot` and `Sequence` object types. - project_schema = session.query('ProjectSchema').first() - - # Create the project with the chosen schema. - project = session.create('Project', { - 'name': name, - 'full_name': name + '_full', - 'project_schema': project_schema - }) - - # Retrieve default types. - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - # Create sequences, shots and tasks. 
- for sequence_number in range(1, 5): - sequence = session.create('Sequence', { - 'name': 'seq_{0}'.format(sequence_number), - 'parent': project - }) - - for shot_number in range(1, 5): - shot = session.create('Shot', { - 'name': '{0}0'.format(shot_number).zfill(3), - 'parent': sequence, - 'status': default_shot_status - }) - - for task_number in range(1, 5): - session.create('Task', { - 'name': 'task_{0}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - # Commit all changes to the server. - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst deleted file mode 100644 index bf1da18ab9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/publishing: - -******************* -Publishing versions -******************* - -To know more about publishing and the concepts around publishing, read the -`ftrack article `_ -about publishing. - -To publish an asset you first need to get the context where the asset should be -published:: - - # Get a task from a given id. - task = session.get('Task', '423ac382-e61d-4802-8914-dce20c92b740') - -And the parent of the task which will be used to publish the asset on:: - - asset_parent = task['parent'] - -Then we create an asset and a version on the asset:: - - asset_type = session.query('AssetType where name is "Geometry"').one() - asset = session.create('Asset', { - 'name': 'My asset', - 'type': asset_type, - 'parent': asset_parent - }) - asset_version = session.create('AssetVersion', { - 'asset': asset, - 'task': task - }) - -.. 
note:: - - The task is not used as the parent of the asset, instead the task is linked - directly to the AssetVersion. - -Then when we have a version where we can create the components:: - - asset_version.create_component( - '/path/to/a/file.mov', location='auto' - ) - asset_version.create_component( - '/path/to/a/another-file.mov', location='auto' - ) - - session.commit() - -This will automatically create a new component and add it to the location which -has been configured as the first in priority. - -Components can also be named and added to a custom location like this:: - - location = session.query('Location where name is "my-location"') - asset_version.create_component( - '/path/to/a/file.mov', - data={ - 'name': 'foobar' - }, - location=location - ) - -.. seealso:: - - * :ref:`example/component` - * :ref:`example/web_review` - * :ref:`example/thumbnail` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst deleted file mode 100644 index 68f7870d1c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/review_session: - -********************* -Using review sessions -********************* - -.. currentmodule:: ftrack_api.session - -Client review sessions can either be queried manually or by using a project -instance. - -.. code-block:: python - - review_sessions = session.query( - 'ReviewSession where name is "Weekly review"' - ) - - project_review_sessions = project['review_sessions'] - -To create a new review session on a specific project use :meth:`Session.create`. - -.. 
code-block:: python
-
-    review_session = session.create('ReviewSession', {
-        'name': 'Weekly review',
-        'description': 'See updates from last week.',
-        'project': project
-    })
-
-To add objects to a review session create them using
-:meth:`Session.create` and reference a review session and an asset version.
-
-.. code-block:: python
-
-    review_session = session.create('ReviewSessionObject', {
-        'name': 'Compositing',
-        'description': 'Fixed shadows.',
-        'version': 'Version 3',
-        'review_session': review_session,
-        'asset_version': asset_version
-    })
-
-To list all objects in a review session.
-
-.. code-block:: python
-
-    review_session_objects = review_session['review_session_objects']
-
-Listing and adding collaborators to review session can be done using
-:meth:`Session.create` and the `review_session_invitees` relation on a
-review session.
-
-.. code-block:: python
-
-    invitee = session.create('ReviewSessionInvitee', {
-        'name': 'John Doe',
-        'email': 'john.doe@example.com',
-        'review_session': review_session
-    })
-
-    session.commit()
-
-    invitees = review_session['review_session_invitees']
-
-To remove a collaborator simply delete the object using
-:meth:`Session.delete`.
-
-.. code-block:: python
-
-    session.delete(invitee)
-
-To send out an invite email to a single collaborator use
-:meth:`Session.send_review_session_invite`.
-
-.. code-block:: python
-
-    session.send_review_session_invite(invitee)
-
-Multiple invitees can have emails sent to them in one batch using
-:meth:`Session.send_review_session_invites`.
-
-..
code-block:: python - - session.send_review_session_invites(a_list_of_invitees) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst deleted file mode 100644 index 3be42322ce..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst +++ /dev/null @@ -1,27 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/scope: - -************ -Using scopes -************ - -.. currentmodule:: ftrack_api.session - -Entities can be queried based on their scopes:: - - >>> tasks = session.query( - ... 'Task where scopes.name is "London"' - ... ) - -Scopes can be read and modified for entities:: - - >>> scope = session.query( - ... 'Scope where name is "London"' - ... )[0] - ... - ... if scope in task['scopes']: - ... task['scopes'].remove(scope) - ... else: - ... task['scopes'].append(scope) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst deleted file mode 100644 index 4219e3d126..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/security_roles: - -********************************* -Working with user security roles -********************************* - -.. currentmodule:: ftrack_api.session - -The API exposes `SecurityRole` and `UserSecurityRole` that can be used to -specify who should have access to certain data on different projects. - -List all available security roles like this:: - - security_roles = session.query( - 'select name from SecurityRole where type is "PROJECT"' - ) - -.. 
note:: - - We only query for project roles since those are the ones we can add to a - user for certain projects. Other types include API and ASSIGNED. Type API - can only be added to global API keys, which is currently not supported via - the api and type ASSIGNED only applies to assigned tasks. - -To get all security roles from a user we can either use relations like this:: - - for user_security_role in user['user_security_roles']: - if user_security_role['is_all_projects']: - result_string = 'all projects' - else: - result_string = ', '.join( - [project['full_name'] for project in user_security_role['projects']] - ) - - print 'User has security role "{0}" which is valid on {1}.'.format( - user_security_role['security_role']['name'], - result_string - ) - -or query them directly like this:: - - user_security_roles = session.query( - 'UserSecurityRole where user.username is "{0}"'.format(session.api_user) - ).all() - -User security roles can also be added to a user for all projects like this:: - - project_manager_role = session.query( - 'SecurityRole where name is "Project Manager"' - ).one() - - session.create('UserSecurityRole', { - 'is_all_projects': True, - 'user': user, - 'security_role': project_manager_role - }) - session.commit() - -or for certain projects only like this:: - - projects = session.query( - 'Project where full_name is "project1" or full_name is "project2"' - ).all()[:] - - session.create('UserSecurityRole', { - 'user': user, - 'security_role': project_manager_role, - 'projects': projects - }) - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst deleted file mode 100644 index 5ea0e47dc6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst +++ /dev/null @@ -1,30 +0,0 @@ -.. 
-    :copyright: Copyright (c) 2014 ftrack
-
-.. _example/sync_with_ldap:
-
-********************
-Sync users with LDAP
-********************
-
-.. currentmodule:: ftrack_api.session
-
-
-If ftrack is configured to connect to LDAP you may trigger a
-synchronization through the api using the
-:meth:`ftrack_api.session.Session.call`::
-
-    result = session.call([
-        dict(
-            action='delayed_job',
-            job_type='SYNC_USERS_LDAP'
-        )
-    ])
-    job = result[0]['data']
-
-You will get a `ftrack_api.entity.job.Job` instance back which can be used
-to check the success of the job::
-
-    if job.get('status') == 'failed':
-        # The job failed, get the error.
-        logging.error(job.get('data'))
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst
deleted file mode 100644
index c6161e834a..0000000000
--- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst
+++ /dev/null
@@ -1,56 +0,0 @@
-..
-    :copyright: Copyright (c) 2017 ftrack
-
-.. _example/task_template:
-
-***************************
-Working with Task Templates
-***************************
-
-Task templates can help you organize your workflows by building a collection
-of tasks to be applied for specific contexts. They can be applied to all `Context`
-objects for example Project, Sequences, Shots, etc...
-
-Query task templates
-=======================
-
-Retrieve all task templates and their tasks for a project::
-
-    project = session.query('Project').first()
-
-    for task_template in project['project_schema']['task_templates']:
-        print('\ntask template: {0}'.format(
-            task_template['name']
-        ))
-
-        for task_type in [t['task_type'] for t in task_template['items']]:
-            print('\ttask type: {0}'.format(
-                task_type['name']
-            ))
-
-
-
-"Apply" a task template
-=======================
-Create all tasks in a random task template directly under the project::
-
-
-    project = session.query('Project').first()
-
-    task_template = random.choice(
-        project['project_schema']['task_templates']
-    )
-
-    for task_type in [t['task_type'] for t in task_template['items']]:
-        session.create(
-            'Task', {
-                'name': task_type['name'],
-                'type': task_type,
-                'parent': project
-            }
-        )
-
-    session.commit()
-
-
-
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst
deleted file mode 100644
index 64199869a5..0000000000
--- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst
+++ /dev/null
@@ -1,71 +0,0 @@
-..
-    :copyright: Copyright (c) 2016 ftrack
-
-.. _example/thumbnail:
-
-***********************
-Working with thumbnails
-***********************
-
-Components can be used as thumbnails on various entities, including
-`Project`, `Task`, `AssetVersion` and `User`.
To create and set a thumbnail
-you can use the helper method
-:meth:`~ftrack_api.entity.component.CreateThumbnailMixin.create_thumbnail` on
-any entity that can have a thumbnail::
-
-    task = session.get('Task', my_task_id)
-    thumbnail_component = task.create_thumbnail('/path/to/image.jpg')
-
-It is also possible to set an entity thumbnail by setting its `thumbnail`
-relation or `thumbnail_id` attribute to a component you would
-like to use as a thumbnail. For a component to be usable as a thumbnail,
-it should
-
-    1. Be a FileComponent.
-    2. Exist in the *ftrack.server* :term:`location`.
-    3. Be of an appropriate resolution and valid file type.
-
-The following example creates a new component in the server location, and
-uses that as a thumbnail for a task::
-
-    task = session.get('Task', my_task_id)
-    server_location = session.query(
-        'Location where name is "ftrack.server"'
-    ).one()
-
-    thumbnail_component = session.create_component(
-        '/path/to/image.jpg',
-        dict(name='thumbnail'),
-        location=server_location
-    )
-    task['thumbnail'] = thumbnail_component
-    session.commit()
-
-The next example reuses a version's thumbnail for the asset parent thumbnail::
-
-    asset_version = session.get('AssetVersion', my_asset_version_id)
-    asset_parent = asset_version['asset']['parent']
-    asset_parent['thumbnail_id'] = asset_version['thumbnail_id']
-    session.commit()
-
-.. _example/thumbnail/url:
-
-Retrieving thumbnail URL
-========================
-
-To get a URL to a thumbnail, `thumbnail_component`, which can be used
-to download or display the image in an interface, use the following::
-
-    import ftrack_api.symbol
-    server_location = session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID)
-    thumbnail_url = server_location.get_thumbnail_url(thumbnail_component)
-    thumbnail_url_tiny = server_location.get_thumbnail_url(
-        thumbnail_component, size=100
-    )
-    thumbnail_url_large = server_location.get_thumbnail_url(
-        thumbnail_component, size=500
-    )
-
-..
seealso::
-
-    :ref:`example/component`
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst
deleted file mode 100644
index eb86e2f897..0000000000
--- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst
+++ /dev/null
@@ -1,37 +0,0 @@
-..
-    :copyright: Copyright (c) 2015 ftrack
-
-.. _example/timer:
-
-************
-Using timers
-************
-
-.. currentmodule:: ftrack_api.session
-
-Timers can be used to track how much time has been spent working on something.
-
-To start a timer for a user::
-
-    user = # Get a user from ftrack.
-    task = # Get a task from ftrack.
-
-    user.start_timer(task)
-
-A timer has now been created for that user and should show up in the ftrack web
-UI.
-
-To stop the currently running timer for a user and create a timelog from it::
-
-    user = # Get a user from ftrack.
-
-    timelog = user.stop_timer()
-
-.. note::
-
-    Starting a timer when a timer is already running will raise an exception.
-    Use the force parameter to automatically stop the running timer first.
-
-    .. code-block:: python
-
-        user.start_timer(task, force=True)
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst
deleted file mode 100644
index f1dede570f..0000000000
--- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst
+++ /dev/null
@@ -1,78 +0,0 @@
-..
-    :copyright: Copyright (c) 2016 ftrack
-
-.. currentmodule:: ftrack_api.session
-
-.. _example/web_review:
-
-*************************
-Publishing for web review
-*************************
-
-Follow the :ref:`example/encode_media` example if you want to
-upload and encode media using ftrack.
-
-If you already have a file encoded in the correct format and want to bypass
-the built-in encoding in ftrack, you can create the component manually
-and add it to the `ftrack.server` location::
-
-    # Retrieve or create version.
-    version = session.get('AssetVersion', 'SOME-ID')
-
-    server_location = session.query('Location where name is "ftrack.server"').one()
-    filepath = '/path/to/local/file.mp4'
-
-    component = version.create_component(
-        path=filepath,
-        data={
-            'name': 'ftrackreview-mp4'
-        },
-        location=server_location
-    )
-
-    # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*.
-    component['metadata']['ftr_meta'] = json.dumps({
-        'frameIn': 0,
-        'frameOut': 150,
-        'frameRate': 25
-    })
-
-    component.session.commit()
-
-To publish an image for review the steps are similar::
-
-    # Retrieve or create version.
-    version = session.get('AssetVersion', 'SOME-ID')
-
-    server_location = session.query('Location where name is "ftrack.server"').one()
-    filepath = '/path/to/image.jpg'
-
-    component = version.create_component(
-        path=filepath,
-        data={
-            'name': 'ftrackreview-image'
-        },
-        location=server_location
-    )
-
-    # Meta data needs to contain *format*.
-    component['metadata']['ftr_meta'] = json.dumps({
-        'format': 'image'
-    })
-
-    component.session.commit()
-
-Here is a list of component names and how they should be used:
-
-================== =====================================
-Component name     Use
-================== =====================================
-ftrackreview-image Images reviewable in the browser
-ftrackreview-mp4   H.264/mp4 video reviewable in browser
-ftrackreview-webm  WebM video reviewable in browser
-================== =====================================
-
-.. note::
-
-    Make sure to use the pre-defined component names and set the `ftr_meta` on
-    the components or review will not work.
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst deleted file mode 100644 index aa5cc77976..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst +++ /dev/null @@ -1,76 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******** -Glossary -******** - -.. glossary:: - - accessor - An implementation (typically a :term:`Python` plugin) for accessing - a particular type of storage using a specific protocol. - - .. seealso:: :ref:`locations/overview/accessors` - - action - Actions in ftrack provide a standardised way to integrate other tools, - either off-the-shelf or custom built, directly into your ftrack - workflow. - - .. seealso:: :ref:`ftrack:using/actions` - - api - Application programming interface. - - arrow - A Python library that offers a sensible, human-friendly approach to - creating, manipulating, formatting and converting dates, times, and - timestamps. Read more at http://crsmithdev.com/arrow/ - - asset - A container for :term:`asset versions `, typically - representing the output from an artist. For example, 'geometry' - from a modeling artist. Has an :term:`asset type` that categorises the - asset. - - asset type - Category for a particular asset. - - asset version - A specific version of data for an :term:`asset`. Can contain multiple - :term:`components `. - - component - A container to hold any type of data (such as a file or file sequence). - An :term:`asset version` can have any number of components, each with - a specific name. For example, a published version of geometry might - have two components containing the high and low resolution files, with - the component names as 'hires' and 'lowres' respectively. - - PEP-8 - Style guide for :term:`Python` code. 
Read the guide at - https://www.python.org/dev/peps/pep-0008/ - - plugin - :term:`Python` plugins are used by the API to extend it with new - functionality, such as :term:`locations ` or :term:`actions `. - - .. seealso:: :ref:`understanding_sessions/plugins` - - python - A programming language that lets you work more quickly and integrate - your systems more effectively. Often used in creative industries. Visit - the language website at http://www.python.org - - PyPi - :term:`Python` package index. The Python Package Index or PyPI is the - official third-party software repository for the Python programming - language. Visit the website at https://pypi.python.org/pypi - - resource identifier - A string that is stored in ftrack as a reference to a resource (such as - a file) in a specific location. Used by :term:`accessors ` to - determine how to access data. - - .. seealso:: :ref:`locations/overview/resource_identifiers` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst deleted file mode 100644 index 1d378473fa..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst +++ /dev/null @@ -1,315 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _handling_events: - -*************** -Handling events -*************** - -.. currentmodule:: ftrack_api.event - -Events are generated in ftrack when things happen such as a task being updated -or a new version being published. Each :class:`~ftrack_api.session.Session` -automatically connects to the event server and can be used to subscribe to -specific events and perform an action as a result. That action could be updating -another related entity based on a status change or generating folders when a new -shot is created for example. 
- -The :class:`~hub.EventHub` for each :class:`~ftrack_api.session.Session` is -accessible via :attr:`Session.event_hub -<~ftrack_api.session.Session.event_hub>`. - -.. _handling_events/subscribing: - -Subscribing to events -===================== - -To listen to events, you register a function against a subscription using -:meth:`Session.event_hub.subscribe `. The subscription -uses the :ref:`expression ` syntax and will filter -against each :class:`~base.Event` instance to determine if the registered -function should receive that event. If the subscription matches, the registered -function will be called with the :class:`~base.Event` instance as its sole -argument. The :class:`~base.Event` instance is a mapping like structure and can -be used like a normal dictionary. - -The following example subscribes a function to receive all 'ftrack.update' -events and then print out the entities that were updated:: - - import ftrack_api - - - def my_callback(event): - '''Event callback printing all new or updated entities.''' - for entity in event['data'].get('entities', []): - - # Print data for the entity. - print(entity) - - - # Subscribe to events with the update topic. - session = ftrack_api.Session() - session.event_hub.subscribe('topic=ftrack.update', my_callback) - -At this point, if you run this, your code would exit almost immediately. This -is because the event hub listens for events in a background thread. Typically, -you only want to stay connected whilst using the session, but in some cases you -will want to block and listen for events solely - a dedicated event processor. -To do this, use the :meth:`EventHub.wait ` method:: - - # Wait for events to be received and handled. - session.event_hub.wait() - -You cancel waiting for events by using a system interrupt (:kbd:`Ctrl-C`). -Alternatively, you can specify a *duration* to process events for:: - - # Only wait and process events for 5 seconds. - session.event_hub.wait(duration=5) - -.. 
note:: - - Events are continually received and queued for processing in the background - as soon as the connection to the server is established. As a result you may - see a flurry of activity as soon as you call - :meth:`~hub.EventHub.wait` for the first time. - -.. _handling_events/subscribing/subscriber_information: - -Subscriber information ----------------------- - -When subscribing, you can also specify additional information about your -subscriber. This contextual information can be useful when routing events, -particularly when :ref:`targeting events -`. By default, the -:class:`~hub.EventHub` will set some default information, but it can be -useful to enhance this. To do so, simply pass in *subscriber* as a dictionary of -data to the :meth:`~hub.EventHub.subscribe` method:: - - session.event_hub.subscribe( - 'topic=ftrack.update', - my_callback, - subscriber={ - 'id': 'my-unique-subscriber-id', - 'applicationId': 'maya' - } - ) - -.. _handling_events/subscribing/sending_replies: - -Sending replies ---------------- - -When handling an event it is sometimes useful to be able to send information -back to the source of the event. For example, -:ref:`ftrack:developing/events/list/ftrack.location.request-resolve` would -expect a resolved path to be sent back. - -You can craft a custom reply event if you want, but an easier way is just to -return the appropriate data from your handler. Any non *None* value will be -automatically sent as a reply:: - - def on_event(event): - # Send following data in automatic reply. - return {'success': True, 'message': 'Cool!'} - - session.event_hub.subscribe('topic=test-reply', on_event) - -.. seealso:: - - :ref:`handling_events/publishing/handling_replies` - -.. note:: - - Some events are published :ref:`synchronously - `. In this case, any returned data - is passed back to the publisher directly. - -.. 
_handling_events/subscribing/stopping_events: - -Stopping events ---------------- - -The *event* instance passed to each event handler also provides a method for -stopping the event, :meth:`Event.stop `. - -Once an event has been stopped, no further handlers for that specific event -will be called **locally**. Other handlers in other processes may still be -called. - -Combining this with setting appropriate priorities when subscribing to a topic -allows handlers to prevent lower priority handlers running when desired. - - >>> import ftrack_api - >>> import ftrack_api.event.base - >>> - >>> def callback_a(event): - ... '''Stop the event!''' - ... print('Callback A') - ... event.stop() - >>> - >>> def callback_b(event): - ... '''Never run.''' - ... print('Callback B') - >>> - >>> session = ftrack_api.Session() - >>> session.event_hub.subscribe( - ... 'topic=test-stop-event', callback_a, priority=10 - ... ) - >>> session.event_hub.subscribe( - ... 'topic=test-stop-event', callback_b, priority=20 - ... ) - >>> session.event_hub.publish( - ... ftrack_api.event.base.Event(topic='test-stop-event') - ... ) - >>> session.event_hub.wait(duration=5) - Callback A called. - -.. _handling_events/publishing: - -Publishing events -================= - -So far we have looked at listening to events coming from ftrack. However, you -are also free to publish your own events (or even publish relevant ftrack -events). - -To do this, simply construct an instance of :class:`ftrack_api.event.base.Event` -and pass it to :meth:`EventHub.publish ` via the session:: - - import ftrack_api.event.base - - event = ftrack_api.event.base.Event( - topic='my-company.some-topic', - data={'key': 'value'} - ) - session.event_hub.publish(event) - -The event hub will automatically add some information to your event before it -gets published, including the *source* of the event. By default the event source -is just the event hub, but you can customise this to provide more relevant -information if you want. 
For example, if you were publishing from within Maya:: - - session.event_hub.publish(ftrack_api.event.base.Event( - topic='my-company.some-topic', - data={'key': 'value'}, - source={ - 'applicationId': 'maya' - } - )) - -Remember that all supplied information can be used by subscribers to filter -events so the more accurate the information the better. - -.. _handling_events/publishing/synchronously: - -Publish synchronously ---------------------- - -It is also possible to call :meth:`~hub.EventHub.publish` synchronously by -passing `synchronous=True`. In synchronous mode, only local handlers will be -called. The result from each called handler is collected and all the results -returned together in a list:: - - >>> import ftrack_api - >>> import ftrack_api.event.base - >>> - >>> def callback_a(event): - ... return 'A' - >>> - >>> def callback_b(event): - ... return 'B' - >>> - >>> session = ftrack_api.Session() - >>> session.event_hub.subscribe( - ... 'topic=test-synchronous', callback_a, priority=10 - ... ) - >>> session.event_hub.subscribe( - ... 'topic=test-synchronous', callback_b, priority=20 - ... ) - >>> results = session.event_hub.publish( - ... ftrack_api.event.base.Event(topic='test-synchronous'), - ... synchronous=True - ... ) - >>> print results - ['A', 'B'] - -.. _handling_events/publishing/handling_replies: - -Handling replies ----------------- - -When publishing an event it is also possible to pass a callable that will be -called with any :ref:`reply event ` -received in response to the published event. - -To do so, simply pass in a callable as the *on_reply* parameter:: - - def handle_reply(event): - print 'Got reply', event - - session.event_hub.publish( - ftrack_api.event.base.Event(topic='test-reply'), - on_reply=handle_reply - ) - -.. 
_handling_events/publishing/targeting: - -Targeting events ----------------- - -In addition to subscribers filtering events to receive, it is also possible to -give an event a specific target to help route it to the right subscriber. - -To do this, set the *target* value on the event to an :ref:`expression -`. The expression will filter against registered -:ref:`subscriber information -`. - -For example, if you have many subscribers listening for a event, but only want -one of those subscribers to get the event, you can target the event to the -subscriber using its registered subscriber id:: - - session.event_hub.publish( - ftrack_api.event.base.Event( - topic='my-company.topic', - data={'key': 'value'}, - target='id=my-custom-subscriber-id' - ) - ) - -.. _handling_events/expressions: - -Expressions -=========== - -An expression is used to filter against a data structure, returning whether the -structure fulfils the expression requirements. Expressions are currently used -for subscriptions when :ref:`subscribing to events -` and for targets when :ref:`publishing targeted -events `. - -The form of the expression is loosely groupings of 'key=value' with conjunctions -to join them. - -For example, a common expression for subscriptions is to filter against an event -topic:: - - 'topic=ftrack.location.component-added' - -However, you can also perform more complex filtering, including accessing -nested parameters:: - - 'topic=ftrack.location.component-added and data.locationId=london' - -.. note:: - - If the structure being tested does not have any value for the specified - key reference then it is treated as *not* matching. - -You can also use a single wildcard '*' at the end of any value for matching -multiple values. 
For example, the following would match all events that have a -topic starting with 'ftrack.':: - - 'topic=ftrack.*' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png deleted file mode 100644 index 7438cb52bebd5dd1c0c5814cd7e1d5f2fdf6a572..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7313 zcmV;C9B$)@P)KOJ#IsdX9YK2_oxysWkE--^>ub$^C=yJ%K%Nmb;2;67`L)eAY4kcqufz9M);<8V8!p z=o?uv&!ZhL4m4L)*EDnoGk5M=e08kfw{zRawV>DKmo+j(-X%mx?aklXJ34-A7N39K ze@&v_bdT*GepTS$Og#Q7dZ2A3&$bdOFRNkcPiuQ=E~c>2)91UHrmhjwBz$)TLRRNP zR|~zM+_`ToU4~HW(JdcWod`9orf&147T29$o58)^S`X+i%A<8<#YDSnTx70uIw!yKJOJ(=j z_ibhR<=Sq>C~i};bUo6;?ouuandaZ8P_5UzeB~e3mw({$&@~8xy5|X}E(ysG)Yu33 z^P8z^w=Q`m##C7lgr%;z)@FS_pP%=BWOp!mA=L-f6`cXLzq$XPX;6iXTY*)o}39>}p6n$tmGpgu~iD$?*G{5_Z4unBilu z{R3c;HHe3GZuSE_?i;+UDDaB`KBEh1ab&xL%>g>FpL*yUXeA1=I*7S=F^gkYe)=^C5*GB&v?!spa8xuMf* zmPG5fz_8}ut|zHpefHCZ^axtf=98)&#TF=C$aJV2A&Y}8G7U0VK62K9sP(*hO=SyT zR9I?eV`KUxTr8xuSV%nUDjYag=0Cak=Aq&`S?z?Aj^G=;P3bpi53r^6e&OpB!vrGa z*`NQ?)zu9)y6)eHot_&LHFS8#pCzZkw+l?Hilf2kSWq1OB06H={iEF2F>I1VAHnqH zp>GIT!5UKGlGTDBD5?z3VQYnV%Wn{}0^eMCC96?$s-{9h z24`5k)U}MvRH1y>n)wn-Rss+vMF+&!rW&CsSJ~9_l&rsYoAQ$JxEXE@QLP!H4|dvhV@zzFEit5UD1(uV%F(A@$o(wL6|ZUd->r zbmBpu!>XXN{(z{YkJ%+gRCPC8S*Q~2_|j}FcV)?ntPeOTtEDFJLi8iCSSJ>QMD?sJ zO*BFSuK70vn`y!n)k**os=<-E+eUTm?Zuh{Yh3E%Pb0Pa}Z-vat3=DLAFQibi8U4Gs`V6VN@w&x^%vX|Fp?m0USUnp?k!k3pgx!>WvAV4voV2?O zD_enq%~g2B>0??fsCH5^re$LdSuA9EHxCW`>!1f!&z|JS5d2mkLVid;rY9q9H7l_GM+q?69=#2NpVjScK5agadSJ$Z|vVmHHoh)~$3Yj)nC^yaP|b3lI~N-js~>WMo9FBO+oPZCFR5L zQs()w`8VKA7KCJ@a9n*WA*snU7^Rc*J3)Sh_~kEuDTMf&i>7Jv4d4I%_YL9OkmpoI zBq-(0d_C8M#O%7XvobX1UwWZ`Ro?VG4wKNfNXUjD&Y8fM&Z{)ShY4^1b6-UP zK~zj07y{P%vKfYPVuFOF-2Dw{fXrf6hiOgdD+oxCGz$7YB4O6n7c^@o0e+@BAgKus zTODj_=(Do+II zZz-Yxn+hPun1n4? 
zudyH>KYsklC!hS&g}CD5k3Tj9ud%{6k#uaH>Cn@~l%y?+o^j6F(3Gs@h9$<#l)Il7 z&W3JW;0Z0DWiJ)uGws`vt3$&c`ZMkrpZY2!D9Al%n0L#ih zBe3VTC_B0=QyJSeMHT_csc;0TXlU}P>B?C@cs84Zxz8yGeZuC>Va$$bI9Wh$rj^Gi zGs(vkYFv(#34Ja|o3jE{MI~SPIE<_QT(xhGZlv793VAWr>5ItBe%8;+n3T-`@6N(H z2~D83h{4;i4tZcaa9ri<*o>FgdWU@U=#f%NdSHGsgwqcUQca(49<3xVOsD1JCdk~- ziq1BfZd3Ch!@_wD6@ym<$^=a1eq@!jgrJnK>(b?hT8QJ;-8uc@sML&%^R2<6=)dd1JXZD41vkL z{w4J!0~6dJiU9gV&kcA6cPlLX_4IllSsy-p`0UxU&pa?c8N%t?irmFq!1{;B7xUZR zk!zg>``QLOq<|KRur>XLipBO8!8R@iMR-qlPA56$&e-87xAO|<3>1kwc2#6Wi-NR< z*i|S<-kTvHASXK{U~02{PDOvPLwe?pD?+mr}+JG3x$euNJ+AxO`^A>M9C@;?dC$q+th~3es|xK7+B_c~*o6f(*;u z_tFx0F0G-{F8e_gmhHKU&UQ$I7DJVK&K;h)<6e*|Bp-o88)~WVO zM3-A}8tzbSAm0X{uYwHr2Adl=FvsAl)vx=d&PN}8Wd09__Q3pP2tHff#7LCWMTlh@ z*)rD&d@gdq$2uVU`E6x=&x~r-kOz2DU55jLt&e z6Qt%{WJ*2B-4Go2LTldVW?VA#I7S43Z|C(Eiq zDvHz!vV1N`JdVOslooaXD+Ql{8KZKYWw3nTjW}saNZJ9PP+sEkf(s(+{ zGj}m;9|6=$!*0e2y5p(2AZe5yLEbNmtgY)Hr9$7Y_RY}&da2UT=>(^YK(FD?oxuw;%hI+4?g(dT#!gOh2E}zv|Yx9kQ%Fr1E9dok)b#+k0d@kGZaFo!=Q$( z6b@Gx)>=VcZqy?`HiuiG(Ww@<*(wP%tkfxAD`}u{EY^Dv`u^PgxSfVkl|Cusd7=on z+P6nX!vk;rI}32_)^W}M&)${&rqLwv`Gbc26UP65%`@^_jyGF=GWzb*Lb)wDht~cM zqh;;f3GseFg833Yc4yfgv>MBYG)REJ5};v(T4=Q5d8?XA(rlW^e5bE2)x)@_y1J^T ze_h>e)b%Gm{hb;>h7vf|za}A)hs~2n{=X@35K$02quodr;xbk21mOTe!_(JO07%QS zo;`bZ@7}#{zy0?1?b|Dy-~s-GLU;uN$HGuWiu{h_NdD{Zdv&Mo{ieUvQKU$b`sth^ zMT*p&I*JvmORgqxFkG4m# zVg<;6D?E-9CjH{(ciRpB%#NE{>Hg~0{wa4;0rJOJrQj<(K;U?o%C2eJr=>HS1sT)4 zQ$m#Aj{BJ7EK>9oAdd|fUj7t(g$D@yK22ll|5!5glrQ9dJOUQ1Ju0R&n7^qbgRZlk zv)aA_FhM1;&+gZ7K!j$nV9i`#U|>slMB=FR5tm+$mWfm|LcG9 zdgIX@gL>W^@LED{GJj)!3cFDOG7P`iei?j)2MSE8&i2pqPn)hdd*x!b+!~b_iA=3w zFTp38&kQI#I?~dk;={!3DA}u9H4(S5JH@iy_TR`%tv<@zhrU}$Ehe7Y)z$OS-r+^Fh z2g5Kf4Ab+Yiq2js#j` zodY{tuwe?=d$hc=Wz`z{D=|t7|4z44mUyX7A+;_{c9}(;JeBl3c%3sJUPs}jLdUIq zhaT_&6Kv zyvdaAuNs$1u<;wN&j66Vb3IgU zBuvTjfwAob)s=NS_LW>GC7)lgeKHt7sHEp3*+UA^*Yr_8w?2iw_$&{F<}Dfq3Xq{V z_o5JZg$D|dup9K47=OZ+!%Oa#)Mzogg$9URV8t&9w2p6EDa4uyLAbX%$q}Z)sK#C3uIN{3Yw(4RP 
zCr2wuU84vo>vmKn*C{kQX%j0}6qVaa==qC#UVRbfphtDJhYZWv=i9+ocz{4~gxshO z$4esFuKb$?0+Pxp9L*q@oA|tMKsl2GGPsFm?gMzpRxxC)8)K2Df4F}Q*P36{WZGmh zg0&t-$i-Ysa2Wz%2p4#SoTJe>%zhTp3?3mbAX)+q zL+}RxQm!*BAfuN58j$+yem>0Wpd&U#lavBvNd6)FBKQgq5SUn<**IcnE}4y9yRl!a zw5a65oxQH#_gYrP=?4H3zF7Qe+aJm%T2PXzc^y2Jp|$AG2IO86>PEBGo>NNU;7AnV zZL^ZkV)kW!I}wosQZ6G}%hth3{R8We^}G9@8nhJ*;ew9?U5hLwQ%m@OS=rs!E9Awb zX*n7S+~n| zP6Q-_bDHV+Q~J0dd@BzXo!lk#w8FT-KK2qc8_%m#)}O(D%RCRhF#rSts=GT+r!}I{ zyz3tDa}Fwcv&4!LT7+kN^8=K@@FJEY*K(Ig!ThDg8I&8PVGRdizT=Z+!f~?4m#j7e=S7vv{hKxSziMc1vdAISSGAEqekW zqnbh3XqHn3iG!*PMJ>O+av`D-&6_TX8yKP=E~2TR-H20SN-{Q!;ct)@Q@9l5Wps@Bb15##~-e!tl!Dul#Wt zpEVCVBDY6ka`xL@HW-1H9zcWNXRPlBY9pB-Yx^EpNIr&h0jtof(biFn9zK{#ExKLN zBxKi@xaNA2-s@wbFZ|gX=^S{ZTy!LyPU7Hmgn`!^t4IezmzqC&Oe#PI5&`nKFl}6k zggAVJ+QPG$;Kxg`S)IqIbXAIHhWpQPqBEr9$8H(JpyancduVa;p*{w{vU^UsPaLQ}hJ9Wv4l;5)} z`)yzSQb)03#R`y$6`xR6cj_ooq%OIdKnDj0|B}%Wn4(CL0wiEhdWDC=q#^~#BcO7* zJn0G#c=bnHqezhgWWW_3|7-8c6=b!QXnzs^z!&HTh>1Jxl#CFscqPJq@8OMzyKxtJ z!+utxgoJ=Z34sD73KX=Zf>uwf2VGBlw>c5RluA{lIv{nx`SUob23#KEI6mZ~I&RqT zZz1Hj-+t@=goifxGw4_vd84ez)4Z3zvGWZQq=-B5*Nbc;vmi9BLHq z%PL!!>EunecVy3p_7qaMZ%1(_B%DQXJ8Aet=DDJz=GLHX)9@kypEoB#KwqbGQu};S zHH31NZF!jeRy{weMdCX!ipZ5bDpDoEWeV4D>kLq=OQP^JlsYRV$0iC7RGuu+JV}@} z{c$a}OU(7J>SgMEp_5`63ShkeqKkH6J<5%c55uchul%3z&<1N`#n_qNjq;9At~5WP zsZrWu%5}fiS>6!F`aC*o>lbM5`w}I2nIQC?Fvmg)oLV_?P_$36)f)zddRZc~_ITizTh{QTS-%L6 zPZMj5tBju-q8j9yk{~i1?k*^5T0LI|K(7N^Kt_T?{l8hieAEezZY@QSCU`n*78_qHtAz!?B;s1n(Hdq@A{Rchg==cN1(~d5% z(w`t7pHbnAm8^|-0F?Bu3_V=mMO2(6vgPU-G%}C(g+=G4N!km>X2gbM32<>u_(}3! zoc3}tyj#59+yr?ZUS^OJr(E{hCO)IDlVwQOy1A~`*(wdMt(2X`oo?pdN^CP64^F0; zc}HSyPQn4^+ZpoVc>esk{}UeC;1)v8+bwuT4h357FU`%Ju>i$L6+&CPgoLMf%RLR@ z!ZO5tLxzQ1{-OxBXEvlu61s%UsI)Ds$4!23Zi1>dEY2?(wL(aexepq}aj|#bc!0G! 
zgOgn~s@D<{EW^`us)9JhFC`?R>f}6FUi(93HUp7~saD&MIyd0ivuFNKcxVG3A#wln(N>goIt9c2?j) zh>5iAD37c;z(HjcshuE-ZU7 zdw?}*^}w+;+2s_>MF)pyDBTU%2>JBsQ|}i8z+fGHb+GdHG)H$-=|z1AcYu9z2#iS~ zbX#2ZE6S<9D5KqiU{AR17(}eEp|K|O6xLuBYRu!nZW7@oAlp2)Gv$~qVVns`k?AC1 z8y%X=#JNLg8Hcjbxbk^(69mvIu^GV4i*nHdL47oM5uehWc?Ad$mD}LHIv+(}BRe)D zmn(Hqy@`~eEmPX)duy@5C}0{~L*Dks@J`*?iNH@gH(?{>lP6ESgaiXGloUeVzdezH zO9eJy;&-CqHxZl*Deq}XJ~^_V7II=Kis+1o06RS>tMr{QYoh}E17!g ztRQ>kUnyHBe`F5qF;xjCr6~>IiE}$PLf&rhSAZzc&OzGo-_U=mANU>r7i9VX2x7LW zopA3@t$7c()2)6tLjLf>4_|%t)u*3+`oRYuy!F;w89-m?KlM%hFKuxAS3|L(XU@xe rgJ69>%In+H&=0rLaU`. - -.. toctree:: - :maxdepth: 1 - - introduction - installing - tutorial - understanding_sessions - working_with_entities - querying - handling_events - caching - locations/index - example/index - api_reference/index - event_list - environment_variables - security_and_authentication - release/index - glossary - -****************** -Indices and tables -****************** - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst deleted file mode 100644 index 5e42621bee..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/installing.rst +++ /dev/null @@ -1,77 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _installing: - -********** -Installing -********** - -.. highlight:: bash - -Installation is simple with `pip `_:: - - pip install ftrack-python-api - -Building from source -==================== - -You can also build manually from the source for more control. 
First obtain a -copy of the source by either downloading the -`zipball `_ or -cloning the public repository:: - - git clone git@bitbucket.org:ftrack/ftrack-python-api.git - -Then you can build and install the package into your current Python -site-packages folder:: - - python setup.py install - -Alternatively, just build locally and manage yourself:: - - python setup.py build - -Building documentation from source ----------------------------------- - -To build the documentation from source:: - - python setup.py build_sphinx - -Then view in your browser:: - - file:///path/to/ftrack-python-api/build/doc/html/index.html - -Running tests against the source --------------------------------- - -With a copy of the source it is also possible to run the unit tests:: - - python setup.py test - -Dependencies -============ - -* `ftrack server `_ >= 3.3.11 -* `Python `_ >= 2.7, < 3 -* `Requests `_ >= 2, <3, -* `Arrow `_ >= 0.4.4, < 1, -* `termcolor `_ >= 1.1.0, < 2, -* `pyparsing `_ >= 2.0, < 3, -* `Clique `_ >= 1.2.0, < 2, -* `websocket-client `_ >= 0.40.0, < 1 - -Additional For building ------------------------ - -* `Sphinx `_ >= 1.2.2, < 2 -* `sphinx_rtd_theme `_ >= 0.1.6, < 1 -* `Lowdown `_ >= 0.1.0, < 2 - -Additional For testing ----------------------- - -* `Pytest `_ >= 2.3.5, < 3 -* `pytest-mock `_ >= 0.4, < 1, -* `pytest-catchlog `_ >= 1, <=2 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst deleted file mode 100644 index 63fe980749..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _introduction: - -************ -Introduction -************ - -This API allows developers to write :term:`Python` scripts that talk directly -with an ftrack server. 
The scripts can perform operations against that server -depending on granted permissions. - -With any API it is important to find the right balance between flexibility and -usefulness. If an API is too low level then everyone ends up writing boilerplate -code for common problems and usually in an non-uniform way making it harder to -share scripts with others. It's also harder to get started with such an API. -Conversely, an API that attempts to be too smart can often become restrictive -when trying to do more advanced functionality or optimise for performance. - -With this API we have tried to strike the right balance between these two, -providing an API that should be simple to use out-of-the-box, but also expose -more flexibility and power when needed. - -Nothing is perfect though, so please do provide feedback on ways that we can -continue to improve this API for your specific needs. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst deleted file mode 100644 index 97483221aa..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/configuring: - -********************* -Configuring locations -********************* - -To allow management of data by a location or retrieval of filesystem paths where -supported, a location instance needs to be configured in a session with an -:term:`accessor` and :term:`structure`. - -.. note:: - - The standard builtin locations require no further setup or configuration - and it is not necessary to read the rest of this section to use them. - -Before continuing, make sure that you are familiar with the general concepts -of locations by reading the :ref:`locations/overview`. - -.. 
_locations/configuring/manually: - -Configuring manually -==================== - -Locations can be configured manually when using a session by retrieving the -location and setting the appropriate attributes:: - - location = session.query('Location where name is "my.location"').one() - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 50 - -.. _locations/configuring/automatically: - -Configuring automatically -========================= - -Often the configuration of locations should be determined by developers -looking after the core pipeline and so ftrack provides a way for a plugin to -be registered to configure the necessary locations for each session. This can -then be managed centrally if desired. - -The configuration is handled through the standard events system via a topic -*ftrack.api.session.configure-location*. Set up an :ref:`event listener plugin -` as normal with a register function that -accepts a :class:`~ftrack_api.session.Session` instance. Then register a -callback against the relevant topic to configure locations at the appropriate -time:: - - import ftrack_api - import ftrack_api.entity.location - import ftrack_api.accessor.disk - import ftrack_api.structure.id - - - def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - location = session.query('Location where name is "my.location"').one() - ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 50 - - - def register(session): - '''Register plugin with *session*.''' - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) - -.. 
note:: - - If you expect the plugin to also be evaluated by the legacy API, remember - to :ref:`validate the arguments `. - -So long as the directory containing the plugin exists on your -:envvar:`FTRACK_EVENT_PLUGIN_PATH`, the plugin will run for each session -created and any configured locations will then remain configured for the -duration of that related session. - -Be aware that you can configure many locations in one plugin or have separate -plugins for different locations - the choice is entirely up to you! diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst deleted file mode 100644 index ac1eaba649..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _developing/locations: - -********* -Locations -********* - -Learn how to access locations using the API and configure your own location -plugins. - -.. toctree:: - :maxdepth: 1 - - overview - tutorial - configuring diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst deleted file mode 100644 index 0a6ec171aa..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst +++ /dev/null @@ -1,143 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/overview: - -******** -Overview -******** - -Locations provides a way to easily track and manage data (files, image sequences -etc.) using ftrack. 
- -With locations it is possible to see where published data is in the world and -also to transfer data automatically between different locations, even different -storage mechanisms, by defining a few simple :term:`Python` plugins. By keeping -track of the size of the data it also helps manage storage capacity better. In -addition, the intrinsic links to production information makes assigning work to -others and transferring only the relevant data much simpler as well as greatly -reducing the burden on those responsible for archiving finished work. - -Concepts -======== - -The system is implemented in layers using a few key concepts in order to provide -a balance between out of the box functionality and custom configuration. - -.. _locations/overview/locations: - -Locations ---------- - -Data locations can be varied in scope and meaning - a facility, a laptop, a -specific drive. As such, rather than place a hard limit on what can be -considered a location, ftrack simply requires that a location be identifiable by -a string and that string be unique to that location. - -A global company with facilities in many different parts of the world might -follow a location naming convention similar to the following: - - * 'ftrack.london.server01' - * 'ftrack.london.server02' - * 'ftrack.nyc.server01' - * 'ftrack.amsterdam.server01' - * '..' - -Whereas, for a looser setup, the following might suit better: - - * 'bjorns-workstation' - * 'fredriks-mobile' - * 'martins-laptop' - * 'cloud-backup' - -Availability ------------- - -When tracking data across several locations it is important to be able to -quickly find out where data is available and where it is not. As such, ftrack -provides simple mechanisms for retrieving information on the availability of a -:term:`component` in each location. - -For a single file, the availability with be either 0% or 100%. 
For containers, -such as file sequences, each file is tracked separately and the availability of -the container calculated as an overall percentage (e.g. 47%). - -.. _locations/overview/accessors: - -Accessors ---------- - -Due to the flexibility of what can be considered a location, the system must be -able to cope with locations that represent different ways of storing data. For -example, data might be stored on a local hard drive, a cloud service or even in -a database. - -In addition, the method of accessing that storage can change depending on -perspective - local filesystem, FTP, S3 API etc. - -To handle this, ftrack introduces the idea of an :term:`accessor` that provides -access to the data in a standard way. An accessor is implemented in -:term:`Python` following a set interface and can be configured at runtime to -provide relevant access to a location. - -With an accessor configured for a location, it becomes possible to not only -track data, but also manage it through ftrack by using the accessor to add and -remove data from the location. - -At present, ftrack includes a :py:class:`disk accessor -` for local filesystem access. More will be -added over time and developers are encouraged to contribute their own. - -.. _locations/overview/structure: - -Structure ---------- - -Another important consideration for locations is how data should be structured -in the location (folder structure and naming conventions). For example, -different facilities may want to use different folder structures, or different -storage mechanisms may use different paths for the data. - -For this, ftrack supports the use of a :term:`Python` structure plugin. This -plugin is called when adding a :term:`component` to a location in order to -determine the correct structure to use. - -.. note:: - - A structure plugin accepts an ftrack entity as its input and so can be - reused for generating general structures as well. 
For example, an action - callback could be implemented to create the base folder structure for some - selected shots by reusing a structure plugin. - -.. _locations/overview/resource_identifiers: - -Resource identifiers --------------------- - -When a :term:`component` can be linked to multiple locations it becomes -necessary to store information about the relationship on the link rather than -directly on the :term:`component` itself. The most important information is the -path to the data in that location. - -However, as seen above, not all locations may be filesystem based or accessed -using standard filesystem protocols. For this reason, and to help avoid -confusion, this *path* is referred to as a :term:`resource identifier` and no -limitations are placed on the format. Keep in mind though that accessors use -this information (retrieved from the database) in order to work out how to -access the data, so the format used must be compatible with all the accessors -used for any one location. For this reason, most -:term:`resource identifiers ` should ideally look like -relative filesystem paths. - -.. _locations/overview/resource_identifiers/transformer: - -Transformer -^^^^^^^^^^^ - -To further support custom formats for -:term:`resource identifiers `, it is also possible to -configure a resource identifier transformer plugin which will convert -the identifiers before they are stored centrally and after they are retrieved. - -A possible use case of this might be to store JSON encoded metadata about a path -in the database and convert this to an actual filesystem path on retrieval. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst deleted file mode 100644 index 4c5a6c0f13..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst +++ /dev/null @@ -1,193 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/tutorial: - -******** -Tutorial -******** - -This tutorial is a walkthrough on how you interact with Locations using the -ftrack :term:`API`. Before you read this tutorial, make sure you familiarize -yourself with the location concepts by reading the :ref:`locations/overview`. - -All examples assume you are using Python 2.x, have the :mod:`ftrack_api` -module imported and a :class:`session ` created. - -.. code-block:: python - - import ftrack_api - session = ftrack_api.Session() - -.. _locations/creating-locations: - -Creating locations -================== - -Locations can be created just like any other entity using -:meth:`Session.create `:: - - location = session.create('Location', dict(name='my.location')) - session.commit() - -.. note:: - Location names beginning with ``ftrack.`` are reserved for internal use. Do - not use this prefix for your location names. - -To create a location only if it doesn't already exist use the convenience -method :meth:`Session.ensure `. This will return -either an existing matching location or a newly created one. - -Retrieving locations -==================== - -You can retrieve existing locations using the standard session -:meth:`~ftrack_api.session.Session.get` and -:meth:`~ftrack_api.session.Session.query` methods:: - - # Retrieve location by unique id. - location_by_id = session.get('Location', 'unique-id') - - # Retrieve location by name. 
- location_by_name = session.query( - 'Location where name is "my.location"' - ).one() - -To retrieve all existing locations use a standard query:: - - all_locations = session.query('Location').all() - for existing_location in all_locations: - print existing_location['name'] - -Configuring locations -===================== - -At this point you have created a custom location "my.location" in the database -and have an instance to reflect that. However, the location cannot be used in -this session to manage data unless it has been configured. To configure a -location for the session, set the appropriate attributes for accessor and -structure:: - - import tempfile - import ftrack_api.accessor.disk - import ftrack_api.structure.id - - # Assign a disk accessor with *temporary* storage - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=tempfile.mkdtemp() - ) - - # Assign using ID structure. - location.structure = ftrack_api.structure.id.IdStructure() - - # Set a priority which will be used when automatically picking locations. - # Lower number is higher priority. - location.priority = 30 - -To learn more about how to configure locations automatically in a session, see -:ref:`locations/configuring`. - -.. note:: - - If a location is not configured in a session it can still be used as a - standard entity and to find out availability of components - -Using components with locations -=============================== - -The Locations :term:`API` tries to use sane defaults to stay out of your way. 
-When creating :term:`components `, a location is automatically picked -using :meth:`Session.pick_location `:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_a = session.create_component(path=component_path) - -To override, specify a location explicitly:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_b = session.create_component( - path=component_path, location=location - ) - -If you set the location to ``None``, the component will only be present in the -special origin location for the duration of the session:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_c = session.create_component(path=component_path, location=None) - -After creating a :term:`component` in a location, it can be added to another -location by calling :meth:`Location.add_component -` and passing the location to -use as the *source* location:: - - origin_location = session.query( - 'Location where name is "ftrack.origin"' - ).one() - location.add_component(component_c, origin_location) - -To remove a component from a location use :meth:`Location.remove_component -`:: - - location.remove_component(component_b) - -Each location specifies whether to automatically manage data when adding or -removing components. To ensure that a location does not manage data, mixin the -relevant location mixin class before use:: - - import ftrack_api - import ftrack_api.entity.location - - ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - -Accessing paths -=============== - -The locations system is designed to help avoid having to deal with filesystem -paths directly. This is particularly important when you consider that a number -of locations won't provide any direct filesystem access (such as cloud storage). - -However, it is useful to still be able to get a filesystem path from locations -that support them (typically those configured with a -:class:`~ftrack_api.accessor.disk.DiskAccessor`). 
For example, you might need to -pass a filesystem path to another application or perform a copy using a faster -protocol. - -To retrieve the path if available, use :meth:`Location.get_filesystem_path -`:: - - print location.get_filesystem_path(component_c) - -Obtaining component availability -================================ - -Components in locations have a notion of availability. For regular components, -consisting of a single file, the availability would be either 0 if the -component is unavailable or 100 percent if the component is available in the -location. Composite components, like image sequences, have an availability -which is proportional to the amount of child components that have been added to -the location. - -For example, an image sequence might currently be in a state of being -transferred to :data:`test.location`. If half of the images are transferred, it -might be possible to start working with the sequence. To check availability use -the helper :meth:`Session.get_component_availability -` method:: - - print session.get_component_availability(component_c) - -There are also convenience methods on both :meth:`components -` and :meth:`locations -` for -retrieving availability as well:: - - print component_c.get_availability() - print location.get_component_availability(component_c) - -Location events -=============== - -If you want to receive event notifications when components are added to or -removed from locations, you can subscribe to the topics published, -:data:`ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC` or -:data:`ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC` and the callback -you want to be run. 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst deleted file mode 100644 index 7a200529ab..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/querying.rst +++ /dev/null @@ -1,263 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _querying: - -******** -Querying -******** - -.. currentmodule:: ftrack_api.session - -The API provides a simple, but powerful query language in addition to iterating -directly over entity attributes. Using queries can often substantially speed -up your code as well as reduce the amount of code written. - -A query is issued using :meth:`Session.query` and returns a list of matching -entities. The query always has a single *target* entity type that the query -is built against. This means that you cannot currently retrieve back a list of -different entity types in one query, though using :ref:`projections -` does allow retrieving related entities of a different -type in one go. - -The syntax for a query is: - -.. code-block:: none - - select from where - -However, both the selection of projections and criteria are optional. This means -the most basic query is just to fetch all entities of a particular type, such as -all projects in the system:: - - projects = session.query('Project') - -A query always returns a :class:`~ftrack_api.query.QueryResult` instance that -acts like a list with some special behaviour. The main special behaviour is that -the actual query to the server is not issued until you iterate or index into the -query results:: - - for project in projects: - print project['name'] - -You can also explicitly call :meth:`~ftrack_api.query.QueryResult.all` on the -result set:: - - projects = session.query('Project').all() - -.. note:: - - This behaviour exists in order to make way for efficient *paging* and other - optimisations in future. 
- -.. _querying/criteria: - -Using criteria to narrow results -================================ - -Often you will have some idea of the entities you want to retrieve. In this -case you can optimise your code by not fetching more data than you need. To do -this, add criteria to your query:: - - projects = session.query('Project where status is active') - -Each criteria follows the form: - -.. code-block:: none - - - -You can inspect the entity type or instance to find out which :ref:`attributes -` are available to filter on for a particular -entity type. The list of :ref:`operators ` that can -be applied and the types of values they expect is listed later on. - -.. _querying/criteria/combining: - -Combining criteria ------------------- - -Multiple criteria can be applied in a single expression by joining them with -either ``and`` or ``or``:: - - projects = session.query( - 'Project where status is active and name like "%thrones"' - ) - -You can use parenthesis to control the precedence when compound criteria are -used (by default ``and`` takes precedence):: - - projects = session.query( - 'Project where status is active and ' - '(name like "%thrones" or full_name like "%thrones")' - ) - -.. _querying/criteria/relationships: - -Filtering on relationships --------------------------- - -Filtering on relationships is also intuitively supported. 
Simply follow the -relationship using a dotted notation:: - - tasks_in_project = session.query( - 'Task where project.id is "{0}"'.format(project['id']) - ) - -This works even for multiple strides across relationships (though do note that -excessive strides can affect performance):: - - tasks_completed_in_project = session.query( - 'Task where project.id is "{0}" and ' - 'status.type.name is "Done"' - .format(project['id']) - ) - -The same works for collections (where each entity in the collection is compared -against the subsequent condition):: - - import arrow - - tasks_with_time_logged_today = session.query( - 'Task where timelogs.start >= "{0}"'.format(arrow.now().floor('day')) - ) - -In the above query, each *Task* that has at least one *Timelog* with a *start* -time greater than the start of today is returned. - -When filtering on relationships, the conjunctions ``has`` and ``any`` can be -used to specify how the criteria should be applied. This becomes important when -querying using multiple conditions on collection relationships. The relationship -condition can be written against the following form:: - - () - -For optimal performance ``has`` should be used for scalar relationships when -multiple conditions are involved. For example, to find notes by a specific -author when only name is known:: - - notes_written_by_jane_doe = session.query( - 'Note where author has (first_name is "Jane" and last_name is "Doe")' - ) - -This query could be written without ``has``, giving the same results:: - - notes_written_by_jane_doe = session.query( - 'Note where author.first_name is "Jane" and author.last_name is "Doe"' - ) - -``any`` should be used for collection relationships. 
For example, to find all -projects that have at least one metadata instance that has `key=some_key` -and `value=some_value` the query would be:: - - projects_where_some_key_is_some_value = session.query( - 'Project where metadata any (key=some_key and value=some_value)' - ) - -If the query was written without ``any``, projects with one metadata matching -*key* and another matching the *value* would be returned. - -``any`` can also be used to query for empty relationship collections:: - - users_without_timelogs = session.query( - 'User where not timelogs any ()' - ) - -.. _querying/criteria/operators: - -Supported operators -------------------- - -This is the list of currently supported operators: - -+--------------+----------------+----------------------------------------------+ -| Operators | Description | Example | -+==============+================+==============================================+ -| = | Exactly equal. | name is "martin" | -| is | | | -+--------------+----------------+----------------------------------------------+ -| != | Not exactly | name is_not "martin" | -| is_not | equal. | | -+--------------+----------------+----------------------------------------------+ -| > | Greater than | start after "2015-06-01" | -| after | exclusive. | | -| greater_than | | | -+--------------+----------------+----------------------------------------------+ -| < | Less than | end before "2015-06-01" | -| before | exclusive. | | -| less_than | | | -+--------------+----------------+----------------------------------------------+ -| >= | Greater than | bid >= 10 | -| | inclusive. | | -+--------------+----------------+----------------------------------------------+ -| <= | Less than | bid <= 10 | -| | inclusive. | | -+--------------+----------------+----------------------------------------------+ -| in | One of. | status.type.name in ("In Progress", "Done") | -+--------------+----------------+----------------------------------------------+ -| not_in | Not one of. 
| status.name not_in ("Omitted", "On Hold") | -+--------------+----------------+----------------------------------------------+ -| like | Matches | name like "%thrones" | -| | pattern. | | -+--------------+----------------+----------------------------------------------+ -| not_like | Does not match | name not_like "%thrones" | -| | pattern. | | -+--------------+----------------+----------------------------------------------+ -| has | Test scalar | author has (first_name is "Jane" and | -| | relationship. | last_name is "Doe") | -+--------------+----------------+----------------------------------------------+ -| any | Test collection| metadata any (key=some_key and | -| | relationship. | value=some_value) | -+--------------+----------------+----------------------------------------------+ - -.. _querying/projections: - -Optimising using projections -============================ - -In :ref:`understanding_sessions` we mentioned :ref:`auto-population -` of attribute values on access. This -meant that when iterating over a lot of entities and attributes a large number -of queries were being sent to the server. Ultimately, this can cause your code -to run slowly:: - - >>> projects = session.query('Project') - >>> for project in projects: - ... print( - ... # Multiple queries issued here for each attribute accessed for - ... # each project in the loop! - ... '{project[full_name]} - {project[status][name]})' - ... .format(project=project) - ... ) - - -Fortunately, there is an easy way to optimise. If you know what attributes you -are interested in ahead of time you can include them in your query string as -*projections* in order to fetch them in one go:: - - >>> projects = session.query( - ... 'select full_name, status.name from Project' - ... ) - >>> for project in projects: - ... print( - ... # No additional queries issued here as the values were already - ... # loaded by the above query! - ... '{project[full_name]} - {project[status][name]})' - ... 
.format(project=project) - ... ) - -Notice how this works for related entities as well. In the example above, we -also fetched the name of each *Status* entity attached to a project in the same -query, which meant that no further queries had to be issued when accessing those -nested attributes. - -.. note:: - - There are no arbitrary limits to the number (or depth) of projections, but - do be aware that excessive projections can ultimately result in poor - performance also. As always, it is about choosing the right tool for the - job. - -You can also customise the -:ref:`working_with_entities/entity_types/default_projections` to use for each -entity type when none are specified in the query string. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst deleted file mode 100644 index 0eef0b7407..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _release: - -*************************** -Release and migration notes -*************************** - -Find out information about what has changed between versions and any important -migration notes to be aware of when switching to a new version. - -.. toctree:: - :maxdepth: 1 - - release_notes - migration - migrating_from_old_api diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst deleted file mode 100644 index 699ccf224a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst +++ /dev/null @@ -1,613 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. 
_release/migrating_from_old_api: - -********************** -Migrating from old API -********************** - -.. currentmodule:: ftrack_api.session - -Why a new API? -============== - -With the introduction of Workflows, ftrack is capable of supporting a greater -diversity of industries. We're enabling teams to closely align the system with -their existing practices and naming conventions, resulting in a tool that feels -more natural and intuitive. The old API was locked to specific workflows, making -it impractical to support this new feature naturally. - -We also wanted this new flexibility to extend to developers, so we set about -redesigning the API to fully leverage the power in the system. And while we had -the wrenches out, we figured why not go that extra mile and build in some of the -features that we see developers having to continually implement in-house across -different companies - features such as caching and support for custom pipeline -extensions. In essence, we decided to build the API that, as pipeline -developers, we had always wanted from our production tracking and asset -management systems. We think we succeeded, and we hope you agree. - -Installing -========== - -Before, you used to download the API package from your ftrack instance. With -each release of the new API we make it available on :term:`PyPi`, and -installing is super simple: - -.. code-block:: none - - pip install ftrack-python-api - -Before installing, it is always good to check the latest -:ref:`release/release_notes` to see which version of the ftrack server is -required. - -.. seealso:: :ref:`installing` - -Overview -======== - -An API needs to be approachable, so we built the new API to feel -intuitive and familiar. 
We bundle all the core functionality into one place – a -session – with consistent methods for interacting with entities in the system:: - - import ftrack_api - session = ftrack_api.Session() - -The session is responsible for loading plugins and communicating with the ftrack -server and allows you to use multiple simultaneous sessions. You will no longer -need to explicitly call :meth:`ftrack.setup` to load plugins. - -The core methods are straightforward: - -Session.create - create a new entity, like a new version. -Session.query - fetch entities from the server using a powerful query language. -Session.delete - delete existing entities. -Session.commit - commit all changes in one efficient call. - -.. note:: - - The new API batches create, update and delete operations by default for - efficiency. To synchronise local changes with the server you need to call - :meth:`Session.commit`. - -In addition all entities in the API now act like simple Python dictionaries, -with some additional helper methods where appropriate. If you know a little -Python (or even if you don't) getting up to speed should be a breeze:: - - >>> print user.keys() - ['first_name', 'last_name', 'email', ...] - >>> print user['email'] - 'old@example.com' - >>> user['email'] = 'new@example.com' - -And of course, relationships between entities are reflected in a natural way as -well:: - - new_timelog = session.create('Timelog', {...}) - task['timelogs'].append(new_timelog) - -.. seealso :: :ref:`tutorial` - -The new API also makes use of caching in order to provide more efficient -retrieval of data by reducing the number of calls to the remote server. - -.. seealso:: :ref:`caching` - -Open source and standard code style -=================================== - -The new API is open source software and developed in public at -`Bitbucket `_. We welcome you -to join us in the development and create pull requests there. - -In the new API, we also follow the standard code style for Python, -:term:`PEP-8`. 
This means that you will now find that methods and variables are -written using ``snake_case`` instead of ``camelCase``, amongst other things. - -Package name -============ - -The new package is named :mod:`ftrack_api`. By using a new package name, we -enable you to use the old API and the new side-by-side in the same process. - -Old API:: - - import ftrack - -New API:: - - import ftrack_api - -Specifying your credentials -=========================== - -The old API used three environment variables to authenticate with your ftrack -instance. While these continue to work as before, you now also have -the option to specify them when initializing the session:: - - >>> import ftrack_api - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -In the examples below, will assume that you have imported the package and -created a session. - -.. seealso:: - - * :ref:`environment_variables` - * :ref:`tutorial` - - -Querying objects -================ - -The old API relied on predefined methods for querying objects and constructors -which enabled you to get an entity by it's id or name. 
- -Old API:: - - project = ftrack.getProject('dev_tutorial') - task = ftrack.Task('8923b7b3-4bf0-11e5-8811-3c0754289fd3') - user = ftrack.User('jane') - -New API:: - - project = session.query('Project where name is "dev_tutorial"').one() - task = session.get('Task', '8923b7b3-4bf0-11e5-8811-3c0754289fd3') - user = session.query('User where username is "jane"').one() - -While the new API can be a bit more verbose for simple queries, it is much more -powerful and allows you to filter on any field and preload related data:: - - tasks = session.query( - 'select name, parent.name from Task ' - 'where project.full_name is "My Project" ' - 'and status.type.short is "DONE" ' - 'and not timelogs any ()' - ).all() - -The above fetches all tasks for “My Project” that are done but have no timelogs. -It also pre-fetches related information about the tasks parent – all in one -efficient query. - -.. seealso:: :ref:`querying` - -Creating objects -================ - -In the old API, you create objects using specialized methods, such as -:meth:`ftrack.createProject`, :meth:`Project.createSequence` and -:meth:`Task.createShot`. - -In the new API, you can create any object using :meth:`Session.create`. In -addition, there are a few helper methods to reduce the amount of boilerplate -necessary to create certain objects. Don't forget to call :meth:`Session.commit` -once you have issued your create statements to commit your changes. - -As an example, let's look at populating a project with a few entities. 
- -Old API:: - - project = ftrack.getProject('migration_test') - - # Get default task type and status from project schema - taskType = project.getTaskTypes()[0] - taskStatus = project.getTaskStatuses(taskType)[0] - - sequence = project.createSequence('001') - - # Create five shots with one task each - for shot_number in xrange(10, 60, 10): - shot = sequence.createShot( - '{0:03d}'.format(shot_number) - ) - shot.createTask( - 'Task name', - taskType, - taskStatus - ) - - -New API:: - - project = session.query('Project where name is "migration_test"').one() - - # Get default task type and status from project schema - project_schema = project['project_schema'] - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - # Create sequence - sequence = session.create('Sequence', { - 'name': '001', - 'parent': project - }) - - # Create five shots with one task each - for shot_number in xrange(10, 60, 10): - shot = session.create('Shot', { - 'name': '{0:03d}'.format(shot_number), - 'parent': sequence, - 'status': default_shot_status - }) - session.create('Task', { - 'name': 'Task name', - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - # Commit all changes to the server. - session.commit() - -If you test the example above, one thing you might notice is that the new API -is much more efficient. Thanks to the transaction-based architecture in the new -API only a single call to the server is required to create all the objects. - -.. seealso:: :ref:`working_with_entities/creating` - -Updating objects -================ - -Updating objects in the new API works in a similar way to the old API. Instead -of using the :meth:`set` method on objects, you simply set the key of the -entity to the new value, and call :meth:`Session.commit` to persist the -changes to the database. 
- -The following example adjusts the duration and comment of a timelog for a -user using the old and new API, respectively. - -Old API:: - - import ftrack - - user = ftrack.User('john') - user.set('email', 'john@example.com') - -New API:: - - import ftrack_api - session = ftrack_api.Session() - - user = session.query('User where username is "john"').one() - user['email'] = 'john@example.com' - session.commit() - -.. seealso:: :ref:`working_with_entities/updating` - - -Date and datetime attributes -============================ - -In the old API, date and datetime attributes where represented using a standard -:mod:`datetime` object. In the new API we have opted to use the :term:`arrow` -library instead. Datetime attributes are represented in the server timezone, -but with the timezone information stripped. - -Old API:: - - >>> import datetime - - >>> task_old_api = ftrack.Task(task_id) - >>> task_old_api.get('startdate') - datetime.datetime(2015, 9, 2, 0, 0) - - >>> # Updating a datetime attribute - >>> task_old_api.set('startdate', datetime.date.today()) - -New API:: - - >>> import arrow - - >>> task_new_api = session.get('Task', task_id) - >>> task_new_api['start_date'] - - - >>> # In the new API, utilize the arrow library when updating a datetime. - >>> task_new_api['start_date'] = arrow.utcnow().floor('day') - >>> session.commit() - -Custom attributes -================= - -In the old API, custom attributes could be retrieved from an entity by using -the methods :meth:`get` and :meth:`set`, like standard attributes. In the new -API, custom attributes can be written and read from entities using the -``custom_attributes`` property, which provides a dictionary-like interface. 
- -Old API:: - - >>> task_old_api = ftrack.Task(task_id) - >>> task_old_api.get('my_custom_attribute') - - >>> task_old_api.set('my_custom_attribute', 'My new value') - - -New API:: - - >>> task_new_api = session.get('Task', task_id) - >>> task_new_api['custom_attributes']['my_custom_attribute'] - - - >>> task_new_api['custom_attributes']['my_custom_attribute'] = 'My new value' - -For more information on working with custom attributes and existing -limitations, please see: - -.. seealso:: - - :ref:`example/custom_attribute` - - -Using both APIs side-by-side -============================ - -With so many powerful new features and the necessary support for more flexible -workflows, we chose early on to not limit the new API design by necessitating -backwards compatibility. However, we also didn't want to force teams using the -existing API to make a costly all-or-nothing switchover. As such, we have made -the new API capable of coexisting in the same process as the old API:: - - import ftrack - import ftrack_api - -In addition, the old API will continue to be supported for some time, but do -note that it will not support the new `Workflows -`_ and will not have new features back ported -to it. - -In the first example, we obtain a task reference using the old API and -then use the new API to assign a user to it:: - - import ftrack - import ftrack_api - - # Create session for new API, authenticating using envvars. 
- session = ftrack_api.Session() - - # Obtain task id using old API - shot = ftrack.getShot(['migration_test', '001', '010']) - task = shot.getTasks()[0] - task_id = task.getId() - - user = session.query( - 'User where username is "{0}"'.format(session.api_user) - ).one() - session.create('Appointment', { - 'resource': user, - 'context_id': task_id, - 'type': 'assignment' - }) - -The second example fetches a version using the new API and uploads and sets a -thumbnail using the old API:: - - import arrow - import ftrack - - # fetch a version published today - version = session.query( - 'AssetVersion where date >= "{0}"'.format( - arrow.now().floor('day') - ) - ).first() - - # Create a thumbnail using the old api. - thumbnail_path = '/path/to/thumbnail.jpg' - version_old_api = ftrack.AssetVersion(version['id']) - thumbnail = version_old_api.createThumbnail(thumbnail_path) - - # Also set the same thumbnail on the task linked to the version. - task_old_api = ftrack.Task(version['task_id']) - task_old_api.setThumbnail(thumbnail) - -.. note:: - - It is now possible to set thumbnails using the new API as well, for more - info see :ref:`example/thumbnail`. - -Plugin registration -------------------- - -To make event and location plugin register functions work with both old and new -API the function should be updated to validate the input arguments. For old -plugins the register method should validate that the first input is of type -``ftrack.Registry``, and for the new API it should be of type -:class:`ftrack_api.session.Session`. - -If the input parameter is not validated, a plugin might be mistakenly -registered twice, since both the new and old API will look for plugins the -same directories. - -.. seealso:: - - :ref:`ftrack:release/migration/3.0.29/developer_notes/register_function` - - -Example: publishing a new version -================================= - -In the following example, we look at migrating a script which publishes a new -version with two components. 
- -Old API:: - - # Query a shot and a task to create the asset against. - shot = ftrack.getShot(['dev_tutorial', '001', '010']) - task = shot.getTasks()[0] - - # Create new asset. - asset = shot.createAsset(name='forest', assetType='geo') - - # Create a new version for the asset. - version = asset.createVersion( - comment='Added more leaves.', - taskid=task.getId() - ) - - # Get the calculated version number. - print version.getVersion() - - # Add some components. - previewPath = '/path/to/forest_preview.mov' - previewComponent = version.createComponent(path=previewPath) - - modelPath = '/path/to/forest_mode.ma' - modelComponent = version.createComponent(name='model', path=modelPath) - - # Publish. - asset.publish() - - # Add thumbnail to version. - thumbnail = version.createThumbnail('/path/to/forest_thumbnail.jpg') - - # Set thumbnail on other objects without duplicating it. - task.setThumbnail(thumbnail) - -New API:: - - # Query a shot and a task to create the asset against. - shot = session.query( - 'Shot where project.name is "dev_tutorial" ' - 'and parent.name is "001" and name is "010"' - ).one() - task = shot['children'][0] - - # Create new asset. - asset_type = session.query('AssetType where short is "geo"').first() - asset = session.create('Asset', { - 'parent': shot, - 'name': 'forest', - 'type': asset_type - }) - - # Create a new version for the asset. - status = session.query('Status where name is "Pending"').one() - version = session.create('AssetVersion', { - 'asset': asset, - 'status': status, - 'comment': 'Added more leaves.', - 'task': task - }) - - # In the new API, the version number is not set until we persist the changes - print 'Version number before commit: {0}'.format(version['version']) - session.commit() - print 'Version number after commit: {0}'.format(version['version']) - - # Add some components. 
- preview_path = '/path/to/forest_preview.mov' - preview_component = version.create_component(preview_path, location='auto') - - model_path = '/path/to/forest_mode.ma' - model_component = version.create_component(model_path, { - 'name': 'model' - }, location='auto') - - # Publish. Newly created version defaults to being published in the new api, - # but if set to false you can update it by setting the key on the version. - version['is_published'] = True - - # Persist the changes - session.commit() - - # Add thumbnail to version. - thumbnail = version.create_thumbnail( - '/path/to/forest_thumbnail.jpg' - ) - - # Set thumbnail on other objects without duplicating it. - task['thumbnail'] = thumbnail - session.commit() - - -Workarounds for missing convenience methods -=========================================== - -Query object by path --------------------- - -In the old API, there existed a convenience methods to get an object by -referencing the path (i.e object and parent names). - -Old API:: - - shot = ftrack.getShot(['dev_tutorial', '001', '010']) - -New API:: - - shot = session.query( - 'Shot where project.name is "dev_tutorial" ' - 'and parent.name is "001" and name is "010"' - ) - - -Retrieving an object's parents ------------------------------- - -To retrieve a list of an object's parents, you could call the method -:meth:`getParents` in the old API. Currently, it is not possible to fetch this -in a single call using the new API, so you will have to traverse the ancestors -one-by-one and fetch each object's parent. - -Old API:: - - parents = task.getParents() - -New API:: - - parents = [] - for item in task['link'][:-1]: - parents.append(session.get(item['type'], item['id'])) - -Note that link includes the task itself so `[:-1]` is used to only retreive the -parents. To learn more about the `link` attribute, see -:ref:`Using link attributes example`. 
- -Limitations in the current version of the API -============================================= - -The new API is still quite young and in active development and there are a few -limitations currently to keep in mind when using it. - -Missing schemas ---------------- - -The following entities are as of the time of writing not currently available -in the new API. Let us know if you depend on any of them. - - * Booking - * Calendar and Calendar Type - * Dependency - * Manager and Manager Type - * Phase - * Role - * Task template - * Temp data - -Action base class ------------------ -There is currently no helper class for creating actions using the new API. We -will add one in the near future. - -In the meantime, it is still possible to create actions without the base class -by listening and responding to the -:ref:`ftrack:developing/events/list/ftrack.action.discover` and -:ref:`ftrack:developing/events/list/ftrack.action.launch` events. - -Legacy location ---------------- - -The ftrack legacy disk locations utilizing the -:class:`InternalResourceIdentifierTransformer` has been deprecated. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst deleted file mode 100644 index 1df2211f96..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _release/migration: - -*************** -Migration notes -*************** - -.. note:: - - Migrating from the old ftrack API? Read the dedicated :ref:`guide - `. - -Migrate to upcoming 2.0.0 -========================= - -.. 
_release/migration/2.0.0/event_hub: - -Default behavior for connecting to event hub --------------------------------------------- - -The default behavior for the `ftrack_api.Session` class will change -for the argument `auto_connect_event_hub`, the default value will -switch from True to False. In order for code relying on the event hub -to continue functioning as expected you must modify your code -to explicitly set the argument to True or that you manually call -`session.event_hub.connect()`. - -.. note:: - If you rely on the `ftrack.location.component-added` or - `ftrack.location.component-removed` events to further process created - or deleted components remember that your session must be connected - to the event hub for the events to be published. - - -Migrate to 1.0.3 -================ - -.. _release/migration/1.0.3/mutating_dictionary: - -Mutating custom attribute dictionary ------------------------------------- - -Custom attributes can no longer be set by mutating entire dictionary:: - - # This will result in an error. - task['custom_attributes'] = dict(foo='baz', bar=2) - session.commit() - -Instead the individual values should be changed:: - - # This works better. - task['custom_attributes']['foo'] = 'baz' - task['custom_attributes']['bar'] = 2 - session.commit() - -Migrate to 1.0.0 -================ - -.. _release/migration/1.0.0/chunked_transfer: - -Chunked accessor transfers --------------------------- - -Data transfers between accessors is now buffered using smaller chunks instead of -all data at the same time. Included accessor file representations such as -:class:`ftrack_api.data.File` and :class:`ftrack_api.accessor.server.ServerFile` -are built to handle that. If you have written your own accessor and file -representation you may have to update it to support multiple reads using the -limit parameter and multiple writes. - -Migrate to 0.2.0 -================ - -.. 
_release/migration/0.2.0/new_api_name: - -New API name ------------- - -In this release the API has been renamed from `ftrack` to `ftrack_api`. This is -to allow both the old and new API to co-exist in the same environment without -confusion. - -As such, any scripts using this new API need to be updated to import -`ftrack_api` instead of `ftrack`. For example: - -**Previously**:: - - import ftrack - import ftrack.formatter - ... - -**Now**:: - - import ftrack_api - import ftrack_api.formatter - ... diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst deleted file mode 100644 index d7978ac0b8..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst +++ /dev/null @@ -1,1478 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _release/release_notes: - -************* -Release Notes -************* - -.. currentmodule:: ftrack_api.session - -.. release:: 1.8.2 - :date: 2020-01-14 - - .. change:: fixed - :tag: Test - - test_ensure_entity_with_non_string_data_types test fails due to missing parents. - - .. change:: changed - :tags: session - - Use WeakMethod when registering atexit handler to prevent memory leak. - -.. release:: 1.8.1 - :date: 2019-10-30 - - .. change:: changed - :tags: Location - - Increase chunk size for file operations to 1 Megabyte. - This value can now also be set from the environment variable: - - :envvar:`FTRACK_API_FILE_CHUNK_SIZE` - - .. change:: new - :tag: setup - - Add check for correct python version when installing with pip. - - .. change:: new - :tags: Notes - - Add support for note labels in create_note helper method. - - .. change:: changed - :tags: session - - Ensure errors from server are fully reported with stack trace. - -.. release:: 1.8.0 - :date: 2019-02-21 - - .. 
change:: fixed - :tags: documentation - - Event description component-removed report component-added event signature. - - .. change:: new - :tags: session, attribute - - Add new scalar type `object` to factory. - - .. change:: new - :tags: session, attribute - - Add support for list of `computed` attributes as part of schema - definition. A computed attribute is derived on the server side, and can - be time dependentant and differ between users. As such a computed - attribute is not suitable for long term encoding and will not be encoded - with the `persisted_only` stragey. - - .. change:: changed - - The `delayed_job` method has been deprecated in favour of a direct - `Session.call`. See :ref:`example/sync_with_ldap` for example - usage. - - .. change:: changed - - Private method :meth:`Session._call` has been converted to - a public method, :meth:`Session.call`. - - The private method will continue to work, but a pending deprecation - warning will be issued when used. The private method will be removed - entirely in version 2.0. - - .. change:: changed - :tags: session, events - - Event server connection error is too generic, - the actual error is now reported to users. - -.. release:: 1.7.1 - :date: 2018-11-13 - - .. change:: fixed - :tags: session, events - - Meta events for event hub connect and disconnect does not include - source. - - .. change:: fixed - :tags: session, location - - Missing context argument to - :meth:`ResourceIdentifierTransformer.decode` - in :meth:`Location.get_resource_identifier`. - -.. release:: 1.7.0 - :date: 2018-07-27 - - .. change:: new - :tags: session, events - - Added new events :ref:`event_list/ftrack.api.session.ready` and - :ref:`event_list/ftrack.api.session.reset` which can be used to perform - operations after the session is ready or has been reset, respectively. - - .. change:: changed - - Private method :meth:`Session._entity_reference` has been converted to - a public method, :meth:`Session.entity_reference`. 
- - The private method will continue to work, but a pending deprecation - warning will be issued when used. The private method will be removed - entirely in version 2.0. - - .. change:: fixed - :tags: session, events - - :meth:`Session.close` raises an exception if event hub was explicitly - connected after session initialization. - -.. release:: 1.6.0 - :date: 2018-05-17 - - .. change:: new - :tags: depreciation, events - - In version 2.0.0 of the `ftrack-python-api` the default behavior for - the :class:`Session` class will change for the argument - *auto_connect_event_hub*, the default value will switch from *True* to - *False*. - - A warning will now be emitted if async events are published or - subscribed to without *auto_connect_event_hub* has not explicitly been - set to *True*. - - .. seealso:: :ref:`release/migration/2.0.0/event_hub`. - - .. change:: fixed - :tags: documentation - - Event payload not same as what is being emitted for - :ref:`event_list/ftrack.location.component-added` and - :ref:`event_list/ftrack.location.component-removed`. - - .. change:: fixed - :tags: events - - Pyparsing is causing random errors in a threaded environment. - -.. release:: 1.5.0 - :date: 2018-04-19 - - .. change:: fixed - :tags: session, cache - - Cached entities not updated correctly when fetched in a nested - query. - -.. release:: 1.4.0 - :date: 2018-02-05 - - .. change:: fixed - :tags: session, cache - - Collection attributes not merged correctly when fetched from - server. - - .. change:: new - :tags: session, user, api key - - New function :meth:`ftrack_api.session.Session.reset_remote` allows - resetting of attributes to their default value. A convenience method - for resetting a users api key utalizing this was also added - :meth:`ftrack_api.entity.user.User.reset_api_key`. - - .. seealso:: :ref:`working_with_entities/resetting` - - .. change:: new - - Add support for sending out invitation emails to users. - See :ref:`example/invite_user` for example usage. 
- - .. change:: changed - :tags: cache, performance - - Entities fetched from cache are now lazily merged. Improved - performance when dealing with highly populated caches. - -.. release:: 1.3.3 - :date: 2017-11-16 - - - .. change:: new - :tags: users, ldap - - Add support for triggering a synchronization of - users between ldap and ftrack. See :ref:`example/sync_with_ldap` - for example usage. - - .. note:: - - This requires that you run ftrack 3.5.10 or later. - - .. change:: fixed - :tags: metadata - - Not possible to set metadata on creation. - -.. release:: 1.3.2 - :date: 2017-09-18 - - - .. change:: new - :tags: task template - - Added example for managing task templates through the API. See - :ref:`example/task_template` for example usage. - - .. change:: fixed - :tags: custom attributes - - Not possible to set hierarchical custom attributes on an entity that - has not been committed. - - .. change:: fixed - :tags: custom attributes - - Not possible to set custom attributes on an `Asset` that has not been - committed. - - .. change:: fixed - :tags: metadata - - Not possible to set metadata on creation. - -.. release:: 1.3.1 - :date: 2017-07-21 - - .. change:: fixed - :tags: session, events - - Calling disconnect on the event hub is slow. - -.. release:: 1.3.0 - :date: 2017-07-17 - - .. change:: new - :tags: session - - Support using a :class:`Session` as a context manager to aid closing of - session after use:: - - with ftrack_api.Session() as session: - # Perform operations with session. - - .. change:: new - :tags: session - - :meth:`Session.close` automatically called on Python exit if session not - already closed. - - .. change:: new - :tags: session - - Added :meth:`Session.close` to properly close a session's connections to - the server(s) as well as ensure event listeners are properly - unsubscribed. - - .. 
change:: new - - Added :exc:`ftrack_api.exception.ConnectionClosedError` to represent - error caused when trying to access servers over closed connection. - -.. release:: 1.2.0 - :date: 2017-06-16 - - .. change:: changed - :tags: events - - Updated the websocket-client dependency to version >= 0.40.0 to allow - for http proxies. - - .. change:: fixed - :tags: documentation - - The :ref:`example/publishing` example incorrectly stated that a - location would be automatically picked if the *location* keyword - argument was omitted. - -.. release:: 1.1.1 - :date: 2017-04-27 - - .. change:: fixed - :tags: custom attributes - - Cannot use custom attributes for `Asset` in ftrack versions prior to - `3.5.0`. - - .. change:: fixed - :tags: documentation - - The :ref:`example ` - section for managing `text` custom attributes is not correct. - -.. release:: 1.1.0 - :date: 2017-03-08 - - .. change:: new - :tags: server location, thumbnail - - Added method :meth:`get_thumbnail_url() ` - to server location, which can be used to retrieve a thumbnail URL. - See :ref:`example/thumbnail/url` for example usage. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on how to manage entity - links from the API. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on - how to manage custom attribute configurations from the API. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on how to use - `SecurityRole` and `UserSecurityRole` to manage security roles for - users. - - .. change:: new - :tags: documentation - - Added :ref:`examples ` to show how - to list a user's assigned tasks and all users assigned to a task. - - .. change:: changed - :tags: session, plugins - - Added *plugin_arguments* to :class:`Session` to allow passing of - optional keyword arguments to discovered plugin register functions. 
Only - arguments defined in a plugin register function signature are passed so - existing plugin register functions do not need updating if the new - functionality is not desired. - - .. change:: fixed - :tags: documentation - - The :ref:`example/project` example can be confusing since the project - schema may not contain the necessary object types. - - .. change:: fixed - :tags: documentation - - Query tutorial article gives misleading information about the ``has`` - operator. - - .. change:: fixed - :tags: session - - Size is not set on sequence components when using - :meth:`Session.create_component`. - -.. release:: 1.0.4 - :date: 2017-01-13 - - .. change:: fixed - :tags: custom attributes - - Custom attribute values cannot be set on entities that are not - persisted. - - .. change:: fixed - :tags: events - - `username` in published event's source data is set to the operating - system user and not the API user. - -.. release:: 1.0.3 - :date: 2017-01-04 - - .. change:: changed - :tags: session, custom attributes - - Increased performance of custom attributes and better support for - filtering when using a version of ftrack that supports non-sparse - attribute values. - - .. change:: changed - :tags: session, custom attributes - - Custom attributes can no longer be set by mutating entire dictionary. - - .. seealso:: :ref:`release/migration/1.0.3/mutating_dictionary`. - -.. release:: 1.0.2 - :date: 2016-11-17 - - .. change:: changed - :tags: session - - Removed version restriction for higher server versions. - -.. release:: 1.0.1 - :date: 2016-11-11 - - .. change:: fixed - - :meth:`EventHub.publish ` - *on_reply* callback only called for first received reply. It should be - called for all relevant replies received. - -.. release:: 1.0.0 - :date: 2016-10-28 - - .. change:: new - :tags: session - - :meth:`Session.get_upload_metadata` has been added. - - .. 
change:: changed - :tags: locations, backwards-incompatible - - Data transfer between locations using accessors is now chunked to avoid - reading large files into memory. - - .. seealso:: :ref:`release/migration/1.0.0/chunked_transfer`. - - .. change:: changed - :tags: server accessor - - :class:`ftrack_api.accessor.server.ServerFile` has been refactored to - work with large files more efficiently. - - .. change:: changed - :tags: server accessor - - :class:`ftrack_api.accessor.server.ServerFile` has been updated to use - the get_upload_metadata API endpoint instead of - /component/getPutMetadata. - - .. change:: changed - :tags: locations - - :class:`ftrack_api.data.String` is now using a temporary file instead of - StringIO to avoid reading large files into memory. - - .. change:: fixed - :tags: session, locations - - `ftrack.centralized-storage` does not properly validate location - selection during user configuration. - -.. release:: 0.16.0 - :date: 2016-10-18 - - .. change:: new - :tags: session, encode media - - :meth:`Session.encode_media` can now automatically associate the output - with a version by specifying a *version_id* keyword argument. A new - helper method on versions, :meth:`AssetVersion.encode_media - `, can be - used to make versions playable in a browser. A server version of 3.3.32 - or higher is required for it to function properly. - - .. seealso:: :ref:`example/encode_media`. - - .. change:: changed - :tags: session, encode media - - You can now decide if :meth:`Session.encode_media` should keep or - delete the original component by specifying the *keep_original* - keyword argument. - - .. change:: changed - :tags: backwards-incompatible, collection - - Collection mutation now stores collection instance in operations rather - than underlying data structure. - - .. change:: changed - :tags: performance - - Improve performance of commit operations by optimising encoding and - reducing payload sent to server. - - .. 
change:: fixed - :tags: documentation - - Asset parent variable is declared but never used in - :ref:`example/publishing`. - - .. change:: fixed - :tags: documentation - - Documentation of hierarchical attributes and their limitations are - misleading. See :ref:`example/custom_attribute`. - -.. release:: 0.15.5 - :date: 2016-08-12 - - .. change:: new - :tags: documentation - - Added two new examples for :ref:`example/publishing` and - :ref:`example/web_review`. - - .. change:: fixed - :tags: session, availability - - :meth:`Session.get_component_availabilities` ignores passed locations - shortlist and includes all locations in returned availability mapping. - - .. change:: fixed - :tags: documentation - - Source distribution of ftrack-python-api does not include ftrack.css - in the documentation. - -.. release:: 0.15.4 - :date: 2016-07-12 - - .. change:: fixed - :tags: querying - - Custom offset not respected by - :meth:`QueryResult.first `. - - .. change:: changed - :tags: querying - - Using a custom offset with :meth:`QueryResult.one - ` helper method now raises an - exception as an offset is inappropriate when expecting to select a - single item. - - .. change:: fixed - :tags: caching - - :meth:`LayeredCache.remove ` - incorrectly raises :exc:`~exceptions.KeyError` if key only exists in - sub-layer cache. - -.. release:: 0.15.3 - :date: 2016-06-30 - - .. change:: fixed - :tags: session, caching - - A newly created entity now has the correct - :attr:`ftrack_api.symbol.CREATED` state when checked in caching layer. - Previously the state was :attr:`ftrack_api.symbol.NOT_SET`. Note that - this fix causes a change in logic and the stored - :class:`ftrack_api.operation.CreateEntityOperation` might hold data that - has not been fully :meth:`merged `. - - .. change:: fixed - :tags: documentation - - The second example in the assignments article is not working. - - .. 
change:: changed - :tags: session, caching - - A callable cache maker can now return ``None`` to indicate that it could - not create a suitable cache, but :class:`Session` instantiation can - continue safely. - -.. release:: 0.15.2 - :date: 2016-06-02 - - .. change:: new - :tags: documentation - - Added an example on how to work with assignments and allocations - :ref:`example/assignments_and_allocations`. - - .. change:: new - :tags: documentation - - Added :ref:`example/entity_links` article with - examples of how to manage asset version dependencies. - - .. change:: fixed - :tags: performance - - Improve performance of large collection management. - - .. change:: fixed - - Entities are not hashable because - :meth:`ftrack_api.entity.base.Entity.__hash__` raises `TypeError`. - -.. release:: 0.15.1 - :date: 2016-05-02 - - .. change:: fixed - :tags: collection, attribute, performance - - Custom attribute configurations does not cache necessary keys, leading - to performance issues. - - .. change:: fixed - :tags: locations, structure - - Standard structure does not work if version relation is not set on - the `Component`. - -.. release:: 0.15.0 - :date: 2016-04-04 - - .. change:: new - :tags: session, locations - - `ftrack.centralized-storage` not working properly on Windows. - -.. release:: 0.14.0 - :date: 2016-03-14 - - .. change:: changed - :tags: session, locations - - The `ftrack.centralized-storage` configurator now validates that name, - label and description for new locations are filled in. - - .. change:: new - :tags: session, client review - - Added :meth:`Session.send_review_session_invite` and - :meth:`Session.send_review_session_invites` that can be used to inform - review session invitees about a review session. - - .. seealso:: :ref:`Usage guide `. - - .. change:: new - :tags: session, locations - - Added `ftrack.centralized-storage` configurator as a private module. 
It - implements a wizard like interface used to configure a centralised - storage scenario. - - .. change:: new - :tags: session, locations - - `ftrack.centralized-storage` storage scenario is automatically - configured based on information passed from the server with the - `query_server_information` action. - - .. change:: new - :tags: structure - - Added :class:`ftrack_api.structure.standard.StandardStructure` with - hierarchy based resource identifier generation. - - .. change:: new - :tags: documentation - - Added more information to the :ref:`understanding_sessions/plugins` - article. - - .. change:: fixed - - :meth:`~ftrack_api.entity.user.User.start_timer` arguments *comment* - and *name* are ignored. - - .. change:: fixed - - :meth:`~ftrack_api.entity.user.User.stop_timer` calculates the wrong - duration when the server is not running in UTC. - - For the duration to be calculated correctly ftrack server version - >= 3.3.15 is required. - -.. release:: 0.13.0 - :date: 2016-02-10 - - .. change:: new - :tags: component, thumbnail - - Added improved support for handling thumbnails. - - .. seealso:: :ref:`example/thumbnail`. - - .. change:: new - :tags: session, encode media - - Added :meth:`Session.encode_media` that can be used to encode - media to make it playable in a browser. - - .. seealso:: :ref:`example/encode_media`. - - .. change:: fixed - - :meth:`Session.commit` fails when setting a custom attribute on an asset - version that has been created and committed in the same session. - - .. change:: new - :tags: locations - - Added :meth:`ftrack_api.entity.location.Location.get_url` to retrieve a - URL to a component in a location if supported by the - :class:`ftrack_api.accessor.base.Accessor`. - - .. change:: new - :tags: documentation - - Updated :ref:`example/note` and :ref:`example/job` articles with - examples of how to use note and job components. - - .. 
change:: changed - :tags: logging, performance - - Logged messages now evaluated lazily using - :class:`ftrack_api.logging.LazyLogMessage` as optimisation. - - .. change:: changed - :tags: session, events - - Auto connection of event hub for :class:`Session` now takes place in - background to improve session startup time. - - .. change:: changed - :tags: session, events - - Event hub connection timeout is now 60 seconds instead of 10. - - .. change:: changed - :tags: server version - - ftrack server version >= 3.3.11, < 3.4 required. - - .. change:: changed - :tags: querying, performance - - :class:`ftrack_api.query.QueryResult` now pages internally using a - specified page size in order to optimise record retrieval for large - query results. :meth:`Session.query` has also been updated to allow - passing a custom page size at runtime if desired. - - .. change:: changed - :tags: querying, performance - - Increased performance of :meth:`~ftrack_api.query.QueryResult.first` and - :meth:`~ftrack_api.query.QueryResult.one` by using new `limit` syntax. - -.. release:: 0.12.0 - :date: 2015-12-17 - - .. change:: new - :tags: session, widget url - - Added :meth:`ftrack_api.session.Session.get_widget_url` to retrieve an - authenticated URL to info or tasks widgets. - -.. release:: 0.11.0 - :date: 2015-12-04 - - .. change:: new - :tags: documentation - - Updated :ref:`release/migrating_from_old_api` with new link attribute - and added a :ref:`usage example `. - - .. change:: new - :tags: caching, schemas, performance - - Caching of schemas for increased performance. - :meth:`ftrack_api.session.Session` now accepts `schema_cache_path` - argument to specify location of schema cache. If not set it will use a - temporary folder. - -.. release:: 0.10.0 - :date: 2015-11-24 - - .. change:: changed - :tags: tests - - Updated session test to use mocked schemas for encoding tests. - - .. 
change:: fixed - - Documentation specifies Python 2.6 instead of Python 2.7 as minimum - interpreter version. - - .. change:: fixed - - Documentation does not reflect current dependencies. - - .. change:: changed - :tags: session, component, locations, performance - - Improved performance of - :meth:`ftrack_api.entity.location.Location.add_components` by batching - database operations. - - As a result it is no longer possible to determine progress of transfer - for container components in realtime as events will be emitted in batch - at end of operation. - - In addition, it is now the callers responsibility to clean up any - transferred data should an error occur during either data transfer or - database registration. - - .. change:: changed - :tags: exception, locations - - :exc:`ftrack_api.exception.ComponentInLocationError` now accepts either - a single component or multiple components and makes them available as - *components* in its *details* parameter. - - .. change:: changed - :tags: tests - - Updated session test to not fail on the new private link attribute. - - .. change:: changed - :tags: session - - Internal method :py:meth:`_fetch_schemas` has beed renamed to - :py:meth:`Session._load_schemas` and now requires a `schema_cache_path` - argument. - -.. release:: 0.9.0 - :date: 2015-10-30 - - .. change:: new - :tags: caching - - Added :meth:`ftrack_api.cache.Cache.values` as helper for retrieving - all values in cache. - - .. change:: fixed - :tags: session, caching - - :meth:`Session.merge` redundantly attempts to expand entity references - that have already been expanded causing performance degradation. - - .. change:: new - :tags: session - - :meth:`Session.rollback` has been added to support cleanly reverting - session state to last good state following a failed commit. - - .. change:: changed - :tags: events - - Event hub will no longer allow unverified SSL connections. - - .. seealso:: :ref:`security_and_authentication`. - - .. 
change:: changed - :tags: session - - :meth:`Session.reset` no longer resets the connection. It also clears - all local state and re-configures certain aspects that are cache - dependant, such as location plugins. - - .. change:: fixed - :tags: factory - - Debug logging messages using incorrect index for formatting leading to - misleading exception. - -.. release:: 0.8.4 - :date: 2015-10-08 - - .. change:: new - - Added initial support for custom attributes. - - .. seealso:: :ref:`example/custom_attribute`. - - .. change:: new - :tags: collection, attribute - - Added :class:`ftrack_api.collection.CustomAttributeCollectionProxy` and - :class:`ftrack_api.attribute.CustomAttributeCollectionAttribute` to - handle custom attributes. - - .. change:: changed - :tags: collection, attribute - - ``ftrack_api.attribute.MappedCollectionAttribute`` renamed to - :class:`ftrack_api.attribute.KeyValueMappedCollectionAttribute` to more - closely reflect purpose. - - .. change:: changed - :tags: collection - - :class:`ftrack_api.collection.MappedCollectionProxy` has been refactored - as a generic base class with key, value specialisation handled in new - dedicated class - :class:`ftrack_api.collection.KeyValueMappedCollectionProxy`. This is - done to avoid confusion following introduction of new - :class:`ftrack_api.collection.CustomAttributeCollectionProxy` class. - - .. change:: fixed - :tags: events - - The event hub does not always reconnect after computer has come back - from sleep. - -.. release:: 0.8.3 - :date: 2015-09-28 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2.1, < 3.4 required. - - .. change:: changed - - Updated *ftrack.server* location implementation. A server version of 3.3 - or higher is required for it to function properly. - - .. change:: fixed - - :meth:`ftrack_api.entity.factory.StandardFactory.create` not respecting - *bases* argument. - -.. release:: 0.8.2 - :date: 2015-09-16 - - .. 
change:: fixed - :tags: session - - Wrong file type set on component when publishing image sequence using - :meth:`Session.create_component`. - -.. release:: 0.8.1 - :date: 2015-09-08 - - .. change:: fixed - :tags: session - - :meth:`Session.ensure` not implemented. - -.. release:: 0.8.0 - :date: 2015-08-28 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2.1, < 3.3 required. - - .. change:: new - - Added lists example. - - .. seealso:: :ref:`example/list`. - - .. change:: new - - Added convenience methods for handling timers - :class:`~ftrack_api.entity.user.User.start_timer` and - :class:`~ftrack_api.entity.user.User.stop_timer`. - - .. change:: changed - - The dynamic API classes Type, Status, Priority and - StatusType have been renamed to Type, Status, Priority and State. - - .. change:: changed - - :meth:`Session.reset` now also clears the top most level cache (by - default a :class:`~ftrack_api.cache.MemoryCache`). - - .. change:: fixed - - Some invalid server url formats not detected. - - .. change:: fixed - - Reply events not encoded correctly causing them to be misinterpreted by - the server. - -.. release:: 0.7.0 - :date: 2015-08-24 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2, < 3.3 required. - - .. change:: changed - - Removed automatic set of default statusid, priorityid and typeid on - objects as that is now either not mandatory or handled on server. - - .. change:: changed - - Updated :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_statuses` - and :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_types` to - handle custom objects. - -.. release:: 0.6.0 - :date: 2015-08-19 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.1.8, < 3.2 required. - - .. change:: changed - :tags: querying, documentation - - Updated documentation with details on new operators ``has`` and ``any`` - for querying relationships. - - .. 
seealso:: :ref:`querying/criteria/operators` - -.. release:: 0.5.2 - :date: 2015-07-29 - - .. change:: changed - :tags: server version - - ftrack server version 3.1.5 or greater required. - - .. change:: changed - - Server reported errors are now more readable and are no longer sometimes - presented as an HTML page. - -.. release:: 0.5.1 - :date: 2015-07-06 - - .. change:: changed - - Defaults computed by :class:`~ftrack_api.entity.factory.StandardFactory` - are now memoised per session to improve performance. - - .. change:: changed - - :class:`~ftrack_api.cache.Memoiser` now supports a *return_copies* - parameter to control whether deep copies should be returned when a value - was retrieved from the cache. - -.. release:: 0.5.0 - :date: 2015-07-02 - - .. change:: changed - - Now checks for server compatibility and requires an ftrack server - version of 3.1 or greater. - - .. change:: new - - Added convenience methods to :class:`~ftrack_api.query.QueryResult` to - fetch :meth:`~ftrack_api.query.QueryResult.first` or exactly - :meth:`~ftrack_api.query.QueryResult.one` result. - - .. change:: new - :tags: notes - - Added support for handling notes. - - .. seealso:: :ref:`example/note`. - - .. change:: changed - - Collection attributes generate empty collection on first access when no - remote value available. This allows interacting with a collection on a - newly created entity before committing. - - .. change:: fixed - :tags: session - - Ambiguous error raised when :class:`Session` is started with an invalid - user or key. - - .. change:: fixed - :tags: caching, session - - :meth:`Session.merge` fails against - :class:`~ftrack_api.cache.SerialisedCache` when circular reference - encountered due to entity identity not being prioritised in merge. - -.. release:: 0.4.3 - :date: 2015-06-29 - - .. change:: fixed - :tags: plugins, session, entity types - - Entity types not constructed following standard install. 
- - This is because the discovery of the default plugins is unreliable - across Python installation processes (pip, wheel etc). Instead, the - default plugins have been added as templates to the :ref:`event_list` - documentation and the - :class:`~ftrack_api.entity.factory.StandardFactory` used to create any - missing classes on :class:`Session` startup. - -.. release:: 0.4.2 - :date: 2015-06-26 - - .. change:: fixed - :tags: metadata - - Setting exact same metadata twice can cause - :exc:`~ftrack_api.exception.ImmutableAttributeError` to be incorrectly - raised. - - .. change:: fixed - :tags: session - - Calling :meth:`Session.commit` does not clear locally set attribute - values leading to immutability checks being bypassed in certain cases. - -.. release:: 0.4.1 - :date: 2015-06-25 - - .. change:: fixed - :tags: metadata - - Setting metadata twice in one session causes `KeyError`. - -.. release:: 0.4.0 - :date: 2015-06-22 - - .. change:: changed - :tags: documentation - - Documentation extensively updated. - - .. change:: new - :tags: Client review - - Added support for handling review sessions. - - .. seealso:: :ref:`Usage guide `. - - .. change:: fixed - - Metadata property not working in line with rest of system, particularly - the caching framework. - - .. change:: new - :tags: collection - - Added :class:`ftrack_api.collection.MappedCollectionProxy` class for - providing a dictionary interface to a standard - :class:`ftrack_api.collection.Collection`. - - .. change:: new - :tags: collection, attribute - - Added :class:`ftrack_api.attribute.MappedCollectionAttribute` class for - describing an attribute that should use the - :class:`ftrack_api.collection.MappedCollectionProxy`. - - .. change:: new - - Entities that use composite primary keys are now fully supported in the - session, including for :meth:`Session.get` and :meth:`Session.populate`. - - .. 
change:: change - - Base :class:`ftrack_api.entity.factory.Factory` refactored to separate - out attribute instantiation into dedicated methods to make extending - simpler. - - .. change:: change - :tags: collection, attribute - - :class:`ftrack_api.attribute.DictionaryAttribute` and - :class:`ftrack_api.attribute.DictionaryAttributeCollection` removed. - They have been replaced by the new - :class:`ftrack_api.attribute.MappedCollectionAttribute` and - :class:`ftrack_api.collection.MappedCollectionProxy` respectively. - - .. change:: new - :tags: events - - :class:`Session` now supports an *auto_connect_event_hub* argument to - control whether the built in event hub should connect to the server on - session initialisation. This is useful for when only local events should - be supported or when the connection should be manually controlled. - -.. release:: 0.3.0 - :date: 2015-06-14 - - .. change:: fixed - - Session operations may be applied server side in invalid order resulting - in unexpected error. - - .. change:: fixed - - Creating and deleting an entity in single commit causes error as create - operation never persisted to server. - - Now all operations for the entity are ignored on commit when this case - is detected. - - .. change:: changed - - Internally moved from differential state to operation tracking for - determining session changes when persisting. - - .. change:: new - - ``Session.recorded_operations`` attribute for examining current - pending operations on a :class:`Session`. - - .. change:: new - - :meth:`Session.operation_recording` context manager for suspending - recording operations temporarily. Can also manually control - ``Session.record_operations`` boolean. - - .. change:: new - - Operation classes to track individual operations occurring in session. - - .. change:: new - - Public :meth:`Session.merge` method for merging arbitrary values into - the session manually. - - .. 
change:: changed - - An entity's state is now computed from the operations performed on it - and is no longer manually settable. - - .. change:: changed - - ``Entity.state`` attribute removed. Instead use the new inspection - :func:`ftrack_api.inspection.state`. - - Previously:: - - print entity.state - - Now:: - - import ftrack_api.inspection - print ftrack_api.inspection.state(entity) - - There is also an optimised inspection, - :func:`ftrack_api.inspection.states`. for determining state of many - entities at once. - - .. change:: changed - - Shallow copying a :class:`ftrack_api.symbol.Symbol` instance now - returns same instance. - -.. release:: 0.2.0 - :date: 2015-06-04 - - .. change:: changed - - Changed name of API from `ftrack` to `ftrack_api`. - - .. seealso:: :ref:`release/migration/0.2.0/new_api_name`. - - .. change:: new - :tags: caching - - Configurable caching support in :class:`Session`, including the ability - to use an external persisted cache and new cache implementations. - - .. seealso:: :ref:`caching`. - - .. change:: new - :tags: caching - - :meth:`Session.get` now tries to retrieve matching entity from - configured cache first. - - .. change:: new - :tags: serialisation, caching - - :meth:`Session.encode` supports a new mode *persisted_only* that will - only encode persisted attribute values. - - .. change:: changed - - Session.merge method is now private (:meth:`Session._merge`) until it is - qualified for general usage. - - .. change:: changed - :tags: entity state - - :class:`~ftrack_api.entity.base.Entity` state now managed on the entity - directly rather than stored separately in the :class:`Session`. - - Previously:: - - session.set_state(entity, state) - print session.get_state(entity) - - Now:: - - entity.state = state - print entity.state - - .. change:: changed - :tags: entity state - - Entity states are now :class:`ftrack_api.symbol.Symbol` instances rather - than strings. 
- - Previously:: - - entity.state = 'created' - - Now:: - - entity.state = ftrack_api.symbol.CREATED - - .. change:: fixed - :tags: entity state - - It is now valid to transition from most entity states to an - :attr:`ftrack_api.symbol.NOT_SET` state. - - .. change:: changed - :tags: caching - - :class:`~ftrack_api.cache.EntityKeyMaker` removed and replaced by - :class:`~ftrack_api.cache.StringKeyMaker`. Entity identity now - computed separately and passed to key maker to allow key maker to work - with non entity instances. - - .. change:: fixed - :tags: entity - - Internal data keys ignored when re/constructing entities reducing - distracting and irrelevant warnings in logs. - - .. change:: fixed - :tags: entity - - :class:`~ftrack_api.entity.base.Entity` equality test raises error when - other is not an entity instance. - - .. change:: changed - :tags: entity, caching - - :meth:`~ftrack_api.entity.base.Entity.merge` now also merges state and - local attributes. In addition, it ensures values being merged have also - been merged into the session and outputs more log messages. - - .. change:: fixed - :tags: inspection - - :func:`ftrack_api.inspection.identity` returns different result for same - entity depending on whether entity type is unicode or string. - - .. change:: fixed - - :func:`ftrack_api.mixin` causes method resolution failure when same - class mixed in multiple times. - - .. change:: changed - - Representations of objects now show plain id rather than converting to - hex. - - .. change:: fixed - :tags: events - - Event hub raises TypeError when listening to ftrack.update events. - - .. change:: fixed - :tags: events - - :meth:`ftrack_api.event.hub.EventHub.subscribe` fails when subscription - argument contains special characters such as `@` or `+`. - - .. change:: fixed - :tags: collection - - :meth:`ftrack_api.collection.Collection` incorrectly modifies entity - state on initialisation. - -.. release:: 0.1.0 - :date: 2015-03-25 - - .. 
change:: changed - - Moved standardised construct entity type logic to core package (as part - of the :class:`~ftrack_api.entity.factory.StandardFactory`) for easier - reuse and extension. - -.. release:: 0.1.0-beta.2 - :date: 2015-03-17 - - .. change:: new - :tags: locations - - Support for ftrack.server location. The corresponding server build is - required for it to function properly. - - .. change:: new - :tags: locations - - Support for managing components in locations has been added. Check out - the :ref:`dedicated tutorial `. - - .. change:: new - - A new inspection API (:mod:`ftrack_api.inspection`) has been added for - extracting useful information from objects in the system, such as the - identity of an entity. - - .. change:: changed - - ``Entity.primary_key`` and ``Entity.identity`` have been removed. - Instead, use the new :func:`ftrack_api.inspection.primary_key` and - :func:`ftrack_api.inspection.identity` functions. This was done to make it - clearer the the extracted information is determined from the current - entity state and modifying the returned object will have no effect on - the entity instance itself. - - .. change:: changed - - :func:`ftrack_api.inspection.primary_key` now returns a mapping of the - attribute names and values that make up the primary key, rather than - the previous behaviour of returning a tuple of just the values. To - emulate previous behaviour do:: - - ftrack_api.inspection.primary_key(entity).values() - - .. change:: changed - - :meth:`Session.encode` now supports different strategies for encoding - entities via the entity_attribute_strategy* keyword argument. This makes - it possible to use this method for general serialisation of entity - instances. - - .. change:: changed - - Encoded referenced entities are now a mapping containing - *__entity_type__* and then each key, value pair that makes up the - entity's primary key. 
For example:: - - { - '__entity_type__': 'User', - 'id': '8b90a444-4e65-11e1-a500-f23c91df25eb' - } - - .. change:: changed - - :meth:`Session.decode` no longer automatically adds decoded entities to - the :class:`Session` cache making it possible to use decode - independently. - - .. change:: new - - Added :meth:`Session.merge` for merging entities recursively into the - session cache. - - .. change:: fixed - - Replacing an entity in a :class:`ftrack_api.collection.Collection` with an - identical entity no longer raises - :exc:`ftrack_api.exception.DuplicateItemInCollectionError`. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py deleted file mode 100644 index 5fda0195a9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -import logging - -import ftrack_api.session - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. - if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - # Perform your logic here, such as subscribe to an event. 
- pass - - logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py deleted file mode 100644 index dd11136d69..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py +++ /dev/null @@ -1,37 +0,0 @@ -# :coding: utf-8 -import logging - -import ftrack_api.session - - -def register_with_session_ready(event): - '''Called when session is ready to be used.''' - logger = logging.getLogger('com.example.example-plugin') - logger.debug('Session ready.') - session = event['data']['session'] - - # Session is now ready and can be used to e.g. query objects. - task = session.query('Task').first() - print task['name'] - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. 
- if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.ready', - register_with_session_ready - ) - - logger.debug('Plugin registered') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst deleted file mode 100644 index 724afa81a6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst +++ /dev/null @@ -1,38 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _security_and_authentication: - -*************************** -Security and authentication -*************************** - -Self signed SSL certificate -=========================== - -When using a self signed SSL certificate the API may fail to connect if it -cannot verify the SSL certificate. Under the hood the -`requests `_ library is used and it -must be specified where the trusted certificate authority can be found using the -environment variable ``REQUESTS_CA_BUNDLE``. - -.. seealso:: `SSL Cert Verification `_ - -InsecurePlatformWarning -======================= - -When using this API you may sometimes see a warning:: - - InsecurePlatformWarning: A true SSLContext object is not available. This - prevents urllib3 from configuring SSL appropriately and may cause certain - SSL connections to fail. - -If you encounter this warning, its recommended you upgrade to Python 2.7.9, or -use pyOpenSSL. To use pyOpenSSL simply:: - - pip install pyopenssl ndg-httpsclient pyasn1 - -and the `requests `_ library used by -this API will use pyOpenSSL instead. - -.. 
seealso:: `InsecurePlatformWarning `_ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst deleted file mode 100644 index 73b352eb2f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst +++ /dev/null @@ -1,156 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _tutorial: - -******** -Tutorial -******** - -.. currentmodule:: ftrack_api.session - -This tutorial provides a quick dive into using the API and the broad stroke -concepts involved. - -First make sure the ftrack Python API is :ref:`installed `. - -Then start a Python session and import the ftrack API:: - - >>> import ftrack_api - -The API uses :ref:`sessions ` to manage communication -with an ftrack server. Create a session that connects to your ftrack server -(changing the passed values as appropriate):: - - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -.. note:: - - A session can use :ref:`environment variables - ` to configure itself. - -Now print a list of the available entity types retrieved from the server:: - - >>> print session.types.keys() - [u'TypedContext', u'ObjectType', u'Priority', u'Project', u'Sequence', - u'Shot', u'Task', u'Status', u'Type', u'Timelog', u'User'] - -Now the list of possible entity types is known, :ref:`query ` the -server to retrieve entities of a particular type by using the -:meth:`Session.query` method:: - - >>> projects = session.query('Project') - -Each project retrieved will be an :ref:`entity ` instance -that behaves much like a standard Python dictionary. 
For example, to find out -the available keys for an entity, call the -:meth:`~ftrack_api.entity.Entity.keys` method:: - - >>> print projects[0].keys() - [u'status', u'is_global', u'name', u'end_date', u'context_type', - u'id', u'full_name', u'root', u'start_date'] - -Now, iterate over the retrieved entities and print each ones name:: - - >>> for project in projects: - ... print project['name'] - test - client_review - tdb - man_test - ftrack - bunny - -.. note:: - - Many attributes for retrieved entities are loaded on demand when the - attribute is first accessed. Doing this lots of times in a script can be - inefficient, so it is worth using :ref:`projections ` - in queries or :ref:`pre-populating ` - entities where appropriate. You can also :ref:`customise default projections - ` to help others - pre-load common attributes. - -To narrow a search, add :ref:`criteria ` to the query:: - - >>> active_projects = session.query('Project where status is active') - -Combine criteria for more powerful queries:: - - >>> import arrow - >>> - >>> active_projects_ending_before_next_week = session.query( - ... 'Project where status is active and end_date before "{0}"' - ... .format(arrow.now().replace(weeks=+1)) - ... ) - -Some attributes on an entity will refer to another entity or collection of -entities, such as *children* on a *Project* being a collection of *Context* -entities that have the project as their parent:: - - >>> project = session.query('Project').first() - >>> print project['children'] - - -And on each *Context* there is a corresponding *parent* attribute which is a -link back to the parent:: - - >>> child = project['children'][0] - >>> print child['parent'] is project - True - -These relationships can also be used in the criteria for a query:: - - >>> results = session.query( - ... 'Context where parent.name like "te%"' - ... ) - -To create new entities in the system use :meth:`Session.create`:: - - >>> new_sequence = session.create('Sequence', { - ... 
'name': 'Starlord Reveal' - ... }) - -The created entity is not yet persisted to the server, but it is still possible -to modify it. - - >>> new_sequence['description'] = 'First hero character reveal.' - -The sequence also needs a parent. This can be done in one of two ways: - -* Set the parent attribute on the sequence:: - - >>> new_sequence['parent'] = project - -* Add the sequence to a parent's children attribute:: - - >>> project['children'].append(new_sequence) - -When ready, persist to the server using :meth:`Session.commit`:: - - >>> session.commit() - -When finished with a :class:`Session`, it is important to :meth:`~Session.close` -it in order to release resources and properly unsubscribe any registered event -listeners. It is also possible to use the session as a context manager in order -to have it closed automatically after use:: - - >>> with ftrack_api.Session() as session: - ... print session.query('User').first() - - >>> print session.closed - True - -Once a :class:`Session` is closed, any operations that attempt to use the closed -connection to the ftrack server will fail:: - - >>> session.query('Project').first() - ConnectionClosedError: Connection closed. - -Continue to the next section to start learning more about the API in greater -depth or jump over to the :ref:`usage examples ` if you prefer to learn -by example. diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst deleted file mode 100644 index e3602c4fa9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst +++ /dev/null @@ -1,281 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _understanding_sessions: - -********************** -Understanding sessions -********************** - -.. 
currentmodule:: ftrack_api.session - -All communication with an ftrack server takes place through a :class:`Session`. -This allows more opportunity for configuring the connection, plugins etc. and -also makes it possible to connect to multiple ftrack servers from within the -same Python process. - -.. _understanding_sessions/connection: - -Connection -========== - -A session can be manually configured at runtime to connect to a server with -certain credentials:: - - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -Alternatively, a session can use the following environment variables to -configure itself: - - * :envvar:`FTRACK_SERVER` - * :envvar:`FTRACK_API_USER` - * :envvar:`FTRACK_API_KEY` - -When using environment variables, no server connection arguments need to be -passed manually:: - - >>> session = ftrack_api.Session() - -.. _understanding_sessions/unit_of_work: - -Unit of work -============ - -Each session follows the unit of work pattern. This means that many of the -operations performed using a session will happen locally and only be persisted -to the server at certain times, notably when calling :meth:`Session.commit`. -This approach helps optimise calls to the server and also group related logic -together in a transaction:: - - user = session.create('User', {}) - user['username'] = 'martin' - other_user = session.create('User', {'username': 'bjorn'}) - other_user['email'] = 'bjorn@example.com' - -Behind the scenes a series of :class:`operations -` are recorded reflecting the changes made. You -can take a peek at these operations if desired by examining the -``Session.recorded_operations`` property:: - - >>> for operation in session.recorded_operations: - ... print operation - - - - - -Calling :meth:`Session.commit` persists all recorded operations to the server -and clears the operation log:: - - session.commit() - -.. 
note:: - - The commit call will optimise operations to be as efficient as possible - without breaking logical ordering. For example, a create followed by updates - on the same entity will be compressed into a single create. - -Queries are special and always issued on demand. As a result, a query may return -unexpected results if the relevant local changes have not yet been sent to the -server:: - - >>> user = session.create('User', {'username': 'some_unique_username'}) - >>> query = 'User where username is "{0}"'.format(user['username']) - >>> print len(session.query(query)) - 0 - >>> session.commit() - >>> print len(session.query(query)) - 1 - -Where possible, query results are merged in with existing data transparently -with any local changes preserved:: - - >>> user = session.query('User').first() - >>> user['email'] = 'me@example.com' # Not yet committed to server. - >>> retrieved = session.query( - ... 'User where id is "{0}"'.format(user['id']) - ... ).one() - >>> print retrieved['email'] # Displays locally set value. - 'me@example.com' - >>> print retrieved is user - True - -This is possible due to the smart :ref:`caching` layer in the session. - -.. _understanding_sessions/auto_population: - -Auto-population -=============== - -Another important concept in a session is that of auto-population. By default a -session is configured to auto-populate missing attribute values on access. This -means that the first time you access an attribute on an entity instance a query -will be sent to the server to fetch the value:: - - user = session.query('User').first() - # The next command will issue a request to the server to fetch the - # 'username' value on demand at this is the first time it is accessed. - print user['username'] - -Once a value has been retrieved it is :ref:`cached ` locally in the -session and accessing it again will not issue more server calls:: - - # On second access no server call is made. 
- print user['username'] - -You can control the auto population behaviour of a session by either changing -the ``Session.auto_populate`` attribute on a session or using the provided -context helper :meth:`Session.auto_populating` to temporarily change the -setting. When turned off you may see a special -:attr:`~ftrack_api.symbol.NOT_SET` symbol that represents a value has not yet -been fetched:: - - >>> with session.auto_populating(False): - ... print user['email'] - NOT_SET - -Whilst convenient for simple scripts, making many requests to the server for -each attribute can slow execution of a script. To support optimisation the API -includes methods for batch fetching attributes. Read about them in -:ref:`querying/projections` and :ref:`working_with_entities/populating`. - -.. _understanding_sessions/entity_types: - -Entity types -============ - -When a session has successfully connected to the server it will automatically -download schema information and :ref:`create appropriate classes -` for use. This is important as different -servers can support different entity types and configurations. - -This information is readily available and useful if you need to check that the -entity types you expect are present. Here's how to print a list of all entity -types registered for use in the current API session:: - - >>> print session.types.keys() - [u'Task', u'Shot', u'TypedContext', u'Sequence', u'Priority', - u'Status', u'Project', u'User', u'Type', u'ObjectType'] - -Each entity type is backed by a :ref:`customisable class -` that further describes the entity type and -the attributes that are available. - -.. hint:: - - If you need to use an :func:`isinstance` check, always go through the - session as the classes are built dynamically:: - - >>> isinstance(entity, session.types['Project']) - -.. 
_understanding_sessions/plugins: - -Configuring plugins -=================== - -Plugins are used by the API to extend it with new functionality, such as -:term:`locations ` or adding convenience methods to -:ref:`understanding_sessions/entity_types`. In addition to new API -functionality, event plugins may also be used for event processing by listening -to :ref:`ftrack update events ` or adding custom functionality to ftrack by registering -:term:`actions `. - - -When starting a new :class:`Session` either pass the *plugins_paths* to search -explicitly or rely on the environment variable -:envvar:`FTRACK_EVENT_PLUGIN_PATH`. As each session is independent of others, -you can configure plugins per session. - -The paths will be searched for :term:`plugins `, python files -which expose a `register` function. These functions will be evaluated and can -be used extend the API with new functionality, such as locations or actions. - -If you do not specify any override then the session will attempt to discover and -use the default plugins. - -Plugins are discovered using :func:`ftrack_api.plugin.discover` with the -session instance passed as the sole positional argument. Most plugins should -take the form of a mount function that then subscribes to specific :ref:`events -` on the session:: - - def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - # Find location(s) and customise instances. - - def register(session): - '''Register plugin with *session*.''' - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) - -Additional keyword arguments can be passed as *plugin_arguments* to the -:class:`Session` on instantiation. These are passed to the plugin register -function if its signature supports them:: - - # a_plugin.py - def register(session, reticulate_splines=False): - '''Register plugin with *session*.''' - ... 
- - # main.py - session = ftrack_api.Session( - plugin_arguments={ - 'reticulate_splines': True, - 'some_other_argument': 42 - } - ) - -.. seealso:: - - Lists of events which you can subscribe to in your plugins are available - both for :ref:`synchronous event published by the python API ` - and :ref:`asynchronous events published by the server ` - - -Quick setup ------------ - -1. Create a directory where plugins will be stored. Place any plugins you want -loaded automatically in an API *session* here. - -.. image:: /image/configuring_plugins_directory.png - -2. Configure the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. - - -Detailed setup --------------- - -Start out by creating a directory on your machine where you will store your -plugins. Download :download:`example_plugin.py ` -and place it in the directory. - -Open up a terminal window, and ensure that plugin is picked up when -instantiating the session and manually setting the *plugin_paths*:: - - >>> # Set up basic logging - >>> import logging - >>> logging.basicConfig() - >>> plugin_logger = logging.getLogger('com.example.example-plugin') - >>> plugin_logger.setLevel(logging.DEBUG) - >>> - >>> # Configure the API, loading plugins in the specified paths. - >>> import ftrack_api - >>> plugin_paths = ['/path/to/plugins'] - >>> session = ftrack_api.Session(plugin_paths=plugin_paths) - -If everything is working as expected, you should see the following in the -output:: - - DEBUG:com.example.example-plugin:Plugin registered - -Instead of specifying the plugin paths when instantiating the session, you can -also specify the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. -To specify multiple directories, use the path separator for your operating -system. 
\ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst deleted file mode 100644 index 2d9d26f986..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst +++ /dev/null @@ -1,434 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _working_with_entities: - -********************* -Working with entities -********************* - -.. currentmodule:: ftrack_api.session - -:class:`Entity ` instances are Python dict-like -objects whose keys correspond to attributes for that type in the system. They -may also provide helper methods to perform common operations such as replying to -a note:: - - note = session.query('Note').first() - print note.keys() - print note['content'] - note['content'] = 'A different message!' - reply = note.create_reply(...) - -.. _working_with_entities/attributes: - -Attributes -========== - -Each entity instance is typed according to its underlying entity type on the -server and configured with appropriate attributes. For example, a *task* will be -represented by a *Task* class and have corresponding attributes. You can -:ref:`customise entity classes ` to alter -attribute access or provide your own helper methods. - -To see the available attribute names on an entity use the -:meth:`~ftrack_api.entity.base.Entity.keys` method on the instance:: - - >>> task = session.query('Task').first() - >>> print task.keys() - ['id', 'name', ...] - -If you need more information about the type of attribute, examine the -``attributes`` property on the corresponding class:: - - >>> for attribute in type(task).attributes: - ... print attribute - - - - - - ... 
- -Notice that there are different types of attribute such as -:class:`~ftrack_api.attribute.ScalarAttribute` for plain values or -:class:`~ftrack_api.attribute.ReferenceAttribute` for relationships. These -different types are reflected in the behaviour on the entity instance when -accessing a particular attribute by key: - - >>> # Scalar - >>> print task['name'] - 'model' - >>> task['name'] = 'comp' - - >>> # Single reference - >>> print task['status'] - - >>> new_status = session.query('Status').first() - >>> task['status'] = new_status - - >>> # Collection - >>> print task['timelogs'] - - >>> print task['timelogs'][:] - [, ...] - >>> new_timelog = session.create('Timelog', {...}) - >>> task['timelogs'].append(new_timelog) - -.. _working_with_entities/attributes/bidirectional: - -Bi-directional relationships ----------------------------- - -Some attributes refer to different sides of a bi-directional relationship. In -the current version of the API bi-directional updates are not propagated -automatically to the other side of the relationship. For example, setting a -*parent* will not update the parent entity's *children* collection locally. -There are plans to support this behaviour better in the future. For now, after -commit, :ref:`populate ` the reverse side -attribute manually. - -.. _working_with_entities/creating: - -Creating entities -================= - -In order to create a new instance of an entity call :meth:`Session.create` -passing in the entity type to create and any initial attribute values:: - - new_user = session.create('User', {'username': 'martin'}) - -If there are any default values that can be set client side then they will be -applied at this point. Typically this will be the unique entity key:: - - >>> print new_user['id'] - 170f02a4-6656-4f15-a5cb-c4dd77ce0540 - -At this point no information has been sent to the server. 
However, you are free -to continue :ref:`updating ` this object -locally until you are ready to persist the changes by calling -:meth:`Session.commit`. - -If you are wondering about what would happen if you accessed an unset attribute -on a newly created entity, go ahead and give it a go:: - - >>> print new_user['first_name'] - NOT_SET - -The session knows that it is a newly created entity that has not yet been -persisted so it doesn't try to fetch any attributes on access even when -``session.auto_populate`` is turned on. - -.. _working_with_entities/updating: - -Updating entities -================= - -Updating an entity is as simple as modifying the values for specific keys on -the dict-like instance and calling :meth:`Session.commit` when ready. The entity -to update can either be a new entity or a retrieved entity:: - - task = session.query('Task').first() - task['bid'] = 8 - -Remember that, for existing entities, accessing an attribute will load it from -the server automatically. If you are interested in just setting values without -first fetching them from the server, turn :ref:`auto-population -` off temporarily:: - - >>> with session.auto_populating(False): - ... task = session.query('Task').first() - ... task['bid'] = 8 - - -.. _working_with_entities/resetting: - -Server side reset of entity attributes or settings. -=========================== - -Some entities support resetting of attributes, for example -to reset a users api key:: - - - session.reset_remote( - 'api_key', entity=session.query('User where username is "test_user"').one() - ) - -.. note:: - Currently the only attribute possible to reset is 'api_key' on - the user entity type. - - -.. _working_with_entities/deleting: - -Deleting entities -================= - -To delete an entity you need an instance of the entity in your session (either -from having created one or retrieving one). 
Then call :meth:`Session.delete` on -the entity and :meth:`Session.commit` when ready:: - - task_to_delete = session.query('Task').first() - session.delete(task_to_delete) - ... - session.commit() - -.. note:: - - Even though the entity is deleted, you will still have access to the local - instance and any local data stored on that instance whilst that instance - remains in memory. - -Keep in mind that some deletions, when propagated to the server, will cause -other entities to be deleted also, so you don't have to worry about deleting an -entire hierarchy manually. For example, deleting a *Task* will also delete all -*Notes* on that task. - -.. _working_with_entities/populating: - -Populating entities -=================== - -When an entity is retrieved via :meth:`Session.query` or :meth:`Session.get` it -will have some attributes prepopulated. The rest are dynamically loaded when -they are accessed. If you need to access many attributes it can be more -efficient to request all those attributes be loaded in one go. One way to do -this is to use a :ref:`projections ` in queries. - -However, if you have entities that have been passed to you from elsewhere you -don't have control over the query that was issued to get those entities. In this -case you can you can populate those entities in one go using -:meth:`Session.populate` which works exactly like :ref:`projections -` in queries do, but operating against known entities:: - - >>> users = session.query('User') - >>> session.populate(users, 'first_name, last_name') - >>> with session.auto_populating(False): # Turn off for example purpose. - ... for user in users: - ... print 'Name: {0}'.format(user['first_name']) - ... print 'Email: {0}'.format(user['email']) - Name: Martin - Email: NOT_SET - ... - -.. note:: - - You can populate a single or many entities in one call so long as they are - all the same entity type. - -.. 
_working_with_entities/entity_states: - -Entity states -============= - -Operations on entities are :ref:`recorded in the session -` as they happen. At any time you can -inspect an entity to determine its current state from those pending operations. - -To do this, use :func:`ftrack_api.inspection.state`:: - - >>> import ftrack_api.inspection - >>> new_user = session.create('User', {}) - >>> print ftrack_api.inspection.state(new_user) - CREATED - >>> existing_user = session.query('User').first() - >>> print ftrack_api.inspection.state(existing_user) - NOT_SET - >>> existing_user['email'] = 'martin@example.com' - >>> print ftrack_api.inspection.state(existing_user) - MODIFIED - >>> session.delete(new_user) - >>> print ftrack_api.inspection.state(new_user) - DELETED - -.. _working_with_entities/entity_types: - -Customising entity types -======================== - -Each type of entity in the system is represented in the Python client by a -dedicated class. However, because the types of entities can vary these classes -are built on demand using schema information retrieved from the server. - -Many of the default classes provide additional helper methods which are mixed -into the generated class at runtime when a session is started. - -In some cases it can be useful to tailor the custom classes to your own pipeline -workflows. Perhaps you want to add more helper functions, change attribute -access rules or even providing a layer of backwards compatibility for existing -code. The Python client was built with this in mind and makes such -customisations as easy as possible. - -When a :class:`Session` is constructed it fetches schema details from the -connected server and then calls an :class:`Entity factory -` to create classes from those schemas. It -does this by emitting a synchronous event, -*ftrack.api.session.construct-entity-type*, for each schema and expecting a -*class* object to be returned. 
- -In the default setup, a :download:`construct_entity_type.py -<../resource/plugin/construct_entity_type.py>` plugin is placed on the -:envvar:`FTRACK_EVENT_PLUGIN_PATH`. This plugin will register a trivial subclass -of :class:`ftrack_api.entity.factory.StandardFactory` to create the classes in -response to the construct event. The simplest way to get started is to edit this -default plugin as required. - -.. seealso:: :ref:`understanding_sessions/plugins` - -.. _working_with_entities/entity_types/default_projections: - -Default projections -------------------- - -When a :ref:`query ` is issued without any :ref:`projections -`, the session will automatically add default projections -according to the type of the entity. - -For example, the following shows that for a *User*, only *id* is fetched by -default when no projections added to the query:: - - >>> user = session.query('User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', Symbol(NOT_SET)), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -.. note:: - - These default projections are also used when you access a relationship - attribute using the dictionary key syntax. - -If you want to default to fetching *username* for a *Task* as well then you can -change the default_projections* in your class factory plugin:: - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.default_projections = ['id', 'username'] - - return cls - -Now a projection-less query will also query *username* by default: - -.. 
note:: - - You will need to start a new session to pick up the change you made:: - - session = ftrack_api.Session() - -.. code-block:: python - - >>> user = session.query('User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', u'martin'), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -Note that if any specific projections are applied in a query, those override -the default projections entirely. This allows you to also *reduce* the data -loaded on demand:: - - >>> session = ftrack_api.Session() # Start new session to avoid cache. - >>> user = session.query('select id from User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', Symbol(NOT_SET)), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -.. _working_with_entities/entity_types/helper_methods: - -Helper methods --------------- - -If you want to add additional helper methods to the constructed classes to -better support your pipeline logic, then you can simply patch the created -classes in your factory, much like with changing the default projections:: - - def get_full_name(self): - '''Return full name for user.''' - return '{0} {1}'.format(self['first_name'], self['last_name']).strip() - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.get_full_name = get_full_name - - return cls - -Now you have a new helper method *get_full_name* on your *User* entities:: - - >>> session = ftrack_api.Session() # New session to pick up changes. 
- >>> user = session.query('User').first() - >>> print user.get_full_name() - Martin Pengelly-Phillips - -If you'd rather not patch the existing classes, or perhaps have a lot of helpers -to mixin, you can instead inject your own class as the base class. The only -requirement is that it has the base :class:`~ftrack_api.entity.base.Entity` -class in its ancestor classes:: - - import ftrack_api.entity.base - - - class CustomUser(ftrack_api.entity.base.Entity): - '''Represent user.''' - - def get_full_name(self): - '''Return full name for user.''' - return '{0} {1}'.format(self['first_name'], self['last_name']).strip() - - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Alter base class for constructed class. - if bases is None: - bases = [ftrack_api.entity.base.Entity] - - if schema['id'] == 'User': - bases = [CustomUser] - - cls = super(Factory, self).create(schema, bases=bases) - return cls - -The resulting effect is the same:: - - >>> session = ftrack_api.Session() # New session to pick up changes. - >>> user = session.query('User').first() - >>> print user.get_full_name() - Martin Pengelly-Phillips - -.. note:: - - Your custom class is not the leaf class which will still be a dynamically - generated class. Instead your custom class becomes the base for the leaf - class:: - - >>> print type(user).__mro__ - (, , ...) 
diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini deleted file mode 100644 index b1f515ee18..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -minversion = 2.4.2 -addopts = -v -k-slow --junitxml=test-reports/junit.xml --cache-clear -norecursedirs = .* _* -python_files = test_*.py -python_functions = test_* -mock_use_standalone_module = true \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py deleted file mode 100644 index 0682a5eeb0..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py +++ /dev/null @@ -1,39 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api -import ftrack_api.entity.location -import ftrack_api.accessor.disk - - -def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - # - # location = session.query('Location where name is "my.location"').one() - # ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - # location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:configure_locations.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py deleted file mode 100644 index 45f7841670..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py +++ /dev/null @@ -1,46 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api.entity.factory - - -class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Optionally change bases for class to be generated. - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - - return cls - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - factory = Factory() - - def construct_entity_type(event): - '''Return class to represent entity type specified by *event*.''' - schema = event['data']['schema'] - return factory.create(schema) - - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg deleted file mode 100644 index b2ad8fd086..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.cfg +++ /dev/null @@ -1,6 +0,0 @@ -[build_sphinx] -config-dir = doc -source-dir = doc -build-dir = build/doc -builder = html -all_files = 1 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py deleted file mode 100644 index da99a572b4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os -import re - -from setuptools import setup, find_packages -from setuptools.command.test import test as TestCommand - - -ROOT_PATH = os.path.dirname(os.path.realpath(__file__)) -RESOURCE_PATH = os.path.join(ROOT_PATH, 'resource') -SOURCE_PATH = os.path.join(ROOT_PATH, 'source') -README_PATH = os.path.join(ROOT_PATH, 'README.rst') - - -# Read version from source. -with open( - os.path.join(SOURCE_PATH, 'ftrack_api', '_version.py') -) as _version_file: - VERSION = re.match( - r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL - ).group(1) - - -# Custom commands. 
-class PyTest(TestCommand): - '''Pytest command.''' - - def finalize_options(self): - '''Finalize options to be used.''' - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - '''Import pytest and run.''' - import pytest - raise SystemExit(pytest.main(self.test_args)) - - -# Call main setup. -setup( - name='ftrack-python-api', - version=VERSION, - description='Python API for ftrack.', - long_description=open(README_PATH).read(), - keywords='ftrack, python, api', - url='https://bitbucket.org/ftrack/ftrack-python-api', - author='ftrack', - author_email='support@ftrack.com', - license='Apache License (2.0)', - packages=find_packages(SOURCE_PATH), - package_dir={ - '': 'source' - }, - setup_requires=[ - 'sphinx >= 1.2.2, < 2', - 'sphinx_rtd_theme >= 0.1.6, < 1', - 'lowdown >= 0.1.0, < 2' - ], - install_requires=[ - 'requests >= 2, <3', - 'arrow >= 0.4.4, < 1', - 'termcolor >= 1.1.0, < 2', - 'pyparsing >= 2.0, < 3', - 'clique >= 1.2.0, < 2', - 'websocket-client >= 0.40.0, < 1' - ], - tests_require=[ - 'pytest >= 2.7, < 3', - 'pytest-mock >= 0.4, < 1', - 'pytest-catchlog >= 1, <=2' - ], - cmdclass={ - 'test': PyTest - }, - zip_safe=False, - python_requires=">=2.7.9, <3.0" - -) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py deleted file mode 100644 index 34833aa0dd..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from ftrack_api import * diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py deleted file mode 100644 index d8ee30bd8f..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from ._version import __version__ -from .session import Session - - -def mixin(instance, mixin_class, name=None): - '''Mixin *mixin_class* to *instance*. - - *name* can be used to specify new class name. If not specified then one will - be generated. - - ''' - if name is None: - name = '{0}{1}'.format( - instance.__class__.__name__, mixin_class.__name__ - ) - - # Check mixin class not already present in mro in order to avoid consistent - # method resolution failure. - if mixin_class in instance.__class__.mro(): - return - - instance.__class__ = type( - name, - ( - mixin_class, - instance.__class__ - ), - {} - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py deleted file mode 100644 index fbe14f3277..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py +++ /dev/null @@ -1,656 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -from __future__ import absolute_import - -import logging -import json -import sys -import os - -import ftrack_api -import ftrack_api.structure.standard as _standard -from ftrack_api.logging import LazyLogMessage as L - - -scenario_name = 'ftrack.centralized-storage' - - -class ConfigureCentralizedStorageScenario(object): - '''Configure a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - @property - def storage_scenario(self): - '''Return storage scenario setting.''' - return self.session.query( - 'select value from Setting ' - 'where name is "storage_scenario" and group is "STORAGE"' - ).one() - - @property - def existing_centralized_storage_configuration(self): - '''Return existing centralized storage configuration.''' - storage_scenario = self.storage_scenario - - try: - configuration = json.loads(storage_scenario['value']) - except (ValueError, TypeError): - return None - - if not isinstance(configuration, dict): - return None - - if configuration.get('scenario') != scenario_name: - return None - - return configuration.get('data', {}) - - def _get_confirmation_text(self, configuration): - '''Return confirmation text from *configuration*.''' - configure_location = configuration.get('configure_location') - select_location = configuration.get('select_location') - select_mount_point = configuration.get('select_mount_point') - - if configure_location: - location_text = unicode( - 'A new location will be created:\n\n' - '* Label: {location_label}\n' - '* Name: {location_name}\n' - '* Description: {location_description}\n' - ).format(**configure_location) - else: - location = self.session.get( - 'Location', select_location['location_id'] - ) - location_text = ( - u'You have choosen to use an existing location: {0}'.format( - location['label'] - ) - ) - - mount_points_text = unicode( - '* Linux: {linux}\n' - '* OS X: {osx}\n' - '* Windows: {windows}\n\n' - ).format( - linux=select_mount_point.get('linux_mount_point') or '*Not set*', - osx=select_mount_point.get('osx_mount_point') or '*Not set*', - windows=select_mount_point.get('windows_mount_point') or '*Not set*' - ) - - mount_points_not_set = [] - - if not select_mount_point.get('linux_mount_point'): - mount_points_not_set.append('Linux') - - if not select_mount_point.get('osx_mount_point'): - mount_points_not_set.append('OS X') - - if not 
select_mount_point.get('windows_mount_point'): - mount_points_not_set.append('Windows') - - if mount_points_not_set: - mount_points_text += unicode( - 'Please be aware that this location will not be working on ' - '{missing} because the mount points are not set up.' - ).format( - missing=' and '.join(mount_points_not_set) - ) - - text = unicode( - '#Confirm storage setup#\n\n' - 'Almost there! Please take a moment to verify the settings you ' - 'are about to save. You can always come back later and update the ' - 'configuration.\n' - '##Location##\n\n' - '{location}\n' - '##Mount points##\n\n' - '{mount_points}' - ).format( - location=location_text, - mount_points=mount_points_text - ) - - return text - - def configure_scenario(self, event): - '''Configure scenario based on *event* and return form items.''' - steps = ( - 'select_scenario', - 'select_location', - 'configure_location', - 'select_structure', - 'select_mount_point', - 'confirm_summary', - 'save_configuration' - ) - - warning_message = '' - values = event['data'].get('values', {}) - - # Calculate previous step and the next. - previous_step = values.get('step', 'select_scenario') - next_step = steps[steps.index(previous_step) + 1] - state = 'configuring' - - self.logger.info(L( - u'Configuring scenario, previous step: {0}, next step: {1}. ' - u'Values {2!r}.', - previous_step, next_step, values - )) - - if 'configuration' in values: - configuration = values.pop('configuration') - else: - configuration = {} - - if values: - # Update configuration with values from the previous step. - configuration[previous_step] = values - - if previous_step == 'select_location': - values = configuration['select_location'] - if values.get('location_id') != 'create_new_location': - location_exists = self.session.query( - 'Location where id is "{0}"'.format( - values.get('location_id') - ) - ).first() - if not location_exists: - next_step = 'select_location' - warning_message = ( - '**The selected location does not exist. 
Please choose ' - 'one from the dropdown or create a new one.**' - ) - - if next_step == 'select_location': - try: - location_id = ( - self.existing_centralized_storage_configuration['location_id'] - ) - except (KeyError, TypeError): - location_id = None - - options = [{ - 'label': 'Create new location', - 'value': 'create_new_location' - }] - for location in self.session.query( - 'select name, label, description from Location' - ): - if location['name'] not in ( - 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', - 'ftrack.server', 'ftrack.review' - ): - options.append({ - 'label': u'{label} ({name})'.format( - label=location['label'], name=location['name'] - ), - 'description': location['description'], - 'value': location['id'] - }) - - warning = '' - if location_id is not None: - # If there is already a location configured we must make the - # user aware that changing the location may be problematic. - warning = ( - '\n\n**Be careful if you switch to another location ' - 'for an existing storage scenario. Components that have ' - 'already been published to the previous location will be ' - 'made unavailable for common use.**' - ) - default_value = location_id - elif location_id is None and len(options) == 1: - # No location configured and no existing locations to use. - default_value = 'create_new_location' - else: - # There are existing locations to choose from but non of them - # are currently active in the centralized storage scenario. - default_value = None - - items = [{ - 'type': 'label', - 'value': ( - '#Select location#\n' - 'Choose an already existing location or create a new one ' - 'to represent your centralized storage. 
{0}'.format( - warning - ) - ) - }, { - 'type': 'enumerator', - 'label': 'Location', - 'name': 'location_id', - 'value': default_value, - 'data': options - }] - - default_location_name = 'studio.central-storage-location' - default_location_label = 'Studio location' - default_location_description = ( - 'The studio central location where all components are ' - 'stored.' - ) - - if previous_step == 'configure_location': - configure_location = configuration.get( - 'configure_location' - ) - - if configure_location: - try: - existing_location = self.session.query( - u'Location where name is "{0}"'.format( - configure_location.get('location_name') - ) - ).first() - except UnicodeEncodeError: - next_step = 'configure_location' - warning_message += ( - '**The location name contains non-ascii characters. ' - 'Please change the name and try again.**' - ) - values = configuration['select_location'] - else: - if existing_location: - next_step = 'configure_location' - warning_message += ( - u'**There is already a location named {0}. ' - u'Please change the name and try again.**'.format( - configure_location.get('location_name') - ) - ) - values = configuration['select_location'] - - if ( - not configure_location.get('location_name') or - not configure_location.get('location_label') or - not configure_location.get('location_description') - ): - next_step = 'configure_location' - warning_message += ( - '**Location name, label and description cannot ' - 'be empty.**' - ) - values = configuration['select_location'] - - if next_step == 'configure_location': - # Populate form with previous configuration. - default_location_label = configure_location['location_label'] - default_location_name = configure_location['location_name'] - default_location_description = ( - configure_location['location_description'] - ) - - if next_step == 'configure_location': - - if values.get('location_id') == 'create_new_location': - # Add options to create a new location. 
- items = [{ - 'type': 'label', - 'value': ( - '#Create location#\n' - 'Here you will create a new location to be used ' - 'with your new Storage scenario. For your ' - 'convenience we have already filled in some default ' - 'values. If this is the first time you are configuring ' - 'a storage scenario in ftrack we recommend that you ' - 'stick with these settings.' - ) - }, { - 'label': 'Label', - 'name': 'location_label', - 'value': default_location_label, - 'type': 'text' - }, { - 'label': 'Name', - 'name': 'location_name', - 'value': default_location_name, - 'type': 'text' - }, { - 'label': 'Description', - 'name': 'location_description', - 'value': default_location_description, - 'type': 'text' - }] - - else: - # The user selected an existing location. Move on to next - # step. - next_step = 'select_mount_point' - - if next_step == 'select_structure': - # There is only one structure to choose from, go to next step. - next_step = 'select_mount_point' - # items = [ - # { - # 'type': 'label', - # 'value': ( - # '#Select structure#\n' - # 'Select which structure to use with your location. ' - # 'The structure is used to generate the filesystem ' - # 'path for components that are added to this location.' - # ) - # }, - # { - # 'type': 'enumerator', - # 'label': 'Structure', - # 'name': 'structure_id', - # 'value': 'standard', - # 'data': [{ - # 'label': 'Standard', - # 'value': 'standard', - # 'description': ( - # 'The Standard structure uses the names in your ' - # 'project structure to determine the path.' - # ) - # }] - # } - # ] - - if next_step == 'select_mount_point': - try: - mount_points = ( - self.existing_centralized_storage_configuration['accessor']['mount_points'] - ) - except (KeyError, TypeError): - mount_points = dict() - - items = [ - { - 'value': ( - '#Mount points#\n' - 'Set mount points for your centralized storage ' - 'location. 
For the location to work as expected each ' - 'platform that you intend to use must have the ' - 'corresponding mount point set and the storage must ' - 'be accessible. If not set correctly files will not be ' - 'saved or read.' - ), - 'type': 'label' - }, { - 'type': 'text', - 'label': 'Linux', - 'name': 'linux_mount_point', - 'empty_text': 'E.g. /usr/mnt/MyStorage ...', - 'value': mount_points.get('linux', '') - }, { - 'type': 'text', - 'label': 'OS X', - 'name': 'osx_mount_point', - 'empty_text': 'E.g. /Volumes/MyStorage ...', - 'value': mount_points.get('osx', '') - }, { - 'type': 'text', - 'label': 'Windows', - 'name': 'windows_mount_point', - 'empty_text': 'E.g. \\\\MyStorage ...', - 'value': mount_points.get('windows', '') - } - ] - - if next_step == 'confirm_summary': - items = [{ - 'type': 'label', - 'value': self._get_confirmation_text(configuration) - }] - state = 'confirm' - - if next_step == 'save_configuration': - mount_points = configuration['select_mount_point'] - select_location = configuration['select_location'] - - if select_location['location_id'] == 'create_new_location': - configure_location = configuration['configure_location'] - location = self.session.create( - 'Location', - { - 'name': configure_location['location_name'], - 'label': configure_location['location_label'], - 'description': ( - configure_location['location_description'] - ) - } - ) - - else: - location = self.session.query( - 'Location where id is "{0}"'.format( - select_location['location_id'] - ) - ).one() - - setting_value = json.dumps({ - 'scenario': scenario_name, - 'data': { - 'location_id': location['id'], - 'location_name': location['name'], - 'accessor': { - 'mount_points': { - 'linux': mount_points['linux_mount_point'], - 'osx': mount_points['osx_mount_point'], - 'windows': mount_points['windows_mount_point'] - } - } - } - }) - - self.storage_scenario['value'] = setting_value - self.session.commit() - - # Broadcast an event that storage scenario has been configured. 
- event = ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.configure-done' - ) - self.session.event_hub.publish(event) - - items = [{ - 'type': 'label', - 'value': ( - '#Done!#\n' - 'Your storage scenario is now configured and ready ' - 'to use. **Note that you may have to restart Connect and ' - 'other applications to start using it.**' - ) - }] - state = 'done' - - if warning_message: - items.insert(0, { - 'type': 'label', - 'value': warning_message - }) - - items.append({ - 'type': 'hidden', - 'value': configuration, - 'name': 'configuration' - }) - items.append({ - 'type': 'hidden', - 'value': next_step, - 'name': 'step' - }) - - return { - 'items': items, - 'state': state - } - - def discover_centralized_scenario(self, event): - '''Return action discover dictionary for *event*.''' - return { - 'id': scenario_name, - 'name': 'Centralized storage scenario', - 'description': ( - '(Recommended) centralized storage scenario where all files ' - 'are kept on a storage that is mounted and available to ' - 'everyone in the studio.' - ) - } - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - #: TODO: Move these to a separate function. - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.discover ' - 'and source.user.username="{0}"' - ).format( - session.api_user - ), - self.discover_centralized_scenario - ) - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.configure ' - 'and data.scenario_id="{0}" ' - 'and source.user.username="{1}"' - ).format( - scenario_name, - session.api_user - ), - self.configure_scenario - ) - - -class ActivateCentralizedStorageScenario(object): - '''Activate a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - def activate(self, event): - '''Activate scenario in *event*.''' - storage_scenario = event['data']['storage_scenario'] - - try: - location_data = storage_scenario['data'] - location_name = location_data['location_name'] - location_id = location_data['location_id'] - mount_points = location_data['accessor']['mount_points'] - - except KeyError: - error_message = ( - 'Unable to read storage scenario data.' - ) - self.logger.error(L(error_message)) - raise ftrack_api.exception.LocationError( - 'Unable to configure location based on scenario.' - ) - - else: - location = self.session.create( - 'Location', - data=dict( - name=location_name, - id=location_id - ), - reconstructing=True - ) - - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - else: - raise ftrack_api.exception.LocationError( - ( - 'Unable to find accessor prefix for platform {0}.' - ).format(sys.platform) - ) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=prefix - ) - location.structure = _standard.StandardStructure() - location.priority = 1 - self.logger.info(L( - u'Storage scenario activated. Configured {0!r} from ' - u'{1!r}', - location, storage_scenario - )) - - def _verify_startup(self, event): - '''Verify the storage scenario configuration.''' - storage_scenario = event['data']['storage_scenario'] - location_data = storage_scenario['data'] - mount_points = location_data['accessor']['mount_points'] - - prefix = None - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - - if not prefix: - return ( - u'The storage scenario has not been configured for your ' - u'operating system. ftrack may not be able to ' - u'store and track files correctly.' 
- ) - - if not os.path.isdir(prefix): - return ( - unicode( - 'The path {0} does not exist. ftrack may not be able to ' - 'store and track files correctly. \n\nIf the storage is ' - 'newly setup you may want to create necessary folder ' - 'structures. If the storage is a network drive you should ' - 'make sure that it is mounted correctly.' - ).format(prefix) - ) - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - session.event_hub.subscribe( - ( - 'topic=ftrack.storage-scenario.activate ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self.activate - ) - - # Listen to verify startup event from ftrack connect to allow responding - # with a message if something is not working correctly with this - # scenario that the user should be notified about. - self.session.event_hub.subscribe( - ( - 'topic=ftrack.connect.verify-startup ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self._verify_startup - ) - -def register(session): - '''Register storage scenario.''' - scenario = ActivateCentralizedStorageScenario() - scenario.register(session) - - -def register_configuration(session): - '''Register storage scenario.''' - scenario = ConfigureCentralizedStorageScenario() - scenario.register(session) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py deleted file mode 100644 index 9f79a1850c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py +++ /dev/null @@ -1,534 +0,0 @@ -# pragma: no cover -# Module 'ntpath' -- common operations on WinNT/Win95 pathnames -"""Common pathname manipulations, WindowsNT/95 version. - -Instead of importing this module directly, import os and refer to this -module as os.path. 
-""" - -import os -import sys -import stat -import genericpath -import warnings - -from genericpath import * - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","walk","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames","relpath"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '\\' -pathsep = ';' -altsep = '/' -defpath = '.;C:\\bin' -if 'ce' in sys.builtin_module_names: - defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all slashes into backslashes.""" - return s.replace("/", "\\").lower() - - -# Return whether a path is absolute. -# Trivial in Posix, harder on the Mac or MS-DOS. -# For DOS it is absolute if it starts with a slash or backslash (current -# volume), or if a pathname after the volume letter and colon / UNC resource -# starts with a slash or backslash. - -def isabs(s): - """Test whether a path is absolute""" - s = splitdrive(s)[1] - return s != '' and s[:1] in '/\\' - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting "\\" as needed. - If any component is an absolute path, all previous path components - will be discarded.""" - path = a - for b in p: - b_wins = 0 # set to 1 iff b makes path irrelevant - if path == "": - b_wins = 1 - - elif isabs(b): - # This probably wipes out path so far. 
However, it's more - # complicated if path begins with a drive letter: - # 1. join('c:', '/a') == 'c:/a' - # 2. join('c:/', '/a') == 'c:/a' - # But - # 3. join('c:/a', '/b') == '/b' - # 4. join('c:', 'd:/') = 'd:/' - # 5. join('c:/', 'd:/') = 'd:/' - if path[1:2] != ":" or b[1:2] == ":": - # Path doesn't start with a drive letter, or cases 4 and 5. - b_wins = 1 - - # Else path has a drive letter, and b doesn't but is absolute. - elif len(path) > 3 or (len(path) == 3 and - path[-1] not in "/\\"): - # case 3 - b_wins = 1 - - if b_wins: - path = b - else: - # Join, and ensure there's a separator. - assert len(path) > 0 - if path[-1] in "/\\": - if b and b[0] in "/\\": - path += b[1:] - else: - path += b - elif path[-1] == ":": - path += b - elif b: - if b[0] in "/\\": - path += b - else: - path += "\\" + b - else: - # path is not empty and does not end with a backslash, - # but b is empty; since, e.g., split('a/') produces - # ('a', ''), it's best if join() adds a backslash in - # this case. - path += '\\' - - return path - - -# Split a path in a drive specification (a drive letter followed by a -# colon) and the path specification. -# It is always true that drivespec + pathspec == p -def splitdrive(p): - """Split a pathname into drive and path specifiers. Returns a 2-tuple -"(drive,path)"; either part may be empty""" - if p[1:2] == ':': - return p[0:2], p[2:] - return '', p - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. - If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '//' or firstTwo == '\\\\': - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... 
- # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('\\', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('\\', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Split a path in head (everything up to the last '/') and tail (the -# rest). After the trailing '/' is stripped, the invariant -# join(head, tail) == p holds. -# The resulting head won't end in '/' unless it is the root. - -def split(p): - """Split a pathname. - - Return tuple (head, tail) where tail is everything after the final slash. - Either part may be empty.""" - - d, p = splitdrive(p) - # set i to index beyond p's last slash - i = len(p) - while i and p[i-1] not in '/\\': - i = i - 1 - head, tail = p[:i], p[i:] # now tail has no slashes - # remove trailing slashes from head, unless it's all slashes - head2 = head - while head2 and head2[-1] in '/\\': - head2 = head2[:-1] - head = head2 or head - return d + head, tail - - -# Split a path in root and extension. -# The extension is everything starting at the last dot in the last -# pathname component; the root is everything before that. -# It is always true that root + ext == p. - -def splitext(p): - return genericpath._splitext(p, sep, altsep, extsep) -splitext.__doc__ = genericpath._splitext.__doc__ - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - -# Is a path a symbolic link? -# This will always return false on systems where posix.lstat doesn't exist. - -def islink(path): - """Test for symbolic link. - On WindowsNT/95 and OS/2 always returns false - """ - return False - -# alias exists to lexists -lexists = exists - -# Is a path a mount point? 
Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. - -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Directory tree walk. -# For each directory under top (including top itself, but excluding -# '.' and '..'), func(arg, dirname, filenames) is called, where -# dirname is the name of the directory and filenames is the list -# of files (and subdirectories etc.) in the directory. -# The func may modify the filenames list, to implement a filter, -# or to impose a different order of visiting. - -def walk(top, func, arg): - """Directory tree walk with callback function. - - For each directory in the directory tree rooted at top (including top - itself, but excluding '.' and '..'), call func(arg, dirname, fnames). - dirname is the name of the directory, and fnames a list of the names of - the files and subdirectories in dirname (excluding '.' and '..'). func - may modify the fnames list in-place (e.g. via del or slice assignment), - and walk will only recurse into the subdirectories whose names remain in - fnames; this can be used to implement a filter, or to impose a specific - order of visiting. No semantics are defined for, or required of, arg, - beyond that arg is always passed to func. It can be used, e.g., to pass - a filename pattern, or a mutable object designed to accumulate - statistics. Passing None for arg is common.""" - warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", - stacklevel=2) - try: - names = os.listdir(top) - except os.error: - return - func(arg, top, names) - for name in names: - name = join(top, name) - if isdir(name): - walk(name, func, arg) - - -# Expand paths beginning with '~' or '~user'. -# '~' means $HOME; '~user' means that user's home directory. 
-# If the path doesn't begin with '~', or if the user or $HOME is unknown, -# the path is returned unchanged (leaving error reporting to whatever -# function is called with the expanded path as argument). -# See also module 'glob' for expansion of *, ? and [...] in pathnames. -# (A function should also be defined to do full *sh-style environment -# variable expansion.) - -def expanduser(path): - """Expand ~ and ~user constructs. - - If user or $HOME is unknown, do nothing.""" - if path[:1] != '~': - return path - i, n = 1, len(path) - while i < n and path[i] not in '/\\': - i = i + 1 - - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif not 'HOMEPATH' in os.environ: - return path - else: - try: - drive = os.environ['HOMEDRIVE'] - except KeyError: - drive = '' - userhome = join(drive, os.environ['HOMEPATH']) - - if i != 1: #~user - userhome = join(dirname(userhome), path[1:i]) - - return userhome + path[i:] - - -# Expand paths containing shell variable substitutions. -# The following rules apply: -# - no expansion within single quotes -# - '$$' is translated into '$' -# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% -# - ${varname} is accepted. -# - $varname is accepted. -# - %varname% is accepted. -# - varnames can be made out of letters, digits and the characters '_-' -# (though is not verified in the ${varname} and %varname% cases) -# XXX With COMMAND.COM you can use any characters in a variable name, -# XXX except '^|<>='. - -def expandvars(path): - """Expand shell variables of the forms $var, ${var} and %var%. 
- - Unknown variables are left unchanged.""" - if '$' not in path and '%' not in path: - return path - import string - varchars = string.ascii_letters + string.digits + '_-' - res = '' - index = 0 - pathlen = len(path) - while index < pathlen: - c = path[index] - if c == '\'': # no expansion within single quotes - path = path[index + 1:] - pathlen = len(path) - try: - index = path.index('\'') - res = res + '\'' + path[:index + 1] - except ValueError: - res = res + path - index = pathlen - 1 - elif c == '%': # variable or '%' - if path[index + 1:index + 2] == '%': - res = res + c - index = index + 1 - else: - path = path[index+1:] - pathlen = len(path) - try: - index = path.index('%') - except ValueError: - res = res + '%' + path - index = pathlen - 1 - else: - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '%' + var + '%' - elif c == '$': # variable or '$$' - if path[index + 1:index + 2] == '$': - res = res + c - index = index + 1 - elif path[index + 1:index + 2] == '{': - path = path[index+2:] - pathlen = len(path) - try: - index = path.index('}') - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '${' + var + '}' - except ValueError: - res = res + '${' + path - index = pathlen - 1 - else: - var = '' - index = index + 1 - c = path[index:index + 1] - while c != '' and c in varchars: - var = var + c - index = index + 1 - c = path[index:index + 1] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '$' + var - if c != '': - index = index - 1 - else: - res = res + c - index = index + 1 - return res - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. -# Previously, this function also truncated pathnames to 8+3 format, -# but as this module is called "ntpath", that's obviously wrong! 
- -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - # Preserve unicode (if path is unicode) - backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.') - if path.startswith(('\\\\.\\', '\\\\?\\')): - # in the case of paths with these prefixes: - # \\.\ -> device names - # \\?\ -> literal paths - # do not do any normalization, but return the path unchanged - return path - path = path.replace("/", "\\") - prefix, path = splitdrive(path) - # We need to be careful here. If the prefix is empty, and the path starts - # with a backslash, it could either be an absolute path on the current - # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It - # is therefore imperative NOT to collapse multiple backslashes blindly in - # that case. - # The code below preserves multiple backslashes when there is no drive - # letter. This means that the invalid filename \\\a\b is preserved - # unchanged, where a\\\b is normalised to a\b. It's not clear that there - # is any better behaviour for such edge cases. - if prefix == '': - # No drive letter - preserve initial backslashes - while path[:1] == "\\": - prefix = prefix + backslash - path = path[1:] - else: - # We have a drive letter - collapse initial backslashes - if path.startswith("\\"): - prefix = prefix + backslash - path = path.lstrip("\\") - comps = path.split("\\") - i = 0 - while i < len(comps): - if comps[i] in ('.', ''): - del comps[i] - elif comps[i] == '..': - if i > 0 and comps[i-1] != '..': - del comps[i-1:i+1] - i -= 1 - elif i == 0 and prefix.endswith("\\"): - del comps[i] - else: - i += 1 - else: - i += 1 - # If the path is now empty, substitute '.' - if not prefix and not comps: - comps.append(dot) - return prefix + backslash.join(comps) - - -# Return an absolute path. 
-try: - from nt import _getfullpathname - -except ImportError: # not running on Windows - mock up something sensible - def abspath(path): - """Return the absolute version of a path.""" - if not isabs(path): - if isinstance(path, unicode): - cwd = os.getcwdu() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - -else: # use native Windows method on Windows - def abspath(path): - """Return the absolute version of a path.""" - - if path: # Empty path must return current working directory. - try: - path = _getfullpathname(path) - except WindowsError: - pass # Bad path - return unchanged. - elif isinstance(path, unicode): - path = os.getcwdu() - else: - path = os.getcwd() - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath -# Win9x family and earlier have no Unicode filename support. -supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and - sys.getwindowsversion()[3] >= 2) - -def _abspath_split(path): - abs = abspath(normpath(path)) - prefix, rest = splitunc(abs) - is_unc = bool(prefix) - if not is_unc: - prefix, rest = splitdrive(abs) - return is_unc, prefix, [x for x in rest.split(sep) if x] - -def relpath(path, start=curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_is_unc, start_prefix, start_list = _abspath_split(start) - path_is_unc, path_prefix, path_list = _abspath_split(path) - - if path_is_unc ^ start_is_unc: - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - if path_prefix.lower() != start_prefix.lower(): - if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) - # Work out how much of the filepath is shared by start and path. 
- i = 0 - for e1, e2 in zip(start_list, path_list): - if e1.lower() != e2.lower(): - break - i += 1 - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -try: - # The genericpath.isdir implementation uses os.stat and checks the mode - # attribute to tell whether or not the path is a directory. - # This is overkill on Windows - just pass the path to GetFileAttributes - # and check the attribute from there. - from nt import _isdir as isdir -except ImportError: - # Use genericpath.isdir as imported above. - pass diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py deleted file mode 100644 index aa1a8c4aba..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '1.8.2' diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py deleted file mode 100644 index 69cc6f4b4f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Yet another backport of WeakMethod for Python 2.7. -Changes include removing exception chaining and adding args to super() calls. - -Copyright (c) 2001-2019 Python Software Foundation.All rights reserved. - -Full license available in LICENSE.python. -""" -from weakref import ref - - -class WeakMethod(ref): - """ - A custom `weakref.ref` subclass which simulates a weak reference to - a bound method, working around the lifetime problem of bound methods. 
- """ - - __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" - - def __new__(cls, meth, callback=None): - try: - obj = meth.__self__ - func = meth.__func__ - except AttributeError: - raise TypeError( - "argument should be a bound method, not {}".format(type(meth)) - ) - - def _cb(arg): - # The self-weakref trick is needed to avoid creating a reference - # cycle. - self = self_wr() - if self._alive: - self._alive = False - if callback is not None: - callback(self) - - self = ref.__new__(cls, obj, _cb) - self._func_ref = ref(func, _cb) - self._meth_type = type(meth) - self._alive = True - self_wr = ref(self) - return self - - def __call__(self): - obj = super(WeakMethod, self).__call__() - func = self._func_ref() - if obj is None or func is None: - return None - return self._meth_type(func, obj) - - def __eq__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is other - return ref.__eq__(self, other) and self._func_ref == other._func_ref - return NotImplemented - - def __ne__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is not other - return ref.__ne__(self, other) or self._func_ref != other._func_ref - return NotImplemented - - __hash__ = ref.__hash__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py deleted file mode 100644 index 6aa9cf0281..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py +++ /dev/null @@ -1,124 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import abc - -import ftrack_api.exception - - -class Accessor(object): - '''Provide data access to a location. - - A location represents a specific storage, but access to that storage may - vary. For example, both local filesystem and FTP access may be possible for - the same storage. An accessor implements these different ways of accessing - the same data location. - - As different accessors may access the same location, only part of a data - path that is commonly understood may be stored in the database. The format - of this path should be a contract between the accessors that require access - to the same location and is left as an implementation detail. As such, this - system provides no guarantee that two different accessors can provide access - to the same location, though this is a clear goal. The path stored centrally - is referred to as the **resource identifier** and should be used when - calling any of the accessor methods that accept a *resource_identifier* - argument. - - ''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise location accessor.''' - super(Accessor, self).__init__() - - @abc.abstractmethod - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. 
- - ''' - - @abc.abstractmethod - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - - @abc.abstractmethod - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - - @abc.abstractmethod - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - - @abc.abstractmethod - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - - @abc.abstractmethod - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' - - @abc.abstractmethod - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - - @abc.abstractmethod - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - Should silently ignore existing containers and not recreate them. - - ''' - - @abc.abstractmethod - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` - if container of *resource_identifier* could not be determined. - - ''' - - def remove_container(self, resource_identifier): # pragma: no cover - '''Remove container at *resource_identifier*.''' - return self.remove(resource_identifier) - - def get_filesystem_path(self, resource_identifier): # pragma: no cover - '''Return filesystem path for *resource_identifier*. 
- - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - filesystem path could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving filesystem paths is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_filesystem_path', resource_identifier=resource_identifier - ) - - def get_url(self, resource_identifier): - '''Return URL for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_url', resource_identifier=resource_identifier - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py deleted file mode 100644 index 65769603f6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py +++ /dev/null @@ -1,250 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -import sys -import errno -import contextlib - -import ftrack_api._python_ntpath as ntpath -import ftrack_api.accessor.base -import ftrack_api.data -from ftrack_api.exception import ( - AccessorFilesystemPathError, - AccessorUnsupportedOperationError, - AccessorResourceNotFoundError, - AccessorOperationFailedError, - AccessorPermissionDeniedError, - AccessorResourceInvalidError, - AccessorContainerNotEmptyError, - AccessorParentResourceNotFoundError -) - - -class DiskAccessor(ftrack_api.accessor.base.Accessor): - '''Provide disk access to a location. 
- - Expect resource identifiers to refer to relative filesystem paths. - - ''' - - def __init__(self, prefix, **kw): - '''Initialise location accessor. - - *prefix* specifies the base folder for the disk based structure and - will be prepended to any path. It should be specified in the syntax of - the current OS. - - ''' - if prefix: - prefix = os.path.expanduser(os.path.expandvars(prefix)) - prefix = os.path.abspath(prefix) - self.prefix = prefix - - super(DiskAccessor, self).__init__(**kw) - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='list', resource_identifier=resource_identifier - ): - listing = [] - for entry in os.listdir(filesystem_path): - listing.append(os.path.join(resource_identifier, entry)) - - return listing - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.exists(filesystem_path) - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isfile(filesystem_path) - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isdir(filesystem_path) - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise 
AccessorUnsupportedOperationError(operation='is_sequence') - - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='open', resource_identifier=resource_identifier - ): - data = ftrack_api.data.File(filesystem_path, mode) - - return data - - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - if self.is_file(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.remove(filesystem_path) - - elif self.is_container(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.rmdir(filesystem_path) - - else: - raise AccessorResourceNotFoundError( - resource_identifier=resource_identifier - ) - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='makeContainer', resource_identifier=resource_identifier - ): - try: - if recursive: - os.makedirs(filesystem_path) - else: - try: - os.mkdir(filesystem_path) - except OSError as error: - if error.errno == errno.ENOENT: - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier - ) - else: - raise - - except OSError, error: - if error.errno != errno.EEXIST: - raise - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. 
- - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if - container of *resource_identifier* could not be determined. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - container = os.path.dirname(filesystem_path) - - if self.prefix: - if not container.startswith(self.prefix): - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier, - message='Could not determine container for ' - '{resource_identifier} as container falls outside ' - 'of configured prefix.' - ) - - # Convert container filesystem path into resource identifier. - container = container[len(self.prefix):] - if ntpath.isabs(container): - # Ensure that resulting path is relative by stripping any - # leftover prefixed slashes from string. - # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the - # result will be 'foo/bar'. - container = container.lstrip('\\/') - - return container - - def get_filesystem_path(self, resource_identifier): - '''Return filesystem path for *resource_identifier*. - - For example:: - - >>> accessor = DiskAccessor('my.location', '/mountpoint') - >>> print accessor.get_filesystem_path('test.txt') - /mountpoint/test.txt - >>> print accessor.get_filesystem_path('/mountpoint/test.txt') - /mountpoint/test.txt - - Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem - path could not be determined from *resource_identifier*. - - ''' - filesystem_path = resource_identifier - if filesystem_path: - filesystem_path = os.path.normpath(filesystem_path) - - if self.prefix: - if not os.path.isabs(filesystem_path): - filesystem_path = os.path.normpath( - os.path.join(self.prefix, filesystem_path) - ) - - if not filesystem_path.startswith(self.prefix): - raise AccessorFilesystemPathError( - resource_identifier=resource_identifier, - message='Could not determine access path for ' - 'resource_identifier outside of configured prefix: ' - '{resource_identifier}.' 
- ) - - return filesystem_path - - -@contextlib.contextmanager -def error_handler(**kw): - '''Conform raised OSError/IOError exception to appropriate FTrack error.''' - try: - yield - - except (OSError, IOError) as error: - (exception_type, exception_value, traceback) = sys.exc_info() - kw.setdefault('error', error) - - error_code = getattr(error, 'errno') - if not error_code: - raise AccessorOperationFailedError(**kw), None, traceback - - if error_code == errno.ENOENT: - raise AccessorResourceNotFoundError(**kw), None, traceback - - elif error_code == errno.EPERM: - raise AccessorPermissionDeniedError(**kw), None, traceback - - elif error_code == errno.ENOTEMPTY: - raise AccessorContainerNotEmptyError(**kw), None, traceback - - elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): - raise AccessorResourceInvalidError(**kw), None, traceback - - else: - raise AccessorOperationFailedError(**kw), None, traceback - - except Exception: - raise diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py deleted file mode 100644 index 9c735084d5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py +++ /dev/null @@ -1,240 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import hashlib -import base64 -import json - -import requests - -from .base import Accessor -from ..data import String -import ftrack_api.exception -import ftrack_api.symbol - - -class ServerFile(String): - '''Representation of a server file.''' - - def __init__(self, resource_identifier, session, mode='rb'): - '''Initialise file.''' - self.mode = mode - self.resource_identifier = resource_identifier - self._session = session - self._has_read = False - - super(ServerFile, self).__init__() - - def flush(self): - '''Flush 
all changes.''' - super(ServerFile, self).flush() - - if self.mode == 'wb': - self._write() - - def read(self, limit=None): - '''Read file.''' - if not self._has_read: - self._read() - self._has_read = True - - return super(ServerFile, self).read(limit) - - def _read(self): - '''Read all remote content from key into wrapped_file.''' - position = self.tell() - self.seek(0) - - response = requests.get( - '{0}/component/get'.format(self._session.server_url), - params={ - 'id': self.resource_identifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - }, - stream=True - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to read data: {0}.'.format(error) - ) - - for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): - self.wrapped_file.write(block) - - self.flush() - self.seek(position) - - def _write(self): - '''Write current data to remote key.''' - position = self.tell() - self.seek(0) - - # Retrieve component from cache to construct a filename. - component = self._session.get('FileComponent', self.resource_identifier) - if not component: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Unable to retrieve component with id: {0}.'.format( - self.resource_identifier - ) - ) - - # Construct a name from component name and file_type. - name = component['name'] - if component['file_type']: - name = u'{0}.{1}'.format( - name, - component['file_type'].lstrip('.') - ) - - try: - metadata = self._session.get_upload_metadata( - component_id=self.resource_identifier, - file_name=name, - file_size=self._get_size(), - checksum=self._compute_checksum() - ) - except Exception as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to get put metadata: {0}.'.format(error) - ) - - # Ensure at beginning of file before put. - self.seek(0) - - # Put the file based on the metadata. 
- response = requests.put( - metadata['url'], - data=self.wrapped_file, - headers=metadata['headers'] - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to put file to server: {0}.'.format(error) - ) - - self.seek(position) - - def _get_size(self): - '''Return size of file in bytes.''' - position = self.tell() - self.seek(0, os.SEEK_END) - length = self.tell() - self.seek(position) - return length - - def _compute_checksum(self): - '''Return checksum for file.''' - fp = self.wrapped_file - buf_size = ftrack_api.symbol.CHUNK_SIZE - hash_obj = hashlib.md5() - spos = fp.tell() - - s = fp.read(buf_size) - while s: - hash_obj.update(s) - s = fp.read(buf_size) - - base64_digest = base64.encodestring(hash_obj.digest()) - if base64_digest[-1] == '\n': - base64_digest = base64_digest[0:-1] - - fp.seek(spos) - return base64_digest - - -class _ServerAccessor(Accessor): - '''Provide server location access.''' - - def __init__(self, session, **kw): - '''Initialise location accessor.''' - super(_ServerAccessor, self).__init__(**kw) - - self._session = session - - def open(self, resource_identifier, mode='rb'): - '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' - return ServerFile(resource_identifier, session=self._session, mode=mode) - - def remove(self, resourceIdentifier): - '''Remove *resourceIdentifier*.''' - response = requests.get( - '{0}/component/remove'.format(self._session.server_url), - params={ - 'id': resourceIdentifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - } - ) - if response.status_code != 200: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to remove file.' 
- ) - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*.''' - return None - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*.''' - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container.''' - raise NotImplementedError() - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - return False - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - raise NotImplementedError() - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - raise NotImplementedError() - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise NotImplementedError() - - def get_url(self, resource_identifier): - '''Return url for *resource_identifier*.''' - url_string = ( - u'{url}/component/get?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - return url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - - def get_thumbnail_url(self, resource_identifier, size=None): - '''Return thumbnail url for *resource_identifier*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. 
- ''' - url_string = ( - u'{url}/component/thumbnail?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - url = url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - if size: - url += u'&size={0}'.format(size) - - return url diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py deleted file mode 100644 index 719b612f39..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py +++ /dev/null @@ -1,707 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import collections -import copy -import logging -import functools - -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.collection -import ftrack_api.inspection -import ftrack_api.operation - -logger = logging.getLogger( - __name__ -) - - -def merge_references(function): - '''Decorator to handle merging of references / collections.''' - - @functools.wraps(function) - def get_value(attribute, entity): - '''Merge the attribute with the local cache.''' - - if attribute.name not in entity._inflated: - # Only merge on first access to avoid - # inflating them multiple times. - - logger.debug( - 'Merging potential new data into attached ' - 'entity for attribute {0}.'.format( - attribute.name - ) - ) - - # Local attributes. 
- local_value = attribute.get_local_value(entity) - if isinstance( - local_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging local value for attribute {0}.'.format(attribute) - ) - - merged_local_value = entity.session._merge( - local_value, merged=dict() - ) - - if merged_local_value is not local_value: - with entity.session.operation_recording(False): - attribute.set_local_value(entity, merged_local_value) - - # Remote attributes. - remote_value = attribute.get_remote_value(entity) - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - merged_remote_value = entity.session._merge( - remote_value, merged=dict() - ) - - if merged_remote_value is not remote_value: - attribute.set_remote_value(entity, merged_remote_value) - - entity._inflated.add( - attribute.name - ) - - return function( - attribute, entity - ) - - return get_value - - -class Attributes(object): - '''Collection of properties accessible by name.''' - - def __init__(self, attributes=None): - super(Attributes, self).__init__() - self._data = dict() - if attributes is not None: - for attribute in attributes: - self.add(attribute) - - def add(self, attribute): - '''Add *attribute*.''' - existing = self._data.get(attribute.name, None) - if existing: - raise ftrack_api.exception.NotUniqueError( - 'Attribute with name {0} already added as {1}' - .format(attribute.name, existing) - ) - - self._data[attribute.name] = attribute - - def remove(self, attribute): - '''Remove attribute.''' - self._data.pop(attribute.name) - - def get(self, name): - '''Return attribute by *name*. - - If no attribute matches *name* then return None. 
- - ''' - return self._data.get(name, None) - - def keys(self): - '''Return list of attribute names.''' - return self._data.keys() - - def __contains__(self, item): - '''Return whether *item* present.''' - if not isinstance(item, Attribute): - return False - - return item.name in self._data - - def __iter__(self): - '''Return iterator over attributes.''' - return self._data.itervalues() - - def __len__(self): - '''Return count of attributes.''' - return len(self._data) - - -class Attribute(object): - '''A name and value pair persisted remotely.''' - - def __init__( - self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True, - computed=False - ): - '''Initialise attribute with *name*. - - *default_value* represents the default value for the attribute. It may - be a callable. It is not used within the attribute when providing - values, but instead exists for other parts of the system to reference. - - If *mutable* is set to False then the local value of the attribute on an - entity can only be set when both the existing local and remote values - are :attr:`ftrack_api.symbol.NOT_SET`. The exception to this is when the - target value is also :attr:`ftrack_api.symbol.NOT_SET`. - - If *computed* is set to True the value is a remote side computed value - and should not be long-term cached. 
- - ''' - super(Attribute, self).__init__() - self._name = name - self._mutable = mutable - self._computed = computed - self.default_value = default_value - - self._local_key = 'local' - self._remote_key = 'remote' - - def __repr__(self): - '''Return representation of entity.''' - return '<{0}.{1}({2}) object at {3}>'.format( - self.__module__, - self.__class__.__name__, - self.name, - id(self) - ) - - def get_entity_storage(self, entity): - '''Return attribute storage on *entity* creating if missing.''' - storage_key = '_ftrack_attribute_storage' - storage = getattr(entity, storage_key, None) - if storage is None: - storage = collections.defaultdict( - lambda: - { - self._local_key: ftrack_api.symbol.NOT_SET, - self._remote_key: ftrack_api.symbol.NOT_SET - } - ) - setattr(entity, storage_key, storage) - - return storage - - @property - def name(self): - '''Return name.''' - return self._name - - @property - def mutable(self): - '''Return whether attribute is mutable.''' - return self._mutable - - @property - def computed(self): - '''Return whether attribute is computed.''' - return self._computed - - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - ''' - value = self.get_local_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - value = self.get_remote_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - if not entity.session.auto_populate: - return value - - self.populate_remote_value(entity) - return self.get_remote_value(entity) - - def get_local_value(self, entity): - '''Return locally set value for *entity*.''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._local_key] - - def get_remote_value(self, entity): - '''Return remote value for *entity*. - - .. 
note:: - - Only return locally stored remote value, do not fetch from remote. - - ''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._remote_key] - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if ( - not self.mutable - and self.is_set(entity) - and value is not ftrack_api.symbol.NOT_SET - ): - raise ftrack_api.exception.ImmutableAttributeError(self) - - old_value = self.get_local_value(entity) - - storage = self.get_entity_storage(entity) - storage[self.name][self._local_key] = value - - # Record operation. - if entity.session.record_operations: - entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity), - self.name, - old_value, - value - ) - ) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - storage = self.get_entity_storage(entity) - storage[self.name][self._remote_key] = value - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*.''' - entity.session.populate([entity], self.name) - - def is_modified(self, entity): - '''Return whether local value set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - return ( - local_value is not ftrack_api.symbol.NOT_SET - and local_value != remote_value - ) - - def is_set(self, entity): - '''Return whether a value is set for *entity*.''' - return any([ - self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, - self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET - ]) - - -class ScalarAttribute(Attribute): - '''Represent a scalar value.''' - - def __init__(self, name, data_type, **kw): - '''Initialise property.''' - super(ScalarAttribute, self).__init__(name, **kw) - self.data_type = data_type - - -class ReferenceAttribute(Attribute): - '''Reference another entity.''' - - def __init__(self, name, entity_type, **kw): - '''Initialise property.''' - super(ReferenceAttribute, self).__init__(name, **kw) - self.entity_type = entity_type - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*. - - As attribute references another entity, use that entity's configured - default projections to auto populate useful attributes when loading. - - ''' - reference_entity_type = entity.session.types[self.entity_type] - default_projections = reference_entity_type.default_projections - - projections = [] - if default_projections: - for projection in default_projections: - projections.append('{0}.{1}'.format(self.name, projection)) - else: - projections.append(self.name) - - entity.session.populate([entity], ', '.join(projections)) - - def is_modified(self, entity): - '''Return whether a local value has been set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - - if local_value is ftrack_api.symbol.NOT_SET: - return False - - if remote_value is ftrack_api.symbol.NOT_SET: - return True - - if ( - ftrack_api.inspection.identity(local_value) - != ftrack_api.inspection.identity(remote_value) - ): - return True - - return False - - - @merge_references - def get_value(self, entity): - return super(ReferenceAttribute, self).get_value( - entity - ) - -class AbstractCollectionAttribute(Attribute): - '''Base class for collection attributes.''' - - #: Collection class used by attribute. - collection_class = None - - @merge_references - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - .. note:: - - As value is a collection that is mutable, will transfer a remote - value into the local value on access if no local value currently - set. - - ''' - super(AbstractCollectionAttribute, self).get_value(entity) - - # Conditionally, copy remote value into local value so that it can be - # mutated without side effects. - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - if ( - local_value is ftrack_api.symbol.NOT_SET - and isinstance(remote_value, self.collection_class) - ): - try: - with entity.session.operation_recording(False): - self.set_local_value(entity, copy.copy(remote_value)) - except ftrack_api.exception.ImmutableAttributeError: - pass - - value = self.get_local_value(entity) - - # If the local value is still not set then attempt to set it with a - # suitable placeholder collection so that the caller can interact with - # the collection using its normal interface. This is required for a - # newly created entity for example. 
It *could* be done as a simple - # default value, but that would incur cost for every collection even - # when they are not modified before commit. - if value is ftrack_api.symbol.NOT_SET: - try: - with entity.session.operation_recording(False): - self.set_local_value( - entity, - # None should be treated as empty collection. - None - ) - except ftrack_api.exception.ImmutableAttributeError: - pass - - return self.get_local_value(entity) - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = self.mutable - - super(AbstractCollectionAttribute, self).set_local_value(entity, value) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = False - - super(AbstractCollectionAttribute, self).set_remote_value(entity, value) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to appropriate collection instance for *entity*. - - .. note:: - - If *value* is None then return a suitable empty collection. - - ''' - raise NotImplementedError() - - -class CollectionAttribute(AbstractCollectionAttribute): - '''Represent a collection of other entities.''' - - #: Collection class used by attribute. 
- collection_class = ftrack_api.collection.Collection - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to a Collection instance on *entity*.''' - - if not isinstance(value, ftrack_api.collection.Collection): - - if value is None: - value = ftrack_api.collection.Collection(entity, self) - - elif isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute' - ) - - return value - - -class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped key, value collection of entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy - - def __init__( - self, name, creator, key_attribute, value_attribute, **kw - ): - '''Initialise attribute with *name*. - - *creator* should be a function that accepts a dictionary of data and - is used by the referenced collection to create new entities in the - collection. - - *key_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'key' of the dictionary. - - *value_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'value' of the dictionary. 
- - ''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - - super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.KeyValueMappedCollectionProxy - ): - - if value is None: - value = ftrack_api.collection.KeyValueMappedCollectionProxy( - ftrack_api.collection.Collection(entity, self), - self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, (list, ftrack_api.collection.Collection)): - - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.KeyValueMappedCollectionProxy( - value, self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.KeyValueMappedCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a KeyValueMappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). 
- collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.KeyValueMappedCollectionProxy( - collection, self.creator, - self.key_attribute, self.value_attribute - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value - - -class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped custom attribute collection of entities.''' - - #: Collection class used by attribute. - collection_class = ( - ftrack_api.collection.CustomAttributeCollectionProxy - ) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.CustomAttributeCollectionProxy - ): - - if value is None: - value = ftrack_api.collection.CustomAttributeCollectionProxy( - ftrack_api.collection.Collection(entity, self) - ) - - elif isinstance(value, (list, ftrack_api.collection.Collection)): - - # Why are we creating a new if it is a list? This will cause - # any merge to create a new proxy and collection. - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.CustomAttributeCollectionProxy( - value - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. 
This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.CustomAttributeCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a MappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). - collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.CustomAttributeCollectionProxy( - collection - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py deleted file mode 100644 index 49456dc2d7..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py +++ /dev/null @@ -1,579 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''Caching framework. 
- -Defines a standardised :class:`Cache` interface for storing data against -specific keys. Key generation is also standardised using a :class:`KeyMaker` -interface. - -Combining a Cache and KeyMaker allows for memoisation of function calls with -respect to the arguments used by using a :class:`Memoiser`. - -As a convenience a simple :func:`memoise` decorator is included for quick -memoisation of function using a global cache and standard key maker. - -''' - -import collections -import functools -import abc -import copy -import inspect -import re -import anydbm -import contextlib -try: - import cPickle as pickle -except ImportError: # pragma: no cover - import pickle - -import ftrack_api.inspection -import ftrack_api.symbol - - -class Cache(object): - '''Cache interface. - - Derive from this to define concrete cache implementations. A cache is - centered around the concept of key:value pairings where the key is unique - across the cache. - - ''' - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - @abc.abstractmethod - def set(self, key, value): - '''Set *value* for *key*.''' - - @abc.abstractmethod - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - raise NotImplementedError() # pragma: no cover - - def values(self): - '''Return values for current keys.''' - values = [] - for key in self.keys(): - try: - value = self.get(key) - except KeyError: - continue - else: - values.append(value) - - return values - - def clear(self, pattern=None): - '''Remove all keys matching *pattern*. - - *pattern* should be a regular expression string. - - If *pattern* is None then all keys will be removed. 
- - ''' - if pattern is not None: - pattern = re.compile(pattern) - - for key in self.keys(): - if pattern is not None: - if not pattern.search(key): - continue - - try: - self.remove(key) - except KeyError: - pass - - -class ProxyCache(Cache): - '''Proxy another cache.''' - - def __init__(self, proxied): - '''Initialise cache with *proxied* cache instance.''' - self.proxied = proxied - super(ProxyCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.get(key) - - def set(self, key, value): - '''Set *value* for *key*.''' - return self.proxied.set(key, value) - - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.remove(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self.proxied.keys() - - -class LayeredCache(Cache): - '''Layered cache.''' - - def __init__(self, caches): - '''Initialise cache with *caches*.''' - super(LayeredCache, self).__init__() - self.caches = caches - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - Attempt to retrieve from cache layers in turn, starting with shallowest. - If value retrieved, then also set the value in each higher level cache - up from where retrieved. - - ''' - target_caches = [] - value = ftrack_api.symbol.NOT_SET - - for cache in self.caches: - try: - value = cache.get(key) - except KeyError: - target_caches.append(cache) - continue - else: - break - - if value is ftrack_api.symbol.NOT_SET: - raise KeyError(key) - - # Set value on all higher level caches. 
- for cache in target_caches: - cache.set(key, value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - for cache in self.caches: - cache.set(key, value) - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found in any layer. - - ''' - removed = False - for cache in self.caches: - try: - cache.remove(key) - except KeyError: - pass - else: - removed = True - - if not removed: - raise KeyError(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - keys = [] - for cache in self.caches: - keys.extend(cache.keys()) - - return list(set(keys)) - - -class MemoryCache(Cache): - '''Memory based cache.''' - - def __init__(self): - '''Initialise cache.''' - self._cache = {} - super(MemoryCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self._cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - self._cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - del self._cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self._cache.keys() - - -class FileCache(Cache): - '''File based cache that uses :mod:`anydbm` module. - - .. note:: - - No locking of the underlying file is performed. - - ''' - - def __init__(self, path): - '''Initialise cache at *path*.''' - self.path = path - - # Initialise cache. 
- cache = anydbm.open(self.path, 'c') - cache.close() - - super(FileCache, self).__init__() - - @contextlib.contextmanager - def _database(self): - '''Yield opened database file.''' - cache = anydbm.open(self.path, 'w') - try: - yield cache - finally: - cache.close() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - return cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - with self._database() as cache: - cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - del cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - with self._database() as cache: - return cache.keys() - - -class SerialisedCache(ProxyCache): - '''Proxied cache that stores values as serialised data.''' - - def __init__(self, proxied, encode=None, decode=None): - '''Initialise cache with *encode* and *decode* callables. - - *proxied* is the underlying cache to use for storage. - - ''' - self.encode = encode - self.decode = decode - super(SerialisedCache, self).__init__(proxied) - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. 
- - ''' - value = super(SerialisedCache, self).get(key) - if self.decode: - value = self.decode(value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - if self.encode: - value = self.encode(value) - - super(SerialisedCache, self).set(key, value) - - -class KeyMaker(object): - '''Generate unique keys.''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise key maker.''' - super(KeyMaker, self).__init__() - self.item_separator = '' - - def key(self, *items): - '''Return key for *items*.''' - keys = [] - for item in items: - keys.append(self._key(item)) - - return self.item_separator.join(keys) - - @abc.abstractmethod - def _key(self, obj): - '''Return key for *obj*.''' - - -class StringKeyMaker(KeyMaker): - '''Generate string key.''' - - def _key(self, obj): - '''Return key for *obj*.''' - return str(obj) - - -class ObjectKeyMaker(KeyMaker): - '''Generate unique keys for objects.''' - - def __init__(self): - '''Initialise key maker.''' - super(ObjectKeyMaker, self).__init__() - self.item_separator = '\0' - self.mapping_identifier = '\1' - self.mapping_pair_separator = '\2' - self.iterable_identifier = '\3' - self.name_identifier = '\4' - - def _key(self, item): - '''Return key for *item*. - - Returned key will be a pickle like string representing the *item*. This - allows for typically non-hashable objects to be used in key generation - (such as dictionaries). - - If *item* is iterable then each item in it shall also be passed to this - method to ensure correct key generation. - - Special markers are used to distinguish handling of specific cases in - order to ensure uniqueness of key corresponds directly to *item*. - - Example:: - - >>> key_maker = ObjectKeyMaker() - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... return x + y - ... 
- >>> key_maker.key(add, (1, 2)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' - >>> key_maker.key(add, (1, 3)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' - - ''' - # TODO: Consider using a more robust and comprehensive solution such as - # dill (https://github.com/uqfoundation/dill). - if isinstance(item, collections.Iterable): - if isinstance(item, basestring): - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - if isinstance(item, collections.Mapping): - contents = self.item_separator.join([ - ( - self._key(key) + - self.mapping_pair_separator + - self._key(value) - ) - for key, value in sorted(item.items()) - ]) - return ( - self.mapping_identifier + - contents + - self.mapping_identifier - ) - - else: - contents = self.item_separator.join([ - self._key(item) for item in item - ]) - return ( - self.iterable_identifier + - contents + - self.iterable_identifier - ) - - elif inspect.ismethod(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.im_class.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isfunction(item) or inspect.isclass(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isbuiltin(item): - return self.name_identifier + item.__name__ - - else: - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - -class Memoiser(object): - '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. - - Example:: - - >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... print 'Called' - ... return x + y - ... - >>> memoiser.call(add, (1, 2), {}) - Called - >>> memoiser.call(add, (1, 2), {}) - >>> memoiser.call(add, (1, 3), {}) - Called - - ''' - - def __init__(self, cache=None, key_maker=None, return_copies=True): - '''Initialise with *cache* and *key_maker* to use. 
- - If *cache* is not specified a default :class:`MemoryCache` will be - used. Similarly, if *key_maker* is not specified a default - :class:`ObjectKeyMaker` will be used. - - If *return_copies* is True then all results returned from the cache will - be deep copies to avoid indirect mutation of cached values. - - ''' - self.cache = cache - if self.cache is None: - self.cache = MemoryCache() - - self.key_maker = key_maker - if self.key_maker is None: - self.key_maker = ObjectKeyMaker() - - self.return_copies = return_copies - super(Memoiser, self).__init__() - - def call(self, function, args=None, kw=None): - '''Call *function* with *args* and *kw* and return result. - - If *function* was previously called with exactly the same arguments - then return cached result if available. - - Store result for call in cache. - - ''' - if args is None: - args = () - - if kw is None: - kw = {} - - # Support arguments being passed as positionals or keywords. - arguments = inspect.getcallargs(function, *args, **kw) - - key = self.key_maker.key(function, arguments) - try: - value = self.cache.get(key) - - except KeyError: - value = function(*args, **kw) - self.cache.set(key, value) - - # If requested, deep copy value to return in order to avoid cached value - # being inadvertently altered by the caller. - if self.return_copies: - value = copy.deepcopy(value) - - return value - - -def memoise_decorator(memoiser): - '''Decorator to memoise function calls using *memoiser*.''' - def outer(function): - - @functools.wraps(function) - def inner(*args, **kw): - return memoiser.call(function, args, kw) - - return inner - - return outer - - -#: Default memoiser. -memoiser = Memoiser() - -#: Default memoise decorator using standard cache and key maker. 
-memoise = memoise_decorator(memoiser) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py deleted file mode 100644 index 91655a7b02..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py +++ /dev/null @@ -1,507 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging - -import collections -import copy - -import ftrack_api.exception -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.operation -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Collection(collections.MutableSequence): - '''A collection of entities.''' - - def __init__(self, entity, attribute, mutable=True, data=None): - '''Initialise collection.''' - self.entity = entity - self.attribute = attribute - self._data = [] - self._identities = set() - - # Set initial dataset. - # Note: For initialisation, immutability is deferred till after initial - # population as otherwise there would be no public way to initialise an - # immutable collection. The reason self._data is not just set directly - # is to ensure other logic can be applied without special handling. - self.mutable = True - try: - if data is None: - data = [] - - with self.entity.session.operation_recording(False): - self.extend(data) - finally: - self.mutable = mutable - - def _identity_key(self, entity): - '''Return identity key for *entity*.''' - return str(ftrack_api.inspection.identity(entity)) - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying data store. 
- - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance._data = copy.copy(self._data) - copied_instance._identities = copy.copy(self._identities) - - return copied_instance - - def _notify(self, old_value): - '''Notify about modification.''' - # Record operation. - if self.entity.session.record_operations: - self.entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - self.entity.entity_type, - ftrack_api.inspection.primary_key(self.entity), - self.attribute.name, - old_value, - self - ) - ) - - def insert(self, index, item): - '''Insert *item* at *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - if item in self: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - self._data.insert(index, item) - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __contains__(self, value): - '''Return whether *value* present in collection.''' - return self._identity_key(value) in self._identities - - def __getitem__(self, index): - '''Return item at *index*.''' - return self._data[index] - - def __setitem__(self, index, item): - '''Set *item* against *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - try: - existing_index = self.index(item) - except ValueError: - pass - else: - if index != existing_index: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - try: - existing_item = self._data[index] - except IndexError: - pass - else: - self._identities.remove(self._identity_key(existing_item)) - - self._data[index] = item - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __delitem__(self, index): - '''Remove item at *index*.''' - if not self.mutable: - raise 
ftrack_api.exception.ImmutableCollectionError(self) - - old_value = copy.copy(self) - item = self._data[index] - del self._data[index] - self._identities.remove(self._identity_key(item)) - self._notify(old_value) - - def __len__(self): - '''Return count of items.''' - return len(self._data) - - def __eq__(self, other): - '''Return whether this collection is equal to *other*.''' - if not isinstance(other, Collection): - return False - - return sorted(self._identities) == sorted(other._identities) - - def __ne__(self, other): - '''Return whether this collection is not equal to *other*.''' - return not self == other - - -class MappedCollectionProxy(collections.MutableMapping): - '''Common base class for mapped collection of entities.''' - - def __init__(self, collection): - '''Initialise proxy for *collection*.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.collection = collection - super(MappedCollectionProxy, self).__init__() - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying collection. - - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance.collection = copy.copy(self.collection) - - return copied_instance - - @property - def mutable(self): - '''Return whether collection is mutable.''' - return self.collection.mutable - - @mutable.setter - def mutable(self, value): - '''Set whether collection is mutable to *value*.''' - self.collection.mutable = value - - @property - def attribute(self): - '''Return attribute bound to.''' - return self.collection.attribute - - @attribute.setter - def attribute(self, value): - '''Set bound attribute to *value*.''' - self.collection.attribute = value - - -class KeyValueMappedCollectionProxy(MappedCollectionProxy): - '''A mapped collection of key, value entities. 
- - Proxy a standard :class:`Collection` as a mapping where certain attributes - from the entities in the collection are mapped to key, value pairs. - - For example:: - - >>> collection = [Metadata(key='foo', value='bar'), ...] - >>> mapped = KeyValueMappedCollectionProxy( - ... collection, create_metadata, - ... key_attribute='key', value_attribute='value' - ... ) - >>> print mapped['foo'] - 'bar' - >>> mapped['bam'] = 'biz' - >>> print mapped.collection[-1] - Metadata(key='bam', value='biz') - - ''' - - def __init__( - self, collection, creator, key_attribute, value_attribute - ): - '''Initialise collection.''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - super(KeyValueMappedCollectionProxy, self).__init__(collection) - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - for entity in self.collection: - if entity[self.key_attribute] == key: - return entity - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - return entity[self.value_attribute] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - try: - entity = self._get_entity_by_key(key) - except KeyError: - data = { - self.key_attribute: key, - self.value_attribute: value - } - entity = self.creator(self, data) - - if ( - ftrack_api.inspection.state(entity) is - ftrack_api.symbol.CREATED - ): - # Persisting this entity will be handled here, record the - # operation. - self.collection.append(entity) - - else: - # The entity is created and persisted separately by the - # creator. Do not record this operation. - with self.collection.entity.session.operation_recording(False): - # Do not record this operation since it will trigger - # redudant and potentially failing operations. 
- self.collection.append(entity) - - else: - entity[self.value_attribute] = value - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. - - ''' - for index, entity in enumerate(self.collection): - if entity[self.key_attribute] == key: - break - else: - raise KeyError(key) - - del self.collection[index] - entity.session.delete(entity) - - def __iter__(self): - '''Iterate over all keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return len(keys) - - -class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for session.''' - - def _key(self, obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - session = obj.get('session') - if session is not None: - # Key by session only. - return str(id(session)) - - return str(obj) - - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. 
- - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type from ' - 'CustomAttributeConfiguration' - ).all() - - -class CustomAttributeCollectionProxy(MappedCollectionProxy): - '''A mapped collection of custom attribute value entities.''' - - def __init__( - self, collection - ): - '''Initialise collection.''' - self.key_attribute = 'configuration_id' - self.value_attribute = 'value' - super(CustomAttributeCollectionProxy, self).__init__(collection) - - def _get_entity_configurations(self): - '''Return all configurations for current collection entity.''' - entity = self.collection.entity - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Asset': - project_id = entity['parent']['project_id'] - entity_type = 'asset' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. 
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - def _get_keys(self): - '''Return a list of all keys.''' - keys = [] - for configuration in self._get_entity_configurations(): - keys.append(configuration['key']) - - return keys - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - configuration_id = self.get_configuration_id_from_key(key) - for entity in self.collection: - if entity[self.key_attribute] == configuration_id: - return entity - - return None - - def get_configuration_id_from_key(self, key): - '''Return id of configuration with matching *key*. - - Raise :exc:`KeyError` if no configuration with matching *key* found. - - ''' - for configuration in self._get_entity_configurations(): - if key == configuration['key']: - return configuration['id'] - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - - if entity: - return entity[self.value_attribute] - - for configuration in self._get_entity_configurations(): - if configuration['key'] == key: - return configuration['default'] - - raise KeyError(key) - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - custom_attribute_value[self.value_attribute] = value - else: - entity = self.collection.entity - session = entity.session - data = { - self.key_attribute: self.get_configuration_id_from_key(key), - self.value_attribute: value, - 'entity_id': entity['id'] - } - - # Make sure to use the currently active collection. This is - # necessary since a merge might have replaced the current one. - self.collection.entity['custom_attributes'].collection.append( - session.create('CustomAttributeValue', data) - ) - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. 
- - ''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - index = self.collection.index(custom_attribute_value) - del self.collection[index] - - custom_attribute_value.session.delete(custom_attribute_value) - else: - self.logger.warning(L( - 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', - key, self.collection.entity - )) - - def __eq__(self, collection): - '''Return True if *collection* equals proxy collection.''' - if collection is ftrack_api.symbol.NOT_SET: - return False - - return collection.collection == self.collection - - def __iter__(self): - '''Iterate over all keys.''' - keys = self._get_keys() - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = self._get_keys() - return len(keys) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py deleted file mode 100644 index 1802e380c0..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py +++ /dev/null @@ -1,119 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -from abc import ABCMeta, abstractmethod -import tempfile - - -class Data(object): - '''File-like object for manipulating data.''' - - __metaclass__ = ABCMeta - - def __init__(self): - '''Initialise data access.''' - self.closed = False - - @abstractmethod - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - @abstractmethod - def write(self, content): - '''Write content at current position.''' - - def flush(self): - '''Flush buffers ensuring data written.''' - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*. 
- - The *whence* argument is optional and defaults to os.SEEK_SET or 0 - (absolute file positioning); other values are os.SEEK_CUR or 1 - (seek relative to the current position) and os.SEEK_END or 2 - (seek relative to the file's end). - - ''' - raise NotImplementedError('Seek not supported.') - - def tell(self): - '''Return current position of internal pointer.''' - raise NotImplementedError('Tell not supported.') - - def close(self): - '''Flush buffers and prevent further access.''' - self.flush() - self.closed = True - - -class FileWrapper(Data): - '''Data wrapper for Python file objects.''' - - def __init__(self, wrapped_file): - '''Initialise access to *wrapped_file*.''' - self.wrapped_file = wrapped_file - self._read_since_last_write = False - super(FileWrapper, self).__init__() - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - self._read_since_last_write = True - - if limit is None: - limit = -1 - - return self.wrapped_file.read(limit) - - def write(self, content): - '''Write content at current position.''' - if self._read_since_last_write: - # Windows requires a seek before switching from read to write. 
- self.seek(self.tell()) - - self.wrapped_file.write(content) - self._read_since_last_write = False - - def flush(self): - '''Flush buffers ensuring data written.''' - super(FileWrapper, self).flush() - if hasattr(self.wrapped_file, 'flush'): - self.wrapped_file.flush() - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*.''' - self.wrapped_file.seek(offset, whence) - - def tell(self): - '''Return current position of internal pointer.''' - return self.wrapped_file.tell() - - def close(self): - '''Flush buffers and prevent further access.''' - if not self.closed: - super(FileWrapper, self).close() - if hasattr(self.wrapped_file, 'close'): - self.wrapped_file.close() - - -class File(FileWrapper): - '''Data wrapper accepting filepath.''' - - def __init__(self, path, mode='rb'): - '''Open file at *path* with *mode*.''' - file_object = open(path, mode) - super(File, self).__init__(file_object) - - -class String(FileWrapper): - '''Data wrapper using TemporaryFile instance.''' - - def __init__(self, content=None): - '''Initialise data with *content*.''' - super(String, self).__init__( - tempfile.TemporaryFile() - ) - - if content is not None: - self.wrapped_file.write(content) - self.wrapped_file.seek(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py deleted file mode 100644 index 1d452f2828..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py deleted file mode 100644 index 859d94e436..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class AssetVersion(ftrack_api.entity.base.Entity): - '''Represent asset version.''' - - def create_component( - self, path, data=None, location=None - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). This version is - automatically set as the component's version. - - If *location* is specified then automatically add component to that - location. - - ''' - if data is None: - data = {} - - data.pop('version_id', None) - data['version'] = self - - return self.session.create_component(path, data=data, location=location) - - def encode_media(self, media, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible, and will be - set as the version's thumbnail. - - The new components will automatically be associated with the version. - A server version of 3.3.32 or higher is required for this to function - properly. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. 
- ''' - return self.session.encode_media( - media, version_id=self['id'], keep_original=keep_original - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py deleted file mode 100644 index f5a1a3cec3..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py +++ /dev/null @@ -1,402 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import abc -import collections -import logging - -import ftrack_api.symbol -import ftrack_api.attribute -import ftrack_api.inspection -import ftrack_api.exception -import ftrack_api.operation -from ftrack_api.logging import LazyLogMessage as L - - -class DynamicEntityTypeMetaclass(abc.ABCMeta): - '''Custom metaclass to customise representation of dynamic classes. - - .. note:: - - Derive from same metaclass as derived bases to avoid conflicts. - - ''' - def __repr__(self): - '''Return representation of class.''' - return ''.format(self.__name__) - - -class Entity(collections.MutableMapping): - '''Base class for all entities.''' - - __metaclass__ = DynamicEntityTypeMetaclass - - entity_type = 'Entity' - attributes = None - primary_key_attributes = None - default_projections = None - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
- - ''' - super(Entity, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.session = session - self._inflated = set() - - if data is None: - data = {} - - self.logger.debug(L( - '{0} entity from {1!r}.', - ('Reconstructing' if reconstructing else 'Constructing'), data - )) - - self._ignore_data_keys = ['__entity_type__'] - if not reconstructing: - self._construct(data) - else: - self._reconstruct(data) - - def _construct(self, data): - '''Construct from *data*.''' - # Suspend operation recording so that all modifications can be applied - # in single create operation. In addition, recording a modification - # operation requires a primary key which may not be available yet. - - relational_attributes = dict() - - with self.session.operation_recording(False): - # Set defaults for any unset local attributes. - for attribute in self.__class__.attributes: - if attribute.name not in data: - default_value = attribute.default_value - if callable(default_value): - default_value = default_value(self) - - attribute.set_local_value(self, default_value) - - - # Data represents locally set values. - for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such ' - 'attribute found on entity {1!r}.', key, self - )) - continue - - if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - relational_attributes.setdefault( - attribute, value - ) - - else: - attribute.set_local_value(self, value) - - # Record create operation. - # Note: As this operation is recorded *before* any Session.merge takes - # place there is the possibility that the operation will hold references - # to outdated data in entity_data. However, this would be unusual in - # that it would mean the same new entity was created twice and only one - # altered. 
Conversely, if this operation were recorded *after* - # Session.merge took place, any cache would not be able to determine - # the status of the entity, which could be important if the cache should - # not store newly created entities that have not yet been persisted. Out - # of these two 'evils' this approach is deemed the lesser at this time. - # A third, more involved, approach to satisfy both might be to record - # the operation with a PENDING entity_data value and then update with - # merged values post merge. - if self.session.record_operations: - entity_data = {} - - # Lower level API used here to avoid including any empty - # collections that are automatically generated on access. - for attribute in self.attributes: - value = attribute.get_local_value(self) - if value is not ftrack_api.symbol.NOT_SET: - entity_data[attribute.name] = value - - self.session.recorded_operations.push( - ftrack_api.operation.CreateEntityOperation( - self.entity_type, - ftrack_api.inspection.primary_key(self), - entity_data - ) - ) - - for attribute, value in relational_attributes.items(): - # Finally we set values for "relational" attributes, we need - # to do this at the end in order to get the create operations - # in the correct order as the newly created attributes might - # contain references to the newly created entity. - - attribute.set_local_value( - self, value - ) - - def _reconstruct(self, data): - '''Reconstruct from *data*.''' - # Data represents remote values. 
- for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such attribute ' - 'found on entity {1!r}.', key, self - )) - continue - - attribute.set_remote_value(self, value) - - def __repr__(self): - '''Return representation of instance.''' - return ''.format( - self.__class__.__name__, id(self) - ) - - def __str__(self): - '''Return string representation of instance.''' - with self.session.auto_populating(False): - primary_key = ['Unknown'] - try: - primary_key = ftrack_api.inspection.primary_key(self).values() - except KeyError: - pass - - return '<{0}({1})>'.format( - self.__class__.__name__, ', '.join(primary_key) - ) - - def __hash__(self): - '''Return hash representing instance.''' - return hash(str(ftrack_api.inspection.identity(self))) - - def __eq__(self, other): - '''Return whether *other* is equal to this instance. - - .. note:: - - Equality is determined by both instances having the same identity. - Values of attributes are not considered. - - ''' - try: - return ( - ftrack_api.inspection.identity(other) - == ftrack_api.inspection.identity(self) - ) - except (AttributeError, KeyError): - return False - - def __getitem__(self, key): - '''Return attribute value for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - return attribute.get_value(self) - - def __setitem__(self, key, value): - '''Set attribute *value* for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - attribute.set_local_value(self, value) - - def __delitem__(self, key): - '''Clear attribute value for *key*. - - .. note:: - - Will not remove the attribute, but instead clear any local value - and revert to the last known server value. 
- - ''' - attribute = self.__class__.attributes.get(key) - attribute.set_local_value(self, ftrack_api.symbol.NOT_SET) - - def __iter__(self): - '''Iterate over all attributes keys.''' - for attribute in self.__class__.attributes: - yield attribute.name - - def __len__(self): - '''Return count of attributes.''' - return len(self.__class__.attributes) - - def values(self): - '''Return list of values.''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).values() - - def items(self): - '''Return list of tuples of (key, value) pairs. - - .. note:: - - Will fetch all values from the server if not already fetched or set - locally. - - ''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).items() - - def clear(self): - '''Reset all locally modified attribute values.''' - for attribute in self: - del self[attribute] - - def merge(self, entity, merged=None): - '''Merge *entity* attribute values and other data into this entity. - - Only merge values from *entity* that are not - :attr:`ftrack_api.symbol.NOT_SET`. - - Return a list of changes made with each change being a mapping with - the keys: - - * type - Either 'remote_attribute', 'local_attribute' or 'property'. - * name - The name of the attribute / property modified. - * old_value - The previous value. - * new_value - The new merged value. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' - changes = [] - - # Attributes. - - # Prioritise by type so that scalar values are set first. This should - # guarantee that the attributes making up the identity of the entity - # are merged before merging any collections that may have references to - # this entity. 
- attributes = collections.deque() - for attribute in entity.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - attributes.appendleft(attribute) - else: - attributes.append(attribute) - - for other_attribute in attributes: - attribute = self.attributes.get(other_attribute.name) - - # Local attributes. - other_local_value = other_attribute.get_local_value(entity) - if other_local_value is not ftrack_api.symbol.NOT_SET: - local_value = attribute.get_local_value(self) - if local_value != other_local_value: - merged_local_value = self.session.merge( - other_local_value, merged=merged - ) - - attribute.set_local_value(self, merged_local_value) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_local_value - }) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # Remote attributes. - other_remote_value = other_attribute.get_remote_value(entity) - if other_remote_value is not ftrack_api.symbol.NOT_SET: - remote_value = attribute.get_remote_value(self) - if remote_value != other_remote_value: - merged_remote_value = self.session.merge( - other_remote_value, merged=merged - ) - - attribute.set_remote_value( - self, merged_remote_value - ) - - changes.append({ - 'type': 'remote_attribute', - 'name': attribute.name, - 'old_value': remote_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # We need to handle collections separately since - # they may store a local copy of the remote attribute - # even though it may not be modified. - if not isinstance( - attribute, ftrack_api.attribute.AbstractCollectionAttribute - ): - continue - - local_value = attribute.get_local_value( - self - ) - - # Populated but not modified, update it. 
- if ( - local_value is not ftrack_api.symbol.NOT_SET and - local_value == remote_value - ): - attribute.set_local_value( - self, merged_remote_value - ) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - return changes - - def _populate_unset_scalar_attributes(self): - '''Populate all unset scalar attributes in one query.''' - projections = [] - for attribute in self.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - if attribute.get_remote_value(self) is ftrack_api.symbol.NOT_SET: - projections.append(attribute.name) - - if projections: - self.session.populate([self], ', '.join(projections)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py deleted file mode 100644 index 9d59c4c051..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Component(ftrack_api.entity.base.Entity): - '''Represent a component.''' - - def get_availability(self, locations=None): - '''Return availability in *locations*. - - If *locations* is None, all known locations will be checked. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.session.get_component_availability( - self, locations=locations - ) - - -class CreateThumbnailMixin(object): - '''Mixin to add create_thumbnail method on entity class.''' - - def create_thumbnail(self, path, data=None): - '''Set entity thumbnail from *path*. 
- - Creates a thumbnail component using in the ftrack.server location - :meth:`Session.create_component - ` The thumbnail component - will be created using *data* if specified. If no component name is - given, `thumbnail` will be used. - - The file is expected to be of an appropriate size and valid file - type. - - .. note:: - - A :meth:`Session.commit` will be - automatically issued. - - ''' - if data is None: - data = {} - if not data.get('name'): - data['name'] = 'thumbnail' - - thumbnail_component = self.session.create_component( - path, data, location=None - ) - - origin_location = self.session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - server_location = self.session.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - server_location.add_component(thumbnail_component, [origin_location]) - - # TODO: This commit can be avoided by reordering the operations in - # this method so that the component is transferred to ftrack.server - # after the thumbnail has been set. - # - # There is currently a bug in the API backend, causing the operations - # to *some* times be ordered wrongly, where the update occurs before - # the component has been created, causing an integrity error. - # - # Once this issue has been resolved, this commit can be removed and - # and the update placed between component creation and registration. 
- self['thumbnail_id'] = thumbnail_component['id'] - self.session.commit() - - return thumbnail_component diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py deleted file mode 100644 index e925b70f5a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py +++ /dev/null @@ -1,435 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import uuid -import functools - -import ftrack_api.attribute -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.entity.component -import ftrack_api.entity.asset_version -import ftrack_api.entity.project_schema -import ftrack_api.entity.note -import ftrack_api.entity.job -import ftrack_api.entity.user -import ftrack_api.symbol -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Factory(object): - '''Entity class factory.''' - - def __init__(self): - '''Initialise factory.''' - super(Factory, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*. - - *bases* should be a list of bases to give the constructed class. If not - specified, default to :class:`ftrack_api.entity.base.Entity`. - - ''' - entity_type = schema['id'] - class_name = entity_type - - class_bases = bases - if class_bases is None: - class_bases = [ftrack_api.entity.base.Entity] - - class_namespace = dict() - - # Build attributes for class. 
- attributes = ftrack_api.attribute.Attributes() - immutable_properties = schema.get('immutable', []) - computed_properties = schema.get('computed', []) - for name, fragment in schema.get('properties', {}).items(): - mutable = name not in immutable_properties - computed = name in computed_properties - - default = fragment.get('default', ftrack_api.symbol.NOT_SET) - if default == '{uid}': - default = lambda instance: str(uuid.uuid4()) - - data_type = fragment.get('type', ftrack_api.symbol.NOT_SET) - - if data_type is not ftrack_api.symbol.NOT_SET: - - if data_type in ( - 'string', 'boolean', 'integer', 'number', 'variable', - 'object' - ): - # Basic scalar attribute. - if data_type == 'number': - data_type = 'float' - - if data_type == 'string': - data_format = fragment.get('format') - if data_format == 'date-time': - data_type = 'datetime' - - attribute = self.create_scalar_attribute( - class_name, name, mutable, computed, default, data_type - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'array': - attribute = self.create_collection_attribute( - class_name, name, mutable - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'mapped_array': - reference = fragment.get('items', {}).get('$ref') - if not reference: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_mapped_collection_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - else: - self.logger.debug(L( - 'Skipping {0}.{1} attribute with unrecognised data ' - 'type {2}', class_name, name, data_type - )) - else: - # Reference attribute. 
- reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET) - if reference is ftrack_api.symbol.NOT_SET: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_reference_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - default_projections = schema.get('default_projections', []) - - # Construct class. - class_namespace['entity_type'] = entity_type - class_namespace['attributes'] = attributes - class_namespace['primary_key_attributes'] = schema['primary_key'][:] - class_namespace['default_projections'] = default_projections - - cls = type( - str(class_name), # type doesn't accept unicode. - tuple(class_bases), - class_namespace - ) - - return cls - - def create_scalar_attribute( - self, class_name, name, mutable, computed, default, data_type - ): - '''Return appropriate scalar attribute instance.''' - return ftrack_api.attribute.ScalarAttribute( - name, data_type=data_type, default_value=default, mutable=mutable, - computed=computed - ) - - def create_reference_attribute(self, class_name, name, mutable, reference): - '''Return appropriate reference attribute instance.''' - return ftrack_api.attribute.ReferenceAttribute( - name, reference, mutable=mutable - ) - - def create_collection_attribute(self, class_name, name, mutable): - '''Return appropriate collection attribute instance.''' - return ftrack_api.attribute.CollectionAttribute( - name, mutable=mutable - ) - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has ' - 'no implementation defined for reference {2}.', - class_name, name, reference - )) - - -class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for defaults.''' - - def _key(self, 
obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - entity = obj.get('entity') - if entity is not None: - # Key by session only. - return str(id(entity.session)) - - return str(obj) - - -#: Memoiser for use with default callables that should only be called once per -# session. -memoise_defaults = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. - - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type, ' - 'is_hierarchical from CustomAttributeConfiguration' - ).all() - - -def _get_entity_configurations(entity): - '''Return all configurations for current collection entity.''' - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity.entity_type == 'Asset': - entity_type = 'asset' - - if entity.entity_type in ('TypedContextList', 'AssetVersionList'): - entity_type = 'list' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - 
entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - # The custom attribute configuration is for the target entity type. - configurations.append(configuration) - elif ( - entity_type in ('asset', 'assetversion', 'show', 'task') and - configuration['project_id'] in (project_id, None) and - configuration['is_hierarchical'] - ): - # The target entity type allows hierarchical attributes. - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. - return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - -class StandardFactory(Factory): - '''Standard entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - if not bases: - bases = [] - - extra_bases = [] - # Customise classes. - if schema['id'] == 'ProjectSchema': - extra_bases = [ftrack_api.entity.project_schema.ProjectSchema] - - elif schema['id'] == 'Location': - extra_bases = [ftrack_api.entity.location.Location] - - elif schema['id'] == 'AssetVersion': - extra_bases = [ftrack_api.entity.asset_version.AssetVersion] - - elif schema['id'].endswith('Component'): - extra_bases = [ftrack_api.entity.component.Component] - - elif schema['id'] == 'Note': - extra_bases = [ftrack_api.entity.note.Note] - - elif schema['id'] == 'Job': - extra_bases = [ftrack_api.entity.job.Job] - - elif schema['id'] == 'User': - extra_bases = [ftrack_api.entity.user.User] - - bases = extra_bases + bases - - # If bases does not contain any items, add the base entity class. - if not bases: - bases = [ftrack_api.entity.base.Entity] - - # Add mixins. 
- if 'notes' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.note.CreateNoteMixin - ) - - if 'thumbnail_id' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.component.CreateThumbnailMixin - ) - - cls = super(StandardFactory, self).create(schema, bases=bases) - - return cls - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - if reference == 'Metadata': - - def create_metadata(proxy, data, reference): - '''Return metadata for *data*.''' - entity = proxy.collection.entity - session = entity.session - data.update({ - 'parent_id': entity['id'], - 'parent_type': entity.entity_type - }) - return session.create(reference, data) - - creator = functools.partial( - create_metadata, reference=reference - ) - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - elif reference == 'CustomAttributeValue': - return ( - ftrack_api.attribute.CustomAttributeCollectionAttribute( - name, mutable=mutable - ) - ) - - elif reference.endswith('CustomAttributeValue'): - def creator(proxy, data): - '''Create a custom attribute based on *proxy* and *data*. - - Raise :py:exc:`KeyError` if related entity is already presisted - to the server. The proxy represents dense custom attribute - values and should never create new custom attribute values - through the proxy if entity exists on the remote. - - If the entity is not persisted the ususal - CustomAttributeValue items cannot be updated as - the related entity does not exist on remote and values not in - the proxy. Instead a CustomAttributeValue will - be reconstructed and an update operation will be recorded. 
- - ''' - entity = proxy.collection.entity - if ( - ftrack_api.inspection.state(entity) is not - ftrack_api.symbol.CREATED - ): - raise KeyError( - 'Custom attributes must be created explicitly for the ' - 'given entity type before being set.' - ) - - configuration = None - for candidate in _get_entity_configurations(entity): - if candidate['key'] == data['key']: - configuration = candidate - break - - if configuration is None: - raise ValueError( - u'No valid custom attribute for data {0!r} was found.' - .format(data) - ) - - create_data = dict(data.items()) - create_data['configuration_id'] = configuration['id'] - create_data['entity_id'] = entity['id'] - - session = entity.session - - # Create custom attribute by reconstructing it and update the - # value. This will prevent a create operation to be sent to the - # remote, as create operations for this entity type is not - # allowed. Instead an update operation will be recorded. - value = create_data.pop('value') - item = session.create( - reference, - create_data, - reconstructing=True - ) - - # Record update operation. 
- item['value'] = value - - return item - - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has no configuration ' - 'for reference {2}.', class_name, name, reference - )) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py deleted file mode 100644 index ae37922c51..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Job(ftrack_api.entity.base.Entity): - '''Represent job.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - To set a job `description` visible in the web interface, *data* can - contain a key called `data` which should be a JSON serialised - dictionary containing description:: - - data = { - 'status': 'running', - 'data': json.dumps(dict(description='My job description.')), - ... - } - - Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` - is set to something not equal to "api_job". - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - - if not reconstructing: - if data.get('type') not in ('api_job', None): - raise ValueError( - 'Invalid job type "{0}". 
Must be "api_job"'.format( - data.get('type') - ) - ) - - super(Job, self).__init__( - session, data=data, reconstructing=reconstructing - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py deleted file mode 100644 index 707f4fa652..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py +++ /dev/null @@ -1,733 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections -import functools - -import ftrack_api.entity.base -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.symbol -import ftrack_api.inspection -from ftrack_api.logging import LazyLogMessage as L - - -class Location(ftrack_api.entity.base.Entity): - '''Represent storage for components.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
- - ''' - self.accessor = ftrack_api.symbol.NOT_SET - self.structure = ftrack_api.symbol.NOT_SET - self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET - self.priority = 95 - super(Location, self).__init__( - session, data=data, reconstructing=reconstructing - ) - - def __str__(self): - '''Return string representation of instance.''' - representation = super(Location, self).__str__() - - with self.session.auto_populating(False): - name = self['name'] - if name is not ftrack_api.symbol.NOT_SET: - representation = representation.replace( - '(', '("{0}", '.format(name) - ) - - return representation - - def add_component(self, component, source, recursive=True): - '''Add *component* to location. - - *component* should be a single component instance. - - *source* should be an instance of another location that acts as the - source. - - Raise :exc:`ftrack_api.ComponentInLocationError` if the *component* - already exists in this location. - - Raise :exc:`ftrack_api.LocationError` if managing data and the generated - target structure for the component already exists according to the - accessor. This helps prevent potential data loss by avoiding overwriting - existing data. Note that there is a race condition between the check and - the write so if another process creates data at the same target during - that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component registration. - - ''' - return self.add_components( - [component], sources=source, recursive=recursive - ) - - def add_components(self, components, sources, recursive=True, _depth=0): - '''Add *components* to location. - - *components* should be a list of component instances. - - *sources* may be either a single source or a list of sources. If a list - then each corresponding index in *sources* will be used for each - *component*. A source should be an instance of another location. 
- - Raise :exc:`ftrack_api.exception.ComponentInLocationError` if any - component in *components* already exists in this location. In this case, - no changes will be made and no data transferred. - - Raise :exc:`ftrack_api.exception.LocationError` if managing data and the - generated target structure for the component already exists according to - the accessor. This helps prevent potential data loss by avoiding - overwriting existing data. Note that there is a race condition between - the check and the write so if another process creates data at the same - target during that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration. - - .. important:: - - If this location manages data then the *components* data is first - transferred to the target prescribed by the structure plugin, using - the configured accessor. If any component fails to transfer then - :exc:`ftrack_api.exception.LocationError` is raised and none of the - components are registered with the database. In this case it is left - up to the caller to decide and act on manually cleaning up any - transferred data using the 'transferred' detail in the raised error. - - Likewise, after transfer, all components are registered with the - database in a batch call. If any component causes an error then all - components will remain unregistered and - :exc:`ftrack_api.exception.LocationError` will be raised detailing - issues and any transferred data under the 'transferred' detail key. - - ''' - if ( - isinstance(sources, basestring) - or not isinstance(sources, collections.Sequence) - ): - sources = [sources] - - sources_count = len(sources) - if sources_count not in (1, len(components)): - raise ValueError( - 'sources must be either a single source or a sequence of ' - 'sources with indexes corresponding to passed components.' 
- ) - - if not self.structure: - raise ftrack_api.exception.LocationError( - 'No structure defined for location {location}.', - details=dict(location=self) - ) - - if not components: - # Optimisation: Return early when no components to process, such as - # when called recursively on an empty sequence component. - return - - indent = ' ' * (_depth + 1) - - # Check that components not already added to location. - existing_components = [] - try: - self.get_resource_identifiers(components) - - except ftrack_api.exception.ComponentNotInLocationError as error: - missing_component_ids = [ - missing_component['id'] - for missing_component in error.details['components'] - ] - for component in components: - if component['id'] not in missing_component_ids: - existing_components.append(component) - - else: - existing_components.extend(components) - - if existing_components: - # Some of the components already present in location. - raise ftrack_api.exception.ComponentInLocationError( - existing_components, self - ) - - # Attempt to transfer each component's data to this location. - transferred = [] - - for index, component in enumerate(components): - try: - # Determine appropriate source. - if sources_count == 1: - source = sources[0] - else: - source = sources[index] - - # Add members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.add_components( - component['members'], source, recursive=recursive, - _depth=(_depth + 1) - ) - - # Add component to this location. - context = self._get_context(component, source) - resource_identifier = self.structure.get_resource_identifier( - component, context - ) - - # Manage data transfer. 
- self._add_data(component, resource_identifier, source) - - except Exception as error: - raise ftrack_api.exception.LocationError( - 'Failed to transfer component {component} data to location ' - '{location} due to error:\n{indent}{error}\n{indent}' - 'Transferred component data that may require cleanup: ' - '{transferred}', - details=dict( - indent=indent, - component=component, - location=self, - error=error, - transferred=transferred - ) - ) - - else: - transferred.append((component, resource_identifier)) - - # Register all successfully transferred components. - components_to_register = [] - component_resource_identifiers = [] - - try: - for component, resource_identifier in transferred: - if self.resource_identifier_transformer: - # Optionally encode resource identifier before storing. - resource_identifier = ( - self.resource_identifier_transformer.encode( - resource_identifier, - context={'component': component} - ) - ) - - components_to_register.append(component) - component_resource_identifiers.append(resource_identifier) - - # Store component in location information. - self._register_components_in_location( - components, component_resource_identifiers - ) - - except Exception as error: - raise ftrack_api.exception.LocationError( - 'Failed to register components with location {location} due to ' - 'error:\n{indent}{error}\n{indent}Transferred component data ' - 'that may require cleanup: {transferred}', - details=dict( - indent=indent, - location=self, - error=error, - transferred=transferred - ) - ) - - # Publish events. 
- for component in components_to_register: - - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - - self.session.event_hub.publish( - ftrack_api.event.base.Event( - topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ), - ), - on_error='ignore' - ) - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - try: - source_resource_identifier = source.get_resource_identifier( - component - ) - except ftrack_api.exception.ComponentNotInLocationError: - pass - else: - context.update(dict( - source_resource_identifier=source_resource_identifier - )) - - return context - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - ''' - self.logger.debug(L( - 'Adding data for component {0!r} from source {1!r} to location ' - '{2!r} using resource identifier {3!r}.', - component, resource_identifier, source, self - )) - - # Read data from source and write to this location. - if not source.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for source location {location}.', - details=dict(location=source) - ) - - if not self.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for target location {location}.', - details=dict(location=self) - ) - - is_container = 'members' in component.keys() - if is_container: - # TODO: Improve this check. Possibly introduce an inspection - # such as ftrack_api.inspection.is_sequence_component. - if component.entity_type != 'SequenceComponent': - self.accessor.make_container(resource_identifier) - - else: - # Try to make container of component. 
- try: - container = self.accessor.get_container( - resource_identifier - ) - - except ftrack_api.exception.AccessorParentResourceNotFoundError: - # Container could not be retrieved from - # resource_identifier. Assume that there is no need to - # make the container. - pass - - else: - # No need for existence check as make_container does not - # recreate existing containers. - self.accessor.make_container(container) - - if self.accessor.exists(resource_identifier): - # Note: There is a race condition here in that the - # data may be added externally between the check for - # existence and the actual write which would still - # result in potential data loss. However, there is no - # good cross platform, cross accessor solution for this - # at present. - raise ftrack_api.exception.LocationError( - 'Cannot add component as data already exists and ' - 'overwriting could result in data loss. Computed ' - 'target resource identifier was: {0}' - .format(resource_identifier) - ) - - # Read and write data. - source_data = source.accessor.open( - source.get_resource_identifier(component), 'rb' - ) - target_data = self.accessor.open(resource_identifier, 'wb') - - # Read/write data in chunks to avoid reading all into memory at the - # same time. - chunked_read = functools.partial( - source_data.read, ftrack_api.symbol.CHUNK_SIZE - ) - for chunk in iter(chunked_read, ''): - target_data.write(chunk) - - target_data.close() - source_data.close() - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location against *resource_identifier*.''' - return self._register_components_in_location( - [component], [resource_identifier] - ) - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. 
- - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self.session.create( - 'ComponentLocation', data=dict( - component=component, - location=self, - resource_identifier=resource_identifier - ) - ) - - self.session.commit() - - def remove_component(self, component, recursive=True): - '''Remove *component* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component deregistration. - - ''' - return self.remove_components([component], recursive=recursive) - - def remove_components(self, components, recursive=True): - '''Remove *components* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components deregistration. - - ''' - for component in components: - # Check component is in this location - self.get_resource_identifier(component) - - # Remove members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.remove_components( - component['members'], recursive=recursive - ) - - # Remove data. - self._remove_data(component) - - # Remove metadata. - self._deregister_component_in_location(component) - - # Emit event. 
- component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - self.session.event_hub.publish( - ftrack_api.event.base.Event( - topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ) - ), - on_error='ignore' - ) - - def _remove_data(self, component): - '''Remove data associated with *component*.''' - if not self.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for location {location}.', - details=dict(location=self) - ) - - try: - self.accessor.remove( - self.get_resource_identifier(component) - ) - except ftrack_api.exception.AccessorResourceNotFoundError: - # If accessor does not support detecting sequence paths then an - # AccessorResourceNotFoundError is raised. For now, if the - # component type is 'SequenceComponent' assume success. - if not component.entity_type == 'SequenceComponent': - raise - - def _deregister_component_in_location(self, component): - '''Deregister *component* from location.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - - # TODO: Use session.get for optimisation. - component_location = self.session.query( - 'ComponentLocation where component_id is {0} and location_id is ' - '{1}'.format(component_id, location_id) - )[0] - - self.session.delete(component_location) - - # TODO: Should auto-commit here be optional? - self.session.commit() - - def get_component_availability(self, component): - '''Return availability of *component* in this location as a float.''' - return self.session.get_component_availability( - component, locations=[self] - )[self['id']] - - def get_component_availabilities(self, components): - '''Return availabilities of *components* in this location. - - Return list of float values corresponding to each component. 
- - ''' - return [ - availability[self['id']] for availability in - self.session.get_component_availabilities( - components, locations=[self] - ) - ] - - def get_resource_identifier(self, component): - '''Return resource identifier for *component*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the - component is not present in this location. - - ''' - return self.get_resource_identifiers([component])[0] - - def get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the components are not present in this location. - - ''' - resource_identifiers = self._get_resource_identifiers(components) - - # Optionally decode resource identifier. - if self.resource_identifier_transformer: - for index, resource_identifier in enumerate(resource_identifiers): - resource_identifiers[index] = ( - self.resource_identifier_transformer.decode( - resource_identifier, - context={'component': components[index]} - ) - ) - - return resource_identifiers - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the components are not present in this location. 
- - ''' - component_ids_mapping = collections.OrderedDict() - for component in components: - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - component_ids_mapping[component_id] = component - - component_locations = self.session.query( - 'select component_id, resource_identifier from ComponentLocation ' - 'where location_id is {0} and component_id in ({1})' - .format( - ftrack_api.inspection.primary_key(self).values()[0], - ', '.join(component_ids_mapping.keys()) - ) - ) - - resource_identifiers_map = {} - for component_location in component_locations: - resource_identifiers_map[component_location['component_id']] = ( - component_location['resource_identifier'] - ) - - resource_identifiers = [] - missing = [] - for component_id, component in component_ids_mapping.items(): - if component_id not in resource_identifiers_map: - missing.append(component) - else: - resource_identifiers.append( - resource_identifiers_map[component_id] - ) - - if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - def get_filesystem_path(self, component): - '''Return filesystem path for *component*.''' - return self.get_filesystem_paths([component])[0] - - def get_filesystem_paths(self, components): - '''Return filesystem paths for *components*.''' - resource_identifiers = self.get_resource_identifiers(components) - - filesystem_paths = [] - for resource_identifier in resource_identifiers: - filesystem_paths.append( - self.accessor.get_filesystem_path(resource_identifier) - ) - - return filesystem_paths - - def get_url(self, component): - '''Return url for *component*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *component* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. 
- ''' - resource_identifier = self.get_resource_identifier(component) - - return self.accessor.get_url(resource_identifier) - - -class MemoryLocationMixin(object): - '''Represent storage for components. - - Unlike a standard location, only store metadata for components in this - location in memory rather than persisting to the database. - - ''' - - @property - def _cache(self): - '''Return cache.''' - try: - cache = self.__cache - except AttributeError: - cache = self.__cache = {} - - return cache - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location with *resource_identifier*.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - self._cache[component_id] = resource_identifier - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. - - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self._register_component_in_location(component, resource_identifier) - - def _deregister_component_in_location(self, component): - '''Deregister *component* in location.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - self._cache.pop(component_id) - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the referenced components are not present in this location. 
- - ''' - resource_identifiers = [] - missing = [] - for component in components: - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - resource_identifier = self._cache.get(component_id) - if resource_identifier is None: - missing.append(component) - else: - resource_identifiers.append(resource_identifier) - - if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - -class UnmanagedLocationMixin(object): - '''Location that does not manage data.''' - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - Overridden to have no effect. - - ''' - return - - def _remove_data(self, component): - '''Remove data associated with *component*. - - Overridden to have no effect. - - ''' - return - - -class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): - '''Special origin location that expects sources as filepaths.''' - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - context.update(dict( - source_resource_identifier=source - )) - - return context - - -class ServerLocationMixin(object): - '''Location representing ftrack server. - - Adds convenience methods to location, specific to ftrack server. - ''' - def get_thumbnail_url(self, component, size=None): - '''Return thumbnail url for *component*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. 
- ''' - resource_identifier = self.get_resource_identifier(component) - return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py deleted file mode 100644 index f5a9403728..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py +++ /dev/null @@ -1,105 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import warnings - -import ftrack_api.entity.base - - -class Note(ftrack_api.entity.base.Entity): - '''Represent a note.''' - - def create_reply( - self, content, author - ): - '''Create a reply with *content* and *author*. - - .. note:: - - This is a helper method. To create replies manually use the - standard :meth:`Session.create` method. - - ''' - reply = self.session.create( - 'Note', { - 'author': author, - 'content': content - } - ) - - self['replies'].append(reply) - - return reply - - -class CreateNoteMixin(object): - '''Mixin to add create_note method on entity class.''' - - def create_note( - self, content, author, recipients=None, category=None, labels=None - ): - '''Create note with *content*, *author*. - - NoteLabels can be set by including *labels*. - - Note category can be set by including *category*. - - *recipients* can be specified as a list of user or group instances. - - ''' - note_label_support = 'NoteLabel' in self.session.types - - if not labels: - labels = [] - - if labels and not note_label_support: - raise ValueError( - 'NoteLabel is not supported by the current server version.' - ) - - if category and labels: - raise ValueError( - 'Both category and labels cannot be set at the same time.' 
- ) - - if not recipients: - recipients = [] - - data = { - 'content': content, - 'author': author - } - - if category: - if note_label_support: - labels = [category] - warnings.warn( - 'category argument will be removed in an upcoming version, ' - 'please use labels instead.', - PendingDeprecationWarning - ) - else: - data['category_id'] = category['id'] - - note = self.session.create('Note', data) - - self['notes'].append(note) - - for resource in recipients: - recipient = self.session.create('Recipient', { - 'note_id': note['id'], - 'resource_id': resource['id'] - }) - - note['recipients'].append(recipient) - - for label in labels: - self.session.create( - 'NoteLabelLink', - { - 'label_id': label['id'], - 'note_id': note['id'] - } - ) - - return note diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py deleted file mode 100644 index ec6db7c019..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py +++ /dev/null @@ -1,94 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class ProjectSchema(ftrack_api.entity.base.Entity): - '''Class representing ProjectSchema.''' - - def get_statuses(self, schema, type_id=None): - '''Return statuses for *schema* and optional *type_id*. - - *type_id* is the id of the Type for a TypedContext and can be used to - get statuses where the workflow has been overridden. - - ''' - # Task has overrides and need to be handled separately. 
- if schema == 'Task': - if type_id is not None: - overrides = self['_overrides'] - for override in overrides: - if override['type_id'] == type_id: - return override['workflow_schema']['statuses'][:] - - return self['_task_workflow']['statuses'][:] - - elif schema == 'AssetVersion': - return self['_version_workflow']['statuses'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have statuses.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_status from SchemaStatus ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [ - schema_type['task_status'] for schema_type in result - ] - - raise ValueError( - 'No valid statuses were found for schema {0}.'.format(schema) - ) - - def get_types(self, schema): - '''Return types for *schema*.''' - # Task need to be handled separately. 
- if schema == 'Task': - return self['_task_type_schema']['types'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have types.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_type from SchemaType ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [schema_type['task_type'] for schema_type in result] - - raise ValueError( - 'No valid types were found for schema {0}.'.format(schema) - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py deleted file mode 100644 index 511ad4ba99..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py +++ /dev/null @@ -1,123 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import arrow - -import ftrack_api.entity.base -import ftrack_api.exception - - -class User(ftrack_api.entity.base.Entity): - '''Represent a user.''' - - def start_timer(self, context=None, comment='', name=None, force=False): - '''Start a timer for *context* and return it. - - *force* can be used to automatically stop an existing timer and create a - timelog for it. If you need to get access to the created timelog, use - :func:`stop_timer` instead. - - *comment* and *name* are optional but will be set on the timer. - - .. 
note:: - - This method will automatically commit the changes and if *force* is - False then it will fail with a - :class:`ftrack_api.exception.NotUniqueError` exception if a - timer is already running. - - ''' - if force: - try: - self.stop_timer() - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Failed to stop existing timer.') - - timer = self.session.create('Timer', { - 'user': self, - 'context': context, - 'name': name, - 'comment': comment - }) - - # Commit the new timer and try to catch any error that indicate another - # timelog already exists and inform the user about it. - try: - self.session.commit() - except ftrack_api.exception.ServerError as error: - if 'IntegrityError' in str(error): - raise ftrack_api.exception.NotUniqueError( - ('Failed to start a timelog for user with id: {0}, it is ' - 'likely that a timer is already running. Either use ' - 'force=True or stop the timer first.').format(self['id']) - ) - else: - # Reraise the error as it might be something unrelated. - raise - - return timer - - def stop_timer(self): - '''Stop the current timer and return a timelog created from it. - - If a timer is not running, a - :exc:`ftrack_api.exception.NoResultFoundError` exception will be - raised. - - .. note:: - - This method will automatically commit the changes. - - ''' - timer = self.session.query( - 'Timer where user_id = "{0}"'.format(self['id']) - ).one() - - # If the server is running in the same timezone as the local - # timezone, we remove the TZ offset to get the correct duration. - is_timezone_support_enabled = self.session.server_information.get( - 'is_timezone_support_enabled', None - ) - if is_timezone_support_enabled is None: - self.logger.warning( - 'Could not identify if server has timezone support enabled. ' - 'Will assume server is running in UTC.' 
- ) - is_timezone_support_enabled = True - - if is_timezone_support_enabled: - now = arrow.now() - else: - now = arrow.now().replace(tzinfo='utc') - - delta = now - timer['start'] - duration = delta.days * 24 * 60 * 60 + delta.seconds - - timelog = self.session.create('Timelog', { - 'user_id': timer['user_id'], - 'context_id': timer['context_id'], - 'comment': timer['comment'], - 'start': timer['start'], - 'duration': duration, - 'name': timer['name'] - }) - - self.session.delete(timer) - self.session.commit() - - return timelog - - def send_invite(self): - '''Send a invation email to the user''' - - self.session.send_user_invite( - self - ) - def reset_api_key(self): - '''Reset the users api key.''' - - response = self.session.reset_remote( - 'api_key', entity=self - ) - - return response['api_key'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py deleted file mode 100644 index b5fd57da78..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py +++ /dev/null @@ -1,85 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import uuid -import collections - - -class Event(collections.MutableMapping): - '''Represent a single event.''' - - def __init__(self, topic, id=None, data=None, sent=None, - source=None, target='', 
in_reply_to_event=None): - '''Initialise event. - - *topic* is the required topic for the event. It can use a dotted - notation to demarcate groupings. For example, 'ftrack.update'. - - *id* is the unique id for this event instance. It is primarily used when - replying to an event. If not supplied a default uuid based value will - be used. - - *data* refers to event specific data. It should be a mapping structure - and defaults to an empty dictionary if not supplied. - - *sent* is the timestamp the event is sent. It will be set automatically - as send time unless specified here. - - *source* is information about where the event originated. It should be - a mapping and include at least a unique id value under an 'id' key. If - not specified, senders usually populate the value automatically at - publish time. - - *target* can be an expression that targets this event. For example, - a reply event would target the event to the sender of the source event. - The expression will be tested against subscriber information only. - - *in_reply_to_event* is used when replying to an event and should contain - the unique id of the event being replied to. 
- - ''' - super(Event, self).__init__() - self._data = dict( - id=id or uuid.uuid4().hex, - data=data or {}, - topic=topic, - sent=sent, - source=source or {}, - target=target, - in_reply_to_event=in_reply_to_event - ) - self._stopped = False - - def stop(self): - '''Stop further processing of this event.''' - self._stopped = True - - def is_stopped(self): - '''Return whether event has been stopped.''' - return self._stopped - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py deleted file mode 100644 index 0535e4fd5f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py +++ /dev/null @@ -1,282 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from operator import eq, ne, ge, le, gt, lt - -from pyparsing import (Group, Word, CaselessKeyword, Forward, - FollowedBy, Suppress, oneOf, OneOrMore, Optional, - alphanums, quotedString, removeQuotes) - -import ftrack_api.exception - -# Do not enable packrat since it is not thread-safe and will result in parsing -# exceptions in a multi threaded environment. 
-# ParserElement.enablePackrat() - - -class Parser(object): - '''Parse string based expression into :class:`Expression` instance.''' - - def __init__(self): - '''Initialise parser.''' - self._operators = { - '=': eq, - '!=': ne, - '>=': ge, - '<=': le, - '>': gt, - '<': lt - } - self._parser = self._construct_parser() - super(Parser, self).__init__() - - def _construct_parser(self): - '''Construct and return parser.''' - field = Word(alphanums + '_.') - operator = oneOf(self._operators.keys()) - value = Word(alphanums + '-_,./*@+') - quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) - - condition = Group( - field + operator + (quoted_value | value) - )('condition') - - not_ = Optional(Suppress(CaselessKeyword('not')))('not') - and_ = Suppress(CaselessKeyword('and'))('and') - or_ = Suppress(CaselessKeyword('or'))('or') - - expression = Forward() - parenthesis = Suppress('(') + expression + Suppress(')') - previous = condition | parenthesis - - for conjunction in (not_, and_, or_): - current = Forward() - - if conjunction in (and_, or_): - conjunction_expression = ( - FollowedBy(previous + conjunction + previous) - + Group( - previous + OneOrMore(conjunction + previous) - )(conjunction.resultsName) - ) - - elif conjunction in (not_, ): - conjunction_expression = ( - FollowedBy(conjunction.expr + current) - + Group(conjunction + current)(conjunction.resultsName) - ) - - else: # pragma: no cover - raise ValueError('Unrecognised conjunction.') - - current <<= (conjunction_expression | previous) - previous = current - - expression <<= previous - return expression('expression') - - def parse(self, expression): - '''Parse string *expression* into :class:`Expression`. - - Raise :exc:`ftrack_api.exception.ParseError` if *expression* could - not be parsed. 
- - ''' - result = None - expression = expression.strip() - if expression: - try: - result = self._parser.parseString( - expression, parseAll=True - ) - except Exception as error: - raise ftrack_api.exception.ParseError( - 'Failed to parse: {0}. {1}'.format(expression, error) - ) - - return self._process(result) - - def _process(self, result): - '''Process *result* using appropriate method. - - Method called is determined by the name of the result. - - ''' - method_name = '_process_{0}'.format(result.getName()) - method = getattr(self, method_name) - return method(result) - - def _process_expression(self, result): - '''Process *result* as expression.''' - return self._process(result[0]) - - def _process_not(self, result): - '''Process *result* as NOT operation.''' - return Not(self._process(result[0])) - - def _process_and(self, result): - '''Process *result* as AND operation.''' - return All([self._process(entry) for entry in result]) - - def _process_or(self, result): - '''Process *result* as OR operation.''' - return Any([self._process(entry) for entry in result]) - - def _process_condition(self, result): - '''Process *result* as condition.''' - key, operator, value = result - return Condition(key, self._operators[operator], value) - - def _process_quoted_value(self, result): - '''Process *result* as quoted value.''' - return result - - -class Expression(object): - '''Represent a structured expression to test candidates against.''' - - def __str__(self): - '''Return string representation.''' - return '<{0}>'.format(self.__class__.__name__) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return True - - -class All(Expression): - '''Match candidate that matches all of the specified expressions. - - .. note:: - - If no expressions are supplied then will always match. 
- - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(All, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return all([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Any(Expression): - '''Match candidate that matches any of the specified expressions. - - .. note:: - - If no expressions are supplied then will never match. - - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(Any, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return any([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Not(Expression): - '''Negate expression.''' - - def __init__(self, expression): - '''Initialise with *expression* to negate.''' - self._expression = expression - super(Not, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, - self._expression - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return not self._expression.match(candidate) - - -class Condition(Expression): - '''Represent condition.''' - - def __init__(self, key, operator, value): - '''Initialise condition. - - *key* is the key to check on the data when matching. It can be a nested - key represented by dots. 
For example, 'data.eventType' would attempt to - match candidate['data']['eventType']. If the candidate is missing any - of the requested keys then the match fails immediately. - - *operator* is the operator function to use to perform the match between - the retrieved candidate value and the conditional *value*. - - If *value* is a string, it can use a wildcard '*' at the end to denote - that any values matching the substring portion are valid when matching - equality only. - - ''' - self._key = key - self._operator = operator - self._value = value - self._wildcard = '*' - self._operatorMapping = { - eq: '=', - ne: '!=', - ge: '>=', - le: '<=', - gt: '>', - lt: '<' - } - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}{2}{3}>'.format( - self.__class__.__name__, - self._key, - self._operatorMapping.get(self._operator, self._operator), - self._value - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - key_parts = self._key.split('.') - - try: - value = candidate - for keyPart in key_parts: - value = value[keyPart] - except (KeyError, TypeError): - return False - - if ( - self._operator is eq - and isinstance(self._value, basestring) - and self._value[-1] == self._wildcard - ): - return self._value[:-1] in value - else: - return self._operator(value, self._value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py deleted file mode 100644 index 9f4ba80c6e..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py +++ /dev/null @@ -1,1091 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -from __future__ import absolute_import - -import collections -import urlparse -import threading -import Queue as queue -import logging -import time -import uuid 
-import operator -import functools -import json -import socket -import warnings - -import requests -import requests.exceptions -import websocket - -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.event.subscriber -import ftrack_api.event.expression -from ftrack_api.logging import LazyLogMessage as L - - -SocketIoSession = collections.namedtuple('SocketIoSession', [ - 'id', - 'heartbeatTimeout', - 'supportedTransports', -]) - - -ServerDetails = collections.namedtuple('ServerDetails', [ - 'scheme', - 'hostname', - 'port', -]) - - - - -class EventHub(object): - '''Manage routing of events.''' - - _future_signature_warning = ( - 'When constructing your Session object you did not explicitly define ' - 'auto_connect_event_hub as True even though you appear to be publishing ' - 'and / or subscribing to asynchronous events. In version version 2.0 of ' - 'the ftrack-python-api the default behavior will change from True ' - 'to False. Please make sure to update your tools. You can read more at ' - 'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html' - ) - - def __init__(self, server_url, api_user, api_key): - '''Initialise hub, connecting to ftrack *server_url*. - - *api_user* is the user to authenticate as and *api_key* is the API key - to authenticate with. - - ''' - super(EventHub, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.id = uuid.uuid4().hex - self._connection = None - - self._unique_packet_id = 0 - self._packet_callbacks = {} - self._lock = threading.RLock() - - self._wait_timeout = 4 - - self._subscribers = [] - self._reply_callbacks = {} - self._intentional_disconnect = False - - self._event_queue = queue.Queue() - self._event_namespace = 'ftrack.event' - self._expression_parser = ftrack_api.event.expression.Parser() - - # Default values for auto reconnection timeout on unintentional - # disconnection. Equates to 5 minutes. 
- self._auto_reconnect_attempts = 30 - self._auto_reconnect_delay = 10 - - self._deprecation_warning_auto_connect = False - - # Mapping of Socket.IO codes to meaning. - self._code_name_mapping = { - '0': 'disconnect', - '1': 'connect', - '2': 'heartbeat', - '3': 'message', - '4': 'json', - '5': 'event', - '6': 'acknowledge', - '7': 'error' - } - self._code_name_mapping.update( - dict((name, code) for code, name in self._code_name_mapping.items()) - ) - - self._server_url = server_url - self._api_user = api_user - self._api_key = api_key - - # Parse server URL and store server details. - url_parse_result = urlparse.urlparse(self._server_url) - if not url_parse_result.scheme: - raise ValueError('Could not determine scheme from server url.') - - if not url_parse_result.hostname: - raise ValueError('Could not determine hostname from server url.') - - self.server = ServerDetails( - url_parse_result.scheme, - url_parse_result.hostname, - url_parse_result.port - ) - - def get_server_url(self): - '''Return URL to server.''' - return '{0}://{1}'.format( - self.server.scheme, self.get_network_location() - ) - - def get_network_location(self): - '''Return network location part of url (hostname with optional port).''' - if self.server.port: - return '{0}:{1}'.format(self.server.hostname, self.server.port) - else: - return self.server.hostname - - @property - def secure(self): - '''Return whether secure connection used.''' - return self.server.scheme == 'https' - - def connect(self): - '''Initialise connection to server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already - connected or connection fails. - - ''' - - self._deprecation_warning_auto_connect = False - - if self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Already connected.' - ) - - # Reset flag tracking whether disconnection was intentional. - self._intentional_disconnect = False - - try: - # Connect to socket.io server using websocket transport. 
- session = self._get_socket_io_session() - - if 'websocket' not in session.supportedTransports: - raise ValueError( - 'Server does not support websocket sessions.' - ) - - scheme = 'wss' if self.secure else 'ws' - url = '{0}://{1}/socket.io/1/websocket/{2}'.format( - scheme, self.get_network_location(), session.id - ) - - # timeout is set to 60 seconds to avoid the issue where the socket - # ends up in a bad state where it is reported as connected but the - # connection has been closed. The issue happens often when connected - # to a secure socket and the computer goes to sleep. - # More information on how the timeout works can be found here: - # https://docs.python.org/2/library/socket.html#socket.socket.setblocking - self._connection = websocket.create_connection(url, timeout=60) - - except Exception as error: - error_message = ( - 'Failed to connect to event server at {server_url} with ' - 'error: "{error}".' - ) - - error_details = { - 'error': unicode(error), - 'server_url': self.get_server_url() - } - - self.logger.debug( - L( - error_message, **error_details - ), - exc_info=1 - ) - raise ftrack_api.exception.EventHubConnectionError( - error_message, - details=error_details - ) - - # Start background processing thread. - self._processor_thread = _ProcessorThread(self) - self._processor_thread.start() - - # Subscribe to reply events if not already. Note: Only adding the - # subscriber locally as the following block will notify server of all - # existing subscribers, which would cause the server to report a - # duplicate subscriber error if EventHub.subscribe was called here. - try: - self._add_subscriber( - 'topic=ftrack.meta.reply', - self._handle_reply, - subscriber=dict( - id=self.id - ) - ) - except ftrack_api.exception.NotUniqueError: - pass - - # Now resubscribe any existing stored subscribers. This can happen when - # reconnecting automatically for example. 
- for subscriber in self._subscribers[:]: - self._notify_server_about_subscriber(subscriber) - - @property - def connected(self): - '''Return if connected.''' - return self._connection is not None and self._connection.connected - - def disconnect(self, unsubscribe=True): - '''Disconnect from server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - If *unsubscribe* is True then unsubscribe all current subscribers - automatically before disconnecting. - - ''' - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Not currently connected.' - ) - - else: - # Set flag to indicate disconnection was intentional. - self._intentional_disconnect = True - - # Set blocking to true on socket to make sure unsubscribe events - # are emitted before closing the connection. - self._connection.sock.setblocking(1) - - # Unsubscribe all subscribers. - if unsubscribe: - for subscriber in self._subscribers[:]: - self.unsubscribe(subscriber.metadata['id']) - - # Now disconnect. - self._connection.close() - self._connection = None - - # Shutdown background processing thread. - self._processor_thread.cancel() - - # Join to it if it is not current thread to help ensure a clean - # shutdown. - if threading.current_thread() != self._processor_thread: - self._processor_thread.join(self._wait_timeout) - - def reconnect(self, attempts=10, delay=5): - '''Reconnect to server. - - Make *attempts* number of attempts with *delay* in seconds between each - attempt. - - .. note:: - - All current subscribers will be automatically resubscribed after - successful reconnection. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if fail to - reconnect. 
- - ''' - try: - self.disconnect(unsubscribe=False) - except ftrack_api.exception.EventHubConnectionError: - pass - - for attempt in range(attempts): - self.logger.debug(L( - 'Reconnect attempt {0} of {1}', attempt, attempts - )) - - # Silence logging temporarily to avoid lots of failed connection - # related information. - try: - logging.disable(logging.CRITICAL) - - try: - self.connect() - except ftrack_api.exception.EventHubConnectionError: - time.sleep(delay) - else: - break - - finally: - logging.disable(logging.NOTSET) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to reconnect to event server at {0} after {1} attempts.' - .format(self.get_server_url(), attempts) - ) - - def wait(self, duration=None): - '''Wait for events and handle as they arrive. - - If *duration* is specified, then only process events until duration is - reached. *duration* is in seconds though float values can be used for - smaller values. - - ''' - started = time.time() - - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - pass - else: - self._handle(event) - - # Additional special processing of events. - if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def get_subscriber_by_identifier(self, identifier): - '''Return subscriber with matching *identifier*. - - Return None if no subscriber with *identifier* found. - - ''' - for subscriber in self._subscribers[:]: - if subscriber.metadata.get('id') == identifier: - return subscriber - - return None - - def subscribe(self, subscription, callback, subscriber=None, priority=100): - '''Register *callback* for *subscription*. - - A *subscription* is a string that can specify in detail which events the - callback should receive. The filtering is applied against each event - object. Nested references are supported using '.' separators. 
- For example, 'topic=foo and data.eventType=Shot' would match the - following event:: - - - - The *callback* should accept an instance of - :class:`ftrack_api.event.base.Event` as its sole argument. - - Callbacks are called in order of *priority*. The lower the priority - number the sooner it will be called, with 0 being the first. The - default priority is 100. Note that priority only applies against other - callbacks registered with this hub and not as a global priority. - - An earlier callback can prevent processing of subsequent callbacks by - calling :meth:`Event.stop` on the passed `event` before - returning. - - .. warning:: - - Handlers block processing of other received events. For long - running callbacks it is advisable to delegate the main work to - another process or thread. - - A *callback* can be attached to *subscriber* information that details - the subscriber context. A subscriber context will be generated - automatically if not supplied. - - .. note:: - - The subscription will be stored locally, but until the server - receives notification of the subscription it is possible the - callback will not be called. - - Return subscriber identifier. - - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - # Add subscriber locally. - subscriber = self._add_subscriber( - subscription, callback, subscriber, priority - ) - - # Notify server now if possible. - try: - self._notify_server_about_subscriber(subscriber) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server about new subscriber {0} ' - 'as server not currently reachable.', subscriber.metadata['id'] - )) - - return subscriber.metadata['id'] - - def _add_subscriber( - self, subscription, callback, subscriber=None, priority=100 - ): - '''Add subscriber locally. - - See :meth:`subscribe` for argument descriptions. - - Return :class:`ftrack_api.event.subscriber.Subscriber` instance. 
- - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - if subscriber is None: - subscriber = {} - - subscriber.setdefault('id', uuid.uuid4().hex) - - # Check subscriber not already subscribed. - existing_subscriber = self.get_subscriber_by_identifier( - subscriber['id'] - ) - - if existing_subscriber is not None: - raise ftrack_api.exception.NotUniqueError( - 'Subscriber with identifier {0} already exists.' - .format(subscriber['id']) - ) - - subscriber = ftrack_api.event.subscriber.Subscriber( - subscription=subscription, - callback=callback, - metadata=subscriber, - priority=priority - ) - - self._subscribers.append(subscriber) - - return subscriber - - def _notify_server_about_subscriber(self, subscriber): - '''Notify server of new *subscriber*.''' - subscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.subscribe', - data=dict( - subscriber=subscriber.metadata, - subscription=str(subscriber.subscription) - ) - ) - - self._publish( - subscribe_event, - callback=functools.partial(self._on_subscribed, subscriber) - ) - - def _on_subscribed(self, subscriber, response): - '''Handle acknowledgement of subscription.''' - if response.get('success') is False: - self.logger.warning(L( - 'Server failed to subscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def unsubscribe(self, subscriber_identifier): - '''Unsubscribe subscriber with *subscriber_identifier*. - - .. note:: - - If the server is not reachable then it won't be notified of the - unsubscription. However, the subscriber will be removed locally - regardless. 
- - ''' - subscriber = self.get_subscriber_by_identifier(subscriber_identifier) - - if subscriber is None: - raise ftrack_api.exception.NotFoundError( - 'Cannot unsubscribe missing subscriber with identifier {0}' - .format(subscriber_identifier) - ) - - self._subscribers.pop(self._subscribers.index(subscriber)) - - # Notify the server if possible. - unsubscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.unsubscribe', - data=dict(subscriber=subscriber.metadata) - ) - - try: - self._publish( - unsubscribe_event, - callback=functools.partial(self._on_unsubscribed, subscriber) - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server to unsubscribe subscriber {0} as ' - 'server not currently reachable.', subscriber.metadata['id'] - )) - - def _on_unsubscribed(self, subscriber, response): - '''Handle acknowledgement of unsubscribing *subscriber*.''' - if response.get('success') is not True: - self.logger.warning(L( - 'Server failed to unsubscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def _prepare_event(self, event): - '''Prepare *event* for sending.''' - event['source'].setdefault('id', self.id) - event['source'].setdefault('user', { - 'username': self._api_user - }) - - def _prepare_reply_event(self, event, source_event, source=None): - '''Prepare *event* as a reply to another *source_event*. - - Modify *event*, setting appropriate values to target event correctly as - a reply. - - ''' - event['target'] = 'id={0}'.format(source_event['source']['id']) - event['in_reply_to_event'] = source_event['id'] - if source is not None: - event['source'] = source - - def publish( - self, event, synchronous=False, on_reply=None, on_error='raise' - ): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. 
note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. - - .. note:: - - Will not be called when *synchronous* is True. - - If *on_error* is set to 'ignore' then errors raised during publish of - event will be caught by this method and ignored. - - ''' - if self._deprecation_warning_auto_connect and not synchronous: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - try: - return self._publish( - event, synchronous=synchronous, on_reply=on_reply - ) - except Exception: - if on_error == 'ignore': - pass - else: - raise - - def publish_reply(self, source_event, data, source=None): - '''Publish a reply event to *source_event* with supplied *data*. - - If *source* is specified it will be used for the source value of the - sent event. - - ''' - reply_event = ftrack_api.event.base.Event( - 'ftrack.meta.reply', - data=data - ) - self._prepare_reply_event(reply_event, source_event, source=source) - self.publish(reply_event) - - def _publish(self, event, synchronous=False, callback=None, on_reply=None): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - A *callback* can also be specified. This callback will be called once - the server acknowledges receipt of the sent event. A default callback - that checks for errors from the server will be used if not specified. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. 
Note that there is no - guarantee that a reply will be sent. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - ''' - # Prepare event adding any relevant additional information. - self._prepare_event(event) - - if synchronous: - # Bypass emitting event to server and instead call locally - # registered handlers directly, collecting and returning results. - return self._handle(event, synchronous=synchronous) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Cannot publish event asynchronously as not connected to ' - 'server.' - ) - - # Use standard callback if none specified. - if callback is None: - callback = functools.partial(self._on_published, event) - - # Emit event to central server for asynchronous processing. - try: - # Register on reply callback if specified. - if on_reply is not None: - # TODO: Add cleanup process that runs after a set duration to - # garbage collect old reply callbacks and prevent dictionary - # growing too large. - self._reply_callbacks[event['id']] = on_reply - - try: - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except ftrack_api.exception.EventHubConnectionError: - # Connection may have dropped temporarily. Wait a few moments to - # see if background thread reconnects automatically. - time.sleep(15) - - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except: - raise - - except Exception: - # Failure to send event should not cause caller to fail. - # TODO: This behaviour is inconsistent with the failing earlier on - # lack of connection and also with the error handling parameter of - # EventHub.publish. Consider refactoring. 
- self.logger.exception(L('Error sending event {0}.', event)) - - def _on_published(self, event, response): - '''Handle acknowledgement of published event.''' - if response.get('success', False) is False: - self.logger.error(L( - 'Server responded with error while publishing event {0}. ' - 'Error was: {1}', event, response.get('message') - )) - - def _handle(self, event, synchronous=False): - '''Handle *event*. - - If *synchronous* is True, do not send any automatic reply events. - - ''' - # Sort by priority, lower is higher. - # TODO: Use a sorted list to avoid sorting each time in order to improve - # performance. - subscribers = sorted( - self._subscribers, key=operator.attrgetter('priority') - ) - - results = [] - - target = event.get('target', None) - target_expression = None - if target: - try: - target_expression = self._expression_parser.parse(target) - except Exception: - self.logger.exception(L( - 'Cannot handle event as failed to parse event target ' - 'information: {0}', event - )) - return - - for subscriber in subscribers: - # Check if event is targeted to the subscriber. - if ( - target_expression is not None - and not target_expression.match(subscriber.metadata) - ): - continue - - # Check if subscriber interested in the event. - if not subscriber.interested_in(event): - continue - - response = None - - try: - response = subscriber.callback(event) - results.append(response) - except Exception: - self.logger.exception(L( - 'Error calling subscriber {0} for event {1}.', - subscriber, event - )) - - # Automatically publish a non None response as a reply when not in - # synchronous mode. 
- if not synchronous: - if self._deprecation_warning_auto_connect: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - if response is not None: - try: - self.publish_reply( - event, data=response, source=subscriber.metadata - ) - - except Exception: - self.logger.exception(L( - 'Error publishing response {0} from subscriber {1} ' - 'for event {2}.', response, subscriber, event - )) - - # Check whether to continue processing topic event. - if event.is_stopped(): - self.logger.debug(L( - 'Subscriber {0} stopped event {1}. Will not process ' - 'subsequent subscriber callbacks for this event.', - subscriber, event - )) - break - - return results - - def _handle_reply(self, event): - '''Handle reply *event*, passing it to any registered callback.''' - callback = self._reply_callbacks.get(event['in_reply_to_event'], None) - if callback is not None: - callback(event) - - def subscription(self, subscription, callback, subscriber=None, - priority=100): - '''Return context manager with *callback* subscribed to *subscription*. - - The subscribed callback will be automatically unsubscribed on exit - of the context manager. - - ''' - return _SubscriptionContext( - self, subscription, callback, subscriber=subscriber, - priority=priority, - ) - - # Socket.IO interface. - # - - def _get_socket_io_session(self): - '''Connect to server and retrieve session information.''' - socket_io_url = ( - '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}' - ).format( - self.server.scheme, - self.get_network_location(), - self._api_user, - self._api_key - ) - try: - response = requests.get( - socket_io_url, - timeout=60 # 60 seconds timeout to recieve errors faster. 
- ) - except requests.exceptions.Timeout as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Timed out connecting to server: {0}.'.format(error) - ) - except requests.exceptions.SSLError as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to negotiate SSL with server: {0}.'.format(error) - ) - except requests.exceptions.ConnectionError as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to connect to server: {0}.'.format(error) - ) - else: - status = response.status_code - if status != 200: - raise ftrack_api.exception.EventHubConnectionError( - 'Received unexpected status code {0}.'.format(status) - ) - - # Parse result and return session information. - parts = response.text.split(':') - return SocketIoSession( - parts[0], - parts[1], - parts[3].split(',') - ) - - def _add_packet_callback(self, callback): - '''Store callback against a new unique packet ID. - - Return the unique packet ID. - - ''' - with self._lock: - self._unique_packet_id += 1 - unique_identifier = self._unique_packet_id - - self._packet_callbacks[unique_identifier] = callback - - return '{0}+'.format(unique_identifier) - - def _pop_packet_callback(self, packet_identifier): - '''Pop and return callback for *packet_identifier*.''' - return self._packet_callbacks.pop(packet_identifier) - - def _emit_event_packet(self, namespace, event, callback): - '''Send *event* packet under *namespace*.''' - data = self._encode( - dict(name=namespace, args=[event]) - ) - self._send_packet( - self._code_name_mapping['event'], data=data, callback=callback - ) - - def _acknowledge_packet(self, packet_identifier, *args): - '''Send acknowledgement of packet with *packet_identifier*.''' - packet_identifier = packet_identifier.rstrip('+') - data = str(packet_identifier) - if args: - data += '+{1}'.format(self._encode(args)) - - self._send_packet(self._code_name_mapping['acknowledge'], data=data) - - def _send_packet(self, code, data='', callback=None): - 
'''Send packet via connection.''' - path = '' - packet_identifier = ( - self._add_packet_callback(callback) if callback else '' - ) - packet_parts = (str(code), packet_identifier, path, data) - packet = ':'.join(packet_parts) - - try: - self._connection.send(packet) - self.logger.debug(L(u'Sent packet: {0}', packet)) - except socket.error as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to send packet: {0}'.format(error) - ) - - def _receive_packet(self): - '''Receive and return packet via connection.''' - try: - packet = self._connection.recv() - except Exception as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Error receiving packet: {0}'.format(error) - ) - - try: - parts = packet.split(':', 3) - except AttributeError: - raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) - ) - - code, packet_identifier, path, data = None, None, None, None - - count = len(parts) - if count == 4: - code, packet_identifier, path, data = parts - elif count == 3: - code, packet_identifier, path = parts - elif count == 1: - code = parts[0] - else: - raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) - ) - - self.logger.debug(L('Received packet: {0}', packet)) - return code, packet_identifier, path, data - - def _handle_packet(self, code, packet_identifier, path, data): - '''Handle packet received from server.''' - code_name = self._code_name_mapping[code] - - if code_name == 'connect': - self.logger.debug('Connected to event server.') - event = ftrack_api.event.base.Event('ftrack.meta.connected') - self._prepare_event(event) - self._event_queue.put(event) - - elif code_name == 'disconnect': - self.logger.debug('Disconnected from event server.') - if not self._intentional_disconnect: - self.logger.debug( - 'Disconnected unexpectedly. Attempting to reconnect.' 
- ) - try: - self.reconnect( - attempts=self._auto_reconnect_attempts, - delay=self._auto_reconnect_delay - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug('Failed to reconnect automatically.') - else: - self.logger.debug('Reconnected successfully.') - - if not self.connected: - event = ftrack_api.event.base.Event('ftrack.meta.disconnected') - self._prepare_event(event) - self._event_queue.put(event) - - elif code_name == 'heartbeat': - # Reply with heartbeat. - self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == 'message': - self.logger.debug(L('Message received: {0}', data)) - - elif code_name == 'event': - payload = self._decode(data) - args = payload.get('args', []) - - if len(args) == 1: - event_payload = args[0] - if isinstance(event_payload, collections.Mapping): - try: - event = ftrack_api.event.base.Event(**event_payload) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_payload - )) - return - - self._event_queue.put(event) - - elif code_name == 'acknowledge': - parts = data.split('+', 1) - acknowledged_packet_identifier = int(parts[0]) - args = [] - if len(parts) == 2: - args = self._decode(parts[1]) - - try: - callback = self._pop_packet_callback( - acknowledged_packet_identifier - ) - except KeyError: - pass - else: - callback(*args) - - elif code_name == 'error': - self.logger.error(L('Event server reported error: {0}.', data)) - - else: - self.logger.debug(L('{0}: {1}', code_name, data)) - - def _encode(self, data): - '''Return *data* encoded as JSON formatted string.''' - return json.dumps( - data, - default=self._encode_object_hook, - ensure_ascii=False - ) - - def _encode_object_hook(self, item): - '''Return *item* transformed for encoding.''' - if isinstance(item, ftrack_api.event.base.Event): - # Convert to dictionary for encoding. - item = dict(**item) - - if 'in_reply_to_event' in item: - # Convert keys to server convention. 
- item['inReplyToEvent'] = item.pop('in_reply_to_event') - - return item - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def _decode(self, string): - '''Return decoded JSON *string* as Python object.''' - return json.loads(string, object_hook=self._decode_object_hook) - - def _decode_object_hook(self, item): - '''Return *item* transformed.''' - if isinstance(item, collections.Mapping): - if 'inReplyToEvent' in item: - item['in_reply_to_event'] = item.pop('inReplyToEvent') - - return item - - -class _SubscriptionContext(object): - '''Context manager for a one-off subscription.''' - - def __init__(self, hub, subscription, callback, subscriber, priority): - '''Initialise context.''' - self._hub = hub - self._subscription = subscription - self._callback = callback - self._subscriber = subscriber - self._priority = priority - self._subscriberIdentifier = None - - def __enter__(self): - '''Enter context subscribing callback to topic.''' - self._subscriberIdentifier = self._hub.subscribe( - self._subscription, self._callback, subscriber=self._subscriber, - priority=self._priority - ) - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context unsubscribing callback from topic.''' - self._hub.unsubscribe(self._subscriberIdentifier) - - -class _ProcessorThread(threading.Thread): - '''Process messages from server.''' - - daemon = True - - def __init__(self, client): - '''Initialise thread with Socket.IO *client* instance.''' - super(_ProcessorThread, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.client = client - self.done = threading.Event() - - def run(self): - '''Perform work in thread.''' - while not self.done.is_set(): - try: - code, packet_identifier, path, data = self.client._receive_packet() - self.client._handle_packet(code, packet_identifier, path, data) - - except ftrack_api.exception.EventHubPacketError as error: - self.logger.debug(L('Ignoring invalid packet: {0}', error)) - continue - - except ftrack_api.exception.EventHubConnectionError: - self.cancel() - - # Fake a disconnection event in order to trigger reconnection - # when necessary. - self.client._handle_packet('0', '', '', '') - - break - - except Exception as error: - self.logger.debug(L('Aborting processor thread: {0}', error)) - self.cancel() - break - - def cancel(self): - '''Cancel work as soon as possible.''' - self.done.set() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py deleted file mode 100644 index 0d38463aaf..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py +++ /dev/null @@ -1,27 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.event.subscription - - -class Subscriber(object): - '''Represent event subscriber.''' - - def __init__(self, subscription, callback, metadata, priority): - '''Initialise subscriber.''' - self.subscription = ftrack_api.event.subscription.Subscription( - subscription - ) - self.callback = callback - self.metadata = metadata - self.priority = priority - - def __str__(self): - '''Return string representation.''' - return '<{0} metadata={1} subscription="{2}">'.format( - self.__class__.__name__, self.metadata, self.subscription - ) - - def interested_in(self, event): - '''Return whether subscriber interested in 
*event*.''' - return self.subscription.includes(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py deleted file mode 100644 index 0b208d9977..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py +++ /dev/null @@ -1,23 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.event.expression - - -class Subscription(object): - '''Represent a subscription.''' - - parser = ftrack_api.event.expression.Parser() - - def __init__(self, subscription): - '''Initialise with *subscription*.''' - self._subscription = subscription - self._expression = self.parser.parse(subscription) - - def __str__(self): - '''Return string representation.''' - return self._subscription - - def includes(self, event): - '''Return whether subscription includes *event*.''' - return self._expression.match(event) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py deleted file mode 100644 index 8a2eb9bc04..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py +++ /dev/null @@ -1,392 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import traceback - -import ftrack_api.entity.base - - -class Error(Exception): - '''ftrack specific error.''' - - default_message = 'Unspecified error occurred.' - - def __init__(self, message=None, details=None): - '''Initialise exception with *message*. - - If *message* is None, the class 'default_message' will be used. 
- - *details* should be a mapping of extra information that can be used in - the message and also to provide more context. - - ''' - if message is None: - message = self.default_message - - self.message = message - self.details = details - if self.details is None: - self.details = {} - - self.traceback = traceback.format_exc() - - def __str__(self): - '''Return string representation.''' - keys = {} - for key, value in self.details.iteritems(): - if isinstance(value, unicode): - value = value.encode(sys.getfilesystemencoding()) - keys[key] = value - - return str(self.message.format(**keys)) - - -class AuthenticationError(Error): - '''Raise when an authentication error occurs.''' - - default_message = 'Authentication error.' - - -class ServerError(Error): - '''Raise when the server reports an error.''' - - default_message = 'Server reported error processing request.' - - -class ServerCompatibilityError(ServerError): - '''Raise when server appears incompatible.''' - - default_message = 'Server incompatible.' - - -class NotFoundError(Error): - '''Raise when something that should exist is not found.''' - - default_message = 'Not found.' - - -class NotUniqueError(Error): - '''Raise when unique value required and duplicate detected.''' - - default_message = 'Non-unique value detected.' - - -class IncorrectResultError(Error): - '''Raise when a result is incorrect.''' - - default_message = 'Incorrect result detected.' - - -class NoResultFoundError(IncorrectResultError): - '''Raise when a result was expected but no result was found.''' - - default_message = 'Expected result, but no result was found.' - - -class MultipleResultsFoundError(IncorrectResultError): - '''Raise when a single result expected, but multiple results found.''' - - default_message = 'Expected single result, but received multiple results.' - - -class EntityTypeError(Error): - '''Raise when an entity type error occurs.''' - - default_message = 'Entity type error.' 
- - -class UnrecognisedEntityTypeError(EntityTypeError): - '''Raise when an unrecognised entity type detected.''' - - default_message = 'Entity type "{entity_type}" not recognised.' - - def __init__(self, entity_type, **kw): - '''Initialise with *entity_type* that is unrecognised.''' - kw.setdefault('details', {}).update(dict( - entity_type=entity_type - )) - super(UnrecognisedEntityTypeError, self).__init__(**kw) - - -class OperationError(Error): - '''Raise when an operation error occurs.''' - - default_message = 'Operation error.' - - -class InvalidStateError(Error): - '''Raise when an invalid state detected.''' - - default_message = 'Invalid state.' - - -class InvalidStateTransitionError(InvalidStateError): - '''Raise when an invalid state transition detected.''' - - default_message = ( - 'Invalid transition from {current_state!r} to {target_state!r} state ' - 'for entity {entity!r}' - ) - - def __init__(self, current_state, target_state, entity, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - current_state=current_state, - target_state=target_state, - entity=entity - )) - super(InvalidStateTransitionError, self).__init__(**kw) - - -class AttributeError(Error): - '''Raise when an error related to an attribute occurs.''' - - default_message = 'Attribute error.' - - -class ImmutableAttributeError(AttributeError): - '''Raise when modification of immutable attribute attempted.''' - - default_message = ( - 'Cannot modify value of immutable {attribute.name!r} attribute.' - ) - - def __init__(self, attribute, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - attribute=attribute - )) - super(ImmutableAttributeError, self).__init__(**kw) - - -class CollectionError(Error): - '''Raise when an error related to collections occurs.''' - - default_message = 'Collection error.' 
- - def __init__(self, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - collection=collection - )) - super(CollectionError, self).__init__(**kw) - - -class ImmutableCollectionError(CollectionError): - '''Raise when modification of immutable collection attempted.''' - - default_message = ( - 'Cannot modify value of immutable collection {collection!r}.' - ) - - -class DuplicateItemInCollectionError(CollectionError): - '''Raise when duplicate item in collection detected.''' - - default_message = ( - 'Item {item!r} already exists in collection {collection!r}.' - ) - - def __init__(self, item, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - item=item - )) - super(DuplicateItemInCollectionError, self).__init__(collection, **kw) - - -class ParseError(Error): - '''Raise when a parsing error occurs.''' - - default_message = 'Failed to parse.' - - -class EventHubError(Error): - '''Raise when issues related to event hub occur.''' - - default_message = 'Event hub error occurred.' - - -class EventHubConnectionError(EventHubError): - '''Raise when event hub encounters connection problem.''' - - default_message = 'Event hub is not connected.' - - -class EventHubPacketError(EventHubError): - '''Raise when event hub encounters an issue with a packet.''' - - default_message = 'Invalid packet.' - - -class PermissionDeniedError(Error): - '''Raise when permission is denied.''' - - default_message = 'Permission denied.' - - -class LocationError(Error): - '''Base for errors associated with locations.''' - - default_message = 'Unspecified location error' - - -class ComponentNotInAnyLocationError(LocationError): - '''Raise when component not available in any location.''' - - default_message = 'Component not available in any location.' 
- - -class ComponentNotInLocationError(LocationError): - '''Raise when component(s) not in location.''' - - default_message = ( - 'Component(s) {formatted_components} not found in location {location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentNotInLocationError, self).__init__(**kw) - - -class ComponentInLocationError(LocationError): - '''Raise when component(s) already exists in location.''' - - default_message = ( - 'Component(s) {formatted_components} already exist in location ' - '{location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentInLocationError, self).__init__(**kw) - - -class AccessorError(Error): - '''Base for errors associated with accessors.''' - - default_message = 'Unspecified accessor error' - - -class AccessorOperationFailedError(AccessorError): - '''Base for failed operations on accessors.''' - - default_message = 'Operation {operation} failed: {error}' - - def __init__( - self, operation='', resource_identifier=None, error=None, **kw - ): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier, - error=error - )) - super(AccessorOperationFailedError, self).__init__(**kw) - - -class AccessorUnsupportedOperationError(AccessorOperationFailedError): - '''Raise when operation is unsupported.''' - - 
default_message = 'Operation {operation} unsupported.' - - -class AccessorPermissionDeniedError(AccessorOperationFailedError): - '''Raise when permission denied.''' - - default_message = ( - 'Cannot {operation} {resource_identifier}. Permission denied.' - ) - - -class AccessorResourceIdentifierError(AccessorError): - '''Raise when a error related to a resource_identifier occurs.''' - - default_message = 'Resource identifier is invalid: {resource_identifier}.' - - def __init__(self, resource_identifier, **kw): - kw.setdefault('details', {}).update(dict( - resource_identifier=resource_identifier - )) - super(AccessorResourceIdentifierError, self).__init__(**kw) - - -class AccessorFilesystemPathError(AccessorResourceIdentifierError): - '''Raise when a error related to an accessor filesystem path occurs.''' - - default_message = ( - 'Could not determine filesystem path from resource identifier: ' - '{resource_identifier}.' - ) - - -class AccessorResourceError(AccessorError): - '''Base for errors associated with specific resource.''' - - default_message = 'Unspecified resource error: {resource_identifier}' - - def __init__(self, operation='', resource_identifier=None, error=None, - **kw): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier - )) - super(AccessorResourceError, self).__init__(**kw) - - -class AccessorResourceNotFoundError(AccessorResourceError): - '''Raise when a required resource is not found.''' - - default_message = 'Resource not found: {resource_identifier}' - - -class AccessorParentResourceNotFoundError(AccessorResourceError): - '''Raise when a parent resource (such as directory) is not found.''' - - default_message = 'Parent resource is missing: {resource_identifier}' - - -class AccessorResourceInvalidError(AccessorResourceError): - '''Raise when a resource is not the right type.''' - - default_message = 'Resource invalid: {resource_identifier}' - - -class 
AccessorContainerNotEmptyError(AccessorResourceError): - '''Raise when container is not empty.''' - - default_message = 'Container is not empty: {resource_identifier}' - - -class StructureError(Error): - '''Base for errors associated with structures.''' - - default_message = 'Unspecified structure error' - - -class ConnectionClosedError(Error): - '''Raise when attempt to use closed connection detected.''' - - default_message = "Connection closed." diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py deleted file mode 100644 index c282fcc814..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py +++ /dev/null @@ -1,131 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import termcolor - -import ftrack_api.entity.base -import ftrack_api.collection -import ftrack_api.symbol -import ftrack_api.inspection - - -#: Useful filters to pass to :func:`format`.` -FILTER = { - 'ignore_unset': ( - lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET - ) -} - - -def format( - entity, formatters=None, attribute_filter=None, recursive=False, - indent=0, indent_first_line=True, _seen=None -): - '''Return formatted string representing *entity*. - - *formatters* can be used to customise formatting of elements. It should be a - mapping with one or more of the following keys: - - * header - Used to format entity type. - * label - Used to format attribute names. - - Specify an *attribute_filter* to control which attributes to include. By - default all attributes are included. The *attribute_filter* should be a - callable that accepts `(entity, attribute_name, attribute_value)` and - returns True if the attribute should be included in the output. 
For example, - to filter out all unset values:: - - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] - - If *recursive* is True then recurse into Collections and format each entity - present. - - *indent* specifies the overall indentation in spaces of the formatted text, - whilst *indent_first_line* determines whether to apply that indent to the - first generated line. - - .. warning:: - - Iterates over all *entity* attributes which may cause multiple queries - to the server. Turn off auto populating in the session to prevent this. - - ''' - # Initialise default formatters. - if formatters is None: - formatters = dict() - - formatters.setdefault( - 'header', lambda text: termcolor.colored( - text, 'white', 'on_blue', attrs=['bold'] - ) - ) - formatters.setdefault( - 'label', lambda text: termcolor.colored( - text, 'blue', attrs=['bold'] - ) - ) - - # Determine indents. - spacer = ' ' * indent - if indent_first_line: - first_line_spacer = spacer - else: - first_line_spacer = '' - - # Avoid infinite recursion on circular references. 
- if _seen is None: - _seen = set() - - identifier = str(ftrack_api.inspection.identity(entity)) - if identifier in _seen: - return ( - first_line_spacer + - formatters['header'](entity.entity_type) + '{...}' - ) - - _seen.add(identifier) - information = list() - - information.append( - first_line_spacer + formatters['header'](entity.entity_type) - ) - for key, value in sorted(entity.items()): - if attribute_filter is not None: - if not attribute_filter(entity, key, value): - continue - - child_indent = indent + len(key) + 3 - - if isinstance(value, ftrack_api.entity.base.Entity): - value = format( - value, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=False, - _seen=_seen.copy() - ) - - if isinstance(value, ftrack_api.collection.Collection): - if recursive: - child_values = [] - for index, child in enumerate(value): - child_value = format( - child, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=index != 0, - _seen=_seen.copy() - ) - child_values.append(child_value) - - value = '\n'.join(child_values) - - information.append( - spacer + u' {0}: {1}'.format(formatters['label'](key), value) - ) - - return '\n'.join(information) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py deleted file mode 100644 index d8b815200e..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections - -import ftrack_api.symbol -import ftrack_api.operation - - -def identity(entity): - '''Return unique identity of *entity*.''' - return ( - str(entity.entity_type), - 
primary_key(entity).values() - ) - - -def primary_key(entity): - '''Return primary key of *entity* as an ordered mapping of {field: value}. - - To get just the primary key values:: - - primary_key(entity).values() - - ''' - primary_key = collections.OrderedDict() - for name in entity.primary_key_attributes: - value = entity[name] - if value is ftrack_api.symbol.NOT_SET: - raise KeyError( - 'Missing required value for primary key attribute "{0}" on ' - 'entity {1!r}.'.format(name, entity) - ) - - primary_key[str(name)] = str(value) - - return primary_key - - -def _state(operation, state): - '''Return state following *operation* against current *state*.''' - if ( - isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.CREATED - - elif ( - isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.MODIFIED - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - state = ftrack_api.symbol.DELETED - - return state - - -def state(entity): - '''Return current *entity* state. - - .. seealso:: :func:`ftrack_api.inspection.states`. - - ''' - value = ftrack_api.symbol.NOT_SET - - for operation in entity.session.recorded_operations: - # Determine if operation refers to an entity and whether that entity - # is *entity*. - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - and operation.entity_type == entity.entity_type - and operation.entity_key == primary_key(entity) - ): - value = _state(operation, value) - - return value - - -def states(entities): - '''Return current states of *entities*. - - An optimised function for determining states of multiple entities in one - go. - - .. note:: - - All *entities* should belong to the same session. - - .. 
seealso:: :func:`ftrack_api.inspection.state`. - - ''' - if not entities: - return [] - - session = entities[0].session - - entities_by_identity = collections.OrderedDict() - for entity in entities: - key = (entity.entity_type, str(primary_key(entity).values())) - entities_by_identity[key] = ftrack_api.symbol.NOT_SET - - for operation in session.recorded_operations: - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - ): - key = (operation.entity_type, str(operation.entity_key.values())) - if key not in entities_by_identity: - continue - - value = _state(operation, entities_by_identity[key]) - entities_by_identity[key] = value - - return entities_by_identity.values() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py deleted file mode 100644 index 41969c5b2a..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py +++ /dev/null @@ -1,43 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import functools -import warnings - - -def deprecation_warning(message): - def decorator(function): - @functools.wraps(function) - def wrapper(*args, **kwargs): - warnings.warn( - message, - PendingDeprecationWarning - ) - return function(*args, **kwargs) - return wrapper - - return decorator - - -class LazyLogMessage(object): - '''A log message that can be evaluated lazily for improved performance. - - Example:: - - # Formatting of string will not occur unless debug logging enabled. 
- logger.debug(LazyLogMessage( - 'Hello {0}', 'world' - )) - - ''' - - def __init__(self, message, *args, **kwargs): - '''Initialise with *message* format string and arguments.''' - self.message = message - self.args = args - self.kwargs = kwargs - - def __str__(self): - '''Return string representation.''' - return self.message.format(*self.args, **self.kwargs) - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py deleted file mode 100644 index bb3bb4ee2c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy - - -class Operations(object): - '''Stack of operations.''' - - def __init__(self): - '''Initialise stack.''' - self._stack = [] - super(Operations, self).__init__() - - def clear(self): - '''Clear all operations.''' - del self._stack[:] - - def push(self, operation): - '''Push *operation* onto stack.''' - self._stack.append(operation) - - def pop(self): - '''Pop and return most recent operation from stack.''' - return self._stack.pop() - - def __len__(self): - '''Return count of operations.''' - return len(self._stack) - - def __iter__(self): - '''Return iterator over operations.''' - return iter(self._stack) - - -class Operation(object): - '''Represent an operation.''' - - -class CreateEntityOperation(Operation): - '''Represent create entity operation.''' - - def __init__(self, entity_type, entity_key, entity_data): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. 
- - *entity_data* should be a mapping of the initial data to populate the - entity with when creating. - - .. note:: - - Shallow copies will be made of each value in *entity_data*. - - ''' - super(CreateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.entity_data = {} - for key, value in entity_data.items(): - self.entity_data[key] = copy.copy(value) - - -class UpdateEntityOperation(Operation): - '''Represent update entity operation.''' - - def __init__( - self, entity_type, entity_key, attribute_name, old_value, new_value - ): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. - - *attribute_name* should be the string name of the attribute being - modified and *old_value* and *new_value* should reflect the change in - value. - - .. note:: - - Shallow copies will be made of both *old_value* and *new_value*. - - ''' - super(UpdateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.attribute_name = attribute_name - self.old_value = copy.copy(old_value) - self.new_value = copy.copy(new_value) - - -class DeleteEntityOperation(Operation): - '''Represent delete entity operation.''' - - def __init__(self, entity_type, entity_key): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. 
- - ''' - super(DeleteEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py deleted file mode 100644 index 2c7a9a4500..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py +++ /dev/null @@ -1,121 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import os -import uuid -import imp -import inspect - - -def discover(paths, positional_arguments=None, keyword_arguments=None): - '''Find and load plugins in search *paths*. - - Each discovered module should implement a register function that accepts - *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs - respectively. - - If a register function does not accept variable arguments, then attempt to - only pass accepted arguments to the function by inspecting its signature. - - ''' - logger = logging.getLogger(__name__ + '.discover') - - if positional_arguments is None: - positional_arguments = [] - - if keyword_arguments is None: - keyword_arguments = {} - - for path in paths: - # Ignore empty paths that could resolve to current directory. 
- path = path.strip() - if not path: - continue - - for base, directories, filenames in os.walk(path): - for filename in filenames: - name, extension = os.path.splitext(filename) - if extension != '.py': - continue - - module_path = os.path.join(base, filename) - unique_name = uuid.uuid4().hex - - try: - module = imp.load_source(unique_name, module_path) - except Exception as error: - logger.warning( - 'Failed to load plugin from "{0}": {1}' - .format(module_path, error) - ) - continue - - try: - module.register - except AttributeError: - logger.warning( - 'Failed to load plugin that did not define a ' - '"register" function at the module level: {0}' - .format(module_path) - ) - else: - # Attempt to only pass arguments that are accepted by the - # register function. - specification = inspect.getargspec(module.register) - - selected_positional_arguments = positional_arguments - selected_keyword_arguments = keyword_arguments - - if ( - not specification.varargs and - len(positional_arguments) > len(specification.args) - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' - ) - - selected_positional_arguments = positional_arguments[ - len(specification.args): - ] - selected_keyword_arguments = {} - - elif not specification.keywords: - # Remove arguments that have been passed as positionals. - remainder = specification.args[ - len(positional_arguments): - ] - - # Determine remaining available keyword arguments. - defined_keyword_arguments = [] - if specification.defaults: - defined_keyword_arguments = specification.args[ - -len(specification.defaults): - ] - - remaining_keyword_arguments = set([ - keyword_argument for keyword_argument - in defined_keyword_arguments - if keyword_argument in remainder - ]) - - if not set(keyword_arguments.keys()).issubset( - remaining_keyword_arguments - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' 
- ) - selected_keyword_arguments = { - key: value - for key, value in keyword_arguments.items() - if key in remaining_keyword_arguments - } - - module.register( - *selected_positional_arguments, - **selected_keyword_arguments - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py deleted file mode 100644 index ea101a29d4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py +++ /dev/null @@ -1,202 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import re -import collections - -import ftrack_api.exception - - -class QueryResult(collections.Sequence): - '''Results from a query.''' - - OFFSET_EXPRESSION = re.compile('(?Poffset (?P\d+))') - LIMIT_EXPRESSION = re.compile('(?Plimit (?P\d+))') - - def __init__(self, session, expression, page_size=500): - '''Initialise result set. - - *session* should be an instance of :class:`ftrack_api.session.Session` - that will be used for executing the query *expression*. - - *page_size* should be an integer specifying the maximum number of - records to fetch in one request allowing the results to be fetched - incrementally in a transparent manner for optimal performance. Any - offset or limit specified in *expression* are honoured for final result - set, but intermediate queries may be issued with different offsets and - limits in order to fetch pages. When an embedded limit is smaller than - the given *page_size* it will be used instead and no paging will take - place. - - .. warning:: - - Setting *page_size* to a very large amount may negatively impact - performance of not only the caller, but the server in general. 
- - ''' - super(QueryResult, self).__init__() - self._session = session - self._results = [] - - ( - self._expression, - self._offset, - self._limit - ) = self._extract_offset_and_limit(expression) - - self._page_size = page_size - if self._limit is not None and self._limit < self._page_size: - # Optimise case where embedded limit is less than fetching a - # single page. - self._page_size = self._limit - - self._next_offset = self._offset - if self._next_offset is None: - # Initialise with zero offset. - self._next_offset = 0 - - def _extract_offset_and_limit(self, expression): - '''Process *expression* extracting offset and limit. - - Return (expression, offset, limit). - - ''' - offset = None - match = self.OFFSET_EXPRESSION.search(expression) - if match: - offset = int(match.group('value')) - expression = ( - expression[:match.start('offset')] + - expression[match.end('offset'):] - ) - - limit = None - match = self.LIMIT_EXPRESSION.search(expression) - if match: - limit = int(match.group('value')) - expression = ( - expression[:match.start('limit')] + - expression[match.end('limit'):] - ) - - return expression.strip(), offset, limit - - def __getitem__(self, index): - '''Return value at *index*.''' - while self._can_fetch_more() and index >= len(self._results): - self._fetch_more() - - return self._results[index] - - def __len__(self): - '''Return number of items.''' - while self._can_fetch_more(): - self._fetch_more() - - return len(self._results) - - def _can_fetch_more(self): - '''Return whether more results are available to fetch.''' - return self._next_offset is not None - - def _fetch_more(self): - '''Fetch next page of results if available.''' - if not self._can_fetch_more(): - return - - expression = '{0} offset {1} limit {2}'.format( - self._expression, self._next_offset, self._page_size - ) - records, metadata = self._session._query(expression) - self._results.extend(records) - - if self._limit is not None and (len(self._results) >= self._limit): - # 
Original limit reached. - self._next_offset = None - del self._results[self._limit:] - else: - # Retrieve next page offset from returned metadata. - self._next_offset = metadata.get('next', {}).get('offset', None) - - def all(self): - '''Fetch and return all data.''' - return list(self) - - def one(self): - '''Return exactly one single result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - Raise :exc:`ValueError` if an existing offset is already present in the - expression as offset is inappropriate when expecting a single item. - - Raise :exc:`~ftrack_api.exception.MultipleResultsFoundError` if more - than one result was available or - :exc:`~ftrack_api.exception.NoResultFoundError` if no results were - available. - - .. note:: - - Both errors subclass - :exc:`~ftrack_api.exception.IncorrectResultError` if you want to - catch only one error type. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - if self._offset is not None: - raise ValueError( - 'Expression contains an offset clause which does not make ' - 'sense when selecting a single item.' - ) - - # Apply custom limit as optimisation. A limit of 2 is used rather than - # 1 so that it is possible to test for multiple matching entries - # case. - expression += ' limit 2' - - results, metadata = self._session._query(expression) - - if not results: - raise ftrack_api.exception.NoResultFoundError() - - if len(results) != 1: - raise ftrack_api.exception.MultipleResultsFoundError() - - return results[0] - - def first(self): - '''Return first matching result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - If no matching result available return None. 
- - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - # Apply custom offset if present. - if self._offset is not None: - expression += ' offset {0}'.format(self._offset) - - # Apply custom limit as optimisation. - expression += ' limit 1' - - results, metadata = self._session._query(expression) - - if results: - return results[0] - - return None diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py deleted file mode 100644 index ee069b57b6..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py +++ /dev/null @@ -1,50 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - - -class ResourceIdentifierTransformer(object): - '''Transform resource identifiers. - - Provide ability to modify resource identifier before it is stored centrally - (:meth:`encode`), or after it has been retrieved, but before it is used - locally (:meth:`decode`). 
- - For example, you might want to decompose paths into a set of key, value - pairs to store centrally and then compose a path from those values when - reading back. - - .. note:: - - This is separate from any transformations an - :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted - towards common transformations. - - ''' - - def __init__(self, session): - '''Initialise resource identifier transformer. - - *session* should be the :class:`ftrack_api.session.Session` instance - to use for communication with the server. - - ''' - self.session = session - super(ResourceIdentifierTransformer, self).__init__() - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return resource_identifier - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. 
- - ''' - return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py deleted file mode 100644 index 1a5da44432..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py +++ /dev/null @@ -1,2515 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import json -import logging -import collections -import datetime -import os -import getpass -import functools -import itertools -import distutils.version -import hashlib -import tempfile -import threading -import atexit -import warnings - -import requests -import requests.auth -import arrow -import clique - -import ftrack_api -import ftrack_api.exception -import ftrack_api.entity.factory -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.cache -import ftrack_api.symbol -import ftrack_api.query -import ftrack_api.attribute -import ftrack_api.collection -import ftrack_api.event.hub -import ftrack_api.event.base -import ftrack_api.plugin -import ftrack_api.inspection -import ftrack_api.operation -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.entity_id -import ftrack_api.accessor.server -import ftrack_api._centralized_storage_scenario -import ftrack_api.logging -from ftrack_api.logging import LazyLogMessage as L - -try: - from weakref import WeakMethod -except ImportError: - from ftrack_api._weakref import WeakMethod - - -class SessionAuthentication(requests.auth.AuthBase): - '''Attach ftrack session authentication information to requests.''' - - def __init__(self, api_key, api_user): - '''Initialise with *api_key* and *api_user*.''' - self.api_key = api_key - self.api_user = api_user - super(SessionAuthentication, self).__init__() - - def 
__call__(self, request): - '''Modify *request* to have appropriate headers.''' - request.headers.update({ - 'ftrack-api-key': self.api_key, - 'ftrack-user': self.api_user - }) - return request - - -class Session(object): - '''An isolated session for interaction with an ftrack server.''' - - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. 
Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. - If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. - self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. 
- self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ftrack_api.event.hub.EventHub( - self._server_url, - self._api_user, - self._api_key, - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub in (None, True): - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = ( - auto_connect_event_hub is None - ) - - # Register to auto-close session on exit. - atexit.register(WeakMethod(self.close)) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? 
- if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def __enter__(self): - '''Return session as context manager.''' - return self - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit session context, closing session in process.''' - self.close() - - @property - def _request(self): - '''Return request session. - - Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has - been closed and connection unavailable. 
- - ''' - if self._managed_request is None: - raise ftrack_api.exception.ConnectionClosedError() - - return self._managed_request - - @_request.setter - def _request(self, value): - '''Set request session to *value*.''' - self._managed_request = value - - @property - def closed(self): - '''Return whether session has been closed.''' - return self._closed - - @property - def server_information(self): - '''Return server information such as server version.''' - return self._server_information.copy() - - @property - def server_url(self): - '''Return server ulr used for session.''' - return self._server_url - - @property - def api_user(self): - '''Return username used for session.''' - return self._api_user - - @property - def api_key(self): - '''Return API key used for session.''' - return self._api_key - - @property - def event_hub(self): - '''Return event hub.''' - return self._event_hub - - @property - def _local_cache(self): - '''Return top level memory cache.''' - return self.cache.caches[0] - - def check_server_compatibility(self): - '''Check compatibility with connected server.''' - server_version = self.server_information.get('version') - if server_version is None: - raise ftrack_api.exception.ServerCompatibilityError( - 'Could not determine server version.' - ) - - # Perform basic version check. - if server_version != 'dev': - min_server_version = '3.3.11' - if ( - distutils.version.LooseVersion(min_server_version) - > distutils.version.LooseVersion(server_version) - ): - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0} incompatible with this version of the ' - 'API which requires a server version >= {1}'.format( - server_version, - min_server_version - ) - ) - - def close(self): - '''Close session. - - Close connections to server. Clear any pending operations and local - cache. - - Use this to ensure that session is cleaned up properly after use. 
- - ''' - if self.closed: - self.logger.debug('Session already closed.') - return - - self._closed = True - - self.logger.debug('Closing session.') - if self.recorded_operations: - self.logger.warning( - 'Closing session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Close connections. - self._request.close() - self._request = None - - try: - self.event_hub.disconnect() - if self._auto_connect_event_hub_thread: - self._auto_connect_event_hub_thread.join() - except ftrack_api.exception.EventHubConnectionError: - pass - - self.logger.debug('Session closed.') - - def reset(self): - '''Reset session clearing local state. - - Clear all pending operations and expunge all entities from session. - - Also clear the local cache. If the cache used by the session is a - :class:`~ftrack_api.cache.LayeredCache` then only clear top level cache. - Otherwise, clear the entire cache. - - Plugins are not rediscovered or reinitialised, but certain plugin events - are re-emitted to properly configure session aspects that are dependant - on cache (such as location plugins). - - .. warning:: - - Previously attached entities are not reset in memory and will retain - their state, but should not be used. Doing so will cause errors. - - ''' - if self.recorded_operations: - self.logger.warning( - 'Resetting session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Re-configure certain session aspects that may be dependant on cache. 
- self._configure_locations() - - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.reset', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def auto_populating(self, auto_populate): - '''Temporarily set auto populate to *auto_populate*. - - The current setting will be restored automatically when done. - - Example:: - - with session.auto_populating(False): - print entity['name'] - - ''' - return AutoPopulatingContext(self, auto_populate) - - def operation_recording(self, record_operations): - '''Temporarily set operation recording to *record_operations*. - - The current setting will be restored automatically when done. - - Example:: - - with session.operation_recording(False): - entity['name'] = 'change_not_recorded' - - ''' - return OperationRecordingContext(self, record_operations) - - @property - def created(self): - '''Return list of newly created entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.CREATED - ] - - @property - def modified(self): - '''Return list of locally modified entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.MODIFIED - ] - - @property - def deleted(self): - '''Return list of deleted entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.DELETED - ] - - def reset_remote(self, reset_type, entity=None): - '''Perform a server side reset. - - *reset_type* is a server side supported reset type, - passing the optional *entity* to perform the option upon. 
- - Please refer to ftrack documentation for a complete list of - supported server side reset types. - ''' - - payload = { - 'action': 'reset_remote', - 'reset_type': reset_type - } - - if entity is not None: - payload.update({ - 'entity_type': entity.entity_type, - 'entity_key': entity.get('id') - }) - - result = self.call( - [payload] - ) - - return result[0]['data'] - - def create(self, entity_type, data=None, reconstructing=False): - '''Create and return an entity of *entity_type* with initial *data*. - - If specified, *data* should be a dictionary of key, value pairs that - should be used to populate attributes on the entity. - - If *reconstructing* is False then create a new entity setting - appropriate defaults for missing data. If True then reconstruct an - existing entity. - - Constructed entity will be automatically :meth:`merged ` - into the session. - - ''' - entity = self._create(entity_type, data, reconstructing=reconstructing) - entity = self.merge(entity) - return entity - - def _create(self, entity_type, data, reconstructing): - '''Create and return an entity of *entity_type* with initial *data*.''' - try: - EntityTypeClass = self.types[entity_type] - except KeyError: - raise ftrack_api.exception.UnrecognisedEntityTypeError(entity_type) - - return EntityTypeClass(self, data=data, reconstructing=reconstructing) - - def ensure(self, entity_type, data, identifying_keys=None): - '''Retrieve entity of *entity_type* with *data*, creating if necessary. - - *data* should be a dictionary of the same form passed to :meth:`create`. - - By default, check for an entity that has matching *data*. If - *identifying_keys* is specified as a list of keys then only consider the - values from *data* for those keys when searching for existing entity. If - *data* is missing an identifying key then raise :exc:`KeyError`. - - If no *identifying_keys* specified then use all of the keys from the - passed *data*. 
Raise :exc:`ValueError` if no *identifying_keys* can be - determined. - - Each key should be a string. - - .. note:: - - Currently only top level scalars supported. To ensure an entity by - looking at relationships, manually issue the :meth:`query` and - :meth:`create` calls. - - If more than one entity matches the determined filter criteria then - raise :exc:`~ftrack_api.exception.MultipleResultsFoundError`. - - If no matching entity found then create entity using supplied *data*. - - If a matching entity is found, then update it if necessary with *data*. - - .. note:: - - If entity created or updated then a :meth:`commit` will be issued - automatically. If this behaviour is undesired, perform the - :meth:`query` and :meth:`create` calls manually. - - Return retrieved or created entity. - - Example:: - - # First time, a new entity with `username=martin` is created. - entity = session.ensure('User', {'username': 'martin'}) - - # After that, the existing entity is retrieved. - entity = session.ensure('User', {'username': 'martin'}) - - # When existing entity retrieved, entity may also be updated to - # match supplied data. - entity = session.ensure( - 'User', {'username': 'martin', 'email': 'martin@example.com'} - ) - - ''' - if not identifying_keys: - identifying_keys = data.keys() - - self.logger.debug(L( - 'Ensuring entity {0!r} with data {1!r} using identifying keys ' - '{2!r}', entity_type, data, identifying_keys - )) - - if not identifying_keys: - raise ValueError( - 'Could not determine any identifying data to check against ' - 'when ensuring {0!r} with data {1!r}. 
Identifying keys: {2!r}' - .format(entity_type, data, identifying_keys) - ) - - expression = '{0} where'.format(entity_type) - criteria = [] - for identifying_key in identifying_keys: - value = data[identifying_key] - - if isinstance(value, basestring): - value = '"{0}"'.format(value) - - elif isinstance( - value, (arrow.Arrow, datetime.datetime, datetime.date) - ): - # Server does not store microsecond or timezone currently so - # need to strip from query. - # TODO: When datetime handling improved, update this logic. - value = ( - arrow.get(value).naive.replace(microsecond=0).isoformat() - ) - value = '"{0}"'.format(value) - - criteria.append('{0} is {1}'.format(identifying_key, value)) - - expression = '{0} {1}'.format( - expression, ' and '.join(criteria) - ) - - try: - entity = self.query(expression).one() - - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Creating entity as did not already exist.') - - # Create entity. - entity = self.create(entity_type, data) - self.commit() - - else: - self.logger.debug('Retrieved matching existing entity.') - - # Update entity if required. - updated = False - for key, target_value in data.items(): - if entity[key] != target_value: - entity[key] = target_value - updated = True - - if updated: - self.logger.debug('Updating existing entity to match new data.') - self.commit() - - return entity - - def delete(self, entity): - '''Mark *entity* for deletion.''' - if self.record_operations: - self.recorded_operations.push( - ftrack_api.operation.DeleteEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity) - ) - ) - - def get(self, entity_type, entity_key): - '''Return entity of *entity_type* with unique *entity_key*. - - First check for an existing entry in the configured cache, otherwise - issue a query to the server. - - If no matching entity found, return None. 
- - ''' - self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) - - primary_key_definition = self.types[entity_type].primary_key_attributes - if isinstance(entity_key, basestring): - entity_key = [entity_key] - - if len(entity_key) != len(primary_key_definition): - raise ValueError( - 'Incompatible entity_key {0!r} supplied. Entity type {1} ' - 'expects a primary key composed of {2} values ({3}).' - .format( - entity_key, entity_type, len(primary_key_definition), - ', '.join(primary_key_definition) - ) - ) - - entity = None - try: - entity = self._get(entity_type, entity_key) - - - except KeyError: - - # Query for matching entity. - self.logger.debug( - 'Entity not present in cache. Issuing new query.' - ) - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - expression = '{0} where ({1})'.format( - entity_type, ' and '.join(condition) - ) - - results = self.query(expression).all() - if results: - entity = results[0] - - return entity - - def _get(self, entity_type, entity_key): - '''Return cached entity of *entity_type* with unique *entity_key*. - - Raise :exc:`KeyError` if no such entity in the cache. - - ''' - # Check cache for existing entity emulating - # ftrack_api.inspection.identity result object to pass to key maker. - cache_key = self.cache_key_maker.key( - (str(entity_type), map(str, entity_key)) - ) - self.logger.debug(L( - 'Checking cache for entity with key {0}', cache_key - )) - entity = self.cache.get(cache_key) - self.logger.debug(L( - 'Retrieved existing entity from cache: {0} at {1}', - entity, id(entity) - )) - - return entity - - def query(self, expression, page_size=500): - '''Query against remote data according to *expression*. - - *expression* is not executed directly. Instead return an - :class:`ftrack_api.query.QueryResult` instance that will execute remote - call on access. 
- - *page_size* specifies the maximum page size that the returned query - result object should be configured with. - - .. seealso:: :ref:`querying` - - ''' - self.logger.debug(L('Query {0!r}', expression)) - - # Add in sensible projections if none specified. Note that this is - # done here rather than on the server to allow local modification of the - # schema setting to include commonly used custom attributes for example. - # TODO: Use a proper parser perhaps? - if not expression.startswith('select'): - entity_type = expression.split(' ', 1)[0] - EntityTypeClass = self.types[entity_type] - projections = EntityTypeClass.default_projections - - expression = 'select {0} from {1}'.format( - ', '.join(projections), - expression - ) - - query_result = ftrack_api.query.QueryResult( - self, expression, page_size=page_size - ) - return query_result - - def _query(self, expression): - '''Execute *query* and return (records, metadata). - - Records will be a list of entities retrieved via the query and metadata - a dictionary of accompanying information about the result set. - - ''' - # TODO: Actually support batching several queries together. - # TODO: Should batches have unique ids to match them up later. - batch = [{ - 'action': 'query', - 'expression': expression - }] - - # TODO: When should this execute? How to handle background=True? - results = self.call(batch) - - # Merge entities into local cache and return merged entities. - data = [] - merged = dict() - for entity in results[0]['data']: - data.append(self._merge_recursive(entity, merged)) - - return data, results[0]['metadata'] - - def merge(self, value, merged=None): - '''Merge *value* into session and return merged value. - - *merged* should be a mapping to record merges during run and should be - used to avoid infinite recursion. If not set will default to a - dictionary. 
- - ''' - if merged is None: - merged = {} - - with self.operation_recording(False): - return self._merge(value, merged) - - def _merge(self, value, merged): - '''Return merged *value*.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if isinstance(value, ftrack_api.entity.base.Entity): - log_debug and self.logger.debug( - 'Merging entity into session: {0} at {1}' - .format(value, id(value)) - ) - - return self._merge_entity(value, merged=merged) - - elif isinstance(value, ftrack_api.collection.Collection): - log_debug and self.logger.debug( - 'Merging collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - elif isinstance(value, ftrack_api.collection.MappedCollectionProxy): - log_debug and self.logger.debug( - 'Merging mapped collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value.collection: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - else: - return value - - def _merge_recursive(self, entity, merged=None): - '''Merge *entity* and all its attributes recursivly.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - attached = self.merge(entity, merged) - - for attribute in entity.attributes: - # Remote attributes. 
- remote_value = attribute.get_remote_value(entity) - - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - log_debug and self.logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - if isinstance(remote_value, ftrack_api.entity.base.Entity): - self._merge_recursive(remote_value, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.Collection - ): - for entry in remote_value: - self._merge_recursive(entry, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.MappedCollectionProxy - ): - for entry in remote_value.collection: - self._merge_recursive(entry, merged=merged) - - return attached - - def _merge_entity(self, entity, merged=None): - '''Merge *entity* into session returning merged entity. - - Merge is recursive so any references to other entities will also be - merged. - - *entity* will never be modified in place. Ensure that the returned - merged entity instance is used. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - with self.auto_populating(False): - entity_key = self.cache_key_maker.key( - ftrack_api.inspection.identity(entity) - ) - - # Check whether this entity has already been processed. - attached_entity = merged.get(entity_key) - if attached_entity is not None: - log_debug and self.logger.debug( - 'Entity already processed for key {0} as {1} at {2}' - .format(entity_key, attached_entity, id(attached_entity)) - ) - - return attached_entity - else: - log_debug and self.logger.debug( - 'Entity not already processed for key {0}.' - .format(entity_key) - ) - - # Check for existing instance of entity in cache. 
- log_debug and self.logger.debug( - 'Checking for entity in cache with key {0}'.format(entity_key) - ) - try: - attached_entity = self.cache.get(entity_key) - - log_debug and self.logger.debug( - 'Retrieved existing entity from cache: {0} at {1}' - .format(attached_entity, id(attached_entity)) - ) - - except KeyError: - # Construct new minimal instance to store in cache. - attached_entity = self._create( - entity.entity_type, {}, reconstructing=True - ) - - log_debug and self.logger.debug( - 'Entity not present in cache. Constructed new instance: ' - '{0} at {1}'.format(attached_entity, id(attached_entity)) - ) - - # Mark entity as seen to avoid infinite loops. - merged[entity_key] = attached_entity - - changes = attached_entity.merge(entity, merged=merged) - if changes: - self.cache.set(entity_key, attached_entity) - self.logger.debug('Cache updated with merged entity.') - - else: - self.logger.debug( - 'Cache not updated with merged entity as no differences ' - 'detected.' - ) - - return attached_entity - - def populate(self, entities, projections): - '''Populate *entities* with attributes specified by *projections*. - - Any locally set values included in the *projections* will not be - overwritten with the retrieved remote value. If this 'synchronise' - behaviour is required, first clear the relevant values on the entity by - setting them to :attr:`ftrack_api.symbol.NOT_SET`. Deleting the key will - have the same effect:: - - >>> print(user['username']) - martin - >>> del user['username'] - >>> print(user['username']) - Symbol(NOT_SET) - - .. note:: - - Entities that have been created and not yet persisted will be - skipped as they have no remote values to fetch. 
- - ''' - self.logger.debug(L( - 'Populate {0!r} projections for {1}.', projections, entities - )) - - if not isinstance( - entities, (list, tuple, ftrack_api.query.QueryResult) - ): - entities = [entities] - - # TODO: How to handle a mixed collection of different entity types - # Should probably fail, but need to consider handling hierarchies such - # as User and Group both deriving from Resource. Actually, could just - # proceed and ignore projections that are not present in entity type. - - entities_to_process = [] - - for entity in entities: - if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: - # Created entities that are not yet persisted have no remote - # values. Don't raise an error here as it is reasonable to - # iterate over an entities properties and see that some of them - # are NOT_SET. - self.logger.debug(L( - 'Skipping newly created entity {0!r} for population as no ' - 'data will exist in the remote for this entity yet.', entity - )) - continue - - entities_to_process.append(entity) - - if entities_to_process: - reference_entity = entities_to_process[0] - entity_type = reference_entity.entity_type - query = 'select {0} from {1}'.format(projections, entity_type) - - primary_key_definition = reference_entity.primary_key_attributes - entity_keys = [ - ftrack_api.inspection.primary_key(entity).values() - for entity in entities_to_process - ] - - if len(primary_key_definition) > 1: - # Composite keys require full OR syntax unfortunately. 
- conditions = [] - for entity_key in entity_keys: - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - conditions.append('({0})'.format('and '.join(condition))) - - query = '{0} where {1}'.format(query, ' or '.join(conditions)) - - else: - primary_key = primary_key_definition[0] - - if len(entity_keys) > 1: - query = '{0} where {1} in ({2})'.format( - query, primary_key, - ','.join([ - str(entity_key[0]) for entity_key in entity_keys - ]) - ) - else: - query = '{0} where {1} is {2}'.format( - query, primary_key, str(entity_keys[0][0]) - ) - - result = self.query(query) - - # Fetch all results now. Doing so will cause them to populate the - # relevant entities in the cache. - result.all() - - # TODO: Should we check that all requested attributes were - # actually populated? If some weren't would we mark that to avoid - # repeated calls or perhaps raise an error? - - # TODO: Make atomic. - def commit(self): - '''Commit all local changes to the server.''' - batch = [] - - with self.auto_populating(False): - for operation in self.recorded_operations: - - # Convert operation to payload. - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - # At present, data payload requires duplicating entity - # type in data and also ensuring primary key added. - entity_data = { - '__entity_type__': operation.entity_type, - } - entity_data.update(operation.entity_key) - entity_data.update(operation.entity_data) - - payload = OperationPayload({ - 'action': 'create', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ): - entity_data = { - # At present, data payload requires duplicating entity - # type. 
- '__entity_type__': operation.entity_type, - operation.attribute_name: operation.new_value - } - - payload = OperationPayload({ - 'action': 'update', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - payload = OperationPayload({ - 'action': 'delete', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values() - }) - - else: - raise ValueError( - 'Cannot commit. Unrecognised operation type {0} ' - 'detected.'.format(type(operation)) - ) - - batch.append(payload) - - # Optimise batch. - # TODO: Might be better to perform these on the operations list instead - # so all operation contextual information available. - - # If entity was created and deleted in one batch then remove all - # payloads for that entity. - created = set() - deleted = set() - - for payload in batch: - if payload['action'] == 'create': - created.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - elif payload['action'] == 'delete': - deleted.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - created_then_deleted = deleted.intersection(created) - if created_then_deleted: - optimised_batch = [] - for payload in batch: - entity_type = payload.get('entity_type') - entity_key = str(payload.get('entity_key')) - - if (entity_type, entity_key) in created_then_deleted: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Remove early update operations so that only last operation on - # attribute is applied server side. 
- updates_map = set() - for payload in reversed(batch): - if payload['action'] in ('update', ): - for key, value in payload['entity_data'].items(): - if key == '__entity_type__': - continue - - identity = ( - payload['entity_type'], str(payload['entity_key']), key - ) - if identity in updates_map: - del payload['entity_data'][key] - else: - updates_map.add(identity) - - # Remove NOT_SET values from entity_data. - for payload in batch: - entity_data = payload.get('entity_data', {}) - for key, value in entity_data.items(): - if value is ftrack_api.symbol.NOT_SET: - del entity_data[key] - - # Remove payloads with redundant entity_data. - optimised_batch = [] - for payload in batch: - entity_data = payload.get('entity_data') - if entity_data is not None: - keys = entity_data.keys() - if not keys or keys == ['__entity_type__']: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Collapse updates that are consecutive into one payload. Also, collapse - # updates that occur immediately after creation into the create payload. - optimised_batch = [] - previous_payload = None - - for payload in batch: - if ( - previous_payload is not None - and payload['action'] == 'update' - and previous_payload['action'] in ('create', 'update') - and previous_payload['entity_type'] == payload['entity_type'] - and previous_payload['entity_key'] == payload['entity_key'] - ): - previous_payload['entity_data'].update(payload['entity_data']) - continue - - else: - optimised_batch.append(payload) - previous_payload = payload - - batch = optimised_batch - - # Process batch. - if batch: - result = self.call(batch) - - # Clear recorded operations. - self.recorded_operations.clear() - - # As optimisation, clear local values which are not primary keys to - # avoid redundant merges when merging references. Note: primary keys - # remain as needed for cache retrieval on new entities. 
- with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - for attribute in entity: - if attribute not in entity.primary_key_attributes: - del entity[attribute] - - # Process results merging into cache relevant data. - for entry in result: - - if entry['action'] in ('create', 'update'): - # Merge returned entities into local cache. - self.merge(entry['data']) - - elif entry['action'] == 'delete': - # TODO: Detach entity - need identity returned? - # TODO: Expunge entity from cache. - pass - # Clear remaining local state, including local values for primary - # keys on entities that were merged. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - entity.clear() - - def rollback(self): - '''Clear all recorded operations and local state. - - Typically this would be used following a failed :meth:`commit` in order - to revert the session to a known good state. - - Newly created entities not yet persisted will be detached from the - session / purged from cache and no longer contribute, but the actual - objects are not deleted from memory. They should no longer be used and - doing so could cause errors. - - ''' - with self.auto_populating(False): - with self.operation_recording(False): - - # Detach all newly created entities and remove from cache. This - # is done because simply clearing the local values of newly - # created entities would result in entities with no identity as - # primary key was local while not persisted. In addition, it - # makes no sense for failed created entities to exist in session - # or cache. 
- for operation in self.recorded_operations: - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - entity_key = str(( - str(operation.entity_type), - operation.entity_key.values() - )) - try: - self.cache.remove(entity_key) - except KeyError: - pass - - # Clear locally stored modifications on remaining entities. - for entity in self._local_cache.values(): - entity.clear() - - self.recorded_operations.clear() - - def _fetch_server_information(self): - '''Return server information.''' - result = self.call([{'action': 'query_server_information'}]) - return result[0] - - def _discover_plugins(self, plugin_arguments=None): - '''Find and load plugins in search paths. - - Each discovered module should implement a register function that - accepts this session as first argument. Typically the function should - register appropriate event listeners against the session's event hub. - - def register(session): - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) - - *plugin_arguments* should be an optional mapping of keyword arguments - and values to pass to plugin register functions upon discovery. - - ''' - plugin_arguments = plugin_arguments or {} - ftrack_api.plugin.discover( - self._plugin_paths, [self], plugin_arguments - ) - - def _read_schemas_from_cache(self, schema_cache_path): - '''Return schemas and schema hash from *schema_cache_path*. - - *schema_cache_path* should be the path to the file containing the - schemas in JSON format. 
- - ''' - self.logger.debug(L( - 'Reading schemas from cache {0!r}', schema_cache_path - )) - - if not os.path.exists(schema_cache_path): - self.logger.info(L( - 'Cache file not found at {0!r}.', schema_cache_path - )) - - return [], None - - with open(schema_cache_path, 'r') as schema_file: - schemas = json.load(schema_file) - hash_ = hashlib.md5( - json.dumps(schemas, sort_keys=True) - ).hexdigest() - - return schemas, hash_ - - def _write_schemas_to_cache(self, schemas, schema_cache_path): - '''Write *schemas* to *schema_cache_path*. - - *schema_cache_path* should be a path to a file that the schemas can be - written to in JSON format. - - ''' - self.logger.debug(L( - 'Updating schema cache {0!r} with new schemas.', schema_cache_path - )) - - with open(schema_cache_path, 'w') as local_cache_file: - json.dump(schemas, local_cache_file, indent=4) - - def _load_schemas(self, schema_cache_path): - '''Load schemas. - - First try to load schemas from cache at *schema_cache_path*. If the - cache is not available or the cache appears outdated then load schemas - from server and store fresh copy in cache. - - If *schema_cache_path* is set to `False`, always load schemas from - server bypassing cache. - - ''' - local_schema_hash = None - schemas = [] - - if schema_cache_path: - try: - schemas, local_schema_hash = self._read_schemas_from_cache( - schema_cache_path - ) - except (IOError, TypeError, AttributeError, ValueError): - # Catch any known exceptions when trying to read the local - # schema cache to prevent API from being unusable. - self.logger.exception(L( - 'Schema cache could not be loaded from {0!r}', - schema_cache_path - )) - - # Use `dictionary.get` to retrieve hash to support older version of - # ftrack server not returning a schema hash. - server_hash = self._server_information.get( - 'schema_hash', False - ) - if local_schema_hash != server_hash: - self.logger.debug(L( - 'Loading schemas from server due to hash not matching.' 
- 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash - )) - schemas = self.call([{'action': 'query_schemas'}])[0] - - if schema_cache_path: - try: - self._write_schemas_to_cache(schemas, schema_cache_path) - except (IOError, TypeError): - self.logger.exception(L( - 'Failed to update schema cache {0!r}.', - schema_cache_path - )) - - else: - self.logger.debug(L( - 'Using cached schemas from {0!r}', schema_cache_path - )) - - return schemas - - def _build_entity_type_classes(self, schemas): - '''Build default entity type classes.''' - fallback_factory = ftrack_api.entity.factory.StandardFactory() - classes = {} - - for schema in schemas: - results = self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ), - synchronous=True - ) - - results = [result for result in results if result is not None] - - if not results: - self.logger.debug(L( - 'Using default StandardFactory to construct entity type ' - 'class for "{0}"', schema['id'] - )) - entity_type_class = fallback_factory.create(schema) - - elif len(results) > 1: - raise ValueError( - 'Expected single entity type to represent schema "{0}" but ' - 'received {1} entity types instead.' - .format(schema['id'], len(results)) - ) - - else: - entity_type_class = results[0] - - classes[entity_type_class.entity_type] = entity_type_class - - return classes - - def _configure_locations(self): - '''Configure locations.''' - # First configure builtin locations, by injecting them into local cache. - - # Origin. 
- location = self.create( - 'Location', - data=dict( - name='ftrack.origin', - id=ftrack_api.symbol.ORIGIN_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.OriginLocationMixin, - name='OriginLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 100 - - # Unmanaged. - location = self.create( - 'Location', - data=dict( - name='ftrack.unmanaged', - id=ftrack_api.symbol.UNMANAGED_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - # location.resource_identifier_transformer = ( - # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) - # ) - location.priority = 90 - - # Review. - location = self.create( - 'Location', - data=dict( - name='ftrack.review', - id=ftrack_api.symbol.REVIEW_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 110 - - # Server. - location = self.create( - 'Location', - data=dict( - name='ftrack.server', - id=ftrack_api.symbol.SERVER_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.ServerLocationMixin, - name='ServerLocation' - ) - location.accessor = ftrack_api.accessor.server._ServerAccessor( - session=self - ) - location.structure = ftrack_api.structure.entity_id.EntityIdStructure() - location.priority = 150 - - # Master location based on server scenario. 
- storage_scenario = self.server_information.get('storage_scenario') - - if ( - storage_scenario and - storage_scenario.get('scenario') - ): - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.activate', - data=dict( - storage_scenario=storage_scenario - ) - ), - synchronous=True - ) - - # Next, allow further configuration of locations via events. - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ), - synchronous=True - ) - - @ftrack_api.logging.deprecation_warning( - 'Session._call is now available as public method Session.call. The ' - 'private method will be removed in version 2.0.' - ) - def _call(self, data): - '''Make request to server with *data* batch describing the actions. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - return self.call(data) - - def call(self, data): - '''Make request to server with *data* batch describing the actions.''' - url = self._server_url + '/api' - headers = { - 'content-type': 'application/json', - 'accept': 'application/json' - } - data = self.encode(data, entity_attribute_strategy='modified_only') - - self.logger.debug(L('Calling server {0} with {1!r}', url, data)) - - response = self._request.post( - url, - headers=headers, - data=data - ) - - self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) - - self.logger.debug(L('Response: {0!r}', response.text)) - try: - result = self.decode(response.text) - - except Exception: - error_message = ( - 'Server reported error in unexpected format. Raw error was: {0}' - .format(response.text) - ) - self.logger.exception(error_message) - raise ftrack_api.exception.ServerError(error_message) - - else: - if 'exception' in result: - # Handle exceptions. 
- error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] - ) - self.logger.exception(error_message) - raise ftrack_api.exception.ServerError(error_message) - - return result - - def encode(self, data, entity_attribute_strategy='set_only'): - '''Return *data* encoded as JSON formatted string. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. The following strategies are available: - - * *all* - Encode all attributes, loading any that are currently NOT_SET. - * *set_only* - Encode only attributes that are currently set without - loading any from the remote. - * *modified_only* - Encode only attributes that have been modified - locally. - * *persisted_only* - Encode only remote (persisted) attribute values. - - ''' - entity_attribute_strategies = ( - 'all', 'set_only', 'modified_only', 'persisted_only' - ) - if entity_attribute_strategy not in entity_attribute_strategies: - raise ValueError( - 'Unsupported entity_attribute_strategy "{0}". Must be one of ' - '{1}'.format( - entity_attribute_strategy, - ', '.join(entity_attribute_strategies) - ) - ) - - return json.dumps( - data, - sort_keys=True, - default=functools.partial( - self._encode, - entity_attribute_strategy=entity_attribute_strategy - ) - ) - - def _encode(self, item, entity_attribute_strategy='set_only'): - '''Return JSON encodable version of *item*. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. See :meth:`Session.encode` for available strategies. 
- - ''' - if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): - return { - '__type__': 'datetime', - 'value': item.isoformat() - } - - if isinstance(item, OperationPayload): - data = dict(item.items()) - if "entity_data" in data: - for key, value in data["entity_data"].items(): - if isinstance(value, ftrack_api.entity.base.Entity): - data["entity_data"][key] = self.entity_reference(value) - - return data - - if isinstance(item, ftrack_api.entity.base.Entity): - data = self.entity_reference(item) - - with self.auto_populating(True): - - for attribute in item.attributes: - value = ftrack_api.symbol.NOT_SET - - if entity_attribute_strategy == 'all': - value = attribute.get_value(item) - - elif entity_attribute_strategy == 'set_only': - if attribute.is_set(item): - value = attribute.get_local_value(item) - if value is ftrack_api.symbol.NOT_SET: - value = attribute.get_remote_value(item) - - elif entity_attribute_strategy == 'modified_only': - if attribute.is_modified(item): - value = attribute.get_local_value(item) - - elif entity_attribute_strategy == 'persisted_only': - if not attribute.computed: - value = attribute.get_remote_value(item) - - if value is not ftrack_api.symbol.NOT_SET: - if isinstance( - attribute, ftrack_api.attribute.ReferenceAttribute - ): - if isinstance(value, ftrack_api.entity.base.Entity): - value = self.entity_reference(value) - - data[attribute.name] = value - - return data - - if isinstance( - item, ftrack_api.collection.MappedCollectionProxy - ): - # Use proxied collection for serialisation. - item = item.collection - - if isinstance(item, ftrack_api.collection.Collection): - data = [] - for entity in item: - data.append(self.entity_reference(entity)) - - return data - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. 
- - Return a mapping containing the __entity_type__ of the entity along with - the key, value pairs that make up it's primary key. - - ''' - reference = { - '__entity_type__': entity.entity_type - } - with self.auto_populating(False): - reference.update(ftrack_api.inspection.primary_key(entity)) - - return reference - - @ftrack_api.logging.deprecation_warning( - 'Session._entity_reference is now available as public method ' - 'Session.entity_reference. The private method will be removed ' - 'in version 2.0.' - ) - def _entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. - - Return a mapping containing the __entity_type__ of the entity along - with the key, value pairs that make up it's primary key. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - return self.entity_reference(entity) - - def decode(self, string): - '''Return decoded JSON *string* as Python object.''' - with self.operation_recording(False): - return json.loads(string, object_hook=self._decode) - - def _decode(self, item): - '''Return *item* transformed into appropriate representation.''' - if isinstance(item, collections.Mapping): - if '__type__' in item: - if item['__type__'] == 'datetime': - item = arrow.get(item['value']) - - elif '__entity_type__' in item: - item = self._create( - item['__entity_type__'], item, reconstructing=True - ) - - return item - - def _get_locations(self, filter_inaccessible=True): - '''Helper to returns locations ordered by priority. - - If *filter_inaccessible* is True then only accessible locations will be - included in result. - - ''' - # Optimise this call. - locations = self.query('Location') - - # Filter. - if filter_inaccessible: - locations = filter( - lambda location: location.accessor, - locations - ) - - # Sort by priority. 
- locations = sorted( - locations, key=lambda location: location.priority - ) - - return locations - - def pick_location(self, component=None): - '''Return suitable location to use. - - If no *component* specified then return highest priority accessible - location. Otherwise, return highest priority accessible location that - *component* is available in. - - Return None if no suitable location could be picked. - - ''' - if component: - return self.pick_locations([component])[0] - - else: - locations = self._get_locations() - if locations: - return locations[0] - else: - return None - - def pick_locations(self, components): - '''Return suitable locations for *components*. - - Return list of locations corresponding to *components* where each - picked location is the highest priority accessible location for that - component. If a component has no location available then its - corresponding entry will be None. - - ''' - candidate_locations = self._get_locations() - availabilities = self.get_component_availabilities( - components, locations=candidate_locations - ) - - locations = [] - for component, availability in zip(components, availabilities): - location = None - - for candidate_location in candidate_locations: - if availability.get(candidate_location['id']) > 0.0: - location = candidate_location - break - - locations.append(location) - - return locations - - def create_component( - self, path, data=None, location='auto' - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. 
For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). - - If *location* is specified then automatically add component to that - location. The default of 'auto' will automatically pick a suitable - location to add the component to if one is available. To not add to any - location specifiy locations as None. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration in the - location. - ''' - if data is None: - data = {} - - if location == 'auto': - # Check if the component name matches one of the ftrackreview - # specific names. Add the component to the ftrack.review location if - # so. This is used to not break backwards compatibility. - if data.get('name') in ( - 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' - ): - location = self.get( - 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID - ) - - else: - location = self.pick_location() - - try: - collection = clique.parse(path) - - except ValueError: - # Assume is a single file. - if 'size' not in data: - data['size'] = self._get_filesystem_size(path) - - data.setdefault('file_type', os.path.splitext(path)[-1]) - - return self._create_component( - 'FileComponent', path, data, location - ) - - else: - # Calculate size of container and members. 
- member_sizes = {} - container_size = data.get('size') - - if container_size is not None: - if len(collection.indexes) > 0: - member_size = int( - round(container_size / len(collection.indexes)) - ) - for item in collection: - member_sizes[item] = member_size - - else: - container_size = 0 - for item in collection: - member_sizes[item] = self._get_filesystem_size(item) - container_size += member_sizes[item] - - # Create sequence component - container_path = collection.format('{head}{padding}{tail}') - data.setdefault('padding', collection.padding) - data.setdefault('file_type', os.path.splitext(container_path)[-1]) - data.setdefault('size', container_size) - - container = self._create_component( - 'SequenceComponent', container_path, data, location=None - ) - - # Create member components for sequence. - for member_path in collection: - member_data = { - 'name': collection.match(member_path).group('index'), - 'container': container, - 'size': member_sizes[member_path], - 'file_type': os.path.splitext(member_path)[-1] - } - - component = self._create_component( - 'FileComponent', member_path, member_data, location=None - ) - container['members'].append(component) - - if location: - origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - location.add_component( - container, origin_location, recursive=True - ) - - return container - - def _create_component(self, entity_type, path, data, location): - '''Create and return component. - - See public function :py:func:`createComponent` for argument details. - - ''' - component = self.create(entity_type, data) - - # Add to special origin location so that it is possible to add to other - # locations. 
- origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component(component, path, recursive=False) - - if location: - location.add_component(component, origin_location, recursive=False) - - return component - - def _get_filesystem_size(self, path): - '''Return size from *path*''' - try: - size = os.path.getsize(path) - except OSError: - size = 0 - - return size - - def get_component_availability(self, component, locations=None): - '''Return availability of *component*. - - If *locations* is set then limit result to availability of *component* - in those *locations*. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.get_component_availabilities( - [component], locations=locations - )[0] - - def get_component_availabilities(self, components, locations=None): - '''Return availabilities of *components*. - - If *locations* is set then limit result to availabilities of - *components* in those *locations*. - - Return a list of dictionaries of {location_id:percentage_availability}. - The list indexes correspond to those of *components*. - - ''' - availabilities = [] - - if locations is None: - locations = self.query('Location') - - # Separate components into two lists, those that are containers and - # those that are not, so that queries can be optimised. - standard_components = [] - container_components = [] - - for component in components: - if 'members' in component.keys(): - container_components.append(component) - else: - standard_components.append(component) - - # Perform queries. 
- if standard_components: - self.populate( - standard_components, 'component_locations.location_id' - ) - - if container_components: - self.populate( - container_components, - 'members, component_locations.location_id' - ) - - base_availability = {} - for location in locations: - base_availability[location['id']] = 0.0 - - for component in components: - availability = base_availability.copy() - availabilities.append(availability) - - is_container = 'members' in component.keys() - if is_container and len(component['members']): - member_availabilities = self.get_component_availabilities( - component['members'], locations=locations - ) - multiplier = 1.0 / len(component['members']) - for member, member_availability in zip( - component['members'], member_availabilities - ): - for location_id, ratio in member_availability.items(): - availability[location_id] += ( - ratio * multiplier - ) - else: - for component_location in component['component_locations']: - location_id = component_location['location_id'] - if location_id in availability: - availability[location_id] = 100.0 - - for location_id, percentage in availability.items(): - # Avoid quantization error by rounding percentage and clamping - # to range 0-100. - adjusted_percentage = round(percentage, 9) - adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) - availability[location_id] = adjusted_percentage - - return availabilities - - @ftrack_api.logging.deprecation_warning( - 'Session.delayed_job has been deprecated in favour of session.call. ' - 'Please refer to the release notes for more information.' - ) - def delayed_job(self, job_type): - '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. - - *job_type* should be one of the allowed job types. There is currently - only one remote job type "SYNC_USERS_LDAP". 
- ''' - if job_type not in (ftrack_api.symbol.JOB_SYNC_USERS_LDAP, ): - raise ValueError( - u'Invalid Job type: {0}.'.format(job_type) - ) - - operation = { - 'action': 'delayed_job', - 'job_type': job_type.name - } - - try: - result = self.call( - [operation] - )[0] - - except ftrack_api.exception.ServerError as error: - raise - - return result['data'] - - def get_widget_url(self, name, entity=None, theme=None): - '''Return an authenticated URL for widget with *name* and given options. - - The returned URL will be authenticated using a token which will expire - after 6 minutes. - - *name* should be the name of the widget to return and should be one of - 'info', 'tasks' or 'tasks_browser'. - - Certain widgets require an entity to be specified. If so, specify it by - setting *entity* to a valid entity instance. - - *theme* sets the theme of the widget and can be either 'light' or 'dark' - (defaulting to 'dark' if an invalid option given). - - ''' - operation = { - 'action': 'get_widget_url', - 'name': name, - 'theme': theme - } - if entity: - operation['entity_type'] = entity.entity_type - operation['entity_key'] = ( - ftrack_api.inspection.primary_key(entity).values() - ) - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'get_widget_url\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "get_widget_url", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - else: - return result[0]['widget_url'] - - def encode_media(self, media, version_id=None, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible that can be used - as a thumbnail. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *version_id* is specified, the new components will automatically be - associated with the AssetVersion. Otherwise, the components will not - be associated to a version even if the supplied *media* belongs to one. - A server version of 3.3.32 or higher is required for the version_id - argument to function properly. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. - ''' - if isinstance(media, basestring): - # Media is a path to a file. - server_location = self.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - if keep_original == 'auto': - keep_original = False - - component_data = None - if keep_original: - component_data = dict(version_id=version_id) - - component = self.create_component( - path=media, - data=component_data, - location=server_location - ) - - # Auto commit to ensure component exists when sent to server. - self.commit() - - elif ( - hasattr(media, 'entity_type') and - media.entity_type in ('FileComponent',) - ): - # Existing file component. 
- component = media - if keep_original == 'auto': - keep_original = True - - else: - raise ValueError( - 'Unable to encode media of type: {0}'.format(type(media)) - ) - - operation = { - 'action': 'encode_media', - 'component_id': component['id'], - 'version_id': version_id, - 'keep_original': keep_original - } - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'encode_media\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "encode_media", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return self.get('Job', result[0]['job_id']) - - def get_upload_metadata( - self, component_id, file_name, file_size, checksum=None - ): - '''Return URL and headers used to upload data for *component_id*. - - *file_name* and *file_size* should match the components details. - - The returned URL should be requested using HTTP PUT with the specified - headers. - - The *checksum* is used as the Content-MD5 header and should contain - the base64-encoded 128-bit MD5 digest of the message (without the - headers) according to RFC 1864. This can be used as a message integrity - check to verify that the data is the same data that was originally sent. - ''' - operation = { - 'action': 'get_upload_metadata', - 'component_id': component_id, - 'file_name': file_name, - 'file_size': file_size, - 'checksum': checksum - } - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. 
- if 'Invalid action u\'get_upload_metadata\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"get_upload_metadata", please update server and try ' - 'again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return result[0] - - def send_user_invite(self, user): - '''Send a invitation to the provided *user*. - - *user* is a User instance - - ''' - - self.send_user_invites( - [user] - ) - - def send_user_invites(self, users): - '''Send a invitation to the provided *user*. - - *users* is a list of User instances - - ''' - - operations = [] - - for user in users: - operations.append( - { - 'action':'send_user_invite', - 'user_id': user['id'] - } - ) - - try: - self.call(operations) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_user_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_user_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - def send_review_session_invite(self, invitee): - '''Send an invite to a review session to *invitee*. - - *invitee* is a instance of ReviewSessionInvitee. - - .. note:: - - The *invitee* must be committed. - - ''' - self.send_review_session_invites([invitee]) - - def send_review_session_invites(self, invitees): - '''Send an invite to a review session to a list of *invitees*. - - *invitee* is a list of ReviewSessionInvitee objects. - - .. note:: - - All *invitees* must be committed. 
- - ''' - operations = [] - - for invitee in invitees: - operations.append( - { - 'action': 'send_review_session_invite', - 'review_session_invitee_id': invitee['id'] - } - ) - - try: - self.call(operations) - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_review_session_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_review_session_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - -class AutoPopulatingContext(object): - '''Context manager for temporary change of session auto_populate value.''' - - def __init__(self, session, auto_populate): - '''Initialise context.''' - super(AutoPopulatingContext, self).__init__() - self._session = session - self._auto_populate = auto_populate - self._current_auto_populate = None - - def __enter__(self): - '''Enter context switching to desired auto populate setting.''' - self._current_auto_populate = self._session.auto_populate - self._session.auto_populate = self._auto_populate - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context resetting auto populate to original setting.''' - self._session.auto_populate = self._current_auto_populate - - -class OperationRecordingContext(object): - '''Context manager for temporary change of session record_operations.''' - - def __init__(self, session, record_operations): - '''Initialise context.''' - super(OperationRecordingContext, self).__init__() - self._session = session - self._record_operations = record_operations - self._current_record_operations = None - - def __enter__(self): - '''Enter context.''' - self._current_record_operations = self._session.record_operations - self._session.record_operations = self._record_operations - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit 
context.''' - self._session.record_operations = self._current_record_operations - - -class OperationPayload(collections.MutableMapping): - '''Represent operation payload.''' - - def __init__(self, *args, **kwargs): - '''Initialise payload.''' - super(OperationPayload, self).__init__() - self._data = dict() - self.update(dict(*args, **kwargs)) - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py deleted file mode 100644 index 1aab07ed77..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py deleted file mode 100644 index eae3784dc2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py +++ /dev/null @@ -1,38 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from abc import ABCMeta, abstractmethod - - -class Structure(object): - '''Structure plugin 
interface. - - A structure plugin should compute appropriate paths for data. - - ''' - - __metaclass__ = ABCMeta - - def __init__(self, prefix=''): - '''Initialise structure.''' - self.prefix = prefix - self.path_separator = '/' - super(Structure, self).__init__() - - @abstractmethod - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - - def _get_sequence_expression(self, sequence): - '''Return a sequence expression for *sequence* component.''' - padding = sequence['padding'] - if padding: - expression = '%0{0}d'.format(padding) - else: - expression = '%d' - - return expression diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py deleted file mode 100644 index ae466bf6d9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py +++ /dev/null @@ -1,12 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.structure.base - - -class EntityIdStructure(ftrack_api.structure.base.Structure): - '''Entity id pass-through structure.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a *resourceIdentifier* for supplied *entity*.''' - return entity['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py deleted file mode 100644 index acc3e21b02..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 
2014 ftrack - -import os - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class IdStructure(ftrack_api.structure.base.Structure): - '''Id based structure supporting Components only. - - A components unique id will be used to form a path to store the data at. - To avoid millions of entries in one directory each id is chunked into four - prefix directories with the remainder used to name the file:: - - /prefix/1/2/3/4/56789 - - If the component has a defined filetype it will be added to the path:: - - /prefix/1/2/3/4/56789.exr - - Components that are children of container components will be placed inside - the id structure of their parent:: - - /prefix/1/2/3/4/56789/355827648d.exr - /prefix/1/2/3/4/56789/ajf24215b5.exr - - However, sequence children will be named using their label as an index and - a common prefix of 'file.':: - - /prefix/1/2/3/4/56789/file.0001.exr - /prefix/1/2/3/4/56789/file.0002.exr - - ''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - if entity.entity_type in ('FileComponent',): - # When in a container, place the file inside a directory named - # after the container. - container = entity['container'] - if container and container is not ftrack_api.symbol.NOT_SET: - path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Label doubles as index for now. - name = 'file.{0}{1}'.format( - entity['name'], entity['file_type'] - ) - parts = [os.path.dirname(path), name] - - else: - # Just place uniquely identified file into directory - name = entity['id'] + entity['file_type'] - parts = [path, name] - - else: - name = entity['id'][4:] + entity['file_type'] - parts = ([self.prefix] + list(entity['id'][:4]) + [name]) - - elif entity.entity_type in ('SequenceComponent',): - name = 'file' - - # Add a sequence identifier. 
- sequence_expression = self._get_sequence_expression(entity) - name += '.{0}'.format(sequence_expression) - - if ( - entity['file_type'] and - entity['file_type'] is not ftrack_api.symbol.NOT_SET - ): - name += entity['file_type'] - - parts = ([self.prefix] + list(entity['id'][:4]) - + [entity['id'][4:]] + [name]) - - elif entity.entity_type in ('ContainerComponent',): - # Just an id directory - parts = ([self.prefix] + - list(entity['id'][:4]) + [entity['id'][4:]]) - - else: - raise NotImplementedError('Cannot generate path for unsupported ' - 'entity {0}'.format(entity)) - - return self.path_separator.join(parts).strip('/') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py deleted file mode 100644 index 0d4d3a57f5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from .base import Structure - - -class OriginStructure(Structure): - '''Origin structure that passes through existing resource identifier.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* should be a mapping that includes at least a - 'source_resource_identifier' key that refers to the resource identifier - to pass through. - - ''' - if context is None: - context = {} - - resource_identifier = context.get('source_resource_identifier') - if resource_identifier is None: - raise ValueError( - 'Could not generate resource identifier as no source resource ' - 'identifier found in passed context.' 
- ) - - return resource_identifier diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py deleted file mode 100644 index 0b0602df00..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py +++ /dev/null @@ -1,217 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import re -import unicodedata - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class StandardStructure(ftrack_api.structure.base.Structure): - '''Project hierarchy based structure that only supports Components. - - The resource identifier is generated from the project code, the name - of objects in the project structure, asset name and version number:: - - my_project/folder_a/folder_b/asset_name/v003 - - If the component is a `FileComponent` then the name of the component and the - file type are used as filename in the resource_identifier:: - - my_project/folder_a/folder_b/asset_name/v003/foo.jpg - - If the component is a `SequenceComponent` then a sequence expression, - `%04d`, is used. E.g. a component with the name `foo` yields:: - - my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg - - For the member components their index in the sequence is used:: - - my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg - - The name of the component is added to the resource identifier if the - component is a `ContainerComponent`. E.g. 
a container component with the - name `bar` yields:: - - my_project/folder_a/folder_b/asset_name/v003/bar - - For a member of that container the file name is based on the component name - and file type:: - - my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf - - ''' - - def __init__( - self, project_versions_prefix=None, illegal_character_substitute='_' - ): - '''Initialise structure. - - If *project_versions_prefix* is defined, insert after the project code - for versions published directly under the project:: - - my_project//v001/foo.jpg - - Replace illegal characters with *illegal_character_substitute* if - defined. - - .. note:: - - Nested component containers/sequences are not supported. - - ''' - super(StandardStructure, self).__init__() - self.project_versions_prefix = project_versions_prefix - self.illegal_character_substitute = illegal_character_substitute - - def _get_parts(self, entity): - '''Return resource identifier parts from *entity*.''' - session = entity.session - - version = entity['version'] - - if version is ftrack_api.symbol.NOT_SET and entity['version_id']: - version = session.get('AssetVersion', entity['version_id']) - - error_message = ( - 'Component {0!r} must be attached to a committed ' - 'version and a committed asset with a parent context.'.format( - entity - ) - ) - - if ( - version is ftrack_api.symbol.NOT_SET or - version in session.created - ): - raise ftrack_api.exception.StructureError(error_message) - - link = version['link'] - - if not link: - raise ftrack_api.exception.StructureError(error_message) - - structure_names = [ - item['name'] - for item in link[1:-1] - ] - - project_id = link[0]['id'] - project = session.get('Project', project_id) - asset = version['asset'] - - version_number = self._format_version(version['version']) - - parts = [] - parts.append(project['name']) - - if structure_names: - parts.extend(structure_names) - elif self.project_versions_prefix: - # Add *project_versions_prefix* if configured and the 
version is - # published directly under the project. - parts.append(self.project_versions_prefix) - - parts.append(asset['name']) - parts.append(version_number) - - return [self.sanitise_for_filesystem(part) for part in parts] - - def _format_version(self, number): - '''Return a formatted string representing version *number*.''' - return 'v{0:03d}'.format(number) - - def sanitise_for_filesystem(self, value): - '''Return *value* with illegal filesystem characters replaced. - - An illegal character is one that is not typically valid for filesystem - usage, such as non ascii characters, or can be awkward to use in a - filesystem, such as spaces. Replace these characters with - the character specified by *illegal_character_substitute* on - initialisation. If no character was specified as substitute then return - *value* unmodified. - - ''' - if self.illegal_character_substitute is None: - return value - - if isinstance(value, str): - value = value.decode('utf-8') - - value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') - value = re.sub('[^\w\.-]', self.illegal_character_substitute, value) - return unicode(value.strip().lower()) - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information, but - is unused in this implementation. - - - Raise a :py:exc:`ftrack_api.exeption.StructureError` if *entity* is not - attached to a committed version and a committed asset with a parent - context. - - ''' - if entity.entity_type in ('FileComponent',): - container = entity['container'] - - if container: - # Get resource identifier for container. - container_path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Strip the sequence component expression from the parent - # container and back the correct filename, i.e. - # /sequence/component/sequence_component_name.0012.exr. 
- name = '{0}.{1}{2}'.format( - container['name'], entity['name'], entity['file_type'] - ) - parts = [ - os.path.dirname(container_path), - self.sanitise_for_filesystem(name) - ] - - else: - # Container is not a sequence component so add it as a - # normal component inside the container. - name = entity['name'] + entity['file_type'] - parts = [ - container_path, self.sanitise_for_filesystem(name) - ] - - else: - # File component does not have a container, construct name from - # component name and file type. - parts = self._get_parts(entity) - name = entity['name'] + entity['file_type'] - parts.append(self.sanitise_for_filesystem(name)) - - elif entity.entity_type in ('SequenceComponent',): - # Create sequence expression for the sequence component and add it - # to the parts. - parts = self._get_parts(entity) - sequence_expression = self._get_sequence_expression(entity) - parts.append( - '{0}.{1}{2}'.format( - self.sanitise_for_filesystem(entity['name']), - sequence_expression, - self.sanitise_for_filesystem(entity['file_type']) - ) - ) - - elif entity.entity_type in ('ContainerComponent',): - # Add the name of the container to the resource identifier parts. 
- parts = self._get_parts(entity) - parts.append(self.sanitise_for_filesystem(entity['name'])) - - else: - raise NotImplementedError( - 'Cannot generate resource identifier for unsupported ' - 'entity {0!r}'.format(entity) - ) - - return self.path_separator.join(parts) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py deleted file mode 100644 index f46760f634..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py +++ /dev/null @@ -1,77 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os - - -class Symbol(object): - '''A constant symbol.''' - - def __init__(self, name, value=True): - '''Initialise symbol with unique *name* and *value*. - - *value* is used for nonzero testing. - - ''' - self.name = name - self.value = value - - def __str__(self): - '''Return string representation.''' - return self.name - - def __repr__(self): - '''Return representation.''' - return '{0}({1})'.format(self.__class__.__name__, self.name) - - def __nonzero__(self): - '''Return whether symbol represents non-zero value.''' - return bool(self.value) - - def __copy__(self): - '''Return shallow copy. - - Overridden to always return same instance. - - ''' - return self - - -#: Symbol representing that no value has been set or loaded. -NOT_SET = Symbol('NOT_SET', False) - -#: Symbol representing created state. -CREATED = Symbol('CREATED') - -#: Symbol representing modified state. -MODIFIED = Symbol('MODIFIED') - -#: Symbol representing deleted state. -DELETED = Symbol('DELETED') - -#: Topic published when component added to a location. -COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' - -#: Topic published when component removed from a location. 
-COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' - -#: Identifier of builtin origin location. -ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' - -#: Identifier of builtin unmanaged location. -UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' - -#: Identifier of builtin review location. -REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' - -#: Identifier of builtin connect location. -CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' - -#: Identifier of builtin server location. -SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' - -#: Chunk size used when working with data, default to 1Mb. -CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 - -#: Symbol representing syncing users with ldap -JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov deleted file mode 100644 index db34709c2426d85147e9512b4de3c66c7dd48a00..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 17627 zcmchf2S5|e_UMz)dsR9F2*nC0QWObIB?ux`L?JX01O&wbQW81{NV6ayAVn--P{9fW z=_m?Vz)C_>G$I6ugpg$31pV&!{qMW?zWcxL|K6BocV}nM%+8$o%{eD)5C{a??|4M$ z(c@?|T7VD1+avyKI_`Ju;6a!b6nxU(mv^aq{j4ExLm*JDi$#$L1pNG&{>ur>{=0Ll zKTH0jBVK9YQvtrPPAgQfuh(&SE-r`=B9b~02tI@r5uPs*6p-Odp569bbnC|}=hnBp zE>O@wzzYH%IuFoj3INHaL_{2+abfcJN1*_)r+5nZ^kY^{_xr}!b+g^8 zBASH_@`eQ+`1Vobb@{MGgoVc=Lz1XhZ}nQ~kGx*}J2)yNsFUkCpKnw1{G8=zT zdVlP8l6s)BZq>J%tks_TQZ{K-7pWo)2?ZlRKMb2?GPe&0r*z+8<|`PVo-#Y4QmAx` zsV*_$Jh86zsK!)vtMUmWAx>aw7G~DC6hD=pniUfVn1k9WLrl%idm1K*AFMNqxAY5o zrF8132yXTnneKW=(3qOKzO`OYm5j39koUB6E9r%NM7xOKENxg+wCl^z%mr^D3ijJ0 zZdtrQqI&tt>wd(6icrH6Pfw-Rd$t8wl)995-e3DzdxF{dIL3zHw}M&kn^@U zAl$iaIFFg*^9Yk2ld5wXlU*^3(-t-_#j(0XUvHD&u5EkfTOF{zqAp|Dpt?JAZwl4U 
ze@KR=NkjBEF*0pN;`j6QV{dRyxOG%TEnBt9yyfdy=+Ba8o?iY-U8;-tPfA%L4=TT- zO0M^u838vI1S*>z(erckuIXIUsi=D{A#-;|IXx+ndm8=7?a%c4=}F*+b2I9Jd_-vC zxx#iK``t&Q&JVVP3U%ZS70?6oU3Ksx>+6Li^*11yn)Ha+vRYUX$|8{EXrGRzv9ka zTxNuMSN%V`KIzn954Y;59;}cp-0aQn&nrBUl>ZA>X&Xkmtv>a`s-w}a` zG2p;P%M4m6>Z{6pv-}iv;*;#oBGdS@Vm@fM^m<3!u1lzuTefI1vR^n2<5qj@IH+Tn zQ@3pS4sV=Cs_x5NS*J=JH%x(@tk`n7u~gj+!}i>SJNIYs)mAPlJI%k$m_H$oE>-@N zh^=W8w#q_oQVQSOJeE-|9R2)uRMJZdoBPDS-sDq0psrbXF!4?IkI4i(o}F@L(`qZC zHF6?l^9O|T=g>u zln9rHSn#$!rG{vO40ZrCN^;1uu*UngaTWSO`VYsFI7Fy2nPezMt)8cK8_6abGj#Ys(SQgLfv0 z=09u-fMgrfOwG!WDDwj8^Qjy4R3$sQetY-bl}gG#)&d}wfG6A4&P6s1PM%sEX#i>! zM2R}b3tWDB`)(vwm&j1HT@=UXkef=S_T|-Au=5PAyeRo@AJ#KYc;gjnMcl4wWBXb1 zO}wO_zFjgK$Dj5HlN*zYfukwUI<|>*Of#nN{OoA(B`ut;F>ZMu<~=4GJDvk+Y2e_9 zvrNUHA9ZD8QbJF)=CaGnF-1Fam#xO+44clE=)KT4^R{fe*&?G^FW-zhwlsvflUWs- z@%RLZv+Spshm2w5*@Rj$RW9B3m`CLEr7LLrVRGZqkCSol_OKPv-rtcWU+A~}<5Dlp z&w|oXs&RpclUPU8?9`mW`qZoZ+SWf>+tbdr8)2^RUbY-_eipZ4JjZUP59849aAy}z zG4|=XyqsVNwf8%M3Uusb1U_Kcfu zAA=3DOwh)U&G~na`ZcU&C81q~o1ThugN?pKRY+PWzjiYE(R^;DxBW`o{EB63a%Jrw z+vTi&nNxvrzN6!tI@INMF5^xvq`{A_U_gvrUCOLwr{}uwXdCi zL5uKyu)SYZ#Lr{JnG=S>voCwHgEok^pdC<4{R<9@N)b_PGPk*%)a3qRu_CKm} z5x7=#rU@tW%ov8RNxgNA0&V%DAy}wzB4J#kF;{Q@Vo}4u>Tth$StXlzHrR zsNUA|i*Dp=?O%Y!PXu3)$&nwAyU8eyTkG9meX(wufa;K3Qq9_~LMlEp9_A^p9^Y}> zXteo-UCs)KBZ6HHH&rKE)`11t?*Lm`i<2(?+9lVXs-NAFyQgM&Bj0|01kN=|Fe zNG)6)wx%$1Hl57NwkR9ZdFt3&?E>`#Q+&LVWvi-XaNPx;wKSw&5_$zzzDnU)=l^DS9HLlx$k-O`lRvN zp<~38#@LY&hI?RjAPHiL`)I(8)!}0cK!0Oh|AX)TFzYm}I&j1afSHP0jkn+S0x;DO zSRMY#Qe&C$t5HjVsjD~YZk2L;kQsacG&8CLA*_=0kCqUIlG9f;LOLaLjTDE(+~=ZB z8iOHv^M@akY8g^3U?`B6BgJN>2rhm6gZbS*Mgxe2JRZ4SLSOG1X3Vzp67lo!^ZU zDmUD*xEy(;Vh)@HTsgGWiNGe|3*$}c^6Z5j9V?L@*Aib>zLjojxo|iwTrNanwV48t zHW1*W;a^K$lfHsUHQQj9W6*~=qfrXit{j8$oK4tmb_%3=tieM8QPS?$h(i@?J8J}I zbZT7lj=xt7x3Y*m`iL9R=w%_N8e8lhEsG0>+Wt9Jha-y%%S^?P`!Hk;BW?NhS1|!b zUsHW%TW`;{o-O%`1K@4wZW;8!eQU#{s||$}Wpy^GJ`_m%uum}YVCb}$+V5E^#XKVK zxN}`s#%ZW(sUz~8IFavKu69}B4<=+CW^tD1&cC-CFID0{aH=V^@Zd8~gLIm{2=S;+ 
zypIl_93^CuBWb0)K*dBGdYsVSqIxjWzwAkDRwzP$U*6p}?Cgnznh#f>?3j-8zr|Hu zx=Sqnn}wdznkNe1-xr{*(uU=ZS^Rv-ZTiGId)r4cREc^|lcsobPjL0#jaEMCni3L@ z-&X9OeQtjLRiurVmnvz9=a(_Xyc0&p6AC zx~53l$CM)PqQn9T!`x#|SM!(n-DG}jtzby)v;xFrQVanh{Ittl(R*)Cga3yR!IiJy ztYsGYaGFROBG`97^zFSi3bR-33uQ7}cmsHX+K8;YH?(q{*pQlSKmC4#1kt4a^ z>ZTpyaNLuL1BcDTHyO7aVxL{!&G(M#TRed1=G?H&~*Ix>v$g z_Jf}1=7^0y28s7+l(o}R*o{3J>y}GyAAegj$yqBw`L5r$>Lt1RKywe961{{)cq+a{ zk3kiZsA;eG8O~?Q;?#^yTFJZ!KT+bAxb9)^9Yf;X$S5fn>DOO~?fQE_Fe;kS7ISw! z5}TqqLwRw5Sa8bN{Cbt)-u%LckMu5CRFKjtDie7tY;nx)+h&owgcZc+g=f`&6mAJg>zE+aeC#}ZhkK-zU$_!s3uN}c zkYrz5cc%Bd)aTl>78`UYZS6mN5yF!Iw`H$!`3EtSXF9S_2Xd9H`PIJifd^RO! zDEVofN7F5gDV@L~0+pW3q~78qYC7w>3Z9~ja0x*1Nk@!Tf6p5q0!J|8GqNmT=H~a$ z`<`K`xicY8RSvO|*Mlp!9()SLbAw(n3O+3qPaz#LWDHpaj{#+JXjg_tM4-m(Qm@#L zLD8RPz!VC(#`O@1i>EezLJ-D-l=|l;DPWG!A~G(CS{8T#a-CckqddZm^@=h?)FnPW zGQm}$#LP`z1h%QK*H1K%EyX;zCS=Pm=-x}|Y9Ehluw$)>s3p~5lU&4`{ch_|O$t;; zdnGG&3Ma+SJJ?~UIeSWRughyO=fiVCWT97d`rIX`yt4MUN^wgDrXV;EenTI5waOb= z7pxz?M78FEwA$F$!h(e6U(h_kiplz^BC<7iViK{8@jWhhw0K{IRd&K184bmUacQsi z#WjiFzS?O$)`uB{Lt@~9dFYpdS$Sm^r^`NxIX7R5JNnTscNVABSeW8`b{Lb(F2(sD zId>MFvd(#nKlCFV>T%2Im_f{CyWDFzCwPOq^)+u~KX3!O~idwBG z5mZ{4`@J`EU`T;6%s;Kni6}Xa;71TOnoLfj=crDMA`7i0OHzC;0>Hm}%+?mnGu;{= zd=E%zsX)H%ZS|7f9S@gfz})ZgV1z#@%D}okNgGvvQ%Di|EIo0nj>0mYdv849xiAB9 z=kehq%7h0Uv2q4bV`p}AEs_R$GrktiqO8*?ZZr;J1^^0 zq2u5X=^CjLoz(91blvt#{AY7#o_DExy1Hsv`BW@z7nQnKAOjMM=u&6wdQN+-{}tH* zl`yiBVpN+ST?|-kLEV8etZ1};QHysY;uUM_zG3$8(VH6GBZ^iJMP`ZuYSvt6+UoQL zJw4v0ql+nM&6$+7ujW~-IGv8d3qW>LM5^$`eNM6tP7wO05IUHTtu_(kIc{HZrqC73 zJq>-J6VR_Xs0S9-2?>Nsqk5f-26Iy!DuojL&5WHoUy_COZTJ!A_6~%FOej}DRn7k$nfmG^^fFadzNz>@3pndtFyD3@)ZMX*cMI*O?z@Uk<3q;w22v+kf>TC!ymqBFnVsd6S+}kJ zt?a(qk7u4Qv!-gQDO}x{lE88P9i1V(EFNDf`Q${}V{JcdQ({2ZwhzAEWt6Y8`H>R2 zA5kHZk`(7DYkKP{ltAa(J@HIoTR(xrgP8b_r+A%hLi8{BGR z=q{KlmcuC7b=*;m2S}brS)R+bklhBUg)>dn34uN)jjQ2j7P=R)Hu$TbBR0kB-4tq5 zf1GEtuD7s#W^ACKv#NkQfALb?c|zPBVp&yWrlWbdVO*;;TEVR3(Ys{w=NvnAaMk3| z?>;6MotrNoiOX5iaubUP`jo#1pjM5{2bWN5B%9anSof0iUFJZqMQ`4P`@p%T6I?er 
z)h;?L+`f1-*qtV7wDwlp1sZ_S3i{~BiiU3A$L?bR1jwZtyCFVlU3 zxe1kN#g1IjzA(TTLun-n zRIi9hiPNk0ncAK6z(u%i>;AhnRd0AShQ5*D2^~cE-+^wj5y;JdSsRZiL7G~6n`KNA z`KGjv@c9#M`y@9OHyLULi+vNfQ5Ql;Ke^&2C9-KVvC2;`v#F@vRz1N+du40B@jO{2 zrRl{Ead}fqy$+wS;i#h-J=Uj@H&h@_gHMk`vawf@+Y*P4U6j28sy|vW7D=0D3~R{)A^*Y=7Dm3V%Gob3-H* za~eLGBx+j(nnm2iK^E^A8kw?c5_XAAh^M-F=LAE;p8qPCv=j^f6Uje$tWrIgi`qG zGEB3{h3FbS!yJ#o$<5+4`Y;?>dr}|fem?Y* zx1P(3vvt%xo|j^|eYU(n)Zw5G(CWOi61`qT{Sfk5eJA3QRC;&5t7Pj`40%Xev64S4 zlh1E0+Bl>8Ci(S}GW4EwvKCh(slE4u#@rB6Q&{%x2g*+h`0VX*Jzy~f{<3$t1eQP? z+V+Q6I;*>YMm36IPRzgNJ}jp;`8`}NJ!(vh%~R8%ir&4}l(!z)hc)&YU^?=}-Bxe$ zQ71PA3O;d#QCM`$+$Vg`py6Iw2lB-q0!Gyjs-s%U9;n%1&VVyE6ae$j& zfA`#6G2_M;ua^bO%HDgRiqe(*b9r=t5zeU(hZQ3#*vm2k-_P0{6@$q**!m z_2446(;eM{h`P_qI}$a{9lB%`&qm5zSCxrrQs~uC(%^jhSkY-je#=*t^=9fw!*@&Wj~yCZW&J(L1c{`x<8RllJ(?9G zHiX!0jWDZC&3mK52QKk@uR1ZG<@Va;gv0}5=p%~+{2mJV^5@b{C?9Niq8dat< zFD$VSKqj@yb(8+%9Lkh3(@G}G>Pq?~h<^fFHKY3P|Izo*!v3e<;Uczc4vFxE0~R24 z0Ruo12b2!!X9Ht^ZA6nKZh{^FG0wKa zI{T-?w^GO~Fnm;l0pwt1*_z0a1dPP6<%2U^?Kzpv1OUJrFl^!c1^^1j(+i&V04#?< zf6Pdsi-Ns$#Zm%+PHx>>K*s`b5y(5swaNa_V^Ug{5M*Qkpg;Zw+T_?S>%Qp~7IO`_ z1Uy$qZHWj2;gUR$7f@aMrtqFWsF z0I5Jxuk|xfV`X_xgd_9bef9b3uXV9v`{|E0GGdz~EOkM5RNSWO)ATIgS3N~FrTPoa zXDp{m`}GS)hr0$|M;8{ln3aa*dKf(GognZk&I&;&sf0Ne^n6VjbHVEQX)9r`a16Jc zT;+fAGC8RRvG$v!z1SMXftX~%2m1Ua#J-y6>Yf!7(jUZ-(zDM|uCL0XdZli!G)J9! zG1%~I&Qro8&+J(SQo~d$epDd+!eFUwz5TMnQT1Ii0 zyv)&A9r(2h9Ji%T2-8rr7cbAf7++(5_~0f(l>0~D_6$X_zF0B$8Z#`;pOq1KcPH{) z=BNlw#5~V-qIj4}U6E}V9c=XAJ!Zwp#1{9juN=CC2gM(3lKPfHSJ(Dn<2jpYi`7=X zJ0A%~MffceW~bSLX77rkHB9teuwa2P48E<2GdQEC>O|o+kEVQ)0ID|mU_Jby0u^f_ z+{|pO218hYbGo^>ShVaEw_&J9lw03e?@YqO8-^}#roG9aYusu$t4A3CEou0IHqb@u z0t@XNOO`t;l?DvhhiofmWzqn>F2jRE06(u}wnwi9pq0RqcLpk8=v+q#Dcpt(TiPMZ z8S!P7u26M?Cue~HLhI(ivX5LvK$9(|ffR1pa`v*lB-X51n(Ochz}13H%+*AM^O~p! 
z+DlImlz=2>HES*zJA#F?h+5fB0CbH6EWop<$-tRBOc*L=9!gr_p#+kR?C@=JpqT*L zWSBkxBc?K_YzCTLRepLPTN$0Ip#frE}~<87%TkD7;?}RB4J40C0lQ?KL%| z3;+&YP~u9{^JnE_tcwA=nB9@+?FpOp&)(HQ=p(PidWL8)7$Ah>Ra}3G(4yQCwPLP% zn9zI)yU6T*EGDHAp%619a=26=wth>$His}ru56D2c-;miz~!2T7f;j1+S%Lg zMh6mxxzW+10MB82Oh<TZKFG zSb0Ke4rNl$5Y(zydZR`lZyx1+;=qMhpTn#2>d6312bCaKZ#A=dVJ$K4ey>$TSzAEc z##_%raEC;uUw9&SZ2VMjsEERVvT%h*i*^fV&g;~%3o>%m%|D-ai&C7PQrGtBl25fe$SuYVFR7;C=eui(JOPj|6(4v856Vnffy;H1A z-%A4?T`q!Mc2UAGT}!6K!A2+E?DS)SP%>Sx;Nlvq0LOHZ?Ph%k9G-fV;4{kBcA$b_jE z3xGg)v=B-{V-J2Ei{lC>TbxLY1ZjY?SR>trdE)w;on34{*KGW-%h%@%<#x^6&aAOE zw0wW@(#6+ee5zd@;U1DzVnoh2!;NQV0EMgoJjlG8E1wx~2wYA$nKcJkpkM&Nd>Wil zh!Z>*Cj5pWcZuCskcb^20FW|oK_0dnNM%4~0l;F=1}>`TvGMGp`eINOOyJ%obJ#8l zKneC8R0Y_DKz=I;aPR=qW0Fe-?4ufFy?%7g}(<0<%d!F?8m79(mMBJIpo1=I6Uje2p?JD=z; z13=?n>9-#8TFWm00@p*6y7;N~+Z0g_ExdT@sxx;wL|1l@+o(qy_JlG_ApgoF7Wt^x!qC|RzG0w-tMJYz1G19BDWi}ybxF{&4h3?UxX8@0}ki33cwAO;_9Z1(9$zW7i%ja~? zw(MdKfg=N^qx1j;wuuaeoTmwlYKS>*!!OQlg{Wb|d;bOKMwI?D&^bZSrThLP&;bIy zk-Y`LwI7UN=w!NOJq5D2uQO!PTAdpO98nzra{r!jl8|w*F@MK6mtrux6ir1q>AuIA zyd;1o)Lq}cEL*PE_ABBt>ol0UMfoQ&SwrFGGUy$dKb`-^yHOQJsq!%e^K<2{Vd7B@ zsd;fpd_q*AwHkh*ekTrSPfSVGT_hUR$LX`9sCjR$cwCfBo^@=SKR7DVeZwQGnty+X zn}@rti>|mu+n-1~i{8ZbtE+7O;3jNkx>1zDClutUr)<$dR`**c(H%K#^KwPmBL9350& zz0C8?)06t`kuj#{4E;mCZ7#C9yy4);k__pMaxq`dZn%H7ohjihNiz5@aO&q0@}j{u zl$mI0j%uUW|0t6UxSX_#rO7y^N}PL1peh99&5h|vO>O>~IkxACxtUahYzT6IeG??T zz7^NS29a%?K-g@_ zq!Pd=J%(M(n0!V93>qaLnp~#ep~2Ksm_tj zW}2U3)hl*}2X^mQx3tQ4i)>2yhW?`YV?G2C?Czz75Kvr;u23ENQ9b<@nb$q**fetW z_H+~Xi(_w+ct^WcW64(#vnKD&Q85wSU5{j3Cw(79ou2TC=B%cT?Y>PmjK*>n=!D-k z*IE*R9cz$PRD>sf=;zF{H#ZOlz~n$0AQ7NF1>wh_aqD}b>bOI!nG640Sc51_hy52r z1`BX^M)M~8J3@dbBzTb91(op+TaE2N;N>l@l=hHnybAuH8V59ELGGf}$C!jM8vr#M zCb(b%t~_8D19lM^h!X%zcqqB0n0Ev~CJ{FFpRSw`TU-7c*s4(b$Jhe8ro$%+0Et8= zST;@*$kX0)xwMEA+&W%YAvJl}g4O<QLeBp1nKvi%LwiW+FxGp(0vX1Z^iR!XJxl z^OR9T_FL&1>859SlAYL9!tw7P z-fGPIY5ntvudQJ0PtSQ&sKmg8z3ZyF*xjtIky$z;d_&0Qk4-X4=6^#{t>qvH;&yUWw4mJg 
z*M=!FYULhQ4eoc6r_Kl#`hh~exQB8=Hv)+2U@GUM1NBTR2dl5`4L;y6t_uHsLm7}*EYTQ=?e`X95)$d0@>d^GYU-sj zZ}zEKPB%-HGK&dx0&f&xy4+i1FF@RES*MB>>)>B`ti39x>Tt(&0hhDr55p21m0rR$ zY7J4`rmpAyBr>z^#VHDTA=&CiQlqabwgv6#iA>q^aB^}^IQmu?pb+LcgumEcWaxO% z2H;y2aA^1ODCO`UPzqXLfJp&d0Ooph92&91ILF$;J2xEMqvRtsNxGk*+V>nOx+ycVr``h(Rf%o* z@@bReG4ZCD;55m=lHhQ@?6;ewdn)tC z_9^w3PN&jFTHh#f4sU#Y$;mNHTn|k1b6wY~*JYLkT#8P~VB;4c-QzJsnXjn1iMrJT zXJ(sJ_9u?K%RB6|e$?-<@2NYAz-iy*PBMwu;DoiEO-y|?If|e*$Vf12jj%FL_f_*L zr(F}2Z+B0^zJ>3I#VDEBjXgQ^gCx13Vqp+Q@&I@>Zdmp)rnlt&DvKeM;mNk#1^#5Ma7U=|~v~r*&HP^t_<~l=9aX%1QT+>J}9K|kX=Pxc{c-p@a z-ZIntZ1k__1e#-<0he8rB>j|<%axgfF2gVu;1@N;8QNNTt^Wr;Ex@=+D&sSJC~jg_ z5f}zv81U;6&|9hp=fyB*9!sNK9Dq}eG+zQ&IZKEr*r5L%QD8=xicD~m-L_kMeaNCN z`8~EdHXBPDtI=9|PC05f^9%V8kB{5vcfnO?l>dEH2{%P=_QVw`BFt_6!tPY0eTIwK z#yhJKij`~cBKAKSzQ+l?6=1^naUi;BX^>y0VRstRFd|?1Ky8?{mD=hDS2mH`?jBe@ z<(hC+`NLZZv9%@IPn*KV^ZY89zWA&qNZIivnjzYs4ql8AM$OjceZEpV(t%QuA_m>8 zH8pP5bVYe~e^t72=U`~)9xZR%qlJe(Rb$(3xPBDebAuF}%24j#+I0%i5q3Ry!8s#w zcC@8$=z>C{hXB}z0CYG3pukE7;MNXGl40xt z4-fTnVTKyO!@>aiqi_!K0{n}-g7P7+coG(%M#up6Z|9J}^9A8-g9*w6QctO~-FP=1(Q?id`I=HzC88x{hlZwH zOfDg;GM2JPv!dcTCmoI{*o)ar%sWYMcbD3SkDh$dGg)pa87$^1+IOqQuKKSx>T;X`o&XU0JPeo`fAfsupLxg5SmFP50#5xmogWcX24wNMjzUL|pP}MfT+a z@=wK)7LLG00=Jfvq;3e1S@LOur$S6VkO>Qn-*y=MHtjd-7VWTOzwEGogLUAak`BuF zpCjED^Zy&t6^{Ool1>7W4mRi?kgndoMVu#R+6~s>xhDKIuSqGQYtitYR=7Js(aX}iNF_^jWsjWhi{}DfMI@z?PF36 zXL`K1lKzP8FRxww0v+TQSH45zjM3#@jgtpcnL$@G1JFMRXiB#jOE*ZY&eG~Z*504* z7JA^cCM2A3Dz0WnZ6Pzsi@*Tjg|&v>l#Y7g%NQ3W?+22HFx5|+I`V!oL^Sxu4-$WoY2H`z}AWZO=Q4j>{9Vn@_{Q1*L7#{%%o!j+L}x^P|5S zmC2^B2SUV^9gdY}Rx28-e;N6qR{p?ePpaNrdlPC`GlEzq9d#wkO&tD*rft%P4p7I? z?cMcr3S$Pp-aIRKzQ-M@xaAjJdpbH7Z3iB2J8L%^mq0AQSy&sR`38tgw3JC0|Wwd(D!&4+^Zf8 z3Ol}-v)Iz#au;RsSqJ+Dc)@GvVBf$0<%QqR{_Do|{=PxSc!$CMLB~$QUrz7}4Gog! 
zWgwAVO4jh?AbJnX4naE|gc8yAUPpX`4#GQ7l;D6Pez2VU$>7Br;CtId_&W+j-o563 zNcDwMy@CS5eoI9{sY8mtq*@0Bc!ivv4cF}yu^DTah{N0$0)zB z&FkiUmC0?0IuaiDTi)XxZ_>Wg9Y$(h^!R~{dXAci~8!sUR>{qWb;c4hU zONs!%ODmvMTcnefIzS@i_2B-xqP);O#A`)<QTLRpa;dTLTyWj@pBkAyd$FSo* zAv{4a@w%`afl!C9i)nenITMRO=y@R!%MQaKoj@Q~{X`%P;q^7m2*kS0(0ztw%we8| K0|H@L{l5Sv%EZ9{ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png deleted file mode 100644 index da6ec772092e788b9db8dd7bf98b9d713255bd72..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 115 zcmeAS@N?(olHy`uVBq!ia0vp^AT|dF8<0HkD{mW+vhs9s45^rt{Nwxo|NpDSef}SR z)AjRwLB+TFMqi%)*FV4eU*G8XwcaNAPd!`P&HpncCH@&33UInDb7J`WPs~|dc=2+|Ns5_85n>V41r7_ z2Z$G#W^V*)67zI%45?szdvPP9g98K0!D87L`y+K3D$*XGO*?h$S#W)QaTODT0|NsG z0|O(20s{jJLjwbY0MII^pec-jRhCRa3|q)F2jR8_Zn(Wbw&7?&LAE(a!FvT)I8$Oc za(q!@4wBoXsALXGT7oAksu>=jjG1By8QyR)iowr~<@`_ format, for example:: - - /tmp/asfjsfjoj3/%04d.jpg [1-3] - - ''' - items = [] - for index in range(3): - item_path = os.path.join( - temporary_directory, '{0:04d}.jpg'.format(index) - ) - with open(item_path, 'w') as file_descriptor: - file_descriptor.write(uuid.uuid4().hex) - file_descriptor.close() - - items.append(item_path) - - collections, _ = clique.assemble(items) - sequence_path = collections[0].format() - - return sequence_path - - -@pytest.fixture() -def video_path(): - '''Return a path to a video file.''' - video = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - 'fixture', - 'media', - 'colour_wheel.mov' - ) - ) - - return video - - -@pytest.fixture() -def session(): - '''Return session instance.''' - return ftrack_api.Session() - - -@pytest.fixture() -def session_no_autoconnect_hub(): - '''Return session instance not auto connected to hub.''' - return 
ftrack_api.Session(auto_connect_event_hub=False) - - -@pytest.fixture() -def unique_name(): - '''Return a unique name.''' - return 'test-{0}'.format(uuid.uuid4()) - - -@pytest.fixture() -def temporary_path(request): - '''Return temporary path.''' - path = tempfile.mkdtemp() - - def cleanup(): - '''Remove created path.''' - try: - shutil.rmtree(path) - except OSError: - pass - - request.addfinalizer(cleanup) - - return path - - -@pytest.fixture() -def new_user(request, session, unique_name): - '''Return a newly created unique user.''' - entity = session.create('User', {'username': unique_name}) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(entity) - session.commit() - - request.addfinalizer(cleanup) - - return entity - - -@pytest.fixture() -def user(session): - '''Return the same user entity for entire session.''' - # Jenkins user - entity = session.get('User', 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def project_schema(session): - '''Return project schema.''' - # VFX Scheme - entity = session.get( - 'ProjectSchema', '69cb7f92-4dbf-11e1-9902-f23c91df25eb' - ) - assert entity is not None - return entity - - -@pytest.fixture() -def new_project_tree(request, session, user): - '''Return new project with basic tree.''' - project_schema = session.query('ProjectSchema').first() - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - for sequence_number in range(1): - sequence = session.create('Sequence', { - 'name': 'sequence_{0:03d}'.format(sequence_number), - 'parent': project - }) - - for 
shot_number in range(1): - shot = session.create('Shot', { - 'name': 'shot_{0:03d}'.format(shot_number * 10), - 'parent': sequence, - 'status': default_shot_status - }) - - for task_number in range(1): - task = session.create('Task', { - 'name': 'task_{0:03d}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - session.create('Appointment', { - 'type': 'assignment', - 'context': task, - 'resource': user - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -@pytest.fixture() -def new_project(request, session, user): - '''Return new empty project.''' - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -@pytest.fixture() -def project(session): - '''Return same project for entire session.''' - # Test project. 
- entity = session.get('Project', '5671dcb0-66de-11e1-8e6e-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def new_task(request, session, unique_name): - '''Return a new task.''' - project = session.query( - 'Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb' - ).one() - project_schema = project['project_schema'] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - task = session.create('Task', { - 'name': unique_name, - 'parent': project, - 'status': default_task_status, - 'type': default_task_type - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(task) - session.commit() - - request.addfinalizer(cleanup) - - return task - - -@pytest.fixture() -def task(session): - '''Return same task for entire session.''' - # Tests/python_api/tasks/t1 - entity = session.get('Task', 'adb4ad6c-7679-11e2-8df2-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def new_scope(request, session, unique_name): - '''Return a new scope.''' - scope = session.create('Scope', { - 'name': unique_name - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(scope) - session.commit() - - request.addfinalizer(cleanup) - - return scope - - -@pytest.fixture() -def new_job(request, session, unique_name, user): - '''Return a new scope.''' - job = session.create('Job', { - 'type': 'api_job', - 'user': user - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(job) - session.commit() - - request.addfinalizer(cleanup) - - return job - - -@pytest.fixture() -def new_note(request, session, unique_name, new_task, user): - '''Return a new note attached to a task.''' - note = new_task.create_note(unique_name, user) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(note) - 
session.commit() - - request.addfinalizer(cleanup) - - return note - - -@pytest.fixture() -def new_asset_version(request, session): - '''Return a new asset version.''' - asset_version = session.create('AssetVersion', { - 'asset_id': 'dd9a7e2e-c5eb-11e1-9885-f23c91df25eb' - }) - session.commit() - - # Do not cleanup the version as that will sometimes result in a deadlock - # database error. - - return asset_version - - -@pytest.fixture() -def new_component(request, session, temporary_file): - '''Return a new component not in any location except origin.''' - component = session.create_component(temporary_file, location=None) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture() -def new_container_component(request, session, temporary_directory): - '''Return a new container component not in any location except origin.''' - component = session.create('ContainerComponent') - - # Add to special origin location so that it is possible to add to other - # locations. 
- origin_location = session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component( - component, temporary_directory, recursive=False - ) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture() -def new_sequence_component(request, session, temporary_sequence): - '''Return a new sequence component not in any location except origin.''' - component = session.create_component(temporary_sequence, location=None) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture -def mocked_schemas(): - '''Return a list of mocked schemas.''' - return [{ - 'id': 'Foo', - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - }, - 'string': { - 'type': 'string' - }, - 'integer': { - 'type': 'integer' - }, - 'number': { - 'type': 'number' - }, - 'boolean': { - 'type': 'boolean' - }, - 'bars': { - 'type': 'array', - 'items': { - 'ref': '$Bar' - } - }, - 'date': { - 'type': 'string', - 'format': 'date-time' - } - }, - 'immutable': [ - 'id' - ], - 'primary_key': [ - 'id' - ], - 'required': [ - 'id' - ], - 'default_projections': [ - 'id' - ] - }, { - 'id': 'Bar', - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - }, - 'name': { - 'type': 'string' - }, - 'computed_value': { - 'type': 'string', - } - }, - 'computed': [ - 'computed_value' - ], - 'immutable': [ - 'id' - ], - 'primary_key': [ - 'id' - ], - 'required': [ - 'id' - ], - 'default_projections': [ - 'id' - ] - }] - - -@pytest.yield_fixture -def mocked_schema_session(mocker, mocked_schemas): - '''Return a session instance with mocked schemas.''' - with mocker.patch.object( - ftrack_api.Session, - '_load_schemas', - return_value=mocked_schemas - ): - # Mock _configure_locations since it will fail if 
no location schemas - # exist. - with mocker.patch.object( - ftrack_api.Session, - '_configure_locations' - ): - patched_session = ftrack_api.Session() - yield patched_session diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py deleted file mode 100644 index bc98f15de2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py deleted file mode 100644 index 78d61a62d1..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py +++ /dev/null @@ -1,54 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack -import json - - -def test_create_component(new_asset_version, temporary_file): - '''Create component on asset version.''' - session = new_asset_version.session - component = new_asset_version.create_component( - temporary_file, location=None - ) - assert component['version'] is new_asset_version - - # Have to delete component before can delete asset version. 
- session.delete(component) - - -def test_create_component_specifying_different_version( - new_asset_version, temporary_file -): - '''Create component on asset version ignoring specified version.''' - session = new_asset_version.session - component = new_asset_version.create_component( - temporary_file, location=None, - data=dict( - version_id='this-value-should-be-ignored', - version='this-value-should-be-overridden' - ) - ) - assert component['version'] is new_asset_version - - # Have to delete component before can delete asset version. - session.delete(component) - - -def test_encode_media(new_asset_version, video_path): - '''Encode media based on a file path - - Encoded components should be associated with the version. - ''' - session = new_asset_version.session - job = new_asset_version.encode_media(video_path) - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'output' in job_data - assert len(job_data['output']) - assert 'component_id' in job_data['output'][0] - - component_id = job_data['output'][0]['component_id'] - component = session.get('FileComponent', component_id) - - # Component should be associated with the version. 
- assert component['version_id'] == new_asset_version['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py deleted file mode 100644 index aff456e238..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py +++ /dev/null @@ -1,14 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import pytest - - -def test_hash(project, task, user): - '''Entities can be hashed.''' - test_set = set() - test_set.add(project) - test_set.add(task) - test_set.add(user) - - assert test_set == set((project, task, user)) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py deleted file mode 100644 index 347c74a50d..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py +++ /dev/null @@ -1,70 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack -import os - -import pytest - - -def test_get_availability(new_component): - '''Retrieve availability in locations.''' - session = new_component.session - availability = new_component.get_availability() - - # Note: Currently the origin location is also 0.0 as the link is not - # persisted to the server. This may change in future and this test would - # need updating as a result. - assert set(availability.values()) == set([0.0]) - - # Add to a location. - source_location = session.query( - 'Location where name is "ftrack.origin"' - ).one() - - target_location = session.query( - 'Location where name is "ftrack.unmanaged"' - ).one() - - target_location.add_component(new_component, source_location) - - # Recalculate availability. 
- - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. - del new_component['component_locations'] - - availability = new_component.get_availability() - target_availability = availability.pop(target_location['id']) - assert target_availability == 100.0 - - # All other locations should still be 0. - assert set(availability.values()) == set([0.0]) - -@pytest.fixture() -def image_path(): - '''Return a path to an image file.''' - image_path = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image.png' - ) - ) - - return image_path - -def test_create_task_thumbnail(task, image_path): - '''Successfully create thumbnail component and set as task thumbnail.''' - component = task.create_thumbnail(image_path) - component.session.commit() - assert component['id'] == task['thumbnail_id'] - - -def test_create_thumbnail_with_data(task, image_path, unique_name): - '''Successfully create thumbnail component with custom data.''' - data = {'name': unique_name} - component = task.create_thumbnail(image_path, data=data) - component.session.commit() - assert component['name'] == unique_name diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py deleted file mode 100644 index 5d5a0baa7c..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py +++ /dev/null @@ -1,25 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.factory - - -class CustomUser(ftrack_api.entity.base.Entity): - '''Represent custom user.''' - - -def test_extend_standard_factory_with_bases(session): - '''Successfully add extra bases to standard factory.''' - standard_factory = 
ftrack_api.entity.factory.StandardFactory() - - schemas = session._load_schemas(False) - user_schema = [ - schema for schema in schemas if schema['id'] == 'User' - ].pop() - - user_class = standard_factory.create(user_schema, bases=[CustomUser]) - session.types[user_class.entity_type] = user_class - - user = session.query('User').first() - - assert CustomUser in type(user).__mro__ diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py deleted file mode 100644 index 52ddbda0ac..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py +++ /dev/null @@ -1,42 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - - -def test_create_job(session, user): - '''Create job.''' - job = session.create('Job', { - 'user': user - }) - - assert job - session.commit() - assert job['type'] == 'api_job' - - session.delete(job) - session.commit() - - -def test_create_job_with_valid_type(session, user): - '''Create job explicitly specifying valid type.''' - job = session.create('Job', { - 'user': user, - 'type': 'api_job' - }) - - assert job - session.commit() - assert job['type'] == 'api_job' - - session.delete(job) - session.commit() - - -def test_create_job_using_faulty_type(session, user): - '''Fail to create job with faulty type.''' - with pytest.raises(ValueError): - session.create('Job', { - 'user': user, - 'type': 'not-allowed-type' - }) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py deleted file mode 100644 index 5bb90e451f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py 
+++ /dev/null @@ -1,516 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import base64 -import filecmp - -import pytest -import requests - -import ftrack_api.exception -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.id -import ftrack_api.entity.location -import ftrack_api.resource_identifier_transformer.base as _transformer -import ftrack_api.symbol - - -class Base64ResourceIdentifierTransformer( - _transformer.ResourceIdentifierTransformer -): - '''Resource identifier transformer for test purposes. - - Store resource identifier as base 64 encoded string. - - ''' - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return base64.encodestring(resource_identifier) - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return base64.decodestring(resource_identifier) - - -@pytest.fixture() -def new_location(request, session, unique_name, temporary_directory): - '''Return new managed location.''' - location = session.create('Location', { - 'name': 'test-location-{}'.format(unique_name) - }) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=os.path.join(temporary_directory, 'location') - ) - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 10 - - session.commit() - - def cleanup(): - '''Remove created entity.''' - # First auto-remove all components in location. - for location_component in location['location_components']: - session.delete(location_component) - - # At present, need this intermediate commit otherwise server errors - # complaining that location still has components in it. 
- session.commit() - - session.delete(location) - session.commit() - - request.addfinalizer(cleanup) - - return location - - -@pytest.fixture() -def new_unmanaged_location(request, session, unique_name): - '''Return new unmanaged location.''' - location = session.create('Location', { - 'name': 'test-location-{}'.format(unique_name) - }) - - # TODO: Change to managed and use a temporary directory cleaned up after. - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedTestLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 10 - - session.commit() - - def cleanup(): - '''Remove created entity.''' - # First auto-remove all components in location. - for location_component in location['location_components']: - session.delete(location_component) - - # At present, need this intermediate commit otherwise server errors - # complaining that location still has components in it. 
- session.commit() - - session.delete(location) - session.commit() - - request.addfinalizer(cleanup) - - return location - - -@pytest.fixture() -def origin_location(session): - '''Return origin location.''' - return session.query('Location where name is "ftrack.origin"').one() - -@pytest.fixture() -def server_location(session): - '''Return server location.''' - return session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) - - -@pytest.fixture() -def server_image_component(request, session, server_location): - image_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image.png' - ) - ) - component = session.create_component( - image_file, location=server_location - ) - - def cleanup(): - server_location.remove_component(component) - request.addfinalizer(cleanup) - - return component - - -@pytest.mark.parametrize('name', [ - 'named', - None -], ids=[ - 'named', - 'unnamed' -]) -def test_string_representation(session, name): - '''Return string representation.''' - location = session.create('Location', {'id': '1'}) - if name: - location['name'] = name - assert str(location) == '' - else: - assert str(location) == '' - - -def test_add_components(new_location, origin_location, session, temporary_file): - '''Add components.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - new_location.add_components( - [component_a, component_b], [origin_location, origin_location] - ) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. 
- del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - -def test_add_components_from_single_location( - new_location, origin_location, session, temporary_file -): - '''Add components from single location.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - new_location.add_components([component_a, component_b], origin_location) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. - del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - -def test_add_components_with_mismatching_sources(new_location, new_component): - '''Fail to add components when sources mismatched.''' - with pytest.raises(ValueError): - new_location.add_components([new_component], []) - - -def test_add_components_with_undefined_structure(new_location, mocker): - '''Fail to add components when location structure undefined.''' - mocker.patch.object(new_location, 'structure', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_components([], []) - - -def test_add_components_already_in_location( - session, temporary_file, new_location, new_component, origin_location -): - '''Fail to add components already in location.''' - new_location.add_component(new_component, origin_location) - - another_new_component = session.create_component( - temporary_file, location=None - ) - - with pytest.raises(ftrack_api.exception.ComponentInLocationError): - 
new_location.add_components( - [another_new_component, new_component], origin_location - ) - - -def test_add_component_when_data_already_exists( - new_location, new_component, origin_location -): - '''Fail to add component when data already exists.''' - # Inject pre-existing data on disk. - resource_identifier = new_location.structure.get_resource_identifier( - new_component - ) - container = new_location.accessor.get_container(resource_identifier) - new_location.accessor.make_container(container) - data = new_location.accessor.open(resource_identifier, 'w') - data.close() - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_component_missing_source_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to add component when source is missing accessor.''' - mocker.patch.object(origin_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_component_missing_target_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to add component when target is missing accessor.''' - mocker.patch.object(new_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_container_component( - new_container_component, new_location, origin_location -): - '''Add container component.''' - new_location.add_component(new_container_component, origin_location) - - assert ( - new_location.get_component_availability(new_container_component) - == 100.0 - ) - - -def test_add_sequence_component_recursively( - new_sequence_component, new_location, origin_location -): - '''Add sequence component recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=True - ) - - assert ( - 
new_location.get_component_availability(new_sequence_component) - == 100.0 - ) - - -def test_add_sequence_component_non_recursively( - new_sequence_component, new_location, origin_location -): - '''Add sequence component non recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=False - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_components( - session, new_location, origin_location, temporary_file -): - '''Remove components.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - new_location.add_components([component_a, component_b], origin_location) - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - new_location.remove_components([ - component_a, component_b - ]) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. 
- del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - -def test_remove_sequence_component_recursively( - new_sequence_component, new_location, origin_location -): - '''Remove sequence component recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=True - ) - - new_location.remove_component( - new_sequence_component, recursive=True - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_sequence_component_non_recursively( - new_sequence_component, new_location, origin_location -): - '''Remove sequence component non recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=False - ) - - new_location.remove_component( - new_sequence_component, recursive=False - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_component_missing_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to remove component when location is missing accessor.''' - new_location.add_component(new_component, origin_location) - mocker.patch.object(new_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.remove_component(new_component) - - -def test_resource_identifier_transformer( - new_component, new_unmanaged_location, origin_location, mocker -): - '''Transform resource identifier.''' - session = new_unmanaged_location.session - - transformer = Base64ResourceIdentifierTransformer(session) - mocker.patch.object( - new_unmanaged_location, 'resource_identifier_transformer', transformer - ) - - new_unmanaged_location.add_component(new_component, origin_location) - - original_resource_identifier = origin_location.get_resource_identifier( - new_component - ) - assert ( - 
new_component['component_locations'][0]['resource_identifier'] - == base64.encodestring(original_resource_identifier) - ) - - assert ( - new_unmanaged_location.get_resource_identifier(new_component) - == original_resource_identifier - ) - - -def test_get_filesystem_path(new_component, new_location, origin_location): - '''Retrieve filesystem path.''' - new_location.add_component(new_component, origin_location) - resource_identifier = new_location.structure.get_resource_identifier( - new_component - ) - expected = os.path.normpath( - os.path.join(new_location.accessor.prefix, resource_identifier) - ) - assert new_location.get_filesystem_path(new_component) == expected - - -def test_get_context(new_component, new_location, origin_location): - '''Retrieve context for component.''' - resource_identifier = origin_location.get_resource_identifier( - new_component - ) - context = new_location._get_context(new_component, origin_location) - assert context == { - 'source_resource_identifier': resource_identifier - } - - -def test_get_context_for_component_not_in_source(new_component, new_location): - '''Retrieve context for component not in source location.''' - context = new_location._get_context(new_component, new_location) - assert context == {} - - -def test_data_transfer(session, new_location, origin_location): - '''Transfer a real file and make sure it is identical.''' - video_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'colour_wheel.mov' - ) - ) - component = session.create_component( - video_file, location=new_location - ) - new_video_file = new_location.get_filesystem_path(component) - - assert filecmp.cmp(video_file, new_video_file) - - -def test_get_thumbnail_url(server_location, server_image_component): - '''Test download a thumbnail image from server location''' - thumbnail_url = server_location.get_thumbnail_url( - server_image_component, - size=10 - ) - assert thumbnail_url - - response = 
requests.get(thumbnail_url) - response.raise_for_status() - - image_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image-resized-10.png' - ) - ) - expected_image_contents = open(image_file).read() - assert response.content == expected_image_contents diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py deleted file mode 100644 index 3a81fdbe85..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import ftrack_api - - -def test_query_metadata(new_project): - '''Query metadata.''' - session = new_project.session - - metadata_key = uuid.uuid1().hex - metadata_value = uuid.uuid1().hex - new_project['metadata'][metadata_key] = metadata_value - session.commit() - - results = session.query( - 'Project where metadata.key is {0}'.format(metadata_key) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - results = session.query( - 'Project where metadata.value is {0}'.format(metadata_value) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - results = session.query( - 'Project where metadata.key is {0} and ' - 'metadata.value is {1}'.format(metadata_key, metadata_value) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - -def test_set_get_metadata_from_different_sessions(new_project): - '''Get and set metadata using different sessions.''' - session = new_project.session - - metadata_key = uuid.uuid1().hex - metadata_value = uuid.uuid1().hex - new_project['metadata'][metadata_key] = metadata_value - session.commit() - - new_session = ftrack_api.Session() - project = 
new_session.query( - 'Project where id is {0}'.format(new_project['id']) - )[0] - - assert project['metadata'][metadata_key] == metadata_value - - project['metadata'][metadata_key] = uuid.uuid1().hex - - new_session.commit() - - new_session = ftrack_api.Session() - project = new_session.query( - 'Project where id is {0}'.format(project['id']) - )[0] - - assert project['metadata'][metadata_key] != metadata_value - - -def test_get_set_multiple_metadata(new_project): - '''Get and set multiple metadata.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1', - 'key2': 'value2' - } - session.commit() - - assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) - - new_session = ftrack_api.Session() - retrieved = new_session.query( - 'Project where id is {0}'.format(new_project['id']) - )[0] - - assert set(retrieved['metadata'].keys()) == set(['key1', 'key2']) - - -def test_metadata_parent_type_remains_in_schema_id_format(session, new_project): - '''Metadata parent_type remains in schema id format post commit.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - assert entity['parent_type'] == new_project.entity_type - - -def test_set_metadata_twice(new_project): - '''Set metadata twice in a row.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1', - 'key2': 'value2' - } - session.commit() - - assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) - - new_project['metadata'] = { - 'key3': 'value3', - 'key4': 'value4' - } - session.commit() - - -def test_set_same_metadata_on_retrieved_entity(new_project): - '''Set same metadata on retrieved entity.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1' - } - session.commit() - - project = session.get('Project', new_project['id']) - - project['metadata'] = { - 'key1': 'value1' - 
} - session.commit() diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py deleted file mode 100644 index 5d854eaed4..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py +++ /dev/null @@ -1,67 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api -import ftrack_api.inspection - - -def test_create_reply(session, new_note, user, unique_name): - '''Create reply to a note.''' - reply_text = 'My reply on note' - new_note.create_reply(reply_text, user) - - session.commit() - - assert len(new_note['replies']) == 1 - - assert reply_text == new_note['replies'][0]['content'] - - -def test_create_note_on_entity(session, new_task, user, unique_name): - '''Create note attached to an entity.''' - note = new_task.create_note(unique_name, user) - session.commit() - - session.reset() - retrieved_task = session.get(*ftrack_api.inspection.identity(new_task)) - assert len(retrieved_task['notes']) == 1 - assert ( - ftrack_api.inspection.identity(retrieved_task['notes'][0]) - == ftrack_api.inspection.identity(note) - ) - - -def test_create_note_on_entity_specifying_recipients( - session, new_task, user, unique_name, new_user -): - '''Create note with specified recipients attached to an entity.''' - recipient = new_user - note = new_task.create_note(unique_name, user, recipients=[recipient]) - session.commit() - - session.reset() - retrieved_note = session.get(*ftrack_api.inspection.identity(note)) - - # Note: The calling user is automatically added server side so there will be - # 2 recipients. 
- assert len(retrieved_note['recipients']) == 2 - specified_recipient_present = False - for entry in retrieved_note['recipients']: - if entry['resource_id'] == recipient['id']: - specified_recipient_present = True - break - - assert specified_recipient_present - - -def test_create_note_on_entity_specifying_category( - session, new_task, user, unique_name -): - '''Create note with specified category attached to an entity.''' - category = session.query('NoteCategory').first() - note = new_task.create_note(unique_name, user, category=category) - session.commit() - - session.reset() - retrieved_note = session.get(*ftrack_api.inspection.identity(note)) - assert retrieved_note['category']['id'] == category['id'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py deleted file mode 100644 index 10ef485aed..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py +++ /dev/null @@ -1,64 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest - - -@pytest.mark.parametrize('schema, expected', [ - ('Task', [ - 'Not started', 'In progress', 'Awaiting approval', 'Approved' - ]), - ('Shot', [ - 'Normal', 'Omitted', 'On Hold' - ]), - ('AssetVersion', [ - 'Approved', 'Pending' - ]), - ('AssetBuild', [ - 'Normal', 'Omitted', 'On Hold' - ]), - ('Invalid', ValueError) -], ids=[ - 'task', - 'shot', - 'asset version', - 'asset build', - 'invalid' -]) -def test_get_statuses(project_schema, schema, expected): - '''Retrieve statuses for schema and optional type.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - project_schema.get_statuses(schema) - - else: - statuses = project_schema.get_statuses(schema) - status_names = 
[status['name'] for status in statuses] - assert sorted(status_names) == sorted(expected) - - -@pytest.mark.parametrize('schema, expected', [ - ('Task', [ - 'Generic', 'Animation', 'Modeling', 'Previz', 'Lookdev', 'Hair', - 'Cloth', 'FX', 'Lighting', 'Compositing', 'Tracking', 'Rigging', - 'test 1', 'test type 2' - ]), - ('AssetBuild', ['Character', 'Prop', 'Environment', 'Matte Painting']), - ('Invalid', ValueError) -], ids=[ - 'task', - 'asset build', - 'invalid' -]) -def test_get_types(project_schema, schema, expected): - '''Retrieve types for schema.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - project_schema.get_types(schema) - - else: - types = project_schema.get_types(schema) - type_names = [type_['name'] for type_ in types] - assert sorted(type_names) == sorted(expected) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py deleted file mode 100644 index 1a5afe70c9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - - -def test_add_remove_and_query_scopes_for_tasks(session, new_task, new_scope): - '''Add, remove and query scopes for task.''' - query_string = 'Task where scopes.name is {0}'.format(new_scope['name']) - tasks = session.query(query_string) - - assert len(tasks) == 0 - - new_task['scopes'].append(new_scope) - session.commit() - - tasks = session.query(query_string) - - assert len(tasks) == 1 and tasks[0] == new_task - - new_task['scopes'].remove(new_scope) - session.commit() - - tasks = session.query(query_string) - - assert len(tasks) == 0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py 
b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py deleted file mode 100644 index 4d7e455042..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py +++ /dev/null @@ -1,49 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - - -def test_force_start_timer(new_user, task): - '''Successfully force starting a timer when another timer is running.''' - first_timer = new_user.start_timer(context=task) - second_timer = new_user.start_timer(context=task, force=True) - - assert first_timer['id'] - assert second_timer['id'] - assert first_timer['id'] != second_timer['id'] - - -def test_timer_creates_timelog(new_user, task, unique_name): - '''Successfully create time log when stopping timer. - - A timer which was immediately stopped should have a duration less than - a minute. - - ''' - comment = 'comment' + unique_name - timer = new_user.start_timer( - context=task, - name=unique_name, - comment=comment - ) - timer_start = timer['start'] - timelog = new_user.stop_timer() - - assert timelog['user_id'] == new_user['id'] - assert timelog['context_id']== task['id'] - assert timelog['name'] == unique_name - assert timelog['comment'] == comment - assert timelog['start'] == timer_start - assert isinstance(timelog['duration'], (int, long, float)) - assert timelog['duration'] < 60 - - -def test_reset_user_api_key(new_user): - '''Test resetting of api keys.''' - - api_keys = list() - for i in range(0, 10): - api_keys.append(new_user.reset_api_key()) - - # make sure all api keys are unique - assert len(set(api_keys)) == 10 - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py deleted file mode 100644 index bc98f15de2..0000000000 --- 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py deleted file mode 100644 index 09b270a043..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py +++ /dev/null @@ -1,92 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import time -import logging -import argparse - -import ftrack_api -from ftrack_api.event.base import Event - - -TOPIC = 'test_event_hub_server_heartbeat' -RECEIVED = [] - - -def callback(event): - '''Track received messages.''' - counter = event['data']['counter'] - RECEIVED.append(counter) - print('Received message {0} ({1} in total)'.format(counter, len(RECEIVED))) - - -def main(arguments=None): - '''Publish and receive heartbeat test.''' - parser = argparse.ArgumentParser() - parser.add_argument('mode', choices=['publish', 'subscribe']) - - namespace = parser.parse_args(arguments) - logging.basicConfig(level=logging.INFO) - - session = ftrack_api.Session() - - message_count = 100 - sleep_time_per_message = 1 - - if namespace.mode == 'publish': - max_atempts = 100 - retry_interval = 0.1 - atempt = 0 - while not session.event_hub.connected: - print ( - 'Session is not yet connected to event hub, sleeping for 0.1s' - ) - time.sleep(retry_interval) - - atempt = atempt + 1 - if atempt > max_atempts: - raise Exception( - 'Unable to connect to server within {0} seconds'.format( - max_atempts * retry_interval - ) - ) - - print('Sending {0} messages...'.format(message_count)) - - for counter in range(1, message_count + 1): - session.event_hub.publish( - 
Event(topic=TOPIC, data=dict(counter=counter)) - ) - print('Sent message {0}'.format(counter)) - - if counter < message_count: - time.sleep(sleep_time_per_message) - - elif namespace.mode == 'subscribe': - session.event_hub.subscribe('topic={0}'.format(TOPIC), callback) - session.event_hub.wait( - duration=( - ((message_count - 1) * sleep_time_per_message) + 15 - ) - ) - - if len(RECEIVED) != message_count: - print( - '>> Failed to receive all messages. Dropped {0} <<' - .format(message_count - len(RECEIVED)) - ) - return False - - # Give time to flush all buffers. - time.sleep(5) - - return True - - -if __name__ == '__main__': - result = main(sys.argv[1:]) - if not result: - raise SystemExit(1) - else: - raise SystemExit(0) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py deleted file mode 100644 index d9496fe070..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py +++ /dev/null @@ -1,36 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.event.base - - -def test_string_representation(): - '''String representation.''' - event = ftrack_api.event.base.Event('test', id='some-id') - assert str(event) == ( - "" - ) - - -def test_stop(): - '''Set stopped flag on event.''' - event = ftrack_api.event.base.Event('test', id='some-id') - - assert event.is_stopped() is False - - event.stop() - assert event.is_stopped() is True - - -def test_is_stopped(): - '''Report stopped status of event.''' - event = ftrack_api.event.base.Event('test', id='some-id') - - assert event.is_stopped() is False - - event.stop() - assert event.is_stopped() is True - - event.stop() - assert event.is_stopped() is True diff --git 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py deleted file mode 100644 index 4cf68b58f0..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py +++ /dev/null @@ -1,174 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import operator -import inspect - -import pytest - -from ftrack_api.event.expression import ( - Expression, All, Any, Not, Condition, Parser -) -from ftrack_api.exception import ParseError - - -@pytest.fixture() -def candidate(): - '''Return common candidate to test expressions against.''' - return { - 'id': 10, - 'name': 'value', - 'change': { - 'name': 'value', - 'new_value': 10 - } - } - - -@pytest.mark.parametrize('expression, expected', [ - pytest.mark.xfail(('', Expression())), - ('invalid', ParseError), - ('key=value nor other=value', ParseError), - ('key=value', Condition('key', operator.eq, 'value')), - ('key="value"', Condition('key', operator.eq, 'value')), - ( - 'a=b and ((c=d or e!=f) and not g.h > 10)', - All([ - Condition('a', operator.eq, 'b'), - All([ - Any([ - Condition('c', operator.eq, 'd'), - Condition('e', operator.ne, 'f') - ]), - Not( - Condition('g.h', operator.gt, 10) - ) - ]) - ]) - ) -], ids=[ - 'empty expression', - 'invalid expression', - 'invalid conjunction', - 'basic condition', - 'basic quoted condition', - 'complex condition' -]) -def test_parser_parse(expression, expected): - '''Parse expression into Expression instances.''' - parser = Parser() - - if inspect.isclass(expected)and issubclass(expected, Exception): - with pytest.raises(expected): - parser.parse(expression) - else: - assert str(parser.parse(expression)) == str(expected) - - -@pytest.mark.parametrize('expression, expected', [ - (Expression(), ''), - (All([Expression(), Expression()]), ' ]>'), - 
(Any([Expression(), Expression()]), ' ]>'), - (Not(Expression()), '>'), - (Condition('key', '=', 'value'), '') -], ids=[ - 'Expression', - 'All', - 'Any', - 'Not', - 'Condition' -]) -def test_string_representation(expression, expected): - '''String representation of expression.''' - assert str(expression) == expected - - -@pytest.mark.parametrize('expression, expected', [ - # Expression - (Expression(), True), - - # All - (All(), True), - (All([Expression(), Expression()]), True), - (All([Expression(), Condition('test', operator.eq, 'value')]), False), - - # Any - (Any(), False), - (Any([Expression(), Condition('test', operator.eq, 'value')]), True), - (Any([ - Condition('test', operator.eq, 'value'), - Condition('other', operator.eq, 'value') - ]), False), - - # Not - (Not(Expression()), False), - (Not(Not(Expression())), True) -], ids=[ - 'Expression-always matches', - - 'All-no expressions always matches', - 'All-all match', - 'All-not all match', - - 'Any-no expressions never matches', - 'Any-some match', - 'Any-none match', - - 'Not-invert positive match', - 'Not-double negative is positive match' -]) -def test_match(expression, candidate, expected): - '''Determine if candidate matches expression.''' - assert expression.match(candidate) is expected - - -def parametrize_test_condition_match(metafunc): - '''Parametrize condition_match tests.''' - identifiers = [] - data = [] - - matrix = { - # Operator, match, no match - operator.eq: { - 'match': 10, 'no-match': 20, - 'wildcard-match': 'valu*', 'wildcard-no-match': 'values*' - }, - operator.ne: {'match': 20, 'no-match': 10}, - operator.ge: {'match': 10, 'no-match': 20}, - operator.le: {'match': 10, 'no-match': 0}, - operator.gt: {'match': 0, 'no-match': 10}, - operator.lt: {'match': 20, 'no-match': 10} - } - - for operator_function, values in matrix.items(): - for value_label, value in values.items(): - if value_label.startswith('wildcard'): - key_options = { - 'plain': 'name', - 'nested': 'change.name' - } - 
else: - key_options = { - 'plain': 'id', - 'nested': 'change.new_value' - } - - for key_label, key in key_options.items(): - identifiers.append('{} operator {} key {}'.format( - operator_function.__name__, key_label, value_label - )) - - data.append(( - key, operator_function, value, - 'no-match' not in value_label - )) - - metafunc.parametrize( - 'key, operator, value, expected', data, ids=identifiers - ) - - -def test_condition_match(key, operator, value, candidate, expected): - '''Determine if candidate matches condition expression.''' - condition = Condition(key, operator, value) - assert condition.match(candidate) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py deleted file mode 100644 index 6f1920dddf..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py +++ /dev/null @@ -1,701 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect -import json -import os -import time -import subprocess -import sys - -import pytest - -import ftrack_api.event.hub -import ftrack_api.event.subscriber -from ftrack_api.event.base import Event -import ftrack_api.exception - - -class MockClass(object): - '''Mock class for testing.''' - - def method(self): - '''Mock method for testing.''' - - -def mockFunction(): - '''Mock function for testing.''' - - -class MockConnection(object): - '''Mock connection for testing.''' - - @property - def connected(self): - '''Return whether connected.''' - return True - - def close(self): - '''Close mock connection.''' - pass - - -def assert_callbacks(hub, callbacks): - '''Assert hub has exactly *callbacks* subscribed.''' - # Subscribers always starts with internal handle_reply subscriber. 
- subscribers = hub._subscribers[:] - subscribers.pop(0) - - if len(subscribers) != len(callbacks): - raise AssertionError( - 'Number of subscribers ({0}) != number of callbacks ({1})' - .format(len(subscribers), len(callbacks)) - ) - - for index, subscriber in enumerate(subscribers): - if subscriber.callback != callbacks[index]: - raise AssertionError( - 'Callback at {0} != subscriber callback at same index.' - .format(index) - ) - - -@pytest.fixture() -def event_hub(request, session): - '''Return event hub to test against. - - Hub is automatically connected at start of test and disconnected at end. - - ''' - hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - hub.connect() - - def cleanup(): - '''Cleanup.''' - if hub.connected: - hub.disconnect() - - request.addfinalizer(cleanup) - - return hub - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000') -], ids=[ - 'with port', - 'without port' -]) -def test_get_server_url(server_url, expected): - '''Return server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_server_url() == expected - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'test.ftrackapp.com:9000') -], ids=[ - 'with port', - 'without port' -]) -def test_get_network_location(server_url, expected): - '''Return network location of server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_network_location() == expected - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', True), - ('http://test.ftrackapp.com', False) -], ids=[ - 'secure', - 'not secure' -]) -def test_secure_property(server_url, expected, mocker): - '''Return 
whether secure connection used.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.secure is expected - - -def test_connected_property(session): - '''Return connected state.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - assert event_hub.connected is False - - event_hub.connect() - assert event_hub.connected is True - - event_hub.disconnect() - assert event_hub.connected is False - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000'), - ('test.ftrackapp.com', ValueError), - ('https://:9000', ValueError), -], ids=[ - 'with port', - 'without port', - 'missing scheme', - 'missing hostname' -]) -def test_initialise_against_server_url(server_url, expected): - '''Initialise against server url.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - else: - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_server_url() == expected - - -def test_connect(session): - '''Connect.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.connect() - - assert event_hub.connected is True - event_hub.disconnect() - - -def test_connect_when_already_connected(event_hub): - '''Fail to connect when already connected''' - assert event_hub.connected is True - - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.connect() - - assert 'Already connected' in str(error) - - -def test_connect_failure(session, mocker): - '''Fail to connect to server.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - def force_fail(*args, **kwargs): 
- '''Force connection failure.''' - raise Exception('Forced fail.') - - mocker.patch('websocket.create_connection', force_fail) - with pytest.raises(ftrack_api.exception.EventHubConnectionError): - event_hub.connect() - - -def test_connect_missing_required_transport(session, mocker, caplog): - '''Fail to connect to server that does not provide correct transport.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - original_get_socket_io_session = event_hub._get_socket_io_session - - def _get_socket_io_session(): - '''Patched to return no transports.''' - session = original_get_socket_io_session() - return ftrack_api.event.hub.SocketIoSession( - session[0], session[1], [] - ) - - mocker.patch.object( - event_hub, '_get_socket_io_session', _get_socket_io_session - ) - - with pytest.raises(ftrack_api.exception.EventHubConnectionError): - event_hub.connect() - - logs = caplog.records() - assert ( - 'Server does not support websocket sessions.' 
in str(logs[-1].exc_info) - ) - - -def test_disconnect(event_hub): - '''Disconnect and unsubscribe all subscribers.''' - event_hub.disconnect() - assert len(event_hub._subscribers) == 0 - assert event_hub.connected is False - - -def test_disconnect_without_unsubscribing(event_hub): - '''Disconnect without unsubscribing all subscribers.''' - event_hub.disconnect(unsubscribe=False) - assert len(event_hub._subscribers) > 0 - assert event_hub.connected is False - - -def test_close_connection_from_manually_connected_hub(session_no_autoconnect_hub): - '''Close connection from manually connected hub.''' - session_no_autoconnect_hub.event_hub.connect() - session_no_autoconnect_hub.close() - assert session_no_autoconnect_hub.event_hub.connected is False - - -def test_disconnect_when_not_connected(session): - '''Fail to disconnect when not connected''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.disconnect() - - assert 'Not currently connected' in str(error) - - -def test_reconnect(event_hub): - '''Reconnect successfully.''' - assert event_hub.connected is True - event_hub.reconnect() - assert event_hub.connected is True - - -def test_reconnect_when_not_connected(session): - '''Reconnect successfully even if not already connected.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - assert event_hub.connected is False - - event_hub.reconnect() - assert event_hub.connected is True - - event_hub.disconnect() - - -def test_fail_to_reconnect(session, mocker): - '''Fail to reconnect.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.connect() - assert event_hub.connected is True - - def force_fail(*args, **kwargs): - '''Force connection failure.''' - raise Exception('Forced fail.') - - 
mocker.patch('websocket.create_connection', force_fail) - - attempts = 2 - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.reconnect(attempts=attempts, delay=0.5) - - assert 'Failed to reconnect to event server' in str(error) - assert 'after {} attempts'.format(attempts) in str(error) - - -def test_wait(event_hub): - '''Wait for event and handle as they arrive.''' - called = {'callback': False} - - def callback(event): - called['callback'] = True - - event_hub.subscribe('topic=test-subscribe', callback) - - event_hub.publish(Event(topic='test-subscribe')) - - # Until wait, the event should not have been processed even if received. - time.sleep(1) - assert called == {'callback': False} - - event_hub.wait(2) - assert called == {'callback': True} - - -def test_wait_interrupted_by_disconnect(event_hub): - '''Interrupt wait loop with disconnect event.''' - wait_time = 5 - start = time.time() - - # Inject event directly for test purposes. - event = Event(topic='ftrack.meta.disconnected') - event_hub._event_queue.put(event) - - event_hub.wait(wait_time) - - assert time.time() - start < wait_time - - -@pytest.mark.parametrize('identifier, registered', [ - ('registered-test-subscriber', True), - ('unregistered-test-subscriber', False) -], ids=[ - 'registered', - 'missing' -]) -def test_get_subscriber_by_identifier(event_hub, identifier, registered): - '''Return subscriber by identifier.''' - def callback(event): - pass - - subscriber = { - 'id': 'registered-test-subscriber' - } - - event_hub.subscribe('topic=test-subscribe', callback, subscriber) - retrieved = event_hub.get_subscriber_by_identifier(identifier) - - if registered: - assert isinstance(retrieved, ftrack_api.event.subscriber.Subscriber) - assert retrieved.metadata.get('id') == subscriber['id'] - else: - assert retrieved is None - - -def test_subscribe(event_hub): - '''Subscribe to topics.''' - called = {'a': False, 'b': False} - - def callback_a(event): - called['a'] = True 
- - def callback_b(event): - called['b'] = True - - event_hub.subscribe('topic=test-subscribe', callback_a) - event_hub.subscribe('topic=test-subscribe-other', callback_b) - - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - - assert called == {'a': True, 'b': False} - - -def test_subscribe_before_connected(session): - '''Subscribe to topic before connected.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - called = {'callback': False} - - def callback(event): - called['callback'] = True - - identifier = 'test-subscriber' - event_hub.subscribe( - 'topic=test-subscribe', callback, subscriber={'id': identifier} - ) - assert event_hub.get_subscriber_by_identifier(identifier) is not None - - event_hub.connect() - - try: - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - finally: - event_hub.disconnect() - - assert called == {'callback': True} - - -def test_duplicate_subscriber(event_hub): - '''Fail to subscribe same subscriber more than once.''' - subscriber = {'id': 'test-subscriber'} - event_hub.subscribe('topic=test', None, subscriber=subscriber) - - with pytest.raises(ftrack_api.exception.NotUniqueError) as error: - event_hub.subscribe('topic=test', None, subscriber=subscriber) - - assert '{0} already exists'.format(subscriber['id']) in str(error) - - -def test_unsubscribe(event_hub): - '''Unsubscribe a specific callback.''' - def callback_a(event): - pass - - def callback_b(event): - pass - - identifier_a = event_hub.subscribe('topic=test', callback_a) - identifier_b = event_hub.subscribe('topic=test', callback_b) - - assert_callbacks(event_hub, [callback_a, callback_b]) - - event_hub.unsubscribe(identifier_a) - - # Unsubscribe requires confirmation event so wait here to give event a - # chance to process. 
- time.sleep(5) - - assert_callbacks(event_hub, [callback_b]) - - -def test_unsubscribe_whilst_disconnected(event_hub): - '''Unsubscribe whilst disconnected.''' - identifier = event_hub.subscribe('topic=test', None) - event_hub.disconnect(unsubscribe=False) - - event_hub.unsubscribe(identifier) - assert_callbacks(event_hub, []) - - -def test_unsubscribe_missing_subscriber(event_hub): - '''Fail to unsubscribe a non-subscribed subscriber.''' - identifier = 'non-subscribed-subscriber' - with pytest.raises(ftrack_api.exception.NotFoundError) as error: - event_hub.unsubscribe(identifier) - - assert ( - 'missing subscriber with identifier {}'.format(identifier) - in str(error) - ) - - -@pytest.mark.parametrize('event_data', [ - dict(source=dict(id='1', user=dict(username='auto'))), - dict(source=dict(user=dict(username='auto'))), - dict(source=dict(id='1')), - dict() -], ids=[ - 'pre-prepared', - 'missing id', - 'missing user', - 'no source' -]) -def test_prepare_event(session, event_data): - '''Prepare event.''' - # Replace username `auto` in event data with API user. 
- try: - if event_data['source']['user']['username'] == 'auto': - event_data['source']['user']['username'] = session.api_user - except KeyError: - pass - - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.id = '1' - - event = Event('test', id='event-id', **event_data) - expected = Event( - 'test', id='event-id', source=dict(id='1', user=dict(username=session.api_user)) - ) - event_hub._prepare_event(event) - assert event == expected - - -def test_prepare_reply_event(session): - '''Prepare reply event.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - source_event = Event('source', source=dict(id='source-id')) - reply_event = Event('reply') - - event_hub._prepare_reply_event(reply_event, source_event) - assert source_event['source']['id'] in reply_event['target'] - assert reply_event['in_reply_to_event'] == source_event['id'] - - event_hub._prepare_reply_event(reply_event, source_event, {'id': 'source'}) - assert reply_event['source'] == {'id': 'source'} - - -def test_publish(event_hub): - '''Publish asynchronous event.''' - called = {'callback': False} - - def callback(event): - called['callback'] = True - - event_hub.subscribe('topic=test-subscribe', callback) - - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - - assert called == {'callback': True} - - -def test_publish_raising_error(event_hub): - '''Raise error, when configured, on failed publish.''' - # Note that the event hub currently only fails publish when not connected. - # All other errors are inconsistently swallowed. 
- event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - - with pytest.raises(Exception): - event_hub.publish(event, on_error='raise') - - -def test_publish_ignoring_error(event_hub): - '''Ignore error, when configured, on failed publish.''' - # Note that the event hub currently only fails publish when not connected. - # All other errors are inconsistently swallowed. - event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event, on_error='ignore') - - -def test_publish_logs_other_errors(event_hub, caplog, mocker): - '''Log publish errors other than connection error.''' - # Mock connection to force error. - mocker.patch.object(event_hub, '_connection', MockConnection()) - - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event) - - expected = 'Error sending event {0}.'.format(event) - messages = [record.getMessage().strip() for record in caplog.records()] - assert expected in messages, 'Expected log message missing in output.' 
- - -def test_synchronous_publish(event_hub): - '''Publish event synchronously and collect results.''' - def callback_a(event): - return 'A' - - def callback_b(event): - return 'B' - - def callback_c(event): - return 'C' - - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) - - results = event_hub.publish(Event(topic='test'), synchronous=True) - assert results == ['A', 'B', 'C'] - - -def test_publish_with_reply(event_hub): - '''Publish asynchronous event with on reply handler.''' - - def replier(event): - '''Replier.''' - return 'Replied' - - event_hub.subscribe('topic=test', replier) - - called = {'callback': None} - - def on_reply(event): - called['callback'] = event['data'] - - event_hub.publish(Event(topic='test'), on_reply=on_reply) - event_hub.wait(2) - - assert called['callback'] == 'Replied' - - -def test_publish_with_multiple_replies(event_hub): - '''Publish asynchronous event and retrieve multiple replies.''' - - def replier_one(event): - '''Replier.''' - return 'One' - - def replier_two(event): - '''Replier.''' - return 'Two' - - event_hub.subscribe('topic=test', replier_one) - event_hub.subscribe('topic=test', replier_two) - - called = {'callback': []} - - def on_reply(event): - called['callback'].append(event['data']) - - event_hub.publish(Event(topic='test'), on_reply=on_reply) - event_hub.wait(2) - - assert sorted(called['callback']) == ['One', 'Two'] - - -@pytest.mark.slow -def test_server_heartbeat_response(): - '''Maintain connection by responding to server heartbeat request.''' - test_script = os.path.join( - os.path.dirname(__file__), 'event_hub_server_heartbeat.py' - ) - - # Start subscriber that will listen for all three messages. - subscriber = subprocess.Popen([sys.executable, test_script, 'subscribe']) - - # Give subscriber time to connect to server. 
- time.sleep(10) - - # Start publisher to publish three messages. - publisher = subprocess.Popen([sys.executable, test_script, 'publish']) - - publisher.wait() - subscriber.wait() - - assert subscriber.returncode == 0 - - -def test_stop_event(event_hub): - '''Stop processing of subsequent local handlers when stop flag set.''' - called = { - 'a': False, - 'b': False, - 'c': False - } - - def callback_a(event): - called['a'] = True - - def callback_b(event): - called['b'] = True - event.stop() - - def callback_c(event): - called['c'] = True - - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) - - event_hub.publish(Event(topic='test')) - event_hub.wait(2) - - assert called == { - 'a': True, - 'b': True, - 'c': False - } - - -def test_encode(session): - '''Encode event data.''' - encoded = session.event_hub._encode( - dict(name='ftrack.event', args=[Event('test')]) - ) - assert 'inReplyToEvent' in encoded - assert 'in_reply_to_event' not in encoded - - -def test_decode(session): - '''Decode event data.''' - decoded = session.event_hub._decode( - json.dumps({ - 'inReplyToEvent': 'id' - }) - ) - - assert 'in_reply_to_event' in decoded - assert 'inReplyToEvent' not in decoded diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py deleted file mode 100644 index dc8ac69fd9..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py +++ /dev/null @@ -1,33 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.event.subscriber -from ftrack_api.event.base import Event - - -def test_string_representation(): - '''String representation.''' - subscriber = 
ftrack_api.event.subscriber.Subscriber( - 'topic=test', lambda x: None, {'meta': 'info'}, 100 - ) - - assert str(subscriber) == ( - '' - ) - - -@pytest.mark.parametrize('expression, event, expected', [ - ('topic=test', Event(topic='test'), True), - ('topic=test', Event(topic='other-test'), False) -], ids=[ - 'interested', - 'not interested' -]) -def test_interested_in(expression, event, expected): - '''Determine if subscriber interested in event.''' - subscriber = ftrack_api.event.subscriber.Subscriber( - expression, lambda x: None, {'meta': 'info'}, 100 - ) - assert subscriber.interested_in(event) is expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py deleted file mode 100644 index 1535309f25..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.event.subscription -from ftrack_api.event.base import Event - - -def test_string_representation(): - '''String representation is subscription expression.''' - expression = 'topic=some-topic' - subscription = ftrack_api.event.subscription.Subscription(expression) - - assert str(subscription) == expression - - -@pytest.mark.parametrize('expression, event, expected', [ - ('topic=test', Event(topic='test'), True), - ('topic=test', Event(topic='other-test'), False) -], ids=[ - 'match', - 'no match' -]) -def test_includes(expression, event, expected): - '''Subscription includes event.''' - subscription = ftrack_api.event.subscription.Subscription(expression) - assert subscription.includes(event) is expected diff --git 
a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py deleted file mode 100644 index bc98f15de2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py deleted file mode 100644 index 51c896f96b..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py +++ /dev/null @@ -1,36 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.resource_identifier_transformer.base as _transformer - - -@pytest.fixture() -def transformer(session): - '''Return instance of ResourceIdentifierTransformer.''' - return _transformer.ResourceIdentifierTransformer(session) - - -@pytest.mark.parametrize('resource_identifier, context, expected', [ - ('identifier', None, 'identifier'), - ('identifier', {'user': {'username': 'user'}}, 'identifier') -], ids=[ - 'no context', - 'basic context' -]) -def test_encode(transformer, resource_identifier, context, expected): - '''Encode resource identifier.''' - assert transformer.encode(resource_identifier, context) == expected - - -@pytest.mark.parametrize('resource_identifier, context, expected', [ - ('identifier', None, 'identifier'), - ('identifier', {'user': {'username': 'user'}}, 'identifier') -], ids=[ - 'no context', - 'basic context' -]) -def test_decode(transformer, 
resource_identifier, context, expected): - '''Encode resource identifier.''' - assert transformer.decode(resource_identifier, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py deleted file mode 100644 index bc98f15de2..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py deleted file mode 100644 index dbf91ead20..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py +++ /dev/null @@ -1,31 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.structure.base - - -class Concrete(ftrack_api.structure.base.Structure): - '''Concrete implementation to allow testing non-abstract methods.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. 
- - ''' - return 'resource_identifier' - - -@pytest.mark.parametrize('sequence, expected', [ - ({'padding': None}, '%d'), - ({'padding': 4}, '%04d') -], ids=[ - 'no padding', - 'padded' -]) -def test_get_sequence_expression(sequence, expected): - '''Get sequence expression from sequence.''' - structure = Concrete() - assert structure._get_sequence_expression(sequence) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py deleted file mode 100644 index 01ccb35ac8..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py +++ /dev/null @@ -1,49 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest -import mock - -import ftrack_api -import ftrack_api.structure.entity_id - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.entity_id.EntityIdStructure() - - -# Note: When it is possible to use indirect=True on just a few arguments, the -# called functions here can change to standard fixtures. 
-# https://github.com/pytest-dev/pytest/issues/579 - -def valid_entity(): - '''Return valid entity.''' - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': 'file_component', - 'file_type': '.png' - }) - - return entity - - -@pytest.mark.parametrize('entity, context, expected', [ - (valid_entity(), {}, 'f6cd40cb-d1c0-469f-a2d5-10369be8a724'), - (mock.Mock(), {}, Exception) -], ids=[ - 'valid-entity', - 'non-entity' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py deleted file mode 100644 index ef81da2d65..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest - -import ftrack_api -import ftrack_api.structure.id - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.id.IdStructure(prefix='path') - - -# Note: When it is possible to use indirect=True on just a few arguments, the -# called functions here can change to standard fixtures. 
-# https://github.com/pytest-dev/pytest/issues/579 - -def file_component(container=None): - '''Return file component.''' - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': '0001', - 'file_type': '.png', - 'container': container - }) - - return entity - - -def sequence_component(padding=0): - '''Return sequence component with *padding*.''' - session = ftrack_api.Session() - - entity = session.create('SequenceComponent', { - 'id': 'ff17edad-2129-483b-8b59-d1a654c8497b', - 'name': 'sequence_component', - 'file_type': '.png', - 'padding': padding - }) - - return entity - - -def container_component(): - '''Return container component.''' - session = ftrack_api.Session() - - entity = session.create('ContainerComponent', { - 'id': '03ab9967-f86c-4b55-8252-cd187d0c244a', - 'name': 'container_component' - }) - - return entity - - -def unsupported_entity(): - '''Return an unsupported entity.''' - session = ftrack_api.Session() - - entity = session.create('User', { - 'username': 'martin' - }) - - return entity - - -@pytest.mark.parametrize('entity, context, expected', [ - ( - file_component(), {}, - 'path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png' - ), - ( - file_component(container_component()), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/' - 'f6cd40cb-d1c0-469f-a2d5-10369be8a724.png' - ), - ( - file_component(sequence_component()), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png' - ), - ( - sequence_component(padding=0), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png' - ), - ( - sequence_component(padding=4), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png' - ), - ( - container_component(), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a' - ), - (unsupported_entity(), {}, NotImplementedError) -], ids=[ - 'file-component', - 'file-component-in-container', - 'file-component-in-sequence', - 'unpadded-sequence-component', 
- 'padded-sequence-component', - 'container-component', - 'unsupported-entity' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py deleted file mode 100644 index e294e04a70..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py +++ /dev/null @@ -1,33 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest -import mock - -import ftrack_api.structure.origin - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.origin.OriginStructure() - - -@pytest.mark.parametrize('entity, context, expected', [ - (mock.Mock(), {'source_resource_identifier': 'identifier'}, 'identifier'), - (mock.Mock(), {}, ValueError), - (mock.Mock(), None, ValueError) -], ids=[ - 'valid-context', - 'invalid-context', - 'unspecified-context' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py deleted 
file mode 100644 index dd72f8ec3f..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py +++ /dev/null @@ -1,309 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api -import ftrack_api.structure.standard - - -@pytest.fixture(scope='session') -def new_project(request): - '''Return new empty project.''' - session = ftrack_api.Session() - - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -def new_container_component(): - '''Return container component.''' - session = ftrack_api.Session() - - entity = session.create('ContainerComponent', { - 'name': 'container_component' - }) - - return entity - - -def new_sequence_component(): - '''Return sequence component.''' - session = ftrack_api.Session() - - entity = session.create_component( - '/tmp/foo/%04d.jpg [1-10]', location=None, data={'name': 'baz'} - ) - - return entity - - -def new_file_component(name='foo', container=None): - '''Return file component with *name* and *container*.''' - if container: - session = container.session - else: - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'name': name, - 'file_type': '.png', - 'container': container - }) - - return entity - - -# Reusable fixtures. -file_component = new_file_component() -container_component = new_container_component() -sequence_component = new_sequence_component() - - -# Note: to improve test performance the same project is reused throughout the -# tests. 
This means that all hierarchical names must be unique, otherwise an -# IntegrityError will be raised on the server. - -@pytest.mark.parametrize( - 'component, hierarchy, expected, structure, asset_name', - [ - ( - file_component, - [], - '{project_name}/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [], - '{project_name}/foobar/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - project_versions_prefix='foobar' - ), - 'my_new_asset' - ), - ( - file_component, - ['baz1', 'bar'], - '{project_name}/baz1/bar/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - sequence_component, - ['baz2', 'bar'], - '{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - sequence_component['members'][3], - ['baz3', 'bar'], - '{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - container_component, - ['baz4', 'bar'], - '{project_name}/baz4/bar/my_new_asset/v001/container_component', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(container=container_component), - ['baz5', 'bar'], - ( - '{project_name}/baz5/bar/my_new_asset/v001/container_component/' - 'foo.png' - ), - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [u'björn'], - '{project_name}/bjorn/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [u'björn!'], - '{project_name}/bjorn_/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(name=u'fää'), - [], - '{project_name}/my_new_asset/v001/faa.png', - ftrack_api.structure.standard.StandardStructure(), - 
'my_new_asset' - ), - ( - new_file_component(name=u'fo/o'), - [], - '{project_name}/my_new_asset/v001/fo_o.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [], - '{project_name}/aao/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - u'åäö' - ), - ( - file_component, - [], - '{project_name}/my_ne____w_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - u'my_ne!!!!w_asset' - ), - ( - file_component, - [u'björn2'], - u'{project_name}/björn2/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute=None - ), - 'my_new_asset' - ), - ( - file_component, - [u'bj!rn'], - '{project_name}/bj^rn/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute='^' - ), - 'my_new_asset' - ) - ], ids=[ - 'file_component_on_project', - 'file_component_on_project_with_prefix', - 'file_component_with_hierarchy', - 'sequence_component', - 'sequence_component_member', - 'container_component', - 'container_component_member', - 'slugify_non_ascii_hierarchy', - 'slugify_illegal_hierarchy', - 'slugify_non_ascii_component_name', - 'slugify_illegal_component_name', - 'slugify_non_ascii_asset_name', - 'slugify_illegal_asset_name', - 'slugify_none', - 'slugify_other_character' - ] -) -def test_get_resource_identifier( - component, hierarchy, expected, structure, asset_name, new_project -): - '''Get resource identifier.''' - session = component.session - - # Create structure, asset and version. - context_id = new_project['id'] - for name in hierarchy: - context_id = session.create('Folder', { - 'name': name, - 'project_id': new_project['id'], - 'parent_id': context_id - })['id'] - - asset = session.create( - 'Asset', {'name': asset_name, 'context_id': context_id} - ) - version = session.create('AssetVersion', {'asset': asset}) - - # Update component with version. 
- if component['container']: - component['container']['version'] = version - else: - component['version'] = version - - session.commit() - - assert structure.get_resource_identifier(component) == expected.format( - project_name=new_project['name'] - ) - - -def test_unsupported_entity(user): - '''Fail to get resource identifier for unsupported entity.''' - structure = ftrack_api.structure.standard.StandardStructure() - with pytest.raises(NotImplementedError): - structure.get_resource_identifier(user) - - -def test_component_without_version_relation(new_project): - '''Get an identifer for component without a version relation.''' - session = new_project.session - - asset = session.create( - 'Asset', {'name': 'foo', 'context_id': new_project['id']} - ) - version = session.create('AssetVersion', {'asset': asset}) - - session.commit() - - file_component = new_file_component() - file_component['version_id'] = version['id'] - - structure = ftrack_api.structure.standard.StandardStructure() - structure.get_resource_identifier(file_component) - - -def test_component_without_committed_version_relation(): - '''Fail to get an identifer for component without a committed version.''' - file_component = new_file_component() - session = file_component.session - version = session.create('AssetVersion', {}) - - file_component['version'] = version - - structure = ftrack_api.structure.standard.StandardStructure() - - with pytest.raises(ftrack_api.exception.StructureError): - structure.get_resource_identifier(file_component) - - -@pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' 
-) -def test_component_without_committed_asset_relation(): - '''Fail to get an identifer for component without a committed asset.''' - file_component = new_file_component() - session = file_component.session - version = session.create('AssetVersion', {}) - - file_component['version'] = version - - session.commit() - - structure = ftrack_api.structure.standard.StandardStructure() - - with pytest.raises(ftrack_api.exception.StructureError): - structure.get_resource_identifier(file_component) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py deleted file mode 100644 index 555adb2d89..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py +++ /dev/null @@ -1,146 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.attribute -import ftrack_api.exception - - -@pytest.mark.parametrize('attributes', [ - [], - [ftrack_api.attribute.Attribute('test')] -], ids=[ - 'no initial attributes', - 'with initial attributes' -]) -def test_initialise_attributes_collection(attributes): - '''Initialise attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert sorted(list(attribute_collection)) == sorted(attributes) - - -def test_add_attribute_to_attributes_collection(): - '''Add valid attribute to attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - assert attribute_collection.keys() == [] - attribute_collection.add(attribute) - assert attribute_collection.keys() == ['test'] - - -def test_add_duplicate_attribute_to_attributes_collection(): - '''Fail to add attribute with duplicate name to attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - 
attribute = ftrack_api.attribute.Attribute('test') - - attribute_collection.add(attribute) - with pytest.raises(ftrack_api.exception.NotUniqueError): - attribute_collection.add(attribute) - - -def test_remove_attribute_from_attributes_collection(): - '''Remove attribute from attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - attribute_collection.add(attribute) - assert len(attribute_collection) == 1 - - attribute_collection.remove(attribute) - assert len(attribute_collection) == 0 - - -def test_remove_missing_attribute_from_attributes_collection(): - '''Fail to remove attribute not present in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - with pytest.raises(KeyError): - attribute_collection.remove(attribute) - - -def test_get_attribute_from_attributes_collection(): - '''Get attribute from attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - attribute_collection.add(attribute) - - retrieved_attribute = attribute_collection.get('test') - - assert retrieved_attribute is attribute - - -def test_get_missing_attribute_from_attributes_collection(): - '''Get attribute not present in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - assert attribute_collection.get('test') is None - - -@pytest.mark.parametrize('attributes, expected', [ - ([], []), - ([ftrack_api.attribute.Attribute('test')], ['test']) -], ids=[ - 'no initial attributes', - 'with initial attributes' -]) -def test_attribute_collection_keys(attributes, expected): - '''Retrieve keys for attribute collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert sorted(attribute_collection.keys()) == sorted(expected) - - -@pytest.mark.parametrize('attribute, expected', [ - (None, 
False), - (ftrack_api.attribute.Attribute('b'), True), - (ftrack_api.attribute.Attribute('c'), False) -], ids=[ - 'none attribute', - 'present attribute', - 'missing attribute' -]) -def test_attributes_collection_contains(attribute, expected): - '''Check presence in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes([ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ]) - - assert (attribute in attribute_collection) is expected - - -@pytest.mark.parametrize('attributes, expected', [ - ([], 0), - ([ftrack_api.attribute.Attribute('test')], 1), - ( - [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ], - 2 - ) -], ids=[ - 'no attributes', - 'single attribute', - 'multiple attributes' -]) -def test_attributes_collection_count(attributes, expected): - '''Count attributes in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert len(attribute_collection) == expected - - -def test_iterate_over_attributes_collection(): - '''Iterate over attributes collection.''' - attributes = [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ] - - attribute_collection = ftrack_api.attribute.Attributes(attributes) - for attribute in attribute_collection: - attributes.remove(attribute) - - assert len(attributes) == 0 - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py deleted file mode 100644 index 7915737253..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py +++ /dev/null @@ -1,416 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import uuid -import tempfile - -import pytest - -import ftrack_api.cache - - -@pytest.fixture(params=['proxy', 'layered', 'memory', 'file', 
'serialised']) -def cache(request): - '''Return cache.''' - if request.param == 'proxy': - cache = ftrack_api.cache.ProxyCache( - ftrack_api.cache.MemoryCache() - ) - - elif request.param == 'layered': - cache = ftrack_api.cache.LayeredCache( - [ftrack_api.cache.MemoryCache()] - ) - - elif request.param == 'memory': - cache = ftrack_api.cache.MemoryCache() - - elif request.param == 'file': - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - cache = ftrack_api.cache.FileCache(cache_path) - - def cleanup(): - '''Cleanup.''' - try: - os.remove(cache_path) - except OSError: - # BSD DB (Mac OSX) implementation of the interface will append - # a .db extension. - os.remove(cache_path + '.db') - - request.addfinalizer(cleanup) - - elif request.param == 'serialised': - cache = ftrack_api.cache.SerialisedCache( - ftrack_api.cache.MemoryCache(), - encode=lambda value: value, - decode=lambda value: value - ) - - else: - raise ValueError( - 'Unrecognised cache fixture type {0!r}'.format(request.param) - ) - - return cache - - - -class Class(object): - '''Class for testing.''' - - def method(self, key): - '''Method for testing.''' - - -def function(mutable, x, y=2): - '''Function for testing.''' - mutable['called'] = True - return {'result': x + y} - - -def assert_memoised_call( - memoiser, function, expected, args=None, kw=None, memoised=True -): - '''Assert *function* call via *memoiser* was *memoised*.''' - mapping = {'called': False} - if args is not None: - args = (mapping,) + args - else: - args = (mapping,) - - result = memoiser.call(function, args, kw) - - assert result == expected - assert mapping['called'] is not memoised - - -def test_get(cache): - '''Retrieve item from cache.''' - cache.set('key', 'value') - assert cache.get('key') == 'value' - - -def test_get_missing_key(cache): - '''Fail to retrieve missing item from cache.''' - with pytest.raises(KeyError): - cache.get('key') - - -def test_set(cache): - '''Set item in 
cache.''' - with pytest.raises(KeyError): - cache.get('key') - - cache.set('key', 'value') - assert cache.get('key') == 'value' - - -def test_remove(cache): - '''Remove item from cache.''' - cache.set('key', 'value') - cache.remove('key') - - with pytest.raises(KeyError): - cache.get('key') - - -def test_remove_missing_key(cache): - '''Fail to remove missing key.''' - with pytest.raises(KeyError): - cache.remove('key') - - -def test_keys(cache): - '''Retrieve keys of items in cache.''' - assert cache.keys() == [] - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - assert sorted(cache.keys()) == sorted(['a', 'b', 'c']) - - -def test_clear(cache): - '''Remove items from cache.''' - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - - assert cache.keys() - cache.clear() - - assert not cache.keys() - - -def test_clear_using_pattern(cache): - '''Remove items that match pattern from cache.''' - cache.set('matching_key', 'value') - cache.set('another_matching_key', 'value') - cache.set('key_not_matching', 'value') - - assert cache.keys() - cache.clear(pattern='.*matching_key$') - - assert cache.keys() == ['key_not_matching'] - - -def test_clear_encountering_missing_key(cache, mocker): - '''Clear missing key.''' - # Force reporting keys that are not actually valid for test purposes. - mocker.patch.object(cache, 'keys', lambda: ['missing']) - assert cache.keys() == ['missing'] - - # Should not error even though key not valid. - cache.clear() - - # The key was not successfully removed so should still be present. - assert cache.keys() == ['missing'] - - -def test_layered_cache_propagates_value_on_get(): - '''Layered cache propagates value on get.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] - - cache = ftrack_api.cache.LayeredCache(caches) - - # Set item on second level cache only. 
- caches[1].set('key', 'value') - - # Retrieving key via layered cache should propagate it automatically to - # higher level caches only. - assert cache.get('key') == 'value' - assert caches[0].get('key') == 'value' - - with pytest.raises(KeyError): - caches[2].get('key') - - -def test_layered_cache_remove_at_depth(): - '''Remove key that only exists at depth in LayeredCache.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] - - cache = ftrack_api.cache.LayeredCache(caches) - - # Set item on second level cache only. - caches[1].set('key', 'value') - - # Removing key that only exists at depth should not raise key error. - cache.remove('key') - - # Ensure key was removed. - assert not cache.keys() - - -def test_expand_references(): - '''Test that references are expanded from serialized cache.''' - - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - def make_cache(session, cache_path): - '''Create a serialised file cache.''' - serialized_file_cache = ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - - return serialized_file_cache - - # Populate the serialized file cache. - session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - expanded_results = dict() - - query_string = 'select asset.parent from AssetVersion where asset is_not None limit 10' - - for sequence in session.query(query_string): - asset = sequence.get('asset') - - expanded_results.setdefault( - asset.get('id'), asset.get('parent') - ) - - # Fetch the data from cache. - new_session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - - new_session_two = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - - # Make sure references are merged. 
- for sequence in new_session.query(query_string): - asset = sequence.get('asset') - - assert ( - asset.get('parent') == expanded_results[asset.get('id')] - ) - - # Use for fetching directly using get. - assert ( - new_session_two.get(asset.entity_type, asset.get('id')).get('parent') == - expanded_results[asset.get('id')] - ) - - - -@pytest.mark.parametrize('items, key', [ - (({},), '{}'), - (({}, {}), '{}{}') -], ids=[ - 'single object', - 'multiple objects' -]) -def test_string_key_maker_key(items, key): - '''Generate key using string key maker.''' - key_maker = ftrack_api.cache.StringKeyMaker() - assert key_maker.key(*items) == key - - -@pytest.mark.parametrize('items, key', [ - ( - ({},), - '\x01\x01' - ), - ( - ({'a': 'b'}, [1, 2]), - '\x01' - '\x80\x02U\x01a.' '\x02' '\x80\x02U\x01b.' - '\x01' - '\x00' - '\x03' - '\x80\x02K\x01.' '\x00' '\x80\x02K\x02.' - '\x03' - ), - ( - (function,), - '\x04function\x00unit.test_cache' - ), - ( - (Class,), - '\x04Class\x00unit.test_cache' - ), - ( - (Class.method,), - '\x04method\x00Class\x00unit.test_cache' - ), - ( - (callable,), - '\x04callable' - ) -], ids=[ - 'single mapping', - 'multiple objects', - 'function', - 'class', - 'method', - 'builtin' -]) -def test_object_key_maker_key(items, key): - '''Generate key using string key maker.''' - key_maker = ftrack_api.cache.ObjectKeyMaker() - assert key_maker.key(*items) == key - - -def test_memoised_call(): - '''Call memoised function.''' - memoiser = ftrack_api.cache.Memoiser() - - # Initial call should not be memoised so function is executed. - assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=False - ) - - # Identical call should be memoised so function is not executed again. - assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=True - ) - - # Differing call is not memoised so function is executed. 
- assert_memoised_call( - memoiser, function, args=(3,), expected={'result': 5}, memoised=False - ) - - -def test_memoised_call_variations(): - '''Call memoised function with identical arguments using variable format.''' - memoiser = ftrack_api.cache.Memoiser() - expected = {'result': 3} - - # Call function once to ensure is memoised. - assert_memoised_call( - memoiser, function, args=(1,), expected=expected, memoised=False - ) - - # Each of the following calls should equate to the same key and make - # use of the memoised value. - for args, kw in [ - ((), {'x': 1}), - ((), {'x': 1, 'y': 2}), - ((1,), {'y': 2}), - ((1,), {}) - ]: - assert_memoised_call( - memoiser, function, args=args, kw=kw, expected=expected - ) - - # The following calls should all be treated as new variations and so - # not use any memoised value. - assert_memoised_call( - memoiser, function, kw={'x': 2}, expected={'result': 4}, memoised=False - ) - assert_memoised_call( - memoiser, function, kw={'x': 3, 'y': 2}, expected={'result': 5}, - memoised=False - ) - assert_memoised_call( - memoiser, function, args=(4, ), kw={'y': 2}, expected={'result': 6}, - memoised=False - ) - assert_memoised_call( - memoiser, function, args=(5, ), expected={'result': 7}, memoised=False - ) - - -def test_memoised_mutable_return_value(): - '''Avoid side effects for returned mutable arguments when memoising.''' - memoiser = ftrack_api.cache.Memoiser() - arguments = ({'called': False}, 1) - - result_a = memoiser.call(function, arguments) - assert result_a == {'result': 3} - assert arguments[0]['called'] - - # Modify mutable externally and check that stored memoised value is - # unchanged. 
- del result_a['result'] - - arguments[0]['called'] = False - result_b = memoiser.call(function, arguments) - - assert result_b == {'result': 3} - assert not arguments[0]['called'] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py deleted file mode 100644 index 15c3e5cf39..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py +++ /dev/null @@ -1,574 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy -import uuid - -import mock -import pytest - -import ftrack_api.collection -import ftrack_api.symbol -import ftrack_api.inspection -import ftrack_api.exception -import ftrack_api.operation - - -def create_mock_entity(session): - '''Return new mock entity for *session*.''' - entity = mock.MagicMock() - entity.session = session - entity.primary_key_attributes = ['id'] - entity['id'] = str(uuid.uuid4()) - return entity - - -@pytest.fixture -def mock_entity(session): - '''Return mock entity.''' - return create_mock_entity(session) - - -@pytest.fixture -def mock_entities(session): - '''Return list of two mock entities.''' - return [ - create_mock_entity(session), - create_mock_entity(session) - ] - - -@pytest.fixture -def mock_attribute(): - '''Return mock attribute.''' - attribute = mock.MagicMock() - attribute.name = 'test' - return attribute - - -def test_collection_initialisation_does_not_modify_entity_state( - mock_entity, mock_attribute, mock_entities -): - '''Initialising collection does not modify entity state.''' - ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - assert ftrack_api.inspection.state(mock_entity) is ftrack_api.symbol.NOT_SET - - -def test_immutable_collection_initialisation( - mock_entity, mock_attribute, mock_entities -): - '''Initialise immutable 
collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - assert list(collection) == mock_entities - assert collection.mutable is False - - -def test_collection_shallow_copy( - mock_entity, mock_attribute, mock_entities, session -): - '''Shallow copying collection should avoid indirect mutation.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with mock_entity.session.operation_recording(False): - collection_copy = copy.copy(collection) - new_entity = create_mock_entity(session) - collection_copy.append(new_entity) - - assert list(collection) == mock_entities - assert list(collection_copy) == mock_entities + [new_entity] - - -def test_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Insert a value into collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - new_entity = create_mock_entity(session) - collection.insert(0, new_entity) - assert list(collection) == [new_entity] + mock_entities - - -def test_collection_insert_duplicate( - mock_entity, mock_attribute, mock_entities -): - '''Fail to insert a duplicate value into collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): - collection.insert(0, mock_entities[1]) - - -def test_immutable_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Fail to insert a value into immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - collection.insert(0, create_mock_entity(session)) - - -def test_collection_set_item( - mock_entity, mock_attribute, mock_entities, 
session -): - '''Set item at index in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - new_entity = create_mock_entity(session) - collection[0] = new_entity - assert list(collection) == [new_entity, mock_entities[1]] - - -def test_collection_re_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Re-set value at exact same index in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - collection[0] = mock_entities[0] - assert list(collection) == mock_entities - - -def test_collection_set_duplicate_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set a duplicate value into collection at different index.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): - collection[0] = mock_entities[1] - - -def test_immutable_collection_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set item at index in immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - collection[0] = mock_entities[0] - - -def test_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Remove item at index from collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - del collection[0] - assert list(collection) == [mock_entities[1]] - - -def test_collection_delete_item_at_invalid_index( - mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at missing index from immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(IndexError): - 
del collection[4] - - -def test_immutable_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at index from immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - del collection[0] - - -def test_collection_count( - mock_entity, mock_attribute, mock_entities, session -): - '''Count items in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - assert len(collection) == 2 - - collection.append(create_mock_entity(session)) - assert len(collection) == 3 - - del collection[0] - assert len(collection) == 2 - - -@pytest.mark.parametrize('other, expected', [ - ([], False), - ([1, 2], True), - ([1, 2, 3], False), - ([1], False) -], ids=[ - 'empty', - 'same', - 'additional', - 'missing' -]) -def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected): - '''Determine collection equality against another collection.''' - # Temporarily override determination of entity identity so that it works - # against simple scalar values for purpose of test. - mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) - - collection_a = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=[1, 2] - ) - - collection_b = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=other - ) - assert (collection_a == collection_b) is expected - - -def test_collection_not_equal_to_non_collection( - mocker, mock_entity, mock_attribute -): - '''Collection not equal to a non-collection.''' - # Temporarily override determination of entity identity so that it works - # against simple scalar values for purpose of test. 
- mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) - - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=[1, 2] - ) - - assert (collection != {}) is True - - -def test_collection_notify_on_modification( - mock_entity, mock_attribute, mock_entities, session -): - '''Record UpdateEntityOperation on collection modification.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - assert len(session.recorded_operations) == 0 - - collection.append(create_mock_entity(session)) - assert len(session.recorded_operations) == 1 - operation = session.recorded_operations.pop() - assert isinstance(operation, ftrack_api.operation.UpdateEntityOperation) - assert operation.new_value == collection - - -def test_mapped_collection_proxy_shallow_copy(new_project, unique_name): - '''Shallow copying mapped collection proxy avoids indirect mutation.''' - metadata = new_project['metadata'] - - with new_project.session.operation_recording(False): - metadata_copy = copy.copy(metadata) - metadata_copy[unique_name] = True - - assert unique_name not in metadata - assert unique_name in metadata_copy - - -def test_mapped_collection_proxy_mutable_property(new_project): - '''Mapped collection mutable property maps to underlying collection.''' - metadata = new_project['metadata'] - - assert metadata.mutable is True - assert metadata.collection.mutable is True - - metadata.mutable = False - assert metadata.collection.mutable is False - - -def test_mapped_collection_proxy_attribute_property( - new_project, mock_attribute -): - '''Mapped collection attribute property maps to underlying collection.''' - metadata = new_project['metadata'] - - assert metadata.attribute is metadata.collection.attribute - - metadata.attribute = mock_attribute - assert metadata.collection.attribute is mock_attribute - - -def test_mapped_collection_proxy_get_item(new_project, unique_name): - '''Retrieve 
item in mapped collection proxy.''' - session = new_project.session - - # Prepare data. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Check in clean session retrieval of value. - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value - - -def test_mapped_collection_proxy_set_item(new_project, unique_name): - '''Set new item in mapped collection proxy.''' - session = new_project.session - - metadata = new_project['metadata'] - assert unique_name not in metadata - - value = 'value' - metadata[unique_name] = value - assert metadata[unique_name] == value - - # Check change persisted correctly. - session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value - - -def test_mapped_collection_proxy_update_item(new_project, unique_name): - '''Update existing item in mapped collection proxy.''' - session = new_project.session - - # Prepare a pre-existing value. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Set new value. - new_value = 'new_value' - metadata[unique_name] = new_value - - # Confirm change persisted correctly. 
- session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == new_value - - -def test_mapped_collection_proxy_delete_item(new_project, unique_name): - '''Remove existing item from mapped collection proxy.''' - session = new_project.session - - # Prepare a pre-existing value to remove. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Now remove value. - del new_project['metadata'][unique_name] - assert unique_name not in new_project['metadata'] - - # Confirm change persisted correctly. - session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [] - assert unique_name not in retrieved['metadata'] - - -def test_mapped_collection_proxy_delete_missing_item(new_project, unique_name): - '''Fail to remove item for missing key from mapped collection proxy.''' - metadata = new_project['metadata'] - assert unique_name not in metadata - with pytest.raises(KeyError): - del metadata[unique_name] - - -def test_mapped_collection_proxy_iterate_keys(new_project, unique_name): - '''Iterate over keys in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) - - # Commit here as otherwise cleanup operation will fail because transaction - # will include updating metadata to refer to a deleted entity. 
- new_project.session.commit() - - iterated = set() - for key in metadata: - iterated.add(key) - - assert iterated == set(['a', 'b', 'c']) - - -def test_mapped_collection_proxy_count(new_project, unique_name): - '''Count items in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) - - # Commit here as otherwise cleanup operation will fail because transaction - # will include updating metadata to refer to a deleted entity. - new_project.session.commit() - - assert len(metadata) == 3 - - -def test_mapped_collection_on_create(session, unique_name, project): - '''Test that it is possible to set relational attributes on create''' - metadata = { - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - } - - task_id = session.create( - 'Task', { - 'name': unique_name, - 'parent': project, - 'metadata': metadata, - - } - ).get('id') - - session.commit() - - # Reset the session and check that we have the expected - # values. - session.reset() - - task = session.get( - 'Task', task_id - ) - - for key, value in metadata.items(): - assert value == task['metadata'][key] - - -def test_collection_refresh(new_asset_version, new_component): - '''Test collection reload.''' - session_two = ftrack_api.Session(auto_connect_event_hub=False) - - query_string = 'select components from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() - - # Modify our asset version - new_asset_version.get('components').append( - new_component - ) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure we get the newly - # populated data. 
- session_two.query( - query_string - ).all() - - assert ( - new_asset_version.get('components') == new_asset_version_two.get('components') - ) - - # Make a local change to our asset version - new_asset_version_two.get('components').pop() - - # Query the same asset version again and make sure our local changes - # are not overwritten. - - session_two.query( - query_string - ).all() - - assert len(new_asset_version_two.get('components')) == 0 - - -def test_mapped_collection_reload(new_asset_version): - '''Test mapped collection reload.''' - session_two = ftrack_api.Session(auto_connect_event_hub=False) - - query_string = 'select metadata from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() - - # Modify our asset version - new_asset_version['metadata']['test'] = str(uuid.uuid4()) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure we get the newly - # populated data. - session_two.query( - query_string - ).all() - - assert ( - new_asset_version['metadata']['test'] == new_asset_version_two['metadata']['test'] - ) - - local_data = str(uuid.uuid4()) - - new_asset_version_two['metadata']['test'] = local_data - - # Modify our asset version again - new_asset_version['metadata']['test'] = str(uuid.uuid4()) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure our local changes - # are not overwritten. 
- session_two.query( - query_string - ).all() - - assert ( - new_asset_version_two['metadata']['test'] == local_data - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py deleted file mode 100644 index 7a9b0fadaa..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py +++ /dev/null @@ -1,251 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api - -@pytest.fixture( - params=[ - 'AssetVersion', 'Shot', 'AssetVersionList', 'TypedContextList', 'User', - 'Asset' - ] -) -def new_entity_and_custom_attribute(request, session): - '''Return tuple with new entity, custom attribute name and value.''' - if request.param == 'AssetVersion': - entity = session.create( - request.param, { - 'asset': session.query('Asset').first() - } - ) - return (entity, 'versiontest', 123) - - elif request.param == 'Shot': - sequence = session.query('Sequence').first() - entity = session.create( - request.param, { - 'parent_id': sequence['id'], - 'project_id': sequence['project_id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'fstart', 1005) - - elif request.param == 'Asset': - shot = session.query('Shot').first() - entity = session.create( - request.param, { - 'context_id': shot['project_id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'htest', 1005) - - elif request.param in ('AssetVersionList', 'TypedContextList'): - entity = session.create( - request.param, { - 'project_id': session.query('Project').first()['id'], - 'category_id': session.query('ListCategory').first()['id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'listbool', True) - - elif request.param == 'User': - entity = session.create( - request.param, { - 'first_name': 'Custom attribute 
test', - 'last_name': 'Custom attribute test', - 'username': str(uuid.uuid1()) - } - ) - return (entity, 'teststring', 'foo') - - -@pytest.mark.parametrize( - 'entity_type, entity_model_name, custom_attribute_name', - [ - ('Task', 'task', 'customNumber'), - ('AssetVersion', 'assetversion', 'NumberField') - ], - ids=[ - 'task', - 'asset_version' - ] -) -def test_read_set_custom_attribute( - session, entity_type, entity_model_name, custom_attribute_name -): - '''Retrieve custom attribute value set on instance.''' - custom_attribute_value = session.query( - 'CustomAttributeValue where configuration.key is ' - '{custom_attribute_name}' - .format( - custom_attribute_name=custom_attribute_name - ) - ).first() - - entity = session.query( - 'select custom_attributes from {entity_type} where id is ' - '{entity_id}'.format( - entity_type=entity_type, - entity_id=custom_attribute_value['entity_id'], - ) - ).first() - - assert custom_attribute_value - - assert entity['id'] == entity['custom_attributes'].collection.entity['id'] - assert entity is entity['custom_attributes'].collection.entity - assert ( - entity['custom_attributes'][custom_attribute_name] == - custom_attribute_value['value'] - ) - - assert custom_attribute_name in entity['custom_attributes'].keys() - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'customNumber'), - ('Shot', 'fstart'), - ( - 'AssetVersion', 'NumberField' - ) - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_write_set_custom_attribute_value( - session, entity_type, custom_attribute_name -): - '''Overwrite existing instance level custom attribute value.''' - entity = session.query( - 'select custom_attributes from {entity_type} where ' - 'custom_attributes.configuration.key is {custom_attribute_name}'.format( - entity_type=entity_type, - custom_attribute_name=custom_attribute_name - ) - ).first() - - entity['custom_attributes'][custom_attribute_name] = 42 - - assert 
entity['custom_attributes'][custom_attribute_name] == 42 - - session.commit() - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'fstart'), - ('Shot', 'Not existing'), - ('AssetVersion', 'fstart') - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_read_custom_attribute_that_does_not_exist( - session, entity_type, custom_attribute_name -): - '''Fail to read value from a custom attribute that does not exist.''' - entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) - ).first() - - with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'fstart'), - ('Shot', 'Not existing'), - ('AssetVersion', 'fstart') - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_write_custom_attribute_that_does_not_exist( - session, entity_type, custom_attribute_name -): - '''Fail to write a value to a custom attribute that does not exist.''' - entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) - ).first() - - with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] = 42 - - -def test_set_custom_attribute_on_new_but_persisted_version( - session, new_asset_version -): - '''Set custom attribute on new persisted version.''' - new_asset_version['custom_attributes']['versiontest'] = 5 - session.commit() - - -@pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' 
-) -def test_batch_create_entity_and_custom_attributes( - new_entity_and_custom_attribute -): - '''Write custom attribute value and entity in the same batch.''' - entity, name, value = new_entity_and_custom_attribute - session = entity.session - entity['custom_attributes'][name] = value - - assert entity['custom_attributes'][name] == value - session.commit() - - assert entity['custom_attributes'][name] == value - - -def test_refresh_custom_attribute(new_asset_version): - '''Test custom attribute refresh.''' - session_two = ftrack_api.Session() - - query_string = 'select custom_attributes from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - asset_version_two = session_two.query( - query_string - ).first() - - new_asset_version['custom_attributes']['versiontest'] = 42 - - new_asset_version.session.commit() - - asset_version_two = session_two.query( - query_string - ).first() - - assert ( - new_asset_version['custom_attributes']['versiontest'] == - asset_version_two['custom_attributes']['versiontest'] - ) - - - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py deleted file mode 100644 index c53dda9630..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py +++ /dev/null @@ -1,129 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile - -import pytest - -import ftrack_api.data - - -@pytest.fixture() -def content(): - '''Return initial content.''' - return 'test data' - - -@pytest.fixture(params=['file', 'file_wrapper', 'string']) -def data(request, content): - '''Return cache.''' - - if request.param == 'string': - data_object = ftrack_api.data.String(content) - - elif request.param == 'file': - file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') - 
file_object.write(content) - file_object.flush() - file_object.close() - - data_object = ftrack_api.data.File(path, 'r+') - - def cleanup(): - '''Cleanup.''' - data_object.close() - os.remove(path) - - request.addfinalizer(cleanup) - - elif request.param == 'file_wrapper': - file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') - file_object.write(content) - file_object.seek(0) - - data_object = ftrack_api.data.FileWrapper(file_object) - - def cleanup(): - '''Cleanup.''' - data_object.close() - os.remove(path) - - request.addfinalizer(cleanup) - - else: - raise ValueError('Unrecognised parameter: {0}'.format(request.param)) - - return data_object - - -def test_read(data, content): - '''Return content from current position up to *limit*.''' - assert data.read(5) == content[:5] - assert data.read() == content[5:] - - -def test_write(data, content): - '''Write content at current position.''' - assert data.read() == content - data.write('more test data') - data.seek(0) - assert data.read() == content + 'more test data' - - -def test_flush(data): - '''Flush buffers ensuring data written.''' - # TODO: Implement better test than just calling function. 
- data.flush() - - -def test_seek(data, content): - '''Move internal pointer to *position*.''' - data.seek(5) - assert data.read() == content[5:] - - -def test_tell(data): - '''Return current position of internal pointer.''' - assert data.tell() == 0 - data.seek(5) - assert data.tell() == 5 - - -def test_close(data): - '''Flush buffers and prevent further access.''' - data.close() - with pytest.raises(ValueError) as error: - data.read() - - assert 'I/O operation on closed file' in str(error.value) - - -class Dummy(ftrack_api.data.Data): - '''Dummy string.''' - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - def write(self, content): - '''Write content at current position.''' - - -def test_unsupported_tell(): - '''Fail when tell unsupported.''' - data = Dummy() - with pytest.raises(NotImplementedError) as error: - data.tell() - - assert 'Tell not supported' in str(error.value) - - -def test_unsupported_seek(): - '''Fail when seek unsupported.''' - data = Dummy() - with pytest.raises(NotImplementedError) as error: - data.seek(5) - - assert 'Seek not supported' in str(error.value) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py deleted file mode 100644 index ae565cb3f5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py +++ /dev/null @@ -1,70 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import termcolor - -import ftrack_api.formatter - - -def colored(text, *args, **kwargs): - '''Pass through so there are no escape sequences in output.''' - return text - - -def test_format(user, mocker): - '''Return formatted representation of entity.''' - mocker.patch.object(termcolor, 'colored', colored) - - result = ftrack_api.formatter.format(user) - - # Cannot test entire string 
as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: jenkins' in result - assert ' email: ' in result - - -def test_format_using_custom_formatters(user): - '''Return formatted representation of entity using custom formatters.''' - result = ftrack_api.formatter.format( - user, formatters={ - 'header': lambda text: '*{0}*'.format(text), - 'label': lambda text: '-{0}'.format(text) - } - ) - - # Cannot test entire string as too variable so check for key text. - assert result.startswith('*User*\n') - assert ' -username: jenkins' in result - assert ' -email: ' in result - - -def test_format_filtering(new_user, mocker): - '''Return formatted representation using custom filter.''' - mocker.patch.object(termcolor, 'colored', colored) - - with new_user.session.auto_populating(False): - result = ftrack_api.formatter.format( - new_user, - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] - ) - - # Cannot test entire string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: {0}'.format(new_user['username']) in result - assert ' email: ' not in result - - -def test_format_recursive(user, mocker): - '''Return formatted recursive representation.''' - mocker.patch.object(termcolor, 'colored', colored) - - user.session.populate(user, 'timelogs.user') - - with user.session.auto_populating(False): - result = ftrack_api.formatter.format(user, recursive=True) - - # Cannot test entire string as too variable so check for key text. 
- assert result.startswith('User\n') - assert ' username: jenkins' - assert ' timelogs: Timelog' in result - assert ' user: User{...}' in result diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py deleted file mode 100644 index 57b44613a8..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py +++ /dev/null @@ -1,101 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.inspection -import ftrack_api.symbol - - -def test_identity(user): - '''Retrieve identity of *user*.''' - identity = ftrack_api.inspection.identity(user) - assert identity[0] == 'User' - assert identity[1] == ['d07ae5d0-66e1-11e1-b5e9-f23c91df25eb'] - - -def test_primary_key(user): - '''Retrieve primary key of *user*.''' - primary_key = ftrack_api.inspection.primary_key(user) - assert primary_key == { - 'id': 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb' - } - - -def test_created_entity_state(session, unique_name): - '''Created entity has CREATED state.''' - new_user = session.create('User', {'username': unique_name}) - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - # Even after a modification the state should remain as CREATED. 
- new_user['username'] = 'changed' - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - -def test_retrieved_entity_state(user): - '''Retrieved entity has NOT_SET state.''' - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.NOT_SET - - -def test_modified_entity_state(user): - '''Modified entity has MODIFIED state.''' - user['username'] = 'changed' - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.MODIFIED - - -def test_deleted_entity_state(session, user): - '''Deleted entity has DELETED state.''' - session.delete(user) - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.DELETED - - -def test_post_commit_entity_state(session, unique_name): - '''Entity has NOT_SET state post commit.''' - new_user = session.create('User', {'username': unique_name}) - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - session.commit() - - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.NOT_SET - - -def test_states(session, unique_name, user): - '''Determine correct states for multiple entities.''' - # NOT_SET - user_a = session.create('User', {'username': unique_name}) - session.commit() - - # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' - - # MODIFIED - user_c = user - user_c['username'] = 'changed' - - # DELETED - user_d = session.create('User', {'username': unique_name}) - session.delete(user_d) - - # Assert states. 
- states = ftrack_api.inspection.states([user_a, user_b, user_c, user_d]) - - assert states == [ - ftrack_api.symbol.NOT_SET, - ftrack_api.symbol.CREATED, - ftrack_api.symbol.MODIFIED, - ftrack_api.symbol.DELETED - ] - - -def test_states_for_no_entities(): - '''Return empty list of states when no entities passed.''' - states = ftrack_api.inspection.states([]) - assert states == [] - - -def test_skip_operations_for_non_inspected_entities(session, unique_name): - '''Skip operations for non inspected entities.''' - user_a = session.create('User', {'username': unique_name + '-1'}) - user_b = session.create('User', {'username': unique_name + '-2'}) - - states = ftrack_api.inspection.states([user_a]) - assert states == [ftrack_api.symbol.CREATED] diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py deleted file mode 100644 index 702bfae355..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py +++ /dev/null @@ -1,79 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.operation - - -def test_operations_initialise(): - '''Initialise empty operations stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - -def test_operations_push(): - '''Push new operation onto stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operation = ftrack_api.operation.Operation() - operations.push(operation) - assert list(operations)[-1] is operation - - -def test_operations_pop(): - '''Pop and return operation from stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - operation = ftrack_api.operation.Operation() - 
operations.push(operation) - - assert len(operations) == 3 - popped = operations.pop() - assert popped is operation - assert len(operations) == 2 - - -def test_operations_count(): - '''Count operations in stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operations.push(ftrack_api.operation.Operation()) - assert len(operations) == 1 - - operations.pop() - assert len(operations) == 0 - - -def test_operations_clear(): - '''Clear operations stack.''' - operations = ftrack_api.operation.Operations() - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - assert len(operations) == 3 - - operations.clear() - assert len(operations) == 0 - - -def test_operations_iter(): - '''Iterate over operations stack.''' - operations = ftrack_api.operation.Operations() - operation_a = ftrack_api.operation.Operation() - operation_b = ftrack_api.operation.Operation() - operation_c = ftrack_api.operation.Operation() - - operations.push(operation_a) - operations.push(operation_b) - operations.push(operation_c) - - assert len(operations) == 3 - for operation, expected in zip( - operations, [operation_a, operation_b, operation_c] - ): - assert operation is expected - diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py deleted file mode 100644 index 247b496d96..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api - - -class Class(object): - '''Class.''' - - -class Mixin(object): - '''Mixin.''' - - def method(self): - '''Method.''' - return True - - -def test_mixin(): - '''Mixin class to instance.''' - instance_a = Class() 
- instance_b = Class() - - assert not hasattr(instance_a, 'method') - assert not hasattr(instance_b, 'method') - - ftrack_api.mixin(instance_a, Mixin) - - assert hasattr(instance_a, 'method') - assert instance_a.method() is True - assert not hasattr(instance_b, 'method') - - -def test_mixin_same_class_multiple_times(): - '''Mixin class to instance multiple times.''' - instance = Class() - assert not hasattr(instance, 'method') - assert len(instance.__class__.mro()) == 2 - - ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') - assert instance.method() is True - assert len(instance.__class__.mro()) == 4 - - ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') - assert instance.method() is True - assert len(instance.__class__.mro()) == 4 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py deleted file mode 100644 index 252c813a9b..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py +++ /dev/null @@ -1,192 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import textwrap -import logging -import re - -import pytest - -import ftrack_api.plugin - - -@pytest.fixture() -def valid_plugin(temporary_path): - '''Return path to directory containing a valid plugin.''' - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(*args, **kw): - print "Registered", args, kw - ''')) - - return temporary_path - - -@pytest.fixture() -def python_non_plugin(temporary_path): - '''Return path to directory containing Python file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - print "Not a plugin" - - def not_called(): - print "Not called" - ''')) 
- - return temporary_path - - -@pytest.fixture() -def non_plugin(temporary_path): - '''Return path to directory containing file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.txt'), 'w') as file_object: - file_object.write('Never seen') - - return temporary_path - - -@pytest.fixture() -def broken_plugin(temporary_path): - '''Return path to directory containing broken plugin.''' - with open(os.path.join(temporary_path, 'broken.py'), 'w') as file_object: - file_object.write('syntax error') - - return temporary_path - - -@pytest.fixture() -def plugin(request, temporary_path): - '''Return path containing a plugin with requested specification.''' - specification = request.param - output = re.sub('(\w+)=\w+', '"\g<1>={}".format(\g<1>)', specification) - output = re.sub('\*args', 'args', output) - output = re.sub('\*\*kwargs', 'sorted(kwargs.items())', output) - - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - content = textwrap.dedent(''' - def register({}): - print {} - '''.format(specification, output)) - file_object.write(content) - - return temporary_path - - -def test_discover_empty_paths(capsys): - '''Discover no plugins when paths are empty.''' - ftrack_api.plugin.discover([' ']) - output, error = capsys.readouterr() - assert not output - assert not error - - -def test_discover_valid_plugin(valid_plugin, capsys): - '''Discover valid plugin.''' - ftrack_api.plugin.discover([valid_plugin], (1, 2), {'3': 4}) - output, error = capsys.readouterr() - assert 'Registered (1, 2) {\'3\': 4}' in output - - -def test_discover_python_non_plugin(python_non_plugin, capsys): - '''Discover Python non plugin.''' - ftrack_api.plugin.discover([python_non_plugin]) - output, error = capsys.readouterr() - assert 'Not a plugin' in output - assert 'Not called' not in output - - -def test_discover_non_plugin(non_plugin, capsys): - '''Discover non plugin.''' - ftrack_api.plugin.discover([non_plugin]) - output, error = capsys.readouterr() - 
assert not output - assert not error - - -def test_discover_broken_plugin(broken_plugin, caplog): - '''Discover broken plugin.''' - ftrack_api.plugin.discover([broken_plugin]) - - records = caplog.records() - assert len(records) == 1 - assert records[0].levelno is logging.WARNING - assert 'Failed to load plugin' in records[0].message - - -@pytest.mark.parametrize( - 'plugin, positional, keyword, expected', - [ - ( - 'a, b=False, c=False, d=False', - (1, 2), {'c': True, 'd': True, 'e': True}, - '1 b=2 c=True d=True' - ), - ( - '*args', - (1, 2), {'b': True, 'c': False}, - '(1, 2)' - ), - ( - '**kwargs', - tuple(), {'b': True, 'c': False}, - '[(\'b\', True), (\'c\', False)]' - ), - ( - 'a=False, b=False', - (True,), {'b': True}, - 'a=True b=True' - ), - ( - 'a, c=False, *args', - (1, 2, 3, 4), {}, - '1 c=2 (3, 4)' - ), - ( - 'a, c=False, **kwargs', - tuple(), {'a': 1, 'b': 2, 'c': 3, 'd': 4}, - '1 c=3 [(\'b\', 2), (\'d\', 4)]' - ), - ], - indirect=['plugin'], - ids=[ - 'mixed-explicit', - 'variable-args-only', - 'variable-kwargs-only', - 'keyword-from-positional', - 'trailing-variable-args', - 'trailing-keyword-args' - ] -) -def test_discover_plugin_with_specific_signature( - plugin, positional, keyword, expected, capsys -): - '''Discover plugin passing only supported arguments.''' - ftrack_api.plugin.discover( - [plugin], positional, keyword - ) - output, error = capsys.readouterr() - assert expected in output - - -def test_discover_plugin_varying_signatures(temporary_path, capsys): - '''Discover multiple plugins with varying signatures.''' - with open(os.path.join(temporary_path, 'plugin_a.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(a): - print (a,) - ''')) - - with open(os.path.join(temporary_path, 'plugin_b.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(a, b=False): - print (a,), {'b': b} - ''')) - - ftrack_api.plugin.discover( - [temporary_path], (True,), {'b': True} - ) - - output, error = 
capsys.readouterr() - assert '(True,)'in output - assert '(True,) {\'b\': True}' in output diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py deleted file mode 100644 index f8e3f9dec3..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py +++ /dev/null @@ -1,164 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import math - -import pytest - -import ftrack_api -import ftrack_api.query -import ftrack_api.exception - - -def test_index(session): - '''Index into query result.''' - results = session.query('User') - assert isinstance(results[2], session.types['User']) - - -def test_len(session): - '''Return count of results using len.''' - results = session.query('User where username is jenkins') - assert len(results) == 1 - - -def test_all(session): - '''Return all results using convenience method.''' - results = session.query('User').all() - assert isinstance(results, list) - assert len(results) - - -def test_implicit_iteration(session): - '''Implicitly iterate through query result.''' - results = session.query('User') - assert isinstance(results, ftrack_api.query.QueryResult) - - records = [] - for record in results: - records.append(record) - - assert len(records) == len(results) - - -def test_one(session): - '''Return single result using convenience method.''' - user = session.query('User where username is jenkins').one() - assert user['username'] == 'jenkins' - - -def test_one_fails_for_no_results(session): - '''Fail to fetch single result when no results available.''' - with pytest.raises(ftrack_api.exception.NoResultFoundError): - session.query('User where username is does_not_exist').one() - - -def test_one_fails_for_multiple_results(session): - '''Fail to fetch single result when multiple results available.''' - with 
pytest.raises(ftrack_api.exception.MultipleResultsFoundError): - session.query('User').one() - - -def test_one_with_existing_limit(session): - '''Fail to return single result when existing limit in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').one() - - -def test_one_with_existing_offset(session): - '''Fail to return single result when existing offset in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins offset 2').one() - - -def test_one_with_prefetched_data(session): - '''Return single result ignoring prefetched data.''' - query = session.query('User where username is jenkins') - query.all() - - user = query.one() - assert user['username'] == 'jenkins' - - -def test_first(session): - '''Return first result using convenience method.''' - users = session.query('User').all() - - user = session.query('User').first() - assert user == users[0] - - -def test_first_returns_none_when_no_results(session): - '''Return None when no results available.''' - user = session.query('User where username is does_not_exist').first() - assert user is None - - -def test_first_with_existing_limit(session): - '''Fail to return first result when existing limit in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').first() - - -def test_first_with_existing_offset(session): - '''Return first result whilst respecting custom offset.''' - users = session.query('User').all() - - user = session.query('User offset 2').first() - assert user == users[2] - - -def test_first_with_prefetched_data(session): - '''Return first result ignoring prefetched data.''' - query = session.query('User where username is jenkins') - query.all() - - user = query.first() - assert user['username'] == 'jenkins' - - -def test_paging(session, mocker): - '''Page through results.''' - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 5 - 
query = session.query('User limit 50', page_size=page_size) - records = query.all() - - assert session.call.call_count == ( - math.ceil(len(records) / float(page_size)) - ) - - -def test_paging_respects_offset_and_limit(session, mocker): - '''Page through results respecting offset and limit.''' - users = session.query('User').all() - - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 6 - query = session.query('User offset 2 limit 8', page_size=page_size) - records = query.all() - - assert session.call.call_count == 2 - assert len(records) == 8 - assert records == users[2:10] - - -def test_paging_respects_limit_smaller_than_page_size(session, mocker): - '''Use initial limit when less than page size.''' - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 100 - query = session.query('User limit 10', page_size=page_size) - records = query.all() - - assert session.call.call_count == 1 - session.call.assert_called_once_with( - [{ - 'action': 'query', - 'expression': 'select id from User offset 0 limit 10' - }] - ) - - assert len(records) == 10 \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py deleted file mode 100644 index 5087efcc08..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py +++ /dev/null @@ -1,1519 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile -import functools -import uuid -import textwrap -import datetime -import json -import random - -import pytest -import mock -import arrow -import requests - -import ftrack_api -import ftrack_api.cache -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.session -import ftrack_api.collection - - 
-@pytest.fixture(params=['memory', 'persisted']) -def cache(request): - '''Return cache.''' - if request.param == 'memory': - cache = None # There is already a default Memory cache present. - elif request.param == 'persisted': - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - cache = lambda session: ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=functools.partial( - session.encode, entity_attribute_strategy='persisted_only' - ), - decode=session.decode - ) - - def cleanup(): - '''Cleanup.''' - try: - os.remove(cache_path) - except OSError: - # BSD DB (Mac OSX) implementation of the interface will append - # a .db extension. - os.remove(cache_path + '.db') - - request.addfinalizer(cleanup) - - return cache - - -@pytest.fixture() -def temporary_invalid_schema_cache(request): - '''Return schema cache path to invalid schema cache file.''' - schema_cache_path = os.path.join( - tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) - ) - - with open(schema_cache_path, 'w') as file_: - file_.write('${invalid json}') - - def cleanup(): - '''Cleanup.''' - os.remove(schema_cache_path) - - request.addfinalizer(cleanup) - - return schema_cache_path - - -@pytest.fixture() -def temporary_valid_schema_cache(request, mocked_schemas): - '''Return schema cache path to valid schema cache file.''' - schema_cache_path = os.path.join( - tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) - ) - - with open(schema_cache_path, 'w') as file_: - json.dump(mocked_schemas, file_, indent=4) - - def cleanup(): - '''Cleanup.''' - os.remove(schema_cache_path) - - request.addfinalizer(cleanup) - - return schema_cache_path - - -class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that should not cache newly created entities.''' - - def set(self, key, value): - '''Set *value* for *key*.''' - if isinstance(value, 
ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): - return - - super(SelectiveCache, self).set(key, value) - - -def test_get_entity(session, user): - '''Retrieve an entity by type and id.''' - matching = session.get(*ftrack_api.inspection.identity(user)) - assert matching == user - - -def test_get_non_existant_entity(session): - '''Retrieve a non-existant entity by type and id.''' - matching = session.get('User', 'non-existant-id') - assert matching is None - - -def test_get_entity_of_invalid_type(session): - '''Fail to retrieve an entity using an invalid type.''' - with pytest.raises(KeyError): - session.get('InvalidType', 'id') - - -def test_create(session): - '''Create entity.''' - user = session.create('User', {'username': 'martin'}) - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] == 'martin' - assert user['email'] is ftrack_api.symbol.NOT_SET - - -def test_create_using_only_defaults(session): - '''Create entity using defaults only.''' - user = session.create('User') - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET - - -def test_create_using_server_side_defaults(session): - '''Create entity using server side defaults.''' - user = session.create('User') - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET - - session.commit() - assert user['username'] is not ftrack_api.symbol.NOT_SET - - -def test_create_overriding_defaults(session): - '''Create entity overriding defaults.''' - uid = str(uuid.uuid4()) - user = session.create('User', {'id': uid}) - with session.auto_populating(False): - assert user['id'] == uid - - -def test_create_with_reference(session): - '''Create entity with a reference to another.''' - status = session.query('Status')[0] - 
task = session.create('Task', {'status': status}) - assert task['status'] is status - - -def test_ensure_new_entity(session, unique_name): - '''Ensure entity, creating first.''' - entity = session.ensure('User', {'username': unique_name}) - assert entity['username'] == unique_name - - -def test_ensure_entity_with_non_string_data_types(session): - '''Ensure entity against non-string data types, creating first.''' - datetime = arrow.get() - - task = session.query('Task').first() - user = session.query( - 'User where username is {}'.format(session.api_user) - ).first() - - first = session.ensure( - 'Timelog', - { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } - ) - - with mock.patch.object(session, 'create') as mocked: - session.ensure( - 'Timelog', - { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } - ) - assert not mocked.called - - assert first['start'] == datetime - assert first['duration'] == 10 - - -def test_ensure_entity_with_identifying_keys(session, unique_name): - '''Ensure entity, checking using keys subset and then creating.''' - entity = session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['username'] - ) - assert entity['username'] == unique_name - - -def test_ensure_entity_with_invalid_identifying_keys(session, unique_name): - '''Fail to ensure entity when identifying key missing from data.''' - with pytest.raises(KeyError): - session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['invalid'] - ) - - -def test_ensure_entity_with_missing_identifying_keys(session): - '''Fail to ensure entity when no identifying keys determined.''' - with pytest.raises(ValueError): - session.ensure('User', {}) - - -def test_ensure_existing_entity(session, unique_name): - '''Ensure existing entity.''' - entity = session.ensure('User', {'first_name': unique_name}) - - # Second call should 
not commit any new entity, just retrieve the existing. - with mock.patch.object(session, 'create') as mocked: - retrieved = session.ensure('User', {'first_name': unique_name}) - assert not mocked.called - assert retrieved == entity - - -def test_ensure_update_existing_entity(session, unique_name): - '''Ensure and update existing entity.''' - entity = session.ensure( - 'User', {'first_name': unique_name, 'email': 'anon@example.com'} - ) - assert entity['email'] == 'anon@example.com' - - # Second call should commit updates. - retrieved = session.ensure( - 'User', {'first_name': unique_name, 'email': 'test@example.com'}, - identifying_keys=['first_name'] - ) - assert retrieved == entity - assert retrieved['email'] == 'test@example.com' - - -def test_reconstruct_entity(session): - '''Reconstruct entity.''' - uid = str(uuid.uuid4()) - data = { - 'id': uid, - 'username': 'martin', - 'email': 'martin@example.com' - } - user = session.create('User', data, reconstructing=True) - - for attribute in user.attributes: - # No local attributes should be set. - assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET - - # Only remote attributes that had explicit values should be set. - value = attribute.get_remote_value(user) - if attribute.name in data: - assert value == data[attribute.name] - else: - assert value is ftrack_api.symbol.NOT_SET - - -def test_reconstruct_entity_does_not_apply_defaults(session): - '''Reconstruct entity does not apply defaults.''' - # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) - with session.auto_populating(False): - assert user['id'] is ftrack_api.symbol.NOT_SET - - -def test_reconstruct_empty_entity(session): - '''Reconstruct empty entity.''' - # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) - - for attribute in user.attributes: - # No local attributes should be set. 
- assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET - - # No remote attributes should be set. - assert attribute.get_remote_value(user) is ftrack_api.symbol.NOT_SET - - -def test_delete_operation_ordering(session, unique_name): - '''Delete entities in valid order.''' - # Construct entities. - project_schema = session.query('ProjectSchema').first() - project = session.create('Project', { - 'name': unique_name, - 'full_name': unique_name, - 'project_schema': project_schema - }) - - sequence = session.create('Sequence', { - 'name': unique_name, - 'parent': project - }) - - session.commit() - - # Delete in order that should succeed. - session.delete(sequence) - session.delete(project) - - session.commit() - - -def test_create_then_delete_operation_ordering(session, unique_name): - '''Create and delete entity in one transaction.''' - entity = session.create('User', {'username': unique_name}) - session.delete(entity) - session.commit() - - -def test_create_and_modify_to_have_required_attribute(session, unique_name): - '''Create and modify entity to have required attribute in transaction.''' - entity = session.create('Scope', {}) - other = session.create('Scope', {'name': unique_name}) - entity['name'] = '{0}2'.format(unique_name) - session.commit() - - -def test_ignore_in_create_entity_payload_values_set_to_not_set( - mocker, unique_name, session -): - '''Ignore in commit, created entity data set to NOT_SET''' - mocked = mocker.patch.object(session, 'call') - - # Should ignore 'email' attribute in payload. 
- new_user = session.create( - 'User', {'username': unique_name, 'email': 'test'} - ) - new_user['email'] = ftrack_api.symbol.NOT_SET - session.commit() - payloads = mocked.call_args[0][0] - assert len(payloads) == 1 - - -def test_ignore_operation_that_modifies_attribute_to_not_set( - mocker, session, user -): - '''Ignore in commit, operation that sets attribute value to NOT_SET''' - mocked = mocker.patch.object(session, 'call') - - # Should result in no call to server. - user['email'] = ftrack_api.symbol.NOT_SET - session.commit() - - assert not mocked.called - - -def test_operation_optimisation_on_commit(session, mocker): - '''Optimise operations on commit.''' - mocked = mocker.patch.object(session, 'call') - - user_a = session.create('User', {'username': 'bob'}) - user_a['username'] = 'foo' - user_a['email'] = 'bob@example.com' - - user_b = session.create('User', {'username': 'martin'}) - user_b['email'] = 'martin@ftrack.com' - - user_a['email'] = 'bob@example.com' - user_a['first_name'] = 'Bob' - - user_c = session.create('User', {'username': 'neverexist'}) - user_c['email'] = 'ignore@example.com' - session.delete(user_c) - - user_a_entity_key = ftrack_api.inspection.primary_key(user_a).values() - user_b_entity_key = ftrack_api.inspection.primary_key(user_b).values() - - session.commit() - - # The above operations should have translated into three payloads to call - # (two creates and one update). 
- payloads = mocked.call_args[0][0] - assert len(payloads) == 3 - - assert payloads[0]['action'] == 'create' - assert payloads[0]['entity_key'] == user_a_entity_key - assert set(payloads[0]['entity_data'].keys()) == set([ - '__entity_type__', 'id', 'resource_type', 'username' - ]) - - assert payloads[1]['action'] == 'create' - assert payloads[1]['entity_key'] == user_b_entity_key - assert set(payloads[1]['entity_data'].keys()) == set([ - '__entity_type__', 'id', 'resource_type', 'username', 'email' - ]) - - assert payloads[2]['action'] == 'update' - assert payloads[2]['entity_key'] == user_a_entity_key - assert set(payloads[2]['entity_data'].keys()) == set([ - '__entity_type__', 'email', 'first_name' - ]) - - -def test_state_collection(session, unique_name, user): - '''Session state collection holds correct entities.''' - # NOT_SET - user_a = session.create('User', {'username': unique_name}) - session.commit() - - # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' - - # MODIFIED - user_c = user - user_c['username'] = 'changed' - - # DELETED - user_d = session.create('User', {'username': unique_name}) - session.delete(user_d) - - assert session.created == [user_b] - assert session.modified == [user_c] - assert session.deleted == [user_d] - - -def test_get_entity_with_composite_primary_key(session, new_project): - '''Retrieve entity that uses a composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. 
- new_session = ftrack_api.Session() - retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() - ) - - assert retrieved_entity == entity - - -def test_get_entity_with_incomplete_composite_primary_key(session, new_project): - '''Fail to retrieve entity using incomplete composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. - new_session = ftrack_api.Session() - with pytest.raises(ValueError): - new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values()[0] - ) - - -def test_populate_entity(session, new_user): - '''Populate entity that uses single primary key.''' - with session.auto_populating(False): - assert new_user['email'] is ftrack_api.symbol.NOT_SET - - session.populate(new_user, 'email') - assert new_user['email'] is not ftrack_api.symbol.NOT_SET - - -def test_populate_entities(session, unique_name): - '''Populate multiple entities that use single primary key.''' - users = [] - for index in range(3): - users.append( - session.create( - 'User', {'username': '{0}-{1}'.format(unique_name, index)} - ) - ) - - session.commit() - - with session.auto_populating(False): - for user in users: - assert user['email'] is ftrack_api.symbol.NOT_SET - - session.populate(users, 'email') - - for user in users: - assert user['email'] is not ftrack_api.symbol.NOT_SET - - -def test_populate_entity_with_composite_primary_key(session, new_project): - '''Populate entity that uses a composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. 
- new_session = ftrack_api.Session() - retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() - ) - - # Manually change already populated remote value so can test it gets reset - # on populate call. - retrieved_entity.attributes.get('value').set_remote_value( - retrieved_entity, 'changed' - ) - - new_session.populate(retrieved_entity, 'value') - assert retrieved_entity['value'] == 'value' - - -@pytest.mark.parametrize('server_information, compatible', [ - ({}, False), - ({'version': '3.3.11'}, True), - ({'version': '3.3.12'}, True), - ({'version': '3.4'}, True), - ({'version': '3.4.1'}, True), - ({'version': '3.5.16'}, True), - ({'version': '3.3.10'}, False) -], ids=[ - 'No information', - 'Valid current version', - 'Valid higher version', - 'Valid higher version', - 'Valid higher version', - 'Valid higher version', - 'Invalid lower version' -]) -def test_check_server_compatibility( - server_information, compatible, session -): - '''Check server compatibility.''' - with mock.patch.dict( - session._server_information, server_information, clear=True - ): - if compatible: - session.check_server_compatibility() - else: - with pytest.raises(ftrack_api.exception.ServerCompatibilityError): - session.check_server_compatibility() - - -def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): - '''Encode entity using "all" entity_attribute_strategy.''' - new_bar = mocked_schema_session.create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id' - } - ) - - new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42, - 'number': 12345678.9, - 'boolean': False, - 'date': arrow.get('2015-11-18 15:24:09'), - 'bars': [new_bar] - } - ) - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='all' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "bars": [{"__entity_type__": "Bar", "id": "bar_unique_id"}], - 
"boolean": false, - "date": {"__type__": "datetime", "value": "2015-11-18T15:24:09+00:00"}, - "id": "a_unique_id", - "integer": 42, - "number": 12345678.9, - "string": "abc"} - ''').replace('\n', '') - - -def test_encode_entity_using_only_set_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "set_only" entity_attribute_strategy.''' - new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - } - ) - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='set_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "id": "a_unique_id", - "integer": 42, - "string": "abc"} - ''').replace('\n', '') - - -def test_encode_computed_attribute_using_persisted_only_attributes_strategy( - mocked_schema_session -): - '''Encode computed attribute, "persisted_only" entity_attribute_strategy.''' - new_bar = mocked_schema_session._create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id', - 'computed_value': 'FOO' - }, - reconstructing=True - ) - - encoded = mocked_schema_session.encode( - new_bar, entity_attribute_strategy='persisted_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Bar", - "id": "bar_unique_id", - "name": "myBar"} - ''').replace('\n', '') - - -def test_encode_entity_using_only_modified_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "modified_only" entity_attribute_strategy.''' - new_foo = mocked_schema_session._create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - }, - reconstructing=True - ) - - new_foo['string'] = 'Modified' - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='modified_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "id": "a_unique_id", - "string": "Modified"} - ''').replace('\n', '') - - -def test_encode_entity_using_invalid_strategy(session, new_task): - '''Fail to 
encode entity using invalid strategy.''' - with pytest.raises(ValueError): - session.encode(new_task, entity_attribute_strategy='invalid') - - -def test_encode_operation_payload(session): - '''Encode operation payload.''' - sequence_component = session.create_component( - "/path/to/sequence.%d.jpg [1]", location=None - ) - file_component = sequence_component["members"][0] - - encoded = session.encode([ - ftrack_api.session.OperationPayload({ - 'action': 'create', - 'entity_data': { - '__entity_type__': u'FileComponent', - u'container': sequence_component, - 'id': file_component['id'] - }, - 'entity_key': [file_component['id']], - 'entity_type': u'FileComponent' - }), - ftrack_api.session.OperationPayload({ - 'action': 'update', - 'entity_data': { - '__entity_type__': u'SequenceComponent', - u'members': ftrack_api.collection.Collection( - sequence_component, - sequence_component.attributes.get('members'), - data=[file_component] - ) - }, - 'entity_key': [sequence_component['id']], - 'entity_type': u'SequenceComponent' - }) - ]) - - expected = textwrap.dedent(''' - [{{"action": "create", - "entity_data": {{"__entity_type__": "FileComponent", - "container": {{"__entity_type__": "SequenceComponent", - "id": "{0[id]}"}}, - "id": "{1[id]}"}}, - "entity_key": ["{1[id]}"], - "entity_type": "FileComponent"}}, - {{"action": "update", - "entity_data": {{"__entity_type__": "SequenceComponent", - "members": [{{"__entity_type__": "FileComponent", "id": "{1[id]}"}}]}}, - "entity_key": ["{0[id]}"], - "entity_type": "SequenceComponent"}}] - '''.format(sequence_component, file_component)).replace('\n', '') - - assert encoded == expected - - -def test_decode_partial_entity( - session, new_task -): - '''Decode partially encoded entity.''' - encoded = session.encode( - new_task, entity_attribute_strategy='set_only' - ) - - entity = session.decode(encoded) - - assert entity == new_task - assert entity is not new_task - - -def test_reset(mocker): - '''Reset session.''' - plugin_path = 
os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') - ) - session = ftrack_api.Session(plugin_paths=[plugin_path]) - - assert hasattr(session.types.get('User'), 'stub') - location = session.query('Location where name is "test.location"').one() - assert location.accessor is not ftrack_api.symbol.NOT_SET - - mocked_close = mocker.patch.object(session._request, 'close') - mocked_fetch = mocker.patch.object(session, '_load_schemas') - - session.reset() - - # Assert custom entity type maintained. - assert hasattr(session.types.get('User'), 'stub') - - # Assert location plugin re-configured. - location = session.query('Location where name is "test.location"').one() - assert location.accessor is not ftrack_api.symbol.NOT_SET - - # Assert connection not closed and no schema fetch issued. - assert not mocked_close.called - assert not mocked_fetch.called - - -def test_rollback_scalar_attribute_change(session, new_user): - '''Rollback scalar attribute change via session.''' - assert not session.recorded_operations - current_first_name = new_user['first_name'] - - new_user['first_name'] = 'NewName' - assert new_user['first_name'] == 'NewName' - assert session.recorded_operations - - session.rollback() - - assert not session.recorded_operations - assert new_user['first_name'] == current_first_name - - -def test_rollback_collection_attribute_change(session, new_user): - '''Rollback collection attribute change via session.''' - assert not session.recorded_operations - current_timelogs = new_user['timelogs'] - assert list(current_timelogs) == [] - - timelog = session.create('Timelog', {}) - new_user['timelogs'].append(timelog) - assert list(new_user['timelogs']) == [timelog] - assert session.recorded_operations - - session.rollback() - - assert not session.recorded_operations - assert list(new_user['timelogs']) == [] - - -def test_rollback_entity_creation(session): - '''Rollback entity creation via session.''' - assert not 
session.recorded_operations - - new_user = session.create('User') - assert session.recorded_operations - assert new_user in session.created - - session.rollback() - - assert not session.recorded_operations - assert new_user not in session.created - assert new_user not in session._local_cache.values() - - -def test_rollback_entity_deletion(session, new_user): - '''Rollback entity deletion via session.''' - assert not session.recorded_operations - - session.delete(new_user) - assert session.recorded_operations - assert new_user in session.deleted - - session.rollback() - assert not session.recorded_operations - assert new_user not in session.deleted - assert new_user in session._local_cache.values() - - -# Caching -# ------------------------------------------------------------------------------ - - -def test_get_entity_bypassing_cache(session, user, mocker): - '''Retrieve an entity by type and id bypassing cache.''' - mocker.patch.object(session, 'call', wraps=session.call) - - session.cache.remove( - session.cache_key_maker.key(ftrack_api.inspection.identity(user)) - ) - - matching = session.get(*ftrack_api.inspection.identity(user)) - - # Check a different instance returned. - assert matching is not user - - # Check instances have the same identity. - assert matching == user - - # Check cache was bypassed and server was called. - assert session.call.called - - -def test_get_entity_from_cache(cache, task, mocker): - '''Retrieve an entity by type and id from cache.''' - session = ftrack_api.Session(cache=cache) - - # Prepare cache. - session.merge(task) - - # Disable server calls. - mocker.patch.object(session, 'call') - - # Retrieve entity from cache. - entity = session.get(*ftrack_api.inspection.identity(task)) - - assert entity is not None, 'Failed to retrieve entity from cache.' - assert entity == task - assert entity is not task - - # Check that no call was made to server. 
- assert not session.call.called - - -def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): - '''Retrieve an entity tree from cache.''' - session = ftrack_api.Session(cache=cache) - - # Prepare cache. - # TODO: Maybe cache should be prepopulated for a better check here. - session.query( - 'select children, children.children, children.children.children, ' - 'children.children.children.assignments, ' - 'children.children.children.assignments.resource ' - 'from Project where id is "{0}"' - .format(new_project_tree['id']) - ).one() - - # Disable server calls. - mocker.patch.object(session, 'call') - - # Retrieve entity from cache. - entity = session.get(*ftrack_api.inspection.identity(new_project_tree)) - - assert entity is not None, 'Failed to retrieve entity from cache.' - assert entity == new_project_tree - assert entity is not new_project_tree - - # Check tree. - with session.auto_populating(False): - for sequence in entity['children']: - for shot in sequence['children']: - for task in shot['children']: - assignments = task['assignments'] - for assignment in assignments: - resource = assignment['resource'] - - assert resource is not ftrack_api.symbol.NOT_SET - - # Check that no call was made to server. - assert not session.call.called - - -def test_get_metadata_from_cache(session, mocker, cache, new_task): - '''Retrieve an entity along with its metadata from cache.''' - new_task['metadata']['key'] = 'value' - session.commit() - - fresh_session = ftrack_api.Session(cache=cache) - - # Prepare cache. - fresh_session.query( - 'select metadata.key, metadata.value from ' - 'Task where id is "{0}"' - .format(new_task['id']) - ).all() - - # Disable server calls. - mocker.patch.object(fresh_session, 'call') - - # Retrieve entity from cache. - entity = fresh_session.get(*ftrack_api.inspection.identity(new_task)) - - assert entity is not None, 'Failed to retrieve entity from cache.' 
- assert entity == new_task - assert entity is not new_task - - # Check metadata cached correctly. - with fresh_session.auto_populating(False): - metadata = entity['metadata'] - assert metadata['key'] == 'value' - - assert not fresh_session.call.called - - -def test_merge_circular_reference(cache, temporary_file): - '''Merge circular reference into cache.''' - session = ftrack_api.Session(cache=cache) - # The following will test the condition as a FileComponent will be created - # with corresponding ComponentLocation. The server will return the file - # component data with the component location embedded. The component - # location will in turn have an embedded reference to the file component. - # If the merge does not prioritise the primary keys of the instance then - # any cache that relies on using the identity of the file component will - # fail. - component = session.create_component(path=temporary_file) - assert component - - -def test_create_with_selective_cache(session): - '''Create entity does not store entity in selective cache.''' - cache = ftrack_api.cache.MemoryCache() - session.cache.caches.append(SelectiveCache(cache)) - try: - user = session.create('User', {'username': 'martin'}) - cache_key = session.cache_key_maker.key( - ftrack_api.inspection.identity(user) - ) - - with pytest.raises(KeyError): - cache.get(cache_key) - - finally: - session.cache.caches.pop() - - -def test_correct_file_type_on_sequence_component(session): - '''Create sequence component with correct file type.''' - path = '/path/to/image/sequence.%04d.dpx [1-10]' - sequence_component = session.create_component(path) - - assert sequence_component['file_type'] == '.dpx' - - -def test_read_schemas_from_cache( - session, temporary_valid_schema_cache -): - '''Read valid content from schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' - - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) - - assert expected_hash == hash_ - - -def 
test_fail_to_read_schemas_from_invalid_cache( - session, temporary_invalid_schema_cache -): - '''Fail to read invalid content from schema cache.''' - with pytest.raises(ValueError): - session._read_schemas_from_cache( - temporary_invalid_schema_cache - ) - - -def test_write_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Write valid content to schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' - schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) - - session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) - - assert expected_hash == hash_ - - -def test_fail_to_write_invalid_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Fail to write invalid content to schema cache.''' - # Datetime not serialisable by default. - invalid_content = datetime.datetime.now() - - with pytest.raises(TypeError): - session._write_schemas_to_cache( - invalid_content, temporary_valid_schema_cache - ) - - -def test_load_schemas_from_valid_cache( - mocker, session, temporary_valid_schema_cache, mocked_schemas -): - '''Load schemas from cache.''' - expected_schemas = session._load_schemas(temporary_valid_schema_cache) - - mocked = mocker.patch.object(session, 'call') - schemas = session._load_schemas(temporary_valid_schema_cache) - - assert schemas == expected_schemas - assert not mocked.called - - -def test_load_schemas_from_server_when_cache_invalid( - mocker, session, temporary_invalid_schema_cache -): - '''Load schemas from server when cache invalid.''' - mocked = mocker.patch.object(session, 'call', wraps=session.call) - - session._load_schemas(temporary_invalid_schema_cache) - assert mocked.called - - -def test_load_schemas_from_server_when_cache_outdated( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas from server when cache outdated.''' - schemas, _ = 
session._read_schemas_from_cache(temporary_valid_schema_cache) - schemas.append({ - 'id': 'NewTest' - }) - session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - - mocked = mocker.patch.object(session, 'call', wraps=session.call) - session._load_schemas(temporary_valid_schema_cache) - - assert mocked.called - - -def test_load_schemas_from_server_not_reporting_schema_hash( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas from server when server does not report schema hash.''' - mocked_write = mocker.patch.object( - session, '_write_schemas_to_cache', - wraps=session._write_schemas_to_cache - ) - - server_information = session._server_information.copy() - server_information.pop('schema_hash') - mocker.patch.object( - session, '_server_information', new=server_information - ) - - session._load_schemas(temporary_valid_schema_cache) - - # Cache still written even if hash not reported. - assert mocked_write.called - - mocked = mocker.patch.object(session, 'call', wraps=session.call) - session._load_schemas(temporary_valid_schema_cache) - - # No hash reported by server so cache should have been bypassed. 
- assert mocked.called - - -def test_load_schemas_bypassing_cache( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas bypassing cache when set to False.''' - with mocker.patch.object(session, 'call', wraps=session.call): - - session._load_schemas(temporary_valid_schema_cache) - assert session.call.call_count == 1 - - session._load_schemas(False) - assert session.call.call_count == 2 - - -def test_get_tasks_widget_url(session): - '''Tasks widget URL returns valid HTTP status.''' - url = session.get_widget_url('tasks') - response = requests.get(url) - response.raise_for_status() - - -def test_get_info_widget_url(session, task): - '''Info widget URL for *task* returns valid HTTP status.''' - url = session.get_widget_url('info', entity=task, theme='light') - response = requests.get(url) - response.raise_for_status() - - -def test_encode_media_from_path(session, video_path): - '''Encode media based on a file path.''' - job = session.encode_media(video_path) - - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'output' in job_data - assert 'source_component_id' in job_data - assert 'keep_original' in job_data and job_data['keep_original'] is False - assert len(job_data['output']) - assert 'component_id' in job_data['output'][0] - assert 'format' in job_data['output'][0] - - -def test_encode_media_from_component(session, video_path): - '''Encode media based on a component.''' - location = session.query('Location where name is "ftrack.server"').one() - component = session.create_component( - video_path, - location=location - ) - session.commit() - - job = session.encode_media(component) - - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'keep_original' in job_data and job_data['keep_original'] is True - - -def test_create_sequence_component_with_size(session, temporary_sequence): - '''Create a sequence component and verify that is has a size.''' - location = session.query('Location where 
name is "ftrack.server"').one() - component = session.create_component( - temporary_sequence - ) - - assert component['size'] > 0 - - -def test_plugin_arguments(mocker): - '''Pass plugin arguments to plugin discovery mechanism.''' - mock = mocker.patch( - 'ftrack_api.plugin.discover' - ) - session = ftrack_api.Session( - plugin_paths=[], plugin_arguments={"test": "value"} - ) - assert mock.called - mock.assert_called_once_with([], [session], {"test": "value"}) - -def test_remote_reset(session, new_user): - '''Reset user api key.''' - key_1 = session.reset_remote( - 'api_key', entity=new_user - ) - - key_2 = session.reset_remote( - 'api_key', entity=new_user - ) - - - assert key_1 != key_2 - - -@pytest.mark.parametrize('attribute', [ - ('id',), - ('email',) - -], ids=[ - 'Fail resetting primary key', - 'Fail resetting attribute without default value', -]) -def test_fail_remote_reset(session, user, attribute): - '''Fail trying to rest invalid attributes.''' - - with pytest.raises(ftrack_api.exception.ServerError): - session.reset_remote( - attribute, user - ) - - -def test_close(session): - '''Close session.''' - assert session.closed is False - session.close() - assert session.closed is True - - -def test_close_already_closed_session(session): - '''Close session that is already closed.''' - session.close() - assert session.closed is True - session.close() - assert session.closed is True - - -def test_server_call_after_close(session): - '''Fail to issue calls to server after session closed.''' - session.close() - assert session.closed is True - - with pytest.raises(ftrack_api.exception.ConnectionClosedError): - session.query('User').first() - - -def test_context_manager(session): - '''Use session as context manager.''' - with session: - assert session.closed is False - - assert session.closed is True - - -def test_delayed_job(session): - '''Test the delayed_job action''' - - with pytest.raises(ValueError): - session.delayed_job( - 'DUMMY_JOB' - ) - - 
-@pytest.mark.skip(reason='No configured ldap server.') -def test_delayed_job_ldap_sync(session): - '''Test the a delayed_job ldap sync action''' - result = session.delayed_job( - ftrack_api.symbol.JOB_SYNC_USERS_LDAP - ) - - assert isinstance( - result, ftrack_api.entity.job.Job - ) - - -def test_query_nested_custom_attributes(session, new_asset_version): - '''Query custom attributes nested and update a value and query again. - - This test will query custom attributes via 2 relations, then update the - value in one API session and read it back in another to verify that it gets - the new value. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - # Read the version via a relation in both sessions. - def get_versions(sessions): - versions = [] - for _session in sessions: - asset = _session.query( - 'select versions.custom_attributes from Asset where id is "{0}"'.format( - new_asset_version.get('asset_id') - ) - ).first() - - for version in asset['versions']: - if version.get('id') == new_asset_version.get('id'): - versions.append(version) - - return versions - - # Get version from both sessions. - versions = get_versions((session_one, session_two)) - - # Read attribute for both sessions. - for version in versions: - version['custom_attributes']['versiontest'] - - # Set attribute on session_one. - versions[0]['custom_attributes']['versiontest'] = random.randint( - 0, 99999 - ) - - session.commit() - - # Read version from server for session_two. - session_two_version = get_versions((session_two, ))[0] - - # Verify that value in session 2 is the same as set and committed in - # session 1. - assert ( - session_two_version['custom_attributes']['versiontest'] == - versions[0]['custom_attributes']['versiontest'] - ) - - -def test_query_nested(session): - '''Query components nested and update a value and query again. 
- - This test will query components via 2 relations, then update the - value in one API session and read it back in another to verify that it gets - the new value. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - query = ( - 'select versions.components.name from Asset where id is ' - '"12939d0c-6766-11e1-8104-f23c91df25eb"' - ) - - def get_version(session): - '''Return the test version from *session*.''' - asset = session.query(query).first() - asset_version = None - for version in asset['versions']: - if version['version'] == 8: - asset_version = version - break - - return asset_version - - asset_version = get_version(session_one) - asset_version2 = get_version(session_two) - - # This assert is not needed, but reading the collections are to ensure they - # are inflated. - assert ( - asset_version2['components'][0]['name'] == - asset_version['components'][0]['name'] - ) - - asset_version['components'][0]['name'] = str(uuid.uuid4()) - - session.commit() - - asset_version2 = get_version(session_two) - - assert ( - asset_version['components'][0]['name'] == - asset_version2['components'][0]['name'] - ) - - -def test_merge_iterations(session, mocker, project): - '''Ensure merge does not happen to many times when querying.''' - mocker.spy(session, '_merge') - - session.query( - 'select status from Task where project_id is {} limit 10'.format( - project['id'] - ) - ).all() - - assert session._merge.call_count < 75 - - -@pytest.mark.parametrize( - 'get_versions', - [ - lambda component, asset_version, asset: component['version']['asset']['versions'], - lambda component, asset_version, asset: asset_version['asset']['versions'], - lambda component, asset_version, asset: asset['versions'], - ], - ids=[ - 'from_component', - 'from_asset_version', - 'from_asset', - ] -) -def test_query_nested2(session, get_versions): - '''Query version.asset.versions from component and then add new version. 
- - This test will query versions via multiple relations and ensure a new - version appears when added to a different session and then is queried - again. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - # Get a random component that is linked to a version and asset. - component_id = session_two.query( - 'FileComponent where version.asset_id != None' - ).first()['id'] - - query = ( - 'select version.asset.versions from Component where id is "{}"'.format( - component_id - ) - ) - - component = session_one.query(query).one() - asset_version = component['version'] - asset = component['version']['asset'] - versions = component['version']['asset']['versions'] - length = len(versions) - - session_two.create('AssetVersion', { - 'asset_id': asset['id'] - }) - - session_two.commit() - - component = session_one.query(query).one() - versions = get_versions(component, asset_version, asset) - new_length = len(versions) - - assert length + 1 == new_length - - -def test_session_ready_reset_events(mocker): - '''Session ready and reset events.''' - plugin_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') - ) - session = ftrack_api.Session(plugin_paths=[plugin_path]) - - assert session._test_called_events['ftrack.api.session.ready'] is 1 - assert session._test_called_events['ftrack.api.session.reset'] is 0 - - session.reset() - assert session._test_called_events['ftrack.api.session.ready'] is 1 - assert session._test_called_events['ftrack.api.session.reset'] is 1 - - -def test_entity_reference(mocker, session): - '''Return entity reference that uniquely identifies entity.''' - mock_entity = mocker.Mock(entity_type="MockEntityType") - mock_auto_populating = mocker.patch.object(session, "auto_populating") - mock_primary_key = mocker.patch( - "ftrack_api.inspection.primary_key", return_value={"id": "mock-id"} - ) - - reference = session.entity_reference(mock_entity) - - assert reference 
== { - "__entity_type__": "MockEntityType", - "id": "mock-id" - } - - mock_auto_populating.assert_called_once_with(False) - mock_primary_key.assert_called_once_with(mock_entity) - - -def test__entity_reference(mocker, session): - '''Act as alias to entity_reference.''' - mock_entity = mocker.Mock(entity_type="MockEntityType") - mock_entity_reference = mocker.patch.object(session, "entity_reference") - mocker.patch("warnings.warn") - - session._entity_reference(mock_entity) - - mock_entity_reference.assert_called_once_with(mock_entity) - - -def test__entity_reference_issues_deprecation_warning(mocker, session): - '''Issue deprecation warning for usage of _entity_reference.''' - mocker.patch.object(session, "entity_reference") - mock_warn = mocker.patch("warnings.warn") - - session._entity_reference({}) - - mock_warn.assert_called_once_with( - ( - "Session._entity_reference is now available as public method " - "Session.entity_reference. The private method will be removed " - "in version 2.0." 
- ), - PendingDeprecationWarning - ) diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py deleted file mode 100644 index cf8b014ee5..0000000000 --- a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest -import ftrack_api.exception - - -def test_manually_create_multiple_timers_with_error(session, new_user): - '''Fail to create a second timer.''' - session.create('Timer', { - 'user': new_user - }) - - session.commit() - - with pytest.raises(ftrack_api.exception.ServerError): - session.create('Timer', { - 'user': new_user - }) - - session.commit() - - session.reset() - - -def test_create_multiple_timers_with_error(session, new_user): - '''Fail to create a second timer.''' - new_user.start_timer() - - with pytest.raises(ftrack_api.exception.NotUniqueError): - new_user.start_timer() - - session.reset() - - -def test_start_and_stop_a_timer(session, new_user, new_task): - '''Start a new timer and stop it to create a timelog.''' - new_user.start_timer(new_task) - - new_user.stop_timer() - - timelog = session.query( - 'Timelog where context_id = "{0}"'.format(new_task['id']) - ).one() - - assert timelog['user_id'] == new_user['id'], 'User id is correct.' - assert timelog['context_id'] == new_task['id'], 'Task id is correct.' - - -def test_start_a_timer_when_timer_is_running(session, new_user, new_task): - '''Start a timer when an existing timer is already running.''' - new_user.start_timer(new_task) - - # Create the second timer without context. - new_user.start_timer(force=True) - - # There should be only one existing timelog for this user. 
- timelogs = session.query( - 'Timelog where user_id = "{0}"'.format(new_user['id']) - ).all() - assert len(timelogs) == 1, 'One timelog exists.' - - timelog = session.query( - 'Timer where user_id = "{0}"'.format(new_user['id']) - ).one() - - # Make sure running timer has no context. - assert timelog['context_id'] is None, 'Timer does not have a context.' - - -def test_stop_timer_without_timer_running(session, new_user): - '''Stop a timer when no timer is running.''' - with pytest.raises(ftrack_api.exception.NoResultFoundError): - new_user.stop_timer() From 7decf0aa911a3fd18d7a91688ae39fd6f098eb13 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 29 Jul 2021 11:06:17 +0200 Subject: [PATCH 090/308] Webpublisher - backend - added settings and defaults --- .../project_settings/webpublisher.json | 72 +++++++++++++++++++ .../schemas/projects_schema/schema_main.json | 4 ++ .../schema_project_webpublisher.json | 60 ++++++++++++++++ 3 files changed, 136 insertions(+) create mode 100644 openpype/settings/defaults/project_settings/webpublisher.json create mode 100644 openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json new file mode 100644 index 0000000000..69b6babc64 --- /dev/null +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -0,0 +1,72 @@ +{ + "publish": { + "CollectPublishedFiles": { + "task_type_to_family": { + "Animation": { + "workfile": { + "is_sequence": false, + "extensions": [ + "tvp" + ], + "families": [] + }, + "render": { + "is_sequence": true, + "extensions": [ + "png", + "exr", + "tiff", + "tif" + ], + "families": [ + "review" + ] + } + }, + "Compositing": { + "workfile": { + "is_sequence": false, + "extensions": [ + "aep" + ], + "families": [] + }, + "render": { + "is_sequence": true, + "extensions": [ + "png", + "exr", + "tiff", + "tif" + ], + "families": [ + 
"review" + ] + } + }, + "Layout": { + "workfile": { + "is_sequence": false, + "extensions": [ + "psd" + ], + "families": [] + }, + "image": { + "is_sequence": false, + "extensions": [ + "png", + "jpg", + "jpeg", + "tiff", + "tif" + ], + "families": [ + "review" + ] + } + } + } + } + } +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 4a8a9d496e..575cfc9e72 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -118,6 +118,10 @@ "type": "schema", "name": "schema_project_standalonepublisher" }, + { + "type": "schema", + "name": "schema_project_webpublisher" + }, { "type": "schema", "name": "schema_project_unreal" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json new file mode 100644 index 0000000000..6ae82e0561 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -0,0 +1,60 @@ +{ + "type": "dict", + "collapsible": true, + "key": "webpublisher", + "label": "Web Publisher", + "is_file": true, + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "CollectPublishedFiles", + "label": "Collect Published Files", + "children": [ + { + "type": "dict-modifiable", + "collapsible": true, + "key": "task_type_to_family", + "label": "Task type to family mapping", + "collapsible_key": true, + "object_type": { + "type": "dict-modifiable", + "collapsible": false, + "key": "task_type", + "collapsible_key": false, + "object_type": { + "type": "dict", + "children": [ + { + "type": "boolean", + "key": "is_sequence", + "label": "Is Sequence" + 
}, + { + "type": "list", + "key": "extensions", + "label": "Extensions", + "object_type": "text" + }, + { + "type": "list", + "key": "families", + "label": "Families", + "object_type": "text" + } + ] + } + } + } + ] + } + ] + } + ] +} \ No newline at end of file From e9cdcc5fafe513c7ad8c1565a23b3c41988dbffb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 29 Jul 2021 11:16:18 +0200 Subject: [PATCH 091/308] added ftrack submodules to right folder --- .gitmodules | 10 +++++----- .../default_modules/ftrack/python2_vendor/arrow | 1 + .../ftrack/python2_vendor/ftrack-python-api | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) create mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/arrow create mode 160000 openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api diff --git a/.gitmodules b/.gitmodules index 52f2fc0750..82fd194d26 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,9 +4,9 @@ [submodule "repos/avalon-unreal-integration"] path = repos/avalon-unreal-integration url = https://github.com/pypeclub/avalon-unreal-integration.git -[submodule "openpype/modules/ftrack/python2_vendor/ftrack-python-api"] - path = openpype/modules/ftrack/python2_vendor/ftrack-python-api +[submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"] + path = openpype/modules/default_modules/ftrack/python2_vendor/arrow + url = git@github.com:arrow-py/arrow.git +[submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"] + path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api url = https://bitbucket.org/ftrack/ftrack-python-api.git -[submodule "openpype/modules/ftrack/python2_vendor/arrow"] - path = openpype/modules/ftrack/python2_vendor/arrow - url = https://github.com/arrow-py/arrow.git \ No newline at end of file diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/arrow b/openpype/modules/default_modules/ftrack/python2_vendor/arrow new file mode 160000 index 
0000000000..b746fedf72 --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/arrow @@ -0,0 +1 @@ +Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api new file mode 160000 index 0000000000..d277f474ab --- /dev/null +++ b/openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api @@ -0,0 +1 @@ +Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 9d456283bf68ff91ac5b6b7d3d999e96a0114998 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 29 Jul 2021 11:47:09 +0200 Subject: [PATCH 092/308] hound fixes --- openpype/modules/base.py | 9 ++++++--- .../ftrack/ftrack_server/event_server_cli.py | 11 ++--------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index fc53d3b27a..77d9ddbcec 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -2,7 +2,6 @@ """Base class for Pype Modules.""" import os import sys -import types import time import inspect import logging @@ -366,12 +365,16 @@ class ModulesManager: not_implemented = [] for attr_name in dir(modules_item): attr = getattr(modules_item, attr_name, None) - if attr and getattr(attr, "__isabstractmethod__", None): + abs_method = getattr( + attr, "__isabstractmethod__", None + ) + if attr and abs_method: not_implemented.append(attr_name) # Log missing implementations self.log.warning(( - "Skipping abstract Class: {}. Missing implementations: {}" + "Skipping abstract Class: {}." 
+ " Missing implementations: {}" ).format(name, ", ".join(not_implemented))) continue module_classes.append(modules_item) diff --git a/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py index 1e14929d96..d8e4d05580 100644 --- a/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py +++ b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py @@ -19,15 +19,8 @@ from openpype.lib import ( OpenPypeMongoConnection ) from openpype_modules.ftrack import FTRACK_MODULE_DIR -from openpype_modules.ftrack.lib import ( - credentials, - get_ftrack_url_from_settings -) -from openpype_modules.ftrack.ftrack_server.lib import ( - check_ftrack_url, - get_ftrack_event_mongo_info -) - +from openpype_modules.ftrack.lib import credentials +from openpype_modules.ftrack.ftrack_server.lib import check_ftrack_url from openpype_modules.ftrack.ftrack_server import socket_thread From 4f63e3d21ffd0e62caef178f8acb5fe2e422f8c3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 29 Jul 2021 17:30:34 +0200 Subject: [PATCH 093/308] Webpublisher - backend - updated settings --- .../project_settings/webpublisher.json | 44 ++++++++++++++++--- .../schema_project_webpublisher.json | 5 +++ 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index 69b6babc64..8364b6a39d 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -8,7 +8,8 @@ "extensions": [ "tvp" ], - "families": [] + "families": [], + "subset_template_name": "" }, "render": { "is_sequence": true, @@ -20,7 +21,8 @@ ], "families": [ "review" - ] + ], + "subset_template_name": "" } }, "Compositing": { @@ -29,7 +31,8 @@ "extensions": [ "aep" ], - "families": [] + "families": [], + 
"subset_template_name": "" }, "render": { "is_sequence": true, @@ -41,7 +44,8 @@ ], "families": [ "review" - ] + ], + "subset_template_name": "" } }, "Layout": { @@ -50,7 +54,8 @@ "extensions": [ "psd" ], - "families": [] + "families": [], + "subset_template_name": "" }, "image": { "is_sequence": false, @@ -63,8 +68,35 @@ ], "families": [ "review" - ] + ], + "subset_template_name": "" } + }, + "default_task_type": { + "workfile": { + "is_sequence": false, + "extensions": [ + "tvp" + ], + "families": [], + "subset_template_name": "{family}{Variant}" + }, + "render": { + "is_sequence": true, + "extensions": [ + "png", + "exr", + "tiff", + "tif" + ], + "families": [ + "review" + ], + "subset_template_name": "{family}{Variant}" + } + }, + "__dynamic_keys_labels__": { + "default_task_type": "Default task type" } } } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index 6ae82e0561..bf59cd030e 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -47,6 +47,11 @@ "key": "families", "label": "Families", "object_type": "text" + }, + { + "type": "text", + "key": "subset_template_name", + "label": "Subset template name" } ] } From dea843d851162624e10a810d431e5ed78c1e13cb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 29 Jul 2021 18:17:19 +0200 Subject: [PATCH 094/308] Webpublisher - backend - implemented version and subset name --- openpype/hosts/webpublisher/api/__init__.py | 2 + .../publish/collect_published_files.py | 233 ++++++++---------- openpype/modules/webserver/webserver_cli.py | 13 +- openpype/pype_commands.py | 3 +- 4 files changed, 110 insertions(+), 141 deletions(-) diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 
1b6edcf24d..76709bb2d7 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -2,6 +2,7 @@ import os import logging from avalon import api as avalon +from avalon import io from pyblish import api as pyblish import openpype.hosts.webpublisher @@ -27,6 +28,7 @@ def install(): avalon.register_plugin_path(avalon.Creator, CREATE_PATH) log.info(PUBLISH_PATH) + io.install() avalon.on("application.launched", application_launch) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index dde9713c7a..deadbb856b 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -12,60 +12,9 @@ import json import clique import pyblish.api -from avalon import api +from avalon import io +from openpype.lib import prepare_template_data -FAMILY_SETTING = { # TEMP - "Animation": { - "workfile": { - "is_sequence": False, - "extensions": ["tvp"], - "families": [] - }, - "render": { - "is_sequence": True, - "extensions": [ - "png", "exr", "tiff", "tif" - ], - "families": ["review"] - } - }, - "Compositing": { - "workfile": { - "is_sequence": False, - "extensions": ["aep"], - "families": [] - }, - "render": { - "is_sequence": True, - "extensions": [ - "png", "exr", "tiff", "tif" - ], - "families": ["review"] - } - }, - "Layout": { - "workfile": { - "is_sequence": False, - "extensions": [ - ".psd" - ], - "families": [] - }, - "image": { - "is_sequence": False, - "extensions": [ - "png", - "jpg", - "jpeg", - "tiff", - "tif" - ], - "families": [ - "review" - ] - } - } -} class CollectPublishedFiles(pyblish.api.ContextPlugin): """ @@ -80,6 +29,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): _context = None + # from Settings + task_type_to_family = {} + def _load_json(self, path): path = path.strip('\"') assert 
os.path.isfile(path), ( @@ -96,69 +48,6 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): ) return data - def _fill_staging_dir(self, data_object, anatomy): - staging_dir = data_object.get("stagingDir") - if staging_dir: - data_object["stagingDir"] = anatomy.fill_root(staging_dir) - - def _process_path(self, data): - # validate basic necessary data - data_err = "invalid json file - missing data" - # required = ["asset", "user", "comment", - # "job", "instances", "session", "version"] - # assert all(elem in data.keys() for elem in required), data_err - - # set context by first json file - ctx = self._context.data - - ctx["asset"] = ctx.get("asset") or data.get("asset") - ctx["intent"] = ctx.get("intent") or data.get("intent") - ctx["comment"] = ctx.get("comment") or data.get("comment") - ctx["user"] = ctx.get("user") or data.get("user") - ctx["version"] = ctx.get("version") or data.get("version") - - # basic sanity check to see if we are working in same context - # if some other json file has different context, bail out. 
- ctx_err = "inconsistent contexts in json files - %s" - assert ctx.get("asset") == data.get("asset"), ctx_err % "asset" - assert ctx.get("intent") == data.get("intent"), ctx_err % "intent" - assert ctx.get("comment") == data.get("comment"), ctx_err % "comment" - assert ctx.get("user") == data.get("user"), ctx_err % "user" - assert ctx.get("version") == data.get("version"), ctx_err % "version" - - # now we can just add instances from json file and we are done - for instance_data in data.get("instances"): - self.log.info(" - processing instance for {}".format( - instance_data.get("subset"))) - instance = self._context.create_instance( - instance_data.get("subset") - ) - self.log.info("Filling stagingDir...") - - self._fill_staging_dir(instance_data, anatomy) - instance.data.update(instance_data) - - # stash render job id for later validation - instance.data["render_job_id"] = data.get("job").get("_id") - - representations = [] - for repre_data in instance_data.get("representations") or []: - self._fill_staging_dir(repre_data, anatomy) - representations.append(repre_data) - - instance.data["representations"] = representations - - # add audio if in metadata data - if data.get("audio"): - instance.data.update({ - "audio": [{ - "filename": data.get("audio"), - "offset": 0 - }] - }) - self.log.info( - f"Adding audio to instance: {instance.data['audio']}") - def _process_batch(self, dir_url): task_subfolders = [os.path.join(dir_url, o) for o in os.listdir(dir_url) @@ -169,32 +58,41 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "manifest.json")) self.log.info("task_data:: {}".format(task_data)) ctx = task_data["context"] - task_type = None + task_type = "default_task_type" + task_name = None subset = "Main" # temp if ctx["type"] == "task": items = ctx["path"].split('/') asset = items[-2] os.environ["AVALON_TASK"] = ctx["name"] + task_name = ctx["name"] task_type = ctx["attributes"]["type"] else: asset = ctx["name"] is_sequence = len(task_data["files"]) > 1 - 
instance = self._context.create_instance(subset) _, extension = os.path.splitext(task_data["files"][0]) self.log.info("asset:: {}".format(asset)) - family, families = self._get_family(FAMILY_SETTING, # todo - task_type, - is_sequence, - extension.replace(".", '')) + family, families, subset_template = self._get_family( + self.task_type_to_family, + task_type, + is_sequence, + extension.replace(".", '')) + + subset = self._get_subset_name(family, subset_template, task_name, + task_data["variant"]) + os.environ["AVALON_ASSET"] = asset + io.Session["AVALON_ASSET"] = asset + + instance = self._context.create_instance(subset) instance.data["asset"] = asset instance.data["subset"] = subset instance.data["family"] = family instance.data["families"] = families - # instance.data["version"] = self._get_version(task_data["subset"]) + instance.data["version"] = self._get_version(asset, subset) + 1 instance.data["stagingDir"] = task_dir instance.data["source"] = "webpublisher" @@ -205,17 +103,33 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_data["files"], task_dir ) else: - _, ext = os.path.splittext(task_data["files"][0]) - repre_data = { - "name": ext[1:], - "ext": ext[1:], - "files": task_data["files"], - "stagingDir": task_dir - } - instance.data["representation"] = repre_data + + instance.data["representation"] = self._get_single_repre( + task_dir, task_data["files"] + ) self.log.info("instance.data:: {}".format(instance.data)) + def _get_subset_name(self, family, subset_template, task_name, variant): + fill_pairs = { + "variant": variant, + "family": family, + "task": task_name + } + subset = subset_template.format(**prepare_template_data(fill_pairs)) + return subset + + def _get_single_repre(self, task_dir, files): + _, ext = os.path.splittext(files[0]) + repre_data = { + "name": ext[1:], + "ext": ext[1:], + "files": files, + "stagingDir": task_dir + } + + return repre_data + def _process_sequence(self, files, task_dir): """Prepare reprentations for 
sequence of files.""" collections, remainder = clique.assemble(files) @@ -246,7 +160,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): extension (str): without '.' Returns: - (family, [families]) tuple + (family, [families], subset_template_name) tuple AssertionError if not matching family found """ task_obj = settings.get(task_type) @@ -265,10 +179,59 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_type, is_sequence, extension) assert found_family, msg - return found_family, content["families"] + return found_family, \ + content["families"], \ + content["subset_template_name"] - def _get_version(self, subset_name): - return 1 + def _get_version(self, asset_name, subset_name): + """Returns version number or 0 for 'asset' and 'subset'""" + query = [ + { + "$match": {"type": "asset", "name": asset_name} + }, + { + "$lookup": + { + "from": os.environ["AVALON_PROJECT"], + "localField": "_id", + "foreignField": "parent", + "as": "subsets" + } + }, + { + "$unwind": "$subsets" + }, + { + "$match": {"subsets.type": "subset", + "subsets.name": subset_name}}, + { + "$lookup": + { + "from": os.environ["AVALON_PROJECT"], + "localField": "subsets._id", + "foreignField": "parent", + "as": "versions" + } + }, + { + "$unwind": "$versions" + }, + { + "$group": { + "_id": { + "asset_name": "$name", + "subset_name": "$subsets.name" + }, + 'version': {'$max': "$versions.name"} + } + } + ] + version = list(io.aggregate(query)) + + if version: + return version[0].get("version") or 0 + else: + return 0 def process(self, context): self._context = context diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 484c25c6b3..7773bde567 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -146,7 +146,8 @@ class WebpublisherPublishEndpoint(_RestApiEndpoint): args = [ openpype_app, 'remotepublish', - batch_path + batch_id, + task_id ] if not openpype_app or not 
os.path.exists(openpype_app): @@ -174,7 +175,7 @@ class WebpublisherPublishEndpoint(_RestApiEndpoint): class BatchStatusEndpoint(_RestApiEndpoint): - """Returns list of project names.""" + """Returns dict with info for batch_id.""" async def get(self, batch_id) -> Response: output = self.dbcon.find_one({"batch_id": batch_id}) @@ -186,9 +187,9 @@ class BatchStatusEndpoint(_RestApiEndpoint): class PublishesStatusEndpoint(_RestApiEndpoint): - """Returns list of project names.""" + """Returns list of dict with batch info for user (email address).""" async def get(self, user) -> Response: - output = self.dbcon.find({"user": user}) + output = list(self.dbcon.find({"user": user})) return Response( status=200, @@ -198,6 +199,7 @@ class PublishesStatusEndpoint(_RestApiEndpoint): class RestApiResource: + """Resource carrying needed info and Avalon DB connection for publish.""" def __init__(self, server_manager, executable, upload_dir): self.server_manager = server_manager self.upload_dir = upload_dir @@ -224,6 +226,7 @@ class RestApiResource: class OpenPypeRestApiResource(RestApiResource): + """Resource carrying OP DB connection for storing batch info into DB.""" def __init__(self, ): mongo_client = OpenPypeMongoConnection.get_mongo_client() database_name = os.environ["OPENPYPE_DATABASE_NAME"] @@ -254,6 +257,7 @@ def run_webserver(*args, **kwargs): hiearchy_endpoint.dispatch ) + # triggers publish webpublisher_publish_endpoint = WebpublisherPublishEndpoint(resource) webserver_module.server_manager.add_route( "POST", @@ -261,6 +265,7 @@ def run_webserver(*args, **kwargs): webpublisher_publish_endpoint.dispatch ) + # reporting openpype_resource = OpenPypeRestApiResource() batch_status_endpoint = BatchStatusEndpoint(openpype_resource) webserver_module.server_manager.add_route( diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 01fa6b8d33..1391c36661 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -161,7 +161,6 @@ class 
PypeCommands: # this should be more generic from openpype.hosts.webpublisher.api import install as w_install w_install() - pyblish.api.register_host(host) log.info("Running publish ...") @@ -199,7 +198,7 @@ class PypeCommands: {"_id": _id}, {"$set": { - "progress": result["progress"] + "progress": max(result["progress"], 0.95) }} ) From 3c7f6a89fe7e72fd808d23c975306a800126579a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 30 Jul 2021 10:22:16 +0200 Subject: [PATCH 095/308] Webpublisher - backend - refactored routes --- .../modules/webserver/webpublish_routes.py | 242 ++++++++++++++++ openpype/modules/webserver/webserver_cli.py | 258 ++---------------- 2 files changed, 265 insertions(+), 235 deletions(-) create mode 100644 openpype/modules/webserver/webpublish_routes.py diff --git a/openpype/modules/webserver/webpublish_routes.py b/openpype/modules/webserver/webpublish_routes.py new file mode 100644 index 0000000000..805ac11a54 --- /dev/null +++ b/openpype/modules/webserver/webpublish_routes.py @@ -0,0 +1,242 @@ +"""Routes and etc. 
for webpublisher API.""" +import os +import json +import datetime +from bson.objectid import ObjectId +import collections +from aiohttp.web_response import Response +import subprocess + +from avalon.api import AvalonMongoDB + +from openpype.lib import OpenPypeMongoConnection +from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint + + +class RestApiResource: + """Resource carrying needed info and Avalon DB connection for publish.""" + def __init__(self, server_manager, executable, upload_dir): + self.server_manager = server_manager + self.upload_dir = upload_dir + self.executable = executable + + self.dbcon = AvalonMongoDB() + self.dbcon.install() + + @staticmethod + def json_dump_handler(value): + if isinstance(value, datetime.datetime): + return value.isoformat() + if isinstance(value, ObjectId): + return str(value) + raise TypeError(value) + + @classmethod + def encode(cls, data): + return json.dumps( + data, + indent=4, + default=cls.json_dump_handler + ).encode("utf-8") + + +class OpenPypeRestApiResource(RestApiResource): + """Resource carrying OP DB connection for storing batch info into DB.""" + def __init__(self, ): + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + self.dbcon = mongo_client[database_name]["webpublishes"] + + +class WebpublisherProjectsEndpoint(_RestApiEndpoint): + """Returns list of dict with project info (id, name).""" + async def get(self) -> Response: + output = [] + for project_name in self.dbcon.database.collection_names(): + project_doc = self.dbcon.database[project_name].find_one({ + "type": "project" + }) + if project_doc: + ret_val = { + "id": project_doc["_id"], + "name": project_doc["name"] + } + output.append(ret_val) + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class WebpublisherHiearchyEndpoint(_RestApiEndpoint): + """Returns dictionary with context tree from assets.""" + async def 
get(self, project_name) -> Response: + query_projection = { + "_id": 1, + "data.tasks": 1, + "data.visualParent": 1, + "data.entityType": 1, + "name": 1, + "type": 1, + } + + asset_docs = self.dbcon.database[project_name].find( + {"type": "asset"}, + query_projection + ) + asset_docs_by_id = { + asset_doc["_id"]: asset_doc + for asset_doc in asset_docs + } + + asset_docs_by_parent_id = collections.defaultdict(list) + for asset_doc in asset_docs_by_id.values(): + parent_id = asset_doc["data"].get("visualParent") + asset_docs_by_parent_id[parent_id].append(asset_doc) + + assets = collections.defaultdict(list) + + for parent_id, children in asset_docs_by_parent_id.items(): + for child in children: + node = assets.get(child["_id"]) + if not node: + node = Node(child["_id"], + child["data"]["entityType"], + child["name"]) + assets[child["_id"]] = node + + tasks = child["data"].get("tasks", {}) + for t_name, t_con in tasks.items(): + task_node = TaskNode("task", t_name) + task_node["attributes"]["type"] = t_con.get("type") + + task_node.parent = node + + parent_node = assets.get(parent_id) + if not parent_node: + asset_doc = asset_docs_by_id.get(parent_id) + if asset_doc: # regular node + parent_node = Node(parent_id, + asset_doc["data"]["entityType"], + asset_doc["name"]) + else: # root + parent_node = Node(parent_id, + "project", + project_name) + assets[parent_id] = parent_node + node.parent = parent_node + + roots = [x for x in assets.values() if x.parent is None] + + return Response( + status=200, + body=self.resource.encode(roots[0]), + content_type="application/json" + ) + + +class Node(dict): + """Node element in context tree.""" + + def __init__(self, uid, node_type, name): + self._parent = None # pointer to parent Node + self["type"] = node_type + self["name"] = name + self['id'] = uid # keep reference to id # + self['children'] = [] # collection of pointers to child Nodes + + @property + def parent(self): + return self._parent # simply return the object at the 
_parent pointer + + @parent.setter + def parent(self, node): + self._parent = node + # add this node to parent's list of children + node['children'].append(self) + + +class TaskNode(Node): + """Special node type only for Tasks.""" + + def __init__(self, node_type, name): + self._parent = None + self["type"] = node_type + self["name"] = name + self["attributes"] = {} + + +class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): + """Triggers headless publishing of batch.""" + async def post(self, request) -> Response: + output = {} + + print(request) + + batch_path = os.path.join(self.resource.upload_dir, + request.query["batch_id"]) + + openpype_app = self.resource.executable + args = [ + openpype_app, + 'remotepublish', + batch_path + ] + + if not openpype_app or not os.path.exists(openpype_app): + msg = "Non existent OpenPype executable {}".format(openpype_app) + raise RuntimeError(msg) + + add_args = { + "host": "webpublisher", + "project": request.query["project"], + "user": request.query["user"] + } + + for key, value in add_args.items(): + args.append("--{}".format(key)) + args.append(value) + + print("args:: {}".format(args)) + + _exit_code = subprocess.call(args, shell=True) + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class WebpublisherTaskPublishEndpoint(_RestApiEndpoint): + """Prepared endpoint triggered after each task - for future development.""" + async def post(self, request) -> Response: + return Response( + status=200, + body=self.resource.encode([]), + content_type="application/json" + ) + + +class BatchStatusEndpoint(_RestApiEndpoint): + """Returns dict with info for batch_id.""" + async def get(self, batch_id) -> Response: + output = self.dbcon.find_one({"batch_id": batch_id}) + + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) + + +class PublishesStatusEndpoint(_RestApiEndpoint): + """Returns list of dict with batch 
info for user (email address).""" + async def get(self, user) -> Response: + output = list(self.dbcon.find({"user": user})) + + return Response( + status=200, + body=self.resource.encode(output), + content_type="application/json" + ) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 7773bde567..0812bfa372 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -1,239 +1,18 @@ -import os import time -import json -import datetime -from bson.objectid import ObjectId -import collections -from aiohttp.web_response import Response -import subprocess - -from avalon.api import AvalonMongoDB - -from openpype.lib import OpenPypeMongoConnection -from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint - - -class WebpublisherProjectsEndpoint(_RestApiEndpoint): - """Returns list of project names.""" - async def get(self) -> Response: - output = [] - for project_name in self.dbcon.database.collection_names(): - project_doc = self.dbcon.database[project_name].find_one({ - "type": "project" - }) - if project_doc: - ret_val = { - "id": project_doc["_id"], - "name": project_doc["name"] - } - output.append(ret_val) - return Response( - status=200, - body=self.resource.encode(output), - content_type="application/json" - ) - - -class WebpublisherHiearchyEndpoint(_RestApiEndpoint): - """Returns dictionary with context tree from assets.""" - async def get(self, project_name) -> Response: - query_projection = { - "_id": 1, - "data.tasks": 1, - "data.visualParent": 1, - "data.entityType": 1, - "name": 1, - "type": 1, - } - - asset_docs = self.dbcon.database[project_name].find( - {"type": "asset"}, - query_projection - ) - asset_docs_by_id = { - asset_doc["_id"]: asset_doc - for asset_doc in asset_docs - } - - asset_docs_by_parent_id = collections.defaultdict(list) - for asset_doc in asset_docs_by_id.values(): - parent_id = asset_doc["data"].get("visualParent") - 
asset_docs_by_parent_id[parent_id].append(asset_doc) - - assets = collections.defaultdict(list) - - for parent_id, children in asset_docs_by_parent_id.items(): - for child in children: - node = assets.get(child["_id"]) - if not node: - node = Node(child["_id"], - child["data"]["entityType"], - child["name"]) - assets[child["_id"]] = node - - tasks = child["data"].get("tasks", {}) - for t_name, t_con in tasks.items(): - task_node = TaskNode("task", t_name) - task_node["attributes"]["type"] = t_con.get("type") - - task_node.parent = node - - parent_node = assets.get(parent_id) - if not parent_node: - asset_doc = asset_docs_by_id.get(parent_id) - if asset_doc: # regular node - parent_node = Node(parent_id, - asset_doc["data"]["entityType"], - asset_doc["name"]) - else: # root - parent_node = Node(parent_id, - "project", - project_name) - assets[parent_id] = parent_node - node.parent = parent_node - - roots = [x for x in assets.values() if x.parent is None] - - return Response( - status=200, - body=self.resource.encode(roots[0]), - content_type="application/json" - ) - - -class Node(dict): - """Node element in context tree.""" - - def __init__(self, uid, node_type, name): - self._parent = None # pointer to parent Node - self["type"] = node_type - self["name"] = name - self['id'] = uid # keep reference to id # - self['children'] = [] # collection of pointers to child Nodes - - @property - def parent(self): - return self._parent # simply return the object at the _parent pointer - - @parent.setter - def parent(self, node): - self._parent = node - # add this node to parent's list of children - node['children'].append(self) - - -class TaskNode(Node): - """Special node type only for Tasks.""" - - def __init__(self, node_type, name): - self._parent = None - self["type"] = node_type - self["name"] = name - self["attributes"] = {} - - -class WebpublisherPublishEndpoint(_RestApiEndpoint): - """Returns list of project names.""" - async def post(self, request) -> Response: - 
output = {} - - print(request) - - batch_path = os.path.join(self.resource.upload_dir, - request.query["batch_id"]) - - openpype_app = self.resource.executable - args = [ - openpype_app, - 'remotepublish', - batch_id, - task_id - ] - - if not openpype_app or not os.path.exists(openpype_app): - msg = "Non existent OpenPype executable {}".format(openpype_app) - raise RuntimeError(msg) - - add_args = { - "host": "webpublisher", - "project": request.query["project"], - "user": request.query["user"] - } - - for key, value in add_args.items(): - args.append("--{}".format(key)) - args.append(value) - - print("args:: {}".format(args)) - - _exit_code = subprocess.call(args, shell=True) - return Response( - status=200, - body=self.resource.encode(output), - content_type="application/json" - ) - - -class BatchStatusEndpoint(_RestApiEndpoint): - """Returns dict with info for batch_id.""" - async def get(self, batch_id) -> Response: - output = self.dbcon.find_one({"batch_id": batch_id}) - - return Response( - status=200, - body=self.resource.encode(output), - content_type="application/json" - ) - - -class PublishesStatusEndpoint(_RestApiEndpoint): - """Returns list of dict with batch info for user (email address).""" - async def get(self, user) -> Response: - output = list(self.dbcon.find({"user": user})) - - return Response( - status=200, - body=self.resource.encode(output), - content_type="application/json" - ) - - -class RestApiResource: - """Resource carrying needed info and Avalon DB connection for publish.""" - def __init__(self, server_manager, executable, upload_dir): - self.server_manager = server_manager - self.upload_dir = upload_dir - self.executable = executable - - self.dbcon = AvalonMongoDB() - self.dbcon.install() - - @staticmethod - def json_dump_handler(value): - if isinstance(value, datetime.datetime): - return value.isoformat() - if isinstance(value, ObjectId): - return str(value) - raise TypeError(value) - - @classmethod - def encode(cls, data): - return 
json.dumps( - data, - indent=4, - default=cls.json_dump_handler - ).encode("utf-8") - - -class OpenPypeRestApiResource(RestApiResource): - """Resource carrying OP DB connection for storing batch info into DB.""" - def __init__(self, ): - mongo_client = OpenPypeMongoConnection.get_mongo_client() - database_name = os.environ["OPENPYPE_DATABASE_NAME"] - self.dbcon = mongo_client[database_name]["webpublishes"] +from .webpublish_routes import ( + RestApiResource, + OpenPypeRestApiResource, + WebpublisherBatchPublishEndpoint, + WebpublisherTaskPublishEndpoint, + WebpublisherHiearchyEndpoint, + WebpublisherProjectsEndpoint, + BatchStatusEndpoint, + PublishesStatusEndpoint +) def run_webserver(*args, **kwargs): + """Runs webserver in command line, adds routes.""" from openpype.modules import ModulesManager manager = ModulesManager() @@ -258,11 +37,20 @@ def run_webserver(*args, **kwargs): ) # triggers publish - webpublisher_publish_endpoint = WebpublisherPublishEndpoint(resource) + webpublisher_task_publish_endpoint = \ + WebpublisherBatchPublishEndpoint(resource) webserver_module.server_manager.add_route( "POST", - "/api/webpublish/{batch_id}", - webpublisher_publish_endpoint.dispatch + "/api/webpublish/batch", + webpublisher_task_publish_endpoint.dispatch + ) + + webpublisher_batch_publish_endpoint = \ + WebpublisherTaskPublishEndpoint(resource) + webserver_module.server_manager.add_route( + "POST", + "/api/webpublish/task", + webpublisher_batch_publish_endpoint.dispatch ) # reporting From 349ddf6d915dff324827bf891b71f4a3026841ab Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 30 Jul 2021 10:22:29 +0200 Subject: [PATCH 096/308] Webpublisher - backend - fix signature --- openpype/pype_commands.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 1391c36661..a4a5cf7a4b 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -215,6 +215,7 @@ class PypeCommands: log.info("Publish finished.") 
uninstall() + @staticmethod def extractenvironments(output_json_path, project, asset, task, app): env = os.environ.copy() if all((project, asset, task, app)): From e5f58b2c10e7300245457b585a92a6b1e5042b8e Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 30 Jul 2021 12:35:29 +0100 Subject: [PATCH 097/308] Disregard publishing time. --- openpype/plugins/publish/start_timer.py | 15 +++++++++++++++ openpype/plugins/publish/stop_timer.py | 19 +++++++++++++++++++ .../defaults/system_settings/modules.json | 5 +++-- .../schemas/system_schema/schema_modules.json | 5 +++++ 4 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 openpype/plugins/publish/start_timer.py create mode 100644 openpype/plugins/publish/stop_timer.py diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py new file mode 100644 index 0000000000..6312294bf1 --- /dev/null +++ b/openpype/plugins/publish/start_timer.py @@ -0,0 +1,15 @@ +import pyblish.api + +from openpype.api import get_system_settings +from openpype.lib import change_timer_to_current_context + + +class StartTimer(pyblish.api.ContextPlugin): + label = "Start Timer" + order = pyblish.api.IntegratorOrder + 1 + hosts = ["*"] + + def process(self, context): + modules_settings = get_system_settings()["modules"] + if modules_settings["timers_manager"]["disregard_publishing"]: + change_timer_to_current_context() diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py new file mode 100644 index 0000000000..81afd16378 --- /dev/null +++ b/openpype/plugins/publish/stop_timer.py @@ -0,0 +1,19 @@ +import os +import requests + +import pyblish.api + +from openpype.api import get_system_settings + + +class StopTimer(pyblish.api.ContextPlugin): + label = "Stop Timer" + order = pyblish.api.ExtractorOrder - 0.5 + hosts = ["*"] + + def process(self, context): + modules_settings = get_system_settings()["modules"] + if 
modules_settings["timers_manager"]["disregard_publishing"]: + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url) + requests.post(rest_api_url) diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 1b74b4695c..e6f5096df7 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -128,7 +128,8 @@ "enabled": true, "auto_stop": true, "full_time": 15.0, - "message_time": 0.5 + "message_time": 0.5, + "disregard_publishing": false }, "clockify": { "enabled": false, @@ -171,4 +172,4 @@ "slack": { "enabled": false } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 7d734ff4fd..a0f2f626a0 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -60,6 +60,11 @@ "decimal": 2, "key": "message_time", "label": "When dialog will show" + }, + { + "type": "boolean", + "key": "disregard_publishing", + "label": "Disregard Publishing" } ] }, From f12df9af7bbccc2487e9e491413c2df0b8b1be77 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 30 Jul 2021 15:44:04 +0200 Subject: [PATCH 098/308] Webpublisher - backend - fix entityType as optional Fix payload for WebpublisherBatchPublishEndpoint --- openpype/modules/webserver/webpublish_routes.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/modules/webserver/webpublish_routes.py b/openpype/modules/webserver/webpublish_routes.py index 805ac11a54..cf6e4920b6 100644 --- a/openpype/modules/webserver/webpublish_routes.py +++ b/openpype/modules/webserver/webpublish_routes.py @@ -102,7 +102,7 @@ class 
WebpublisherHiearchyEndpoint(_RestApiEndpoint): node = assets.get(child["_id"]) if not node: node = Node(child["_id"], - child["data"]["entityType"], + child["data"].get("entityType", "Folder"), child["name"]) assets[child["_id"]] = node @@ -118,7 +118,8 @@ class WebpublisherHiearchyEndpoint(_RestApiEndpoint): asset_doc = asset_docs_by_id.get(parent_id) if asset_doc: # regular node parent_node = Node(parent_id, - asset_doc["data"]["entityType"], + asset_doc["data"].get("entityType", + "Folder"), asset_doc["name"]) else: # root parent_node = Node(parent_id, @@ -173,9 +174,10 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): output = {} print(request) + content = await request.json() batch_path = os.path.join(self.resource.upload_dir, - request.query["batch_id"]) + content["batch"]) openpype_app = self.resource.executable args = [ @@ -190,8 +192,8 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): add_args = { "host": "webpublisher", - "project": request.query["project"], - "user": request.query["user"] + "project": content["project_name"], + "user": content["user"] } for key, value in add_args.items(): From 61be1cbb14b82e986bdbb9f650c50b0bd279183d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 30 Jul 2021 15:44:32 +0200 Subject: [PATCH 099/308] Webpublisher - backend - fix app name --- openpype/pype_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index a4a5cf7a4b..513d7d0865 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -156,7 +156,7 @@ class PypeCommands: os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP_NAME"] = host # to trigger proper plugings + os.environ["AVALON_APP"] = host # to trigger proper plugings # this should be more generic from openpype.hosts.webpublisher.api import install as w_install From 59ff9225d1a659ea2a84b019cddb93261c887143 Mon Sep 17 00:00:00 
2001 From: Petr Kalis Date: Fri, 30 Jul 2021 15:45:15 +0200 Subject: [PATCH 100/308] Webpublisher - backend - set to session for Ftrack family collector --- openpype/hosts/webpublisher/api/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 76709bb2d7..1bf1ef1a6f 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -29,6 +29,7 @@ def install(): log.info(PUBLISH_PATH) io.install() + avalon.Session["AVALON_APP"] = "webpublisher" # because of Ftrack collect avalon.on("application.launched", application_launch) From 276482e43520dd40fd15e880b86b3eac05fcc310 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 2 Aug 2021 17:16:23 +0200 Subject: [PATCH 101/308] Webpublisher - backend - fixes for single file publish --- .../plugins/publish/collect_published_files.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index deadbb856b..67d743278b 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -61,7 +61,6 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_type = "default_task_type" task_name = None - subset = "Main" # temp if ctx["type"] == "task": items = ctx["path"].split('/') asset = items[-2] @@ -74,7 +73,6 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): is_sequence = len(task_data["files"]) > 1 _, extension = os.path.splitext(task_data["files"][0]) - self.log.info("asset:: {}".format(asset)) family, families, subset_template = self._get_family( self.task_type_to_family, task_type, @@ -103,8 +101,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_data["files"], task_dir ) else: - - 
instance.data["representation"] = self._get_single_repre( + instance.data["representations"] = self._get_single_repre( task_dir, task_data["files"] ) @@ -120,15 +117,15 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): return subset def _get_single_repre(self, task_dir, files): - _, ext = os.path.splittext(files[0]) + _, ext = os.path.splitext(files[0]) repre_data = { "name": ext[1:], "ext": ext[1:], - "files": files, + "files": files[0], "stagingDir": task_dir } - - return repre_data + self.log.info("single file repre_data.data:: {}".format(repre_data)) + return [repre_data] def _process_sequence(self, files, task_dir): """Prepare reprentations for sequence of files.""" @@ -147,7 +144,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "files": files, "stagingDir": task_dir } - self.log.info("repre_data.data:: {}".format(repre_data)) + self.log.info("sequences repre_data.data:: {}".format(repre_data)) return [repre_data] def _get_family(self, settings, task_type, is_sequence, extension): @@ -170,7 +167,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): for family, content in task_obj.items(): if is_sequence != content["is_sequence"]: continue - if extension in content["extensions"]: + if extension in content["extensions"] or \ + '' in content["extensions"]: # all extensions setting found_family = family break From 63a0c66c881e47c42a1943635a0ed10b72f80a29 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 2 Aug 2021 17:18:07 +0200 Subject: [PATCH 102/308] Webpublisher - backend - fix - removed shell flag causing problems on Linux --- openpype/modules/webserver/webpublish_routes.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/openpype/modules/webserver/webpublish_routes.py b/openpype/modules/webserver/webpublish_routes.py index cf6e4920b6..5322802130 100644 --- a/openpype/modules/webserver/webpublish_routes.py +++ b/openpype/modules/webserver/webpublish_routes.py @@ -12,6 +12,10 @@ from avalon.api import 
AvalonMongoDB from openpype.lib import OpenPypeMongoConnection from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint +from openpype.lib import PypeLogger + +log = PypeLogger.get_logger("WebServer") + class RestApiResource: """Resource carrying needed info and Avalon DB connection for publish.""" @@ -172,8 +176,7 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): """Triggers headless publishing of batch.""" async def post(self, request) -> Response: output = {} - - print(request) + log.info("WebpublisherBatchPublishEndpoint called") content = await request.json() batch_path = os.path.join(self.resource.upload_dir, @@ -200,9 +203,9 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): args.append("--{}".format(key)) args.append(value) - print("args:: {}".format(args)) + log.info("args:: {}".format(args)) - _exit_code = subprocess.call(args, shell=True) + _exit_code = subprocess.call(args) return Response( status=200, body=self.resource.encode(output), From 40f44edd6f5a3f1234995eab51a9d8265d0430aa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 3 Aug 2021 09:53:04 +0200 Subject: [PATCH 103/308] Webpublisher - backend - fix - wrong key in DB --- openpype/pype_commands.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 513d7d0865..17b6d58ffd 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -143,6 +143,8 @@ class PypeCommands: log = Logger.get_logger() + log.info("remotepublish command") + install() if host: @@ -207,7 +209,7 @@ class PypeCommands: {"$set": { "finish_date": datetime.now(), - "state": "finished_ok", + "status": "finished_ok", "progress": 1 }} ) From c2f48efe10203ce5fcf9015c30e616a5abb10388 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 18:31:50 +0200 Subject: [PATCH 104/308] renamed PypeModule to OpenPypeModule --- openpype/modules/__init__.py | 4 ++-- openpype/modules/base.py | 10 +++++----- 
.../modules/default_modules/avalon_apps/avalon_app.py | 4 ++-- .../default_modules/clockify/clockify_module.py | 4 ++-- .../default_modules/deadline/deadline_module.py | 4 ++-- .../modules/default_modules/ftrack/ftrack_module.py | 4 ++-- .../default_modules/idle_manager/idle_module.py | 4 ++-- openpype/modules/default_modules/launcher_action.py | 4 ++-- .../default_modules/log_viewer/log_view_module.py | 4 ++-- openpype/modules/default_modules/muster/muster.py | 4 ++-- .../modules/default_modules/project_manager_action.py | 4 ++-- .../default_modules/settings_module/settings_action.py | 6 +++--- openpype/modules/default_modules/slack/slack_module.py | 4 ++-- .../default_modules/standalonepublish_action.py | 4 ++-- .../default_modules/sync_server/sync_server_module.py | 4 ++-- .../default_modules/timers_manager/timers_manager.py | 4 ++-- .../default_modules/webserver/webserver_module.py | 4 ++-- 17 files changed, 38 insertions(+), 38 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 3ad9a75161..81853faa38 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- from .base import ( - PypeModule, + OpenPypeModule, OpenPypeInterface, ModulesManager, TrayModulesManager @@ -8,7 +8,7 @@ from .base import ( __all__ = ( - "PypeModule", + "OpenPypeModule", "OpenPypeInterface", "ModulesManager", diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 77d9ddbcec..1f8fa6ae25 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -244,7 +244,7 @@ class MissingInteface(OpenPypeInterface): @six.add_metaclass(ABCMeta) -class PypeModule: +class OpenPypeModule: """Base class of pype module. 
Attributes: @@ -299,7 +299,7 @@ class PypeModule: return {} -class OpenPypeAddOn(PypeModule): +class OpenPypeAddOn(OpenPypeModule): pass @@ -351,11 +351,11 @@ class ModulesManager: for name in dir(module): modules_item = getattr(module, name, None) # Filter globals that are not classes which inherit from - # PypeModule + # OpenPypeModule if ( not inspect.isclass(modules_item) - or modules_item is PypeModule - or not issubclass(modules_item, PypeModule) + or modules_item is OpenPypeModule + or not issubclass(modules_item, OpenPypeModule) ): continue diff --git a/openpype/modules/default_modules/avalon_apps/avalon_app.py b/openpype/modules/default_modules/avalon_apps/avalon_app.py index 7f130bfab1..53e06ec90a 100644 --- a/openpype/modules/default_modules/avalon_apps/avalon_app.py +++ b/openpype/modules/default_modules/avalon_apps/avalon_app.py @@ -1,14 +1,14 @@ import os import openpype from openpype import resources -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) -class AvalonModule(PypeModule, ITrayModule, IWebServerRoutes): +class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes): name = "avalon" def initialize(self, modules_settings): diff --git a/openpype/modules/default_modules/clockify/clockify_module.py b/openpype/modules/default_modules/clockify/clockify_module.py index 83f8d07c3a..a9e989f4ec 100644 --- a/openpype/modules/default_modules/clockify/clockify_module.py +++ b/openpype/modules/default_modules/clockify/clockify_module.py @@ -7,7 +7,7 @@ from .constants import ( CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH ) -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IPluginPaths, @@ -17,7 +17,7 @@ from openpype_interfaces import ( class ClockifyModule( - PypeModule, + OpenPypeModule, ITrayModule, IPluginPaths, IFtrackEventHandlerPaths, diff --git 
a/openpype/modules/default_modules/deadline/deadline_module.py b/openpype/modules/default_modules/deadline/deadline_module.py index 47fd4e9656..a5e189ee52 100644 --- a/openpype/modules/default_modules/deadline/deadline_module.py +++ b/openpype/modules/default_modules/deadline/deadline_module.py @@ -1,9 +1,9 @@ import os -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import IPluginPaths -class DeadlineModule(PypeModule, IPluginPaths): +class DeadlineModule(OpenPypeModule, IPluginPaths): name = "deadline" def initialize(self, modules_settings): diff --git a/openpype/modules/default_modules/ftrack/ftrack_module.py b/openpype/modules/default_modules/ftrack/ftrack_module.py index 6fd2737261..1de152535c 100644 --- a/openpype/modules/default_modules/ftrack/ftrack_module.py +++ b/openpype/modules/default_modules/ftrack/ftrack_module.py @@ -2,7 +2,7 @@ import os import json import collections import openpype -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, @@ -18,7 +18,7 @@ FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) class FtrackModule( - PypeModule, + OpenPypeModule, ITrayModule, IPluginPaths, ITimersManager, diff --git a/openpype/modules/default_modules/idle_manager/idle_module.py b/openpype/modules/default_modules/idle_manager/idle_module.py index d669fcb90e..1a6d71a961 100644 --- a/openpype/modules/default_modules/idle_manager/idle_module.py +++ b/openpype/modules/default_modules/idle_manager/idle_module.py @@ -1,14 +1,14 @@ import platform import collections -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, IIdleManager ) -class IdleManager(PypeModule, ITrayService): +class IdleManager(OpenPypeModule, ITrayService): """ Measure user's idle time in seconds. Idle time resets on keyboard/mouse input. 
Is able to emit signals at specific time idle. diff --git a/openpype/modules/default_modules/launcher_action.py b/openpype/modules/default_modules/launcher_action.py index 728143ffac..e3252e3842 100644 --- a/openpype/modules/default_modules/launcher_action.py +++ b/openpype/modules/default_modules/launcher_action.py @@ -1,8 +1,8 @@ -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class LauncherAction(PypeModule, ITrayAction): +class LauncherAction(OpenPypeModule, ITrayAction): label = "Launcher" name = "launcher_tool" diff --git a/openpype/modules/default_modules/log_viewer/log_view_module.py b/openpype/modules/default_modules/log_viewer/log_view_module.py index 22826d8a54..bc1a98f4ad 100644 --- a/openpype/modules/default_modules/log_viewer/log_view_module.py +++ b/openpype/modules/default_modules/log_viewer/log_view_module.py @@ -1,9 +1,9 @@ from openpype.api import Logger -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule -class LogViewModule(PypeModule, ITrayModule): +class LogViewModule(OpenPypeModule, ITrayModule): name = "log_viewer" def initialize(self, modules_settings): diff --git a/openpype/modules/default_modules/muster/muster.py b/openpype/modules/default_modules/muster/muster.py index 164f20054a..a0e72006af 100644 --- a/openpype/modules/default_modules/muster/muster.py +++ b/openpype/modules/default_modules/muster/muster.py @@ -2,14 +2,14 @@ import os import json import appdirs import requests -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayModule, IWebServerRoutes ) -class MusterModule(PypeModule, ITrayModule, IWebServerRoutes): +class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes): """ Module handling Muster Render credentials. 
This will display dialog asking for user credentials for Muster if not already specified. diff --git a/openpype/modules/default_modules/project_manager_action.py b/openpype/modules/default_modules/project_manager_action.py index 9a36d973b3..c1f984a8cb 100644 --- a/openpype/modules/default_modules/project_manager_action.py +++ b/openpype/modules/default_modules/project_manager_action.py @@ -1,8 +1,8 @@ -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class ProjectManagerAction(PypeModule, ITrayAction): +class ProjectManagerAction(OpenPypeModule, ITrayAction): label = "Project Manager (beta)" name = "project_manager" admin_action = True diff --git a/openpype/modules/default_modules/settings_module/settings_action.py b/openpype/modules/default_modules/settings_module/settings_action.py index a6909e1fdf..7140c57bab 100644 --- a/openpype/modules/default_modules/settings_module/settings_action.py +++ b/openpype/modules/default_modules/settings_module/settings_action.py @@ -1,8 +1,8 @@ -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class SettingsAction(PypeModule, ITrayAction): +class SettingsAction(OpenPypeModule, ITrayAction): """Action to show Setttings tool.""" name = "settings" label = "Studio Settings" @@ -71,7 +71,7 @@ class SettingsAction(PypeModule, ITrayAction): self.settings_window.reset() -class LocalSettingsAction(PypeModule, ITrayAction): +class LocalSettingsAction(OpenPypeModule, ITrayAction): """Action to show Setttings tool.""" name = "local_settings" label = "Settings" diff --git a/openpype/modules/default_modules/slack/slack_module.py b/openpype/modules/default_modules/slack/slack_module.py index 8e6ac10037..e3f7b4ad19 100644 --- a/openpype/modules/default_modules/slack/slack_module.py +++ b/openpype/modules/default_modules/slack/slack_module.py @@ -1,5 +1,5 @@ import os -from 
openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( IPluginPaths, ILaunchHookPaths @@ -8,7 +8,7 @@ from openpype_interfaces import ( SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) -class SlackIntegrationModule(PypeModule, IPluginPaths, ILaunchHookPaths): +class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths): """Allows sending notification to Slack channels during publishing.""" name = "slack" diff --git a/openpype/modules/default_modules/standalonepublish_action.py b/openpype/modules/default_modules/standalonepublish_action.py index 53319f9e11..9321a415a9 100644 --- a/openpype/modules/default_modules/standalonepublish_action.py +++ b/openpype/modules/default_modules/standalonepublish_action.py @@ -2,11 +2,11 @@ import os import platform import subprocess from openpype.lib import get_pype_execute_args -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayAction -class StandAlonePublishAction(PypeModule, ITrayAction): +class StandAlonePublishAction(OpenPypeModule, ITrayAction): label = "Publish" name = "standalonepublish_tool" diff --git a/openpype/modules/default_modules/sync_server/sync_server_module.py b/openpype/modules/default_modules/sync_server/sync_server_module.py index 63f39474b1..e65a410551 100644 --- a/openpype/modules/default_modules/sync_server/sync_server_module.py +++ b/openpype/modules/default_modules/sync_server/sync_server_module.py @@ -7,7 +7,7 @@ import copy from avalon.api import AvalonMongoDB -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule from openpype.api import ( Anatomy, @@ -29,7 +29,7 @@ from .utils import time_function, SyncStatus, EditableScopes log = PypeLogger().get_logger("SyncServer") -class SyncServerModule(PypeModule, ITrayModule): +class SyncServerModule(OpenPypeModule, ITrayModule): 
""" Synchronization server that is syncing published files from local to any of implemented providers (like GDrive, S3 etc.) diff --git a/openpype/modules/default_modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py index b31e14209a..d7dfe390a0 100644 --- a/openpype/modules/default_modules/timers_manager/timers_manager.py +++ b/openpype/modules/default_modules/timers_manager/timers_manager.py @@ -1,6 +1,6 @@ import os import collections -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITimersManager, ITrayService, @@ -10,7 +10,7 @@ from openpype_interfaces import ( from avalon.api import AvalonMongoDB -class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): +class TimersManager(OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes): """ Handles about Timers. Should be able to start/stop all timers at once. diff --git a/openpype/modules/default_modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py index f81bf52410..ff3456f903 100644 --- a/openpype/modules/default_modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -2,14 +2,14 @@ import os import socket from openpype import resources -from openpype.modules import PypeModule +from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, IWebServerRoutes ) -class WebServerModule(PypeModule, ITrayService): +class WebServerModule(OpenPypeModule, ITrayService): name = "webserver" label = "WebServer" From c4869abd568886b3241d0ba5eb57a6f514dbe4e3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 18:31:57 +0200 Subject: [PATCH 105/308] update readme a littlebit --- openpype/modules/README.md | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/modules/README.md 
b/openpype/modules/README.md index 818375461f..d54ba7c835 100644 --- a/openpype/modules/README.md +++ b/openpype/modules/README.md @@ -1,7 +1,7 @@ -# Pype modules -Pype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering. +# OpenPype modules +OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering or special plugins. -## Base class `PypeModule` +## Base class `OpenPypeModule` - abstract class as base for each module - implementation should be module's api withou GUI parts - may implement `get_global_environments` method which should return dictionary of environments that are globally appliable and value is the same for whole studio if launched at any workstation (except os specific paths) @@ -17,6 +17,15 @@ Pype modules should contain separated logic of specific kind of implementation, - interface is class that has defined abstract methods to implement and may contain preimplemented helper methods - module that inherit from an interface must implement those abstract methods otherwise won't be initialized - it is easy to find which module object inherited from which interfaces withh 100% chance they have implemented required methods +- interfaces can be defined in `interfaces.py` inside module directory + - the file can't use relative imports or import anything from other parts + of module itself at the header of file + +## Base class `OpenPypeInterface` +- has nothing implemented +- has ABCMeta as metaclass +- is defined to be able find out classes which inherit from this base to be + able tell this is an Interface ## Global interfaces - few interfaces are implemented for global usage @@ -70,7 +79,7 @@ Pype modules should contain separated logic of specific kind of implementation, - Clockify has more inharitance it's class definition looks like ``` class ClockifyModule( - PypeModule, # 
Says it's Pype module so ModulesManager will try to initialize. + OpenPypeModule, # Says it's Pype module so ModulesManager will try to initialize. ITrayModule, # Says has special implementation when used in tray. IPluginPaths, # Says has plugin paths that want to register (paths to clockify actions for launcher). IFtrackEventHandlerPaths, # Says has Ftrack actions/events for user/server. From b8d25956ac8e483f4f26e4643fa1984e7d7358af Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 18:42:05 +0200 Subject: [PATCH 106/308] fix formatting --- .../modules/default_modules/timers_manager/timers_manager.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py index d7dfe390a0..80f448095f 100644 --- a/openpype/modules/default_modules/timers_manager/timers_manager.py +++ b/openpype/modules/default_modules/timers_manager/timers_manager.py @@ -10,7 +10,9 @@ from openpype_interfaces import ( from avalon.api import AvalonMongoDB -class TimersManager(OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes): +class TimersManager( + OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes +): """ Handles about Timers. Should be able to start/stop all timers at once. 
From 70393b6772f8ccc1a490ec47a3cc247b38efb50b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:00:52 +0200 Subject: [PATCH 107/308] added thread locks on loading functions --- openpype/modules/base.py | 47 ++++++++++++++++++++++++++++++++-------- 1 file changed, 38 insertions(+), 9 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1f8fa6ae25..c9771b60e4 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -5,6 +5,7 @@ import sys import time import inspect import logging +import threading import collections from uuid import uuid4 from abc import ABCMeta, abstractmethod @@ -84,6 +85,13 @@ class _InterfacesClass(_ModuleClass): return self.__attributes__[attr_name] +class _LoadCache: + interfaces_lock = threading.Lock() + modules_lock = threading.Lock() + interfaces_loaded = False + modules_loaded = False + + def get_default_modules_dir(): current_dir = os.path.abspath(os.path.dirname(__file__)) @@ -98,13 +106,26 @@ def get_module_dirs(): def load_interfaces(force=False): - if not force and "openpype_interfaces" in sys.modules: + if _LoadCache.interfaces_loaded and not force: return + if not _LoadCache.interfaces_lock.locked(): + with _LoadCache.interfaces_lock: + _load_interfaces() + _LoadCache.interfaces_loaded = True + else: + # If lock is locked wait until is finished + while _LoadCache.interfaces_lock.locked(): + time.sleep(0.1) + + +def _load_interfaces(): from openpype.lib import import_filepath - sys.modules["openpype_interfaces"] = openpype_interfaces = ( - _InterfacesClass("openpype_interfaces") + modules_key = "openpype_interfaces" + + sys.modules[modules_key] = openpype_interfaces = ( + _InterfacesClass(modules_key) ) log = PypeLogger.get_logger("InterfacesLoader") @@ -156,25 +177,33 @@ def load_interfaces(force=False): def load_modules(force=False): - # TODO add thread lock + if _LoadCache.modules_loaded and not force: + return # First load interfaces # - modules must not be imported 
before interfaces load_interfaces(force) - # Key under which will be modules imported in `sys.modules` - modules_key = "openpype_modules" + if not _LoadCache.modules_lock.locked(): + with _LoadCache.modules_lock: + _load_modules() + _LoadCache.modules_loaded = True + else: + # If lock is locked wait until is finished + while _LoadCache.modules_lock.locked(): + time.sleep(0.1) - # Check if are modules already loaded or no - if not force and modules_key in sys.modules: - return +def _load_modules(): # Import helper functions from lib from openpype.lib import ( import_filepath, import_module_from_dirpath ) + # Key under which will be modules imported in `sys.modules` + modules_key = "openpype_modules" + # Change `sys.modules` sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key) From 611346bf839840d1823a7d28c1a73558dd2550b7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:01:09 +0200 Subject: [PATCH 108/308] added logger to module class --- openpype/modules/base.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c9771b60e4..29fdd9c8df 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -31,6 +31,8 @@ class _ModuleClass(object): super(_ModuleClass, self).__setattr__("__attributes__", dict()) super(_ModuleClass, self).__setattr__("__defaults__", set()) + super(_ModuleClass, self).__setattr__("_log", None) + def __getattr__(self, attr_name): if attr_name not in self.__attributes__: if attr_name in ("__path__"): @@ -45,6 +47,12 @@ class _ModuleClass(object): yield module def __setattr__(self, attr_name, value): + if attr_name in self.__attributes__: + self.log.warning( + "Duplicated name \"{}\" in {}. 
Overriding.".format( + self.name, attr_name + ) + ) self.__attributes__[attr_name] = value def __setitem__(self, key, value): @@ -53,6 +61,14 @@ class _ModuleClass(object): def __getitem__(self, key): return getattr(self, key) + @property + def log(self): + if self._log is None: + super(_ModuleClass, self).__setattr__( + "_log", PypeLogger.get_logger(self.name) + ) + return self._log + def get(self, key, default=None): return self.__attributes__.get(key, default) From aedbded534da2c2d1d9b9c5fe57e2ab2ae2e22b9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:01:19 +0200 Subject: [PATCH 109/308] added few docstrings --- openpype/modules/base.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 29fdd9c8df..6c2eae332f 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -109,12 +109,14 @@ class _LoadCache: def get_default_modules_dir(): + """Path to default OpenPype modules.""" current_dir = os.path.abspath(os.path.dirname(__file__)) return os.path.join(current_dir, "default_modules") def get_module_dirs(): + """List of paths where OpenPype modules can be found.""" dirpaths = [ get_default_modules_dir() ] @@ -122,6 +124,15 @@ def get_module_dirs(): def load_interfaces(force=False): + """Load interfaces from modules into `openpype_interfaces`. + + Only classes which inherit from `OpenPypeInterface` are loaded and stored. + + Args: + force(bool): Force to load interfaces even if are already loaded. + This won't update already loaded and used (cached) interfaces. 
+ """ + if _LoadCache.interfaces_loaded and not force: return @@ -136,6 +147,7 @@ def load_interfaces(force=False): def _load_interfaces(): + # Key under which will be modules imported in `sys.modules` from openpype.lib import import_filepath modules_key = "openpype_interfaces" @@ -193,6 +205,22 @@ def _load_interfaces(): def load_modules(force=False): + """Load OpenPype modules as python modules. + + Modules does not load only classes (like in Interfaces) because there must + be ability to use inner code of module and be able to import it from one + defined place. + + With this it is possible to import module's content from predefined module. + + Function makes sure that `load_interfaces` was triggered. Modules import + has specific order which can't be changed. + + Args: + force(bool): Force to load modules even if are already loaded. + This won't update already loaded and used (cached) modules. + """ + if _LoadCache.modules_loaded and not force: return From c0f669a4b10eda2f5c1f4a5334a17565d38135ea Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:01:29 +0200 Subject: [PATCH 110/308] intrefaces has repr --- openpype/modules/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 6c2eae332f..4ffc8cc1de 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -293,6 +293,9 @@ class _OpenPypeInterfaceMeta(ABCMeta): def __str__(self): return "<'OpenPypeInterface.{}'>".format(self.__name__) + def __repr__(self): + return str(self) + @six.add_metaclass(_OpenPypeInterfaceMeta) class OpenPypeInterface: From cfabde66fb40f9de1c72207806f56e0f019ef3e0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 4 Aug 2021 19:12:23 +0200 Subject: [PATCH 111/308] fixed double import of modules --- openpype/modules/base.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 4ffc8cc1de..d43d5635d1 100644 --- 
a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -177,7 +177,6 @@ def _load_interfaces(): if os.path.exists(interfaces_path): interface_paths.append(interfaces_path) - # print(interface_paths) for full_path in interface_paths: if not os.path.exists(full_path): continue @@ -271,21 +270,14 @@ def _load_modules(): fullpath = os.path.join(dirpath, filename) basename, ext = os.path.splitext(filename) - module = None # TODO add more logic how to define if folder is module or not # - check manifest and content of manifest if os.path.isdir(fullpath): - module = import_module_from_dirpath( - dirpath, filename, modules_key - ) - module_name = filename + import_module_from_dirpath(dirpath, filename, modules_key) elif ext in (".py", ): module = import_filepath(fullpath) - module_name = basename - - if module is not None: - setattr(openpype_modules, module_name, module) + setattr(openpype_modules, basename, module) class _OpenPypeInterfaceMeta(ABCMeta): From 8c5941dde81cc520c032cdba901c7fb5611b9dc9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 6 Aug 2021 12:28:50 +0200 Subject: [PATCH 112/308] Webpublisher - added webpublisher host to extract burnin and review --- openpype/plugins/publish/extract_burnin.py | 3 ++- openpype/plugins/publish/extract_review.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index ef52d51325..809cf438c8 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -44,7 +44,8 @@ class ExtractBurnin(openpype.api.Extractor): "harmony", "fusion", "aftereffects", - "tvpaint" + "tvpaint", + "webpublisher" # "resolve" ] optional = True diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index de54b554e3..07e40b0421 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -44,7 +44,8 @@ 
class ExtractReview(pyblish.api.InstancePlugin): "standalonepublisher", "fusion", "tvpaint", - "resolve" + "resolve", + "webpublisher" ] # Supported extensions From 3e87997b401da0ba7e57ab0707b78ada9168ff2c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 10:54:05 +0200 Subject: [PATCH 113/308] modified how default settings are loaded --- openpype/settings/lib.py | 49 ++++++++++++++++++++++++++++++++++------ 1 file changed, 42 insertions(+), 7 deletions(-) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 4a363910b8..04d8753869 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -329,6 +329,41 @@ def reset_default_settings(): _DEFAULT_SETTINGS = None +def _get_default_settings(): + from openpype.modules import get_module_settings_defs + + defaults = load_openpype_default_settings() + + module_settings_defs = get_module_settings_defs() + for module_settings_def_cls in module_settings_defs: + module_settings_def = module_settings_def_cls() + system_defaults = module_settings_def.get_system_defaults() + for path, value in system_defaults.items(): + if not path: + continue + + subdict = defaults["system_settings"] + path_items = list(path.split("/")) + last_key = path_items.pop(-1) + for key in path_items: + subdict = subdict[key] + subdict[last_key] = value + + project_defaults = module_settings_def.get_project_defaults() + for path, value in project_defaults.items(): + if not path: + continue + + subdict = defaults["project_settings"] + path_items = list(path.split("/")) + last_key = path_items.pop(-1) + for key in path_items: + subdict = subdict[key] + subdict[last_key] = value + + return defaults + + def get_default_settings(): """Get default settings. @@ -339,11 +374,11 @@ def get_default_settings(): dict: Loaded default settings. 
""" # TODO add cacher - return load_openpype_default_settings() - # global _DEFAULT_SETTINGS - # if _DEFAULT_SETTINGS is None: - # _DEFAULT_SETTINGS = load_jsons_from_dir(DEFAULTS_DIR) - # return copy.deepcopy(_DEFAULT_SETTINGS) + + global _DEFAULT_SETTINGS + if _DEFAULT_SETTINGS is None: + _DEFAULT_SETTINGS = _get_default_settings() + return copy.deepcopy(_DEFAULT_SETTINGS) def load_json_file(fpath): @@ -380,8 +415,8 @@ def load_jsons_from_dir(path, *args, **kwargs): "data1": "CONTENT OF FILE" }, "folder2": { - "data1": { - "subfolder1": "CONTENT OF FILE" + "subfolder1": { + "data2": "CONTENT OF FILE" } } } From aa2f5d85701fa56ed8eb5b1a568dceced3c2ead1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 10:54:31 +0200 Subject: [PATCH 114/308] defined class which defined base settings --- openpype/modules/__init__.py | 13 ++++++- openpype/modules/base.py | 71 ++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 2 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 81853faa38..261d65d2ee 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -1,16 +1,25 @@ # -*- coding: utf-8 -*- from .base import ( OpenPypeModule, + OpenPypeAddOn, OpenPypeInterface, + ModulesManager, - TrayModulesManager + TrayModulesManager, + + ModuleSettingsDef, + get_module_settings_defs ) __all__ = ( "OpenPypeModule", + "OpenPypeAddOn", "OpenPypeInterface", "ModulesManager", - "TrayModulesManager" + "TrayModulesManager", + + "ModuleSettingsDef", + "get_module_settings_defs" ) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index d43d5635d1..18bbb75cec 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -920,3 +920,74 @@ class TrayModulesManager(ModulesManager): ), exc_info=True ) + + +def get_module_settings_defs(): + load_modules() + + import openpype_modules + + settings_defs = [] + + log = PypeLogger.get_logger("ModuleSettingsLoad") + + for raw_module in 
openpype_modules: + for attr_name in dir(raw_module): + attr = getattr(raw_module, attr_name) + if ( + not inspect.isclass(attr) + or attr is ModuleSettingsDef + or not issubclass(attr, ModuleSettingsDef) + ): + continue + + if inspect.isabstract(attr): + # Find missing implementations by convetion on `abc` module + not_implemented = [] + for attr_name in dir(attr): + attr = getattr(attr, attr_name, None) + abs_method = getattr( + attr, "__isabstractmethod__", None + ) + if attr and abs_method: + not_implemented.append(attr_name) + + # Log missing implementations + log.warning(( + "Skipping abstract Class: {} in module {}." + " Missing implementations: {}" + ).format( + attr_name, raw_module.__name__, ", ".join(not_implemented) + )) + continue + + settings_defs.append(attr) + + return settings_defs + + +@six.add_metaclass(ABCMeta) +class ModuleSettingsDef: + @abstractmethod + def get_system_schemas(self): + pass + + @abstractmethod + def get_project_schemas(self): + pass + + @abstractmethod + def save_system_defaults(self, data): + pass + + @abstractmethod + def save_project_defaults(self, data): + pass + + @abstractmethod + def get_system_defaults(self): + pass + + @abstractmethod + def get_project_defaults(self): + pass From 12abf31396f38a6ad8164553224e7c53775684ec Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:09:27 +0200 Subject: [PATCH 115/308] tool environments are added in alphabetical order --- openpype/lib/applications.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index ada194f15f..022900e927 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1113,12 +1113,14 @@ def prepare_host_environments(data, implementation_envs=True): if not tool: continue groups_by_name[tool.group.name] = tool.group - tool_by_group_name[tool.group.name].append(tool) + tool_by_group_name[tool.group.name][tool.name] = tool - for group_name, group 
in groups_by_name.items(): + for group_name in sorted(groups_by_name.keys()): + group = groups_by_name[group_name] environments.append(group.environment) added_env_keys.add(group_name) - for tool in tool_by_group_name[group_name]: + for tool_name in sorted(tool_by_group_name[group_name].keys()): + tool = tool_by_group_name[tool_name] environments.append(tool.environment) added_env_keys.add(tool.name) From 5b71c522a50336d7777c7785e09d9e337f5a9503 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:48:06 +0200 Subject: [PATCH 116/308] added missing function to init file --- openpype/modules/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 81853faa38..583480b049 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -2,6 +2,9 @@ from .base import ( OpenPypeModule, OpenPypeInterface, + + load_modules, + ModulesManager, TrayModulesManager ) @@ -11,6 +14,8 @@ __all__ = ( "OpenPypeModule", "OpenPypeInterface", + "load_modules", + "ModulesManager", "TrayModulesManager" ) From b6383ccb9afec966edd533e293f050160e23a9db Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:50:16 +0200 Subject: [PATCH 117/308] fixed conflict changes --- .../ftrack/event_handlers_user/action_where_run_ask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index 2c427cfff7..b4133fbe78 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -2,7 +2,7 @@ import platform import socket import getpass -from openpype.modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction, statics_icon class 
ActionWhereIRun(BaseAction): From a44805ae36a378c11e7861f90b891914d91149b6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 12:52:15 +0200 Subject: [PATCH 118/308] removed unused import --- .../ftrack/event_handlers_user/action_where_run_ask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py index b4133fbe78..0d69913996 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_where_run_ask.py @@ -2,7 +2,7 @@ import platform import socket import getpass -from openpype_modules.ftrack.lib import BaseAction, statics_icon +from openpype_modules.ftrack.lib import BaseAction class ActionWhereIRun(BaseAction): From f76b5b08679f1e327d1047b5e5217a3e662dc37f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 14:28:12 +0200 Subject: [PATCH 119/308] use constants for schema keys --- openpype/settings/entities/lib.py | 3 +++ openpype/settings/entities/root_entities.py | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index e58281644a..307792edc9 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -23,6 +23,9 @@ TEMPLATE_METADATA_KEYS = ( DEFAULT_VALUES_KEY, ) +SCHEMA_KEY_SYSTEM_SETTINGS = "system_schema" +SCHEMA_KEY_PROJECT_SETTINGS = "projects_schema" + template_key_pattern = re.compile(r"(\{.*?[^{0]*\})") diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index 00677480e8..39b5cb5096 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -9,6 +9,8 @@ from .base_entity import BaseItemEntity from .lib import ( NOT_SET, 
WRAPPER_TYPES, + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS, OverrideState, SchemasHub ) @@ -468,7 +470,7 @@ class SystemSettings(RootEntity): ): if schema_hub is None: # Load system schemas - schema_hub = SchemasHub("system_schema") + schema_hub = SchemasHub(SCHEMA_KEY_SYSTEM_SETTINGS) super(SystemSettings, self).__init__(schema_hub, reset) @@ -599,7 +601,7 @@ class ProjectSettings(RootEntity): if schema_hub is None: # Load system schemas - schema_hub = SchemasHub("projects_schema") + schema_hub = SchemasHub(SCHEMA_KEY_PROJECT_SETTINGS) super(ProjectSettings, self).__init__(schema_hub, reset) From 50e2fce229992d8f1ca5800b3dcaff3796604cd4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 14:28:19 +0200 Subject: [PATCH 120/308] remove TODO --- openpype/settings/lib.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 04d8753869..04e8bffd8f 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -373,8 +373,6 @@ def get_default_settings(): Returns: dict: Loaded default settings. 
""" - # TODO add cacher - global _DEFAULT_SETTINGS if _DEFAULT_SETTINGS is None: _DEFAULT_SETTINGS = _get_default_settings() From 5db15b273e2a6a36309c51067f432d7d784b369e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 18:20:08 +0200 Subject: [PATCH 121/308] settings def has id --- openpype/modules/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 18bbb75cec..8b575bc8cd 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -968,6 +968,14 @@ def get_module_settings_defs(): @six.add_metaclass(ABCMeta) class ModuleSettingsDef: + _id = None + + @property + def id(self): + if self._id is None: + self._id = uuid4() + return self._id + @abstractmethod def get_system_schemas(self): pass From 9d7f0db6d8177860930e533d082bf7e549f2e074 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 9 Aug 2021 18:25:07 +0200 Subject: [PATCH 122/308] changed how schemas are get from openpype --- openpype/modules/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 8b575bc8cd..3d3d7ae6cb 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -977,11 +977,11 @@ class ModuleSettingsDef: return self._id @abstractmethod - def get_system_schemas(self): + def get_settings_schemas(self, schema_type): pass @abstractmethod - def get_project_schemas(self): + def get_dynamic_schemas(self, schema_type): pass @abstractmethod From 5099f5f853d96e5846c7eab31aa0557c3c2ff4da Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 10:33:28 +0200 Subject: [PATCH 123/308] small condition modifications --- openpype/settings/entities/base_entity.py | 2 +- openpype/settings/entities/dict_conditional.py | 2 +- openpype/settings/entities/dict_mutable_keys_entity.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/entities/base_entity.py 
b/openpype/settings/entities/base_entity.py index b4ebe885f5..d9dcf633e5 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -253,7 +253,7 @@ class BaseItemEntity(BaseEntity): # Validate that env group entities will be stored into file. # - env group entities must store metadata which is not possible if # metadata would be outside of file - if not self.file_item and self.is_env_group: + if self.file_item is None and self.is_env_group: reason = ( "Environment item is not inside file" " item so can't store metadata for defaults." diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py index d275d8ac3d..fc7cbfdee5 100644 --- a/openpype/settings/entities/dict_conditional.py +++ b/openpype/settings/entities/dict_conditional.py @@ -469,7 +469,7 @@ class DictConditionalEntity(ItemEntity): output = {} for key, child_obj in children_items: child_value = child_obj.settings_value() - if not child_obj.is_file and not child_obj.file_item: + if not child_obj.is_file and child_obj.file_item is None: for _key, _value in child_value.items(): new_key = "/".join([key, _key]) output[new_key] = _value diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index c3df935269..f75fb23d82 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -261,7 +261,7 @@ class DictMutableKeysEntity(EndpointEntity): raise EntitySchemaError(self, reason) # TODO Ability to store labels should be defined with different key - if self.collapsible_key and not self.file_item: + if self.collapsible_key and self.file_item is None: reason = ( "Modifiable dictionary with collapsible keys is not under" " file item so can't store metadata." 
From 13720249575860a6aa01c1b7b2e1f21d3ccfefc6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 10:34:26 +0200 Subject: [PATCH 124/308] added loading of setttings modules definitions in SchemaHub --- openpype/settings/entities/lib.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 307792edc9..a72908967f 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -117,14 +117,27 @@ class SchemasHub: # It doesn't make sence to reload types on each reset as they can't be # changed self._load_types() + # Attributes for modules settings + self._modules_settings_defs_by_id = {} + self._dynamic_schemas_by_module_id = {} # Trigger reset if reset: self.reset() def reset(self): + self._load_modules_settings_defs() self._load_schemas() + def _load_modules_settings_defs(self): + from openpype.modules import get_module_settings_defs + + module_settings_defs = get_module_settings_defs() + for module_settings_def_cls in module_settings_defs: + module_settings_def = module_settings_def_cls() + def_id = module_settings_def.id + self._modules_settings_defs_by_id[def_id] = module_settings_def + @property def gui_types(self): return self._gui_types From 2e9e0ba09ff17393b90d18717b0f4fc94e09cb27 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 10:34:52 +0200 Subject: [PATCH 125/308] use constant for extending schema types --- openpype/settings/entities/lib.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index a72908967f..4b6ed5a365 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -26,6 +26,10 @@ TEMPLATE_METADATA_KEYS = ( SCHEMA_KEY_SYSTEM_SETTINGS = "system_schema" SCHEMA_KEY_PROJECT_SETTINGS = "projects_schema" +SCHEMA_EXTEND_TYPES = ( + "schema", "template", "schema_template", "dynamic_schema" +) + 
template_key_pattern = re.compile(r"(\{.*?[^{0]*\})") @@ -217,7 +221,7 @@ class SchemasHub: list: Resolved schema data. """ schema_type = schema_data["type"] - if schema_type not in ("schema", "template", "schema_template"): + if schema_type not in SCHEMA_EXTEND_TYPES: return [schema_data] if schema_type == "schema": From 2bb74c68e9b8ba41de9268f95c0264f4201bc98a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 10:35:48 +0200 Subject: [PATCH 126/308] added resolving of dynamic module items --- openpype/settings/entities/lib.py | 45 +++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 4b6ed5a365..dd216f4d90 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -146,6 +146,23 @@ class SchemasHub: def gui_types(self): return self._gui_types + def resolve_dynamic_schema(self, dynamic_key): + output = [] + for def_id, def_keys in self._dynamic_schemas_by_module_id.items(): + if dynamic_key in def_keys: + def_schema = def_keys[dynamic_key] + if not def_schema: + continue + + if isinstance(def_schema, dict): + def_schema = [def_schema] + + for item in def_schema: + item["_module_id"] = def_id + item["_module_store_key"] = dynamic_key + output.extend(def_schema) + return output + def get_schema(self, schema_name): """Get schema definition data by it's name. 
@@ -229,6 +246,9 @@ class SchemasHub: self.get_schema(schema_data["name"]) ) + if schema_type == "dynamic_schema": + return self.resolve_dynamic_schema(schema_data["name"]) + template_name = schema_data["name"] template_def = self.get_template(template_name) @@ -329,6 +349,7 @@ class SchemasHub: self._crashed_on_load = {} self._loaded_templates = {} self._loaded_schemas = {} + self._dynamic_schemas_by_module_id = {} dirpath = os.path.join( os.path.dirname(os.path.abspath(__file__)), @@ -337,6 +358,7 @@ class SchemasHub: ) loaded_schemas = {} loaded_templates = {} + dynamic_schemas_by_module_id = {} for root, _, filenames in os.walk(dirpath): for filename in filenames: basename, ext = os.path.splitext(filename) @@ -386,8 +408,31 @@ class SchemasHub: ) loaded_schemas[basename] = schema_data + defs_iter = self._modules_settings_defs_by_id.items() + for def_id, module_settings_def in defs_iter: + dynamic_schemas_by_module_id[def_id] = ( + module_settings_def.get_dynamic_schemas(self._schema_subfolder) + ) + module_schemas = module_settings_def.get_settings_schemas( + self._schema_subfolder + ) + for key, schema_data in module_schemas.items(): + if isinstance(schema_data, list): + if key in loaded_templates: + raise KeyError( + "Duplicated template key \"{}\"".format(key) + ) + loaded_templates[key] = schema_data + else: + if key in loaded_schemas: + raise KeyError( + "Duplicated schema key \"{}\"".format(key) + ) + loaded_schemas[key] = schema_data + self._loaded_templates = loaded_templates self._loaded_schemas = loaded_schemas + self._dynamic_schemas_by_module_id = dynamic_schemas_by_module_id def _fill_template(self, child_data, template_def): """Fill template based on schema definition and template definition. 
From b648dd7dc325029906cf69dd8a6887ee31e567ca Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 10:35:58 +0200 Subject: [PATCH 127/308] load types on each reset --- openpype/settings/entities/lib.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index dd216f4d90..91e66eec8e 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -118,9 +118,6 @@ class SchemasHub: self._loaded_templates = {} self._loaded_schemas = {} - # It doesn't make sence to reload types on each reset as they can't be - # changed - self._load_types() # Attributes for modules settings self._modules_settings_defs_by_id = {} self._dynamic_schemas_by_module_id = {} @@ -131,6 +128,7 @@ class SchemasHub: def reset(self): self._load_modules_settings_defs() + self._load_types() self._load_schemas() def _load_modules_settings_defs(self): From 66fbe78b5a24f3abc8fc2e1ddcd0d5c01c8d2aaf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Aug 2021 11:36:32 +0200 Subject: [PATCH 128/308] #1894 - adds host to template_name_profiles for filtering --- openpype/plugins/publish/integrate_new.py | 4 +++- .../projects_schema/schemas/schema_global_publish.json | 6 ++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 3504206fe1..7c81c95f39 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -300,7 +300,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): task_name = io.Session.get("AVALON_TASK") family = self.main_family_from_instance(instance) - key_values = {"families": family, "tasks": task_name} + key_values = {"families": family, + "tasks": task_name, + "hosts": instance.data["anatomyData"]["app"]} profile = filter_profiles(self.template_name_profiles, key_values, logger=self.log) diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index d265988534..4b91072eb6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -496,6 +496,12 @@ "type": "list", "object_type": "text" }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, { "key": "tasks", "label": "Task names", From 32011838b3e4404c09249b2b60c69e533d63b300 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:10:33 +0200 Subject: [PATCH 129/308] renamed variable --- openpype/settings/entities/lib.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 91e66eec8e..b87845b95e 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -120,7 +120,7 @@ class SchemasHub: # Attributes for modules settings self._modules_settings_defs_by_id = {} - self._dynamic_schemas_by_module_id = {} + self._dynamic_schemas_def_by_id = {} # Trigger reset if reset: @@ -146,7 +146,7 @@ class SchemasHub: def resolve_dynamic_schema(self, dynamic_key): output = [] - for def_id, def_keys in self._dynamic_schemas_by_module_id.items(): + for def_id, def_keys in self._dynamic_schemas_def_by_id.items(): if dynamic_key in def_keys: def_schema = def_keys[dynamic_key] if not def_schema: @@ -347,7 +347,7 @@ class SchemasHub: self._crashed_on_load = {} self._loaded_templates = {} self._loaded_schemas = {} - self._dynamic_schemas_by_module_id = {} + self._dynamic_schemas_def_by_id = {} dirpath = os.path.join( os.path.dirname(os.path.abspath(__file__)), @@ -356,7 +356,7 @@ class SchemasHub: ) loaded_schemas = {} loaded_templates = {} - dynamic_schemas_by_module_id = {} + 
dynamic_schemas_def_by_id = {} for root, _, filenames in os.walk(dirpath): for filename in filenames: basename, ext = os.path.splitext(filename) @@ -408,7 +408,7 @@ class SchemasHub: defs_iter = self._modules_settings_defs_by_id.items() for def_id, module_settings_def in defs_iter: - dynamic_schemas_by_module_id[def_id] = ( + dynamic_schemas_def_by_id[def_id] = ( module_settings_def.get_dynamic_schemas(self._schema_subfolder) ) module_schemas = module_settings_def.get_settings_schemas( @@ -430,7 +430,7 @@ class SchemasHub: self._loaded_templates = loaded_templates self._loaded_schemas = loaded_schemas - self._dynamic_schemas_by_module_id = dynamic_schemas_by_module_id + self._dynamic_schemas_def_by_id = dynamic_schemas_def_by_id def _fill_template(self, child_data, template_def): """Fill template based on schema definition and template definition. From f38c7a462e96755d6dce8255becef57810de9b06 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:15:21 +0200 Subject: [PATCH 130/308] added few attributes for dynamic schemas --- openpype/settings/entities/base_entity.py | 12 ++++++++++++ openpype/settings/entities/lib.py | 3 +-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index d9dcf633e5..832c8ab854 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -104,6 +104,12 @@ class BaseItemEntity(BaseEntity): self.is_group = False # Entity's value will be stored into file with name of it's key self.is_file = False + # Default values are not stored to an openpype file + # - these must not be set through schemas directly + self.dynamic_schema_id = None + self.is_dynamic_schema_node = False + self.is_in_dynamic_schema_node = False + # Reference to parent entity which has `is_group` == True # - stays as None if none of parents is group self.group_item = None @@ -800,6 +806,12 @@ class 
ItemEntity(BaseItemEntity): self.is_dynamic_item = is_dynamic_item self.is_file = self.schema_data.get("is_file", False) + # These keys have underscore as they must not be set in schemas + self.dynamic_schema_id = self.schema_data.get( + "_dynamic_schema_id", None + ) + self.is_dynamic_schema_node = self.dynamic_schema_id is not None + self.is_group = self.schema_data.get("is_group", False) self.is_in_dynamic_item = bool( not self.is_dynamic_item diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index b87845b95e..2a1bbaa115 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -156,8 +156,7 @@ class SchemasHub: def_schema = [def_schema] for item in def_schema: - item["_module_id"] = def_id - item["_module_store_key"] = dynamic_key + item["_dynamic_schema_id"] = def_id output.extend(def_schema) return output From 5541b3fd0cc8fd5fe60cb11a8c22dfbc8f4911db Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:16:57 +0200 Subject: [PATCH 131/308] added few conditions so it is possbile to load dynamic schemas --- openpype/settings/entities/base_entity.py | 25 ++++++++++++++++---- openpype/settings/entities/input_entities.py | 6 ++++- openpype/settings/entities/item_entities.py | 7 +++++- 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 832c8ab854..bea90882a7 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -841,10 +841,20 @@ class ItemEntity(BaseItemEntity): self._require_restart_on_change = require_restart_on_change # File item reference - if self.parent.is_file: - self.file_item = self.parent - elif self.parent.file_item: - self.file_item = self.parent.file_item + if not self.is_dynamic_schema_node: + self.is_in_dynamic_schema_node = ( + self.parent.is_dynamic_schema_node + or self.parent.is_in_dynamic_schema_node + ) + + if ( + 
not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + ): + if self.parent.is_file: + self.file_item = self.parent + elif self.parent.file_item: + self.file_item = self.parent.file_item # Group item reference if self.parent.is_group: @@ -903,7 +913,12 @@ class ItemEntity(BaseItemEntity): ) raise EntitySchemaError(self, reason) - if self.is_file and self.file_item is not None: + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + and self.is_file + and self.file_item is not None + ): reason = ( "Entity has set `is_file` to true but" " it's parent is already marked as file item." diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 6952529963..b65c1c440e 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -116,7 +116,11 @@ class InputEntity(EndpointEntity): def schema_validations(self): # Input entity must have file parent. - if not self.file_item: + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + and self.file_item is None + ): raise EntitySchemaError(self, "Missing parent file entity.") super(InputEntity, self).schema_validations() diff --git a/openpype/settings/entities/item_entities.py b/openpype/settings/entities/item_entities.py index 7e84f8c801..1e4f1025cc 100644 --- a/openpype/settings/entities/item_entities.py +++ b/openpype/settings/entities/item_entities.py @@ -215,7 +215,12 @@ class ListStrictEntity(ItemEntity): def schema_validations(self): # List entity must have file parent. - if not self.file_item and not self.is_file: + if ( + not self.is_dynamic_schema_node + and not self.is_in_dynamic_schema_node + and not self.is_file + and self.file_item is None + ): raise EntitySchemaError( self, "Missing file entity in hierarchy." 
) From eaa1499510566f5d6ebc8308db50b4c5d8780c5a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:17:17 +0200 Subject: [PATCH 132/308] conditional dict does not care about paths as it must be group --- openpype/settings/entities/dict_conditional.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py index fc7cbfdee5..8a944e5fdc 100644 --- a/openpype/settings/entities/dict_conditional.py +++ b/openpype/settings/entities/dict_conditional.py @@ -468,13 +468,7 @@ class DictConditionalEntity(ItemEntity): output = {} for key, child_obj in children_items: - child_value = child_obj.settings_value() - if not child_obj.is_file and child_obj.file_item is None: - for _key, _value in child_value.items(): - new_key = "/".join([key, _key]) - output[new_key] = _value - else: - output[key] = child_value + output[key] = child_obj.settings_value() return output if self.is_group: From f65dee0a0e8b40f42eb8485faf1377d01542f52a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:22:53 +0200 Subject: [PATCH 133/308] added method which collects dynamic schema entities --- openpype/settings/entities/base_entity.py | 22 ++++++++++++++++++- .../settings/entities/dict_conditional.py | 4 ++++ .../entities/dict_immutable_keys_entity.py | 7 ++++++ openpype/settings/entities/input_entities.py | 4 ++++ openpype/settings/entities/item_entities.py | 7 ++++++ openpype/settings/entities/lib.py | 9 ++++++++ openpype/settings/entities/root_entities.py | 13 ++++++++++- 7 files changed, 64 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index bea90882a7..0d2923f9e0 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -476,7 +476,15 @@ class BaseItemEntity(BaseEntity): @abstractmethod def settings_value(self): - """Value of an 
item without key.""" + """Value of an item without key without dynamic items.""" + pass + + @abstractmethod + def collect_dynamic_schema_entities(self): + """Collect entities that are on top of dynamically added schemas. + + This method make sence only when defaults are saved. + """ pass @abstractmethod @@ -905,6 +913,18 @@ class ItemEntity(BaseItemEntity): def root_key(self): return self.root_item.root_key + @abstractmethod + def collect_dynamic_schema_entities(self, collector): + """Collect entities that are on top of dynamically added schemas. + + This method make sence only when defaults are saved. + + Args: + collector(DynamicSchemaValueCollector): Object where dynamic + entities are stored. + """ + pass + def schema_validations(self): if not self.label and self.use_label_wrap: reason = ( diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py index 8a944e5fdc..44775e9113 100644 --- a/openpype/settings/entities/dict_conditional.py +++ b/openpype/settings/entities/dict_conditional.py @@ -455,6 +455,10 @@ class DictConditionalEntity(ItemEntity): return True return False + def collect_dynamic_schema_entities(self, collector): + if self.is_dynamic_schema_node: + collector.add_entity(self) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET diff --git a/openpype/settings/entities/dict_immutable_keys_entity.py b/openpype/settings/entities/dict_immutable_keys_entity.py index bde5304787..24cd9401b9 100644 --- a/openpype/settings/entities/dict_immutable_keys_entity.py +++ b/openpype/settings/entities/dict_immutable_keys_entity.py @@ -318,6 +318,13 @@ class DictImmutableKeysEntity(ItemEntity): return True return False + def collect_dynamic_schema_entities(self, collector): + for child_obj in self.non_gui_children.values(): + child_obj.collect_dynamic_schema_entities(collector) + + if self.is_dynamic_schema_node: + collector.add_entity(self) + def settings_value(self): if 
self._override_state is OverrideState.NOT_DEFINED: return NOT_SET diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index b65c1c440e..469fdee310 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -49,6 +49,10 @@ class EndpointEntity(ItemEntity): super(EndpointEntity, self).schema_validations() + def collect_dynamic_schema_entities(self, collector): + if self.is_dynamic_schema_node: + collector.add_entity(self) + @abstractmethod def _settings_value(self): pass diff --git a/openpype/settings/entities/item_entities.py b/openpype/settings/entities/item_entities.py index 1e4f1025cc..3823a25c60 100644 --- a/openpype/settings/entities/item_entities.py +++ b/openpype/settings/entities/item_entities.py @@ -112,6 +112,9 @@ class PathEntity(ItemEntity): def set(self, value): self.child_obj.set(value) + def collect_dynamic_schema_entities(self, *args, **kwargs): + self.child_obj.collect_dynamic_schema_entities(*args, **kwargs) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET @@ -251,6 +254,10 @@ class ListStrictEntity(ItemEntity): for idx, item in enumerate(new_value): self.children[idx].set(item) + def collect_dynamic_schema_entities(self, collector): + if self.is_dynamic_schema_node: + collector.add_entity(self) + def settings_value(self): if self._override_state is OverrideState.NOT_DEFINED: return NOT_SET diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 2a1bbaa115..98dede39e8 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -663,3 +663,12 @@ class SchemasHub: if found_idx is not None: metadata_item = template_def.pop(found_idx) return metadata_item + + +class DynamicSchemaValueCollector: + def __init__(self, schema_hub): + self._schema_hub = schema_hub + self._dynamic_entities = [] + + def add_entity(self, entity): + 
self._dynamic_entities.append(entity) diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index 39b5cb5096..2c88016344 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -12,7 +12,8 @@ from .lib import ( SCHEMA_KEY_SYSTEM_SETTINGS, SCHEMA_KEY_PROJECT_SETTINGS, OverrideState, - SchemasHub + SchemasHub, + DynamicSchemaValueCollector ) from .exceptions import ( SchemaError, @@ -259,6 +260,16 @@ class RootEntity(BaseItemEntity): output[key] = child_obj.value return output + def collect_dynamic_schema_entities(self): + output = DynamicSchemaValueCollector(self.schema_hub) + if self._override_state is not OverrideState.DEFAULTS: + return output + + for child_obj in self.non_gui_children.values(): + child_obj.collect_dynamic_schema_entities(output) + + return output + def settings_value(self): """Value for current override state with metadata. From 6282e8d1ad57e85e7c8c8e11ddb201b4bf56b21a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:23:23 +0200 Subject: [PATCH 134/308] skip dynamic schema entities in settings values method --- openpype/settings/entities/dict_immutable_keys_entity.py | 3 +++ openpype/settings/entities/root_entities.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/openpype/settings/entities/dict_immutable_keys_entity.py b/openpype/settings/entities/dict_immutable_keys_entity.py index 24cd9401b9..a81a64c183 100644 --- a/openpype/settings/entities/dict_immutable_keys_entity.py +++ b/openpype/settings/entities/dict_immutable_keys_entity.py @@ -332,6 +332,9 @@ class DictImmutableKeysEntity(ItemEntity): if self._override_state is OverrideState.DEFAULTS: output = {} for key, child_obj in self.non_gui_children.items(): + if child_obj.is_dynamic_schema_node: + continue + child_value = child_obj.settings_value() if not child_obj.is_file and not child_obj.file_item: for _key, _value in child_value.items(): diff --git 
a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index 2c88016344..b178e3fa36 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -281,6 +281,8 @@ class RootEntity(BaseItemEntity): if self._override_state is not OverrideState.DEFAULTS: output = {} for key, child_obj in self.non_gui_children.items(): + if child_obj.is_dynamic_schema_node: + continue value = child_obj.settings_value() if value is not NOT_SET: output[key] = value From 37ef6d022355f7f325b933c6192665c5811cde6c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:23:39 +0200 Subject: [PATCH 135/308] ignore file handling for dynamic schema nodes --- openpype/settings/entities/dict_immutable_keys_entity.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/dict_immutable_keys_entity.py b/openpype/settings/entities/dict_immutable_keys_entity.py index a81a64c183..8871a3a3d9 100644 --- a/openpype/settings/entities/dict_immutable_keys_entity.py +++ b/openpype/settings/entities/dict_immutable_keys_entity.py @@ -330,13 +330,20 @@ class DictImmutableKeysEntity(ItemEntity): return NOT_SET if self._override_state is OverrideState.DEFAULTS: + is_dynamic_schema_node = ( + self.is_dynamic_schema_node or self.is_in_dynamic_schema_node + ) output = {} for key, child_obj in self.non_gui_children.items(): if child_obj.is_dynamic_schema_node: continue child_value = child_obj.settings_value() - if not child_obj.is_file and not child_obj.file_item: + if ( + not is_dynamic_schema_node + and not child_obj.is_file + and not child_obj.file_item + ): for _key, _value in child_value.items(): new_key = "/".join([key, _key]) output[new_key] = _value From cf9114b0f1dfbea06bb8b688dbfa3627a885d41c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:23:57 +0200 Subject: [PATCH 136/308] added schema validation of dynamic schemas --- 
openpype/settings/entities/base_entity.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py index 0d2923f9e0..f5f5b4d761 100644 --- a/openpype/settings/entities/base_entity.py +++ b/openpype/settings/entities/base_entity.py @@ -253,9 +253,18 @@ class BaseItemEntity(BaseEntity): ) # Group item can be only once in on hierarchy branch. - if self.is_group and self.group_item: + if self.is_group and self.group_item is not None: raise SchemeGroupHierarchyBug(self) + # Group item can be only once in on hierarchy branch. + if self.group_item is not None and self.is_dynamic_schema_node: + reason = ( + "Dynamic schema is inside grouped item {}." + " Change group hierarchy or remove dynamic" + " schema to be able work properly." + ).format(self.group_item.path) + raise EntitySchemaError(self, reason) + # Validate that env group entities will be stored into file. # - env group entities must store metadata which is not possible if # metadata would be outside of file From fff590f7f88aaf0130adfc7cf6acd98cc6dac05c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 12:43:16 +0200 Subject: [PATCH 137/308] add getter method for dynamic schema definitions --- openpype/settings/entities/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 98dede39e8..3877b49648 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -431,6 +431,9 @@ class SchemasHub: self._loaded_schemas = loaded_schemas self._dynamic_schemas_def_by_id = dynamic_schemas_def_by_id + def get_dynamic_schema_def(self, schema_def_id): + return self._dynamic_schemas_def_by_id.get(schema_def_id) + def _fill_template(self, child_data, template_def): """Fill template based on schema definition and template definition. 
From 07fd5ba12cdbbe3d4d04ccd83cfda3ec277f9094 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 10 Aug 2021 11:51:59 +0100 Subject: [PATCH 138/308] Improved animation workflow --- .../plugins/create/create_animation.py | 42 +++- .../blender/plugins/load/load_animation.py | 226 ++---------------- .../blender/plugins/load/load_layout_json.py | 25 +- .../hosts/blender/plugins/load/load_model.py | 2 - .../hosts/blender/plugins/load/load_rig.py | 27 ++- .../plugins/publish/collect_instances.py | 42 ++++ .../blender/plugins/publish/extract_blend.py | 2 +- .../publish/extract_blend_animation.py | 53 ++++ .../plugins/publish/extract_fbx_animation.py | 76 +++--- .../unreal/plugins/publish/extract_layout.py | 2 +- 10 files changed, 235 insertions(+), 262 deletions(-) create mode 100644 openpype/hosts/blender/plugins/publish/extract_blend_animation.py diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py index 9aebf7e9b7..f7887b7e80 100644 --- a/openpype/hosts/blender/plugins/create/create_animation.py +++ b/openpype/hosts/blender/plugins/create/create_animation.py @@ -2,11 +2,13 @@ import bpy -from avalon import api, blender -import openpype.hosts.blender.api.plugin +from avalon import api +from avalon.blender import lib, ops +from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api import plugin -class CreateAnimation(openpype.hosts.blender.api.plugin.Creator): +class CreateAnimation(plugin.Creator): """Animation output for character rigs""" name = "animationMain" @@ -15,16 +17,36 @@ class CreateAnimation(openpype.hosts.blender.api.plugin.Creator): icon = "male" def process(self): + """ Run the creator on Blender main thread""" + mti = ops.MainThreadItem(self._process) + ops.execute_in_main_thread(mti) + + def _process(self): + # Get Instance Containter or create it if it does not exist + instances = bpy.data.collections.get(AVALON_INSTANCES) + if not instances: 
+ instances = bpy.data.collections.new(name=AVALON_INSTANCES) + bpy.context.scene.collection.children.link(instances) + + # Create instance object + # name = self.name + # if not name: asset = self.data["asset"] subset = self.data["subset"] - name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - collection = bpy.data.collections.new(name=name) - bpy.context.scene.collection.children.link(collection) + name = plugin.asset_name(asset, subset) + # asset_group = bpy.data.objects.new(name=name, object_data=None) + # asset_group.empty_display_type = 'SINGLE_ARROW' + asset_group = bpy.data.collections.new(name=name) + instances.children.link(asset_group) self.data['task'] = api.Session.get('AVALON_TASK') - blender.lib.imprint(collection, self.data) + lib.imprint(asset_group, self.data) if (self.options or {}).get("useSelection"): - for obj in blender.lib.get_selection(): - collection.objects.link(obj) + selected = lib.get_selection() + for obj in selected: + asset_group.objects.link(obj) + elif (self.options or {}).get("asset_group"): + obj = (self.options or {}).get("asset_group") + asset_group.objects.link(obj) - return collection + return asset_group diff --git a/openpype/hosts/blender/plugins/load/load_animation.py b/openpype/hosts/blender/plugins/load/load_animation.py index 4025fdfa74..4f589011dd 100644 --- a/openpype/hosts/blender/plugins/load/load_animation.py +++ b/openpype/hosts/blender/plugins/load/load_animation.py @@ -1,20 +1,19 @@ """Load an animation in Blender.""" import logging -from pathlib import Path -from pprint import pformat from typing import Dict, List, Optional -from avalon import api, blender import bpy -import openpype.hosts.blender.api.plugin + +from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api import plugin logger = logging.getLogger("openpype").getChild( "blender").getChild("load_animation") -class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader): +class 
BlendAnimationLoader(plugin.AssetLoader): """Load animations from a .blend file. Warning: @@ -29,67 +28,6 @@ class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader): icon = "code-fork" color = "orange" - def _remove(self, objects, lib_container): - for obj in list(objects): - if obj.type == 'ARMATURE': - bpy.data.armatures.remove(obj.data) - elif obj.type == 'MESH': - bpy.data.meshes.remove(obj.data) - - bpy.data.collections.remove(bpy.data.collections[lib_container]) - - def _process(self, libpath, lib_container, container_name): - - relative = bpy.context.preferences.filepaths.use_relative_paths - with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (_, data_to): - data_to.collections = [lib_container] - - scene = bpy.context.scene - - scene.collection.children.link(bpy.data.collections[lib_container]) - - anim_container = scene.collection.children[lib_container].make_local() - - meshes = [obj for obj in anim_container.objects if obj.type == 'MESH'] - armatures = [ - obj for obj in anim_container.objects if obj.type == 'ARMATURE'] - - # Should check if there is only an armature? - - objects_list = [] - - # Link meshes first, then armatures. - # The armature is unparented for all the non-local meshes, - # when it is made local. 
- for obj in meshes + armatures: - - obj = obj.make_local() - - obj.data.make_local() - - anim_data = obj.animation_data - - if anim_data is not None and anim_data.action is not None: - - anim_data.action.make_local() - - if not obj.get(blender.pipeline.AVALON_PROPERTY): - - obj[blender.pipeline.AVALON_PROPERTY] = dict() - - avalon_info = obj[blender.pipeline.AVALON_PROPERTY] - avalon_info.update({"container_name": container_name}) - - objects_list.append(obj) - - anim_container.pop(blender.pipeline.AVALON_PROPERTY) - - bpy.ops.object.select_all(action='DESELECT') - - return objects_list - def process_asset( self, context: dict, name: str, namespace: Optional[str] = None, options: Optional[Dict] = None @@ -101,148 +39,32 @@ class BlendAnimationLoader(openpype.hosts.blender.api.plugin.AssetLoader): context: Full parenthood of representation to load options: Additional settings dictionary """ - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - lib_container = openpype.hosts.blender.api.plugin.asset_name(asset, subset) - container_name = openpype.hosts.blender.api.plugin.asset_name( - asset, subset, namespace - ) - container = bpy.data.collections.new(lib_container) - container.name = container_name - blender.pipeline.containerise_existing( - container, - name, - namespace, - context, - self.__class__.__name__, - ) + with bpy.data.libraries.load( + libpath, link=True, relative=False + ) as (data_from, data_to): + data_to.objects = data_from.objects + data_to.actions = data_from.actions - container_metadata = container.get( - blender.pipeline.AVALON_PROPERTY) + container = data_to.objects[0] - container_metadata["libpath"] = libpath - container_metadata["lib_container"] = lib_container + assert container, "No asset group found" - objects_list = self._process( - libpath, lib_container, container_name) + target_namespace = container.get(AVALON_PROPERTY).get('namespace') - # Save the list of objects in the metadata container - 
container_metadata["objects"] = objects_list + action = data_to.actions[0].make_local().copy() - nodes = list(container.objects) - nodes.append(container) - self[:] = nodes - return nodes + for obj in bpy.data.objects: + if obj.get(AVALON_PROPERTY) and obj.get(AVALON_PROPERTY).get( + 'namespace') == target_namespace: + if obj.children[0]: + if not obj.children[0].animation_data: + obj.children[0].animation_data_create() + obj.children[0].animation_data.action = action + break - def update(self, container: Dict, representation: Dict): - """Update the loaded asset. + bpy.data.objects.remove(container) - This will remove all objects of the current collection, load the new - ones and add them to the collection. - If the objects of the collection are used in another collection they - will not be removed, only unlinked. Normally this should not be the - case though. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - - libpath = Path(api.get_representation_path(representation)) - extension = libpath.suffix.lower() - - logger.info( - "Container: %s\nRepresentation: %s", - pformat(container, indent=2), - pformat(representation, indent=2), - ) - - assert collection, ( - f"The asset is not loaded: {container['objectName']}" - ) - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - assert libpath, ( - "No existing library file found for {container['objectName']}" - ) - assert libpath.is_file(), ( - f"The file doesn't exist: {libpath}" - ) - assert extension in openpype.hosts.blender.api.plugin.VALID_EXTENSIONS, ( - f"Unsupported file: {libpath}" - ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - - collection_libpath = collection_metadata["libpath"] - normalized_collection_libpath = ( - str(Path(bpy.path.abspath(collection_libpath)).resolve()) - ) - normalized_libpath = ( - str(Path(bpy.path.abspath(str(libpath))).resolve()) - ) - logger.debug( - "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", - normalized_collection_libpath, - normalized_libpath, - ) - if normalized_collection_libpath == normalized_libpath: - logger.info("Library already loaded, not updating...") - return - - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - - self._remove(objects, lib_container) - - objects_list = self._process( - str(libpath), lib_container, collection.name) - - # Save the list of objects in the metadata container - collection_metadata["objects"] = objects_list - collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) - - bpy.ops.object.select_all(action='DESELECT') - - def remove(self, container: Dict) -> bool: - """Remove an existing container from a Blender scene. - - Arguments: - container (openpype:container-1.0): Container to remove, - from `host.ls()`. - - Returns: - bool: Whether the container was deleted. - - Warning: - No nested collections are supported at the moment! - """ - - collection = bpy.data.collections.get( - container["objectName"] - ) - if not collection: - return False - assert not (collection.children), ( - "Nested collections are not supported." 
- ) - - collection_metadata = collection.get( - blender.pipeline.AVALON_PROPERTY) - objects = collection_metadata["objects"] - lib_container = collection_metadata["lib_container"] - - self._remove(objects, lib_container) - - bpy.data.collections.remove(collection) - - return True + library = bpy.data.libraries.get(bpy.path.basename(libpath)) + bpy.data.libraries.remove(library) diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index 8564b52816..dfa4501730 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -11,6 +11,7 @@ from avalon import api from avalon.blender.pipeline import AVALON_CONTAINERS from avalon.blender.pipeline import AVALON_CONTAINER_ID from avalon.blender.pipeline import AVALON_PROPERTY +from avalon.blender.pipeline import AVALON_INSTANCES from openpype.hosts.blender.api import plugin @@ -32,6 +33,14 @@ class JsonLayoutLoader(plugin.AssetLoader): for obj in objects: api.remove(obj.get(AVALON_PROPERTY)) + def _remove_animation_instances(self, asset_group): + instances = bpy.data.collections.get(AVALON_INSTANCES) + if instances: + for obj in list(asset_group.children): + anim_collection = instances.children.get(obj.name+"_animation") + if anim_collection: + bpy.data.collections.remove(anim_collection) + def _get_loader(self, loaders, family): name = "" if family == 'rig': @@ -48,7 +57,7 @@ class JsonLayoutLoader(plugin.AssetLoader): return None - def _process(self, libpath, asset_group, actions): + def _process(self, libpath, asset, asset_group, actions): bpy.ops.object.select_all(action='DESELECT') with open(libpath, "r") as fp: @@ -76,7 +85,9 @@ class JsonLayoutLoader(plugin.AssetLoader): options = { 'parent': asset_group, 'transform': element.get('transform'), - 'action': action + 'action': action, + 'create_animation': True if family == 'rig' else False, + 'animation_asset': asset } # This 
should return the loaded asset, but the load call will be @@ -121,7 +132,7 @@ class JsonLayoutLoader(plugin.AssetLoader): asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) - self._process(libpath, asset_group, None) + self._process(libpath, asset, asset_group, None) bpy.context.scene.collection.objects.link(asset_group) @@ -206,11 +217,13 @@ class JsonLayoutLoader(plugin.AssetLoader): if not rig: raise Exception("No armature in the rig asset group.") if rig.animation_data and rig.animation_data.action: - instance_name = obj_meta.get('instance_name') - actions[instance_name] = rig.animation_data.action + namespace = obj_meta.get('namespace') + actions[namespace] = rig.animation_data.action mat = asset_group.matrix_basis.copy() + self._remove_animation_instances(asset_group) + self._remove(asset_group) self._process(str(libpath), asset_group, actions) @@ -236,6 +249,8 @@ class JsonLayoutLoader(plugin.AssetLoader): if not asset_group: return False + self._remove_animation_instances(asset_group) + self._remove(asset_group) bpy.data.objects.remove(asset_group) diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index dd48be3db7..af5591c299 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -137,8 +137,6 @@ class BlendModelLoader(plugin.AssetLoader): rotation = transform.get('rotation') scale = transform.get('scale') - # Y position is inverted in sign because Unreal and Blender have the - # Y axis mirrored asset_group.location = ( location.get('x'), location.get('y'), diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index d12c398794..f3e2991a04 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -10,6 +10,7 @@ from avalon import api from avalon.blender.pipeline import 
AVALON_CONTAINERS from avalon.blender.pipeline import AVALON_CONTAINER_ID from avalon.blender.pipeline import AVALON_PROPERTY +from openpype import lib from openpype.hosts.blender.api import plugin @@ -164,18 +165,19 @@ class BlendRigLoader(plugin.AssetLoader): bpy.ops.object.select_all(action='DESELECT') + create_animation = False + if options is not None: parent = options.get('parent') transform = options.get('transform') action = options.get('action') + create_animation = options.get('create_animation') if parent and transform: location = transform.get('translation') rotation = transform.get('rotation') scale = transform.get('scale') - # Y position is inverted in sign because Unreal and Blender have the - # Y axis mirrored asset_group.location = ( location.get('x'), location.get('y'), @@ -201,6 +203,27 @@ class BlendRigLoader(plugin.AssetLoader): objects = self._process(libpath, asset_group, group_name, action) + if create_animation: + creator_plugin = lib.get_creator_by_name("CreateAnimation") + if not creator_plugin: + raise ValueError("Creator plugin \"CreateAnimation\" was " + "not found.") + + asset_group.select_set(True) + + animation_asset = options.get('animation_asset') + + api.create( + creator_plugin, + name=namespace+"_animation", + # name=f"{unique_number}_{subset}_animation", + asset=animation_asset, + options={"useSelection": False, "asset_group": asset_group}, + data={"dependencies": str(context["representation"]["_id"])} + ) + + bpy.ops.object.select_all(action='DESELECT') + bpy.context.scene.collection.objects.link(asset_group) asset_group[AVALON_PROPERTY] = { diff --git a/openpype/hosts/blender/plugins/publish/collect_instances.py b/openpype/hosts/blender/plugins/publish/collect_instances.py index 09a60d9725..0d683dace4 100644 --- a/openpype/hosts/blender/plugins/publish/collect_instances.py +++ b/openpype/hosts/blender/plugins/publish/collect_instances.py @@ -29,9 +29,23 @@ class CollectInstances(pyblish.api.ContextPlugin): if 
avalon_prop.get('id') == 'pyblish.avalon.instance': yield obj + @staticmethod + def get_collections() -> Generator: + """Return all 'model' collections. + + Check if the family is 'model' and if it doesn't have the + representation set. If the representation is set, it is a loaded model + and we don't want to publish it. + """ + for collection in bpy.data.collections: + avalon_prop = collection.get(AVALON_PROPERTY) or dict() + if avalon_prop.get('id') == 'pyblish.avalon.instance': + yield collection + def process(self, context): """Collect the models from the current Blender scene.""" asset_groups = self.get_asset_groups() + collections = self.get_collections() for group in asset_groups: avalon_prop = group[AVALON_PROPERTY] @@ -58,3 +72,31 @@ class CollectInstances(pyblish.api.ContextPlugin): self.log.debug(json.dumps(instance.data, indent=4)) for obj in instance: self.log.debug(obj) + + for collection in collections: + avalon_prop = collection[AVALON_PROPERTY] + asset = avalon_prop['asset'] + family = avalon_prop['family'] + subset = avalon_prop['subset'] + task = avalon_prop['task'] + name = f"{asset}_{subset}" + instance = context.create_instance( + name=name, + family=family, + families=[family], + subset=subset, + asset=asset, + task=task, + ) + members = list(collection.objects) + if family == "animation": + for obj in collection.objects: + if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY): + for child in obj.children: + if child.type == 'ARMATURE': + members.append(child) + members.append(collection) + instance[:] = members + self.log.debug(json.dumps(instance.data, indent=4)) + for obj in instance: + self.log.debug(obj) diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py index 60ef20e31c..6687c9fe76 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend.py @@ -11,7 +11,7 @@ class 
ExtractBlend(openpype.api.Extractor): label = "Extract Blend" hosts = ["blender"] - families = ["model", "camera", "rig", "action", "layout", "animation"] + families = ["model", "camera", "rig", "action", "layout"] optional = True def process(self, instance): diff --git a/openpype/hosts/blender/plugins/publish/extract_blend_animation.py b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py new file mode 100644 index 0000000000..239ca53f98 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_blend_animation.py @@ -0,0 +1,53 @@ +import os + +import bpy + +import openpype.api + + +class ExtractBlendAnimation(openpype.api.Extractor): + """Extract a blend file.""" + + label = "Extract Blend" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.blend" + filepath = os.path.join(stagingdir, filename) + + # Perform extraction + self.log.info("Performing extraction..") + + data_blocks = set() + + for obj in instance: + if isinstance(obj, bpy.types.Object) and obj.type == 'EMPTY': + child = obj.children[0] + if child and child.type == 'ARMATURE': + if not obj.animation_data: + obj.animation_data_create() + obj.animation_data.action = child.animation_data.action + obj.animation_data_clear() + data_blocks.add(child.animation_data.action) + data_blocks.add(obj) + + bpy.data.libraries.write(filepath, data_blocks) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'blend', + 'ext': 'blend', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, representation) diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 
8312114c7b..16443b760c 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -1,14 +1,16 @@ import os import json -import openpype.api - import bpy import bpy_extras import bpy_extras.anim_utils +from openpype import api +from openpype.hosts.blender.api import plugin +from avalon.blender.pipeline import AVALON_PROPERTY -class ExtractAnimationFBX(openpype.api.Extractor): + +class ExtractAnimationFBX(api.Extractor): """Extract as animation.""" label = "Extract FBX" @@ -20,33 +22,26 @@ class ExtractAnimationFBX(openpype.api.Extractor): # Define extract output file path stagingdir = self.staging_dir(instance) - context = bpy.context - scene = context.scene - # Perform extraction self.log.info("Performing extraction..") - collections = [ - obj for obj in instance if type(obj) is bpy.types.Collection] + # The first collection object in the instance is taken, as there + # should be only one that contains the asset group. + collection = [ + obj for obj in instance if type(obj) is bpy.types.Collection][0] - assert len(collections) == 1, "There should be one and only one " \ - "collection collected for this asset" + # Again, the first object in the collection is taken , as there + # should be only the asset group in the collection. 
+ asset_group = collection.objects[0] - old_scale = scene.unit_settings.scale_length + armature = [ + obj for obj in asset_group.children if obj.type == 'ARMATURE'][0] - # We set the scale of the scene for the export - scene.unit_settings.scale_length = 0.01 - - armatures = [ - obj for obj in collections[0].objects if obj.type == 'ARMATURE'] - - assert len(collections) == 1, "There should be one and only one " \ - "armature collected for this asset" - - armature = armatures[0] + asset_group_name = asset_group.name + asset_group.name = asset_group.get(AVALON_PROPERTY).get("asset_name") armature_name = armature.name - original_name = armature_name.split(':')[0] + original_name = armature_name.split(':')[1] armature.name = original_name object_action_pairs = [] @@ -89,27 +84,29 @@ class ExtractAnimationFBX(openpype.api.Extractor): for obj in bpy.data.objects: obj.select_set(False) + asset_group.select_set(True) armature.select_set(True) fbx_filename = f"{instance.name}_{armature.name}.fbx" filepath = os.path.join(stagingdir, fbx_filename) - override = bpy.context.copy() - override['selected_objects'] = [armature] + override = plugin.create_blender_context( + active=asset_group, selected=[asset_group, armature]) bpy.ops.export_scene.fbx( override, filepath=filepath, + use_active_collection=False, use_selection=True, bake_anim_use_nla_strips=False, bake_anim_use_all_actions=False, add_leaf_bones=False, armature_nodetype='ROOT', - object_types={'ARMATURE'} + object_types={'EMPTY', 'ARMATURE'} ) armature.name = armature_name + asset_group.name = asset_group_name + asset_group.select_set(False) armature.select_set(False) - scene.unit_settings.scale_length = old_scale - # We delete the baked action and set the original one back for i in range(0, len(object_action_pairs)): pair = object_action_pairs[i] @@ -125,18 +122,20 @@ class ExtractAnimationFBX(openpype.api.Extractor): json_filename = f"{instance.name}.json" json_path = os.path.join(stagingdir, json_filename) - 
json_dict = {} + json_dict = { + "instance_name": asset_group.get(AVALON_PROPERTY).get("namespace") + } - collection = instance.data.get("name") - container = None - for obj in bpy.data.collections[collection].objects: - if obj.type == "ARMATURE": - container_name = obj.get("avalon").get("container_name") - container = bpy.data.collections[container_name] - if container: - json_dict = { - "instance_name": container.get("avalon").get("instance_name") - } + # collection = instance.data.get("name") + # container = None + # for obj in bpy.data.collections[collection].objects: + # if obj.type == "ARMATURE": + # container_name = obj.get("avalon").get("container_name") + # container = bpy.data.collections[container_name] + # if container: + # json_dict = { + # "instance_name": container.get("avalon").get("instance_name") + # } with open(json_path, "w+") as file: json.dump(json_dict, fp=file, indent=2) @@ -159,6 +158,5 @@ class ExtractAnimationFBX(openpype.api.Extractor): instance.data["representations"].append(fbx_representation) instance.data["representations"].append(json_representation) - self.log.info("Extracted instance '{}' to: {}".format( instance.name, fbx_representation)) diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index 2d9f6eb3d1..a47187cf47 100644 --- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -83,7 +83,7 @@ class ExtractLayout(openpype.api.Extractor): "z": transform.translation.z }, "rotation": { - "x": math.radians(transform.rotation.euler().x + 90.0), + "x": math.radians(transform.rotation.euler().x), "y": math.radians(transform.rotation.euler().y), "z": math.radians(180.0 - transform.rotation.euler().z) }, From b507b4e9d33b6a65003954545f7fa271030d995d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 13:02:08 +0200 Subject: [PATCH 139/308] modified dynamic schemas attributes --- 
openpype/settings/entities/lib.py | 36 +++++++++++++++++-------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 3877b49648..457468b18b 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -108,8 +108,8 @@ class OverrideState: class SchemasHub: - def __init__(self, schema_subfolder, reset=True): - self._schema_subfolder = schema_subfolder + def __init__(self, schema_type, reset=True): + self._schema_type = schema_type self._loaded_types = {} self._gui_types = tuple() @@ -119,13 +119,17 @@ class SchemasHub: self._loaded_schemas = {} # Attributes for modules settings - self._modules_settings_defs_by_id = {} - self._dynamic_schemas_def_by_id = {} + self._dynamic_schemas_defs_by_id = {} + self._dynamic_schemas_by_id = {} # Trigger reset if reset: self.reset() + @property + def schema_type(self): + return self._schema_type + def reset(self): self._load_modules_settings_defs() self._load_types() @@ -138,7 +142,7 @@ class SchemasHub: for module_settings_def_cls in module_settings_defs: module_settings_def = module_settings_def_cls() def_id = module_settings_def.id - self._modules_settings_defs_by_id[def_id] = module_settings_def + self._dynamic_schemas_defs_by_id[def_id] = module_settings_def @property def gui_types(self): @@ -146,7 +150,7 @@ class SchemasHub: def resolve_dynamic_schema(self, dynamic_key): output = [] - for def_id, def_keys in self._dynamic_schemas_def_by_id.items(): + for def_id, def_keys in self._dynamic_schemas_by_id.items(): if dynamic_key in def_keys: def_schema = def_keys[dynamic_key] if not def_schema: @@ -346,16 +350,16 @@ class SchemasHub: self._crashed_on_load = {} self._loaded_templates = {} self._loaded_schemas = {} - self._dynamic_schemas_def_by_id = {} + self._dynamic_schemas_by_id = {} dirpath = os.path.join( os.path.dirname(os.path.abspath(__file__)), "schemas", - self._schema_subfolder + self.schema_type ) 
loaded_schemas = {} loaded_templates = {} - dynamic_schemas_def_by_id = {} + dynamic_schemas_by_id = {} for root, _, filenames in os.walk(dirpath): for filename in filenames: basename, ext = os.path.splitext(filename) @@ -405,13 +409,13 @@ class SchemasHub: ) loaded_schemas[basename] = schema_data - defs_iter = self._modules_settings_defs_by_id.items() + defs_iter = self._dynamic_schemas_defs_by_id.items() for def_id, module_settings_def in defs_iter: - dynamic_schemas_def_by_id[def_id] = ( - module_settings_def.get_dynamic_schemas(self._schema_subfolder) + dynamic_schemas_by_id[def_id] = ( + module_settings_def.get_dynamic_schemas(self.schema_type) ) module_schemas = module_settings_def.get_settings_schemas( - self._schema_subfolder + self.schema_type ) for key, schema_data in module_schemas.items(): if isinstance(schema_data, list): @@ -429,10 +433,10 @@ class SchemasHub: self._loaded_templates = loaded_templates self._loaded_schemas = loaded_schemas - self._dynamic_schemas_def_by_id = dynamic_schemas_def_by_id + self._dynamic_schemas_by_id = dynamic_schemas_by_id - def get_dynamic_schema_def(self, schema_def_id): - return self._dynamic_schemas_def_by_id.get(schema_def_id) + def get_dynamic_modules_settings_defs(self, schema_def_id): + return self._dynamic_schemas_defs_by_id.get(schema_def_id) def _fill_template(self, child_data, template_def): """Fill template based on schema definition and template definition. 
From 90076d519f389a8a5a50e1df5f9771294a2128b0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 13:02:45 +0200 Subject: [PATCH 140/308] removed project_settings getter --- openpype/settings/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 04e8bffd8f..d7684082f3 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -354,7 +354,7 @@ def _get_default_settings(): if not path: continue - subdict = defaults["project_settings"] + subdict = defaults path_items = list(path.split("/")) last_key = path_items.pop(-1) for key in path_items: From 9d31ec70116589ab0d00bf6a6c0d840420999924 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 13:03:43 +0200 Subject: [PATCH 141/308] implemented save for dynamic schemas --- openpype/settings/entities/lib.py | 21 +++++++++++++++++++++ openpype/settings/entities/root_entities.py | 3 +++ 2 files changed, 24 insertions(+) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index 457468b18b..13037ac373 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -3,6 +3,7 @@ import re import json import copy import inspect +import collections from .exceptions import ( SchemaTemplateMissingKeys, @@ -679,3 +680,23 @@ class DynamicSchemaValueCollector: def add_entity(self, entity): self._dynamic_entities.append(entity) + + def create_hierarchy(self): + output = collections.defaultdict(dict) + for entity in self._dynamic_entities: + output[entity.dynamic_schema_id][entity.path] = ( + entity.settings_value() + ) + return output + + def save_values(self): + hierarchy = self.create_hierarchy() + + for schema_def_id, schema_def_value in hierarchy.items(): + schema_def = self._schema_hub.get_dynamic_modules_settings_defs( + schema_def_id + ) + if self._schema_hub.schema_type == SCHEMA_KEY_SYSTEM_SETTINGS: + schema_def.save_system_defaults(schema_def_value) + elif 
self._schema_hub.schema_type == SCHEMA_KEY_PROJECT_SETTINGS: + schema_def.save_project_defaults(schema_def_value) diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index b178e3fa36..6f444d5394 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -428,6 +428,9 @@ class RootEntity(BaseItemEntity): with open(output_path, "w") as file_stream: json.dump(value, file_stream, indent=4) + dynamic_values_item = self.collect_dynamic_schema_entities() + dynamic_values_item.save_values() + @abstractmethod def _save_studio_values(self): """Save studio override values.""" From 8cfe9bb270e2859c09a0ec17554b11aac4860a87 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 13:04:04 +0200 Subject: [PATCH 142/308] added first dynamic_schema item in schemas --- .../entities/schemas/projects_schema/schema_main.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 4a8a9d496e..058ff492f3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -121,6 +121,10 @@ { "type": "schema", "name": "schema_project_unreal" + }, + { + "type": "dynamic_schema", + "name": "project_settings/global" } ] } From 1aa3d42704813ce67722f3053bd1a9462a90247c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 13:06:43 +0200 Subject: [PATCH 143/308] reset defaults on save defaults --- openpype/settings/entities/root_entities.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index 6f444d5394..78e8aad47f 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -31,6 +31,7 @@ from 
openpype.settings.lib import ( DEFAULTS_DIR, get_default_settings, + reset_default_settings, get_studio_system_settings_overrides, save_studio_settings, @@ -381,6 +382,7 @@ class RootEntity(BaseItemEntity): if self._override_state is OverrideState.DEFAULTS: self._save_default_values() + reset_default_settings() elif self._override_state is OverrideState.STUDIO: self._save_studio_values() From 7789fb8299375f3f471fbd63d6d231659e3cfa7a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 10 Aug 2021 12:06:52 +0100 Subject: [PATCH 144/308] Hound fixes --- openpype/hosts/blender/api/plugin.py | 3 ++- openpype/hosts/blender/plugins/load/load_animation.py | 2 +- openpype/hosts/blender/plugins/load/load_layout_json.py | 3 ++- openpype/hosts/blender/plugins/load/load_rig.py | 4 ++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index a126f5702f..50b73ade2b 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -174,7 +174,8 @@ class AssetLoader(api.Loader): context: dict, name: Optional[str] = None, namespace: Optional[str] = None, - options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: + options: Optional[Dict] = None + ) -> Optional[bpy.types.Collection]: """Load asset via database Arguments: diff --git a/openpype/hosts/blender/plugins/load/load_animation.py b/openpype/hosts/blender/plugins/load/load_animation.py index 4f589011dd..47c48248b2 100644 --- a/openpype/hosts/blender/plugins/load/load_animation.py +++ b/openpype/hosts/blender/plugins/load/load_animation.py @@ -57,7 +57,7 @@ class BlendAnimationLoader(plugin.AssetLoader): for obj in bpy.data.objects: if obj.get(AVALON_PROPERTY) and obj.get(AVALON_PROPERTY).get( - 'namespace') == target_namespace: + 'namespace') == target_namespace: if obj.children[0]: if not obj.children[0].animation_data: obj.children[0].animation_data_create() diff --git 
a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index dfa4501730..1a4dbbb5cb 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -37,7 +37,8 @@ class JsonLayoutLoader(plugin.AssetLoader): instances = bpy.data.collections.get(AVALON_INSTANCES) if instances: for obj in list(asset_group.children): - anim_collection = instances.children.get(obj.name+"_animation") + anim_collection = instances.children.get( + obj.name + "_animation") if anim_collection: bpy.data.collections.remove(anim_collection) diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index f3e2991a04..5573c081e1 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -207,7 +207,7 @@ class BlendRigLoader(plugin.AssetLoader): creator_plugin = lib.get_creator_by_name("CreateAnimation") if not creator_plugin: raise ValueError("Creator plugin \"CreateAnimation\" was " - "not found.") + "not found.") asset_group.select_set(True) @@ -215,7 +215,7 @@ class BlendRigLoader(plugin.AssetLoader): api.create( creator_plugin, - name=namespace+"_animation", + name=namespace + "_animation", # name=f"{unique_number}_{subset}_animation", asset=animation_asset, options={"useSelection": False, "asset_group": asset_group}, From 1e50751d9def8c3db79bf3d900dfec0f5da21755 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Aug 2021 14:51:22 +0200 Subject: [PATCH 145/308] Changed missed import --- openpype/tools/tray_app/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/tray_app/app.py b/openpype/tools/tray_app/app.py index 339e6343f8..03f8321464 100644 --- a/openpype/tools/tray_app/app.py +++ b/openpype/tools/tray_app/app.py @@ -9,7 +9,7 @@ import itertools from datetime import datetime from avalon import style -from 
openpype.modules.webserver import host_console_listener +from openpype_modules.webserver import host_console_listener from Qt import QtWidgets, QtCore From 976bc45a1e595273024a3711a6c3269f162247e8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 10 Aug 2021 15:18:53 +0200 Subject: [PATCH 146/308] Changed missed imports --- .../clockify/ftrack/server/action_clockify_sync_server.py | 2 +- .../clockify/ftrack/user/action_clockify_sync_local.py | 2 +- .../default_modules/clockify/launcher_actions/ClockifyStart.py | 2 +- .../default_modules/clockify/launcher_actions/ClockifySync.py | 2 +- .../default_modules/slack/launch_hooks/pre_python2_vendor.py | 2 +- .../event_handlers_server/action_private_project_detection.py | 2 +- openpype/pype_commands.py | 2 +- openpype/settings/entities/enum_entity.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py index 8379414c0c..c6b55947da 100644 --- a/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py +++ b/openpype/modules/default_modules/clockify/ftrack/server/action_clockify_sync_server.py @@ -1,7 +1,7 @@ import os import json from openpype_modules.ftrack.lib import ServerAction -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI class SyncClocifyServer(ServerAction): diff --git a/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py index 3d55ee92b6..a430791906 100644 --- a/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py +++ b/openpype/modules/default_modules/clockify/ftrack/user/action_clockify_sync_local.py @@ -1,6 +1,6 @@ import json from openpype_modules.ftrack.lib import 
BaseAction, statics_icon -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI class SyncClocifyLocal(BaseAction): diff --git a/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py index c431ea240d..db51964eb7 100644 --- a/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py +++ b/openpype/modules/default_modules/clockify/launcher_actions/ClockifyStart.py @@ -1,6 +1,6 @@ from avalon import api, io from openpype.api import Logger -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI log = Logger().get_logger(__name__) diff --git a/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py index 1bb168a80b..02982d373a 100644 --- a/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py +++ b/openpype/modules/default_modules/clockify/launcher_actions/ClockifySync.py @@ -1,5 +1,5 @@ from avalon import api, io -from openpype.modules.clockify.clockify_api import ClockifyAPI +from openpype_modules.clockify.clockify_api import ClockifyAPI from openpype.api import Logger log = Logger().get_logger(__name__) diff --git a/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py index a2c1f8a9e0..0f4bc22a34 100644 --- a/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/default_modules/slack/launch_hooks/pre_python2_vendor.py @@ -1,6 +1,6 @@ import os from openpype.lib import PreLaunchHook -from openpype.modules.slack import SLACK_MODULE_DIR +from openpype_modules.slack import SLACK_MODULE_DIR class PrePython2Support(PreLaunchHook): diff --git 
a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py b/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py index 5213e10ba3..62772740cd 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py +++ b/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py @@ -1,4 +1,4 @@ -from openpype.modules.ftrack.lib import ServerAction +from openpype_modules.ftrack.lib import ServerAction class PrivateProjectDetectionAction(ServerAction): diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 7c47d8c613..978dcbc0d7 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -35,7 +35,7 @@ class PypeCommands: @staticmethod def launch_eventservercli(*args): - from openpype.modules.ftrack.ftrack_server.event_server_cli import ( + from openpype_modules.ftrack.ftrack_server.event_server_cli import ( run_event_server ) return run_event_server(*args) diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 4f6a2886bc..31ce96a059 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -399,7 +399,7 @@ class ProvidersEnum(BaseEnumEntity): self.placeholder = None def _get_enum_values(self): - from openpype.modules.sync_server.providers import lib as lib_providers + from openpype_modules.sync_server.providers import lib as lib_providers providers = lib_providers.factory.providers From 3d4c18941a77efa6c78f1f69aa8b594f3c047e09 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 15:25:45 +0200 Subject: [PATCH 147/308] modified imports in comments --- .../ftrack/event_handlers_user/action_create_cust_attrs.py | 2 +- .../ftrack/plugins/publish/integrate_hierarchy_ftrack.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py index 599d2eb257..3869d8ad08 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -43,7 +43,7 @@ dictionary level, task's attributes are nested more. group (string) - name of group - - based on attribute `openpype.modules.ftrack.lib.CUST_ATTR_GROUP` + - based on attribute `openpype_modules.ftrack.lib.CUST_ATTR_GROUP` - "pype" by default *** Required *************************************************************** diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 2fd5296d24..fbd64d9f70 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -4,7 +4,7 @@ import six import pyblish.api from avalon import io -# Copy of constant `openpype.modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` +# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" From 2bbb5e0fc10e67b0b3325bcd915a717249237f41 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 15:40:11 +0200 Subject: [PATCH 148/308] added a little bit readme info --- openpype/modules/README.md | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/openpype/modules/README.md b/openpype/modules/README.md index d54ba7c835..a3733518ac 100644 --- a/openpype/modules/README.md +++ b/openpype/modules/README.md @@ -1,5 +1,17 @@ -# OpenPype modules -OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm 
rendering or special plugins. +# OpenPype modules/addons +OpenPype modules should contain separated logic of specific kind of implementation, like Ftrack connection and usage code or Deadline farm rendering or may contain only special plugins. Addons work the same way currently there is no difference in module and addon. + +## Modules concept +- modules and addons are dynamically imported to virtual python module `openpype_modules` from which it is possible to import them no matter where is the modulo located +- modules or addons should never be imported directly even if you know possible full import path + - it is because all of their content must be imported in specific order and should not be imported without defined functions as it may also break few implementation parts + +### TODOs +- add module/addon manifest + - definition of module (not 100% defined content e.g. minimum require OpenPype version etc.) + - defying that folder is content of a module or an addon +- module/addon have it's settings schemas and default values outside OpenPype +- add general setting of paths to modules ## Base class `OpenPypeModule` - abstract class as base for each module @@ -20,6 +32,7 @@ OpenPype modules should contain separated logic of specific kind of implementati - interfaces can be defined in `interfaces.py` inside module directory - the file can't use relative imports or import anything from other parts of module itself at the header of file + - this is one of reasons why modules/addons can't be imported directly without using defined functions in OpenPype modules implementation ## Base class `OpenPypeInterface` - has nothing implemented From abd7bfa375df0fdea3241b0019c871627a72c803 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 10 Aug 2021 15:41:03 +0200 Subject: [PATCH 149/308] moved new file to right folder --- .../event_handlers_server/action_private_project_detection.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => 
default_modules}/ftrack/event_handlers_server/action_private_project_detection.py (100%) diff --git a/openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py b/openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py similarity index 100% rename from openpype/modules/ftrack/event_handlers_server/action_private_project_detection.py rename to openpype/modules/default_modules/ftrack/event_handlers_server/action_private_project_detection.py From e3116b0bd0d4b0f46676c1bef5243abbcdbd306f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 11 Aug 2021 22:06:44 +0200 Subject: [PATCH 150/308] better loader error handling --- openpype/hosts/houdini/api/plugin.py | 15 ++++++++++++++- .../plugins/create/create_alembic_camera.py | 2 +- .../houdini/plugins/create/create_composite.py | 2 +- .../houdini/plugins/create/create_pointcache.py | 2 +- .../houdini/plugins/create/create_redshift_rop.py | 2 +- .../plugins/create/create_remote_publish.py | 2 +- .../hosts/houdini/plugins/create/create_usd.py | 2 +- .../houdini/plugins/create/create_usd_model.py | 2 +- .../plugins/create/create_usd_workspaces.py | 2 +- .../houdini/plugins/create/create_usdrender.py | 2 +- .../houdini/plugins/create/create_vbd_cache.py | 2 +- 11 files changed, 24 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 9820ed49c3..40c4870a06 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,6 +1,19 @@ +# -*- coding: utf-8 -*- +"""Houdini specific Avalon/Pyblish plugin definitions.""" + +import sys + from avalon import houdini +import hou +import six from openpype.api import PypeCreatorMixin class Creator(PypeCreatorMixin, houdini.Creator): - pass + def process(self): + # reraise as standard Python exception so + # Avalon can catch it + try: + self._process() + except hou.Error as er: + six.reraise(Exception, er, 
sys.exc_info()[2]) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 99a587b035..a36b6642fa 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -18,7 +18,7 @@ class CreateAlembicCamera(plugin.Creator): # Set node type to create for output self.data.update({"node_type": "alembic"}) - def process(self): + def _process(self): instance = super(CreateAlembicCamera, self).process() parms = { diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 7293669bef..06d10f3ad0 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -17,7 +17,7 @@ class CreateCompositeSequence(plugin.Creator): # Type of ROP node to create self.data.update({"node_type": "comp"}) - def process(self): + def _process(self): instance = super(CreateCompositeSequence, self).process() parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index cc452ed806..8aef274340 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -17,7 +17,7 @@ class CreatePointCache(plugin.Creator): self.data.update({"node_type": "alembic"}) - def process(self): + def _process(self): instance = super(CreatePointCache, self).process() parms = { diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 40d2ac58c7..3798bd8240 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -27,7 +27,7 
@@ class CreateRedshiftROP(plugin.Creator): self.data.update({"node_type": "Redshift_ROP"}) - def process(self): + def _process(self): instance = super(CreateRedshiftROP, self).process() basename = instance.name() diff --git a/openpype/hosts/houdini/plugins/create/create_remote_publish.py b/openpype/hosts/houdini/plugins/create/create_remote_publish.py index b9782209cd..66ed35c618 100644 --- a/openpype/hosts/houdini/plugins/create/create_remote_publish.py +++ b/openpype/hosts/houdini/plugins/create/create_remote_publish.py @@ -9,7 +9,7 @@ class CreateRemotePublish(plugin.Creator): family = "remotePublish" icon = "cloud-upload" - def process(self): + def _process(self): """This is a stub creator process. This does not create a regular instance that the instance collector diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 642612f465..96c56c2918 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -16,7 +16,7 @@ class CreateUSD(plugin.Creator): self.data.update({"node_type": "usd"}) - def process(self): + def _process(self): instance = super(CreateUSD, self).process() parms = { diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py index 5276211f2c..3e4e7d9d69 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_model.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_model.py @@ -10,7 +10,7 @@ class CreateUSDModel(plugin.Creator): family = "usdModel" icon = "gears" - def process(self): + def _process(self): node_type = "op::author_model:1.0" diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py index fc8ef5c810..2b4577ba41 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py +++ 
b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py @@ -10,7 +10,7 @@ class _USDWorkspace(plugin.Creator): step = None icon = "gears" - def process(self): + def _process(self): if not all([self.node_type, self.node_name, self.step]): self.log.error("Incomplete USD Workspace parameters") diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index 34e1a9cc54..9070457864 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -19,7 +19,7 @@ class CreateUSDRender(plugin.Creator): self.data.update({"node_type": "usdrender_rop"}) - def process(self): + def _process(self): instance = super(CreateUSDRender, self).process() parms = { diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 677c3d5a9a..b069be3f83 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -18,7 +18,7 @@ class CreateVDBCache(plugin.Creator): # Set node type to create for output self.data["node_type"] = "geometry" - def process(self): + def _process(self): instance = super(CreateVDBCache, self).process() parms = { From 86a3821657bc0aa9f7e8e35cc3f55115527131ff Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 12 Aug 2021 12:08:34 +0200 Subject: [PATCH 151/308] fix infinite recursion --- openpype/hosts/houdini/api/plugin.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 40c4870a06..610f260426 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,11 +1,7 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" - -import sys - from avalon import houdini import hou -import six from 
openpype.api import PypeCreatorMixin @@ -16,4 +12,6 @@ class Creator(PypeCreatorMixin, houdini.Creator): try: self._process() except hou.Error as er: - six.reraise(Exception, er, sys.exc_info()[2]) + # cannot do re-raise with six as it will cause + # infinite recursion. + raise Exception(er) From 64834df4003c04014dcec7e88fb2b58041ff82b2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 12 Aug 2021 16:43:55 +0200 Subject: [PATCH 152/308] Fix - Deadline publish on Linux started Tray instead of headless publishing --- vendor/deadline/custom/plugins/GlobalJobPreLoad.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py index 41df9d4dc9..8631b035cf 100644 --- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py +++ b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py @@ -55,9 +55,9 @@ def inject_openpype_environment(deadlinePlugin): "AVALON_TASK, AVALON_APP_NAME" raise RuntimeError(msg) - print("args::{}".format(args)) + print("args:::{}".format(args)) - exit_code = subprocess.call(args, shell=True) + exit_code = subprocess.call(args, cwd=os.path.dirname(openpype_app)) if exit_code != 0: raise RuntimeError("Publishing failed, check worker's log") From 9bb8fe23f786c10629c02a78fdd33cecdfb566f8 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 12 Aug 2021 17:57:35 +0200 Subject: [PATCH 153/308] add creators to Settings --- openpype/hosts/houdini/api/plugin.py | 16 ++++-- .../plugins/create/create_alembic_camera.py | 8 ++- .../plugins/create/create_composite.py | 8 ++- .../plugins/create/create_pointcache.py | 8 ++- .../plugins/create/create_redshift_rop.py | 8 ++- .../plugins/create/create_remote_publish.py | 2 +- .../houdini/plugins/create/create_usd.py | 9 +++- .../plugins/create/create_usd_model.py | 8 ++- .../plugins/create/create_usd_workspaces.py | 7 ++- .../plugins/create/create_usdrender.py | 8 ++- 
.../plugins/create/create_vbd_cache.py | 8 ++- .../defaults/project_settings/houdini.json | 42 +++++++++++++++ .../schema_project_houdini.json | 4 ++ .../schemas/schema_houdini_create.json | 54 +++++++++++++++++++ 14 files changed, 168 insertions(+), 22 deletions(-) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 610f260426..d84427bfee 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,17 +1,23 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" +import sys from avalon import houdini +import six + import hou from openpype.api import PypeCreatorMixin +class OpenPypeCreatorError(Exception): + pass + + class Creator(PypeCreatorMixin, houdini.Creator): def process(self): - # reraise as standard Python exception so + instance = super(houdini.Creator, self).process() + # re-raise as standard Python exception so # Avalon can catch it try: - self._process() + self._process(instance) except hou.Error as er: - # cannot do re-raise with six as it will cause - # infinite recursion. - raise Exception(er) + six.reraise(OpenPypeCreatorError, OpenPypeCreatorError("Creator error"), sys.exc_info()[2]) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index a36b6642fa..d65e2a5e98 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -18,9 +18,13 @@ class CreateAlembicCamera(plugin.Creator): # Set node type to create for output self.data.update({"node_type": "alembic"}) - def _process(self): - instance = super(CreateAlembicCamera, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. 
+ + """ parms = { "filename": "$HIP/pyblish/%s.abc" % self.name, "use_sop_path": False, diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 06d10f3ad0..d19c97de86 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -17,9 +17,13 @@ class CreateCompositeSequence(plugin.Creator): # Type of ROP node to create self.data.update({"node_type": "comp"}) - def _process(self): - instance = super(CreateCompositeSequence, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. + + """ parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} if self.nodes: diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 8aef274340..28468bf073 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -17,9 +17,13 @@ class CreatePointCache(plugin.Creator): self.data.update({"node_type": "alembic"}) - def _process(self): - instance = super(CreatePointCache, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. 
+ + """ parms = { "use_sop_path": True, # Export single node from SOP Path "build_from_path": True, # Direct path of primitive in output diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 3798bd8240..06b70a01c2 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -27,9 +27,13 @@ class CreateRedshiftROP(plugin.Creator): self.data.update({"node_type": "Redshift_ROP"}) - def _process(self): - instance = super(CreateRedshiftROP, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. + + """ basename = instance.name() instance.setName(basename + "_ROP", unique_name=True) diff --git a/openpype/hosts/houdini/plugins/create/create_remote_publish.py b/openpype/hosts/houdini/plugins/create/create_remote_publish.py index 66ed35c618..18074fa560 100644 --- a/openpype/hosts/houdini/plugins/create/create_remote_publish.py +++ b/openpype/hosts/houdini/plugins/create/create_remote_publish.py @@ -9,7 +9,7 @@ class CreateRemotePublish(plugin.Creator): family = "remotePublish" icon = "cloud-upload" - def _process(self): + def _process(self, instance): """This is a stub creator process. 
This does not create a regular instance that the instance collector diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 96c56c2918..076197bace 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -7,6 +7,7 @@ class CreateUSD(plugin.Creator): label = "USD" family = "usd" icon = "gears" + enabled = False def __init__(self, *args, **kwargs): super(CreateUSD, self).__init__(*args, **kwargs) @@ -16,9 +17,13 @@ class CreateUSD(plugin.Creator): self.data.update({"node_type": "usd"}) - def _process(self): - instance = super(CreateUSD, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. + + """ parms = { "lopoutput": "$HIP/pyblish/%s.usd" % self.name, "enableoutputprocessor_simplerelativepaths": False, diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py index 3e4e7d9d69..5e6bd9e3b0 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_model.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_model.py @@ -10,8 +10,13 @@ class CreateUSDModel(plugin.Creator): family = "usdModel" icon = "gears" - def _process(self): + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. 
+ + """ node_type = "op::author_model:1.0" subset = self.data["subset"] @@ -20,6 +25,7 @@ class CreateUSDModel(plugin.Creator): # Get stage root and create node stage = hou.node("/stage") + print("creating node {}/{}".format(node_type, name)) instance = stage.createNode(node_type, node_name=name) instance.moveToGoodPosition(move_unconnected=True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py index 2b4577ba41..0e24ca086b 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py +++ b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py @@ -10,8 +10,13 @@ class _USDWorkspace(plugin.Creator): step = None icon = "gears" - def _process(self): + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. + + """ if not all([self.node_type, self.node_name, self.step]): self.log.error("Incomplete USD Workspace parameters") return diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index 9070457864..5cf03a211f 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -19,9 +19,13 @@ class CreateUSDRender(plugin.Creator): self.data.update({"node_type": "usdrender_rop"}) - def _process(self): - instance = super(CreateUSDRender, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. 
+ + """ parms = { # Render frame range "trange": 1 diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index b069be3f83..2047ae2e76 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -18,9 +18,13 @@ class CreateVDBCache(plugin.Creator): # Set node type to create for output self.data["node_type"] = "geometry" - def _process(self): - instance = super(CreateVDBCache, self).process() + def _process(self, instance): + """Creator main entry point. + Args: + instance (hou.Node): Created Houdini instance. + + """ parms = { "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, "initsim": True, diff --git a/openpype/settings/defaults/project_settings/houdini.json b/openpype/settings/defaults/project_settings/houdini.json index 811a446e59..809c732d6f 100644 --- a/openpype/settings/defaults/project_settings/houdini.json +++ b/openpype/settings/defaults/project_settings/houdini.json @@ -1,4 +1,46 @@ { + "create": { + "CreateAlembicCamera": { + "enabled": true, + "defaults": [] + }, + "CreateCompositeSequence": { + "enabled": true, + "defaults": [] + }, + "CreatePointCache": { + "enabled": true, + "defaults": [] + }, + "CreateRedshiftROP": { + "enabled": true, + "defaults": [] + }, + "CreateRemotePublish": { + "enabled": true, + "defaults": [] + }, + "CreateVDBCache": { + "enabled": true, + "defaults": [] + }, + "CreateUSD": { + "enabled": false, + "defaults": [] + }, + "CreateUSDModel": { + "enabled": false, + "defaults": [] + }, + "USDCreateShadingWorkspace": { + "enabled": false, + "defaults": [] + }, + "CreateUSDRender": { + "enabled": false, + "defaults": [] + } + }, "publish": { "ValidateContainers": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json index 
c6de257a61..cad99dde22 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_houdini.json @@ -5,6 +5,10 @@ "label": "Houdini", "is_file": true, "children": [ + { + "type": "schema", + "name": "schema_houdini_create" + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json new file mode 100644 index 0000000000..72b8032d4b --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_houdini_create.json @@ -0,0 +1,54 @@ +{ + "type": "dict", + "collapsible": true, + "key": "create", + "label": "Creator plugins", + "children": [ + { + "type": "schema_template", + "name": "template_create_plugin", + "template_data": [ + { + "key": "CreateAlembicCamera", + "label": "Create Alembic Camera" + }, + { + "key": "CreateCompositeSequence", + "label": "Create Composite (Image Sequence)" + }, + { + "key": "CreatePointCache", + "label": "Create Point Cache" + }, + { + "key": "CreateRedshiftROP", + "label": "Create Redshift ROP" + }, + { + "key": "CreateRemotePublish", + "label": "Create Remote Publish" + }, + { + "key": "CreateVDBCache", + "label": "Create VDB Cache" + }, + { + "key": "CreateUSD", + "label": "Create USD" + }, + { + "key": "CreateUSDModel", + "label": "Create USD Model" + }, + { + "key": "USDCreateShadingWorkspace", + "label": "Create USD Shading Workspace" + }, + { + "key": "CreateUSDRender", + "label": "Create USD Render" + } + ] + } + ] +} \ No newline at end of file From 44a3515ec99d7dc9915f519dd8a4771841f44566 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Aug 2021 00:59:04 +0000 Subject: [PATCH 154/308] Bump path-parse from 1.0.6 to 1.0.7 in /website Bumps 
[path-parse](https://github.com/jbgutierrez/path-parse) from 1.0.6 to 1.0.7. - [Release notes](https://github.com/jbgutierrez/path-parse/releases) - [Commits](https://github.com/jbgutierrez/path-parse/commits/v1.0.7) --- updated-dependencies: - dependency-name: path-parse dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index a63bf37731..88f3db082e 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -6168,9 +6168,9 @@ path-key@^3.0.0, path-key@^3.1.0: integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" From b4a3038623961e77c25d6784341370d21cbc08bb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 13 Aug 2021 18:02:12 +0200 Subject: [PATCH 155/308] add `--validate-version` and `--headless` --- igniter/__init__.py | 3 + igniter/bootstrap_repos.py | 124 +++++++++++++++++++++++- openpype/cli.py | 2 + start.py | 73 ++++++++++++-- website/docs/admin_openpype_commands.md | 3 + website/docs/admin_use.md | 13 +++ 6 files changed, 211 insertions(+), 7 deletions(-) diff --git a/igniter/__init__.py b/igniter/__init__.py index 20bf9be106..73e315d88a 100644 --- a/igniter/__init__.py +++ b/igniter/__init__.py @@ -12,6 +12,9 @@ from .version import __version__ as version def open_dialog(): """Show Igniter dialog.""" + if 
os.getenv("OPENPYPE_HEADLESS_MODE"): + print("!!! Can't open dialog in headless mode. Exiting.") + sys.exit(1) from Qt import QtWidgets, QtCore from .install_dialog import InstallDialog diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 8c081b8614..22f5e7d94c 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -9,6 +9,7 @@ import sys import tempfile from pathlib import Path from typing import Union, Callable, List, Tuple +import hashlib from zipfile import ZipFile, BadZipFile @@ -28,6 +29,25 @@ LOG_WARNING = 1 LOG_ERROR = 3 +def sha256sum(filename): + """Calculate sha256 for content of the file. + + Args: + filename (str): Path to file. + + Returns: + str: hex encoded sha256 + + """ + h = hashlib.sha256() + b = bytearray(128 * 1024) + mv = memoryview(b) + with open(filename, 'rb', buffering=0) as f: + for n in iter(lambda: f.readinto(mv), 0): + h.update(mv[:n]) + return h.hexdigest() + + class OpenPypeVersion(semver.VersionInfo): """Class for storing information about OpenPype version. @@ -261,7 +281,8 @@ class BootstrapRepos: self.live_repo_dir = Path(Path(__file__).parent / ".." / "repos") @staticmethod - def get_version_path_from_list(version: str, version_list: list) -> Path: + def get_version_path_from_list( + version: str, version_list: list) -> Union[Path, None]: """Get path for specific version in list of OpenPype versions. 
Args: @@ -275,6 +296,7 @@ class BootstrapRepos: for v in version_list: if str(v) == version: return v.path + return None @staticmethod def get_local_live_version() -> str: @@ -487,6 +509,7 @@ class BootstrapRepos: openpype_root = openpype_path.resolve() # generate list of filtered paths dir_filter = [openpype_root / f for f in self.openpype_filter] + checksums = [] file: Path for file in openpype_list: @@ -508,11 +531,110 @@ class BootstrapRepos: processed_path = file self._print(f"- processing {processed_path}") + checksums.append( + ( + sha256sum(file.as_posix()), + file.resolve().relative_to(openpype_root) + ) + ) zip_file.write(file, file.relative_to(openpype_root)) + checksums_str = "" + for c in checksums: + checksums_str += "{}:{}\n".format(c[0], c[1]) + zip_file.writestr("checksums", checksums_str) # test if zip is ok zip_file.testzip() self._progress_callback(100) + + def validate_openpype_version(self, path: Path) -> tuple: + """Validate version directory or zip file. + + This will load `checksums` file if present, calculate checksums + of existing files in given path and compare. It will also compare + lists of files together for missing files. + + Args: + path (Path): Path to OpenPype version to validate. + + Returns: + tuple(bool, str): with version validity as first item and string with + reason as second. + + """ + if not path.exists(): + return False, "Path doesn't exist" + + if path.is_file(): + return self._validate_zip(path) + return self._validate_dir(path) + + @staticmethod + def _validate_zip(path: Path) -> tuple: + """Validate content of zip file.""" + with ZipFile(path, "r") as zip_file: + # read checksums + try: + checksums_data = str(zip_file.read("checksums")) + except IOError: + # FIXME: This should be set to False sometimes in the future + return True, "Cannot read checksums for archive." 
+ + # split it to the list of tuples + checksums = [ + tuple(line.split(":")) + for line in checksums_data.split("\n") if line + ] + + # calculate and compare checksums in the zip file + for file in checksums: + h = hashlib.sha256() + h.update(zip_file.read(file[1])) + if h.hexdigest() != file[0]: + return False, f"Invalid checksum on {file[1]}" + + # get list of files in zip minus `checksums` file itself + # and turn in to set to compare against list of files + # from checksum file. If difference exists, something is + # wrong + files_in_zip = zip_file.namelist() + files_in_zip.remove("checksums") + files_in_zip = set(files_in_zip) + files_in_checksum = set([file[1] for file in checksums]) + diff = files_in_zip.difference(files_in_checksum) + if diff: + return False, f"Missing files {diff}" + + return True, "All ok" + + @staticmethod + def _validate_dir(path: Path) -> tuple: + checksums_file = Path(path / "checksums") + if not checksums_file.exists(): + # FIXME: This should be set to False sometimes in the future + return True, "Cannot read checksums for archive." 
+ checksums_data = checksums_file.read_text() + checksums = [ + tuple(line.split(":")) + for line in checksums_data.split("\n") if line + ] + files_in_dir = [ + file.relative_to(path).as_posix() + for file in path.iterdir() if file.is_file() + ] + files_in_dir.remove("checksums") + files_in_dir = set(files_in_dir) + files_in_checksum = set([file[1] for file in checksums]) + + for file in checksums: + current = sha256sum((path / file[1]).as_posix()) + if file[0] != current: + return False, f"Invalid checksum on {file[1]}" + diff = files_in_dir.difference(files_in_checksum) + if diff: + return False, f"Missing files {diff}" + + return True, "All ok" @staticmethod def add_paths_from_archive(archive: Path) -> None: diff --git a/openpype/cli.py b/openpype/cli.py index ec5b04c468..be14a8aa7d 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -18,6 +18,8 @@ from .pype_commands import PypeCommands @click.option("--list-versions", is_flag=True, expose_value=False, help=("list all detected versions. Use With `--use-staging " "to list staging versions.")) +@click.option("--validate-version", + help="validate given version integrity") def main(ctx): """Pype is main command serving as entry point to pipeline system. diff --git a/start.py b/start.py index 6473a926d0..ca4b2835bb 100644 --- a/start.py +++ b/start.py @@ -179,8 +179,10 @@ else: ssl_cert_file = certifi.where() os.environ["SSL_CERT_FILE"] = ssl_cert_file +if "--headless" in sys.argv: + os.environ["OPENPYPE_HEADLESS_MODE"] = "1" -import igniter # noqa: E402 +import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 from igniter.tools import ( get_openpype_path_from_db, @@ -343,7 +345,7 @@ def _process_arguments() -> tuple: # check for `--use-version=3.0.0` argument and `--use-staging` use_version = None use_staging = False - print_versions = False + commands = [] for arg in sys.argv: if arg == "--use-version": _print("!!! 
Please use option --use-version like:") @@ -366,12 +368,30 @@ def _process_arguments() -> tuple: " proper version string.")) sys.exit(1) + if arg == "--validate-version": + _print("!!! Please use option --validate-version like:") + _print(" --validate-version=3.0.0") + sys.exit(1) + + if arg.startswith("--validate-version="): + m = re.search( + r"--validate-version=(?P\d+\.\d+\.\d+(?:\S*)?)", arg) + if m and m.group('version'): + use_version = m.group('version') + sys.argv.remove(arg) + commands.append("validate") + else: + _print("!!! Requested version isn't in correct format.") + _print((" Use --list-versions to find out" + " proper version string.")) + sys.exit(1) + if "--use-staging" in sys.argv: use_staging = True sys.argv.remove("--use-staging") if "--list-versions" in sys.argv: - print_versions = True + commands.append("print_versions") sys.argv.remove("--list-versions") # handle igniter @@ -389,7 +409,7 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - return use_version, use_staging, print_versions + return use_version, use_staging, commands def _determine_mongodb() -> str: @@ -738,7 +758,7 @@ def boot(): # Process arguments # ------------------------------------------------------------------------ - use_version, use_staging, print_versions = _process_arguments() + use_version, use_staging, commands = _process_arguments() if os.getenv("OPENPYPE_VERSION"): if use_version: @@ -766,13 +786,47 @@ def boot(): # Get openpype path from database and set it to environment so openpype can # find its versions there and bootstrap them. 
openpype_path = get_openpype_path_from_db(openpype_mongo) + + if getattr(sys, 'frozen', False): + local_version = bootstrap.get_version(Path(OPENPYPE_ROOT)) + else: + local_version = bootstrap.get_local_live_version() + + if "validate" in commands: + _print(f">>> Validating version [ {use_version} ]") + openpype_versions = bootstrap.find_openpype(include_zips=True, + staging=True) + openpype_versions += bootstrap.find_openpype(include_zips=True, + staging=False) + + v: OpenPypeVersion + found = [v for v in openpype_versions if str(v) == use_version] + if not found: + _print(f"!!! Version [ {use_version} ] not found.") + list_versions(openpype_versions, local_version) + sys.exit(1) + + # print result + result = bootstrap.validate_openpype_version( + bootstrap.get_version_path_from_list( + use_version, openpype_versions)) + + _print("{}{}".format( + ">>> " if result[0] else "!!! ", + bootstrap.validate_openpype_version( + bootstrap.get_version_path_from_list(use_version, openpype_versions) + )[1]) + ) + sys.exit(1) + + if not openpype_path: _print("*** Cannot get OpenPype path from database.") if not os.getenv("OPENPYPE_PATH") and openpype_path: os.environ["OPENPYPE_PATH"] = openpype_path - if print_versions: + if "print_versions" in commands: if not use_staging: _print("--- This will list only non-staging versions detected.") _print(" To see staging versions, use --use-staging argument.") @@ -803,6 +857,13 @@ def boot(): # no version to run _print(f"!!! {e}") sys.exit(1) + # validate version + _print(f">>> Validating version [ {str(version_path)} ]") + result = bootstrap.validate_openpype_version(version_path) + if not result[0]: + _print(f"!!! 
Invalid version: {result[1]}") + sys.exit(1) + _print(f"--- version is valid") else: version_path = _bootstrap_from_code(use_version, use_staging) diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 1a91e2e7fe..d6ccc883b0 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -18,11 +18,14 @@ Running OpenPype without any commands will default to `tray`. ```shell openpype_console --use-version=3.0.0-foo+bar ``` +`--headless` - to run OpenPype in headless mode (without using graphical UI) `--use-staging` - to use staging versions of OpenPype. `--list-versions [--use-staging]` - to list available versions. +`--validate-version` to validate integrity of given version + For more information [see here](admin_use#run-openpype). ## Commands diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md index 4ad08a0174..178241ad19 100644 --- a/website/docs/admin_use.md +++ b/website/docs/admin_use.md @@ -56,6 +56,19 @@ openpype_console --list-versions You can add `--use-staging` to list staging versions. ::: +If you want to validate integrity of some available version, you can use: + +```shell +openpype_console --validate-version=3.3.0 +``` + +This will go through the version and validate file content against sha 256 hashes +stored in `checksums` file. + +:::tip Headless mode +Add `--headless` to run OpenPype without graphical UI (useful on server or on automated tasks, etc.) 
+::: + ### Details When you run OpenPype from executable, few check are made: From b6d831045796656f4bbd4ed98fbe256d22704295 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 13 Aug 2021 18:18:57 +0200 Subject: [PATCH 156/308] hound fixes, checks for missing files --- igniter/bootstrap_repos.py | 19 +++++++++++++------ start.py | 8 ++++---- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 22f5e7d94c..535bb723bc 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -546,7 +546,7 @@ class BootstrapRepos: # test if zip is ok zip_file.testzip() self._progress_callback(100) - + def validate_openpype_version(self, path: Path) -> tuple: """Validate version directory or zip file. @@ -558,13 +558,13 @@ class BootstrapRepos: path (Path): Path to OpenPype version to validate. Returns: - tuple(bool, str): with version validity as first item and string with - reason as second. + tuple(bool, str): with version validity as first item + and string with reason as second. 
""" if not path.exists(): return False, "Path doesn't exist" - + if path.is_file(): return self._validate_zip(path) return self._validate_dir(path) @@ -589,7 +589,10 @@ class BootstrapRepos: # calculate and compare checksums in the zip file for file in checksums: h = hashlib.sha256() - h.update(zip_file.read(file[1])) + try: + h.update(zip_file.read(file[1])) + except FileNotFoundError: + return False, f"Missing file [ {file[1]} ]" if h.hexdigest() != file[0]: return False, f"Invalid checksum on {file[1]}" @@ -627,7 +630,11 @@ class BootstrapRepos: files_in_checksum = set([file[1] for file in checksums]) for file in checksums: - current = sha256sum((path / file[1]).as_posix()) + try: + current = sha256sum((path / file[1]).as_posix()) + except FileNotFoundError: + return False, f"Missing file [ {file[1]} ]" + if file[0] != current: return False, f"Invalid checksum on {file[1]}" diff = files_in_dir.difference(files_in_checksum) diff --git a/start.py b/start.py index ca4b2835bb..a5f662d39b 100644 --- a/start.py +++ b/start.py @@ -182,7 +182,7 @@ else: if "--headless" in sys.argv: os.environ["OPENPYPE_HEADLESS_MODE"] = "1" -import igniter # noqa: E402 +import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 from igniter.tools import ( get_openpype_path_from_db, @@ -797,8 +797,7 @@ def boot(): openpype_versions = bootstrap.find_openpype(include_zips=True, staging=True) openpype_versions += bootstrap.find_openpype(include_zips=True, - staging=False) - + staging=False) v: OpenPypeVersion found = [v for v in openpype_versions if str(v) == use_version] if not found: @@ -814,7 +813,8 @@ def boot(): _print("{}{}".format( ">>> " if result[0] else "!!! 
", bootstrap.validate_openpype_version( - bootstrap.get_version_path_from_list(use_version, openpype_versions) + bootstrap.get_version_path_from_list( + use_version, openpype_versions) )[1]) ) sys.exit(1) From 849b18e168967c821e5586e7e215623177f37e83 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 16 Aug 2021 16:04:41 +0200 Subject: [PATCH 157/308] add face sets to alembics --- openpype/hosts/maya/plugins/create/create_animation.py | 1 + openpype/hosts/maya/plugins/create/create_model.py | 1 + openpype/hosts/maya/plugins/create/create_pointcache.py | 1 + openpype/hosts/maya/plugins/publish/extract_animation.py | 3 ++- openpype/hosts/maya/plugins/publish/extract_model.py | 1 + openpype/hosts/maya/plugins/publish/extract_pointcache.py | 2 ++ 6 files changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_animation.py b/openpype/hosts/maya/plugins/create/create_animation.py index 5155aec0ab..7ce96166f7 100644 --- a/openpype/hosts/maya/plugins/create/create_animation.py +++ b/openpype/hosts/maya/plugins/create/create_animation.py @@ -24,6 +24,7 @@ class CreateAnimation(plugin.Creator): # Write vertex colors with the geometry. self.data["writeColorSets"] = False + self.data["writeFaceSets"] = False # Include only renderable visible shapes. 
# Skips locators and empty transforms diff --git a/openpype/hosts/maya/plugins/create/create_model.py b/openpype/hosts/maya/plugins/create/create_model.py index f1d9d22c1c..37faad23a0 100644 --- a/openpype/hosts/maya/plugins/create/create_model.py +++ b/openpype/hosts/maya/plugins/create/create_model.py @@ -15,6 +15,7 @@ class CreateModel(plugin.Creator): # Vertex colors with the geometry self.data["writeColorSets"] = False + self.data["writeFaceSets"] = False # Include attributes by attribute name or prefix self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index 9afea731fd..d8e5fd43a7 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -20,6 +20,7 @@ class CreatePointCache(plugin.Creator): self.data.update(lib.collect_animation_data()) self.data["writeColorSets"] = False # Vertex colors with the geometry. + self.data["writeFaceSets"] = False # Vertex colors with the geometry. 
self.data["renderableOnly"] = False # Only renderable visible shapes self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups diff --git a/openpype/hosts/maya/plugins/publish/extract_animation.py b/openpype/hosts/maya/plugins/publish/extract_animation.py index b86ded1fb0..7ecc40a68d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_animation.py +++ b/openpype/hosts/maya/plugins/publish/extract_animation.py @@ -57,7 +57,8 @@ class ExtractAnimation(openpype.api.Extractor): "uvWrite": True, "selection": True, "worldSpace": instance.data.get("worldSpace", True), - "writeColorSets": instance.data.get("writeColorSets", False) + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False) } if not instance.data.get("includeParentHierarchy", True): diff --git a/openpype/hosts/maya/plugins/publish/extract_model.py b/openpype/hosts/maya/plugins/publish/extract_model.py index 1773297826..40cc9427f3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_model.py +++ b/openpype/hosts/maya/plugins/publish/extract_model.py @@ -28,6 +28,7 @@ class ExtractModel(openpype.api.Extractor): hosts = ["maya"] families = ["model"] scene_type = "ma" + optional = True def process(self, instance): """Plugin entry point.""" diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index ba716c0d18..630cc39398 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -38,6 +38,7 @@ class ExtractAlembic(openpype.api.Extractor): # Get extra export arguments writeColorSets = instance.data.get("writeColorSets", False) + writeFaceSets = instance.data.get("writeFaceSets", False) self.log.info("Extracting pointcache..") dirname = self.staging_dir(instance) @@ -53,6 +54,7 @@ class 
ExtractAlembic(openpype.api.Extractor): "writeVisibility": True, "writeCreases": True, "writeColorSets": writeColorSets, + "writeFaceSets": writeFaceSets, "uvWrite": True, "selection": True, "worldSpace": instance.data.get("worldSpace", True) From 25a742ecda82c1ac2e906dc0c87075af45bb32b6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 17 Aug 2021 10:15:21 +0200 Subject: [PATCH 158/308] moved python console interpreter to default submodules --- .../python_console_interpreter/__init__.py | 0 .../python_console_interpreter/module.py | 5 +++-- .../python_console_interpreter/window/__init__.py | 0 .../python_console_interpreter/window/widgets.py | 0 4 files changed, 3 insertions(+), 2 deletions(-) rename openpype/modules/{ => default_modules}/python_console_interpreter/__init__.py (100%) rename openpype/modules/{ => default_modules}/python_console_interpreter/module.py (88%) rename openpype/modules/{ => default_modules}/python_console_interpreter/window/__init__.py (100%) rename openpype/modules/{ => default_modules}/python_console_interpreter/window/widgets.py (100%) diff --git a/openpype/modules/python_console_interpreter/__init__.py b/openpype/modules/default_modules/python_console_interpreter/__init__.py similarity index 100% rename from openpype/modules/python_console_interpreter/__init__.py rename to openpype/modules/default_modules/python_console_interpreter/__init__.py diff --git a/openpype/modules/python_console_interpreter/module.py b/openpype/modules/default_modules/python_console_interpreter/module.py similarity index 88% rename from openpype/modules/python_console_interpreter/module.py rename to openpype/modules/default_modules/python_console_interpreter/module.py index b37f35dfe0..7fd8d80f28 100644 --- a/openpype/modules/python_console_interpreter/module.py +++ b/openpype/modules/default_modules/python_console_interpreter/module.py @@ -1,7 +1,8 @@ -from .. 
import PypeModule, ITrayAction +from openpype.modules import OpenPypeModule +from openpype_interfaces import ITrayModule -class PythonInterpreterAction(PypeModule, ITrayAction): +class PythonInterpreterAction(OpenPypeModule, ITrayAction): label = "Console" name = "python_interpreter" admin_action = True diff --git a/openpype/modules/python_console_interpreter/window/__init__.py b/openpype/modules/default_modules/python_console_interpreter/window/__init__.py similarity index 100% rename from openpype/modules/python_console_interpreter/window/__init__.py rename to openpype/modules/default_modules/python_console_interpreter/window/__init__.py diff --git a/openpype/modules/python_console_interpreter/window/widgets.py b/openpype/modules/default_modules/python_console_interpreter/window/widgets.py similarity index 100% rename from openpype/modules/python_console_interpreter/window/widgets.py rename to openpype/modules/default_modules/python_console_interpreter/window/widgets.py From fc2e54ef368cd3b365aadabba9d4637d01319e5e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 17 Aug 2021 10:15:52 +0200 Subject: [PATCH 159/308] moved new deadline plugins --- .../plugins/publish/collect_deadline_server_from_instance.py | 0 .../deadline/plugins/publish/collect_default_deadline_server.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/collect_deadline_server_from_instance.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/collect_default_deadline_server.py (100%) diff --git a/openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py b/openpype/modules/default_modules/deadline/plugins/publish/collect_deadline_server_from_instance.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/collect_deadline_server_from_instance.py rename to 
openpype/modules/default_modules/deadline/plugins/publish/collect_deadline_server_from_instance.py diff --git a/openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py b/openpype/modules/default_modules/deadline/plugins/publish/collect_default_deadline_server.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/collect_default_deadline_server.py rename to openpype/modules/default_modules/deadline/plugins/publish/collect_default_deadline_server.py From ab4310cf87eefec7d64a52a97b50f5553b5bdcd8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 17 Aug 2021 10:19:16 +0200 Subject: [PATCH 160/308] fixes in console to match new structure --- .../default_modules/python_console_interpreter/module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/python_console_interpreter/module.py b/openpype/modules/default_modules/python_console_interpreter/module.py index 7fd8d80f28..f4df3fb6d8 100644 --- a/openpype/modules/default_modules/python_console_interpreter/module.py +++ b/openpype/modules/default_modules/python_console_interpreter/module.py @@ -1,5 +1,5 @@ from openpype.modules import OpenPypeModule -from openpype_interfaces import ITrayModule +from openpype_interfaces import ITrayAction class PythonInterpreterAction(OpenPypeModule, ITrayAction): @@ -26,7 +26,7 @@ class PythonInterpreterAction(OpenPypeModule, ITrayAction): if self._interpreter_window: return - from openpype.modules.python_console_interpreter.window import ( + from openpype_modules.python_console_interpreter.window import ( PythonInterpreterWidget ) From 96021daebd7cbc7e13108a797e837134bcdc664c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 18 Aug 2021 11:28:06 +0200 Subject: [PATCH 161/308] creating thumbnails from exr in webpublisher --- .../plugins/publish/{extract_jpeg.py => extract_jpeg_exr.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename openpype/plugins/publish/{extract_jpeg.py => 
extract_jpeg_exr.py} (98%) diff --git a/openpype/plugins/publish/extract_jpeg.py b/openpype/plugins/publish/extract_jpeg_exr.py similarity index 98% rename from openpype/plugins/publish/extract_jpeg.py rename to openpype/plugins/publish/extract_jpeg_exr.py index b1289217e6..8d9e48b634 100644 --- a/openpype/plugins/publish/extract_jpeg.py +++ b/openpype/plugins/publish/extract_jpeg_exr.py @@ -17,7 +17,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): "imagesequence", "render", "render2d", "source", "plate", "take" ] - hosts = ["shell", "fusion", "resolve"] + hosts = ["shell", "fusion", "resolve", "webpublisher"] enabled = False # presetable attribute From 9c56eb3b53bc76ca68cefe2e1b9ed6975e3d02f1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 18 Aug 2021 11:48:02 +0200 Subject: [PATCH 162/308] Webpublisher - added translation from email to username --- .../plugins/publish/collect_username.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/collect_username.py diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_username.py b/openpype/hosts/webpublisher/plugins/publish/collect_username.py new file mode 100644 index 0000000000..25d6f190a3 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_username.py @@ -0,0 +1,45 @@ +"""Loads publishing context from json and continues in publish process. + +Requires: + anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11) + +Provides: + context, instances -> All data from previous publishing process. +""" + +import ftrack_api +import os + +import pyblish.api + + +class CollectUsername(pyblish.api.ContextPlugin): + """ + Translates user email to Ftrack username. + + Emails in Ftrack are same as company's Slack, username is needed to + load data to Ftrack. 
+ + """ + order = pyblish.api.CollectorOrder - 0.488 + label = "Collect ftrack username" + host = ["webpublisher"] + + _context = None + + def process(self, context): + os.environ["FTRACK_API_USER"] = "pype.club" + os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"] + self.log.info("CollectUsername") + for instance in context: + email = instance.data["user_email"] + self.log.info("email:: {}".format(email)) + session = ftrack_api.Session(auto_connect_event_hub=False) + user = session.query("User where email like '{}'".format( + email)) + + if not user: + raise ValueError("Couldnt find user with {} email".format(email)) + + os.environ["FTRACK_API_USER"] = user[0].get("username") + break From 3d0b470e36f6dee5fc8b5f0160357f73f80254e6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 18 Aug 2021 11:48:19 +0200 Subject: [PATCH 163/308] Webpublisher - added collector for fps --- .../plugins/publish/collect_fps.py | 28 +++++++++++++++++++ .../publish/collect_published_files.py | 13 +++++++-- 2 files changed, 38 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/webpublisher/plugins/publish/collect_fps.py diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_fps.py b/openpype/hosts/webpublisher/plugins/publish/collect_fps.py new file mode 100644 index 0000000000..79fe53176a --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/collect_fps.py @@ -0,0 +1,28 @@ +""" +Requires: + Nothing + +Provides: + Instance +""" + +import pyblish.api +from pprint import pformat + + +class CollectFPS(pyblish.api.InstancePlugin): + """ + Adds fps from context to instance because of ExtractReview + """ + + label = "Collect fps" + order = pyblish.api.CollectorOrder + 0.49 + hosts = ["webpublisher"] + + def process(self, instance): + fps = instance.context.data["fps"] + + instance.data.update({ + "fps": fps + }) + self.log.debug(f"instance.data: {pformat(instance.data)}") diff --git 
a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 67d743278b..5bc13dff96 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -69,6 +69,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_type = ctx["attributes"]["type"] else: asset = ctx["name"] + os.environ["AVALON_TASK"] = "" is_sequence = len(task_data["files"]) > 1 @@ -94,12 +95,16 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["stagingDir"] = task_dir instance.data["source"] = "webpublisher" - os.environ["FTRACK_API_USER"] = task_data["user"] + instance.data["user_email"] = task_data["user"] if is_sequence: instance.data["representations"] = self._process_sequence( task_data["files"], task_dir ) + instance.data["frameStart"] = \ + instance.data["representations"][0]["frameStart"] + instance.data["frameEnd"] = \ + instance.data["representations"][0]["frameEnd"] else: instance.data["representations"] = self._get_single_repre( task_dir, task_data["files"] @@ -122,7 +127,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "name": ext[1:], "ext": ext[1:], "files": files[0], - "stagingDir": task_dir + "stagingDir": task_dir, + "tags": ["review"] } self.log.info("single file repre_data.data:: {}".format(repre_data)) return [repre_data] @@ -142,7 +148,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "name": ext[1:], "ext": ext[1:], "files": files, - "stagingDir": task_dir + "stagingDir": task_dir, + "tags": ["review"] } self.log.info("sequences repre_data.data:: {}".format(repre_data)) return [repre_data] From 45fbdcbb564606b59c5f96a8a1232cd2bf596974 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 18 Aug 2021 14:23:45 +0200 Subject: [PATCH 164/308] Webpublisher - added storing full log to Mongo --- openpype/pype_commands.py | 17 
++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 17b6d58ffd..19981d2a39 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -181,17 +181,26 @@ class PypeCommands: "status": "in_progress" }).inserted_id + log_lines = [] for result in pyblish.util.publish_iter(): + for record in result["records"]: + log_lines.append("{}: {}".format( + result["plugin"].label, record.msg)) + if result["error"]: log.error(error_format.format(**result)) uninstall() + log_lines.append(error_format.format(**result)) dbcon.update_one( {"_id": _id}, {"$set": { "finish_date": datetime.now(), "status": "error", - "msg": error_format.format(**result) + "msg": "Publishing failed > click here and paste " + "report to slack OpenPype support", + "log": os.linesep.join(log_lines) + }} ) sys.exit(1) @@ -200,7 +209,8 @@ class PypeCommands: {"_id": _id}, {"$set": { - "progress": max(result["progress"], 0.95) + "progress": max(result["progress"], 0.95), + "log": os.linesep.join(log_lines) }} ) @@ -210,7 +220,8 @@ class PypeCommands: { "finish_date": datetime.now(), "status": "finished_ok", - "progress": 1 + "progress": 1, + "log": os.linesep.join(log_lines) }} ) From f459791902877c5c6f3e2a13217e2fe52a5bf70d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 18 Aug 2021 16:59:13 +0200 Subject: [PATCH 165/308] Webpublisher - added reprocess functionality Added system settings to enable webpublish --- openpype/modules/webserver/webserver_cli.py | 151 ++++++++++++------ .../defaults/system_settings/modules.json | 3 + .../schemas/system_schema/schema_modules.json | 14 ++ 3 files changed, 121 insertions(+), 47 deletions(-) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 0812bfa372..dcaa0b4e7b 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -1,4 +1,10 @@ import time +import os 
+from datetime import datetime +import requests +import json + + from .webpublish_routes import ( RestApiResource, OpenPypeRestApiResource, @@ -10,6 +16,8 @@ from .webpublish_routes import ( PublishesStatusEndpoint ) +from openpype.api import get_system_settings + def run_webserver(*args, **kwargs): """Runs webserver in command line, adds routes.""" @@ -19,56 +27,105 @@ def run_webserver(*args, **kwargs): webserver_module = manager.modules_by_name["webserver"] webserver_module.create_server_manager() - resource = RestApiResource(webserver_module.server_manager, - upload_dir=kwargs["upload_dir"], - executable=kwargs["executable"]) - projects_endpoint = WebpublisherProjectsEndpoint(resource) - webserver_module.server_manager.add_route( - "GET", - "/api/projects", - projects_endpoint.dispatch - ) + is_webpublish_enabled = get_system_settings()["modules"]\ + ["webpublish_tool"]["enabled"] - hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) - webserver_module.server_manager.add_route( - "GET", - "/api/hierarchy/{project_name}", - hiearchy_endpoint.dispatch - ) + if is_webpublish_enabled: + resource = RestApiResource(webserver_module.server_manager, + upload_dir=kwargs["upload_dir"], + executable=kwargs["executable"]) + projects_endpoint = WebpublisherProjectsEndpoint(resource) + webserver_module.server_manager.add_route( + "GET", + "/api/projects", + projects_endpoint.dispatch + ) - # triggers publish - webpublisher_task_publish_endpoint = \ - WebpublisherBatchPublishEndpoint(resource) - webserver_module.server_manager.add_route( - "POST", - "/api/webpublish/batch", - webpublisher_task_publish_endpoint.dispatch - ) + hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) + webserver_module.server_manager.add_route( + "GET", + "/api/hierarchy/{project_name}", + hiearchy_endpoint.dispatch + ) - webpublisher_batch_publish_endpoint = \ - WebpublisherTaskPublishEndpoint(resource) - webserver_module.server_manager.add_route( - "POST", - "/api/webpublish/task", - 
webpublisher_batch_publish_endpoint.dispatch - ) + # triggers publish + webpublisher_task_publish_endpoint = \ + WebpublisherBatchPublishEndpoint(resource) + webserver_module.server_manager.add_route( + "POST", + "/api/webpublish/batch", + webpublisher_task_publish_endpoint.dispatch + ) - # reporting - openpype_resource = OpenPypeRestApiResource() - batch_status_endpoint = BatchStatusEndpoint(openpype_resource) - webserver_module.server_manager.add_route( - "GET", - "/api/batch_status/{batch_id}", - batch_status_endpoint.dispatch - ) + webpublisher_batch_publish_endpoint = \ + WebpublisherTaskPublishEndpoint(resource) + webserver_module.server_manager.add_route( + "POST", + "/api/webpublish/task", + webpublisher_batch_publish_endpoint.dispatch + ) - user_status_endpoint = PublishesStatusEndpoint(openpype_resource) - webserver_module.server_manager.add_route( - "GET", - "/api/publishes/{user}", - user_status_endpoint.dispatch - ) + # reporting + openpype_resource = OpenPypeRestApiResource() + batch_status_endpoint = BatchStatusEndpoint(openpype_resource) + webserver_module.server_manager.add_route( + "GET", + "/api/batch_status/{batch_id}", + batch_status_endpoint.dispatch + ) - webserver_module.start_server() - while True: - time.sleep(0.5) + user_status_endpoint = PublishesStatusEndpoint(openpype_resource) + webserver_module.server_manager.add_route( + "GET", + "/api/publishes/{user}", + user_status_endpoint.dispatch + ) + + webserver_module.start_server() + last_reprocessed = time.time() + while True: + if is_webpublish_enabled: + if time.time() - last_reprocessed > 60: + reprocess_failed(kwargs["upload_dir"]) + last_reprocessed = time.time() + time.sleep(1.0) + + +def reprocess_failed(upload_dir): + print("reprocess_failed") + from openpype.lib import OpenPypeMongoConnection + + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + dbcon = mongo_client[database_name]["webpublishes"] + + results = 
dbcon.find({"status": "reprocess"}) + + for batch in results: + print("batch:: {}".format(batch)) + batch_url = os.path.join(upload_dir, + batch["batch_id"], + "manifest.json") + if not os.path.exists(batch_url): + msg = "Manifest {} not found".format(batch_url) + print(msg) + dbcon.update_one( + {"_id": batch["_id"]}, + {"$set": + { + "finish_date": datetime.now(), + "status": "error", + "progress": 1, + "log": batch.get("log") + msg + }} + ) + continue + + server_url = "{}/api/webpublish/batch".format( + os.environ["OPENPYPE_WEBSERVER_URL"]) + + with open(batch_url) as f: + data = json.loads(f.read()) + + r = requests.post(server_url, json=data) + print(r.status_code) \ No newline at end of file diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 1b74b4695c..3f9b098a96 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -165,6 +165,9 @@ "standalonepublish_tool": { "enabled": true }, + "webpublish_tool": { + "enabled": false + }, "project_manager": { "enabled": true }, diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 7d734ff4fd..f82c3632a9 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -195,6 +195,20 @@ } ] }, + { + "type": "dict", + "key": "webpublish_tool", + "label": "Web Publish", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "dict", "key": "project_manager", From e19e4f58ec9f07c57018da9746da8b14ed996290 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 18 Aug 2021 19:02:39 +0200 Subject: [PATCH 166/308] add pre-launch hook, minor fixes --- 
openpype/hosts/houdini/api/plugin.py | 6 +- openpype/hosts/houdini/hooks/set_paths.py | 18 ++++++ .../plugins/create/create_alembic_camera.py | 1 - .../plugins/create/create_composite.py | 9 ++- .../plugins/create/create_redshift_rop.py | 10 +++- .../plugins/create/create_remote_publish.py | 21 ------- .../plugins/create/create_usd_model.py | 40 ------------- .../plugins/create/create_usd_workspaces.py | 59 ------------------- .../plugins/publish/extract_alembic.py | 3 - .../plugins/publish/extract_composite.py | 1 - .../houdini/plugins/publish/extract_usd.py | 13 ++-- .../plugins/publish/extract_usd_layered.py | 3 +- .../plugins/publish/extract_vdb_cache.py | 9 +-- .../publish/validate_outnode_exists.py | 50 ---------------- openpype/hosts/houdini/startup/scripts/123.py | 1 - 15 files changed, 48 insertions(+), 196 deletions(-) create mode 100644 openpype/hosts/houdini/hooks/set_paths.py delete mode 100644 openpype/hosts/houdini/plugins/create/create_remote_publish.py delete mode 100644 openpype/hosts/houdini/plugins/create/create_usd_model.py delete mode 100644 openpype/hosts/houdini/plugins/create/create_usd_workspaces.py delete mode 100644 openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index d84427bfee..989bae12e3 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -14,10 +14,10 @@ class OpenPypeCreatorError(Exception): class Creator(PypeCreatorMixin, houdini.Creator): def process(self): - instance = super(houdini.Creator, self).process() - # re-raise as standard Python exception so - # Avalon can catch it try: + # re-raise as standard Python exception so + # Avalon can catch it + instance = super(Creator, self).process() self._process(instance) except hou.Error as er: six.reraise(OpenPypeCreatorError, OpenPypeCreatorError("Creator error"), sys.exc_info()[2]) diff --git 
a/openpype/hosts/houdini/hooks/set_paths.py b/openpype/hosts/houdini/hooks/set_paths.py new file mode 100644 index 0000000000..cd2f98fb76 --- /dev/null +++ b/openpype/hosts/houdini/hooks/set_paths.py @@ -0,0 +1,18 @@ +from openpype.lib import PreLaunchHook +import os + + +class SetPath(PreLaunchHook): + """Set current dir to workdir. + + Hook `GlobalHostDataHook` must be executed before this hook. + """ + app_groups = ["houdini"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + os.chdir(workdir) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index d65e2a5e98..043cd0945f 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -33,7 +33,6 @@ class CreateAlembicCamera(plugin.Creator): if self.nodes: node = self.nodes[0] path = node.path() - # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index d19c97de86..e278708076 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -35,5 +35,10 @@ class CreateCompositeSequence(plugin.Creator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + try: + parm = instance.parm(name) + parm.lock(True) + except AttributeError: + # missing lock pattern + self.log.debug( + "missing lock pattern {}".format(name)) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py 
b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 06b70a01c2..6949ca169b 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -38,9 +38,13 @@ class CreateRedshiftROP(plugin.Creator): instance.setName(basename + "_ROP", unique_name=True) # Also create the linked Redshift IPR Rop - ipr_rop = self.parent.createNode( - "Redshift_IPR", node_name=basename + "_IPR" - ) + try: + ipr_rop = self.parent.createNode( + "Redshift_IPR", node_name=basename + "_IPR" + ) + except hou.OperationFailed: + raise Exception(("Cannot create Redshift node. Is Redshift " + "installed and enabled?")) # Move it to directly under the Redshift ROP ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) diff --git a/openpype/hosts/houdini/plugins/create/create_remote_publish.py b/openpype/hosts/houdini/plugins/create/create_remote_publish.py deleted file mode 100644 index 18074fa560..0000000000 --- a/openpype/hosts/houdini/plugins/create/create_remote_publish.py +++ /dev/null @@ -1,21 +0,0 @@ -from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api import lib - - -class CreateRemotePublish(plugin.Creator): - """Create Remote Publish Submission Settings node.""" - - label = "Remote Publish" - family = "remotePublish" - icon = "cloud-upload" - - def _process(self, instance): - """This is a stub creator process. - - This does not create a regular instance that the instance collector - picks up. Instead we force this one to solely create something we - explicitly want to create. The only reason this class is here is so - that Artists can also create the node through the Avalon creator. 
- - """ - lib.create_remote_publish_node(force=True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd_model.py b/openpype/hosts/houdini/plugins/create/create_usd_model.py deleted file mode 100644 index 5e6bd9e3b0..0000000000 --- a/openpype/hosts/houdini/plugins/create/create_usd_model.py +++ /dev/null @@ -1,40 +0,0 @@ -import re -from openpype.hosts.houdini.api import plugin -import hou - - -class CreateUSDModel(plugin.Creator): - """Author USD Model""" - - label = "USD Model" - family = "usdModel" - icon = "gears" - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ - node_type = "op::author_model:1.0" - - subset = self.data["subset"] - name = "author_{}".format(subset) - variant = re.match("usdModel(.*)", subset).group(1) - - # Get stage root and create node - stage = hou.node("/stage") - print("creating node {}/{}".format(node_type, name)) - instance = stage.createNode(node_type, node_name=name) - instance.moveToGoodPosition(move_unconnected=True) - - parms = {"asset_name": self.data["asset"], "variant_name": variant} - - # Set the Geo Path to the first selected node (if any) - selection = hou.selectedNodes() - if selection: - node = selection[0] - parms["geo_path"] = node.path() - - instance.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py b/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py deleted file mode 100644 index 0e24ca086b..0000000000 --- a/openpype/hosts/houdini/plugins/create/create_usd_workspaces.py +++ /dev/null @@ -1,59 +0,0 @@ -from openpype.hosts.houdini.api import plugin -import hou - - -class _USDWorkspace(plugin.Creator): - """Base class to create pre-built USD Workspaces""" - - node_name = None - node_type = None - step = None - icon = "gears" - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ - if not all([self.node_type, self.node_name, self.step]): - self.log.error("Incomplete USD Workspace parameters") - return - - name = self.node_name - node_type = self.node_type - - # Force the subset to "{asset}.{step}.usd" - subset = "usd{step}".format(step=self.step) - self.data["subset"] = subset - - # Get stage root and create node - stage = hou.node("/stage") - instance = stage.createNode(node_type, node_name=name) - instance.moveToGoodPosition() - - # With the Workspace HDAs there is no need to imprint the instance data - # since this data is pre-built into it. However, we do set the right - # asset as that can be defined by the user. - parms = {"asset": self.data["asset"]} - instance.setParms(parms) - - return instance - - -class USDCreateShadingWorkspace(_USDWorkspace): - """USD Shading Workspace""" - - defaults = ["Shade"] - - label = "USD Shading Workspace" - family = "colorbleed.shade.usd" - - node_type = "op::shadingWorkspace::1.0" - node_name = "shadingWorkspace" - step = "Shade" - - -# Don't allow the base class to be picked up by Avalon -del _USDWorkspace diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 23f926254b..83b790407f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -10,13 +10,10 @@ class ExtractAlembic(openpype.api.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Alembic" hosts = ["houdini"] - targets = ["local"] families = ["pointcache", "camera"] def process(self, instance): - import hou - ropnode = instance[0] # Get the filename from the filename parameter diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 63cee5d9c9..f300b6d28d 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ 
b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -11,7 +11,6 @@ class ExtractComposite(openpype.api.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Composite (Image Sequence)" hosts = ["houdini"] - targets = ["local"] families = ["imagesequence"] def process(self, instance): diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index ae1dfb3f8f..0fc26900fb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -10,7 +10,6 @@ class ExtractUSD(openpype.api.Extractor): order = pyblish.api.ExtractorOrder label = "Extract USD" hosts = ["houdini"] - targets = ["local"] families = ["usd", "usdModel", "usdSetDress"] @@ -31,7 +30,13 @@ class ExtractUSD(openpype.api.Extractor): assert os.path.exists(output), "Output does not exist: %s" % output - if "files" not in instance.data: - instance.data["files"] = [] + if "representations" not in instance.data: + instance.data["representations"] = [] - instance.data["files"].append(file_name) + representation = { + 'name': 'usd', + 'ext': 'usd', + 'files': file_name, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index b9741c50ca..645bd05d4b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -155,8 +155,7 @@ class ExtractUSDLayered(openpype.api.Extractor): order = pyblish.api.ExtractorOrder label = "Extract Layered USD" hosts = ["houdini"] - targets = ["local"] - families = ["colorbleed.usd.layered", "usdShade"] + families = ["usdLayered", "usdShade"] # Force Output Processors so it will always save any file # into our unique staging directory with processed Avalon paths diff --git 
a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 432faf68c3..78794acc97 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -2,7 +2,7 @@ import os import pyblish.api import openpype.api -from openpype.hosts.api.houdini.lib import render_rop +from openpype.hosts.houdini.api.lib import render_rop class ExtractVDBCache(openpype.api.Extractor): @@ -10,13 +10,10 @@ class ExtractVDBCache(openpype.api.Extractor): order = pyblish.api.ExtractorOrder + 0.1 label = "Extract VDB Cache" families = ["vdbcache"] - targets = ["local"] hosts = ["houdini"] def process(self, instance): - import hou - ropnode = instance[0] # Get the filename from the filename parameter @@ -36,8 +33,8 @@ class ExtractVDBCache(openpype.api.Extractor): instance.data["representations"] = [] representation = { - "name": "mov", - "ext": "mov", + "name": "vdb", + "ext": "vdb", "files": output, "stagingDir": staging_dir, } diff --git a/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py b/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py deleted file mode 100644 index aedc68d5df..0000000000 --- a/openpype/hosts/houdini/plugins/publish/validate_outnode_exists.py +++ /dev/null @@ -1,50 +0,0 @@ -import pyblish.api -import openpype.api - - -class ValidatOutputNodeExists(pyblish.api.InstancePlugin): - """Validate if node attribute Create intermediate Directories is turned on. 
- - Rules: - * The node must have Create intermediate Directories turned on to - ensure the output file will be created - - """ - - order = openpype.api.ValidateContentsOrder - families = ["*"] - hosts = ["houdini"] - label = "Output Node Exists" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError("Could not find output node(s)!") - - @classmethod - def get_invalid(cls, instance): - - import hou - - result = set() - - node = instance[0] - if node.type().name() == "alembic": - soppath_parm = "sop_path" - else: - # Fall back to geometry node - soppath_parm = "soppath" - - sop_path = node.parm(soppath_parm).eval() - output_node = hou.node(sop_path) - - if output_node is None: - cls.log.error("Node at '%s' does not exist" % sop_path) - result.add(node.path()) - - # Added cam as this is a legit output type (cameras can't - if output_node.type().name() not in ["output", "cam"]: - cls.log.error("SOP Path does not end path at output node") - result.add(node.path()) - - return result diff --git a/openpype/hosts/houdini/startup/scripts/123.py b/openpype/hosts/houdini/startup/scripts/123.py index 6d90b8352e..4233d68c15 100644 --- a/openpype/hosts/houdini/startup/scripts/123.py +++ b/openpype/hosts/houdini/startup/scripts/123.py @@ -1,5 +1,4 @@ from avalon import api, houdini -import hou def main(): From c6a52d6abba316035cf55844b8857f484904d639 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 19 Aug 2021 10:30:03 +0200 Subject: [PATCH 167/308] =?UTF-8?q?set=20frame=20range=20for=20camera=20al?= =?UTF-8?q?embic,=20first=20hit=20on=20documentation=20=F0=9F=93=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../plugins/create/create_alembic_camera.py | 1 + website/docs/artist_hosts_houdini.md | 26 +++++++++++++++++++ website/sidebars.js | 1 + 3 files changed, 28 insertions(+) create mode 100644 website/docs/artist_hosts_houdini.md diff --git 
a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 043cd0945f..eef86005f5 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -43,3 +43,4 @@ class CreateAlembicCamera(plugin.Creator): # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. instance.parm("use_sop_path").lock(True) + instance.parm("trange").set(1) diff --git a/website/docs/artist_hosts_houdini.md b/website/docs/artist_hosts_houdini.md new file mode 100644 index 0000000000..d9d85394e2 --- /dev/null +++ b/website/docs/artist_hosts_houdini.md @@ -0,0 +1,26 @@ +--- +id: artist_hosts_houdini +title: Houdini +sidebar_label: Houdini +--- + +## OpenPype global tools + +- [Work Files](artist_tools.md#workfiles) +- [Create](artist_tools.md#creator) +- [Load](artist_tools.md#loader) +- [Manage (Inventory)](artist_tools.md#inventory) +- [Publish](artist_tools.md#publisher) +- [Library Loader](artist_tools.md#library-loader) + +## Publishing +### Publishing Alembic Cameras +You can publish baked camera in Alembic format. Select your camera and go **OpenPype -> Create** and select **Camera (abc)**. +This will create Alembic Driver node in **out** with path and frame range already set. This node will have a name you've +assigned in the **Creator** menu. For example if you name the subset `Default`, output Alembic Driver will be named +`cameraDefault`. After that, you can **OpenPype -> Publish** and after some validations your camera will be published +to `abc` file. + +### Composite - Image Sequence +You can publish image sequence directly from Houdini. You can use any `cop` network you have and publish image +sequence generated from it. 
\ No newline at end of file diff --git a/website/sidebars.js b/website/sidebars.js index 488814a385..3a4b933b9a 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -22,6 +22,7 @@ module.exports = { "artist_hosts_maya", "artist_hosts_blender", "artist_hosts_harmony", + "artist_hosts_houdini", "artist_hosts_aftereffects", "artist_hosts_resolve", "artist_hosts_photoshop", From d2a34a6c712b65de90e206616128a01ddfe82c4e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 19 Aug 2021 12:52:44 +0200 Subject: [PATCH 168/308] Webpublisher - added reprocess functionality --- openpype/modules/webserver/webserver_cli.py | 46 +++++++++++++-------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index dcaa0b4e7b..2eee20f855 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -4,6 +4,7 @@ from datetime import datetime import requests import json +from openpype.lib import PypeLogger from .webpublish_routes import ( RestApiResource, @@ -18,6 +19,10 @@ from .webpublish_routes import ( from openpype.api import get_system_settings +SERVER_URL = "http://172.17.0.1:8079" # machine is not listening on localhost + +log = PypeLogger().get_logger("webserver_gui") + def run_webserver(*args, **kwargs): """Runs webserver in command line, adds routes.""" @@ -27,9 +32,14 @@ def run_webserver(*args, **kwargs): webserver_module = manager.modules_by_name["webserver"] webserver_module.create_server_manager() - is_webpublish_enabled = get_system_settings()["modules"]\ - ["webpublish_tool"]["enabled"] + is_webpublish_enabled = False + webpublish_tool = get_system_settings()["modules"].\ + get("webpublish_tool") + if webpublish_tool and webpublish_tool["enabled"]: + is_webpublish_enabled = True + + log.debug("is_webpublish_enabled {}".format(is_webpublish_enabled)) if is_webpublish_enabled: resource = 
RestApiResource(webserver_module.server_manager, upload_dir=kwargs["upload_dir"], @@ -81,18 +91,18 @@ def run_webserver(*args, **kwargs): user_status_endpoint.dispatch ) - webserver_module.start_server() - last_reprocessed = time.time() - while True: - if is_webpublish_enabled: - if time.time() - last_reprocessed > 60: - reprocess_failed(kwargs["upload_dir"]) - last_reprocessed = time.time() - time.sleep(1.0) + webserver_module.start_server() + last_reprocessed = time.time() + while True: + if is_webpublish_enabled: + if time.time() - last_reprocessed > 20: + reprocess_failed(kwargs["upload_dir"]) + last_reprocessed = time.time() + time.sleep(1.0) def reprocess_failed(upload_dir): - print("reprocess_failed") + # log.info("check_reprocesable_records") from openpype.lib import OpenPypeMongoConnection mongo_client = OpenPypeMongoConnection.get_mongo_client() @@ -100,12 +110,11 @@ def reprocess_failed(upload_dir): dbcon = mongo_client[database_name]["webpublishes"] results = dbcon.find({"status": "reprocess"}) - for batch in results: - print("batch:: {}".format(batch)) batch_url = os.path.join(upload_dir, batch["batch_id"], "manifest.json") + log.info("batch:: {} {}".format(os.path.exists(batch_url), batch_url)) if not os.path.exists(batch_url): msg = "Manifest {} not found".format(batch_url) print(msg) @@ -120,12 +129,13 @@ def reprocess_failed(upload_dir): }} ) continue - - server_url = "{}/api/webpublish/batch".format( - os.environ["OPENPYPE_WEBSERVER_URL"]) + server_url = "{}/api/webpublish/batch".format(SERVER_URL) with open(batch_url) as f: data = json.loads(f.read()) - r = requests.post(server_url, json=data) - print(r.status_code) \ No newline at end of file + try: + r = requests.post(server_url, json=data) + log.info("response{}".format(r)) + except: + log.info("exception", exc_info=True) From 932ae5fbb4014e9886a8e679ce8c2b7859439199 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 19 Aug 2021 13:03:25 +0200 Subject: [PATCH 169/308] Hound --- 
.../plugins/publish/collect_published_files.py | 11 ++++++----- .../webpublisher/plugins/publish/collect_username.py | 6 +++++- openpype/modules/webserver/webpublish_routes.py | 2 +- openpype/pype_commands.py | 2 -- 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 5bc13dff96..cd231a0efc 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -49,9 +49,10 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): return data def _process_batch(self, dir_url): - task_subfolders = [os.path.join(dir_url, o) - for o in os.listdir(dir_url) - if os.path.isdir(os.path.join(dir_url, o))] + task_subfolders = [ + os.path.join(dir_url, o) + for o in os.listdir(dir_url) + if os.path.isdir(os.path.join(dir_url, o))] self.log.info("task_sub:: {}".format(task_subfolders)) for task_dir in task_subfolders: task_data = self._load_json(os.path.join(task_dir, @@ -185,8 +186,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): assert found_family, msg return found_family, \ - content["families"], \ - content["subset_template_name"] + content["families"], \ + content["subset_template_name"] def _get_version(self, asset_name, subset_name): """Returns version number or 0 for 'asset' and 'subset'""" diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_username.py b/openpype/hosts/webpublisher/plugins/publish/collect_username.py index 25d6f190a3..0c2c6310f4 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_username.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_username.py @@ -20,6 +20,9 @@ class CollectUsername(pyblish.api.ContextPlugin): Emails in Ftrack are same as company's Slack, username is needed to load data to Ftrack. 
+ Expects "pype.club" user created on Ftrack and FTRACK_BOT_API_KEY env + var set up. + """ order = pyblish.api.CollectorOrder - 0.488 label = "Collect ftrack username" @@ -39,7 +42,8 @@ class CollectUsername(pyblish.api.ContextPlugin): email)) if not user: - raise ValueError("Couldnt find user with {} email".format(email)) + raise ValueError( + "Couldnt find user with {} email".format(email)) os.environ["FTRACK_API_USER"] = user[0].get("username") break diff --git a/openpype/modules/webserver/webpublish_routes.py b/openpype/modules/webserver/webpublish_routes.py index 5322802130..32feb276ed 100644 --- a/openpype/modules/webserver/webpublish_routes.py +++ b/openpype/modules/webserver/webpublish_routes.py @@ -205,7 +205,7 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): log.info("args:: {}".format(args)) - _exit_code = subprocess.call(args) + subprocess.call(args) return Response( status=200, body=self.resource.encode(output), diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 19981d2a39..d288e9f2a3 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -197,8 +197,6 @@ class PypeCommands: { "finish_date": datetime.now(), "status": "error", - "msg": "Publishing failed > click here and paste " - "report to slack OpenPype support", "log": os.linesep.join(log_lines) }} From 2191385f9f9bbafb175b79fac44c9dda78478010 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 19 Aug 2021 13:18:12 +0200 Subject: [PATCH 170/308] =?UTF-8?q?expanded=20documentation=20=F0=9F=93=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../plugins/create/create_pointcache.py | 1 + .../plugins/create/create_usdrender.py | 5 +- .../plugins/create/create_vbd_cache.py | 1 + website/docs/artist_hosts_houdini.md | 62 ++++++++++++++++-- .../docs/assets/houdini_imagesequence_cop.png | Bin 0 -> 19661 bytes .../docs/assets/houdini_pointcache_path.png | Bin 0 -> 80679 bytes 
website/docs/assets/houdini_usd_stage.png | Bin 0 -> 762416 bytes website/docs/assets/houdini_vdb_setup.png | Bin 0 -> 28411 bytes 8 files changed, 63 insertions(+), 6 deletions(-) create mode 100644 website/docs/assets/houdini_imagesequence_cop.png create mode 100644 website/docs/assets/houdini_pointcache_path.png create mode 100644 website/docs/assets/houdini_usd_stage.png create mode 100644 website/docs/assets/houdini_vdb_setup.png diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 28468bf073..feb683edf6 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -39,6 +39,7 @@ class CreatePointCache(plugin.Creator): parms.update({"sop_path": node.path()}) instance.setParms(parms) + instance.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index 5cf03a211f..9b98f59ac1 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -17,7 +17,7 @@ class CreateUSDRender(plugin.Creator): # Remove the active, we are checking the bypass flag of the nodes self.data.pop("active", None) - self.data.update({"node_type": "usdrender_rop"}) + self.data.update({"node_type": "usdrender"}) def _process(self, instance): """Creator main entry point. 
@@ -30,6 +30,9 @@ class CreateUSDRender(plugin.Creator): # Render frame range "trange": 1 } + if self.nodes: + node = self.nodes[0] + parms.update({"loppath": node.path()}) instance.setParms(parms) # Lock some Avalon attributes diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 2047ae2e76..242c21fc72 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -28,6 +28,7 @@ class CreateVDBCache(plugin.Creator): parms = { "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, "initsim": True, + "trange": 1 } if self.nodes: diff --git a/website/docs/artist_hosts_houdini.md b/website/docs/artist_hosts_houdini.md index d9d85394e2..f70eac625a 100644 --- a/website/docs/artist_hosts_houdini.md +++ b/website/docs/artist_hosts_houdini.md @@ -13,14 +13,66 @@ sidebar_label: Houdini - [Publish](artist_tools.md#publisher) - [Library Loader](artist_tools.md#library-loader) -## Publishing -### Publishing Alembic Cameras +## Publishing Alembic Cameras You can publish baked camera in Alembic format. Select your camera and go **OpenPype -> Create** and select **Camera (abc)**. -This will create Alembic Driver node in **out** with path and frame range already set. This node will have a name you've +This will create Alembic ROP in **out** with path and frame range already set. This node will have a name you've assigned in the **Creator** menu. For example if you name the subset `Default`, output Alembic Driver will be named `cameraDefault`. After that, you can **OpenPype -> Publish** and after some validations your camera will be published to `abc` file. -### Composite - Image Sequence +## Publishing Composites - Image Sequences You can publish image sequence directly from Houdini. You can use any `cop` network you have and publish image -sequence generated from it. \ No newline at end of file +sequence generated from it. 
For example I've created simple **cop** graph to generate some noise: +![Noise COP](assets/houdini_imagesequence_cop.png) + +If I want to publish it, I'll select node I like - in this case `radialblur1` and go **OpenPype -> Create** and +select **Composite (Image Sequence)**. This will create `/out/imagesequenceNoise` Composite ROP (I've named my subset +*Noise*) with frame range set. When you hit **Publish** it will render image sequence from selected node. + +## Publishing Point Caches (alembic) +Publishing point caches in alembic format is pretty straightforward, but it is by default enforcing better compatibility +with other DCCs, so it needs data do be exported prepared in certain way. You need to add `path` attribute so objects +in alembic are better structured. When using alembic round trip in Houdini (loading alembics, modifying then and +then publishing modifications), `path` is automatically resolved by alembic nodes. + +In this example, I've created this node graph on **sop** level, and I want to publish it as point cache. + +![Pointcache setup](assets/houdini_pointcache_path.png) + +*Note: `connectivity` will add index for each primitive and `primitivewrangle1` will add `path` attribute, so it will +be for each primitive (`sphere1` and `sphere2`) as Maya is expecting - `strange_GRP/strange0_GEO/strange0_GEOShape`. How +you handle `path` attribute is up to you, this is just an example.* + +Now select the `output0` node and go **OpenPype -> Create** and select **Point Cache**. It will create +Alembic ROP `/out/pointcacheStrange` + + +## Redshift +:::note Work in progress +This part of documentation is still work in progress. +::: + +## USD +### Publishing USD +You can publish your Solaris Stage as USD file. +![Solaris USD](assets/houdini_usd_stage.png) + +This is very simple test stage. I've selected `output` **lop** node and went to **OpenPype -> Create** where I've +selected **USD**. This created `/out/usdDefault` USD ROP node. 
+ +### Publishing USD render + +USD Render works in similar manner as USD file, except it will create **USD Render** ROP node in out and will publish +images produced by it. If you have selected node in Solaris Stage it will by added as **lop path** to ROP. + +## Publishing VDB + +Publishing VDB files works as with other data types. In this example I've created simple PyroFX explosion from +sphere. In `pyro_import` I've converted the volume to VDB: + +![VDB Setup](assets/houdini_vdb_setup.png) + +I've selected `vdb1` and went **OpenPype -> Create** and selected **VDB Cache**. This will create +geometry ROP in `/out` and sets its paths to output vdb files. During the publishing process +whole dops are cooked. + diff --git a/website/docs/assets/houdini_imagesequence_cop.png b/website/docs/assets/houdini_imagesequence_cop.png new file mode 100644 index 0000000000000000000000000000000000000000..54ed5977b90c8b552098f22487907a21be7a3b90 GIT binary patch literal 19661 zcmd3t<8vl$^yiZaW@1|t+_CM<#I}9Mwr!ge+qP}nwry{o-|m~O+CN}lbX|SAy81$& zI(@G1`E;nPvteHV{?z;iCmlxp&shq?;`8L3f z|4RJ@0jY_Gc?Uv#+fX*5YW5%?@O}RsphMQh1|T4kdSZfq6Z9{(4x*IdEBH5(e6Cr0Rf2X#S2X{+I~JFS@jJ-xMaSyAMqS*nya=sS%P3k z8fk|@cbGA!x(dQbJ zfBrwdRrkMn{bsIkjTXGSH(n{i_I56l@QL)yf0i#-`jfw+RG7G^AB>(ey?SN>Q^<@LPiRJ( zbWu-n==TuO5Xn$sOXrMtE(Q4nvo_-V!(c$SIO9XsrJL}VBWWzEad!|v*nmvu1ee%f z3-i<*W&*-?IcR3hie?Y9T~y)>Tw20 z#eLxt-eLsCSq!JfIfNpSEV8PfMAE8aEm-7h4e)Y<uGZ3nY1Fezq?(KgRhKy+LbYC&>B{6hX5gS+N`J}*~~TPC#7>2KCzv=dV7RsKNG zf#uajkOWSpCt1_OuaZKpo&3|&QfruUvdiFQpK+Eg?rtQf5BN%3av0ccjuz@pAMzWypED0OHq@L%3f|q0je!^xEvHO#czA#iJKP=gacft zscpz?ySfh4&BXn0Ka0DF#fdf}<|NM!U3E6)BBt1Tyc6DyKtQ6?m#S!GxVQj#OQ8edaiplTfj zGaCyN)Yyg1-XI)TEUvZk0B)9@7*&p;e6t+?Bv5R8R(dyl1OBanwPXvvZ(lqA z!@peK;*68|{sGx~U2L+s-5;5bD+ zb>=_9=_$`o>3R=)L1ld9Vq)gT*mbWnUzSC>%Z8oMypCGlb~XAmdIXsKO$j97#In+N zGO;q-ZyU3==^(6RV`mTf{QR7b8|cfOf*v&qE6?A4A+~yeeBi_b!`m399{-zQdn!&| zVqLZ7A27c^9VdZRa+at2PEI>K^x3rKd*S^ByWsr=p87d8#jv><0ParDW99jR=29V6 
ze0f8}!G+WZrjL1v<=?PBi`qGM`N!gkBY~@kfYoun6~gl{t;o`tMuH3S?PQOB0CW54 zXVp{cI9u0?)cwQ5jzZQeD!%JJlA@yG_I!!LwVQu(uf<9&L^RJM=4Ho|c0yuePG=`R zyok+mV!Mjq%-o!RdOGg=O4dh;U1lT>Td5%!7?@dGg+^1U=eqqA&+zCdzolix=w$1L z6*C)KUVc6}Z>FZ1nc4hfxUbNK>TF|7j<`ynWE8W)n>e|;a>V#KQMzgFpBJ|=vL@h| zv9*AkP%7o#35BRFr;^(3nGSG%Eqe3c-{)kU9cGzR4VWF^KpF)sDz_X%U1JSt%$8p1 z8s%s>T*Abvz)y zQ=ew8sb_MDEuipFcrJ|zVLQ=4{1&#vsb`9BM^de4EajBV_4DOONt9GtQAOpaFoW4k zB->^P+op>^OIYy5)Yj)?lc=~jzoB7)IvY6Rd2z6ZQjA# zli)`)W83d6ws?Ipf}|;W&#raFVQZqU2?x=GXn04C{Gy_=7^@#pTO{`=DsAaSSzhXa zp}XSIe4o#OmW+&yiZ0I|SKc~kBP8k-OwM6+kmwcFm5=JBFUNiAjd{om3k^9OnnaXH zE8s+>d(KzBX_k#Wr@TtgjYHW7lWp}9_!3n&iB5Z53cihfKHp-`(w$wVv)uOy`Y*z&9;HfrDc(q`Y$e> zr5a?MJaX*#t?&vD#;Exj{vd@hg!Lq(B&eu^gM+$^5Z)3i zLmAGSC69z_p)g_LG)~{6xK*g?6u4J^t0on#8Wz&h0n>Id(lHq!L9PgSDUt#yk7YYD{LM&3N z&1t7F%}Rc5t;t&DjQ5zjb>_m3ly)rj)g$(kEDJ_TYvd(QX_%JcA8R6Q$<<|{6KaJf zwmKuzhcVZTQ#}-5097s|!7qOrRmlF78u_u(e}eO2itdu#W><@Q{%dSHj>?tt11Vk1 z1-GxJr*T!$!{tO~`l~D}=#aLcb0|6!$UDY8fG1>0H3NWnZTnO4)7w8)w$|$%a z(BzGo^;tLdn{u(ZGLqBzlG?*MGvMlivO#LR&l&h~i)Q9V>330`Z7g;^{#~rina{u zn*MR6`ZKCtdW(>A%TrZ>uuF-35XlT(jhubWlZpiO+^YRPh#tB%G0U(tY6|KHYPj!F z4teVo|L>9u3l%O#Zj8B&Wc$%pe6Yh5k=d$IK0Zrw^5ZwdfM6j2b&GlIdY>NWkG9Ly z)~c|aGzLy+-l==%`a~T2s0X+bl0kk%9Ajk+1Mv_}yuoG6=Y7a)rtPPj02|#fLGfQL zB7-%gS?}PYhtdv;|svfXd9!rH!Q(0ldy44M23M~x(u za3mSOeCiYGadYGY12mGQ*2+z~@ij&v#kZjeqX+1$S8R>C0%7W4Of22G>=(4xp{}Vi zl8Y6P)`v(x!f5}`sDxvR)7n@_rl++R_E|zij^-na@%Zm6xAzUl;_}*zpI$<`7{8l7 zGi;Zm?(;fDL85emjTT!}j)3V6$9gdO7j1S&1m(_4iLTG?>5tpy+L!_ZF3W6OESeGW zM@*yx>0rxtMCI~q*v%h~kuFKvNgvdEP1jjOUh<a z^I?V|VtQ#43Q_;+jF|rh$&aANIVR)?eg)Cuc%x)8?-siz15X`3!R10p%Rg*5PNi9P zMy%T-Hj{vgcxC|{6xfMt;WOH7zm}fg*Ry!#RhdU%Tu+BA4DtxRLaT({rxY`x+F(gB zisJqr6?Kh!A9Jcs)S`m!=an&p6N&eHg=GuNqICt6hYhr%^U3=$*=x$wbWli+)F zv2vnUj}wV0xjg~^%hf1vT9dU&G!;q-we{mhq@zb7Mplf-@_W+@+&;wK+%QdJ;Q^@O z>a?M-6ALxutEx~I8iu5kUQx#-6n@D^$S7YDc=!P=9z1rFbUE{OrkcAyXL()G+%eOY zsyS6=4IAg$Ze{)J>>>?FH3SST+EEwv(*R&6nO?AiY&jhPjVklwvQb2LauIv=DUF`P 
z9;Nt!71H_l|E7b%({uDZ&VphSxWaY}P%>F>nGSBE&1E2PWO6Tn=rcog-9ggWm>E>n zDT8i(cI^%VSf4TRFkK_y;T2y?e80BKmIAQ($@ln^Q&S(*|!PqFR^s1VUowo z?OPr99Zx)9F86|5IH0O)>7ndOi`bB@$p@-gl4ChvuYXa7sqUbd;4C0>)?zh8T&ga+ zU{kzp_6)D+o2>Re9i~IJEB1qIScX`}Wg6^vvUmTk^(Wo$rcl0j#u{e9VD2+8_2(K5 zht^?pO3f4jrs5eFY>wU?cnuS!AQP#Wi#^sPx_t$AJZ0zUQlwLuvg7OxfVTS=?_fJz z$GnW(^E;ZvO#1xgtW`a44MvG=T?23OGy=YC@WLXv%C)xqIt_aaYGt1I7Cz0#zz&^G(J$v@Z*dP z5Km6nl4iNXoWo2Skmap3NGH(nKYdL!DXdGnVR|kpO(buLmdauetcME>nUD#r63b`f z*?q{cWSz$+=$3tAPbbEA$iNv)@y|H6!kj62eCp0cRCe0c~rKN$f%rX_?cuw~_>$CV{(!RG#)e z2qPTHzc3(24_69W0v76d)Fk{S=Dtcn$V1V!HJ2S5VU<4V79&=>zF=PFN)hsrB*Id@;>A$H``6nMra*4_M_Ag0eb|dbAuCEvZ*f zHTy;v{@60;cd%pA^7Xp}5q;u04ISkE=76sN%&;Flb*}*|TjbA;1uT!-!GfVpvazmo zUFXI>RQNGNuyZPK zA7Bk3FyI}UV?-n|u^U!wGp{*#VYtaiu=#=)3q^>(Xu*KuOo61i8)GD;^`7D^^#0ET z?Beqfad>_LD-5MD%1#Vb9~9igy0|m6Jp}dGO+Uv_cuwj60OzY(u*&T0=qE1_4#DZ; z_`U7wb~7mSl<15?k|#?E{DoOg3HyiC#8vb0B}A%Dm6Bkd8neCC0Fzam@Jo=o|K6IS zGDVd0AVMRho9CDqxRcg#9%GOb?Hu8BovnYTVA~4GP>U zxM7~L-r?-V^v#-o!a%gftrcad{MfQc(^5rNdyFsvPGEcoxD|V}r2VP^IX72jzTayS z3yBa!l&0HV-TajH)eYIdTnh{`vbTFJ!s`I$hi-ZdJj0PLG}Pl`HJ5oj%;=HdP=68 z${W?-B{$OGxK(+1T?m%JEtgVUm*@2Q9AzF5xH-;)wF%qDi8+Q&s<>lP`!(|GX$d&|8CV--NQH8V0Fz?!9rCiI^y$DwL(hheL|F(>V|UDZe5@E z*4_zb3-r9`KWZ-^fOK*c`hSTaTrUK5JWH$dsJn)`<);q)SSdqCrgJ)h$#!awkvMR8t}uM zWZj z9600<9x^yi?m>3J)-v`b#2;$LJ;wcaW?h_u6W&{sZ|_2T@)%Ylz0c!}z^F}~Lu^ot zSf&o>8z9JMpUJcZKck4d*Ism0W-WQT;Xbq8anN&SfJhvt@l5y!YH17F{$c}yhh?jJ zG11rD0_hp|h*B0CmMUFPUZ`)G>w;6!;}j#GaSsloXhpdvf&p>prS4c0dDC%(d4qSr zKr(aPdW&@+hYJrq;7)npNRfleX0Q__NwA2c&0i9{6{0gS@G-D#>|D+u0^+~7T5}jK zL|9G&EfK^rA^aYnF?_rtWcp9{vlwTl&Apn$4L8VXq_E}@y+&^!v+q)Xb$`}G-h@5^ zw#Ii?eMMN|q#bk*Q#b!f0(%yCf6fu&)t9C^G^o_ll@45jS2R>0S8f?SMZeT_t@d0n zHXx_iDeMo~6^Y}8olz8!IQgTzHTzYH&S1uz*;^Ef3U^c+%OJg{v@GpMhUFbgh zIrXTZTrEcE2~SATpNWkGyWaC&Y)44?@<0}AYG~kyP}In59v6%lOU6AGReD>l?izE@ zOp9nYEjETT$ea_>@S20l{=WMd4;E)LLr zI8Dl5T-EOwlS=pm9#n$*m8TvWC-2;(lk-?UxaA68ET;M+&rNd*6!uQhNRzV|seP&i z+msGF#(jL)NAq%uLDKf)H%Td%Dt<1Q)z{*0b9}g2}#a*$v!8-=1{$uUW 
z32Nb3Xn!i6pv)*LufoCNya*ekC8Z6?EjA>;Yx2Vh&)MfqpP2Q`JjROG#3tlcjSCbHR!l6&&IsV#3R!{NCCk>rP597Q0Fm z0LTlI5MqA!8^cB`v%fpri;OzyyL3d~gHRp58*!615k6`0T_ZeUje;_Zi8XUAQCX5x%%) zt=zemRgwQny&HOz&CF*0semBYZUNPRjVgmJ#Qlq|d=pb_RnaHW`-XmgemlagJG?{i;IZ&pbT};^@TfBh0En)77LJJ|$*RuQ$I|#csS_a+C?{0=UFARxuWa-`7%RdU>6yfW zN3cQ$WVYq$mPQ0Kk-_sM>D3?M3(yePdMcijbn>UypcTK7w1mCJD!am8SXRly>Wk8u z1h_{3d^jRWcH!Vsweo2I63fTWGF-Cc60?Fz8{;cGq{uO@`~P4HytK*jJ|-84Kws#D zabl2==dZjKFDM?jpefX?y5UBqpv2li6UvJ?YX+4VqO;b}Ht0s5P7V+7{ZH;QEswxS zkE%~K;T-xUT%78UoFO)b$MH?}W;B`%d=*^{-eW3P<|xeD@-+>ezdA zQOop>Z7-`f?2!)!O#|$UMa%M_umWhA%i}34yMl$whh{jkI^J1Vk?t=;*ctJby+!;M z-w4>8xY#SALy!8Xw`u2Hc6jAXW+E zNwrRNFBJ&4H^Ycrm%RGLRHaMV*M@{;2o3A&AL96ePqJLXC0Q2GbqV~5&K27#s=Trh zd6dmOgR{{S!v<+qIN}I4d}P~tc?PP}hqJ=0oG{PToOwR9Q&Zw3g86N?2|upv*@`B(fu`ty{m z?=PRLf}HVK2lo+rqjHtk;#%Uw`619IqAw8}VJfnS)M_n&bvvfnXjCG1YB=LWw$YJ4 z??;yCM4T~iJt5kp!lN)2luEZgBolNPQiF4?$XrgcvnA{6K5Hj6I4gr=J8~p*TreDo zTZVsU#apH>6OZZXJ*YW>q2^L-(8N?U7>7&A-SVdb%8Rc1DJ$iS<@wp{gv6GfFYF8H zA*o9`P#wep_grF8TEbytF1&jaJ}Eo;KLDtEhQMEH`yvZ?R{2-#eyCxTNlD1tWr3H( zPg$Z9x;wSY&0O2K1-p-c*`jq5-I}r-w7ArGgcw^XL4Ku?SqbBtfa(wuh5T@d%^lG- zXzM8_7m9r7FcUb>a#4WHe(nhM^D{PGu2xChyNXSeRe^}cx9CkqT|z$BvUnPSqk1tL zi@nF+`h{6*@j{NN5k%Qm|HuR?K&gB520x~zIqM|IX$}Y{yd=c3Z}Fgtq)BI|Bn^XU zx6m1IlAmx$Z--PY$P_3|p$=6qP>zUGF1iD69tYAIua#i-F^}vp5xYnntP3gb zs>%!YXf@lzIk$lw#|y=|AA#Sl83nuY z`Dv=~OJcoLVm2tXy81W(AbF%tC2lRGiTyzD0-sq_O25h&_87pPj%Rp5U~F?>Y)O#E z7(HsL8FxNW?&}d--|S!{CR3xSXV@C6p505bvN)1@wdW)!pQ)52FV!K|Hu{H{Y0fbh z!KO*;4?^FtavzrBj9O}(pM$Qfof(aa?_UnAK)VhGPx*IUoY=F z)Y8cChTgvM5wWd4k)+hfJ+WK|RN;nnzZJos?EIf##(1a+2-dS>(z=LgD%vZ9vsP%@ z-=(EOdm8;s>E$`KlnqCAr*?QUQ}n!(@5GtQKeMkMgs#atPkl8%cMS2@NLOGbZGw%( zfK#V0P%|7SMTvL)(Hvt^(f-XvZZfS02@QE-=DPNTF-jr*gYB{ruGl(vdpW>tzg49< zo1@2Hxn-swsfPdL+A~pT*-z;amZa*WySfB{c)n>z>*d+KSvqRO*A2(HIWr6EsK-VH z_JJqrQ%ebR_?0Xo4Izi`%#k@WdcbsnP)h_jvRo2w(ge*Hez~)WG8`Cf4h;Z$#lAjw z1vIQj8}GYC>nPVWc7{b;b&gv8-8K+hULftMax_X>~AGYBOwi|pA{aU*wPk%Fz 
z^hR{0F6G8oLeVai&Z0ri{{go~m&vxPaLff6xHJi7(Gach8oDg*WCJIfLALHQ+*cOR`Y4qLUBBP9PC1e8?LRoR|%3@nalscDs{@- zI*E08DdLu+X~oS&MEOF)4AYm3ku*JWU1Z!J?V}OIASvUmf#LP^#MieXV0YDdmX%-5 zeBb0$a&nScNw=le*#-}akraZMB)hd2oToEuK6f|i4KAXI`_gewru9Xw)`Yupq;3;q zS?-*9SR+G7`!adE!2&audp?v3pF9@4o@h5SFn`b5M9j;AWh(|nzNUz!?)ITak15;S ziW^uq8r$W!C4r2dYG0YXW)Mr zw`WfGw!*q*+Nv$SH!nT5@kaM6w#a@FcA7DGUeENV8Y7celS3}g$VWIdt$lK7 zDs=@|XT#IV(6`^e{y1Hj%Dv*LVV|)qcNGp`)I$ieo7``(iFn+5Pf=%ngscjUlguoS zxBPZ%I(1bbiTQ-qF-HR(b5STseRZ#(d@OfCnq?tv%UTiX>~t?Q%E_4ZycJrH2{6$2 zIo&WMQf5dre4SMtCG`6W=tkr0w;slnRL`#}55wT{scRBSiu~UQYnA6Jq|KYc9O|nN zW0wOQ7CKMMlw~NV1?kmQ??RlYCcwYjfMzAL7irkcIZLWY^u_kr&U0b3AFu1TK9o}V z-!c^frv6@xoNJkMjX7=hw<+3{7~r#7!Hn|VC0L1s^J#wIA##0nB@z^u0; z8SwA56m|&HVnS0_UGfR=*;S76U7SXaBn?4dWce)?{@J=DV2?c^RA0SGY)6>4wpc}H zFfSR3zUWS3e!pW%NX5^BBk{pOs1ZZ_tdlFKk(`t*_ z=}I(#`S~|wvN@0HX>W%AAvh;uCnu%r8?{%FxVgC_>Wrs?4l6Sz^D?TRbU0(2tGy8W zDt6raOG&u}HE;ElY#C~cT;#p4OIwLncP_FxoV8( zbnQN$Gc(@{!;gs68^(ZrGa=)JET&AA>h6AX_4d?v2E&VNYvG~UA^lq-vS)-MmS7AS zEl|}pf0#OArRw9g{LgQYv@P!e``-u6D9@hYbWIG{u*nw0TUW6f)&r9VV+#!4PbtPW znY3kUm_+=lP30QF(dWy)$!Z-gX=!DP08-M@B`7yHx0nGNYtD022~;7L zIaZYZp`qZyO)h+22Y zy}I|#U^DZZu_bMVGnO4PqJW1j-bV&hBZKXkuNOYsEii=b&bLdLik5`fSR_RyCE(Fy zMh-y5?(^LhFqjf>P}Jw~4S#gEU3HcnQmH8C+UWys8Rx2>26eksfZ7b2_Ad8k?(S{K zqj3NUeu>KcZeOtFdeeApG)AnK_b%66sPnz2B-!gwvMkt&nSivAVd${p0O=s#xlnOL zXL?49bf}^^s5#gXXsC_OSDj9`zl!{hQY@v>%4H{ouiN&ak#dlNA|hocDTv0YHn}A+ zdq}hxiPf%mMlSq${IhiSMp7=JzkpKDe(bZ7qW{>mM6SW*V~a}rQMjLG?J42-@FYfQ zIiuOvEXGLE~ zlcciUN0A9NRVJtvqq>RHTK?wUkFJ-ruAapTjfHyHo7ZpfD!0&hFKz3cRJlsKqVV9O zHtXcMpN!Y{CxHh&^Ha}nq5wmLK#95?r#55QHiPO5$(=Mh#Bt`!7E6T3bAy$JpKRxy zpl^>CeZf{Ah)$Lp5WyQcVM|O+zHQ(5A1xUeu(GSC;kSQW*Ei(%9r@`-T7D&A8(j)_gsAAR39S)L%2zlp>t@-<%N3Y# zS2H#wxzKEqh&06IgYkpAeFJ(7pwttNsA!|!0JO))na+?(*Q=7Fr1O?1TjxWC^-eI~ zJ7MSh0Ua>~1w}oJth~G*S*FX6t+&mm>*u{_9Xt6LgAoIfPi=|D6YYK=&tytP2%A>;Weaht43KC-w!ol13be9|pZ^&a(xt30sR!Z@;R*J6a0 zG)OO*fdV_kA(}^GXsXCWVKgj|H6%G_RL2ol>U-L7lyYTBi&32cG{x 
zvE0H!(s~DAXap-d)sThgpiQH!Nn^FSuI`@;5oGAu8SV{;m`IvP_Gjhm9jG^i_MB10 z@q!N$!g&Lcl&BOhe-#~1w2zMuGab9}*&6Dtoa$;>&#c_sZ5F*<$c>E+myd^~#OwkY z8P2LA{jbjtaq5+omD79@{*=l{$~Ux|k)x%>#oWqDX{h~<-$S48cT;6oD3yrzto8bV z|DwNvwa;RCEHXg=nrSYhYdbzJr^~OcugfRf?E^5Ey}*ysU%uUm-eD`a@{3bH?Z#1t zM-D#Uu2gn52P1J7-UXYJGc%(qD`{DHF!JZr`6b10hoMYwJB{=m9PW9dHDgg`-vGr> z|GTU5vDh&3&~Rs&_V7KWp_|06SLn}ox=-An@OZLO4PSp`6(I#s+cT*)>>8)y<9%VP zQeo_0;J|GQ49Aj6SZ5SucpOIz8RS$jiwDC_a5QkmJSs3v6Ojr-GC+s(*{D5W8VXzm9yyH~M}fxV zqw6knT?T9=I#wkBn0d}dOuQGjqeb=|iDwHWltM^{sG<_$<5ASzDNrowY|U+{T2zJ& z^5K|YmJO-%&o!h`+nleIEi3axqDviXFZL1Tdo4I1ZM!bkn)2G)x$6&K35fy@{`IZA zezSR-R0MDRLox9zjtA)%Oa}(^OVehwUB163iO|&259hKz?*Y=p7uo+Ip`~??1Yb7< zN`aM?@bC+*xMG%L)s_ntbh48NZ4oY!+I`&J)Yrgny7iE|2)?XI`~elxbkb@z(H1Zvo4XNwguG$~k6GW+H3 zl{dJP>Ae0-N0YkqtUd}NWwx!Q>KPSZkJew0{Hf1&{T2DQp;VH==`#aBBToIXx2?|; z8Qa_(P(H7nz-f7@=Fne5jFUNdM3Ecj7k#M_=Q69m4po_yD>MS5d7tIZ*E_)ozWN1R zZj--C1GqDazqyi4&9~}+mUqb17h)o>iv=c4J3>(ax>4z{3KjL=e#0<^9-A>_b~ zmzA%ezoN@zgy;~pK*Si$K$6XTug86`A_Lge2m|`O`tE3=Gt3Ar9!$&=qxJOB0{H%1 zthlW>E>UJQW3Z8R-Uc7^xKYm<^wx7~-tIx0s=;u_;~kN7zjaXkR!~+~YQ{L)C=}1L zTN`R~EOBaEQM)!o%Pam~8>-zmHlmwqw844u2n07-7#xXex^p7N@69D^F8s}bv99#b z&d=ki`P7pDKrW?LR_Oml(4iI}k?C0c?Yq&YsVwXNmyz-DFnynUa5<5-aCYbYOfvus z01X`$#+V)RAvArHD2N@uL>hS>5p`%03r8x*qwT-vB9GJVvr{`KU-zEQ^iT%0uj0{^D0($$_4F=Mx#y>3DgP!Cj z?EttXn8OT0me&n!BwlohXmuv!C524F^@?Hu&Fmy`K^)49?Q9xh3*C`kS_3`|63=Yr zSw-33MIn^1Oi8ttx-#x6a^X{pY?P34zeFa_tT9HE3PjSA3FX{18?9Yacr!rl(C*Udi(wOymoj9y!1mMjlm(9*Cr@#O6jp#>e?bY#Y-A z3{7J{HflWqK3Qx`LKjUsuUg*3cV%4${e!FxL`y9YQ*1)uV^oUWyN~qu5ozl_@w;Z2 zd_N1?EZyo7!aYLn5{qPXe$Fj%d(9DjCNs2kDnbx{3zY)5@?XeOND&*IZ5OJ^qJK!3 z#XcvRCM&QcMycC2>^E-&9T0EDC?lrI?3T1FbYq4cho|4JcH=QMN-kihzMaFm3IIoa z!#b4o|0SL-P8sX_)k;o<6U4#8PZ*Yyq05n_HI8V91tEB&dV&rC3w?*RZ#XEgx$f-o zLwDzsB+oeCUCzA3wr04xKbu{+5!j45{MXO77rQ;)egCs?7epcrZjad1CL*Cv#F4}0 zM|Bz`1Ns)2$RYdgE*SZ~^J-zgBNi`MbZZO&-y0ZOzSP416O>ytuZ#+L9e{PnyR$G9 zV^NHB3_TZAEn2!GFtr###GxxnPxu#^I4dDgM;tE=prM;e1^gj|NE9yN7G8n&a|OU1R4N>)wkL;5g2XH^tmJA&%YBeT26ZQ#3dv 
zRk*V~HpB0Z<;x6QJ2-Id%}p9}n^-`S>%}$(e&xEo~4){k`i*jUi!DaY(Rc9k%cdhE{{Og>d#y%_i-1uEPjtw*I|1P7`mc+>Kh{+6n8~vQJsTU+NA~*#A#jJ}p+|AIg4v%qAmxce^b(ebOseMUQ!jE| zg52>tQ_y#_Wu+p>q|y-_nDPy(m+v7VSRmV_dmP2#?P8SH|Ekmw8_k2-XZ?igTZb@= z^)qSB!Qh}4Lv;jAPRPr+iz9*}*<*Pv4w*B@xu_U?<<`f ziFr=L-yr5u2d2QM zMG8qp9(jPOhQ5@~r2K&%M613w|%;r9VM-U+TVy;JI>pf|^q z)T$b7TS4~v_BU<@g{nvynDjT1WUdF9JXrPfJ&sgt*8wzx0Y-$pMh-1Oc{GhoKeo&T z#q}So!2s-wo=T+PO&R+oMW*}c-<;Kd!Xojpo%%Y50l^ayA;$Sbp^v9X%mg^{Rg6_a z51S$Mz?*qGvA-jb;(;e|LGccd;njXo2Uz(qn62BRKe*@P?&%)X+eSw)FJ*NGrC;Lz(YJ$E`w?BTh(YRA&JTljF_|O+@5En@@AycSgc~t>qlU&&@R- z?u$t_xQk~}cAa}zK!^PCZ1~F`F9=xq1$QrrFnsN-TSik=?hw-DsB- z<3)w!VN&0=p6VCQ^q^m}i9sZxDz5+l5<>myQq&5C0W8oqs2pl(4s=Hp#_)F($eBY` z4(etlw19VtXwe90R>io*h~ixM#aWTSZ$fF z+C*{CiN>ZeO4JG>4uN0bf)fdCSTh)oC?vas& z!yTuE!&;eIggDD8@*6HnUq{(p1JC)xLd0<8z{|Z>Wc3UHiA(%L@_mJ*0o2p2sa!tz z_%RuJIn9%^1tdMH4RGu+kn^=tPH}(`+=%?Yq|VEaH)r><;#T5g6Eg9RUWv~UdqV?n z{Fby6e#mmYVS5xja`Fy&XCP04Us3fH;Grq$^zxW9=?T0#bH5sT8>NVFcw1_Wls5&f z@PMyogoE0<}yNmrMGkMYimBYg5<05`NsUdWAkX3t2iF1YxY zg4kBGSu{^lu?E2=cl`R&yh^;Hy!%8e#YT6NgC_IQk?+?m@365p<*HfEry3!qy%K^@ zV~#<3K$3>Ri7-=iLa}GEM*u77=brDy0w)E<1IB*;(_?hUkl!Rc3{XhG3(P*D&YI{m zL+xF5vD#q`!466{)PhiQI_hShbRx+F59s7ElPK8NB*FX}Zl`$s$oo!2xnB7T!0FIg z!4G+M!o3egp)8ZSYV=zSq&-Yy2xlA4Bw=KkngQXl>Kqd*JYBS;%?fn18=9N^-`3nX zPJ*=B%Y%AZ6`o2bv<1m<$7MZg}uzY+1EtgBgm>@#yiDoN|Ts4^|lBKHh{Vk912UX}3wZh_EwM z(nIpjA))%f?$74*oJITOmAc*Jk^)|;oO(#!gCXgq z>A@dxYY@ z$i-k{W71*G)ciz2%w>7zr19e;upR8I_twqm|8dofHqB5oBLS#O;)saTjSuiIj0Vt) zVgN)aCG|amL0PFV(|;l~E%r|n;5%zQ53`BcRUnt1gsy$#>cva?v8$0rT7r#2d1-U2Sj$%vrld$N2! 
zTpjGqsms6(O9^%uyOeUH?U~}Pa0!vlO4#j@Dkirbe^kU&PWfms&9Bg@l4RWKrI!rd z>RW3YF?(gDO*;r*L|^&!EC%dehey+kcQbRry2V3CwHnhWmUZuMl|?VDhH1sv$eC9J z3<#DTbKe}LZ@k0E1G>v|XzYXG(^D%|leV6gP=5*~OYP4qHGzJ3TREMhI`uj%%ZvQt z)QQUy>j||b8DefF2Cf@3!=)q-kj>_#4JECv(zW?IBua?)P)QJ6O|o*O>B1F);p-zZkj=AVmsuDs^V4e)wk z1HanBnsJ+L^XyJMq|tVJURiFhuJ^Atqc?uH*DwzA2Fh88m?dbHC()@-&K=9Z?xuXd z>KqI`s9MVg6U)_3^=VD}zS8e+=}Da|K}t_LCy6vQa<0F&;_k@%A}UAx*Ah5v0=-mO z0b|-|Z*E{*Ls@-17}W`uY*L({a^}V@BhBezT*toEofOs*14lwy0Gbes^FW8VtA>8u z@Grf3*o_w~!H`#8@q^_L|G5t=jjyo~5nhck3@A~kj*3Na`un0RZ;Ku;meULW%UU+222FC*! z3Ro5P^n1OyUuoQkS;`?+u`$W9=VM7SLc?@RYYn;2L5QoX3h+~ERs(C?ZAd|8)aqIO z?Au(^Ge?(X3KyqySMC?ukR{Mk&&!}^(<)Y^sg7oxrbbu=8qR@Y9e#4@>f=*4l z8`W5W%=t!%w9&phv{b>ZI%_r6c_}r+{8cj@y_+&B_y_7 z@A-!hB#mn0j$9X=IEceE&XDB7gLm;odI54UGu>wDFFp7?7WuMRftV(9q}!y;RkWPQ zrgKPk8Dlf(=8UU@o{)y~<<`J}>wMm!^r!itf$M+o?dyLXvV){k1bNnR0ar zvyWX+N0sQz&PSLM%Kpc^sHlS>yk>zRJ9n!4iVZD>7I0LDYKZw^} z*F-Ch)D-PkBV%o)SK9?oUB(z2jSc{FdSg$`a}#)=xd<7Bg*Q)cQy;DOz|1iC>hsLn zUQ5q<$is-62)6qM28=z!z?r1nw=Ua(3qn7_OUwB^_0@cy>p4kVJU|h4%7Ts35qfcB zO_~u=VRD(;0As2|a|z5dgY0H4*%E%8ZYvp9edtTDkkGiXKnqftPN}1z05bQYWk5QU zT&&J%gZu5 z7awLfoIrW5#mS>4-B$<3NvAB-0>4CF=G%fk9L)_RJX($|>v|otTYT=CP(+Y>IQc4Z zU+hjov9%24oxdtuuHw!Q;6y%)$_k*J=o&TFH%lH;JK_|$L6p!dR(^wnvzXA%9Bs$= z`>dW$_q=jr0jVe~X6%Hz#~FW`mnS$5JG2ZVigwMQbZ4Bp_E6r*miaq;5lR!WTAv6~ zqm?pvmk64R4tS=Apac`e`zp$!0wb5lKdW{eLikPm;&tNn@?@iN!cGijqLyPDX-?p9?Ba&W5>L8Dm;3KV~{>*p<(~M8EK55p$g)=AYRePX~gYS&(g)@)OR6hwd zRIgI6j6VwwoAnxxcx{Bif+khBg%>NX^9PcnWzo7_;#1@18C1p6%zf zd+w*1-j&fmLur{(m5`GC6nD}yGVIytyGrLYdyB*Ebjz2~2+)h9DJ$WbM+Hy%wq;*SLaQJZ?bry>8dFzbmib|& zr9tdHeUC)h73gQT3tuhdr@5n#Ty7BVJ;7LqbsgknpWj>-JRxDPcXrUiH$OIS+3#B6 zIowQ&e)gYF4%((94~N-mzEep{!3qfd+96E54mIECruA$7+v?^7(Y3%*upGaa!Eh6T zw;>wcbCX>YXziP7m=9gYa&)jGF}sebJG3$PgX7(mKA9b*UnhonG!@ zaUq*!b@Hu~d=*dAz3&%rKkHnIX^+~hL?7%teUh-a?$ENVRCN5Y#GuXS+E-M2{uBM< zYVND{{S{U)krE;F5mg|-{!1Cl42@0@|8?stVYOP$9E0f=&8CEg$I%c6-(M{jT;(~_ z3>-HsR_+AEf9sQP<;WYCq30BpS&-jqyE3>d-)0{2(Mvx&3T5Urp=Defl9`?r6V-^8 
ze@OE1?t`Cg1r)3PLCtj2Pytqv_oXdFz{-Kwmk{$xN4Zi}Xi@z^0ph!I+xt*{O3xf>7T=B-4anV+wqAehuaf6XtPp^|Kxr5ps(#A6|dTkE(#7U>i$ctQUiYezvWu}?+mUz dw(Z{+SOb45i{{cBgxGDeu literal 0 HcmV?d00001 diff --git a/website/docs/assets/houdini_pointcache_path.png b/website/docs/assets/houdini_pointcache_path.png new file mode 100644 index 0000000000000000000000000000000000000000..3687a9c0ddffaf5f40d78c4872c853322a67125a GIT binary patch literal 80679 zcmbTeby$>L`z}m(3?LHHA)QKhm(nRMAOcEvcOwlVC8bD*beE(w0)ljdbmv}!KJWYN z@3+6>*vI}uoVjP_zSq5Gt?N3k^E|H!Q&yD0L?cFnfq}u4la*A3fq_?nfq|PwK>|lK zaS98;f3Oa!GU6~LgCv{a34*!Ub1@j0@@VvHV?^*A)mB!=0R{%E1NslP+a})>2IekL zPEt(W)nF$b(Fk8=GFVBEA`l{2+tAREpxVgdG+Da4TVmK!?{QTVr26r}Pq;4q@CIG+ zz*@Sy=Jnm+@2ijNcu*njSHl+0=4T~?gBdP^DLSPFP425`lz|X&6!52JpZgB?{*gF} z9egSzBcqQiD=V_n(!nWef-k#HA-%lx()X#lctMQrxZMmh$(cEMymtCpct-cy@7JuFgwn$pJ0g4|EJ zo1CFhVgbm6*>v3ywg_5QZpEVriTP%))slwYxMV|bDOEK!uDw_pGEKkn&r0N9RC0pP zudWJXE-%(ICo-c%wQI$Fp%95Hn9WAyZ_@YH8UJ$5} zBu|OYCBnTMDB0cF_v5#K=}rBKz3n{NcVufhE}K0PqG=H+)um|SB?gUl%L{qew}uDU zgAWZ`ayVb*+QoL@4pgE3Sqba>a|Tos1zKa9mJ~-f@ZPj4U6_<%gU(a2HY^7e6A2Q)%Mvr46X* z&Gkz)HM;u!jr*`0=kNCHLxP2c<$SH|LnQn%|LNNw>P`G9Dbp1u-K-9nv4iOXL|<}_ z_hytB)kS-BOFBDAlJWbsR!+wY(SGdv%8c5v6jL zmKA}TXtC(Q*zx;YT(fv8n?!SQi5WCvd}FKG<#(~)Rfc4PhY&0FL<@5B+=#N}O|MEM z!}v2Ov?8Y-cX&w4cf8P9vZ3!r^ZO>o{Vdimy*eyf>wwMLg8j`DyW4UUkLkdB4t^2m z9c{PubQjaZm0o@&v~TsAT6%UtDH8YJb|3mL568qXJi6Y%1H6@Qk4lpLf2XI|OXabIrk)=rOsH`C4oeRTn*Bc*3I6zw0O6 znkf9-()F4uY1?sQIO~H;FKzOMpzGF`&reIOf7QRjvXh#rervWj*JMATp_Rhdwfw=V zBv#1dR5q3S?LgYn!H5ibFE~dP^~!}}o_yj;s~`LaWy)-hfcs4bA2?u4Bd8E)WE-r& z+AL|^bfTHUYn)xKSD9E49Ag(+K*DFg63YDIYv(Xhah_EF+Gw5v-R94#_;5@m6zoz) z8EPf>B^-8r=HDs8F6tS1b456FhK&)&$Yt4I-VtK@ZLc!QVJJx&bGv~qo0!|0=0tan8P(!B zt&q?!hz!wa^INOwvORm9=Hie*q^sWN$!Rf>xiGzA1oX>|$1j&lm!oMqujG#|=_0@Q zmq%*S40%d~BRL^_TI>xjd*Gziu6nTvGQ+lh((t+D zKG!1yEM!rSc#^uwee{x;b*Lf;*ti{_!B|z#$o%$rjE&Rl5<~8p;N~0Obq0ER<+GNAg)&xYj#Q zz7KN=xcrWdDYHE|coG<2nu&mb@aVKA)4CtM_k7(7h4C0wx4}4IE1_(-Z%M7j`q$_0 
zzwu?CJ&WA=*{n|d<2~ib0hih82Nurty^CP~LTX{62GjP1^tkjTw&|o|6emlXeD$#_ zI)h-c!^7{f&l^bL)oNe12cj16^AIAVn4nA3p9-1wKu!i0md1;8)TG~$d_md`#i7Qi zn$bAYt+LF!*q$t%Jh5r`&8^X$BNMGT)OmYzm8Vr|V1M-LZ0 zR6Y02JjS`|w=hm5XV z3;ONaX96y}{z$L5mMmG{ zIc3ddV^~azK$;>X5euWBwhfz<7ViO)FuSa?ESDR=x$^@+Q0GZ@q{FdLP#Vo|+Gm zkp9G-M3azT`U$H?UKH(hy8BCah_rhZjk$x%k7wV}&*5e8x)cAd#v7jg_ho z#KgobJ^0~0R?p_mEiykEN$^e-Xy_gp*Lz?6<~eL}nM-WB%b@E|E{-|Rb+wqt&U-hi zMMou$oYv~x)M$Tl<9WWXqEq4eMLPQd;@ozQbX43>ODZFBv?Q^An=X~*M8adtZ3R8? z>=pM5`FH29fBI~cF~6vmPAcViqhcbIi@e*Y@}o$n7;~X4^H-L%q|oMkLx`kQH{t0| zRaBG$3cogdjHacakt2*pLJBSDtFk(mP5I0w{M|h^lb@>WZv1~8N&1<@GT;)VvV^>& z=_?WPpy}pOuC~r#f)^;l={*vOa-pE2s&GZkkCz&Xl*0cS$&ooJ4LYi;vYJ$sf_O(; z`EnfDkwHwIcN>osm$UNo5J~)Tx>4AXL^=r~)wW6Y*%#CF{N_yreMlr;z4|0fqh;Wh z_tavvQ!nTBoY$Ft-Cpz6N}`s5iXCz(reM(NKF?R%kAe$ro~P61C$Mc7!siPC^^H1} zuiT7Z$`Ym^7E?Iyt5kQBc|6lBenAVux2g)7U%-vi&XmfeAK}+(yZr?*{NgGr0Y+@} zz;k0hao*3*QsQcE?{4PrQmjHf_uG)&E_f9KkEZ=vyS?phcC;x|AJX%jueE`o?bHGx z`Ozf4!Px$=_{YPqeHi4zdY@I?TJb_WFd*D6N8fo0dxG|7YHTuZe-KkW==O8CyE&WZ z^0_+w?M`yqhku6Je)TEUIu1!dEx9A)beu=HEQ7eb-8LYcwAP zNr*Gbtq-P8oa)xUP8qm8!mh$6r9KujHMIx;o5{oU`xciad3^x>gwwfeq%jQ9X>9Bd zn`zqRk~)yMZoN}Hj{@k%o@4jvl6!rgW2Bk)rTC)Bb)C?xCIu^o(D*#d-$cgX)rm|0LGb=`m5^9f$JHCcikie^F>Xgm2G-;_B}1Q zuSsm3lWNjr2NK=-pR5GjDa0m=D8&B0BC+#wfBkLl8s5S5=AD0&^2QLHQ4e24zR&H2 z44yCSe?5Y1C+P2cc#+O)r;x&>4r{4uUiPO~gf`itNA%bJ`H*)eV^G z@#R2eI#+Ka6y2GxqO!{p*g`ZuHz&!1%X)rJ#l|Gd?^zBI0)8aYA!efod0!l^y8kxt zzA*b9QZs}{alByMak~f`u#PT1gcaCUGw)rvglp_r+f43#p2Zq)UeVskyqCu`+zv|_ zapb+0Vm|)p@NC|ve?rT!xZiH>2e-0VEu7bB8Hqm;)sGkRb3T~m9h$<88G{4q0$NRW zAk}82EE8fPXJ3seiEugW`_;*ecZ4)MLr!al_FC?m-IXSbbaJRrGL(Hy{CZ#^LG&)p zBT)|BLS5V{WVYUrc^E3RnYL3EJ1i1j<)(Ui8Ry-WJMWu_?fE7Tmy>7WD>|)R@4RyK zj6BdEGRN@Dd#aTNG{FJ0m*poN}*WMjkt`*y&WvaS%&Lv}nWpNr>+Ap!Y zyO_3!;uC3IAxuVT5-il}-*I(+V6tFo_Vc|XZO52{c_8?;Lf}!r6kThbI0?l7sy|9= zs(K7v-^DD2DMB9A!h!pO3)^D^`i`-6POz5B!Z|g$LEr{2j-Ra>q%4TO%n^*gh zzXuX_2_nMySBtTy*AIsvjSc|PaVFOz2eKegi@#D4K6vI0LfX%QiAr-N2NozblEAHj 
z!NFmD%^Ss7?|F#NX}YiM5GD)Xv4&py!3P6oibSI@WP3g#L0zZGGV%JX*v0hrI%vtP z2i=hg6~pm3j}dK3sGM$zR;a&B>aACLVqRO91m zXc<&EN7bTCQ5cqOG^IZTAHtp}5@i>dbUO!sPh*Mwc8i9uQdQ8|+e0R>vSv548eEQ? zXMXj@LR(0ebIgC`%x&7fzY_KE$7FAXnM;zZ$5l^PvH>Go%Vl=s<j(ubmE6sTHd5EMA;&OyQ!uST{wV+s5`SgXbh;eL1_s{ z9+%(e7Gw4UpesMrKQ!oEJoFJGvlf-*NVXIsvA$o7sB6kYR=xZ=`oN&&x_Lu9p;+luJA7n0t>dg>k^wxNfiPOfcfixDm1@rerUl<=E5$ncTk9qyk9CH^ha{(7!dJJ9+ zk^p%^?%I?&&r@%m{cXC#?z6p>JfBbA%>Cxx4BtOI?b9uFW(oO@f@4;*7(vB&`2NkW zYW>gMQFY1%li@KN9mq6X481sg$$qy=Sjg}Q>TgzXz8Ewl9q)~Gltnq1_E(c4M&z@y zVD&#=#*&?Xm!jHK(c-#}prH7v#nk+=C#`Gquw^f9)pe(rosBSuH4`IZF6^7o%)NmC)GA1WotV+d6_&h9KhR#%={gaN*6YFx_EC?R-mmi^cS) z`+?e%rA2VCi`LuWObdMeA&6OlC?-MOcHbh2)>LIK#E3c<$RH0j{cG#5b{m{XU6~ed z#Lj6D@Y@+F^C|oVao9Gn5r@bx)qmOdv$$iI3n_`Yi7|5tszctQR7x50Xnhpay{X&z z_-wIGhWF!5<6>6TnE*Aw5VD&U4(rx@o-1WP>1Cox6IX*-_Gbn*-A>QV;@NltN41G~ zZGDY1F_M|s8uZ@EOof!`_y@q7B^cL4l6m=!7ixWOm3Gx?HMXT0or;cbNx3$a&14V;VUOOSy^?DSv)0mMa@q|hIgfJ=F9ts zVdf4Ogzh*bRRg`T8P}$8Zq6+oJO52K|A` z@6?Q#1<{VOL2kvzlUcG=BXhDLIfsVthaonb^Ib}ofmd6ot;5xEnUAT5y0x%J@spJO z4-h-hQRk;%H#%VV&?23D**P8CyYrn}x9|{^Fxun7@q$Bca+I(ejbPv*t$2ZA)R1}v zqsDBdup>-+Cp`!iO1i&FSR<$@41$6S{b-fC1e1)Gve5ZJj zp7~NnH$OEa3RMTO!2{_a%3CA@ZU|41XXEG?Aca-DNkS5Mcy$U-oNSa;m@9D+My6+Y zW)HH-Gw4XLFY6bHM7OcUk7f~PoYcMBUJ$)pK%a$I5<{6Y&-D7#9k7l%$KpX9NE*nx z#3$AP-}hnlZBPx)5FS#6XaJlj-#1^nd-B%ny1_f3@%Gyz04g&HxwjP#SK^g=edax7 zex!0EzCnyM74p8;#+(effuC{Q{_%b5X{$X3>&R>w{ME@ewT{k3I#Q5IR~bB(3}DiZ zSr;W5DDeYUK|wZY%|S?kfr>e-qce?5nrVu{Vxmty!7Z7v`g&hd(l6lZw1=(bdGh=B z+6QN6@iMzD3c*;U%#?{tMC&0(y@@gMl$@?T6!^;1({;g3^Xl$^IMuH|W7L-Xu05{& z?pv;cvvoaXaD|kRhje@9_BX_SfoRRR8nO}^kwzRw!xlXi>so~6f(Xj@*O4nW*N=S; zpRk5TqvIgLkDytJXA+{LprgRg(h!=VY>TxJu867YWRN==b%z?Wi{ls|EG{vil!}Wm z2Qp^`k$WjIvqyQ8LDUIhU^B=B5J?G;npz>N^xA|7aEqEK(GEdPHu0-r6@o&oXy2JI zj%ECiu@Q+o8kgS-!e(O!A$Fsc5_Ud{bd2ml;=tHLYA-6o!qC;j_M{<1YM|XWn$qZO<6O|k3y<0FvfpFlrN;` ztDsTRBvL!_OT3A zWKka;DXE$DuNAA7i%X@j+xb%xx)WBD|L7U{=Foo>_|@X*mBhxiR`cC~%=F8t+6_xM z%EXxWZ>B1xqUDVIzKxr7E3LRC4~z8A4=Huw5W9_tMk07na6TU7#VuMVOfS{^ZrgEs 
zyd%-zd}=-anB4m`<+`EwKz}H<1F2u)Y?jhx);?hNCNp(quE9Fi+sl2U0-s5%zuD_5 zPcglo)BPNB09ZOLKT;o0uZmo&h#b`x=f?4p3~IkyERMVv^fLJKYm8nu^je z1JkWaS?fQL#-Z?zOel#b+}8Nx6)JJmp6e-f%H=xecdN0Y=7EcAqNq^+oHl!9qy4Yp z8rwa!GX~>s?Ayz9rt^c(iB(=nl|Kjplyoj2V82~V#4{!`DI}Kl#ib+~PnCAlj>>Mp zJ00(Q7m58M8v@BfDZ$AhazIv9)(4SnIpx@xU-SaSa$Ik|` znB^Z)Z8P7z73=HkU(-1T3#aGjE$OPf15eTl54#61JxIOInm@acF2P%ZKNs%(48Yl& z;25Z^Bs&cgtE+kD>;HPDqzZ$WS34(m!RZ5^LvZ8WT!`vt>$eua3i5tBZhQ*xR^K>< zXB)qELpR+VT_vV8iKb$}c=~fY#QF1JvV_XM^o49lQk3aVjP=hUvJY@7sz#lmvG(lz za<5oklx|ZB+1?Q9RM`hfFKGvi;)kerCxIon{|Dts<1zKF~LO5=+x?J{B$BMFJEzM^wz-hza>r&7X zNw9?UPjTd!6=qG{p6w!Yo+LBapD&1o3A#(h#mjTsKjQORc%+8X)*j#w;c$)IH|FZh z#8o9kS(Nk(gbrK)H+8gJWchcz?q~(QXOSx}MVTcA{36Fqr<=VBJ&!t+f3w9loSKF8Yrh9GXI0X(5rdrj z*_C)39%A$Z;bwB8M{Jo&`X9HY#`CGPqIPp`N;!60iEd8mkOr7QX}C^KLE7M$6mW&z zGt%+S8%Hzbz@Jxt{+7}V`;UYhQyDElg$AEvyw8k6c%0ZRs<;v~kWGI_&_#igf|l&}jR>MNt=AEEzi1buLDeSGRz#?Bk? zQD*z1ZMvo^&`XXmyoPylTwX0I;vTqLx3rdO!~OZtf&WDMOYl)$`$Si+FaO=gf61+a zt8b=n%uQOXv6)r$(v*DR{%F!MhuzZ^S3}>tl^W>!;Lu{>wyqH~{)pN3r+oSu^bwV2 zXPLb>85}rL_xceE<*J_`PL<=P>USWiLrXh|D*Le{v^sz$0OYh&Kia=<%oP;-&xgob zg;mx$zBTcD{`=x?oRZ}84^QQu3^{qxX=t#_QWo<3v z2o)-3J3Di$!G`_4sG#B|-l}{;n$?%awC>Gkf5sBCq+%`gKv3lwBHf<>$sT;9+Y)^K z&#Pie<8B}Rj-V@m>R+#@JeC{}z!s)g5x0wO1OO=8tw@_eS1n*|K<((>}MeVxGOw0UM? 
zXedfLYt-*V>3ad3GVlR*w3K=xqXtmmY!aM*|CCc!j`4QL_rAI0d^JE={l$*MgKqNvMz^DtsdAHwmtAOba&jY1TjO7- zlMURzL=tn!cWH`?`;&-x>m~aE8LbCjOOFx=17mMBO7wKDZ*I(f|EvP68e4O7GpFad zZ3?el_VMPJOxMr3CXZj-w=TQ03{k{f{8GHN@^5FVCr(5zR`7JnjQoJYCHXDt5nXI- ztouNT_p${sr)g->iz<1=G~Pw~i)mSp)18rhxxZ`4+?6JqQKKNY9p!qz0~?o=v>1u6 zQ`*w9U`>MJm*d!T;=_=)i#%_Ce(lAh1ro3Qc5zLC3_2No5ji1Yev5vMbvhbm=x4=r z{#CbpV(vHjV}M{CyT~q+Ob`%V$;->@1yaMYUfJEJ=c!xe617!trd6W};LsyU_*5kh zV<;W~xca$8#Bn2r_3ZRC$qyd6k2YB^yWjQ}WmUKGRTP@!`ftjwY(hf%K|caf@p2v3 z2kTji&?z_p18aYBET)Y`{8aYxY_D_@Z3*|Kvw`Q%_WS;EDZy>3;&ks*p ztPP~tF9s15oMH5Pob9sws&|TqQpN`Eo3!|nKRB8AY_Q4dE{@jw)s%$tmXId0J&{*O_do3kP!&lXa~Q&0o*wwCnYyd z_G{ytIX8Dea@a4l!Zu;mTGvWzBXNYo3M_QBa~^+g7p8nwF38HNDCBNzY>bPC7r*}8 zf$ra<4(9iLov&H^q2FnTciot{z4e20)hyhi&x}~+QTR_L~HO+&zLxaA#e*e5N>H| zA+N*U^@3s)G8$6W3e@(Mx1aMR|3oAPGO{pwS91X43brW&zi;)A7ichHwsr7pW|FKJ zlid`-NJ5a_mj!AA+lWSFTLj#Qk?{$OfAUsNv~Zl3p_c|A4exq&ZJP4DLUy{+#O=0)zVslnzSTehFesKT})>wSSWqz2qRXbf8Yv4YcNxPk_?)hN>v=W_sD zmUxwwVO#|eY(tAOSYhkti&sr_KWxwy{u5ndL&ZOEzy07YOX0uJBvwm`Bupsc-AKH@ z|E^VbZ>l_pwGt`&J(pF|Em@WY`)x-@$H^nLvpEl&jL`POo2xUR4(mP@-1S2yi-B_T zXHSe;eLcHXv@5Dt73~cOvaR^bAO2`Ovd1|0oi`DHgb}j0w@+N2T4=cu zO2P%WE)X|@yjua5aii04GFpHp%yE=|G8!4jc1;lNKk*0(5N-13r$Vg1r>ud`pD+mQ{Z zuY9owXW&nN^VyJl^;rYeRfrp-0@gS@_Oc8khg7_kik#AG-utT9{kmxWVbVWYsiP1G zvX&EU`t|B9ux3Fs>HScO-~DvQVlchI2nIE9qv>KTm64v3e}n*Y2_uL2MOJi|@pOe5 zKMK-4mZI!@B6up9j*eP;*qmmCN{#zcQer#x1&W8{U4Cb$7Lv_X((DvxH>|) z+JQ81Ovfu|&aA$f{Vp+rwO+PGEa>*Nq0f!eT(@fkdOOQSx&EJ7673(n99TJBeIse&Q$XAl-pZ}YO;&B#vS4;P6ZS zfy$rhg1lUiXRrI$)0p`9WzFnA*bRK6@LC3RT47XwXG0uA5B2f?e7YrwNRHb8cEjpw zvSHGsfyZ+I6of6tK1J0f+ykbVBWx}I-i*e^M$_SsVh>c-si7u{hP`H8Aj-2r(~Tg= zvcNmxh>DHPGueZUhK~DZIjs_DwfAOJR94dR@#%PO{V2YA>TUZMSJ2SVY#_;AtS0GN z$AWaz2c#xeA|j#}6faFoG}Lukp9GoV(J3ZCEf)TW=$-TPhL4}Ur--Lh(nQ1>sq#3p zW_h*%fbv*$0;z!Wv-PXv&75skoeIfya@PgegQKH(bk=NKSYf{^|KzigbTk>ulUD=E z2k{vhtWf1xy+B>g!1F-LZ8I-fT~gn!3xDnB?ntpNPYR#IC&A-kiHbMVoOM8X%@Yqo zTT}hB=m*ILGw8HXG0(u{5m8c7UY>P^;aw8+T|rauF#zCeEXYinMKNAirxwP3`{J2j 
z481R;R8>{K?q6RY4UIN?c>u@Mr;CVeKU@Dvyg4S<8j3;S%rs`DyJ{4$f0JkHKq2auzS0+UQ^Fmu+U*D*$5PB`=K{*FA6BfNMN>k3? z&60qa)}<;I27oPm`})R6&S;gD){|}_iVTVhMy--JYoWnMZXO=%s7#^EMJ6iaMh`Yx z)T=BfRJZZ|pnF+4TiW+}wBh+jF~Dl0a(S{%ClQPh8;;LZ0I{5Ftg9R6l6glG3rsm1 z6nMbDx+T-*f?rRMti@zWI+o~2z6$l`d`rvPjKy?Co_kx_;Q7HaSCK6MU;mxS z5`_4|&XJ2j2?{;#H&YbALzMvac$67;bkZYpTK!<2uChul87tIc!&f}TxuO6Dr8oK1 z$0xsMw5}Agq8|t<1HF>^&{z~Lw5ZNuUB2?w_e1IR?OzQwyN}>@+tJy^Z8yEYeX#pidF5Vy_P1YUDjvS5I*7 z#UdAev-Y>r;018urx~KYIz|4h-=gPAWP9}Ky|*x?BD`=|=zng;@H7sn%&gPsTFwWp zz@gj(T;`q~-9MF$c&Hy!X?&sNf65~y=!E>|Dc@cjz1KNOw7`FMy}iNm-d?E>NDaH1 zOD-VLz|M#L37nm36p^C9<$KG5_OFl^eDpW;h9duiHbHI1o)(?J1k9!18N>(4xGcxn z?_-@R8L;K9BBA61FXZ;rO<--uCUN8nSUjp#mKqN4pp&qQ8GICL@8?B>zaE{%!I2)d zvb!M+U9@~>pvGLq(n4av8%w#b!5HS1C1dSFm~xWBHlN)WVF`xN$k>r6*5B#)2@g6O zeQeNpy4d&8>?NZG8U{wf?yrUhy|pyxRH-&YM}ws$&4Q;UrxySCd9HjSqv~fx(&Ya1 z|M6itG_d*WowhA6{NLMGy%U1obgt*J2_ce_$*(d$Ju<)iUJ+YQ=Y<|^G3^-+Gdz0hhQt|w9j0ad<0tn$0VPRowxT<<_ z@$uikv|CIRBnActZ+!)Jr`%7U?<(+k1O9_p1_=)=J|#FkXzTbt|}B;~?j-g3qdz z`U@ca+WaIDf@Kf%*M6{_#+mHf5DD^O!!7{U>;bjv09XG*XC-wD&y}yhzO^N#-QwK@ zb;)cI`SZXTP}$hnOao&|-6!~lDMprh=idcLf*IebW!}B7%YACsjTht&HG{3C!Mo;A zHQTMcGzyQy=p%pjRne;g9B59~Z2!k?mSJ!E;Y zu~(kHzi<1Ct*5UqjzR5H{y7p9ap`$qtn%MnZkHCKbPFq2D{C5nSg{FAZ^yt(CX*P@ zGYjgK>)We6_ho<{F=LvRKX;uZ2|r{?)Aff!?mpxM4*}`1C!Ip zeGpAh+X&VkWq(6IlpVp{$;Rtzpe6THXIVtV80 z`ag;VjJJZ?xrVqKnhw=+u#ki|#y&@vG?@0{ru!`TA&C#HZ}goeG&mhMl=0Ee)1An=x^~0l zm6UiKBCrKjpmx0Fxk~eBgOIiWf zr{8(z^Hrjbf;K(?AHu1Xzwb9uL}0u8(eh(YB#5zSxJcHW)+GSkKPa)v%+{gG6V_tNE( zdp5>`TP2-fC1Q}%%LaT#^^f<7jp4JcWAhUfjI9?{mURh1cH3LifQnTVo-O_oJbM8AKcJykOI}|bz0zmJpJL9 zOh9E0(%Da0*sO?c#H^rgltnxUWa04a;d>W@W~g-t_=I(DBCcRc$DqYLKm-$SmxWuI zIDKwCe<25wcXEs4Qs%-`S6WS$G5IruYE*Gx*(ZFVC6U@ZOht{rEWNs@sT-n99N-*NRaAY`yv-=dvKoHw!(At~H2~SXbpn&(NlUnd?7hiCD*`o?txrZf5b@GGTKkc||hAKZ5dtTWm zpkE2LK1Cq8d?PmAZW7E1YaC=BG%W2Z4|#aLkrTa^XP)6E-Nh2fT5_i(a+6CL2*7i( zdwJ0fJ0Gr~Gm3ze50?iq$IHOut%?3so1*`YA3=H-*chG0xs-fe{d+2+A;Gr7gsw?t 
z;IYS(9sdD*l#YOdHzyj02%(RUaoRbII_uk|cPjUg$ruRFGd&N(!ey9*fG=_FXI=nP zz>hJ&AQHuqYF!MDl_U{x*_N3$#`u)>N$=`{ggq2BQQgpk*=ujUy*e!8UE;JWu^2QqxEQKi|3vegl6V+V|Ex= z=_U#7N0;rbFxGz%9pRiiD9HJH5F&X9?cRTTZ3Qudp{_U1cRUX8D}RG2YJ_}aynxuj z54t)g%fyez4j#DjQUD|q`>iZ|%+O0AXf3q5PNhWDQwuea5iC@U^|0p*rqmyph;g!SCye6=OWA7Rw{ z-ts)mYa(|kkH}vNq>nqobCG%=nuE~TO}INNOq|C=kv%*tdJhQ`ln4i1$W+ut7+hMp zheePn-5~PLux2X6R}82n;)UBKB?zZq!XZrD3nDNW7DM01 z%&rJ!3P=k3l8f4LrIf-c)!$sWdirPbFgq?}>srYML&UgI?2wh(F9c!DIH;)n?+bWY zzMu?x)m__FBBPi5@qf$JVbpahOvvMT}$`<$J%Kr)1?choEA#UIy?w9L`&msm$ zreU=M?&k>%I1cuGf)W%7!ix7z#B2wqS-`==Gkk=G1A7dq2Mk);WB`^vexQ6mf5HE$ zcLjan75)HmfMAEg`SGuZS^0Fw^Hn<1pb%KrErcpO;St40ewW>W#XPwzNhzrmwY+|? zH}gPJ{?lRC_XJ~|dKK=<7k6-e+xR8FS{QsP^rY*^e~@Uc*-QRk_xHMv{pUz4ID+@* z$iLq6KaMae{(tbR{Vn(k`$$VL^d5g&%6`8)B}1yJr@Ddpid+!EC3l>9ysV}DJjJWAoi@W>FF#M78a_2Oz0gL zc@LfHB_uom z*la=PH7Mm=-P|?>1u-EOg_^~2P~WY+z5UZWM^1}T8dP-jc)-i{|E#k5uBWQ{zu{M{ z|08~F6jvJO>jWJ+bXxjOiBR4<81vENCXX{sY7byd{!E*e?|!mn59)JEXz)$K{@N)c zBa>>CKnf7!==p*L0D0p%W!^XL7dw?h<4+KV5E>=t=H@DF=7k-eBRiLCym0*eQzObv zFclQ691e1FQ1Dt|02mvGLD)tWh$^C6PFdu=)U|T$o00E;5}FTiS_;69gAyxxAoW{r z+yR;7gVqZQ$DQ(SvUmWRUT=H2yVpBzjmtr4jCr5CrXt;%LA^>xUlfKgm~xNoxVX51 z*(LZz?SCU#*4hO)iyuWPpjYbI%a<>6K?95>JFw%b38@J*fi6^W|3IC}1@3e-avFQ8@r)et5Olvdz)fkB@_s)s3J6 zyY~|qrky-i)3XItYCr$Qvz*mNZEy~(BL5qtO#}PR32UU>MCNxgYzJBEc~BN6HQA!U zzG_>~e1%yAYY64-r~=y;KMd;Y>XZyCTHOg54g`aQG>vlKlG;}KK zu}{w#6WA*rz55NolM4K!GlVm%)1B#`rJ!e2n*Vj%Qdh)D>B$vH`s&fJOBgeEplf6W zP&%V6K0ZRtLFfcqb>i%co_5$HT4>!EY&xK$%aP0sA>%0t+YcFXauhBSbiwyitb;f+ z!J;DM^o+B3lMf~#XI^K!zb-~WV+}gSYNs=kdcLUA1F*I1$p8;As6rtNs+G^}Xy4Ks zjsck=#IVoV1`k)hQm{(mPfW&YzToFiX;7%i@|zq1C(s0_TCkS_EAS{>%pRadTaDtG zQ)a^n^h11iui`BPU_Z2?2=@2I(Hlje@LlM21Ge6YWn{|BNkgA6-ALP_A4RX*bdj@6 zZ?Lq-w2=)ALeoHFpA|J?iAV`f;KLH3KnMU#QXZ7do_$MBL>QEBF0ydnAggDhV_7 z+_Wz;3WLM0Ha2q`q6z{mVl-N#F^mrjFyQh|?K%tzea^;4HDC*6ix}N#SV3!0(TxMx zE~Rj#v4m9X=v2_^XI~9l&1Hnn5zH9G7Uj1jFPY#gh!;e2Kbzt!jsadEqA=)MaNA5V z&rm4^#D4#AhUf}a;6Bj)G}w}=GLQJ$6hhi`hh@HJK)(G199eQp`F+_ih)th8sA7a5 
z(bHO6=}TXMPDME1g%|)aqChnt9zymqU$BGQw`^-b7(y0Q1?BXJXJBNTb_fA-Hufe| zivdAMJ2_KPkEkZ&F}hZN%ua2eONMh*W4>FV4^=z2o#Ahd>pMt&$hL*j{-Ah%{Uzp~ zv6A19_3I6=oLNFl)VY0Cy4lMRknnx!UIQ!52SXC}_pnG}?}4-&9GT_wd?-W63=r3x zK&BSVIRNYwV}Vgoj?KeJd*5bp!dfFBl5ctWXJK)`NE|zw@Vtpg-KlMueJ23sTdSXg zk?OX+;|w6sEy(u6AA2~eVOD6c2qWQbj4{y=C0Ca69YB{SfdFaP58sElOK>SonXm^) zZIw2d=m(OiRlkme>VP|o7>WIL#6KAH2p-GM8*5Rpldp9FlRZJ?d;$I@!U;-LW@LkQ zBt_)6t}C%JPdm})kOQr~KxK5P>&iaoef1V|Tl|j3e4(mUd1#-#O}3jKqV;3L_aPK& zDmRt3+`f=0sq}S0Yw2!9J{Q8c1_F%6)iXYF1v#;e4_uvmjfjB{0vmEfvuA+^U@i?0 z|5FfV^gDArYPNdf{GfZf4){k@S<52wE6zELRGs%t>2E;D`$C7`k*<4lg9b;17U21Z z7F;EV5_=(q>`VZy&|r+i`n`)~)@N~11V99ZPvj4|_YF{ScDTYN0%m;=NOLkh0eQKjqoV_S;)Be7et!2zvBwSWKiKi< zpCcFGnoukQVv;Kia?(F873{xm81T~n`$Kywz{-<(>~MV1D$V%YQH?@Lcko5GW_Rp8 zuj~HDuhzd_;Ns#siirsx_E6&jd0=u=Ws4xw}fU?e<=Z$1YIi{&R-A{ErfSCZIEYaN(9yQc8xB7=U#A*JY>h5X;AauHY~z z2E<)1AodRq;GC?@0?6fNvD%l&I1GBBXn~xEZ)4M4!XaccCm03@BsZn~!^2T%EdZS* zbF^q76>VYnz4jfLJUzfu;b;ict*k8kZn3y# zE)I}h3aYAc;a>rM>IIq$17IZ+fcUWOo;0}wV8^J5BouT^z=v%6Z_E9;IR+osoiKjy z>sk_iNBT@2w6T5VI{3~}kn?G%r6ZNu#sQ3aiZBe9jW}}!vgrx{e90>-rgd8dg#&q5 z6icrL-cv%`QiJEk8bx`9u68O9fni4$Ak;j-p4RW?tK>XQfEb5DyST`Fj4uW;K=^Y9 zwhD5u<5)q^J;V6Uy5+_hU-uy-3h+;Ado!SC#`OC9fN|&V?5{?@#AGgi^_>{1o+yR{ z1<6TD1_I-t*g6ZvYXi4c{O7>vmY%W=V?gC+0t~XQ4 zfLk+V#n&l+8MHG~qjY6tJP!t^VH|uivS$g*3U1v51O#-8i;Eus`ZNX9RM48h6F>|? zyNUqzESS1ljuv^^qFZBqN$}=Zy+#MVo#4LF#l^*@)8BoRAQDcSJ$wcQPPil_MP%eC zkaaQ249`P3w~efjr_kW`sw6oVWgFGlQK-&lJw2+A?Gz|1kA2d-XRUT*o~C%q1! 
z>v=&=g^`WD=yx{j5FNaXpJanPmy3qaxN@W_MxS-}@N_%8sEGL{!UP&Uzzp4An8F)K zl%-zzKdawvJJXZdM)vO9?-)@ z{W!v*Bd1VqH$}juWY&4+E#oa(Urj)>7UT_RunFFuGTw7*d}#FRgUX`kbFq-T$i2LFCTPo994D zaT}8bp;fi`1=o<|I4GFDlYkq`ePe$oVYPCi#h>Z`y~)kR_4UtG;a=I$ficlT$%iy+ zdhl#DTpq|HzC7~@tY`CV2A~|?orX5X3YV(3pm zx&s|sA(~8RAgxmAw?UOzdm8Eo@vT50urT9|(22j$pO%LchaX5gN6bE;%zG|HOe^K|=Us1i`-!IM3B`q-sQqmyZ4N7+-Agv%Zl=OhoNJ^K0q)2yz zpp-}`jnWN6gXbPU-{(BfS!bO;;QS;D#=T}{?|bj-zOMKC^%m3Xc`qd3F-8wc`84E| zRtogs1z4qlt|kpEP*v&P+4tZsn_=pQRL7Vc7;k~rJa1YS9Z>9WfPVcu4NX;L(d#8cEx}KQlrqy~X{YV+_h=&L8DvY=_Dz{{aZg5NH?w4&$uGUh3cUvO3w=cj{U`6%Fs75C4z9r1YuaaCsQ)NkIrdN#wYI@>t* zRZ?~1dS77w{C?Tb53v~@S2F7R*bD)T78VxA04S8%d}-Y3UHSMh?_lfZ^5on6yX3mN znAq49WQPX%K=$ySYxXFycn4-FBLu?pR_i(DYK8ce`V6rbuM|422GqXsIKHTY>U{E6 z+(LNr+}>`o7Z#tiXP<0y8rDdOrPLZQ(3v!ii==vrZm!N@*;Pt`(Nrfhkg6ev-|*g zN1pCBlvCf)SaAk*e@c0!vZwf9B+%V0edJ-Woln?z?t&_Cuek&& z%VN-tN^+oj3?8;=JRKKuEuLG?s7GJm8 z7Jqo(%vxsD+92w6)eIi=#lX&g-1+xb6UYNy*4H;c_ZZpmK#pl?xV_a1?*f3#u?Q4D z$D`s0>7w=w_sOvM{D5=6Sf^MGtnb1iyV-YF5HFH#T25nxl!WT-(k@TE-?WbVZ_3{# ztEFRjfqf%`>ND=(Ia-!nf93FtNP4Iav^Ruab~vv8E~)sT{P*T0-p?LPR+)if13UpI zOXER7Qi7NO8&~A=HZG5^)u}yw=|1AN7ER+EMu?&PLZj+J|L}X;dvmAv&RKmHlu%=| z-s~&3I_=U^|N30L10(YthN-}M{~3c%ETj6Vk%;eGeRQqYNYo6NeGUcvy|z3$<2K?* zaqyI?bD9U+O@8Yh3{ZepwLAxq0w&N8Y5=kvhp&4;z2S9LJHq`Al!J!v1s$0L>_?Nq zJj(R$B=_$SSK~YrQoo3l#_3dBND?^8j%)#n02gwycboU)cPJkBICc}T$0wzvuy7hy zB`Y%o>YeTVDj7tTFJhz-eo;|b9ngQy-RtJSpL9nWX6?(Zgig_Yqro3`Ais6RMea zH4~oZzDz3hihCKJZ)bgY9rkDWCXBS5&%{)7g2~->i^TY3`h6QV(Jx1TdDDfCh0J#W zg2vA+*P33u?tVRa^>f)}4`$ zWbV?y-G`ZZjhV<6wRTy=@9NCIN#TJw&#TtjNOZ`iC7uMJ2eNqkSGZii&ZN(6J^wPe zq&zT6#rf&iGrKKab)iR?aH^Q>p#59rF;p&~PFE!ZtQuky!$ zM|>$|2ol`T;J3SQ^H8hHz1MSN_Gvf6KE}CDuO#fFfly9jX7Tfxs)5_FNs8Z3bAxhN zhXT;hit@D?pk};%rRV%HR|K8))ixo1gor06>)(EC)`ZniOhtNw4Zu5lvnJgj6Pa*H zpe&pGA@Y1G9%!Ufz{ZBdGCL6I-yqYJ$mjajH0>Q48v6TVdDUb9l~!q(J=n8=?U0$2 zJTBHasQf>|so=1Pja{%$NCF+=?c;()9Pf%2hK-QJg4J3uZgqV+a9!S%T!-JP%g{}x zpKpL^eDzyLv}Lt)ea z*5Yiv11_>>_2YP z1{b~!4V@pe4H2yfl3gK*XvlXU2d=+6_d=hiS=6B0zPP(M&jB7C&F+ 
z>}F9zhD~_C^g$WujH&mYb=;nr(tY@;Vf-;(Y~jZj*q~rC;-FG^AMrw9MsmEA|9B_S zNz>)pI#cSr2Fh3(_t)QSCWJ0z=st=#E9VdB&)p_nhWhr7O2O8qjQi%^dZ|*M_4>gl zPUqrF@|<jIs*sW{^`w;4Vjf}pk6xu8_&z!O=~GANp=@ImTeS95jS;)Yg*^+8Xjhay&TY_h zHPydrpB^i1>ScMrE^i5gkrXVi6obyr`OWbjiVj32@YL4PwDu{5qi??E6a8oRNu+%f!$Zr zV}-i2Xujc~&+AQI|Cr-tZdF(BOKq)Ev?4ZY8% zPjBfgwtxH--KPtGUviEEm2rz-d%Y}TjbVH(Uuyjw=V^U`Y`@_r4MoQwXa9C0Z`@YR zyuo*pf<)w%UR+v)?h?&&N*RaCQpPF8<@^C3hJbP<+G&OK z$~lTiup_XwOu#GZ>I5qz2WbarV;)2!*Z1AciRLZd1B8oi62IB})evEvIn^r_%5j~!9(=rOgkhT7yh$*Df zYRb@lu^P?sRx3?LP#!J9YH>^7L5DI%ouIh94zHmwR91IqiXMc7H9ePjY0%xwu{PJLTI$ z28y`Py|gDV=j)vh{Uy#zzmH#(6MU0&&QMpR^*e0MbCONUNH1G0U)s5?Lmkkt_HK-5 z56VL9`zXFqxM$XaQYn-UzmVl*Sx3cnim@KgbCatv^Zk>;5jSql2f9`>pMsUkfE&kT z=o|=sLH`UnxHE_Q!lJcM^Dr^1tmu`n~W>>EQFLrY(M(ReaHol_p#?*z?^yUh5!%hhG!W zJ%}9WEvoWNemSjLPFONbXS{npJWOfmBxOQJDOICYLL28$%43rC{VRAVb9QyiU7zN6 z<4w}laCh~>4M=ZB>A&M?+h7Nhjc@ZVe?t|Er%*HGHj_wyIv`?!<-q<$WpvyTuP&*} z?bGP50=ZaRj&ZA+uN|)%8h+me?kv^rX4thbXddWV8RZ!eH!(CR&E}9dwjzF2*6z;P z@wgmdC1_DmWdyGC7K!h~QdSY7@%Nga$F2(rZzss~ zQppB=%%HS%G0Wv-UVzC)(hfXXkPY+94m(+QbQr~#m-#9bhf=53 z(u3kT~=9?Vb@2ae2q! 
zt(;`+wB>!g*Y?^|;@yO8hKKL1=`@?qemd93F#Z5qNzly77Cv|bt7B88K5LTlYcO1_ z&1VpoSYvE@|DbO4?pfmp>w6-)T7i1>OSFeN`Ed)PUM2hX>-_@yU-K6|_6w0u(n7e_ zr|r~l10GwrKHCojr(wSSBRNY!!i(r+wku&oPOo*>n(?bvcwaTrj9u2+=L#+2*u?nx zCBcb8ujJvImeD=i52geR%)|Y@kb}KUwzy9!lJHt`G+!ts8`?Oe|Mz3)VUZL^MU|{1 zCSy?zT}31>7ev*5STE2v)!fkATHA<#3ikZSrLncl+#)axl{1T4!1c-EF=1oweU-lK zD+jO#TS&pL=A--K~y>iHZ;D_IU z8=O}DW%glGVmj2LD)WH=Pd0nGg=w*ch#;1Dn6v3DBEpdLCn|~i=CvU)3#m5DmjU?c z@WfXgjwmCrFiG-wSGr|6IVDux%6nMgwApHd#pkXezE2T=!jcC zh=_=Al(P7p6RJgd|J|PsCG{_J7!kPp+V+H_;-yeC*j&ub^CHiScqrBPS{B4b*{#Gp zs?yzCO2gmvGCgZnArvZ2)<-(Z*s^W2!R<&n_2V zMfL2Zi!G%$+&=z*T!tw6Sp?@9EkOnXF8 z=zg8~_$Q~7`zd2;mv!N)!pKWMGqW67n~25bpU>@Wsm?F?2%x)mRJ>h-ypPaay>NOg zWn5e;?hyBUw#Od3{!{_oa722}>nle9&Jx_V(d?1}xWd<3JV;Q{d!j^xUcqVPjE`WW zd&sGgoDXil8!nnGJgwJK8STbJsuraD|xf=F!Fyk`=f z{pI{c-BU5-GT>gK{je;@d3rIoR$rw=7wtt{>rm1*rA(}8{f!QB9nwh$XO%73Af|<; zGjb50{>pA7HADSGH+8$PSDnzA+WXH0``fmSfm60MUpM_RBO@;o(rK>`HCK>dieX1_ zmfodlG_jqVx{H4icN`YGc_E6l@A08S=xL1~S)p9Fzz&0H!p~?eQO*zxn1Kb1 zXJ#y|<;{XJ-S2%iB#dunpYCuI)^lOC?U z-;Bn=yw7*rbtyX(d6{4`L*#zva2t$`{y@)Kkjl+rqU`qD_)o_mSmI%UPvmc#xM}pd zCrn@0F0R%J=Ju3BCm`!TXE2;4WKue6#3ApN#&6ds{8cqf%&n$Lc~cqU^z2?q+|g)~cyzy<8j`O5Q1M zA-+;j(;F77^#)f+(;ytuiB{TGkt!4J+k_`}(la|KALd3NfuA~jj?uSLAjG#K6WVtW zI}_K(X=zf{)5qjpUPz9Qg@vWw6()YYJtr2cMc|_{o0+fyS5&r%LY06;v^Py~yh$ZB z6Ryh=3tPK5xsukGnTwy16X*BycjT-V;LRB)xkrvJCf1u0;~Y$(n(MKBgD;Sq3?oof4uZx{mWDwYY8Sc}}g~SCj+Mc5Z#|p{$$d z;i$}LF_bWZH5UA5jbc?^Tqoilv{H9Q@(vYlQufP|*!wY5+u+HoSvT2JBb(s|#?p1< z_Ye$M_qr}>Y$EUr2>bm~yV1uP$h&^U*Dx4n7m#d+-4z0f)nrJJ7E$5XLa&+a11)5M zU~*wiY&^4NRxE-@=&uBJHrF3b`>rR7@6`U0JQ1eb_OIzi)XC9q0_43YTblHMyq z=u}jK&tL{$+#t{vQ6eX4y`mBLTf-dvKthWC=99LeX%Epx@S(WlUT7Yy5M#>4x7BbT=b1)D~+#6#Ps0DeL3~{aHxvU!k8)?4Z(S3j5Z=rj6kW& zNHl1#Roj6uLx#wCD#pDeP|;(p+};3BbnQ4i@>w*S%RS8dVS;(mWblzbqw6legz+>S zzB0&VR|^c%NCStMU}ni0;nJ&wN9Przch=8$8#F?y zSmC;YDgL_qcH!@|Bc83o0mQZa{_S@MaxA0uk9MRlY+-Lu#6*4)sQ3gQ%CUsp>5WtrjBzzUOcD6iy#&zWnXT9@il$!re2>GZ<> z@nf2??nT&SlcpWH^HjOPK=gS}?rZ-2x$3yPCs@A#sF^tKlg?bJWz8;HFCz{F@7?U) 
zuRmS$%`JpIjxw}siW1wA3^CVtiKF6Sek8G+#I)Y%-C&a>O~Mgohr(4GwKI4yE{$7= zn3Hu8j$t@uQIv%HP4Nd;h*2!X$(naKOfY3Ph9xRb{EYVVYIT>?npqX?Ie(Tg@5?&d zEko7yeZ038UVxBM1)d&6GB{B?*+NLew-LgwG3L4@G^>boIJs#~Ne3{o zB)69hh<76foyQacF*L`k2aXCYf{$=A3B5?SQ3ZErLmIJUP`bs#ZRrfPk-J8Mo;YZ! zm2$xZ9fAT5!~WAOB^MU=qXZI&WBqRV1g}f~9-*B39wLz?8C=A?HF#M5U5~kMj^vt` zuklW)m29F~V`;(Tn$7T{fo>Zm27>#kX-|2u`I#GC%qBLG{$JrM=Y>cyw28+KM#uMH zf_!2OF4w5&_iwk%wOvRerpKV5poaJo^K2G|&l=(D#~G*vcRby6i{h1t*$$V|a3JUs zll(euhKFw)hfNG+kILb#KM|ztf zVn40ZUHbY9ta?~{4|BslR%A0^vANdG%ib0Wk=%NE+KLFvtjIUfnmp7-`9!q)OSguu zueBiJUN-3wKU|%fXPTQx%)(n_KZ1yR%9>q3!*-MJ*^pXR$%$5dLokv`$;rO1_@WDllV1I83t97mNtf!QmZ$D3z65T5_s|eU)DSRR2&_)&j zYEr_=&WefKEKxE9$u<>)6GBOqf4JKQ7=tBKtfhtH)i@*yn=j#47>kTrddS;z7^)oO z{5UrKZ8>8bW5XdA8$`l>ST`_yDCx(`ifOnOtdEa$;ZFLz_5Se^a`q8h1om^Yi=*t~ z`c0P+7mmI7G**<7wL^j8-RP}Y2)c&&C97PBv4(`(tL3JkrN4%e5z2v~e)#!AQCZyy zTREX{m1NrBL)G%wPFIS zmMp9i5@xhfR*Z~K=C;$83HPURH#i`@Aw#{<+o-=9Jya%=8?Q>+I)sOrD&oz0uS#8P z0$FimWjf=oetea@UH(k(fD&A)j2=$6Bd%SduYelBO!<-Fg-Ku6Gx4aze)O0GQgsC` zvnDN5k)dWhv({K8p-F?cr?Z}}vGrI6M%QO1jmqkBtPcLW9drSulXI;eEg{!bl>$+h zJ}} zO`=1YOXW$f)*a>bv=JN45T(u@MmfkOyGHMP2F8Q&l~&fOZ6?nsCANc%B8h3`V#Ffj zem9tWSHurXT__OHm{?`H+*ZND9DUJfl1Vh)Lbpliw?BrUgb6?uPV4-UdVvIiFSkUM zi8Gd4|1kje1wiJ;o_&n$J2PwFg13@27)ZUsbZ~I(U!9%;$U0Xr-BJ+~W$ z#yuVIdksq9L;(MSBN7W+{SW>M@PPh5B82`APw)RPe*71b^#1$*d_cLk!T$r3l7k$f zAQTAhi;QRe=XM9*36(FC6VmZY0ltb?ciN3F%tc{`K-vEg{O$F>uO{JyJ}Mjd0J0k# z!58)i&kuPbl2wrcEA`Yz06RfvMvG_h07w;2fHMT>J-%AM!DuxSS?Kbxbr<`fPNpuR zEkhzVG#7%jlc;2%uXd@~qeW$}!nd0R0>$!znZgr3WB2~&4~Ibe(Df9GOG+|7|F5@? 
z+v^Mr!p-Y~&`abj1K26vR##VF0A-XVXX1BF6BC;1zrImkOYUQp*Dr2aHn@RUFCYg8 zT6Di#>VMH4R#E?N(8K@xM-^&TMiC$%()NDMWQ~jsB|bo^(SRv8M#J;+Pa4p=+{;CZ@UV#J zT}BfHWWnhEHxLt(1O){N3j`Ud8Y9 z8&Y zhCTo_xq|Vy&%xr=pK~CQcrR*4X;z37?STn>anRp8pt25}>j-QGio%^^ptH~#!!A>f zrx%;r1FCtixgynf$QT}lpZfqwAl>E;IT1?lH;n@3$DOTf4I2;5}><=&ZFw6QzT zXX6SzDjn$F*IO5XPbUd5YBWK>lpW|YKMU!!X(K@NkEP zLm)3p7bQePLj#&(sfmF!;5NdOx}&3@cnD;4Df9C^on668j{w6ZGLAcjv#Lq%{N)cQ zki;b;Ls5V!l_#4=-RPX|e7@yi+R`vD2L{X=EWn0S4eZ}JfoH~Zz<^9RvG)~BNTFbC zn~LOM0NUwCgy8@@%w&-&8TDfqpp^P8l56Qc zHshi3H*he}0@%wv_vh!nvYYSP4U;nj9EKqAYe?zNPK$#KM|tvdU&%imIZAJxo&dSs zD**i_i!z0+yXw#PujYmh%# zYp344TRSp!9TuOCORRPCtzC@* zVpy7W5U6r3j4ViND!sGCRGrsoR_?p)VG+<5or8-)C6hgY!ADZt+f5TWVv}TuZd~+J z#~ei`lE_;`f;~z@0)DwRZdsafj`)XPV^nto2voHl_8=`SZTSewbi^inrq{`9P%@H| zY*90rGApfK((qcgSy6?38k@KhWsxR}wr{CGG5T~~9i2)BJ(4i73ZB^dYZkle4>*0r zCJ3j2qg~F!t2Nj{``zIpz|GsAAcK;SQ>W4m%yW_rxIOC38PK~->?CGoRqPbZuguBX zU=II1S{p>X26rztq{1*T6?~B&#^{GhtN3y>U9is~i#ia7k>2oY)R)j4afvkI4w3j! z32g_XV9FDaq%d=hJ4QXNNcY)$&%_bo&v{&zhwGfb?dlT_?wZlv3 z5W;8C$B}*kz~}K(#B-3&r?jSJBgg;rb(bl8C;E1|$t_S@$=&Sm{pxM#-tyf7(DLwu z44eg`eSx;|!>DF49u!R~nwN)^GsI))UAIK|_)}QiEi1Y8?w`mXStXQhME%E)=I{G{z82)%W#^5kcY7}Y9(=Wo^uvc{sRZSJ-x-LM==+) zGZGU4gP>Ls^=R6ABUefsqtPZ~RkRurO?e+*i%Jldfd<2|vXZ6hhQUcN(1~OYXC?J` z*TEgI@>jp039mIiqhPyhQu0`=r(?lHdxyGPfDCD>hL6e`Bp4L>xiQ9AK}>e! 
z0=lgfhA8o0w*5wZ9eJ3KbhX`GFa7n|FpIh@`Xd>L2 z1S7iZ)1)KDh5$%Q@&)l;w!_Kneijh>0Hl2#1S1%r6Ijcj6p+tgVND@*#8`amdosTZ zBur$YT5#n#9i$<`a@43Ra41?Y7F?|B6;-@qM5J%H`<;I8$Dux)HN7r#?>Sp)Vec60 zzQ}tHZsgd>P7S&Q7qVtWz|6NO#hbk&Z={>90Q$ZhC->bHLs$}0}p-4NkT@x2Xv|m2Bh(= zqOc6_>3IX|&;#;)z`jYzVc3`6CY!hax^@X7YTAIj*I*^eIe15xX|yX)xgB-ljTmXH zKxrciO*Zc>kpAsR)^6)Uh$@1BGH0v3ei$2rr|(futh*)jmD}Ftsb#b`D z@nL)hw==@9hg5!+H4b|aVl_1vBn&`Au5T2jOmy7?c+spcx|bunL#A~q!cuoayWH1i zmwnM@AIJOvx;ViDpQgtS-y-tS9-s^Ws!%}wM!a1U{DPq8J!4G`5M!`(4-z|VatiJ> zQuD(OWvKZb@@^3462{I{F-?v3XlC|QYQ%32mAV`vBQfaJ>T(JnEgLcVy}nq%Zf9`F zA82pHXUB`egsHlADWPZFb@fX6ro(y~dKCeesoI0O{+1*Rwn$F2G{Eak~$!zBBfe^aA$tT;iA^ zSmarF5?nC)l{ew6dCXaFuH)cn<*2AZl?hU?PH3pxox^W%VM*W}Hs$ZjF2r16&Jdzu z#A;2~%XhTS5a!TFy@Kerme5)K0#d^t&Q1#tAkU`CX0eBHF^GayYO%O_BnLd+_}&n% z;*7y$HzMqBfm?6He_C5=0`gqj$QN;Meunyc$q0|`Yp?kkYk9lrUD4z7+#oRnHmHSC z;NH%fGm_cHUfu*O3_6hBNmB96YOD4(6nuX5bQ%2F2vUoW2F@iX{NXc3SSS`R4j;+p z#WF}+5ZEr?&{72wtHQL=AcEmEbJu#ZF-D#46fe>vyJ}zC3#WhLdDJU4?i2)a7eAKD zhNg!Jmor+W|1`Y?av>i)tWvLvS*$A}Miy@l8}d=IS!%OysrrnRDb$oOAR}aQJmBKgJrvq7+QH(vCNL{FN&qp@vPB39*i_j-B?qW+X$g zqqm@eIcP0vVc{KDt^Vw8bun)Ru0}RX>4KgdOqe(?pdNa_Oz=*Ye})X=t#rgoo~6Hi zfj3VElU=^;x%H@*^cm5RT@KKGY1FCM)4^D$-m}$lK>QRLM)OuP+_<1h$s9vdqHVM3 z`5}sFj)CB7S+-R%`HBM6o)+)JNh@&`ha$F$i6KIXz*%}Z=tO!17E^hJOtSGx#4ORa z*ENcqh(?qc@VwF8clxNeH{#kJoNX>blU$mD#h$U5 z>ZyI_=O!O-#1MwQSIv~aCs=c)PvJ`Ny^GXDMFW!nrm#=>+q&7G) zeELOs1PPoOU%{E~TIv&i<($#_jrcAZ&$IjJCI=I531n?p!sCqOCdg+gV)WE+PxLsOO|U)nDJ4%I-z?D~ch*C!{@EexX=%#@Rb#3Agx_2Glqfd{xd z2k4rl60%?c_BePehP=W@r`$n~!7piq-{Qr1;5UW(8*2rn77WLEi2CFx8+Qx5l(qzl zT>tfj70*Qwl0+;`%nMOekTZ`iMGY3CKBqZH-7)ealaL@T-W+`UT5h^NOsky~VDpwHlh)XcqbBH^^utj#s;+Q1Qtxz8Sv9~Pc>^3>S>U2MB~ zN-I2+A7&RcWdc@u{5|%9asH?@YKzwt#4HA#Opr%yyc1|~cjN%z{SLi5lo$_!9d4Pg zPrN#^O&mAR1XEp zLnkI8hAUsUGHV3^+h{V{A-QW$7`B!2i2jMwz=%Y9GKO_`z%#tG0bKH z^Bz>zRGa93Fn!sAs)F4@8!`Wk^Pgq0J4mBcE;_g>`*ul2y7ioCJ$Bv>yR|Re+g{^UlV_rDOEluABsK$ z=?wYgEc_Y|q=@W4Xb3@&q&kJ{#O&Gl?}rW$Orc)T<=BUR`Pu#WXr5BvoADko6MPQ} 
zJn@nha3aA?D~4IA^-lyoy~KEH$O|LiAiWk-;a;0AjC;KLtJ7gm^GX{MqCAGWk9rL4 zaocn2bl+Q+?YXp}!!r{%!~0E%mAdwNP11^}yP@kfu|edWp%HehIn@-#7<-M}O$|ZR zL*OxCIX(GI^B7l*C(3zhWx5qR7OF`1QZ-7Ngh%=>zv2hYOm54-qm%%f`57`eG0)R^ z@@iz;f>1GT6k9v^j@Ub%#3F(B@%Y4j9)WTOG>bY|Sw4ce&|-xpO2i++`&*W+t1#jY z8#Xc5X&&a=W(I9zbmNsle;rQ=ZHRC!wBe6Ixl!B|?|N+wVGKaGmgX(Brjl)15kAB+ zwh*Mk;_W;1zajiJ5KT!B2_Ll%;;nM8;#KJB%dE~nVaH*6`D;>b6ub@A_yw4>Ie7ERRGw6qB-gb*dX2#(F?-KUy$>+()nVkk z&kOypnhw7PwK}O?8z=u`lYQxDKC}l)oC@-kO8ANlY*RrZ(iLG>RMx$00!`jcmQCKo zk7v`!WrGuJlJTNwr|i9#5q(!|y0Z+!Xji_gKV@%Lg{PwmQLbf;d)#Grp*KV`i)e%4 zuOK{RRg^TJwHglNKDC9y7({Z@5EW}8+Nrx^EgrsH1cX%A^DSPci-8MvQ@0{@2j(~{PHBI7 zTOF6LNh^N=f&SjrEoH^!x;+Hw=1U1oE)NwdUNo*72OPzrD9M3am*6JMjVcMWzIZ|L@c{l`!j8pRW z$pT+l5rWREeY~8cb}=djZciC84Zt)=r(}q{gw2fy;V$xpdqXQQpwQpUrwZBT0DUK& z&-yVyhZG6DNT?ljq}2XBAHlS}D@Wq>T8>0t3fT@%IL2K2FO3@6b4B{^{xw|t(5Xh1 zaWVcEMmXW-|B0^ruV*eN(dFD1Me@HB2QvS4e*YbS{y%yW@`d!vLI6zvzcHAZS_*UN z0`_sDNDdM51yx6&Q%I$l{3SQ&jvbkgEoyj&bcrJQkh*~LpYbmO7Wg7dd{5TblQ+Qs zuThu+4a#p>$KR^VTaOi{K#2BNGjJ$yykY6Z#eN3*P;~VyD3X*bB=mHZEUE9;pyo$| z@`oA5bL*3z#zyY6H0Bfvk%qn^W^&fULu8u0-{~e4Fdh?gKz5l`a5cb$_iA-Yt@HfPN3$n2 z^y^yqjr6C2gkhLM3&_VBWZkUm1g-1q%dOL`hpeoIuVT=ZkB`qt0V&=?^~)TvCDwr~ zRuEb6?f$unx;hfNC~AF!%x;~mFcxr}uKT0_@&U*>Fle1&j#0~?(`C2P6Y*+u99rfW z#U}k%MpaYY%jNeY)DZg!9)og&r(^wOlO77Sx9EMsb?;&NRS%6{cPClT7k>SUf5h2t z`rr9iDMRDaGiw8#&F!OVU7hbN-0Yyczsnt+#+}BIBF8Mk+hykG_hs~p_Dj9~x8zg5 z);88S7vbp~1fYjn&2+8MJ(B!BmD||ZSf|oSs0<`w>*1HNiOw0X+C#ql{(Vn0;G(9& z`14OrvIY@p|PJ=8}@cf!c!w zzYi08lTuUJySruK3CCReJSM6~dMd@2Si?~`X+!U5u-fadY|tpOpP zfPzl2%VM1>NSfh#`u=eu$X-$@9EdH_abQu^c$&DDI5AKgNJ{Qyw)*TD2pl)ZG_bM? ze%n#2WM-BH#g`W>{y?!uH*#3bSQtq_86^LtxTu(RBMZvTPMzcurI4*LLi~~^XpX_l z2<=^bX6Juq@gf&3Qq5%05m*Uj#m`TAp9me3ylpz&UDWl#MV#%_Z6zh9lM4gG zbt?UYRFyk&>lUuI|y7`dw=`S3yvm>F9 zdGdJSDJCjGazYXlds!JN9j$W|{NB@VCJjlao@*kt9>r=#2 z`;$Hg(4O+tiF0~(0+Sg})4tm2p5D21#qjwKgVLv{K_4)ZJwG`0=#nX#&h&R4S z{g@)p!pk-;^7ql3*f&C>4|D?p0-1r=qS_FEDxi5fQ}ViVLnn{>tQw#fk6+9(1BNK? 
z`EPn+ZDfCL-v6LoEK1SwUw%P{HspviN?{3?f4`Qte^6RMj^koDr1aigt*{1Dp@zz{ zuQ*@S)YE=?eH_m3<+O7N} zpv`WFfwXOCw3{DxYmBd`dzoj1)txM!tyZ@u7vWKm*ht8vK9w>{C+`^ZcS zcam|#{Bm@hx&_)k1(UYjXKPm20}CecQOVQNA61$Nrd=O&=!W9ke1+;g zns~;dD?`nWx5ygBCNEPatk$m=S7F{MySl2;Bs*WC8SM^;ZI_8-5P!(0o%qZFrOdFgVXDIT zDUF~LBVv2z-8$EMz$bAn({KGSI#anZR8FU@4uED4gko=Kgvvq09=3VhBM3Ux0Dp`V zjLm7PG`|ouDak9#ee&+pNu ztnBe1GV6uUNFJfyEt2MDu@7n%QD1Am!h**p@<-~amuXRI ze5bqw=9T($IJDbB`Kq>KHEN(y z2O&>}{gA3(V04GL;Y%L`TN6%T;%fJET{iC(im;*<_pWg7bQ%UOYQ&v4fY7~YM;LI; zoGkBZ>zq5T769IDMkC2{K_>hg#Wo^@DLA2*35+o}%Oq>al@btm<16e8}F@f7p zF(EAd-iPDz%V4ex3hJ9KgI8GIP#2k5hjC_uo{|_sdI`3PiHVk+5fY-+cRHTvl$td5 z6)SeTU8iChIN!_4*x@2?Q*08I|BNSwy_!`48LoWm1lA<9oT^{mz)75`zQeV*`35>x zT;0DbSISN)*{_gPhul|MESLvjjd+Q`+9a$Cf4 zajo|yzj32ev5Qfw$>ZBr4TDOH2kNO+2}?n_TmrGDGnOZIb8mmtKKo&Ed3}J%?{mjw z+~$%3^*kGD$o99pg}d@!j|IihF8|C#4!)2UjXwaIVfcVHbiLu%3xPB-hxw*YZzD`! zG%JF*9)z%c623eexzgbFPO(>}H+)|bO0d0`cKSRd?m9h*yG*q#DcgP0KO@t1UfQNb z&ZOmng*C%e1P50@jtiU>UvhB}DmPC2C0^=EMgC_h;FHs*15nrKO~laeFYkny2@9|b z&>j$RgQ-2JvgC<#0v)cC) z>oYH6OsXF5<^0g`?qwXvIeOKZuGXsntr@{85Bx;y-^Ms6MyVMY>{|n8Lb7IdMe1<& zT2fN7-(C5=+x5qM3VG#$O=_4SEUv$N5=b7vN5)s=ZTm_S`CWdyZAq7Qp6-5EZ#mzF z-eMCbOQB;?eQz(eNgJIU<;@fK+sxhK3#)DPY3- zN22HnicIQdY(|?%n%O4{oE7KN167jN-Z5_}6cj!Dpr+*1()%C+RS{o%{ap?;WI)mr zD=@$zb{%)Nie-k z*TiZ>I7Z%g4TjT2AQ`0vzo#5XTYkR88NA29P|fgS;FbgTG$T4TR%k=v;2I2ZpujU% z+y=aG0PTQGN=aLxTEUcuF(@V11+(IgXdBSKzOdf=bq-KG@6~aTC3O^CD`5@mYygnQ z@Xx#o_PrM4melT$k|H(84}F`Y*?FM>vfdt)!L6f1NO)mkpB=<9mzk#?L>$Vly?piR z?R{~I;?R*X5u&=`G%ViUi9t?QarIZw!&kMZt56pL`nW5&$nclp$!;dM!24mYRzFn+ zTH?l)qvY>%zRukvj^ak9SG`rBx}<}~!N^SsQiu>~<(I6`7{WzCSFI9W{#NZcq zqK^^EY7s9|SSE=HzPPI#)O7G)5AhJYbPrpwb-^m)3)2QUo9?Ql32FYyw5CT1+6cU6j&q=%J0v3m(0J? 
zSEHZ%Z5COJoza4=l}$HABaaVz0m_x9UX|7aF_g(N==`(G{QpJUUv|aOwNblpu;31X z;2JDA1PuhY1b5fq9$bUFdvJFP?iM__ySo$I-o0^tspkzW8Zv9VDssHSsX8`wk)cj#I~yEe_?qysvCF#P1uGlo7^C9DUby1o zAi{1Alu8D2Z8wpinu?)e-@f*|4Y$tpK&UQ#pQM$p#a-MJiQX3b5!6#`DKG8;(@GW6 z<-MEzdko$Wx1pYF%ITNfCln8R?q^XA>V1;QFhPY(r`;3nVg^OH3%yY?G@=nShOKsa zdL>Bn8kCYT8yJ){d~)&_fB|Y4bh*VQA@es1+L;*{;?&Fww+Mt#u1wlAgW%gipn1AT zSWjP^b%;lEtp`Lt7m(32ZEqZIVM1RoqwyLP$#W){#fT*cXWM#?Gely7rV#opyJzq3 zht>re#$2nwL)8#E?u6`7otmijP?ryp&%8mzt25GGmB7mUf3|pD(h&G2C~_hmM!J7I zasd(_;?J2iy!h`N?un~S##vNN%}ODh75VP};on_1)4=3^H6TnHE3Z4{<_b{e{~q+) zXCjMSj1aQt-uRLF!zbz3`uapMQuk{`+=voe3?#`n6*7qtX@+AR&ptmnKrl|v;=m=l z`=0Lpa%(eJBC$cmj-n@1WjD}_PUXl}>5D9*zNP;{jVCz#5orAK(y@Z*o%wb~GrV?q zQPuV_Ful4Otae($rQG@?mhZoX9Y*UdxoJkm&Qj#+$4ng`Qo+XRQ``wx=l0;!kcvEr zi#*)-YZLPilsOl3AV0M;c|*Uo`O-kx^+3Y+Mi7s*n1mPcQd(~e{tfd2z=B|tofTm2F!oU|?hqds1aV5R%CumPxS{ zxWUJ9dWg`nn?rMiy(XTDJo#ikA)G?aZb=(Ys~88?aFRnie{0s|(fI$nweHS| zL>CMUF+n2MZ6S|Y_o%;LjgVyS5%P6)Fq-HI{kJH=*!HLck8+0T)txDX*z#Gg(D=|G zGu`KDzG{aVlDLCueNP7q+hIvIF!|tTb;}oQl3_ zY83~lD2)m~e6lop&h`%|WMjy!dQoRLA8G^oM3Wm@J-wK?R5FLK)oG3hhy9T0n1mGC z2NXgUXn~e=G3f--9*j-O$|z5+KifbGFx83re7lLtT42J9F87WmzF5NM?+%%#QrP8C z)S{bJ%*vB!@a>N{-52Cuu9K*v;p3QJgqE5GiUR{d(Ze;H)z&hZA3Z4L+&3O6<{R>0 zJ)Q_42hoO&(8B^?*I~v(s)KB1K~-|s|Kva7Pn134?(=23ERfIkV73q{`!JoeTtFdh z+P-uOUCbx0N(uTHP#+u(e{`_dBcxmB(Cv>Rz4J@r8EE6nFLQC@;}rmL*^{KdTh~ad z>0!&4tVHqzZI1*cq-0|El2p~2-1BmC*K(SRHa3sVg9;BO@*U`kSKhY0X+ggqLX*2!G;VR_ak#$85%M0kLl(b*k`LOi50 zfECim*$OT;gUc4ZI()yuq;)V;RHEPCL^R)PD=Z>95De{C5v*8@jONUt--YPQL^-{W|IpI&iCi6OU=OI;II%e-ZP6 zoa_;El$6fj4h~_FFJ>kzNH6S$W!c)@@2tKOC%>FE`jweUa>cFpgU^`@eI-zUHvemK z_D%eMbIDYVj&wQ(Z6F}z5fO%Hwz0dyp&_4@`s3ZMQhqH`EgW|>tK=09evgx>w73?F zT`40EC6oC=LiPl6{&6LwbIbeQ?KbOOWWtJCLUD%TCgO!eZvvQSNr%F>;r3a}!${L3 zq(r|5Jon-`Y84=7`ES+%QO!_w45qXcX9iVVfOoaYB(J7uqy@#anV;l#CGEx`Cb>3O zGbI(B{6q{U<-KYR0oM<~g!1bI2e*{u|5LHs8rJt9E9AtY&i<$wOr`tuZFF!3Jk(;L zN%TPoV?_|56LcJ*IWdWK`%cXzwR1uc(sB0H-p(MxU;ttDHzMBd)$VX|tThkiH^4C=Rnvtt 
zQ5ix6b9Gq}zwF~hFL4yYkT@Z+oUOuWvYc#6T3Y4oNHM7&Kf-!?dL)N~IfHr78PBVz zwuB$yC4wRodwQTHK4I`N{EE~4O4TDJsh^!&Kq2*oSrnc3=2iOsI&zE<|LYYffZ%J; zD~RI;NM<<*hx~C&R_M5bHD7gH+rRPVsJ>rvKTt}BL{G%TQv5~xvQI-36CDuuL2X}j zP(0CseB1E*-q|)}q^e_r^R^*AOR1TTssq zmYX^LZ}4OTeBihL$5QeCj`lqRl?j~xzqtQfOG|4jP4@r!te$TzAh#_pG;*V92t2`P z!7~ZS;Qx0VTaRQi!hd>uQ`G!f9S#&5*8}hkL7M#k5DDNlsr*~WAN5`!!IZrk1RRp+ zq{sh1?{x1i(CZG6Q2|{VpdSU6eRN^IcEVrZ_XqjA4P!>%fp&KATO3J)f&Y2|L@bqJyYr|XdnczDOkycA_%!C z&aHmn!@(C(FT4GSO2BJZ_>cGgz zh*XkO{HYmN-D_OB)!nci8Rg$pTEa&U4~n_f(NrFO>SOag%eX~J0f-&V&eSruo7Wgb z19^Vipp5d%o~-@p8vlRa8v+Fh(-&^XLx%3RC`^8GK}Vk}b-urFsa#vFD(lX^ z2`aGC;*pB|@K;gQ{ifTg+VRTO-gMj$-zV2mm=L~PR&Mt<3o9izx1@Y*tV3JP&mgn0 z%AZJtpEy^!_G>PApEoi>A75H8+k}ZD-|@HqCg;@+s<&QL4nQWf*ulz}U2$I|N1tx6 zH7HW9E#^{Ti4YST0+rFfjjV{}dXpc*bHyS@zDu9q?JQ;}y}8AQB%!A6k5+q=XLlNA z7gT+FzjzuH$$WU&>q(l8^5J$cA?XV>_GfKxvT(~R!`-bdyslp6NKy1Oj)XO9s|axu z-H0%g6eTirY@#GuDDrDbVc+i<1o9%6E$)wh78lVn+dMbIO*7t{pI4Cdp;|==@hA^u z=OAx6o55(+=WYcT0)GVv7>s_se`{{NSt-mC3QVyl0~xp~=>Ads{|zGJ*EnXWdg++s zZF9IH0r_@qLauk%^kA*jYO&~C{4`c%4-3MVeWps~q;_LtFt+<%9t1$=tmyJ2Dn5-I zlj>VcNqK|j?#=svd4*A4x?rk}A8j?SG1ve07c~}YtW9^YB3BDuNH2Hk(O!)tyTce6 zDGaDfZuLmVD_ZgZIjq@jgJiA0wDx8|n!mB!879bDma6fv z4c4$P;uvFVH(jO7IFe`rK|(lFQWHN5M+m3eWk;na=G)fW#W z7=5fgq$0(lnB+se!Jcn_5< z3LzJ*XRqGvoeRF;%Ho);HijU{UM-@DBtPtNzg+VD>D4#<6jBSKha zea1JbW|wVhf1ECK52Gj7>!v?)8>LWC&W{c~+1$Yrd4&ujfGz1oDa;4A{T z^&$rCh&c2t_90^YIqtyHkfwyLl;ZqeaPvnfb+fmi>)tq(;o%HD2-mQ`7iygfhMzZ4)tVPrc{aa>76RR$IH)*p z15?2#`;fkFRDLo*9+~NE8F_0YBE0N6M31qp1IhpIv?p6IWm7C*noF+$4*zX`bP%b85FAwGUBCO?h zT>DDnD@tn#+jlP(0k_|Bt?MICK%F6&Gng6hzm!VzdouG48Fgp60@|@b`0}?N4aA+{Iy+}q8?X+2c6vPw7m^1MPVWsPGJBG`@AUs=uKaN?)35)s=o+7F4 z+BBT{S?C?T>PP&SWqqqoRtl zqB=5l&sihr*eNM1bOM_@(u>qhX#4 zq#y&_7QY}N$D47iP|xFg#Vp7d2mnhsP99m(@ z`c6bkDCi!4)~j6)_6rFJgDQG5q|{9Ot3hC-*K@W$(X7JQdV-evs>=2>9YW56-PhD{{%IsljRi2>eu)4Hj#=5Weg9W zh7*Rv*or1|)BKdmjB-=txmpqw5h`fnyUZlSRlgPK=B|z5VPMG%LDDJ5dL!szv|6_k z%(W}HBkN(Ky9=UFD#c8dZXh4Tobt#um|S<6Q{pzOMks2Md;!gnLeb}a0{Q~BpKw0W 
z)_K6%G^8odhNi&OCdZ|&>Adc5x9l*RY@df3c5hzZC<^=J3m-8!fS+9QRXusgJ?$lNS(So#~PR{CV0$NY-|)0#GBME=>1| z(gnzGqwpw6Y$Sm-Iu&itA~usH#mkoPI|R}!`2t5v_}W^Fhi)moBut0H>1bvHQHj7h z!qH&YpXPCsFDA{VVrwWzGbr34lMTi=9 z8C(vpRM@VLmoY_85PWpPb!M5uu?&iDN3O`p^6?{TJIqwGR?Cq*LQ0FGF!OI}_EN>)m2ope&gP zdL*jA9NIMGw+-w(>|SzBVFX}M?T5K+{78Q~EZRux_s>SR?X+(#+yKUO?p*&i=Yo}H{1 zX+kz_#t~yydmvGZhmN6L7ErZHE1zcz^R0KXdC@~YC;#V?{ai9X(F`2iv_+2Mu$6phw_j**<) z3z3S(gQ?+fqkL~T)JfXz$i5w^5#`~0#t&MX7(B_y3=Vo0g zQMyum-`B&|d7yA!Lr8WdjHipT4 z!?li|695+isQ`lLbAeI?lz+);tw}QN>_DnT_t7u>b8n&ZtL@0NL@?20reQZcNGNg8 z2r!Gv>>w~5=T>TM`I}-auBuwM!$Yl|tmB;f6WX~za`6O5Tv|D0%P=30M#L_BD@pn< zM_R1W$03#rw)fdfsxvi4#_&l05*^0b|CYUrYyrSbzZ35jf)Y- z18<#B`h>JR@`;GdZ>ABYqbhr( z%lgd6mjpB{BN?c2Ti%?;M7C!rmZ&eI$vfnk#`aDWs5k`|DzR4Zyg6e(xZrj$lj;^l z&ecdfF~e2#~Ml-l=eCAZ6$Zf!caOJVlNw^*(|j;nDSLU;W>1z{QgwwhmDbNh(jCM&LifjQ}b=|0{Ov6w*CVP z&xMj2#%1y&{>!iYkTxYs^2U_M>c^_ffvR1TN@cEqfv`dTKAG%i?}aCF^%Y_0 z;Rebt{bH#;v%cIIo#Gx5qNr{+j=`)n^X;*6ukEoDWV<;@w@-Mcu>>p=>z{vxU)A9b z+%{X65O7rvC9Tk!76r@GaY{T}8=DJ_U{_?-&81VT3?WFuZFkQ-QLn*89*kuPwOMvl zdfSEz(CPl@pzefP-{;Ot4q0I6!3?_sZ9HM2a9P`&KTB;99{8N!;l%u^UURyA+TL9| zNTuMhnPiI6^@NseD+`LvEm}2y^B)dnY%3I+>Hb2@W)d}YB^7g~3zUtTFlalr9yX&k z$fXV+nUM8dchj7TJ441AE86ZQHNKJLpG9?Z`jgZHUuO4mWVzf8Mm zLhv7e38rRC6;@8jDD&e~&ndlpK)?)VIi?#{2c7mN;?XF&M_e4+s>t z^0?8_cVc2YR;mL7)pa!(ZfxJiu-flD(s(^>b2kPaK5Ri9@cY^C3LIplucJYDx&sAew?&oz9DyT~%bEWepg)62v1 z!uJ`ESCaY{Da(Qpv1x zOe}`s=J~0PvHP6$3bH zKku$J=!TgyL&zEn{pJ^3#-n%i1Z7-j|JH~)`}tzmILDaIK-XrkZJnl@ZLsYsoIMqP zkhupsA?HY+Fzg6AYYQPrx@II3!>_W(p{k2y6w7{Rc-+m~Eg%G!i2TqshqwEe*)s7P z0e^GvO~>6PESPkhL$ywwIO3z5A8U^-&c*{uc189VZVgRp228e!br9Xw+1=(WlCrISCnAu&6wt?8-NI;IhXhD{jXtYtw> zG{`9ou;stpKge*|$2>gTu zZr<64wWyRxrzn;kaI@9hnUvR9n+Me?oRqs!7~M;JRcC?@P%PJ$p;ak5kV7aLm-WI= zM5pdkkorWn1IzYSIy~jzDOm(WOG(bgLP5*!w!5k*TF*o}%^mOfw>Qp?_DAS6Er1?4A$iAE559(582`L@1A*MV02z-t2s;A5#>ne` z&8Lfo&8cioXpE-)c61Vie-y3jCIs=vt~Cknkzyh1P>YEZ_oOMMlOWhU!v#Y*mzO13 zJC#eRb=|p$Rr?x<3PJIjDc`*!c&^m3u34NIP8&|q9{u7t^dtKE7uXLeSwE}E$82mx 
za@ia968|<_@$tWS8dWgrD?gs7v9-4>dfv@w&vhuNyV!%ekveXoeA≀iEVFnuB65 z-6^WYt^dy^*?l`NWueZxBp{>U%WYi1?N3q-f`{UkpR#NZZYVhy^J1d4&{!*XxCmN- z!i)xj#Q+`xFq8RYz%pR)S0|2WW;dktmAiWNHA}#CcjOo^*btd)9Zf!BKQ2zuW#^Ad zcw&4NI^&I4QMY~A`k*}$>2#%O^;Bs*gTul5IunzCDRb3I1&FDNP~*N-8i+Pe2n;l= z4Y`X;y3c`~rl`9@fZ~M_5bY!T0gkLj)}V2^*g}b{Jcj2m?tAr@p(^$-P%*V$kF~Ti zD(!S8&p$o{DZn9`6L~1x3@fjq)2;GZ%7FezJOyd)D25!IS5J!vS7rXWo$t-T-EPLH z)GJC4uS^w%Q3)gfhfpeJqxlw6S0X+6OFK~$$2>VB4*ia)9lk$I{djaa!xr$^0~ZPy zi?sp$SvrKY66P+Z6tD10G^-#cb-(FOg-bB=+tMHwe79oskEV-Ra4kEMEMXpGk6asF z@B3V&?n<$$aq^#vcc`OoW3FRdUj#!(haT##CS2B)xlq*9aqluS$0uT8V$w;yqmK$Q7#P@r@&0Z~$@p&C41Jl@0mJc3 z$0S~t$+earyM}>%El#&rEcr9`E=5*0R7H3gBnC|)TX|W81UUuz=Do^VCw)rQmRfbqp%P9$~;0IRXvTSD2# zmR}pmz(DX!pO|*BK`Tz3izi<%e!bfX%-DKaY)0+iEVlQ!t#Ccnsx}(1i{93z`SkY( zTq`EX?30C(Xz=?r;?(`^IqD0BAiSRq4TP0RfM&}6pM2cK1__30BaJ%n#`_&EZi9+h zjqiE!d0L6_f^Ru^1#AcEh-z-C|z!5}zb4pFRqF!|Yg95{q0vf$kI~K?n`I-oqmJ zgquyUTu@K|MQTQ&3;snAQnP4Z7J=nDvRF4+#ZC7^MIGL+@?T6T)!Y^D8o_Rh{uJ1s zykv>GGJNYz43E1LB#9Al-sn?lgc8iE_x`1$lnAj86Yg6fg?@A8Wl+>6QXO);0n$7O z*K-lHVadYtM}V!HNBVjjY@~>wx%?vD>9~pdscoBi9iJ7+m=)h~jeE|@9u^Pnaja@w zNdc9Y-#yeXD+i86Ik3|OiniZ`=o~Pxi(M>s65n5ssX~$>v>RxS63m!wJ1|$6FE&m* zlX%@aLDj`hib18L)*%Bb#%c!V`sDyEQlOy${DtxDQrE7dTYVMO>?HkUfpm0G2Z_tU z{O-2(i?e1n*+$stVW-{-8V){r`|}?y(Wt>0^8l)zTONE}ckP~@Kis?`Vj}zID5%fN z0ADDnAk|$djgG!f zj^?(Rlo$ri7F$h}=XY`XSoU1{WGQ9~I#3p`I)CGJX^TK=#4v(!43aYw2@*tOkx2^l zT{nhPElkqYhS}NDO-J{E$0O8z4jUMB5QTJ1I+F1wbWjvw8X%Y^_7nKwZ$JANn)uD! 
zc%L+7TkXvUwuKINY_^s!vrI@hf`wg&&^xefJ63wm?{<>(@RZNxB7@ z*ax@m2S~9h4)Om@XxqX6p+g`JowL$oPdPBn3iAYNXzwx52IAIZ&1Fr4wgp8D4^KPO zC*bByv7Ym?T&F-`Zxrt>pjS?{6F+*|aM4OO@Cjyd>5REs4zI)S2?vk)Rr>FS-lRMc zV_SD7;^D>^r=w+_bp}`pa2iS3elq543GGKpsPT9Ce#bcnYYEX?>pnE_Yc6E zLxJYDpbEf@c&o^F?tbRh*-mPKsEyCY$E?S*xw(mYCkUezW;6D^CFA^l{V9|ySorxP z^Jk$hAtA%2bG5+r)7$#X@xl1&yh2sJhYLDWbN(ONHp0#gk0waVjl1eWLYu$jayascd;cj$Kmu~r1>o)@th zh3!|}4kq5*&O~XD=Ql+y*@771?Q)U4x89E%eS`!lKJrfFD9G;VuAQ=Lf2{KMs_ zSlZEoGVIooYvxobO89gvG)jUD)?LQn%NHrJ8*j zmEA#AjV4*)X{w8uDNJins66d+p3po;B&5{(8cZ!ntE$}~o~GRK?5~k#KnOe}l4Gt7 zzI7KYNikB0M;7;s3yAl+y`i6$ z^QiPEw=y_+7tprOMS8>ws9vyji|$r)cfAg0)^JL%p+(Y9duX}&_ zISWaO-Y$exz*=#NyyN!Nuoxt~9Ge9~P!vnGB?X!+xgJSGgaXn^=4dPZ<2 z@@9>9W%TyO-?G#4{B+=vSJ=DDG1~>Y?tKQt?dQ#*jLQ)onbzHLO}}{R3u6h>_ZaVq zKHMo36fz3bThPP_ZnBYDSNq{+;3yPq^#^8@#^fY^vxQ3iOyZvt>&G{ioEZe(-IV^Ok_L^P@eKlFRMe);|ZNkyfm@ZR?x^e?8>OFu-hh-FiiQ17LfNXks&6*WM^0bg=c6j8%wWmEHvh|J-+cm^o8(RL zjt~`rQ_JHsDysOr_GqLIE(XTORc_Qi{Zvxa=Q}-9E3@%L0xbQb8rZ-9!oB`LYtRkX z_{=AdoXYdI|7k2hQv zAN=<>0Quq(k+JYHQOmXHqk4mE8mxPAV_v1~itB?V*K@;WZeF9Bvdj7Huk;Rc=fm*X zAqgr1?wBr+1F2HHep0_=_gmY(5CA}X;221&^}0DOhE^YyL+aiY$!?Zs#oaaYINV9V z7SsA>!Bbs6>Yf#rRK#a2U70toKdiEM?OBkg))i&>Me;uL_qNA=-#;WGP!2Su+PghM z*fD8W#HEFYdBQldNf^x1@ouqvq_<#O!72O#(YDp+?n3YZP8PZYC;7H(4x1PCiZ^`M zK|mm$Ld+3=@o-*tqOHx!^cW!_F@&XO&h^TuU$$T&Y|n;|?7|v%qR#!!PHEg7zz+VY zS8rQ)%Lk=Oh<{7OC0E^&NpmInk_8!`{LA174dpF$VTCo5BXzyFx@76&erMn`Hxg^t zURZ9fv&&`~hvC4+A2aRPg0pbD$|{w>j3Sx=7G|EqN(D)?5KX)DM$p2TEuK7@XAMu2 z8NIO{-kmf!Izrj?vm_jeBC{3{f$JRL)#GE;4dnjxX{Q5Ts1P!2Sqz%$(vNU_y-OS# zsjTL6E$;e^UqpLdqQ752an^a;n@QQTdfh)EvjpHF8*r~F`?(rfmeZOz^NjCdNGMXm z;%m(?Z4!f}C4IGlaTV|5Tp9uhgPK_~^^1H=Cn9@z^PCJrOK#6S$cQ8k`tEeu348iZLj z^c*y`MzY#_7KeMq{S(5d!}3OTZIB=x91mF>m(k;cVo+s(}L z%EaXKSYJhE`fDVt0Tf3!NYE3?*}{x0*qVvKG0-x$-Y z%iDDr{(Vma$Mu7Do3^NmruVSElWV4DxUw&&KikD628ays+bec{do+G;wOEkP-4VDd zUXSs8;X|ihQp$Q0cyk8bbe4R%0%6~Qmd+Hn@-Cv;1cpy$csJ-Q2y-S`M>Hve1Fn1r 
zUPpN@{Y*3PfpYgJn!oLnvaYc=X-H5|(AOe0Oe1N5wjT~Z26yPc!_cZgx;9sdvDDFHRMR{=Yl|5tn2ubf{F@Zp^Rr@W`+h3IdL6> zx;U<>(tr4*I_?a({vCUKvX&`EGg|$5E8ZEwIs8xXHv_>U&73pgXLz|Jau5wVr;wBp zVoHt-`+&g>`>6?eVQEXc=af6~DQ7x!_LGt@}Y@8*Pu6ALp7;_&=%pPiw5 zGs)&XBo<@sQo|;8lO&{OjR{^*p8Np3lwMuAGI4n05R*P@bxn1knH z>rF@10`Jc>iW)-LOMEl8&UHNy9c!-}SUfq`XkJ*Pz>|>P5usC!-ec8t2Mg=C@DKIR zQq?5lec*FZ`(5Co^+Rw?eQl(A&;e1Gtf<)bbm532 zU_t`Sh2UijI)9=WIZ&hYek2#p;^#*PoA4j1!YubEk6sk@(@w5;C%S#~JZ?T)JKK^@ zGLh(+l149{r^NMRGo%kuy`G!0j)Ya4!D%Y?s?l7GMmEwR$5FJiiR7wTp)}mX(WipD zVh<=4-A0s}NLRLh2a#xtm3IYPpirE27uC>7&1$$6P-elHN?2B2%PdEd6Mo&;v7am-D)WUUlU|6WqrU3r|n+haIY3RGacs*?NXpt4mB9(+*#;h zAny5+K}AOV>vf7YIIjyr zf)TN;Zia?6pKeI3RNLP7F)=aO;Pp6Nu^!%##)lv&xtAMUC*mFIFUk1;=`o5PUJg8f z2lj#X4)4*?$5sEJiEMR)UHxn9rUZGx_QE+s({9XseA*f0ulonrkq>|l6*G7=<>aFI z#MqhunJRab&l5NWkfBzl>2Zu^ePY(kNivj`y z;(#ilifGw8HYSFG$>vcMzK)aXLT`C_x!U1SnZ1;eg++dFXlNT)^AbT;s*aAYi3QL< z`?^MMEdHIDQKtF$F%IZ@#S}FicHN59}8ZG*PpP%gVl0yw{oANPQhqCImtx zU9z7QQ;*hfV_REWiw_KW!mM?5ztGarP2)6eq6=X~<9tnFlgPj)a!0%R_irz691|Nm zZ`1{axVU`BY<$o5hVKt?|MRnF>(-L0OGZFoV97e=iEhE0Qzpo=WCEY7rNiu@TU!m%^=f&`R( zYmT+k(K@&C&>lI=C=79`avjd--)C3Vzz0=;_~yMFW_6_xe+P}+B7;M(p4%qy<6R7| zO9o_sP1pyEifL{p-%2U`$l^nVgh5 zuNO}Tq8l7cO#95@fMWq%3o9$W%FlB+th;{ed^T>H1w}>s9R@7myv99lmJ6N*Hi_#> z2nPW`wGe7`XJ_YtJz{!hrr6ry`*2d$Y(s$8zwloEo#kdJn3q&o$hTo$j}Q|xE2I!C zB;)JoEDn<8=cH)n`-`rae1>Nb+g5Jp0zM;VLkaYg<|tjT$d2luhim`LNhy!$*YaIZz#AT` z+YjB330>Ik;+aRsD9`6j_mx%tg#BGzW%5n!njq#G*pffSwG1?}&=|*j9m-%hmynn+ zaJMO4l&4q>aVoY}DebD~DR-> zvNvk;D1IKsXVy5YpMYsyrGk_UPX{gi2ny?&2#Mp1UQI;vsEMM_82s3$y*CNW{6k9K zkeaB3km#se^_1U>%vrU@s&am~n;hBe|ut9WA`;GFAb!z8s+S3%YdsRp~@wF1wk=WcR`TF(s z0}k%@-M#Czy>kNQK!;z88z$6QcQ;+`yqdG=cYipAu*T7x?~%jf)1>LvVFXHATI7K* zhSK@~3%C+u-)!pG%F1Rg2!w}(#Mx}}Z5SAYrA@xsKRk}D8(}E2Jc&8xk(z?_;ZdeT zR(8}c8>u)qD(uZ-aRG9p!PS>%Cm>@VPYmOOcD6sN7Ey(Q->WaLjk=*0$iD-zG=yny8ZV@tnhu zfYXHQk|iq<=+biTtS5_CQo#(VzNgsLzZ{61R8t1y|NW7C5eI6=bCQuUfMq=-ro$1e6&$95t?TVb8Z#e9bn{&uy;|dwWJYi$kLKQx5|VjTQpxXD__UQEd4M1A9IP 
z+sBF9H+uCVet4SL?DHJ+~T*)Pil+189Kn-Gbuk9iVmqdv^`j7RagbM>Q%u=$iCQ9`1^t4PU%Gl>}O_FdD;$s&oH zveQ?H8v~=;M4tKi`C{KCpcO9VCQ5fbD1xQsp4GGu*L0XxD*k-)`DQ7{5%RR@kO8El z$R;j!Su!*qvACRd!fr=;J?5m^4bt4l2o#F0J&*Y#&o*v^(4oLLG#o9iKGz*cJAsw$ z;Ih+d86!HFxU1PUsQ=>GNbv$Cn{j#VoV4TT8Co>0uY>m!WPw+Y2(rA!v zsqY%=b*NTz$Ms7(7nPJG-u4LZv-5pD%L2DR?;8Ai^TQ)1$FFX=lefBuCns{mK@9}_ z_`lS{4v|l4u5o!vTc050bxt3hId5%hFtRIKb=hqi0e7Lk5ViYN$GhTsNMCDj;?}-* z=CqS+Tx>m&;d(&Be)8U)=ZN2={bBjEc=u_}AaVdiT1`dJHbFs@y8-P*38~Y|<1YWS z?a#_?ml3Yr!l7@kpaUreKxqatVV8>ms%h?FrzXG=Fai=SGg#1wOgm_^=(Bm?9%ih@ zxbDAsX>}NNzw_PH#FTlYUP4NvnXP_f>I2l% z94XcRI`nh;tCM zqy0Lsd|fv!-1Shn9BD4NzJe8itU&R`mr(YLo3$!H7MR_h74SbvwA~-j-2uPu^avGq z^}MIOzgc%qBwsPZDv)8R2c}H}S?|!R>@*_MF9Xbg9GsU(b^KSG<3~gdwml9cf*@te z4};oUW#o`~!0p}$SO6K?<-lz&@CoZ-6ym5JskYd_OQ*rh{aqtZGU?#+^=*q?P5b)Z zPzyO>5174+Moni4D?1{b|3t^d`k?i>7e&MV*A#nQVcPh8zVfDDn!=`gXjNO$-#M=1 zC`b2A>(nN0?!5Yf*;_OOS)M_G&JljxDnez!jQ=i)^W1v#Wa%BC=?xty@wl9KW?ThcZ1`*!#@>i0RXstWz^|4#J#HopDi zrqj*0Z*BwhzC6o(Tj@eiqm#6X zBWK}!AD$L7&dS@J!e zV}+Ped;77*9~}k6{2kQYntI`Ptj3dvlZKf1IkUv|n>h~*B~xd03dn0{7#t}cM}Ig< z{lq2-)pl%ck`Tac09#wGte%3GS6?wZ_tQKoV#F~xEFxhm7i%CRq2P3Y+36e9*W0K| zpq`X)aH##7#;sHa^@!jo=fA6p1X48cC>dD5 zyf;1Xbl*m~9^wpFnHJ~dbs6-nSFCvts1Bx=k-A|+LPM|Eg}>FbpEMmRa8sf8Azn(*Rc&H6LB&~FjKfn3#`Xkl{KjP^F zRKLJL2~X`hzxOJrY>?`1Zh_oaSO1C?RNM-F)YVEEn>YWv`4@}?4b;_fTev*mh9a~- zqBA)1f}$8Mk7b_SI!arf*K5CNWHr+BAGul+s367|J{*7ku&~*U%i{-vYuXOn1MY~@ z?=Lf6N}Hm!Y&v0zul~u25gM7o;s|BfXp$h2HUOW@^tm^nRG?&ap)f?izt$OwjV!1V zl{p(uw0D=>wdLd_8Mn)&RA80l<#GLE*4TLl6o@ZQEE3iaEawgpl zwv8@sOK-Mg&;vI%Jr zklJ)e3eq4Ujf9&n=|;Md?rua{kd$r&WTP|)NP{5KB@L3^x%i#)oOA!W&*dM!PxfAG zt~uwLbG~DYcf14Y@laSxb}#r%wSFhso%YdeF~4?$WirvK214S#ZloPP2{}7FZ;5+^ zPfLF952Y|JrKsh|YI$X&%)R?wW;0w0rm58w6>g~q;vsLGZNr!Z0x7I~OzaE4@im|V zXRCEC!fE)o#8#m2#xPiphdc_1N)!a`B~qBg`(^$@6J=2!4}N7 zP2WL8E>{nu!f02H2=AuS06P$@-L)FL$VnLS=MNQS9pdS_rR1G`QxGX>^1gQU;|NVT zNy#=Bmj*cn=Qf^kb%i&YWu^rTq(a4a!Ni|mAT2wRaS%H_5|d7x$TR2R(eT@Yw#Gg; zf5s^a?AzNsf~HVlP2&e_?o;6z=C6EbFpIw*Md5IRn%Xh?s!GY(jMBpVIxXw|Q&|(& 
z=3}=yS<~X)f{1sC>G8P|aoQN}-C#!RdcScM{xBM5M7V%`X|m+I^~nSR%LI_%#ECN9 z=iC?+x~=t&FW5*~gR#S;h|o@gYsm*V-B>qGG8bjiSY&&)=y|T10Ryy0eIbuBSuJvW zKW4?MQe<}8{IWM@iXlhAci(#zOu>2-s-j&(mM?wi zYkjg9#pv9&PsZf{a6LwT#z;#TZtS0qe{b7d_)+l7jzUK!Nhp3wx4xf)Dp9zZ{)85B zj-%Y%ir3i>-8}pbq6{~9dB`Yz_Qg+rXzy1EzJU!rR0;T6Eh3#X;m4t|?zN6RnVA!CEJ71uOb z-?dr2=FmBC~ zX4Z`;P%*&Wg=qH#uaAFBIED?H=srvlYg4|v_X_1Yg;jHl7l4(M{{v8Vo&PtCDNmRX zH!v`eR68uF0P?QYY0=2T3DL0W$rSMM@mD?{4Zqsc#KL^S%na)H`RPLl8ZY>&kBo|! z*Tqi|_nfDLvv)5qIN!g1N!$``v@ifr!&Q*&thya8{dgeKSl+pw5zC zO)Vs(nZJFyVKQ@|MS&n zsnmx?8xF*e9=X3pwr>I#y?Ect(@pyP(GY8b44G*hE?=O;HU-B)FMwP2x>tP@NTBNN z?Tz1jx-Pp>K(NwPiaQtu^qo3sa3xXA$-GO?ClJitqp;pXe=x&E6Zn^ z^6GvV_X%UPmF)D@#>g3L+VzI+0N_{@?7_WpF3&XNEfs4FXvlPbGqS}`M(+OSj$rdT zPdMGm!nfEd#*cY{?kOX8J^cLyz|jeN#^E%Fv=8YGNrg&#Ht0bMh{i@XOs!@Ir17Ps zRxZJ@^h(y|+cR{GsKkwEpSDm-TiN%{4deEgvK-)rJAVc`PVCjmPzSx;pBb+R%i|Xx zSLAI^^-5+AD-0yT73QETDEc0-pvQer{+k)%^>B0}!(mWet@>2G4NRi{+(KnW^B*f| z6#Jxt)CWeBPSmlfpA|?E(7p};bRSB_w03A5eNeeeO~$rC_%7Kme@FKu`S$Wu(B;Q( zICA*hjMo)@kDGz(ClI97K6gr8q-)h!;Q0B;)r$>W$Pc>EtNXqpYKIGkroU@!=ooBL z4HS3|ap#?1w#dmZTqn7Vr$CsP)D10@h{(0QZ#HQ78=0iKsT~cXfQpau%ZNM<_+fkQ zh-{z1slHr>Tjq|>rKnZeYzYS(j1vc#UT-T}3w3n*8;bkCJon4NE0sT{ms&{T-sb5O zTE<>((0YG6yLHO4T>M(p{&inOy!vQUp$;+ zHM~s3o3%NihrR@WXUoh!J)a1=mqG@m&#rW>F&2N=LeV>;qs-Y-cR3l~5)6%bM@q2~D;%Ptm8&GM4|Ss71>Xq=1J;X-x(us5v>>0^g*wtI># z4omc;iro!{d;b-o=AW?j)HIX;7s>`RDPMKH*cx>W_qoQWQ0&T2)`UhVOrJwHYI;nB zz+3SU2edmz&U^x+s5tp^XYRMs6f^#KZY06m-_tLPg&kj#fi}B?_9Q4fs@JP6m3mDD zJlk2uV|i)i{9U6HKZXW37xYwbE7Vu(Gn$|F35*8Hb?TEJ$$9VEcRbYasIVjqcbX)83ogn`vQOL?^r4&sm$FO->&s|Us5BNqnm%pHw&5%FtZF8+Nvp)>zxY=3 zZ5sXUUgBOt8MkwohYUQE<$M=s2YQc>%!Rk0*>{<^t@;l*6Wqa!E{~u5yP(&|hE`-4 zcpe(N-(xm(JkMT$w=)Xs_++F0!~Hq@lxONJlWqM1%3v%^u)t#^#Gv;Wuo+3m&z=ll zdkrOoH7j$bmKuXUyQt*e0GHIz>l`s!U;W)ud>%5~+>1+Z5~zE-IiXt!!8r3q`%3e= z5|UkyQ}79inG17-^QLei66q`8T^fvJ$i*x(ND0b<}dpe(H?<|TPlLCqj9>|6N3?@lmxAu=BCueyt~?rpNQse_g}IB z&=cW$&*SCadp8)|E-P~VV&eT%?dAWudk=JX325bv8X&1|R^2=r=AtyIKXz^0hIU_` zNzm!8SMC>Izl%DK4I 
z86-*$zt_>#J!ux#f^zf(sorsGGh{im+4`PaSib{p_qh*sY8EOcv8=UqDPX@9q@;qs zc6ZB5Ddr!$*2_6K#G|01%DsM_3niTEdYA)R>EiOT(2x50#RXbHZQF~M*4DzjJaKCq zn?gVV0m=y{m<_zoWV*M5&)qc!m=^Wd7}(gU@$vESFxofU+uKPM73@UiTie@aXMcXy zyuJ4FWrpT8G3aJ8KHLlp3=BFpHYMQILJw@64Sx&amh0%~tUxZM-^20%$-mJH^>_y{ zB_%tQ_`b5Xc42DiRMgn0303g~DG7&L>1!2&|$CB z(sU$eXR`y5Pm}?fXf2xnnb2!We4WH))2*GI@pUOdK|zF#i+6fXh^IDx9sxC0qkhY$ ztu5>GZNq=1X}9fymV?EM-8X(^-?dE|2z%~2Iu0@f=VfGM;E9AJcci9{k!`Y9zAL2d+dOiYZ| zL#VZcphFLBfLCscfb*IxAaRq^(#ninzJ2?42!hKE8~!l)_-kgslwR|M9fE%?$zjkl zNA8R0;dI@Zo12?x`S_9HU@C>S*=tRtee%!1tV2YOe{{17s$A0`oh9Go(DH!fU;uUV! zb;X1tjL21KMp^-no*J(cYhPAq55b2PQI5QR7l^Z#mJTHg3^T_A5^zp+`xiyXPA}$0e_oxuUtC6_~AOXNciqzhaqaq z{JdV<8nrM^CT_*PkBUfHX{nlX3ph64AC7`%iV-wZ_;52v6z}yYAobu_5t84~v-VUz zi9h!VT*91kzZImatnc_j3Yz12@$8gc39()k7@FQL|gDUT~N zi>!)@r-bQ^A=tho)*BdP#<^S%vY$#&V6#WR4BQy&$@7EyItXo!o~C2}N6rnxzG>+J zuO5zyl>{orT?|fBoV#5VC>(QZxTjvx{s;qA_z>Htu^)3qFWdA*YW#lk)tt-}6>J@S zFX$*4g*^|76GEh8u9oYV57@E?d@&OhL*XE&-RA2!j{zJj4GnZ;q{%s3{CB-*Jx$rN z^lk`F-nsgx36rCwFQUfz@)`47hap%v3M(3$-B%10Bb-0bQRygo} zb&tOO{$zAgD>b#xN|7;~^%ip)%yTuWNP^#{vwcOfD(0!l_H2o+|85$6&JS5@-YiRR zK9&fV*ssCQ=R6+Kw*2ZW3TS2Z7sFI}_rl)dOkuK;E|Ow;2JP~}z_EE}p=wSpSHW*5hssgm4a zi{-mL26V-`AP~t0PFNsf{5DL1@Z-c9j=Um0ciOTl>g}`0W?}T%R-6bFrsh!$R8ynp zVN;SwOr&R`9e`O@Zx|SByz|Sa5~M(fao( zn{q2V`&9k82080B6m-nEc^&MrB5=bOH{}YLe2N#9)Wtv-L+lBeY9a7R?NjfXk zUy3@uYY&sm(-*+5ii-8Fptkh|Rh6CQ9eXx;W8s$iP{wy(HRSsc(hE$nKr)R%Nvv|n zpSS=1_J|=0*T)n%)VuCEo`YJy-zC>P;RX7@Fk;L9o0*x}Arh0L7tp;EW^32P#KiqX zntFLUwf_L0tzooPUOTKdmz|tk04hd<{x${0qqp2cd-Urp6TGk?q4Q@@ySVhi+y^rl z8CW&Smiro{*sp7kVoxlmh-T{)+w}h zMu4@`huE!De~67c&&jAk(A}&UUb-l&O0=3XHo5?BYx4zKNBBw1(Ada0jcd1ZW+lk0 z-zvqR9dRvDnVRLkTJkB*)F%BH-ZL;fJgG2jw6RF_@2`yO$O2TCo8k?IKF(h2o@l*2 zFfrI_LY$wUUl8Xb@nSnewg^}>|3+|{>QZ!>hyh@})Y@DlGWCfz4VCY4f)7Y02GZ9D zo)?=%>LaWi9GVj>DEvH;f5ECn2UTKdVY@dCHZoh!w~zmOcgqPe@*!DU#jaS6F_97o z%(JGT$QqmZNt==~p{A`0SD)Zjdo@KpQGcRlbM>_Z2~+uGH-!Hlmr->zft%~8!G5nt1I@@QaZx_+!EC9t^r;H>H^%#t)PNBIsrrR_MK8i 
z{_tuxTRvoBpYR(|Dg>#5y!`m`yR#^V(5Y{4|5Ab0C3J&j>g^Eu9}D{AUo9Hg4mM@= zh3e`ShaZ)m1|q`nh)?D8m1636`4pn?p3mye6;%3tfuR{$XR87M0pZH}z&@XDwkSt?^XIYe(xTcSEleoWlI> zN2lZ8SYKa~0~riiZ@u?47+wjEDws zqLC&5r^xeiP~*J$p{&OG!+;_;X>r0O3)N^-5|4=|lgf?9(T8CF&6h9Oo=s#g87?X;+_d2jy%S0N$5JpL z&#$?;**Z1_J%)}`5x*w@S=A93cy9vn8(?8Srf?_JetyPzp59fnt0DodMvx9r6q8~Z z_pP2fL;3LI8ZraU;EINdmu@M^y^t+d7vEyTte)6?kQ*DJS0YnTEOv5CwC=#VwhhSPOm-+pf1yj5C;P;hlF z&Uv~qn3k5t*!a`!{XFmhx7MQiv)&_ia}dCJxi0ok`#P4R>DN8iwhys_@~Er?_lWF) zuNHvWgZZ9t7<|@ZBY=*TP{`pktNiwmU;=%3hr|L0&GJPEvOzbKmLwyze=?f%=`mSP zWP^QD?ubf6OxH4M@tb@k2p7h+438$#)LJa!OkEmetxr)lGx_CyCnlO5Zd&LF{vn{I zqc&}xGq7dViC=C~s>sC%nOu3(e*V$m>BUI?6(D_6H4Qvp)bsI8<8*UpPAh{XG+SEy^|W$=+v7h&wL>UwbaNDn5P^J-48Asr+ep zL7~Fr)r&;m*?0K$J5t(LS$p#RfgzL+3F=xXG}gS9csr|_s|j=OsIv~bOTc)R&Vt?6 z{I_VJSdkc<|KB!>Dod>-;Sv+G>lFv0wv5b#61#Xvmf50P54q-WEts zxS2UvlQK#%tSU-U)00i*vUpS_y1M9SMD)4W2UGTYWg{Y^&NhBk@n_?&5j4N=2?-ke z-E3?SPn&)}#3&l_<1iUwyO*IybpH{L&b#?2P9M zX66yR<1*$vIfV1`m-+W>yXL10vR^o#0irbzr$<`?SDXl~rwr+4vOzf;EC$TxOY;Ft z2#&8{%eo>9jmX=5Mkf8*o-p(a9P#eRUIn#tM4)WRjLwLkhKz-Oi#?2mfQ1L9Xl{u_ zKMKdd#8m%gM;D6yhnO?iwc88LW#6ah4&<5m1+Xe$(QmM<2jr;8iV)akKg zq`pXwNT!MC#d3y>AoTQ}fC2$yjo|#3PYdmuvI;L>Y7CL=6ELE%UrC{2cHJ3TR+_ny zG8=fsFUu~GaE*Q!{Vs-BSy@?OO^q6F#;*}!@YQN4Sn0(ad>z6G8X|QQ(*q{n;l0|7 z+y8^2An2#}K0tD%R_HAe0rvjr=;$>jJ=n}FENVS=;Mhg;4{#S!E2#Xvd(e?4?Li*b zSMUbnJYWWY6YB!lwp3Rz@yh9443&2+D~SsK3G^{RFqdxxIVy;{H{A+g!5vpPAY>tw z8E7zbgyvOZX`$XRc?_20QBI;qkG7q|z!D1SH^X}k!G3}F@Rg)w5P+BDIVgBIIjN!b z3{v6J_~HHcoR*RV8UXgqjg9z`a3*wEOafAA+YD5JQ`6V~R0;MSshdEy;fV0hOf<~8 z4D4;vZdaF$x6$VLV<F zUT`P_UX9zI{|~)3hrE(}{6_p;2ppHXVS;etD;s4g9z{?xCI0C!za)6{aVK^4l+TC# ztZJ&Nd8rT@Svd`$JmB~;0%0+|i%_Fi zUZkX?V5xvhy^T!)(1a(bxRVHEHY(}?Uf$j(vy$MMN^%+`|B$1@U@B*$M=aqD`>*Bgk6Sx{fEj*1pc zEi*!oBaR_HK!o)TyvD%G>*FTxhx-#x28-n7Yc?Vxk#~V#eTOPeq7_wep}fkOJM&}{G6AMkE%iR*)xr-+1c3@pw!hk zgWlH#Xt+c%m+ru6o;*>kbi|&LY;q{7t$l^;>N*))BHP|7um&Y$Tg+q1{njaF4uI*? 
z-VfzMU)_YSb@jc`{4p-X^3h5{3O)v9Z``OJ>(|d$Qk3fx+JhHRh{Cv(36qr$NV_f&9Xrfr6`h zazoj4p54s#jWVTYJCluv_9f})yIXH1QYIf>~Bb34pdHr?9?ixXo`_0 z{~*YZ6rjHj>PFnnm>gE1mYcClvSLm;{VPUu1$pZXI=@*Z z-zr)M;QgZ~6hAA4z+8X~M+c5Kf3S7nM1)(~Ld#Xh6i6q{{njk>M*@-SD8`lKRM1o+ zaazBdpwIh$WuTs8VD^ncntIud@-sU_M(@^)LU)P$-~tqd7n?m}6BFh;;kr3gtM-p= z^Ty1~(tmpdRRq%-F`enYdPNJeDT;*mzubqG+w+$H&eEXC2{cj%vPt`O>^xNdHL1ML zK-S`$U$2{iMrPawSk^?QjveYSFT7%7Yg5H1%3(~XCW!VYda{JKi;v&1__tZo*jRy{ zu0U;8h?|~~Ja)E~!1^dJxeqK%0T{#@!(Cm;Q|ADbI%40E1-n{wo{rD;XCDINLok}Vzr_v%X zvJ629Feh*v?MHp2NB!!fVnUXXd+E6mj@VVzY&KLEs(C%TjO95fJJsxE%R}4wEZ_Ai z2uHLy+Yqdw?NgmgUcgJI(dX|{m=jRgQ#OgVhzYhBW;)ye)O7TWb3?P^BIV-xR(6#C zXBoOf98y2(<0OZcXXN&|rN|l9;3t z+`bxNj*;=7H%Y;%Pv+xLZ`#5ihRxm0$woJvp3)jYzX4tI;Y?<*-N&g<{CVSd`5vlT zH?%DN3N`cX@dv8D`DW+U!Zd@#qHHhu#H6Iviq=1ZwhNOWQp^kO5CI9`HFk5)AXhqU z%%CPilzJ$|Cj0G$6_mSjt4<#49C!nYrp>hbwqmfuWFS+a02W+>e~_J6tjMXB=k}+@ zaT6mg6R`YaPq}t1CP%8ux-zbv( z2#G%le3x%`8NV*DYQ6ow^%<@lItubI<)8bd4QAo7qhcn~(TgY=6MUejVVG@-V~T*q zalNd@CnGlAIX;VGl;Q2c{+u!r_GS6guw(IG>T>EjkoF?J;P=m0y+oWIed1kT5@Xqb z#wQ(m$RK9k@89(nugGrQk7E%mnjj!=MGCv_8Y6K#{m-_9%WzqE-63s%GZG9Vmht&@Tbyrq)b`5nN@+y+`v%SSi zdy6-~G`;k<+3+i*@)IEpvEe_D#Bm0u83-h(-|{sO@q;MY%PBr9h+j~~_#9QB z4B#lNJ>fM-EKb0njDt!NzwYYw%rto2 zbI#*5#CEQ@BYCc3Btg_iK(Y|luuAefoaOPFhbR75a0obqAPYlD>S~OsdME$6AeTZT zYFWWnujp4fF2BZSXg>j^gYF^?y|+z4d!9y1tM`LSn}7-2RiOBJrw->)azM!gEo!L3 zCSrcW-MISBB3l)$rCvk?B)x2L3p222Ecja!hVBQ{=K1`|y$I!Oc-?ugZ(!inHfJR` zmR~)BPzqkg#v$6;QOhm z+h`Z31cG>!{(EY3oBv`&%JXPXRT75q^-y@sABp7J%NM*DN-QcNK%=t=2!V4BaI!zN z0SdN5faI=xrGJ^6b(wFv>fKtuls6IRUq3{E(*6f^6%n9)I*thXKi&Cno zHLYx~ch0uG9|~)E{L+FbLmZf3%tq2;Wx9;X9Rk_AhHHjo{e!Og5iT5Yn}grqwwh#$ zgWEtcunWr1>mVaflWlS|k$eJ<@NihzISdOWw5*IJ-kg4Ey9XTne47D;I{Z<)n& z-rFGDPqEnRYj>fn!&jG*t?;CuN=r43w9@h|#syr$uV@$tlY>ioWOX_t61nzlH?x}* zMdwDJe4{U9XH^3L%;1Sc@4*Aad#HFP5dzxs1%OaVJ**v))1DjtyU)*ZZIU9j0~kg3^<#S4ZE|sIGpYmA;3^ z?wWdmU0nUKvtH;je{-NUVGE89Y5)4pTgQ1umo0z=3YH6oEt}_2T!CVhWvO_u>gX@v 
z)Yi@n9oKECfFK9Ghgial4jk($1<2jv9h+P4kL4SOSVzs6e!&RNYq^ju6q$f;BtoDz{g=1l`!i|k3Z2QNxV+qyR#{P|M;oLJgqKCP3SLUF~krh z5|f47eC~~=!#r~^ejR#ILXbo77XCd-FmL#PjIXyc<||jeV!90x0R{b2hCe~!4A3?lWP~)Y^pp&o9AuTsx^<9 z)o^!i^Q5X4#4<8E4%N3!Dg(VdglWCCDzyV^V4Zj2!j^fWzhJJ=q zHwUR0HylwaZO~cLOBrP+m1%6kREw>Rra=gd=Ls!Z{}YdXF38P&p2dk{1=2EVz?R^P z4*qsd02UA$jwX--pk8L93%ocJA>AHCV;?@Ub$kgzK~QYv!ssr>$T1BgqYR*v$1U5@ z@7xi>l#>!i{6~33L-z6WXJOwz4xqrr)c=YB33MWa2tZ}W@8Cwa5-spSVAcwd1y&Z8 zvH$hKACW|?1waYFL4nZH0_i{?UJQd+r(R4D#;j%@aU_=fI4rDgkf`Ugun+}p`**9k zpeM!TNdC1?GShfrbsMH!;Q=TM&r@#fn4rMErwEd@gZ!{)8{gA%%*w!b-kDMo#&I{1730o3B& zz`++mU4MZp1^7gOc2`2}o(~85FQ5ha&&6(`s)zsiZ(RtVV5fVj|1lPAqRzeT_R*Ii zv?GClpK|=GSrFPJG-J4-y9ops`J|(TG^~NgkOE3@pne&VrLiBor;!InV}ldA7IxJ; z6wrJD=on~^#nNIyr&{TDAa2E83B+VcPnc_8 znh-&dnI6P6k8wYGh`rp^=?aZ`CKSp?&ut+%{Sp#hnq-exTa0U(G z`C~+hkkMy;F9*R7^w7UlEP1Gu7M|<;=gDnA$5`q~`Xhyl|D@{NjzTY_W+Q7f=EkIR z`>of&0}e_wap3BsO(}xwiY2puLvK;Bno!Ws-G#l$efZ>*jP<%=;}Dh+UJhE8?+EJP z=m7U<>PH410gA8^!~oR9Y*;#wHTXl=@Dw*U_pOgM5L~rM7c@6(XM6lczXe3}-UG_7 z;N!=BP`bD+1?Ys8d&8F9$rbA*BO=b506Pkv73+>L6wN`1BOg zJn|4b@>Ai(V8XO)N>x<1xE58I7o%| z$z0ss%UHs%PeX!(H=NM~bSwtkrVIE`(a^S?v#spxL<;ah_J3Zfn4_bkS4{E10ogCm z|0`Hv@fd9ynVx1ENaL7N2t#5Y3GIFUkcJ($g&Mjq5SZ(q=U?I00_+ELn3h{_pgtBb*4Nj^fBd-2;A!vBP)c1L50Rh) z1;Pek&e*zP@qtU0O*mLfU9Q&F))1eL7J9VAD=RCjauyYdZy)8_H1y?*zhGQ9mAbn6 zR&&Oblkx26SP??1l$Ti};zS!Ja%AleEV8Z@%_D#LI? 
zcE2tgQ8e6_+^V9i6ltL7@C)uvO(o{Ku`iuM=@M|fj*X4I-{FU7br8y|5-E22UA;el zx01jz3bE^QSjqTJUY)9+$_ zsmdg7{}la+>*4G$}ueGAKriey5FYRN)W0{27xgGOLYgQsU8U@Dn3`tvxO_8xJAZ%-@* zR3?R}cgL>-I6m5aZAeZ{Ev+(ZPbCoy-$h<81@DJUVN^DYd}eQY!^73-yCeuhfTz8H zmVkLJP}l@2qXNjXd?e1>c5u42e=nbV_zEO$rD2}{leaDB*u03qr^Ojffp;1lmL%F- za7)!{&4Ykp>1O4Qb_p=_Vs&<~JxN?`hk_xvOyV6A1-|tGkunPko^dSzYFgKXW znuFTB*n)BsMFVME+^z!(5zFUiP1xCwO3h%eGl5L%_8EFPRzDUE6&gqz zcxaK$-0}I!soCSy_8k!+9cfHV&DZI?w)0?ul{bwGEN6!9S@X^(6Ng6w>`7Eve(f!y z_Uzlf;{rfNz{~n2NI8!~iQogK!?wQu ztFk6Vyx2>f)79Tj9EIjN{w$|o`SkI+jW4&fn0|qYiA+o+QsTi6x=$QC+I)S-?DfoX zh|mzp{ z70c)48w*hlyrl>Xyw0S2KK*5$KA6DyWwKqAb1;VOeHOMRu|WX|)b7m7paDn9yC|HG zFwe|FDGk(n55f&w3f7ViN4niY%V;KI2)5i#eWLrEM^KNE@%pME{@<2=G^AaNtf>uHx^^Pufg<7u>I=` zPqX8l*PVNMIvtD^CKjyMrGqr$e4D7K!pZ4_1(O}0XQV<0uHCV0ZW*n|FN?L+k+HmM z?NVey%YR8tGSy_tBv z9+@HwKIgeLaB;$N&f#?VoDM0u2Yp{2VkPx z%yG#is1@PVuh%J4mcud|3!vd~R|z({N)4kf`J+Lvc= zo~37N%hX?U--YdNM^9nEzh~}+Ox+nsUktvKx(`Fd12^^(_MX$cnFnic;j%R4G$?Sa znWvRe{-OTiTVw{0KWj|0qbQHtFMvM z0UYzylApSUpSf9A(_&AHf4d@eu87+eCihv6%ktVOUw-e0z^ST@%+fFK!WMMfsg8CC zQAshH-y=6!_s*zJ68p(-)J9w1jqOnAXxIjR6;%htRn^Q)YsjRq>Q0w*fFkGM5rYz)+pW7PgqD%tKF+n#Zn8NWl%cl;Q+g zx50MbYy!+libcVEoa#EcJ0U#a|S?!(kE)t zgD#CRw*2%^bw4Edi2{%9*IYS<)q#_EQ)Z% z&@;F8gCFXv7*y@9(qANcC7NbF1xdYkZ13VX(od^4X}hZXK{w%1F}+#wEPH*O%9jqk zIGHr>$xm1yERnf;_!*0G1KHf{O3B$`n%eSPDj=RLf=9-ZBL-E(pN7#GZ%_$h{`R$? 
zG6rb=&aL(_xEqJ0zi}Lf2IL|^tB8oiw3Na?qRE(GWeiNS39ti>dhE=$2&%h^uWm1gQ5uJnnH9?$F~Lh;HcYT1 zCiZVuI7L7_d`krNVX5}$`L)dvuTERNh(}hZ&Wp1;yC`;j^>YUqONH%TK`3r*48Cnc zz}}nTe!vepI%`lY`upeKqadqrlh&g+gGYo2qyY~J44EO@co>qXv3yQeKeaeV@ zh(B=@EG4y*c+)OwWf_(7H~snMu2LHDc|(CIWntxJy1*0MJPal5K?+?|bf-i}K)9zPLKS{B0fzJ zLu6$)eg$aii+e}}J+O@u0%5b7vmGbT;6egSm2ci5$hvqRNVav;`JYQh>oFKxZ z9HAp1-r{9aP)KXQvdti;Ymr_cMWkTSC8MTaujyugt<|q!kQDjzUt-PAH*!i7?7kes za4DnSG$Kp>C8p;yl4tD+{>{Q0p%!i}k6G$&p!37eU;(M(nwi5%ATl&({rmXMXVn{f z6zgCh>?DH{77o2((&?-#*lr7-u$G@blr65d$e@<%vl<%{Q9jjFPWefcp z-7d!xWjK1aLK4dye7~$EXmXlAd(4s_KQ1ch;39DJg2LmTlG2~gM=w2p1)xFc7G$OW zLyd9q@oB0pFRvO>`Q&2DZexY|s%vVN-LGy5y5CCE^!g@Qh=O7WpXsu&vQY_1%UHaI zeKOH%I@ge;rd_T4(X_rV`1pB=%~l#VVN~=;$leWCNYMvO=MP?VyzAtyhOxONDA*5n z>`7>1*@Al-#HcIL(>hw63iUj8gc&763N1ZAcx|1?%(-I?T`uAHF@ab! zZ_7X2#O~cu#=Z{~z_w1(i@!n@WUML3d^5fOG?xYjHmfzng$1oZd7WFit$(lIaboR5 z+9^Pjso0^tty~0-Hmfb$6-iIO3%_#hliYSknC_=HVN*WhT*MH+KP2MN;lB&&6LoFgQ zGBR!9rmq{@I(+Ae0SCwR-mAlJExY)nMkt@JNZxUC$VAaqdcOK-jUiSua6{xmh3B}uB>Nyt7SgT-_W56#CK!qcwD6!E* zx=*0`H?Dq=&O+C|`9%ohlMdi~v(=<;`RxxZ5V#~Rlu7JE%BSHEfJsJZ0yY$9`!m1$ zA3Y-K{fpO^Z#u5sh>$nu-~IZ*pp`Z3WjYV5E}`1(;*Bd>J`bE*y7&lkTZ>JE@(wL( z{Ma~6kG&T!vjx`(?)?JQ&^U^++%me>!yk~r_~JxP(=oKu^xb9x8ZL_4uU8nwR6G9w zKLqtkctw;^w8s+}JrNmzw9PcBw2%Xk6&BN2S;ML?Ks-9*UB%$ScLtqmZ>JLK0;bx)TroatmeJ;%kdU~ zFP1L5udc6e>yl$k@CH8~wCHkM5-xR)&Ap^)7|mP{%F%kZt^xm&7kC8ut*ARz?ZQNg z@4#Qvex;^+hnqigFw*dzmc7Th%Iu#Gb=ALP;JxbyP!BZ(f(~?U!&X&zX}ZY#KdAiC z>>lm0KX6P^;^f1r6tj=}62W5N<<*kGw-(;I4vrYNNd#{n{#+>6uvWRQn$^m0YkM&} zS6cX~L@!Wqav(awtj6?06n&R1m1A;Zplr{*AM=fS|G)Mh@G?GW1?Mx|fer zRHBA~CKQ^lzwF>z7AjDe5@qvrIiLm*#Mw#ob_!y#J(7QP+e!3{ZxWv9IAsJgK1sd__Z+jy6f{k(bLIc{_oRR(G|P@P3oQ&Iujljz4?h$+ zx?45;SpPQ!w6*(k``!4|N9Xwz>*aByz+_?4|IsVc#tG+BKHhs63>Q{Lp+^kt8+`Gl zkuvrmm%tUAo6*cC7b$w$(%}W7 zG1KvkJmSCd9~pTY8@HV+jAW+dd!G~a%R5+ATlp*4OVt*Abpl1b$1{xmb%CDvglnHmfBna^ z2OaCar2li?H|3JN{p4t~pI>=H$2x1I8b5(OtXu_Xj8y|}RLoL3s4siX`SuefEJ*G0$xzF#;2?_^db8)~X z=;m-kJYN7^`CP5Y(it|%SlE(3Ox64wx>$AQ`}f{jME!WdY*;@e)43iN%7+_;KyGGN 
zJ$_eVSHJ{IZAiqx=ir5JiHSxwNV4^Wl3LHpyBnURDG}jHmm}w?Se=WIKi;Hde<{Jo zC1Kg94|{;Gig(xPMP9BOr`#sF503fHRT!?s1ls!Rb>Ug zNqvbFVji&>(WW%};h&@z&G&@iFWO^|6B+ZklITTjfLv)PDSe!Pj_t?;9cw62N%u_F zPO=pRHB8=Fp>}7cRmLstPueN=I@@q{kZs|Gap4hlhCq72ld&OR8XjucFMgR@my?mO zX5J5*6PkwVc7uNVK~%-k4=ZxXLagKMi29Eq{q?6Ft~8Yy@#obK!(cYWUU1p>6#G@t z+qvAPh4>*R&+?uwKoWROL*IG~11J;bxNs6w5}5n_7P!f%rR+M$ibcSq+q~TT z+dVGV)ai^Iuh_wFYHPW4=`7j|0ljQb?{=frYzL(P?^5v|V#3~oTJye7IKsJW+5CYN zhQZ-M}S-hF@>x%+8 zW>=f_Q>Lv+!(P*Q-FLl5e`iWRTGL=NY}ccBJYZ`OU=#pLELJfB-T*P|5Y_o6Pc7(v zn$D8kQ#3>)vWu1;&ZW;?`%%tq*!EqrkfsyE_mn~OS9HwGN^2YKUgf1Q0zrrlG}Ud+oaQ_mw&C{g~hUVK%cf zb*u`1D#j?b_c7A@7-DCZk@sE53CrM?T_#65-S!qh4R zaeiybDGMJ2``DrkPWhb)#Yv9)9~k->b{zccrovnf4kfb{1v+L3a}>pZZ-qYnp1dx0 zE;X!Ew^6mFX6IbH^Hf6GTQdzoW^``=LJNP+l}(CQ;bC|If#^Y!LnP-*)o$iz+-iYA zfpilvb1po*L_z{uRr@t&xGY@7?wLfS=Ynbz(lOdWMb`q9J-e&&b7+<2$rFTg`-ttA zRDtLNph;69YJTCo-alcH36CmLJYbT7KQ~j%Wa2t-WD(9|)vG-a)Y#>)hzGooBcv(! z8CyAaIp6&DsO-otD@*OiIGvfRYmM2Sf$I~xr%ax&oNUYKHh-+t@yNwSz1ntg>K4@_ zN==qkGWkG87EXXv2g$G*Q6cA=Fwq$=CYp4yyUukZ4HP}?NEK0@sxNw#H8tFTpiUKB zKJOttdulVwBVaL8Bgx`KpOSJIm%>_DQTU?Lk&T62qvnV~K}Pqz3u)z>bUYf`=v&pQ%4&vE;pIJXG5-#AOhuD1CI!-$0($Y5045Mz$pUuOUYohIvpVMkhBx_X*k;3B z++k=mdaGpO1JMlWIjBvseVJV$d65*|=wd?NUZxhF%^Ckzp(^Rr?JVv%ZKsWQIASW$19K$o0f0>Gx`vGW|2cAjPtsL+Rg`|BQBrUZZoZ}<% zFJk`b=wuYsH_Hjm>K1h}Q#hdogsV^5HhzY^hDh$0x6dePVNoPVne=pZb@d|i z&BpF_%EylYF+<;W6sUlVGz@M(^&gqIWIZ4rfy1`JA;q6HK}U8VTZnOo`m5_0WbT&* z&CiyWmL~R4dMk3A$KF})00!Gd;^^BGtC@J#x9(br@sT$kcZ|E>L>zRo-z zsy6=n-AY-rB^tyqvewAHD`ejdvW_eeNLj6uAuJAiiE^~Vy zPmwPM;v4Am{}=gJ0|+5GmY46lqw2jx^_X4V>RNWUvk0$zYl^c?#by5;F*l!tQoBrv zoc_mth@0`g4NQ3Q&zGUGsjFsEMAdA!@g%8o-QDrH@smNOLs+2$}ASCmJ*eE-*gC)o}ZvOF#FgCm)pcw{XLKRD~CYsIL;7CR^$ibbz# zFpr-@6qcAZ9adNRB-Cfw?M6{swyO+~xdm{`9V#Hz+ovrKmg;5KF z{$Do;s5;%2`bzd@ser{wNx?GxGt9Sdp4}gpoPrZVfNajxd)94ey2b=auWxAdtYXxv zfb2bi0QWZ84)%aAuJvi7Nz$Vtv=360plP!85`n0(xLQw&(f&-3UEZi0X)9|iQ`K3` zO-af%Y3cO!Xs4_xa5+em0Sos@0IW>c#sxiFv5Hfy%pp%=|G`>|vkh}m88K!}@L$@7 
zA;cQ&s=VoaEAEw?=pFL1ElcHgOjK3uye7+eG-#mwBCYtNo;DS`l~kz;kKSF(V;;<` z!B@A6xbCGvK&`+g`HY2ZgS)dMiZ^s!CMbiueY2=9$MZq>7MyH_Auhy+&fBJ=KxwQP zBKUk-(GIris`UVlda6F4dSR|J_e%Sl#by41s68|CV+DyyG$^1eCv-h+fO9hl^Us^d zF?=4;&0Sum8d^M0B61cE1YJR{$HOg(zpLSP<)E8H4aI^oTLR9@zPWKv+iX%gN|*Qt z3gV(9xaj+lzUNq9w>}LD?rutIg_##Ymw?1CzoNUk4&Y`5hkP&6e`K=k3=K+V+|vHq zD%j#@AkFkQc>B)> z-Zuo!e^l@~&?|#_hNVYzX3|w5xX2-$v*@wBiRYP|>T0pLXEo+CV{Kt=U=BvZBs!|&#j}83r7SkE zNgt{`EO3-sO9PX54IwF0;mr3}P{>n$gYxXzCVHdr8+S{vkt`Pl+CN?Ib;{;Dk6{!*f#vtlW9kkUW5WK2a z4Es&LzRZ64^4vxjeUu!ka-w~(Y~RdI%tloD7`8AlnXf2%eLV?=<9zn5$h7_O28*)`TtOz||XR z3OnlDM{7D3DrWXWAm439ph$RumGe?LXlIC@Hcf0j3*rZ|qJ6rf8IDufUH_nKayRA7 zdAcs+=WJcZvBZ`m>lcCo*1cVEw@n2EC0@ApLI%S-KjZFYC4*v z<0p?@A9$FL&(f%xS2OijythGG_!cHgiflK;^cwpnE`Ke_U!;&9UHF_zkqA_UyR|_a1t)-tkf?uxA2b{9~c37dJ|B45WaMt7=xh+R`Z{ei>&r-=E`@VRJ?hJ|dm zIV`TyMD53V6z)z@OGylsJkH@blUsr_Y7o!&K9?nquTY_MFCEkU$_n^hvRS)Uw|h2!$R;UIfK zJ75L`N;SQprd2|sI|QiFuy2mMKNc;aNEJ%}!i$Y5{D^+0*iC4^-J&PI@``dAscDU- zRO(;8Tu(_)zVjw5Nj(b1k#NoaXwRN@K=QhH4K>AYSa?IPf^nn&} z<(k!A#u}#exIOZxd?cgOA9mx8D)p?Ygg=ZOot+xHzKGb48L9owKeKh!47m&9WIItv z0t;iS=)Yd$qoie^lOEM@hIOzSbD0SSx3GDf^$G|(K~duBbGEDLmT#TAJLx%`ralB= zeK}#gbNX4L5UMsWfF$kv$wFx3B?>JwuVyq%9##APIJ{YQojEgOwlR47z>umalg;UJ zIQu?>n(GTRn6OEiFG(m*uIu(@r5Z0IFSG;}HPR-56Pnd-{lG%);5(`5otBOgib?uX>M{ zYkJz+ao|9a$8hQV)BNb_sG~lu0VCfqW(>3KSwvL&;bAZ< z)x;=Z5G&$e3UEECj6JsHu;sIR)H*iLsEYCd7DtNO0ODsYaCo6)?mwg!8MiiGk6X=` z4*>o{uSw??+y1P1;ua%%Kmk9rxH5A58YnLQF%U!2edhTo4^h4Tr}Xso%}7~tzNPnH zB>sHsGq2-sZQ}_*pA{*!*bk5(;z+gn78M@gJ){oKtEi*Fj}%X6>8~=Lx&Df|ouP`G z;0w%#!MN^im1}u2AR{$`%E|-4hiY&EmX_1kr`OVX-R2xAN76L~J$qT$;7{lu=8Knk z&4^|xsY%5V80rj%aYhQS4^Hb~lp!nX+ThLTCD5ugZD%Ryq%;2Z7}e#)6_u$JvRD2yk}>*5o$sOn#48Xt z&L@cG1}$8wn~K76RX%5oo$j7}p;9SqUT&TRrx{RCQL*z7i1~pBz5w`7Hv_zFgrij4 zPe5Z+1VGki86eyUG7aDSbRQb;wI#%6>z6!csoCr8JW^?k8?!6gnxP3e{vysnWWX62 z-#mC4Po1$c28(;1n2}k|o>2fiuA+>HJ|rdvR-EC8M~a3+jnEj~sL)n9R$PdI5b z8a$ym=(n8<-h{7lA~Xjl-midD02TfN{_yqS*B8~yg5I041nT`%urXKPD`8IK&0%J` 
z*t0+b;&U{)Xt)Z#7*is*v)U4))_WbnOFdr7rY>6L2mwTI#cxYywh%|vRIrdg;&;pL z8$Y-#8(6CdfTb;fCmBGg+b?P0A`DP<;EUADpJ7vIb=Tn-v)0VPX-L&E1=P22!!Pr1=qRCxqNi@3+6bw^GP8aEl^+juh*sxv@dR_un2Uq zvgPqy-|QA2^xRIqcLtmPALH`sCB;(}_$T2;aa7%dx{(x3u@<+sVxAZ+cC5>X#Pdjs z9sH(HQ!Qz$UwmKF68`?w=vxPj;>p;ZRd0|0ZC!@8eOldnj#^rF3Fw+D!kboEjCJbB z1ds8RJWM#DeHLy>ylBJ2@l5)!6~aZR6wx+#J*jfQk~F-7RbJ$3p9y8ge1RJa=7OnX2e_xP<{7N z-yICYg6l@R_w3mdAZ8jRab@lTk{wO9br6WQH!i(_1;XGd&>?sM4p_sHL=_dN7aTEk#3W|Fk%O| zDvPGJXAKG{h+lU+TjsLX|3i@x3JrF0x_lTT?#iB7l%W{OL$uj%NM(n2vRs`{KAb3M zvW_glcfMsbLmj<elBJ2_$-?CO#tfK0Mk=Yw9xoYYm`Q3_p9yo)Sf~^;$d|rA}g@n*2JsxbBdI6V=Xa5SJR=SDIP?N z<>c%aW!+{6b8_i8)RRp_X?CbM>Hh0kT3ri(5-_O6ZhsNN6xZFW zl4p23@GEXp5j$VhX2Y+b9Bj2%b4Pk+GOUeYFQ650EbTwq^;!!Y8^ky`5_JJ8kXd@T=0w*2s=? z-j`Q<#z(L-!3_`We<96gpXDbeFi()mtK<|vKY1HFnZR)KiuiR?O8yYDar4*0X{_bw zI^r4zzoC4Edm=1r_{VQ6yrzqGPC$G`$mR*g@bY1$U!Ww8CYiN={cEk~AM2VaD(w(b zbLF+^cMDQ{l9PP5HWYckbdL^=+K5c9or~3$HoJh^1Pe;O$tG=z^q<>uhoFAhGt94XXEiB5$t(2pO{)0Y zz(`q=6YN})4|ueK++3zzpn@o+;F3#)iEqM?GiMozGxhrRnb)Ir200rWtHvs)Bh`it z-y5~4(a|;Ft?18}aTwWd#f0Q>pnv6{E{0yQQwlA6bWkQz+~|{WXldH4VHLj^wf?BQ zIP30LAEJ#AR3d>)w7n_Sj*R}@G;`sdNq|4vBea|ZX4M|pbZ3WPVP|8_XMUC0lr>GR zS?xwN8CbYQ?gRv~+q-2k1*`JA^>?SFN*LPSQ{|(U3=k_!I&+qbPGgz!6AsI$`gv!5 z8dIasU0&ed(P(J;(EXX8G^spaYH+%Y1gm#@jX7-La2nfkBZijEbBh>jgySX%I(Cs@ zi*SM1#Lk?aY~eWFT#*@Z*}zV=owJ=f`hmF@G9Fe@IMHrD%@%p)G?cO(pX7^oL@HW0 zDOfpvZ95U4tqy#Cay6TjjD ztpAqbF+YyGZIlY-M0UgXoCWi}VfEmoayh>s6r zuGOIrsvJ6Q-ax&ot|x;@URf!oJjrl><`IP&-ynH4pORMoR5S85$e83=tf?H!R{RYj z4?L8}EIU|&yaYqN3XV)EDUnbO_%T*a0(4(;6$IUGF{%=y`Z(igRx@~9rABr*+OgIY z@m{R5gZ855bi!_A(y^Bvha1g0m;3GC(sc`|M-hKm&|ihl^+2fa0)2wQ$<|KbXaCQJ o>Ph%~Z-y__7y_SUe%=!m^VbJ?^Kt6Ir@%-1mVtVu>Ya%H0a(wflK=n! 
literal 0 HcmV?d00001 diff --git a/website/docs/assets/houdini_usd_stage.png b/website/docs/assets/houdini_usd_stage.png new file mode 100644 index 0000000000000000000000000000000000000000..cba942860483d4bb7e01fbb553354bc0a3af812d GIT binary patch literal 762416 zcmY(qXE7?GNk2UiW(L@_g3SRv{y%Cw}zk5t*9mJN-wG2uvS6ddvYJ z`1eLGGw$%;4aZksMd4BP7~{^r37(6*mi(hfbtxpbcKHA1MBb`qzK5T9^-$ZR7I!jzJ2{MrcOj(hyN`e8|*Rh$Cr@$fUly5UBK(j(8Grt_(6WFkk=+S z^l4R_GdAE-&0jljV%ex1@$_M{`*wPSyjPR$GsI@chQ?9_E*(k^py=P^+h2KTIBd4L z-WQu6lx=^8S22-gFLUor7lkXX28{C(rkxKGulM6{$O`34g!a=|?coEMiMtfRiJq6X z)t23f2U~qiFLDV55Br0Jn|Iy~2Ej^j1l^z`CDYZn)_OJ~V2joW)MRIVDRHm`8eP|( zcJI;cb@{iUc^b3;MplDMri`E0TXWl`Flg<~?u^W~$+U~*)TQ;s(Om4ca~ry)F})-3j0o(3RLp(<(Dq)C)zLHo`H#QU1njVsfYO3?3SA z`9l3~?f#6Ja`?MRbM`gLu-S4djSH-({=DJN_#0kR?Tvz`{s)LOB&?0hu;44&)X=or zyer^(qItv3dhi|Xy{3XWh0_r>?;2&WL-9LK3y?NVe}IAdiu$HupficECw zkn`wtnS71A$c+1oww6k#oXzR(=&p1~IS+cvXspVxJ`3CV%F;i*06vH0#Oh>bAj@yv zVTvXW?By4J23#6MDvJBo)W6BjHT1TMFeb=3kE-`Z21VKyZN_!s<@}6cJhO=A;^use zpNGl*PYxxTCJ6~WI`smczg4fz182HLP_BmXAf}<>A`xL`H}yH=0={@ql5_{?IYgBZE z3wQ-5MW$gzirY4PvkwuAWq+o)BTBm7RCJQ1r|VnR6kiwlv7^|E-ajSQiEp0fDV2P0 zW6)}VN)D$-kp7vZ|4`=BFRqbJEIQo*a{Zl)sF3MH*}Gx#>vVp1c}_`c`Zy%<&lWd6 z|4esR(Ku52dDF(kfBbb9Pqx6?u-P|G=ev+5 z736$O@{o;Do(ilt`Jl$=gPE5y;*#&KJS3$k8YA*N$@I|rgNEX%r<+%MX?1;57gyr3 zi7h!i1&c7Tx@6$KOq`V+Fwok_n_8IG;mR9#U#kA3tvH_9th6>i;jaVMsO*UK-Z0YE zIv@XREcBybMjSK|r`}L{8Z{|Th&@gln9A6;m~8tmmwz(CEzsynql(i_!(u0&HJ2*A zSZVBFsv|_d$hN?sUFY>d?zWLZuta);vNg)yYCTBaM?{84w$i@>5p1DDe<3Y;jARlY@|oS+u?jm@-fy23*X~> zW#tNYag|1w_pHXYK0j5y`kvgU0Qk+>m;vhgP*$O5cmH);dR+o}+-u1?H1`}O3=h|E zl^!Wy!KIh)37u!e14~0FCe9;ao1PbZ6fXg69_wlTYIoXUa0g}I7~hZMRG5D_8Mk4n zzqEF={YO`UKQ*DXRzv=?LTJS8tPN>@7w{;OBDBZ){v zY(E|jJ9aPESyzH{<2t*vl-E}mvnbrFVHB=osB`I&7^O^|ZoA>}4}H{cZLs%s5r%Xq zl@Aah$(=%Lz)Rxv6Pz{TS`$ORIvUoX*HpD)Awj$Ej`cn0x;s!KF%|EDlB&>6Wu%M)-jQ(m>S zW9lne7l3qAn(;!Mn(jQ2?OE-va0r&J*Q>KN+F)o|G&VHlE)FO$$k1itp%L>iap0Oc 
zH0z#g&=&BD(OyKCYnwNlo`q-e@y;uVx}=8w=0oJ&p?(r0E*;AxtN9l1;-k&N<$U9mRh#Qfdx%c!NK}5hxGOP(&(yiKI9MPv(9jkt`f=9{jY{7BJYJ3DGEqzO z##2mj*LJ34;246QWZBZ#f!j#~bJ>iRw=r%D+x7b(sypuotj*K!4S>`ju`fv^cDF6P zn!zSPsp;QdF=(NXPF5K7;i9ekVj1x+AVSM7D+&Aq9tLwUIM>6@N{Ph~`y8^JdH0mYOhfD+UCo`1L3Km)$ z_8&%mUpU~RB`0RX+&0Dz49&0C`9$*3Z+G>~38U&GCq;#wq4ra2^I1#aq;)$EuuPsW z@Ik3Om;B%qcCHKOM68(l!piFMr|2^c^E4Z2;f2f!@~CI>q#NhcCR{&CpUpk(q+55* zE0XaQDD_F#E(Mo-K=(1qd&jdDS87g_e6}^PGiWL*7WG$A93^a03m!md=r~A~3$z!? zzI#Nt&RDiz-B+%jIbr!TK3uIdu1MC%47Vu|7I68@X1cCAgf&-qBd-EnCYt2uXh@)d zH^~aYvt`dzI;D;=nYLN@H7{z!zj>DJJw0;)$F@l@UNO3%luMUq&a20&*ewwJ=PJ;2 z6pmKWaT|yuYF>3euCD#{KM-y3zA&oEJ%W4lyyjQX#rxopl$uXqYIe$8v|xBXKTZ2i zbNG_=s0w`$g~014MlT(^+RsB>8k zja$C*XL1KnxLasD)f$UX7Z}f;y3uSb*E96(IIQv;oImmaPoT1D9`B&d&P*mApt+Lnn@1^K0oj*cj0XkD!)={ zx=TEFGB20GY!1Nk( zpUD9!=Gm3*uA?eQ!az_GPt}`6&j|Z&0jrs8-IWM!A4YprNvo*c+QNXgvSLa4_;u})G`%o^94(>aVwq(CVv>aj;+ zB1|Y!PFc-Z$Yr8;e-x_b6W32snzH&j`vB)V`Fa9dEUo>ge*^PVR`E~C(_mr?~yZ;dcaZws_oQEC-$mL+z-2!JdMgPbD1mOJzia*%)B7^0Z>tk-6yvJ5pI%IR-Q^4$#wOGG24&xA6w1>%6V0n5_HC&4HL)u;AAP@{y!@&y z?Wvl3OM8algRZlWaGfi+ozyzq|6i#IUs^u?Nk5cQHmG`L`J8v~uNsF8^X5kK&?uLd zy=2qhAp7>VL!E(urpa1|P|+-N#s5CqT@4+2&a-qhd0vKb?6&VM((~`#dRB%j(jwD} zy=f7^rZuU3QpvyR$$6V_?Kx*mPGoiJXa+BQ%qeEW zr~q}*oTuevxmzlw+g|1{?EYKxjcc?an%Fw7G0AW%9$Q^i8;dRdHoEYv?0u=5Q66WX zje)5)*cHJXqoBS2RF?KTB0~RqUcg4zCUNN1XiYM;VL7pb>RpWJFkaHjnGaRRX>`eX z%3(8gg&`<@rlej7DA%d0GHKauL4^DLXtTd?Gw}xO(n4Q2A5MxFaOb%)$eAq$vOarP z*@ljO8|h(r+y)h82~1sS_1(IRxH6{n1rK1O z-+FLJJ2ibhUU`}vJK(Dv)@#bCV|Ouq_9%+=6UB>o|BH{ZP(6S>9J`)D@zVS1U%#aL&xj_Odb= zcu`C7%vZ`yN{Tk|qK^mo%6;cH7(-0B9zoS~Bf&H(A%aLA%QgK^XI zLZ|;a7%TZe8AZKO-f?w6RG6MC?IE~Sh4vjIy|8QgqG4Vpq_W%_kC1nhw9z;$7D1yx z+Q@<>mPFDQp5qh;SX@q2!s?%B0>_^iru(zzDh)gLF&YLLJJdenC1f9M$g-l;dfz9e zE3K2%_Gippb3z&72C~wPp%b2<+h?6ku21>;b*`o&Twcl*5ze*qx>5!Zx=26eu=Xy9 zq*G-=e+QLl&nv&(ch&u1JetrDY#}4ibaVfD%lu`dibFlha^VaP*m^R$v&s*Y{dVwcax7+?ijSRM#w@J8@}z;Ay%L)OEl1es zZKMc`W{QZ8V&3XL*Ie%Z|NsT&_9qm|Q;QzEKZT7Laz<~VS_S>LG 
zU|;&v%#Q=B#n&EZ1>54}7v9XV*R|1eaR%+)G5&y2>K7^8d{K&E()_8AvQHNssfE3R zP)8YgN8b3e>gs_iLx--}oVPrxC$g39G-2;WC-N;TUt_ly{(MI=ZSvoQhiUjR;eCO#r< zFLb)EFZX0vLTBa4GCIHY`>Cwr0s{s1C~||p^7Q>7wv}DB3svct?P6^TSIj-7EG4#m zIIz76=b|8Y33?R@{A`ym)t`Q$oJe23BL4=(B;M5)mPu_vrtyj3NBD}d(d-RYQH&w> z7e8$mmt!IbMVI1l!yNmX+UX}6PgqBkfCx6dZ z-#nu}3j-f5{31II02pNYwRP=S3|#!vHMnlB#bjn=k0bP(e~p>NxTJU0VEa^ACaQZU ze5SN%YoX7F6$8^#Ce|fvP8=D?u%5rm$H>Ds+8G)Q5Mqxb?JD=nAjIW5=0pbCl?i;( zLt6@4tF|2*6z?t`9CO*lCAB&vcZ*4`6CSo=Ga>0ic)py>36_xvK6aSEKYnT2Q_WIQZPg&39Oxei0WnJ3H&q*jZXPmb{+j zKV4ligsfP}>8w5M`X{b6S2~#=05nS#|_r5%X8D?QtmvW?t#-@^wHe>CR{hPIen|8op;qj z5rmlTaO7TDvp&Mt4&g6<-2#lv&}>&8F%6U2y|kS7v2Ls^pXB@b_EQv`2k&_`&bLu> zokryfgGihz;VPU2qqW~)Y)549g26-j%w>Du%Awy(+z8G99a>aqus>T*r=+T2*i`g9oX4hTQ zzMg{K7L(X2Frf!K!~^UUCcOyLOf!1$M@iPy+hQ(_3XEpTi@YIb!Repjas!Ii*EwvR zZrp^Y9*yPd?P(Oqg51p>TWNnGRcNYxjHCr$yEmSYV~L7`GZ{_+-?H%Y^GQ<%w~h(I zl4uku^9brXq_Vim9U_##y$c&sE+&fnlH+BNpZEv$Ec>;F@hJ|QirF#1Xu{Y*%PvVV zp#&yP9mD2fXIcD8e_EPGplP5&M)Cn^A-bO>wgUDFy>Cc!!Eg{h`TI9bbiCvFm{U?* zb04LsOAG}Lr85y~Qhs8NiX39A)dbdZH6uAiFeuE8!^!5TPehHGcDl7ndEw*@_P>Wl z0@rNiYd{^jy0_Jn#k_HwlL*zm@8*;9AIm_KXSQfXxo!BdwQ(DKviOIPK9&+?6z5PDEt9XG>|CODcGED~_k3{=zH9EGGK7VZ2!(3ti+qZWiB4+VM z{p0gjq&K-k}$l?Jg33Q{qKR&ZE}nu zw8Abi(GrpdEVX=fzl&OjLf@(;2aM=leY|c5toVRSQcz`UBb&6sc2Ayvx$x#qsXW)) z`B*&345I@?xRe-X5il*YD#6Pkyi!L(Om4g)1M+% zP?Gemx*tmb#z$F{rN4Q;O0PKu)o+o_@ydYA{!#4G_X|VCk;^MPrtDvv&D!_HtDhvv zwlCNEx{Z)wGTW!+6j3QmcOp$x99HW1;o3d`nr6aCmbelO9#sSUYV9>}Wh$}=kZNeR zcUHqQLRWj&Wi{gT4;0a{>$hSxX2;^RIhS0?c_-7|oVg2rXnYaejX1OUX}SSEO$`t2 zweD5-{QGxvtge|F#;*RB4S&Ly`{~i!7ht8Gltcr^Kbb znQ2LcOSour#tC1kps3z$iGGuQ!&+lD(V^+tuT4Qw-36$rLi>x-`vZh5P|}VP=2S1- z!x^RAfyrfb<8vb=jgL3UG3GRA*F~CxD;kl_MKn0L-*w-ZBh2f@E|Sw@D$6ell_Gy} z+`cbSf8_#We=~*G(zt2YYS~D_%~J3rdb!NV*yK%lqf}o5thcS#)L15ds3Cm})B#_i ztlI7x-@`$&gS>Bo!1Ky%kf4wGa zpXP);K?AFg3*ToLF}D!_T+Jm-AXG>;yw|v;(Hzh_Zmz?y{bVDN`^>avc#!ZsCCBtk zJ~#;l$8=WQPBXD_`^sdRL{wOwtUo|Pknns9b6mm5mC&b+V zEL-#)T-vTYC=N*l$t2p`;C* 
zz4=SALzqUH291!Q~*we|P^+=1gHy4cpML3DyYeyyRudy_#`pSlL#gfu72% zeq{H8rb0_tb1A|Cmg|d8vUFS*e7n!e_i0uxxLuf!)|fK9xge3CA=`h_6vf4FJ9^#f z2YhOP?BD(~y{x#%iQ?nY<(>Br$vobWIW}!u=7REt?gdYc+ zK(+nj5#Jz%JIECu*rAVjp)v2bv`d5AkAM#{D?VM5@*3=3rF63TBeGKT7G^=%m|Sp_ zj@`2LiW@gB;cS2J&x5#MHSY!Zg_U|>_ZpSqAgSD$VGA86Y9sPnF^{|Aua6pNw$M_T zsn6CI-{6qtGqIct!tOQ@1B=G|Tf5!(;sf!j0mzf~g&S@>mZx3omS7v>Yasg*?H*7~ zCUL+zrhZLT5{3C5T$EAWVt5}?>@*B-q}EX4`TL&lKf5@&dvbm5mYU;O8=_qI%Ts)@ z&gh5u?152l*m1Gs9Eq7mVtFK-xrv3!DhtWfewXP{4yMWz7{@zp6Ju`u%zgX4FiY^6c4$bCw7U^^ zb6-O9(U}S^Lz>1x=m%Ih3!GWJ!K)%a@$0J%m7a{Mxi>XFg#98E(K-&G!rjPiT}l~! z#CnPGj4gFo`qqD*{Y@6%VqjWIarv00!sZv^wj&eHE2A|)EQ^6S^o2uWIfujCo3~#+ z@-t$2DnnX~0_4HHb4fXMdf|AoP{F<$G8PK<7zv>;m$?%+ifkFEw4478%x9>HaRA{} zAOQE-G-sn1J9uU-+|zVZjZW7tt4idjnZfbZ1t5So!m-E5aA*bdPUsk-tpMb?#!k@N zW5^MT}c^u{(jWLE>(QZO@l!QmFO}<9w{gyQCXwJUT!Do+KzaG8NbR@Iw(hnRelRI8iVB9>g{m!1K zK}4fKO9R#SIu{wcK0oZI;w6Q!gEUt`ECnI}Iv|^J1gA)b<#t6Q)Q+dZlj)qxoDrZ$ z=~x)4kdyYKNx6bzU;d}jlx5HDO7?)KA~+zDCFq&4ek|EJKfQNVn9@00-)tI|$gby6 z2oKrVN~XVG`k|fZH!e&Rh zZa0d@OrW6nMfNgK$kcI+O9y_L+A@%Fmn?U1{9_lciXEaSm!4=4O(BD z&;nL-llQ9WIO(R~&(K7o>Ie8ao(uRlk3dVSwC9W)WoHaUvp>walQzYMN!7fU8oK0c zaB3eEW=YHNi8kZGO9pufjJX!%G)*qqC$oNBr5np7h4AR+xuj}a?ncS2ewAj}s(}q( zfA7Ca4iP;Gd`3zCLx~G0Bsn95qtykt@Qss@ek;eOy^8W+2YWkz#)?Hh@Imq>l*qCY18>#)0Zjc+&q!nuFw5Q&LxBvlL4bIOVp4#mT_LLnjK7 zXqisIKGqTHQ{&f}NK<%_dBfMFIEO&=Z}+C9d@0g*BhEo%5~0z_f6nY9*%uOJtJxu^ zSGiAsajE;6mV^=V=#Q>FPhPG`c94YopL{9&c?75tCsI|Lw+n^XMER1l1i#4LdlTdH zgU>*}Y;uAyDYr*hb-Il8{Qgc_yRRiRSc8wxOusQW_78d;u5}_YEKEN;t3i`^PzfQ4 z8vxF`tH(v83x(({>fwlMdQNKuxJmjuOC&Nv?5)y&Bwta1WB^p*yz{Q)LJX;jd=OpQ zkzS`5l*{;yMVORtz@Y4zhO0aL+to>r3XasuZt+(k%>{U8*_n=U2(-Krj{r8M$eq~g1A+Ps5Dn_~Z*-b{w zstp5_**B|zyCUwQyjlG7us)8?A~FUoF$ah<;4Y^+W9i{XiK$Q!WPgrNDo+ z;ljQ}>GFGDd)(2+uim8Z#Gqh!OxulC5v+Bs=PAaWCbxo*ceMQWp!#s0rLq74Bhp?V zL5v%oSL_LUa7e<~z8TLS{E13`=R&IonznW3Ui4^HEB=N~I38LM*pg8 zC3K)MFoTLKFg%VB2ey0zNGdVlF$!($AgrKOh+Wz85R++|n1I0(GrVevz?a&Kr-^z4 zO&HzLN`BYk0V*EFsN+&>@5@0vJp-3HQvHZ&qk<&k!KU(~yT=U-U{O3j^=FJQk#PPa 
zuBZUx%(l4i)NM%UsTgU3=)c$#9opPDB+lD_D;QgYbYllq!uA2stbYGhRn zqz!Ni>(SqSEi7H5{|F;=nR}`y0YKIFCB>3C(EI9*D2{Y;RoXvw%Q4fwdZNxc0g+FU zqr`h#ODymtj#7}ft&pxn!yOR@HD=%@=e!=tOGfMOpLrzsRKI^>`dyCNYJD7}2QJiv z06@d6jR$S;HjzSS~pOCw=(%u}HnyFDyZ;+a0YFGIUo|H^MS z770T&6t7kv8$c1$2EJcJnrnc!>H-%WAaiQsfKE*6)377(6gBhh^mm)*R;TjuUjQNZ zLlkXeabf$sQUNvI;L10e?K2`aJ0CU$v$=r&a@D)1h~bQX<+(X?c=Ui^*@$_z)K>x{ zgKJurMZ{s$b~J&B!8%@q5um%eY>YUfz8Xc`rhM+qiZW&_o67e`F%~iG0?ifDOr}IQ z?ljl^ zLBH)szDT!Celd{i=^5hzTaUYA(q^Qcmf4v!!y%(G=exCYzTI|@T3L1)1OUe#y?H}8 z;`njl{lH1)OOKAF(+FmkvfUSJL(JMZucO#I{=7Ug_1rr8it4}gCfG@KcX-wu$NwhB zz?@p19k<`2MRuQ985(zbK+Q z4sLlIyOQ2giex4WO9r2@h33lv0>vRIDfh=gdm|LIhCtPCzZ#w1(+$@$%n!e`^A3Kd z)p{(x_BX^`xZ!jvaT&5O_5TO88V;OgGr z5$Bxb+!G`h+*fT~Wg(Q=;8E@#z2PK2X;t%4r z7;jJ(lGi1Qzk1R63#C`-SmlIsMc#oekBCpH(+4l*(5yc-dx$7A+W*`vNbEV zY@%urOjul$g~@+vCAd?9)qVD)Yc#dIs;t?Ihb3F>%Jc1&-m>R>t!`~eveP;Eybpgd zPqV5hpxh+Gqq7I9{=Fd4k?&C9IfHgp%lFl>F#&mF)mKQKrNqPcNViI$diujNuyK&v zhru>_7v9oEP;)Mub{5njsuvssJ46pU()%`*br}(BAi&~1E7O#qfDqAa1T(BXXF*@e z&TW#yvriq|A4n|q6;*9FpGNEDM$&$7G9&vqL9)Tx_8XD&2Ms4-R2iiVFw2oUf$6tU zdQ*wu!f2J<)?;}F9S@}8N#moi_<_OdKN#~56K8KVaCr&w_y80FavAy&&4!{;&l{y< zxv6|L7VnDAXD%h08u*09Lh8{RWA$SrMeu>F=VUA{GFW9YH!h|qisz6U>g2MY%iIaqdaMQLPi3>oW1vCX zdw!(!2tWDdIZPa4$b_mbydNNZQZ@~x0XW7FDRv%p`|9lvxXS)BXYdIRuKx5&Ki{ku zL=0=5)>U*s$tpi9pRxH;teI+nf0j)i6y^yluKoK_fO=@ZR*=SzC?ui_B&au+hHcjW zX{P4DgwvmRZ_xQ9ATcIR#Lr+5hK+ZE_~Y*@L;M?w>}VQ0RIX6<;>Ctn7H$^==KLz2 z;b0|d4-z?ZeeuPI2`Lt@@s4ZJK$x8IoiF`x6$hTY7X@A7DZ%lWg;4XKKCEty> zRLwn1&U?PQ^n&fJ>+5~j^n~V-y9uq07m5B!{ONI=Q!3cnZM98B6;&H5FB_P-l2ijRu?^aO=6S@2Psbj0$YGZE1#w?TybTYjy#N z^=gZIiTkNp2J|W;hB)sGrS8~DS+v6`--){ns06N#+~;oU9gXm{e@PdRaF%Ye@OTzQ=S7Vp?soINzU3&TafHgW$*n_s7}A?}Mm zxeH}?%HcQBrvh;)Q=tr>yDk4Y=j(!>D7;}i@jVn;zoRf?1JtwJ>cqD{@uGlF+atJ_ zeLGWxKKTOpw|tlf0$myK^OfboBfcjQUq(!RAo>q0x5|cHa9?+NR1^d^_dWgMKCIE) z*7(;K$VtEYKx*S*)X9)9S?5+6a==!lB>uB3{eNtw|7#`vt5~~&Y_nC-kfargb=Jcz z+{fA(5rCWWx(6u+$RhB~l{Sa=C6=||xadGW)OnZtcANb1-Js5YCpalt#Z|J&KEUB* 
zvp;~rvuC8jG~XNBMc1|;e&jxR40HM=2Pv$N8(6~j~wRS0+mOOhDroZlnP~IJ1`NHIkQ1s=S z^m_HuZRgM46G^p>uEGHyZgDlq>x69P?hGffu!rTg12@|>6uijgE+s8>wDo&>D? z&%OhHO~A5gjk&R(JXLAyqM=N=F}+TeXcpR^yBO_ZX6FABWV+cB=uv6zpOm)exS6=3 zCh4${XQlO}gd`Ej2FdtAhVZfm!^tFP^9jnzB}u3)$_}TKm6|#;v=;g{latJ~F2(AB z^z-U9E^rbWFjrO`>JMyGJFk(n{qJ~gK%`=q@i7zTOw}cLL0Lm%R_`uD!^y&M`D8TK zC|)AR_Q2`G@bg7*V10ol!eSF4WNd17)*+H!SJ((_q6!|xrLFmu=7i?e@DmAm7k4%r zyZ_gBOxi`Yq|M5`^5JOFr_Qj^zSHwpz~HwwITGYMlgoypI2StHF$it}Kj@-SIN5+_xVa+4u6mC- zU5L(o>62%AE}9KnywFf`YqnvRDVKQo&sC#&7fAatI7#hxL^GY)88M0+CNCV85En{0-f&5RMXh%*yNS95LMdE#YFvdh}YHL zeP(XZj);pbAs$1AsZ+F5XaS)sAPld&^hZK4{*d1aV|IzuD&g!D*CKc2b-rI3fPd!1*P5^(H1@l@PB%5+Hdb_1--A zUpDuW|CBL_9O4V=Bc;^d4o_&?O{DU~{5E3ZqS49{6fV?J;s*f?nk#PnQQXMp-*BHu zmei_hnsfpj1B*+5M}V;QQTaPF=oBoeN=tEhose+9EO+3_t6O`Vp?B`SJ!1-)FdO@a zbEqL{Le)ul`U#8Cof&6Ky0g)ow)-;w4ylYWEhnbH1+l#AM!0!dttTmrcbo}X_#P=q zeCjoV?ZQYHE?n-?<4oc+T}9a^SACLgtl>o)3VRVq7XOM`C5jx?joys{389313cIq1 zFGb?L;`u;)YY~X@n?WvKU$$b2te?hvRxu5tYM;L$%31Ce*Lq*M_Lt{8yfAb&c-zd1 zMs%=BUny>nZGL2o(n0nDZ;q2M-9b+1*_E$;F*M4V4Uvc!BH|ah!v}NaiRaT|QAtXp@JT8C<?H|4$jf3C{nnDsI!fKUQs(SyaB){=P6 z!X15U8r?OlJ`5!tHp*eAOk#WU(c_`F4J4^d`wRALw$J`X^~;r%3+2?3gahBll4(9w zT?_agt>Cxqa@cMyN7(hJbxv*VLr}4w9b*toS{}iUrd`@!X7O~OB#L(X6R;~#qwo|O z+Q@5bjK6aMqTlq%jXP%Qqg!56d?^P)r*g83T|ylGsPTEVaOwxd9hsMdM;-e-M@p*T zZA&saZ8oG-cb-Ged$S(s47&=B)Qo5jSOd2!39&=Dzg*UPWqc1=MZ9$rnbFvn5EBgHhy)t>fh?4O+VK3%7+ z_W_vc>~mmuafiC|UC5}2emtiM5+T`waH@0*-Q(_nl8?yown&L0j5@6kOgoHyw&sCy zsK>pxPpNz5hTo(%y>IbiN^hOU%zxK!wu;d@ajV2Y<$dLYTo%c8`CrsONg6cZA#2N^ zlyz@;!)1A;VD{81n_F(mvobeI3ZnDrK%7Ab;}lSGK^nqT8Hmjf6iYoVsvG@oZQ2+$ zBv?A`Dmjj}Dt|)HM%RQR$1>{LQ^L&8XO5g|Sg-|AuQ7ZMNPy9yA2F>3IQ!d(53pBO zB)Y=4+aFMm;oIAlq-Z(aPKKniS4ITdsmI3?lwVP9-f`CmUk&krkyisx#HRxdmUV=~ z%YsqPl69f2_RF4g9C^uWLWQLPo4r#W3otZ6CmjPw)R9Cfhcr%f#&SQP?ZCabR3f=M zH|t-E!%`T62h9(V1B%7zkdMT^@}&#h6W0v}CFd64p@|NB6?Z27lb3RMncdf;^O_-oio-5m9-O@L{I+w|k`d>fijO#v*XMjtfi0R` zPD;pM?r~;4Hc=q?3qCkSv=2Y&Uw9C-V*B)o9dX~sjz6f|p4;E5`^un!AhB&kkZ>C% 
zW8+Nsa7;%In<6g0%f(fnd?l{@NHXSoiLAm0+_j?Yy?uEe!WGU%gb)WRZE0ev$gt@4BonB_iXu_g;Gs7bP#r$cH9KN_fv&^GycooD8dH-CQ7V&=#??~R>>M}CiL|}3Dzhr-zN?|JfAx8d~ zdtj=1Na<%Z>*L6$QRc$ZL05Jh$l~DE!nSo^LSB_n#1|GePb1+z79zB3;+n`BbF|5M z$>D@*uOn090(i|l#Bw>`f68TlU#{KF3iSdhI1IZqh`EVEteAvg^L>`}$;K))As>PM zML(2gm?B;WR;CC95@@LqOO>C2PSaxILZ&ibrSEi^JhS2b8`>hHU<4@w=+O$1B~3Y@ z-n^n;i>^Rk)x4p8!}c`5bF1h^U1B`<{IKc~jUy(vb-U0dmY{o1q;q=IV1I_b+?5Y3H-je-Krg!UyTCqQ08MKO5lWC9}?#gMnJ=`w2)u?oE1r~xmA zUFLeHI*21zCR+P(*{kms!Tak5=EUx=mW zfhN-@D~mFofy)rNOQtQ=1LJ;z^%zG`Fsi6tx2X}GW!n-eaF`gZ$*>V`#=r_i(fSg$ z)6D*hSj~r&y8SR7&aULiFp|}Das`@m+BC{?v|_C+US}*nXsTaloYL`pdU3@eQCGLt zXJUxb`|%Qpy{m!UYN%HZVq zrrekCh>*a$Ix;nQMqU{5WW~^m0kiryZ0ij&yvI+21&qJpUZ__r$N(d1Kcg}1R!RzM zB($xpXJ^4Nsy42}Z)&P(v->+Rn(4YkX|0Pzf#d`~|qD2NlDFU9RBAW zNfR{=dDRcCCZc_j&0rg_%P47M_0t0reBP%c>1@!qk93WhzE!TN)I5IS#6^8-(x=x!Lz!qg)@4VsW?qoxYy)+Q1p^!@9hV;pXB zj|uRe+j_WlZA=pku=va4rW{9oylfm&0%=4DH*)krM3Q#FrEL5J1ry?t%2cSr!e(T` z1y8ov)ox^K;^tB(@Y-LNYpENHMd+vO?S_BfdED@hueee1=1aZ?sN68kQ_USGssM4& z;{Sd28|D#Hh6poge4UnJ!s5)7l9#2t+1|dUvYGwA+=@N_jpgaD6S?y!+8>(edMv)2 zO7Ld$Bp(q=^5L{14{@yVrldqO`y-h0;F$imvXzMQA3hg=Eu7)KQSF4cG{ zrj=|oJLP32a_cc!BIoJvdPJ1p3chVRmVfU?9Vmoal9jbwu&>SC;S$D6_bYNxm9$v*49V1Y@K>UV`D z=C0Q{V+XH)-@q!mdb-KOz6KVFp7!1JtrVk0_s*VNf=jZ-6BgmG8m;@RtfSmF26YNU43+nomM*3mLfb-n*>zNj3fzDNY z%@uL`@c_fWbi%!4Z%f2Y%VL7{8SIEJcAPL*!gnD2DlTGvMTnn}QzyUi;bP_9BIt6v zC*^KFWnpJ(dtrdG-+}6?%i7GUVJq~yVV{~mBI`GuJM;7ZmkkSDV?Dpk>^#X>Rh;ks zbT=O%%|f~ox1CSnnYYzdQxLRYCY#G}(W@gpCO3J!@~Ak5DIxThaEFgq>P+dXfER zE*Cr#bed1r*){1MyBn8Q#iH)K3A;)A#bd6p3ms(iG{}mNcI9Ar_&8r#>a210v4?$h zZ;x@gPqJr~Qhy7k#uo>GCYQBNv6pxn_Y{bvUkf1>jA%mZRw3^zeV&RP55MJ`7q z;-tPa(p{L9)O4FHPwj)khOHnOj$XTQQ#@7`ug<)|A@5%w3U8herubXM7vV*&jH+o6 ztl9S0+Rp)u9KXT!FB8}1DtL(H%XFqD~-i!h7+ue6(SHBDvXY^~)I`t#)(@j*uujq`%#58Y1b<# ztT$eRE{I&7=A!gG-LnD+JtRsZx?K@A47U*Op&@gnQcq~`yS=oXB`X?9_Rg5*9I<08 z=r{4@W?{9(0tP&`Pj&X6*&)=2CA*6Kpg={< zWDatU9w6OdNMn&`F?F;3ggE6v8Mp=+cVH-Z; zIfXjge`lrx+1f7pT;n1!N7kL?e#5=0O3<8}L`#72o_!uBmnl-+Jo9`ctcVFROXqp= 
z#CVd1)6gZf#*rAGhITR_U#4o*YD437_stm>^?t|at($fQWj zzv41lN$h))_C3{qtr}(;a$UFUS&zs4@rb~SIWNV$m3VpIA+X%dd#45c%u8CWkj~ZZKQ0x|G=kaS zTEi<~fLT=_(@Pn`MaSurB0C?cM1Da?8%i=2A|B8qbOSTg8 z>fH=BCwW!Qu+D1PG5j%;KyNftYOAO3CxV>&NA&2GpZZnyB4WFE%RL0`=fq^J?&{|H z#5B13a@QbW#$cgs^{N{5k4dH}=wa-!1@$NAUWNL6J_5I*``mHu+VOg24YhLJV2*6K z7Jleot4)F5Z(TI+gwXQAuh-E1s{7bSx$A7YMCuByqhh8U^-JSE-2Tgo>i~nr?!;-OjpPvW zi1*3e-&bC2);aM9V*x$isHuyoQqwP?t7ellGUl4|?nQ*-0E zGR(zvTT{isXL}#?GA#ElVG}iG8A#HPwQiBk7dxkMnFmy&w(gwg`$K>c?|#M(GC#Ix z{y;DDYFlZZfq=Zou#cyGP`%{C@4sHZO|o1{9J*3J4-(pW)Gqr%l`SqoW6f(yHiUb@ z%)xjss3~qA)_cX(_IC^62RiH`R$1TA$o97R8!`1o!|vVRM5YQEu&BER^>2^$7@p+8O6JK zC$%GPWAWF(&4_Duuu+88gg>Y0;HT}4V>i2jweKJ69>LL1m4y?A1`mDKy+^;BrfUF1 zgG=K2oCb$yn`If}wEJ20d7U-S-<8`hp6<2laEtX;SNd_o#GT&R%O5%GMXz=qIrR67 z;fHln#E3*Xw}8Qwu#vvjPnT{>nr%Nhy-hFo@V)a}!8ep{A#4~|S?^8Xrq19l<<{BL z%(Q(|zF_`jyP*P*mMgz(cHU|`f$23A6uPAqf*=-V8>O^IU(X=mDd-=;&r16l{)$np z`!T1iZJo9)c71zEn(*FcFWGRZkEB*a-d>JamLgcet4qBYO;n}a}AQM)J>e!h_$a6^NV7>ge? zrP?~szohAy$|1Q3z?{Jg0)5s*B2wu%Uh`+qtGPD@YpS^ZQJbBlqD+Cse=}iwMB~g+}$0Ul&3fbqUjBoE>e7ogX z?kG6n+hXRaUd7rE-=^Hthk1EKWQeZ6M_u98XZu#J{>r9h2O_Y~5_ z9{bM(!bD5i7heOE#}U6@jYd$!QZ*Q_oA2k-LRgHDJ*KzJ z)vwc;54#?H)R7){*}Aj1^R_A%?#Cst-+@RLH*^F zSWPMgctV*J;58}b1}3^XBbGDIt}eBGNJ@o3rC_2eY9di?>19n=OPHw0z2rtFLw|s_ zdx3UEtF^NJr%F5GHxI^xB$ia%ZP~jGg{|wG1?8>N5jEx4=USq_w<`-hXo^HD8a2Y< z<*w0d3_#1vj>V~Q+DD&}ji0lEYW_vKxN59F&Hi2ZNtGUE_b=0gYjEBa?w_^QkYM5S z`mVHY-_1u9nUs$;BCzrpwl{^Qi^V)hiKXn*npkJ~`U<|yJ?9y&r-&e__w=0w>{hv` zwNQA*)jziWJaaY$QUg%9?hyi?prvl|)ZX{h-CeZPbyXu9-VDqtH{2zYJ z=20Bm%=mBtwqPcS&&qV)>dyQc9m0|U8Z~d;C7iJzH9I0c;-lyn zH}7)!YkZ(swY|t?;>R9LEjW{Pb#sAkjwmv>1EO}7d)uD=VU8m-X+bM!K#f%IxZoT5 zY>fn*`@g&o`KaEYTM`sn;5u$#SIr|b=^yDf-)uj_1!tgwXsRl>HD5b<4615_=}vrMY=O~o*+BNusxljn{-Er zgBtHhQ~`F>(v8a!a(rX(MjRCz9CMKrjGI5B@$gRr0e$QS-pc}m%x^qHJY(R|r@L`; zTx&HKr!t}a|9kEAsgx}<{8=J(_eU(-O2c=O+*9ID_T?}3#UI(Hw;h{b65tcrE}L=J zw{*_618XiF7eqjSkMYIt?9pe}%RA7Pj@|8yc!}(*N*ggCvU$KixBJB@MoX$V=EVlu zTsHxxH2Uy>GVSXVvF$z#?vySe(O3szar6s6 
zk>#Z{Jj|r_39{}Dq$B{-;+KPkyV@vm zq`CeqLSfSTXp<#)PR(o2^5>R9)kkiZ*E~y_VP0BSuQ(X*A$C>-MsrxUnHY4-`v*ia&s@y0)HGe zn!ZZPsw3iGm{eIaO+d5Ge*@0cPpn=8?hm0)N#QkURWXiLF6~k0l>E`2(6-Z&PHBR| zo$EIYeF+-d8m*_C2GahqdSPl{SZJ$w*x^0qO1oS@*jkhK4h-}d8*G=$B?r6Cr8%W5VZ<`FY z=`QT%nsXHuZArg2Ybsl!SJBdY7tDp=iQOnZ`MNb7x$NjY1~7$x{JQ^$O;h;2u~14O z2E2IG;(}(*yHpjgYFCn$IOq7{-jhLoh9bm3bwI?xMV8ij@boHFU(GlGhba>rB|UmX zb9*b&ooI+roZ!M-Dg#O+Gd+cee#MhTS39M*iSSI~D-g0)^q&pSLu84)h?SA4>b^ z9%_UqpK%|PKT{1s=W3GH%qjZ-bN33b&lhr!OIB9;WZ`9R;5Z}q(WFiN$_PA3um*%t z$dW$jk5kH${(XNJ(NyAz+xiAK?@{x1)b4|@o_RRG;Kox)c4lFxhZL+k2e+;_m6io| z!>D?`!UmPTCaV`$=+WXas z$}>OC5vE`A?6C^V>HE`w9e(2b`e=4X(}3yxfM|QzVkb)$LXKUndZp|v)(iK5J0NQ9 z#co&`-(rj{a9hgyNh>(;k_y-)!6l^IAG$qKeZF23ezi1E)=y}Ue2Y?*G~SmNuphsc zO?5ZkTGxma<1NdK-X_3Etm`?I4?nH&pk$D$CwZ{wPgW^7h==$!^sgw+;9h_+_pAIS zD6zyUMTe1*iy@S{?>)q;q~t8!3^r_Zg<$Kz*!+2t7-G&Lv zcr`#kkF4jEP`QgT37rWomjPga_6>m+-|HAVIw1%Ok_Jizk(-Y4zej^ljX@zEBX=Ry zA$Rs@BNSM)XmX-3$rqH6M$Ny-y|Llvu$WJUTRo$E=jG zU+UJf-FyHbN_OZ3f4ZW&g-(TNH^Sm;g|bE6#l}x{ANhFvj(*=-NiqP;oNn52Ng{G8SNW6)hn(B3EQ8fVn3bHk4G<6ZR^i5k|^Pv+O3 zu7w}B*Dg&_!%N8Hwy|EIWA0&zY3i8RXACi}4<=rZAcc=Y(~LAVXeB2LiB2})9U zMk%1_YVkS_&PRk?Z}8a@*lhO9--eNMUwaPY&fdvu~Bl1L*wkSE9gGq zabgVOJ1&GJe-d!w-*J?H14?#A>{Bh(=iI5anTq6Y2}67B9Y1+0UbP$;B6QYpRfAws z)CI9i{>Lo?y`5)t@QEE9)Z3-xs}CWKK{qE4rX{%UVX#=={;L@S^P^37OUqv5fePX) zs^sx9N30k6=~JK7Rhh<}VQ4=>>qTGE-6M{* zsNo!hT(upZlOhi1Rq3K}v}xZZ(!kDCKaPhoP`FT7sE>DhY^|M3c=%0pODw zi3!M8vrmoQHv73RSE6dQD5gAKF=r31*B`|D1+M{yD3)+^;wCQMyPm(O*?fF=pzL!*eNL|Fu4AioG)5uW2^fbbUgvlc?e9{l;<8FiukWr$|Zr z!g^ZrPi@9`#bMtXgoU^lj+UJCEfueTdw#l{CIk(jxE#Tau9TjZiW)pG_kH<~UPled z%L`XX2e{4cy^J$^8Pz;F)sqimF>^WVMVMcBa=Xzsz+*SUq zp)z2k&8T7CN^SI_<_xa00m>tXIGok9fxW)Lo*n$U0^VrRYInF5QPH$i+q(Y+j4Q3o z67bo9bL}~EEF#VHG}XrrfXi0RnO|^IfFQKk01Ln>&so%DrYw|E#ZAzc(X-Rd%GtTX zK?j70SsnA{{_t}fKHoyu&?iQxN0~P4>Da5iwN4ee@<+r=y5y&eNA|kBhe$27;cxNo za$$DQKsm`(4|M&f6bzi-n;G9P75HX6^%ZCbHZCZLTOQF)5y774KkU+d$=H=h<+w+c zx)4__)pPfD;u{6Z5JELQTDaR|wo^DcCLla2xWrQ%T$VUPU(PAnL$OVD5uoRZ9;7O0 
zSMCs=ZZa75r`F@+wlRAu!;-cyfOOmry#1FwI^kv2w$oyTO6EsW?df-lOCiH{T%j}q z#-qLM(kD?v$HwBGaOpO{C4S`d zO;Ig&P~3>s5$sAI43fKevWCevFc-|)9(PkdeKQAl;!c*!WMKV$ns8`91Mig*vH()3DONx;|OVlY;^WIuRo zvT2>)I-X)VA)MC;W~{1cUq2|n|FVy{>8;{n{cs(Qi%y0SAK!DVlg6He!|bNGwOrwQ zmLaVPvsr^i1zgZAnLQ>Wyw%Tcq8lkT6#en*-EN|Bh^o#2=a3s;gu1PLK9vV5b+)D& zqOBj~xI63~(&CbH&hlF6^zBNURFnel1i53+!Jdt&H}zP$P7Li3QJkez=#N95SwqE5 zoO*woDV3#h<4~sk{=#h1ba|DL&Dh0dJCCEyKVrT1B;m6``Ao->Y9NK?N}+SLm)WN+ zmBh;@5sEa}PZEanUSCEV5B&>!^JUn3C|qPRqd~ zd%!yv+WiDA0N8xBk?yIvBoD{kd6mpBca+)BX58DZ;i|B$S!RFverB%SW<`8IYadkW zWD)4X$THmUgMC(lzF4~pu*_>!YJ+}9+8k?CQ1$~drNcl?=dMx%0Dr_Q7Uw+!k&}Jy zfen2_VG3bA)OzqQ2R9xMILY082R&%J^PCIrrs0AI%Ar4L~(J0G`K+y{nvwT^hMuNRN89diuQg*rzNtIy1dfBW-B#IBo_lw!T4$D z;^%mm-FqGmxoPGS_W&dhqLm+9__b5XCLvc0r-$=vS~Ltv*p_uKGo5;#)F|17ulJHD z+IEIHge7r)1e#pcI6IgW>DQ~({bO@>qy4>U*T*>?(6-SAnJ6^^##T=0NwhWT?I`7e z2~uyg#qItiQd}{^D$qL{I}X^oewHIQb(I3&xarkmElQQ|i)7XI`|JXV(z@r3i=z2| z{_Hsrzl^~?xDvq8_0G&Zxr|DlRK5Sp;4N0rI(}?;(>S)Evf)kiU3W3kZ{g@AM%41{ z!|s71Wxq(p+#u@^XEitNCq2%^=c?&ZsU%@(nPek;czwrP#G4wM zwxw_U5-aLP25wKrrHb^sWtnIPbLTfP))LStA5jo^)KD8tP$iyg_MENX-01cUgN23 zaSx@_P_}-}BZUKN#jTBH=`{VenaFlXG{5J49QZrolrNxRe467+AOJ6yfBV|yowPO` z#93u7wQ6ve7WW)xUx``gqi+M9&#j7p(+Px;iX3usZ(q6OXBU|39k)WlY-Pe#$E(ZU z2>LAt#jt%w))?t^L{IBEj&TsGQ$rJCDdO3NH81e%Eg<0*ZpZDr=~|*ZZ)dvOBgsN6 zA2QE9b*WN|L>8<^6bSmTf2P;B*$LCSN@9iBv80P~&qQ^08%b_56}+K7(2KC}sw$9x zxHF$}?9x`z3oy8NB!Ej;KG+ZT?)z%lV~=j}oe1gsLH{z*%aWdVduLw>K;%H9b2C9S zGkEoi0GbpJ9)4m49RWR%L1YdP`Yvlm(t4?Sg0?H&h|+BEBe9ht6&9QPRc|@OhviDg zPc1$v$xhGmNNB6dt`i|qItPn&5fO{j41lf96h1L=~;6k=4IVhVD( zo@eqQ3H{kClNEZ2yv+x>_@z4COU1^xxG zwa;z5#<077nRB4A$NHrvqQzPWFq8UaihGYXprKKxFJpoN$x7SrMx4EhS=HoLC0{TP zs+uFTsrqZ>GtcL@h~&T8Fsj*v6T#xz3!nPlB(zxw^hf)VaNv;H$Os#C1(o${;>9l49YS)^ilpy!DqxG^A$}enMf04 zPxf7bR{p|;p3!UhX-GB@kNwcof-4>S;V)1EOr3WApYN?Pti&p#&Rg6uX14dHtsu zD*wk@gpW3xBT8XZnm|@#jZ=8EwAPoWNSl4_QA}RUp(sWtJA*JHJ&tW$GT@GXH zGCMAD$HE`8LF-Jd->NVv&m%>1cmeYhavl3t*6{gzdOmM4(~RwUuRsiUS33?`@hs{} 
zrg=sY4j@Nod24{Ll=D%AhKa9Go$)Tewc((N9y!=$JJI%+u^eyvVYc&CgaK~?^xRYL zMpn#zsodUG0Cv9^&3%x*QgP_zTrNE>^U+2K=wmaToPA8@xBeKsvUS)IYRk`Iz#U*Q zWxbVd_FfS)Uj?FGQoC|t`?>#BFMz!x#8VHxqB~jrr}#)H zjy(13PyR0VVxH@Srf916PvezI);QLY4QP{%(FC-?tUiD6_#ghP9%``noBHK>g3e>* z;`b_jhflA>N_oe5I5Wka1!4TJoIMxZ_6m=73z|66C!~`;^p60xW=3D|F4;BBa5fgT zq|AyqmDP~OGrt7C-C$yzQlYt8YU}~4w%#%CmU&Svnu^zWU5hbreOGR_%hxcm1Z@0L zxBxSWHR(N|?6}m<>W)uRV^?5CA4~Ss1O-%#!#ar)rOVvTsD9|Cf9q!q^mGYK*el{r z%`GPnWlZJ@{~#LIVOO_43{|uHq`*7;?d&YvG9X+ZBC>A9Ktv?)i4^O_?w<;J53%z7 zjQIZJw^uy2_ZG&0og1SFOPnfQUK5R@K`!n<86}v!@7alKP}9kF8Tj`m9aj1V4Blr* z$!K`CgvvHOZLQ#rR@SU7x&)1*LkOpG#4OVtRj;@?c-iVHr=?t&+ZXjZ#;tmw&+X+@ zFDUr+0K((rE>kw|%=J6l7r&4cG@kbq4gh71EC?iCh9?Iq>BOxp^L)_z5V;Q-&c*(2 zU^?O81|%*wmpcP2l8WV`Cuy}*&J*P0I&@1o<~`(-TN0LB0KCUb|7L1S%^9PoYzsxk zeDDvGnz|j1VFQSbn{~TNC-W@Pdf$3>14%j~U#+RPlrdhp{b3kQAa%hvF!tzkpe=5l z?~5PDhO-#{ z#UFC8{jW!I8mN{h&!g5tCqELCl-%*7iuC4m()~X%m|QmO#<)~eKFD$rTRbcNu}gkE zZy=B`F0d{-u}KzqhF%ke?H&+^wn%YD=BflA{np4CIPW)-e3g-2 z|88s>kA5F_>w=Dg@$aSxK5(Z4n4h?d5Br8-{=l4h_KO-P6!z}<2B8L-&mFyN56c<4 z#bw~vwOxv$I}F!Xx^JglYB(aSA%4cnoTz+oivss5S&n=mA`Y;su!0 zTuzL?F<-Uq^zqWteyo%=cm}e^#N_o66-Hi2Kf2q<{IqA>=0dYf5N+@5JiY;KS}665 zV;YWu4O!mGii8;z`C3c%(1i0A%4Vzs@56%D#ea#u&l20hNaw;HcXk}PN?=Hp_dxCt zY%k%MLd1#ZMLQo3;Hxwb032!-2$7;IkD~cg3H$b6f_^$+$`5Z4NE}OJ)#EHx^m9qTnoGpayr|5MBSMKEJp&b zZGtR#@0Hh1frpYOflWaJ|4T*ewZywk{?f?Wq=ny#T!| zP!!CVdugHJlZkJC%KYhY`c^UdmWqids5TVRK5J)!8@V}K&8kE`B#^E@O7#1 zY#FdyrfC8>G_(`&*ml309b4=`fX3%)K0U{AXJEhXppW&ccVCb>6&zPRQsN}_dcG*x zCPHsgHzP2#jgNXa8DO4hukg;gdf`gyEV$G>;g2}?1S))S!5 z6mMM-9mD_vioB=Zr)Q9-Cah#{>DhR3@dzxkmA5gy$P;Q(P=z^@(pQ&tGIX>HU*?7C zTraBu=kZl-+ri(tIvO04yapP$byJgc**0@EhnGZZ&Pp1Y*DETjMTnx#VnO+(ULMmL zfCuez`}YFxwKO(l)+7B)WpDjkHOie*VILWjJ>u3#VOFHCC%s^Hslt$CwTOy{NSaUh zRPoa~v<8qEKwS}mugP-;m4sPL>kY#^QY`-XGr?fy?DtL1dT~R^+sbs%d{w2RZfEs> z23w6srWpdp`$Js{RK|o5tLbopdehfkUDzc&3{` zP|Qqn%?;HLmT*by%~skqK+`-~!ZlvSZt-r5CQk)ifOz`2{G9tA!RYfMhVlJ;1o|Vl 
zNa?`gdhB%>@SwH$#3+MM!j5E7JSJbq!@Vy>iNK*u)EBCp^1C5NfD*m$gkxUm>0nSJ;o9*rh7TC=v0>?Bn*u% zL?=>EcS{cz-%W2wj&kcUot~GU7oQa*zA~yI%XYMPTqPqOZe!Mc$m~J96W4}QJ=3Wu z&@!xA{8&`jsj<|NzwjNh)>rtE!+B-P{zytGYWJC>G<|b?aOVu(5xPLpdHJT97_O3G zs_`zCa_otIc!-ch7)eBti^P(Ac2SxOq{|K=wJ|@auEmloV46VB z1h9`gxF1E^_2G-nOYqG+JLBu{NXGm|Kxzhg)Q7|}wS9xg8DN=! z8~zy}_jYJGWHjM9c=d6443&Aa<@&(S2sxx@+=)7&;TryPcl~!~^V{eju7wlG2YS? zVeLc46}r-Yr{jZgC%oiJdjv1e4;SkF7rDsjXR3!ruDH+*{G^+u{ymw^@U{W*H@$b zQ-xc%*kdoUz7a>A-ibz&)Avwy1)9O*2VYJM69A4;^iS5UvNk3j3F5xI2uyb}rN)Yl zo>C61#sL=bQ-Fm}9zFPJCkuu-0)80q=8`nVX|x3nVRNosG_6@U->#n2IYawM4)3aD zbI)6W$P*q--=(%}h_nCAkHgI--lGOrejSP<|KcVko5i2VCtY?A#U2eb>CmHc9JsV# z!E`TEfsM1LQlv4<={Ll%uwXTaLLctuA0kxJ=lS=+GI#|kza`7{m(?8ZxN~gOD2M!Q#B; zv|9&kurMmLsZU`$<4> z4D#2xQp7|fH)6Se+Bga!o9$mWwOHIa4fG$9+BeJ;sSF)#h6!zQ6_&m+(3|_WAB|L( z6~`V}f_M}n_nYcQZDH|ml9g^fuJrWDB`fxov7A%M)5}cDKCaGIe42Z%@??EZA_e`O zF;A;J>9Mf^8Oz@0cEysbol9W~Wi(dr+t`WUkp<@_eb+Jt9SLT}R#x!oQ926GNC~hs z>N&$$$3tZ-nYu?|x{KIij#s`0mpyDCPZ^N`pU{P|_trL61Oo~l{}IgD2pk1*N~d&D z(M%-nCe&}_>0*i<%2(_M!$fN=41gW9Q@8j=QC~{15~I-2a%kzEVqY{#e`;P=pBE%e z&15Rg!^%9txo5CA{UO6myEcmv{ejV#>f#1pWdS*tJzBwhQzMUHi_Z&uv)lr%F74U+ z3+e&7p}73ubk}1`EZIpJs`Nk}dIgq}DuIkxl_a7~R|8nT17++0_SHb2W^k0yumbL) z49AtJiS1kxP{#=187W33m>ol#vGrkfv7A3(QYZb}zRNe)Qa#&qMjHu|7Hx5V6A2cYknS zsjK^vK4%_Zpm6m0=rN zI^fhWy6QTUqUC>)ZNFhPLq!p;CuX@w%4`)U^lB{f7`fkV({U4OsmB`Yh#VQjnedMc&OvtJ3Nij0t(-@H# zNZ7SRKxAE6MkXrvKWIray`=kc9OC?PM0q7g-*mtjtTU6oBFV9+%-&6n$#dhN(2Vg! 
zG|WusuRlY3A0-Mk>pbpt6$imAO>tp^PQlw+(r9erZ0@Ad3d4$Kt9%<2EBs0p;QE6s zM4dQYznc|ftO(4M6jn>XXpWf*3!wxUfhI8dX=Pl<`J`)}!vwB_Hid(RR*~5^i_WNi z=Y^W~ttx!+-@%Vi^zD>+3u1Cgv3wOdWFB9jj7!(Zv3t*p9KtB{JbYtXkmUqe6hFQf zv(_YgIGM*M!>!f<7j&qkOXECJ0rRWB#u?jDS7@_(1S`S7>7J>BkS z+9CLJ7dugyr*wQ2UPB4(6M_3SL8EtCB-`qOS9-kuGu(k&=BtB$*|R)>&%2Zb47%SB z$u?1eoCg&<+P{vHNAULZBY&2xIsrSbl-in2nWi_dM-h-U|Df#N9ohfLR1<$fNFH{L zNjH{k2o=7$f4M%5uwWitF9ey^UeLoyoa#+o4_iHO9K@D62*Yj6<|WK{vq1^`q8-8S zbt9!ZSN@HIA(jD$Q{SxK6+Wv@I}f)PI_w)mx%*$m1P*7rZMG>iksp_AFZc1*KOy+N z=W(TdJ68PWqF4NO?1H(l7MjE_nVH@+wY&thNXzrxe=GgRM-cjscJ39Hl z=B~-yC{jN)IfEJpmJkQLhO(Hh>WNb~@ICv*yONYYDsP^S98KTea7_ilRBk{98JAx| zBky1oCXRH4{5-GctM#fT_gd?B{j>v0NI0lh3trHYkr$;I+4~RRj5Jmyb2S=tJ8`)uD1+6aLyHE*aOcc(cojBUj6L?T(&qSN$4~@*8N%;@J(yUr4;^sq&fFxNdYltfJ7a( zfZkIwXFK+WQmV4_X0K)GZTV^LOjjy5M`lOb+d`TX>?dtWLA9Gj-bS>Wd_b7yLFTS! zRDUGrCr1X*9@)Hwag3<0?pnb}PirnDrv^q&{t;krh`SrbNJ|_oZ=YZ2p|inR&O=?) z17ld!9{(z2tJECye6UYcDo_suk1`Svkaby2pj0tE0~nr3_xM5OHcc4TL)#PM9T8?W zSjC`+y|r`%PlX5n1`v=^mL01lumDn2WN-llX=#C^+r&0=Ag}z^gRqGN`Y{&;zwL1k zpK{w)3MHNRCDj)c6pFD;cZMKtagt(W^5sy5!epbb2)ADyY{&19wf2lE&4Pyi%_I@) zSfo1qQlQ=JEIln71f#kg=&bK{{jq3vVAn5lWmK(8Z`CK1b0(#N_9?VHt zZx|MZO!7HGc_2S!*6?psCI~F3dv<9Z4bszRe^pm!(-rN43mAp_1eEWJiSe2A|T%!>8VC5ip4nJg35nvQU>bR739kC}zrW{m4cp!c1Cg_+yoDpsI zEvy0~1n;ha_!>ulR-8bfxUg=UB(@PSuXq$cmy~>i5q|Ah}8!sZLs{x}9KeqG#WcK$u( zw%Q?FV7(VcJ?23T1?Z2*c0FQ`;Zq~m^V;Xyw*`5A%XY~b{cCyFMQsB2nK*90;5iXS zY(F56-T6&G>sGiVYik*fL*S@m@I!aHj`JGrmT-n#GTQ@dC5O*1eT37(rt}#ET!hR6S2! 
zGQSWs-`u2HhB5f%DF&}SX?=^G-4@@x=fK?A`DZ~`aMqQYTEZ8siCugvzIsNSFi^PK z6tL7J`h)Q)tSLc&j&zY}K6TE)A{%-%7g*JSI&xmi2qPQEft((#JscfVMRxXo6SqYl zZtZqqzjw2Yx{Ea8AQs>UFSCFV;z2tnB07vef&u%#ql!sBeUI`*zFmKJEk62S>-};X z8x*9RQ@_1yq?FO@4K_)xjQJz>ri-$4onfJ`k zq79l|qU(Yi$zT~kDzf0Hx?i`sX7IZV_+DVh3z@G2A`0Tzetb4LkM}!mLeDpG!S{?# zb6eYpCz&mzD=B*{YU5wo)OF|%ZqK0f-4E%8tEWo$|2sRu7nk|jTfCi50Flvi1{yql zjKEFTo@A2yKRFB4LHyr$x-s?r)&urroyxwlpUQl_?G{FP++s5_i{C$6XKmfo##gG9 z!>1x7a;nuW#sL>F+64cQgy%Q6R8j*(GiUX zWWm$Glu$h&?(iNlw-m5b(X*}fm>h-jDa$1-&NI9DQ zVm9nWJ6mH=HSf@J5~9@PY{M^Vf6!Wv-oQt1WdDm;YlBQG6XJd8md2>QoD-7qeb2+R zcYVQJx^@n_R&q!cboj+<$=To%W95db^UGhSw=a*#QQ6$PtwnEQXki6?*h5tFD%p(< zA4jv|4Tf0TvfkJWJ`oT5i$bOTwKl$CT##hfSWpR28@v2SO#ZV3Dl(g0J{&EzYFzYv ziD-Sk)^}#ZA zL;ziBwrV~)TgmwE?i=jUCU1-1l;^W^Sk_T~idRRaHM4elbMbbil6a}%(HRZmqi+Yy zy9{q#e4%{(k>ykQtJtSV;M19pn_pP*70su?$qh4JvmxA9 zzACW)V7TW}>CMtqgTv>pc`$$jskVhNex9m|h_^uCK|{1XXJJy5XlU*8ZaoU-_{k*C zOX{6>C$Yn^km9>wrT$#j`+Z!f_xQDG19lv3=I{xU+f?sEq6 z!Cq*v5cX)#W^wbh{&3BPDrOwvVpy<^(H!i{UEn)U(VE4Nt@!FGmDLska^`B<%(`zQ zLRrFzIIi}-4=zx0zv&4BCVDblV#Ytz=omwjmy4t>^yC6Ww2`0)gGmVmw6I#VR3;BYGfmU%r4N-*YOJev*k_zmT31b%H3;GYv!8{4G)@bR@PA& zVtUtk0*4vfQ`Y5&CPOqQSAnZf4#cl&zrFg*y?V-Z;6i<7DqyZus&?MauHw~Ux=^Za z@6E7+2aqq&BLNMYnOrWT@2gX#t^ACu@|DRXl#Xw9W|Y5@0Kd)P)88Sy5Q-j6=xqBT zuJF-Kiupk`<3vk_=Fa|YqsV#CrTdmm_&(#ZGfLZ&eX~LZF#1YR&Y74hx}eS z5(Y2JruU+#yFN&b2_;;-g36E%(D>Dnl^9mdx2>JSC9XkMFr3p)y$R$$7ZDft|BtM< z?29sL+qb1hauAVbK$MQ5kr*1JyFnPKp+QPu0BLEJ?(R;dVSqt82arajTcqom`*+>f z|Hb`$f%D}$*E)`U-?lGGRV*wTmDjgOLu-q_Jea8(J$%!4^?bf93x@@6YUcNpvR zPGJXdKY4tw?svAh3d^~oZPt~&_yqZR{gz}3bO(g|{1G|yO3&~&Erfwk!j;$}#}4SK zba6_jYP&u*^T|%;SL&9<91n4Bi1sDpPw(2;GiauSsffA%bjkA4+#t$)m8{%i%b_xb zCAIeezeFOnU8(rr-$bRS-pF3!=2KR9_lWYwSF;YE2no;mW;G&u%9stA75hu`TE-Z} zyH%Ubs3a<@tZM$t*$leX!xcMkN3GY*cv!3QtLUJPo!Bl3;qb-b;QYfco)sQWvfEvk zjv=b#67tRp0g+SWq@NSGk=yhY(UDQpdk&6a{khtGJ!LO1k>*(Ce(hF`@1n_c8jhMZ zEx+h`+f~Y}uCvS>b~~AkUwKMuW`VN=_2K&0r)EKH+;^%msOI|(1{x^c_JI^FsD+mX#T4+wd0 
zi+F~*{KmF?pNrqgTLN3cGg$!eOlMwP{$BJ1-e?@0`LNi1s3=%aP|+gWKyToe%(B8i zwvD&ji8sVul(1OUnYi|yjOYHg`AT?OU0YH@DRU8h-M3z)b!}Yf)}+R})cj@;;eQbc z|MG5x%HU&gUGv(F%!`>BuX*QcXkL>YPJ+kY4^*ffo~ zLWs{fJ>5E2Z~Y}?N{dB>LqP+7ug=Oe~9*L8g?rJ8*N;`F^6pqtNM9K{ANKc(JbdMnK1xXEl@arOI zCRBmwu#6x!oXeiuF*j&x4n8X)W%#Xl6*X3ju5?b#+=&;#xneKl5ECA|c0@frw{b|$ z;OexY@p7V6YO%bBB4St*!ynd)`&3HHr5B+bWw)IR!Jy3jnK86lXO!k9=FMHf@dMfI zT3hz}e+-r76aQL6gbvt2|gE9 zGf!B3QyP`bZgK_Y*%}AN)>(KL%)Z&guz)emZ*DBL$>VK53HI4!Bsp=!24Dq$AeM~1 zK%W+1FGx&7#Q1v5l!EKkHR#VT9@1FaP%7sa2jAuaAYj-&96d2S?kb5)XRHY$ zp5~%HcjRoD=Fg|fVv!ti)Mhj#Hs6nZia)+aH!m$;AZ7BTAlLQfR8?mZ@`TT<_ zO-KbGzxD}MU&eZp}7D;-l->9c-iJ z8<4=Tvlhs|&?Un7KOQsjset|iKA>1*{C~W-b;JO}`60iJ^Z2t{DTn@=t+90j`;S`> zV}3{Hv3(?p-G4P1|A!Jbr51YXvzJr49>GqCO<588zKs^w_PK`M<6Iloe|oj&TybA|RMqvPY{ zmC&N!s(m9?dxYwgX$~Eof*#>%ZB;@I4J*D_wnse^VMazTvVXZ+)m%M{{{r13h?@OP z7Sa;IqR4Xog(te04I2H9W5v;)CT}`y&L`)?NMMRJoSsHn<&{|7JAAU03=1vr$UKQ(N=y>-_u}lj%`D0OrwI3l$|bJ_6UC# z@Xpb8K-8X09dXJwP0B3*0b7Uql;pb_1@71^0RwXF__QS`Dd@N~VCNpX<54grP;mwq z$7kk@mY9t`aY`GHbbQ%oF)gg{**~6=M_$YAzbY3ES_?B|sFPHLip=awGM2Sk)QD&) z&kE8OUWyj!X(TuEG0%m|3j2q3lX2l-KLd}|+fC}7VFXbVuTiz71`VP;cmY;33aKBd z1&>s4Mny*eX$!OfUkf=F)02c5kqBNzvmKXEjZOkH&pt;TMO)$V*Gu1pjGIv~lY)u| z9SK7wi!)wfHj+Tb5i0-dk7-6p5@;tw`b(>x0}A`lKbD!~f{+{g90HrSHT;;b;7Noi z+ZoG)xM~IovJv@K1pd@+Evjya6peK8>-ZZxA1-nzl(cBVk8{( z_YDU-y!gQ$Hqs^(w?!yJyMtfFhcBBN`KGZIX)J45Tlis-9gheEm&rT#P~H{ta4rv2 zFO6xQ`k^F-BD3MLj1y%94D?Z1VkLwR+k?hVQf8Fl40UaEu?6S_V|BHLy*My@m3`J= zq;@jpyus7&LM^9p^|8odX=$km5%^A|M0r&!AoCZG1KgR73-cxAPcBrGHoM3n>8!Qj z$}L-e#X&E&k-~Q$Y zHRQSVw08+M6kjl04P++$Ej9lbutuPY`ggiL4n&1`8e`BSr!m)8?%X$mvSXz`Gcd$r zmE4zTQu566U;u{VwB>n8Azi+J@kbxH{|I;aA%LXdBsT^<@`^wYup&;~7ItB|v+#*}bNOyXwom1o z7D-4R=$&aUnc|!+Jv_mj01R8#4idj>*~|{Ks}!M4T@ku^UKan%@6{DSowl3JD3gEA zb~G$;nvz7y?m4LJ z+?`)AFl%i+6%f5XiG{RQedK|BzP+z|8sBKX&gk0r--0n+@Ui-t3kz|lQof~kilz4> zOyI8j58L&c9o-}9YDap{$ov#x;`;VLjPD#H8l1a&NDYOv+MfB;ygf0Z^LIYRn!5!Z zKW)2y(?yPNI6qF;9pLz6(x;f9I{pR%iE(*o2%rafKd%k!$`QIE>b@`7!3ih%kU9;n 
zoF^Y1CX`wL*t&M*WK{TM!pheSHcia$d%bS@@D7&xx94vk?obyT#~-T*^n!IUXi*Bn zipfeKi_{*8tT+}SxhH+=BWYiZ!db3cgowYfEx!Fz+0_Bnuoui?i`F1|hZMq>H_RI@V(>uv1Rhb%;S%< zz9m9J<3^f!x_6a&oLS=w(}LXd_YjUvCmmb8Dfax+qp5t{njN#6Uxjl`Ju;)^L-E(V zs^!n??Yt|^Qezw|=G<{lJ?xtaHUdR?FZNY|56|7J@4+)F8@it1`jHRaRHJLkm$&LX z0zpEdk}VAmVX9M9VDK76g(r=mw^?+4@ycSEZdHvCqsLp;QRjwT2o3OaX4{dmigzpb zq#SUHW6R0*`+9K#c=3-qLd=O`jOg1_?^mljwj+F3E2+ljR~*zPu-*e!2uEwL?!Y!u z>Rg4by2y)%`CD^=FCHfSdC<+Uzfb9r?4*M+XG`z?`TG2#o3b^@jV#LYJ>AGd>B0K% zU;<~f$^GXh&X> zBV{jj5`xw%TGw;wW64yxlAs^2uc!9Ko|E1LpJv(AMm&qty zcH3SfpL|&nU?ILyC|PzIr=a_-wX~x;FW-w{oBUZ}iNk{zYG9(Y zOu11vVLueFi=Q8*V&332m!p4(v+DIdsixX^f7z(b%(^Dh3lIb2ST=B ze>d$_-#u|k;IgVJsI3SK3f5*En6jF5 z?7d~qTn(IToD`o7j>W`#O?%bs{^Ec~1U2{kiP(+lJ<)E;-+tTu#fH1P4^(EZAgZCu zjey^T)v3R;=$zjX_0^}tMFhj89d&WLQ9;{oCm_=ZS4vwC$kFw96@h^q&(`;nr|ze> zuGx=x)!QyI44>gKb33A#)faB>qDTOt+_&Q%#Ipi$EHxv)#_VYpZdQvpd+}}5aiSX~ zOV_}6-cKqHz~UK_?Tv+>$D50krgSJIt5EQcDB_n(#6#eW*QZ*%XSJWA6ULoDIah|0 z3kByvwvr3%;JAit2C#@ zH3DI{AMpRqUAM0P+9pDpR|!>^{npGG$Z{Li_6zz z-yTZ->39!4ynxK~-j=-UveRfejXyA0IgOR>-jwoz%N8=QZ?ggrN(U6u3mtvXv5s36 z3|z7iBCmwM((!|!MtvZ?ysO>CJzqhq#NiE>-_i6@y^H@jmzOUWuzanM-ibVv{S33H z1Cwp*vyj3z;DREMV$q_$Plijdm zxp@lU{>IMMSI1X_?MRnG-!rxgsB;aVisGm^(!gdGv+lggS#_w|3iiMF{(>-V?AzW1 z+Lj_X^U+Y$4_UukSf#7nS#Wox+%t6lRVuZ>Z~qv1)q5$iX_L=~-|_@(D2<~;pH&a2dD z#JY#ym zy~-OylLncGbkbB!U27AROw|3ch|GHNi{V1s|Kx8wP{%tPFg{g^io1QSF*k zFh}T~eW^(5o;>ndh>6d{PpMQ~1`jC{SSbQV5llt-2X+81PR}O`Ccsv7LJ13F;sHn|#HqG?u;e~|HBR)MJB^?^IgR9F$f7>fz1!H8@M@ z=_DnBK;MTe###wWu$?c+Sg+C zWX>wlRHJxd1?T+|2qg-E@_X1`UQc~_J&nDN*`@_M@Q~~zPacczOxaw!<*IVh!cCH| zudXw>+N>TOW!kt{e7v<69z=>}Ox?}vny-6rHM&~9Ak?u+wdvIu)ywBicZEN%iF|Ml z*YH%bd#B@Nw3(Y#U+4j`P3p>RO`F9H)ytq|C*evVn9x~|X{er!_sWBfYArx`2`Iai zarJmGX4xAJqSC)>VTNW^>*a~QbX|XgzyfRuW)Nf!f*^{f)A9n+7UWhJg(+)o&xN0c zRIpY>J(Xs>6;_meUUlX+t{_an+`69f3cyllE<)Sw&3g3q^Y7j>amd<~!#%xnTfDpE zks)A%mukxfG6O<>BpYQ~$G5!!=0%H5ff_{FDsD~~IwNM0tiO6)He`FZm6D1|F5_~? 
zF}v8b{%vA&esZXDWyLzS4=RqrpXuSZEp4*w=2spdghp@a>5L0*(PLKPQ9B2c<8a2~ ze8%B4H~dUhZ=U)8^YR{~0cg&#dhmo`wDuADMzoBFc} z3|FpgYY2PeKCw`-Nj`P`?p5?ytezNo4R z$ojsIzFID{oZB8h)qClbWC^VfP}DyKz)7#7llN$Nhg{zgZeSFLTn<>QEda#A%9Eav ziM4fDCumP*%~~$(I`@}Ei=dS|Ah14!7XBS|VV}3g9A>;6$KNwK#9GE@eo7)~b9`G= zGGCRG-8#MP*>PgV;%lZ!mF6&R49Tg}h1f>NCOa8Kc{!E1EbFjTuI#r7NoiZYt`TjT zkkT@vctN3xg>>c1?mKNq%(uka$OZ5<#f^nEE|iL%BMUyIb<|rC99B0Cz1}BQI1+)B z`R3x(W{v*XT$Hxk)a{JeRHmiy842&B^g79O;IFMlyu6*l zd0zB4$>kIx@8y1gGeWPM4z&l9#hms_rksh(3Yy8~19gXH^~Mn}M}nx3Uoi=L?Z0`; z1hq_VJF@wIQ0T=&i>wkxoos{mM}JW{!YVB=Ije+zZ2jET(T90f7I?!JKJ$N|%`_3+ z4K8jU8ZVyaHti9hfhh6i_sX3mpc}50>u;nB@h@Xs%oM-0;ul`!+T6Rhg9gQ)UqX8h~)RirHjxS|k<6 z%_0taRLE`w^6jKuLUSvUgW1ZWwE$;wDwX49rfv)1!1PC{F9SfCt(~J?gjORSWs4Tw z3}*IJ){hG=A|xA3guRvwaBWITAK&^D%I3U@V!Cr$AK%TK?BFj&1zrz!@qwIwdI((Wsp%zf+*H845Dk7ZI#ke+7#k%Y%Zs`u0TGt(zG4eUL| z(3c_y#}4r6za9kk!_F;D6> z2OAOd6mZC_$}QB!RDB;LqmFG2(?m5U#|a*}&Bl(GH_oaE)StL`wran~nxUU6jWa@d zn_2VNJ^XlLV7;wtqu}tijkA*auF7sXFL?P4$*tWbKOgf_d+L@- zu2IlO)j)um>BlY}55^IkIYkV4`tp&UMgL3^{QSmvmJo-#ZSU z_FEiNx=Bg#LJ)111VEs0JQ{~en+fa0Mv$v!ll#+K2_(<&^kkmdw%v3K#{Yv$g;4fT zhF#-^;YE{8+`A3N=kEsPvi7*)Jn0*4s;k-NIp+A9lj-x30V6tO{7kC5Z=Nvo`JPXe zPaF^1PK3}4h;q0u%M(Kv9!Ur$`T#V*eUE{*;=B((pkV2{f0z&bVQ%>!&3fb|*IoJws>=Vn2od;K%)45=>4_-%>$^dtX@ueTj-!`hvx>~GF2GcxC?Avav z4DOS(x}bb8!SttqxG+kx>c>? 
z!rJm6ob02)Ncs^w+C@vtGBjj62j44+2jAtpZlnCVJ#muqoXAB4+XDL^Q!-qlsOl4x zo-oY8y6L@8UD-lYl~SwnA;n*k@%atG0iER&(q|&zb zI@0G&6FE6YC@yR+5hWzhdE%|9Wj5i}GmZnX1%|E}f_jMC^3I~VN&^(cLLuq9SO0FB zmphhgadrxQgtR1p#0)3Hd?Xfy6+|JYeY3jWe_L0{CaHs-6MDk~EFOsSunF%Ahk9Zt zY!E;h3r+%Nc8Gi?aVt+n9Pp`hq)k160V|$66!wmxurStVbh-F1XT>s6lbw=Bt{KdV z;iPoO*a$PhKcB^zg#Z?+8UE5DpNGxGMg08c<&re3yrhsWH}U25mykiDPXXzm)X5Jz zj?_9nRg7(w{Ok!3skc#qnT>qz5svp-!WnIL&{ z8UKR9a+ zA&$SK0u0;zY{7HG=kzCH7-()j&UaJyDZC^@YA2g`tmlYy@Bm5XZ4(%(MVanaHK z@b&Q?A>XtVy8^|lq~Gx{CQ>Pn!|f+5Vh?#@VsI!zQJ_DkA9cBEqw%W)R<--H*(Ra; zk70u)*Xy2xgx6SyL2+?2uFq2)_dL+HlJjrE{$pvKU037u$EpK1+_urXjibsqmjtyl zN5)J!b&*4F^29IM4hnDlD~y{$_jGCEmIoU~k+br(--g@DO53Mw^i~6K0|}nm9%N^$ zXJSMTmZja47hbi^Km;^zDk_rgg_abtnX|zZLa8xMe;`^TbeeMtJ=GVaBg@)KPFY4} zrp?>)jc6 zk6Pe5Mc57F^N?!2a}9GRo7RDNecjdX4%sD+ z>L=w_NDEZG0u}+)n3d)@)@P=(n1@A*B5W0_BwWa zI+D==EsftL7~!cEy}J6!?)89>&c$h&{0F@j5%CBrHGlBaklpgAg(=c|cGNfMGhBXC zfMaX9Nt3pRyK~yFnfdef$~Qf_(y-JCQ7NAoySva=x|f79{7I4VUqg}m_vL4$znVyD zT*PE^Iwnul_7fipsoBMVO4pR%d!a9@2JpFaJ0(;JMn79)x!C)^v(ji^i=rUNf&iwqrB> zg#hp{uMkEwYN)pBEs&#nKS@g5SQb^j@zz5+I`Ud!HQUJ4qlYb>8|R#;xY;o9XMD4% z%sH1G9IWWd80%Bl>}rq%a{8jg4#kSa!VUqv2dgMZ8c@Zv<3ZyxiU6Xwxn;?)-p2AQHuLvq6WaWAzT=KHWgOs!$_#iY<@EC9(jE#PVa$1gsPA z9OSaMpIZB_FgS%Co4H(@hX(X8yr})(P0@e-U419lpJqmD0uH(0wC0#_#fa6Z02+Vc z{g+oFb9f)w?a~B_6-EAfy{~i7=sY#MXgl_)i$yQH9qTvfyBLx5hp)mytw&`&NIFk^ z>COMu(VMvsyFBm5z;aq52<6ZPClUImqi1jkEGI&IKqkt(4Z3<$Zh~ohzCT{)PLXZ- zxWTmw{l3ytAMf&F?Kd(+H<4cfaQW>yvY+049SE`Dfqq%#x%@`p9)kedr{$XEW0Nl5o) ze@fuC;-N&>-Shobv)XrMj*-Uw#!dBR+hWBfLiXNEyCUfr5}Rsq$XPO{hJBhNfvzfn zjrTqH9i6d>lGBGj0%QJoI64)X&x&;4Y5R-TRqj1GD>mmgDfv~;6-ze)Qfr`U)^5v5 zbZfG22$&nfA5aospvh8Q5ad2F5Aw;;$;T0!gIbi_NT^IaMyVhS$%!e;VfBBgun-yL zx?Cl`YXJJ~Gg#9rfp-ha2^{2=<*hYyiMR@I#$(kfv>!1V;MDBx2)_w+AZD;)FE;~s z^O^)J^y>QaS3F;)z*@kcE*yH{v~fTZ=}Rct3a7pCBxeGVzqrqh6z?$u8qII)@x*>m zF~Qt(8H08F1}a>!>Dkm?B9MsKYjJE!V{A^14Bg`(WM&yRc$-%&N#{OI>1sQ*H0S$` 
z?7ooL1x1F~n1^i`=0v?r0Tv^m`yf9D*tu4?d@JK5^993_Zk3%XEN`?e`L@$B=zDAon8@7~>a8PHBgLr+zTexaP$QpB;*uvS=nGGgS4z z`+UGEDf46wpd~J&tg10d!Q!EC)ou8axWW0)@NQ?t2jGup6h-O|NBJrnjr6ppr$~WU zV;CrwQ1u6NubQAY%j+}52j!FBgtNeDKO#tVY3xpF!%;@y1-=dYG&HKqMiQ*P6FpH; z16SM1hAEh7u#sWv2?X9y698PpsoMymdRIq@|Dgwz4nyqpRm`e4T=p>}VWc?{)LOGz zK1!2>8%AujN_K?KyXk^>k8IYc5Ygq*yT#V(Qpk4V!;GG%iI3^r*!@wkr-1WpXpPR# zR<_pXF;1~{O}a>`MWZS@ahG}mPvL1pt$We*?+(-nT%#3VkwV3o{7tP%&y#0TL*on_ zR;r>QZK5upg#fnSQ51l2h}h3X-+IdGTX*2fWm6}ZngBLO%6ua^Z@Up>s|V~R@or@i zZ^sqaaz!>4)ac}6Ah-pjpaqHnqv$lr7FA53kl~kyb#a=us^*hYbJBN51{|u@(JlMv ziz+=G(nl589luHjT>rPqke1UCb8mHb^C*6VB@*&H#m>?-MCglH)0~Lgdc;nIYkzd+ zfN;bB$M&iF6@pb#SAA_72lQbt!Et5PZQ@~ppRT-jG&;;5ft%oy*$qt}+ju1+8s}&U z5U=Rgl1pF@1{b7|{r1yjMpQHOF(cmp=ehpB;&96MS8%DlYn}l-l`FSH^NAk8ZHdp^ zy*!RnVeOyZWXmA3Z~wJ829$S_mcNMVv({R$zkfqOgq*9+Y?B-njB)a;uP=P<`P&sWU*4(;oUF_Zs z1*gEy+8Bw<0J^)8wO2bM=D@KxKL@Q7BO1)Zck^{9e}}a9IajIGOecp7FyK+O4vaz*@7xIFE3NT7f@`K)=z@R8ShP= z3P~Y%qsxtNS!lF4p^AJPPlnr;v%6D&vAst9oSr|0DHg^d1vVcH!B9!%>}+Io%}mV` zvflkHsl24u&Zpe8+1eFh`YaA7wY2D)L*{cmm8nsIqh~|r+Ww^=)q@4t^F+Oxwv999 zbbna$Q6wO{?dmqbXQmr@h5KK@8Ow4uaQ@_vOo1HY2&tP74GzrXe{-r-WTEv*yvppc^ zW323Me2>yuEMXU$gBi*n(nQ%%CGi`YG+7x~S9}9Yosx&hUIZ^pBNC0dU3{8c4E!Uy z@I&8RF-kL8ar%1e2~!q<x>%2 z3ed3dYS^d4;G~x79Dc**8~v`44sQo0zV>J3keGl4Vq3amXgQAa0?{KkZ{0DRQS@e_%A+{|Zq|U;tbK z$Df4#R+S$mPdx{-l8RdK?*&fcj{=LS<3>o?ed#Bu4~b{9>wlX$D4bs5%Y+Q$aNr+D z1~4TJfQ+TC1bW5+HhU9tGA};8#lvNXsDK9}q%f+8;mxjDn#L4;JJdmR>HLjP@y=nz zjFi$0dOALfXb-GB8wZau9=Jd4yI@k#FTO?`)|xRx(xc=lGcM|vxqYCow-Q=#C8%+K zRk1i=Pv6s26XYHXdZ~)jEax7|;B&sQ+lZvHWXSn0ADh%bElYu-_e>{1nxcv2dya@O zg=g2`cswFIG5?&r?7d#{$qq)^EBUlCRcs=xa~5hTvaL@GHzFwO46pKPkp=ov)5fBS zSG~_c2ovmhyLB_YrFvr3g>~NdW>+r~Z7==b_>D>4q^haw9gLAYcg#y|;F|Q1KHYU( zP|L_7tFF$rOSS0re(wIG(r0DzDx;hTt0*juSf`^@R9yjU-`d^FCT}Z1XQ@47DrW-T zqP)4rfp^#`U2X-+(nIyw7X-*X1DRmED!7MjqkV&KH`Y!A*6nl?6|din&=fs&4zVS7 z7X}cVVAsY^KZ?V7K0IoZj!2gj-3V3)F~OngysK^tN8|6aPxBuWJ9T}B21BW&;v=R?RRbN_gnEbXrcD3d 
zDKfgy0t|qA4&t?gSQyXTqz`eV|JE{eO;~FVy*{RaQjZ@?pIyzcRBJtVWPXYb_0@Mc z$lpcy$({s+4$jow;0J9#_|3g``r}Ltl00*IMRII>b;$O_zhMH}@q?NI_dv7!es_Wy z`dr5g5s*PVt`S-xB^Itt&|}&c#)A0A!9K|=s36$p#%`+3*d}!aYT{!}eT`ljQYQ7Y)9iKJi-UAvk_q zRyGrw4;Jqx#U`U)Qp8)=B0az+!YsHw85y@;n0N9N&o|^U;lr_%6V3*+zIwY^bYNy) z$<1#6J|SR0-Bw3miKS|BcM%m8u^w_EZ5BCRL89D}FR|?>q-%llh#g7>6v)amK1Al0YUqR%HCH3PTgZ|iT|bEzGlD(d@Z?_4;tn_AKV z<|a6)42F{5QFQint0|Kwx|CnUm^j&{Gw*xaZdtbCIbo?*T7AG-eD)5T*m85w*?G~o zsL(f#3x}!c%l8d%)ig|+?7shKrO)zjt!%lW)4nChQ_&Vyry}yITSZQvm(N#eeT9{ae^=_>-*JnK;sgl5m;X{Q2}UT*0eTsomJ-=Kr$FYO*}bn_C7&&DtV`_bT2<>PQSs ze1;g9xhls!d0FsqoJUlo9uYuxZa@O!jB1wgP_Pob3 z#Er_pC5E5UCYVI5a^C^$6VKS9R|=e0Yc$HUw!MsnG?%}p)3y0K%u(ADEC56>r5$h> zaS7j|cPR(;`LBbq^j~9R0GQ!1r4oRP2`QyAM_XfdB8bLwuGX^AA+ecXYB1i1nLwr{ zELwn}FSYCu!#il=F!iTZ@+rnYJyiZmwzR04&vAg0g;siddU09nRgN3>g{+MP+yvj4 z%XEa0hk7(OK&rKzW2+l%8F4X^KD5V5;!dnaD>dp_MVvXx1RIo987mi~wgk8W2Jndf zd8*+gEgOjY4;#`0KsN0_IYJMTHrj|wCRD^Z#ybA+3u=W#3<2G({4XvQoN$*IK|Bgr z*Ftebkf(qbP-;xqS;fvvKoUKw)xG+VXJISgm@sGQ7c&oA+ zPJsBH2meRqDSftGxHqfw3pV{P^!vxwJ!9E~%WbpI^~(2p%$?do9*?sXAw!ENa0(};rd z_C>b`NoQ+)Qq4UA&d}-6H*)f#MW9wA%@| z?xrX~gH;vf@pEnb#wR?UbyCRUB!Y%RAg;7QaytnWBorKEN=%lWxZN+2FLZ=MJFUP? zEFO(PLNHeNCMKQy1%ZU)_*<;V?upVV-_3f7fI z8}c@b1$lD0Fu{A>ex#Tmu%S#VZNS!2^Wul2qPo(5Xb#rz-e>p1_*T8Wn)^0N$4pvi zzdg3ozqXT=gE7O9*Wv$XWXXg1R^oB9rnO&k;{5br^?-f^&+p&%cj6vT$|1E>mb;tg z0rLhZ>O(K&YH&Y9;7&oQ*Sy=iXD?)R)O2S9^a2y|e5p_VXH1Z^bW4zC(2|z)-N+pi z$aZt==ck~IQNvC{UEd2yJkAJ%4m2b1_cna~o#$DgZn}=w{vnzb;d|!&Lu#|ROf&M6|Vcp z;*9I`v1dZ$4}T-|WUCk?GK{6KMU|3FYK)An>)h$L+;t46i}Lj%m=+zsFP8+nJEB|? zWP0k`h-g%8lBqN}YdY!FQ8kF=2r4`*X92f}i+@53JO{2kuXL*Qz8-jRVPL&V;LW@y zS>W;p#!|f;2~QfZ+^?$sm@WO%DN<%7ydx;%_s~DNHq`~2kRblgEWaAXy3`5rgq*x9 zx0nktwby9uTQAhhJ`h}Q|Fiub_o_3Nuf#z_aJJ6mKKU#u9)?~8$%iPP%WcW+s$ z$fua*j!k4Yroz+!8oDHz5iIh(W=&1C9sAbS3-%>@Huf%x)>cqCM5wNx{fw3e68^#? 
zH8PxRva$59!*%8ynU2at0hg#%NqNIUUHT`V8lR$@(NG#BpXz4S;Ll_a=L$~MLTRx% zKdr}?tUYx%g{HdZrTc=CleLDqRn#^{lh63ojqYY))i$N^za8wc5JA}s8PB-)lmdI> zRN3#r8SX)Igf=ZxVD({kl0Y6G6vpA1gqay z*avDKs*`AchFM8IMY^OzRKA)7 zoe4WA4y8x!aFS_@3@{)!Wx{TaM!W-s3b(P7N$aaM%DG0=4&cP#hg^uY z*6dMN5y=TDZI$1U5mgoM^+J@Drb?wFgNx5>PkmsL;@V%4j@^9YD5ah*) z-5Qu*=7kJNGvQ1pkT>Dv-Fvb>1|#x}G~6+&_!^D66&|-x)kJi#%)A!)MfcY5mZdpS z9h5|;=?%!FG39Rfa(?Guu>Tg5-af!v`mitNrDf^zo^gx58&PH9)@C?zifAZf;zpGw zY&3i`%E^wt;C_>aKp>^H{F1DKTv&bwvD}a2hbiNdbQ$#Lo1je(KldjXk-=)1`YP#2 zedpOg;Fl;+z&7H31+RbUIL|ieDBrJJ#WIJ$kMkb4UgT!q|s= z@R(_6UO*W~d^DK0Z@UYsJu4%eRmAW9NvLfLp9+z)#?u-fd`)C|b=m!-_u)9i$&&Kf z!@(oYv02arP7ne0TL9wYW%`fquu*nyk4bT~j=2`{v&=jTn7Z!WzuEwAC!_BR+W|a% z4EDq3WKqwo{m^pfBZ0KXQslJwFM963$L0DN2Dim&{JJaxj6ESv!Rr=U{i)}q7$hv<mBA0zX^GN% zmcP8)$yIH2a2as#JN!xynGb7hI2@VOYH;d(-=qKS>Ei7Dx4+dfEVDx#T{pJ55Z6ae zL{L{~B|+;{v&sEq!zJjX+04M72jX?S+34q-P~B=ZNaVeN?gza10j!NBV_>BGb}6&= zy6{f2y}P%p{mY{Y#^1m{q*D)1(Ul5Dk-!)y8F%3i39n7Qp`5!e8~weG{km4YE1SlC zUGA9fD!*W6bjWPDNO!1yvz_WbQa?*=8OO_h#2K)1G~cMtyMJEk!cU<;GkKz!ojDXj z744<+&Mnl95<(xJWh60XQ&j*Nu%ex{ex^oth(6$sFL;Dga(|T`? 
zT>Y}wBZC?$qAp%xO!xA`Wo zeb--w?({auJUV{|zo4`79~NNp;D-{kS3UoGJnCMmbz~;_R=PcaS&r*uOxE}%XVBAF z7a-i~&*V=n_Ph98MvOD!Z~;sO+CHY39$j(-_(WZggz`rue7335RgQQRNSj1jzk86* z^3sqyp~)){Mzrap>gm8?CgUkn`s7Vkp*@c=v**~1iH|{P2m+w?7Arv2-`aDnmad25 zL1l)Wf*ATCniqr4j7epkn6CKk7TcMxij)0*%n5)fD{V+R8uKtR=;lcR^h&5FhH?vO zh;_0b+-+0d+;(DgE+vb#$^u}W$PQv&+tC)zqS{Y|a@Za4H;+dvq-x8WV2(e9nJ3Bd z3xrIt_fZ8XS_4T;J}RE!fZpn@59CQ=(rt0eB!44@W$0w3rd-Lemz$U2J-FdhdJ4yC}{Dh zJVIohDmLh|ZB~Ib=H-i2iUltOvrluTP;N4tBYU2HCx43x;Hj%7`GZjlKU0s06}YX^ z?W^ar<$&KBj03`oX9H<3au8oh40kO=94uWj9BX6>wJcyWSso?5*ov zeW|jb;R(#Y;yA{=zC1NcWB%FjXH0E| zLS?aL7^FuNbpH+0DPKaGa-3rnI?Foep%wVUhd0aT5g|aikgwHN-G?WadU48VA-%?1 zQ8{l@U}si!8aOjh5HIPh*MSF)drt=azfW~I7sILLc80t-lc zqfCJzNNFX3kuAPiY1b6xmg`!=;;O^|?F!sEQ$!xU^SoztG~wKi2m?w#(3q3 z1S(~D_A0K22cP-t#50S#yq;;TjMH2J+>p}} zDtB@j*CXPlT*2`L=ZnF;9PCstolH5iuFm2qluBlrGbHCvI_gjbwKL@VB8BZXRQ{Vp ztv#QGu{xKp0t)#9yv0*mMi(#AdX6I=zPLNA?uQ8RB4aP}EZm&{0qbA~xM9LpJGp9W z^X7KtD|l>DoLPqh{S>*%I{)3&Vd!1dBpRtB@4l`vB{&PL&}F|PAw5rs#Br=oAAP^4 z4&eeYTX`21e{%xUiOTapugse%NllI{=5ZJCmF|z9lkZz8uQyzTv_lfi^9@EEstf94 zsU_4uS6A^$+B}s3Ybz?c!t4PwHDSG%0E07yQ`1~!mnbmJf06rQ7i=$t{wN3P)) zs_^dwO#H@k)YU|zz06_|W;_K_YrM8H^d`k3Jc3t4MV`c~`KW`qzZV9u@d(Peq&_pm zlt9U|Uh$aBwEWoD)#=vQQstC>{?Dr8-7|>+Q*Rk39kixL(3Tp z@iv_VrGWwep7)-s*ZD*U2---XIxqI0ZFhm&Xu;~&7Lr+)r4+4lOhTENTpM=6K_{=7R zbDQj0fWh~V5bop1EouGx(g^3@AyhK!6@Cz*OWG6DDqVop2Tju$1ydYZ{^+fjoR`~L z^Kd%@PM~broM}B$@NKSQoG#FJq{*SH8N$`lxbpz$IZb#D@eSc8b69U@} z2g=WEhS%*dtt=b8S2G7I4Ien&NQWHAv(5PLNPZL4R@3n}T&0AOLeN4E2Mj7^#kq#?;p`fD8g4e+gUM!el5+u`ztXFSb z?QKR&j3Z3>^UEZ%wKsu>4W=v?%0CPJL??qynn@s=d{c0 zryzZ>&SsS~Rhxp}8m5-9gw-{3cJJ(rp3pu4cG}o>6$Z$P&z}lZDLYJ1kN-hBx(!A|fI>%9 z9sAJ=l=r=ztMDKpR$?JG!k+d~d2#v2KGR>`if;4zgi^|w>=(PM7{`2-`EhT%h9{ zo{oP7%$vkv^d<`Q2?P2Gn~4Zce9eQB{r=9qgJ;n)h9q23wA2Pf#G-hFIwyu0f4lIU zATF!oAP0ebd+srZFgUU75Cr?WJ*?V%QwU=~c-7ukYEb@@e!MtnsrKloxne7R^o{Ax z)0ZR`d8c!lAY^9Tl0DpB3uOPR?6tlPw26el 
z6dhnxtyBY@*lk#q_lvr`vm6kIRT$Hm{?ZqK1~NHg{|sD9&E(cX@^1lR&1qYn9gx=txm+BfHYG*n7jS5=jtPK$n8Kos=5`=O&pvNQI7FQNa}PWt&r|5582 zLsYlK?HS{XN5D+hDw)OLIvT}G$u*-)?%gz@Y$xWkHAArLNl0i4Z|v#6`-~-`?wxSn zf42#ryk2t+Fn_G|`r2RHCpteU??E^vY(gLS%ch6EkSY}K1I3wiiiom04sTc)ests|1AZ-0iw6m#y>^)|d)C>?B+0l^ z;N!p~6D~kI;=}bs@N`moPbd+SYjYzgJzxp!SihAerdvsln1)Y zWz)||4i)XUUAQxY?H7vE(BCqMH8k5)e;)t1(%sC>HgbJ00_iJ$%-AxqLIb!+cJPiu z)6_(cFRQ&Szp}OZR+ThN-uIeOjDoV`IDAc8MhH*O7D~AA9JaQme$g)vygjjdCqj9= zm+y9=Zm{WaToST?E*;8WIpP}ZfN-i97hBdeGT0;DjgR#^aKzUKS|Tk^spJm$8%3B{ zLhQNhj)T4dbA~GdH$q|_{hnD%B64M{k7H)zk*v~JGj$e%&Mn#EiF=i*%qJyDu`%DT z^ce83t&27iTjDBi(j+!YrWO$G*sRiUmk#i>(UTm}l!`(#{Siapw{`DeG{F{l0~h}1 zl@mv)fqa#(pLFO2l!Ww^p+BL4G&c{t7`y5_bCwkdosS-4=12pNG1Q8S(cxl7bKX%z zx9c>Zk*)o0W!LRiFPWL~JfP5Iu&{U`r#8S3<8>jA{Xs)&!&U^gwY*wT2*!PEBA{#1 zKyJXs@W|v<0x_C37}pFFZk;a>`5#;5BRhi0);hkK7qsxrWH4hIaI7;>57O0|DyzK& zp1h_CC=?hSOMLA(b#nM#hd9Y?V0T zJB&B~;m)%?DQ#LkXv+GSzl2h7xFJQjt5!(z)Sf)C7M zFWAf{#LP!Zo*o{Ot`DQ>rytZ^vxz;4D?#7*Q}$N_qjU5 zmtMb)qpQ5um-8;S#+qYmr_=W0_PL_^d25v>+47cr)aX+x)Wdf)-qnfp;tlpZ<~{j( z?$$fhS2!k8&R4PK2I81wrEAAzRo^U5pYr`RgpQ+!(+wZ^8-H5`wmTGIqiE4)_hjxU zf45>TJAG~(-Yk^Di=ps%HONMsirVI`^c2JYE9E5B%Z`?45c>;aU48Y#2qyE!>P4{-96datX3f#B zn3*sjQ%2A}dQUrt)#A!xgQUf+Fmc`Tjd)fwZOs-no>2`w3+l+;Oz8mh|JMZnKM0W) zORttc@A^&1%Z&}{2wJaJe0MGV?T77=McK}u2E8*FQ--gvlbi`(7Op!8b0aPvwELnZ z^DpE{0^&mlFSGB&7{EwjEvSai2MRend`fJUatgVL4(^TQ1{gfI<~{p~Vk&2J2XvWv zz7Vs~yK0&FkxYA=fE>>8J4dI;Z*vbs{Eg)%H>_w`1G`-359G~PX4NrXTl}J=pxB7F z`c@JrMpWcjzSi`(Z5*%xixS*C3pq=k5B}&_T1eWq!@E#cnmlJ-@e`-CJMm+w1A9Ap zvZEU4WWk_(mmN|vq9(#a2I_AAyy=Yy0*{)SuQ}!(5L8*bH8H&D5WG@jc7v~!WMK`o z4t{%PvU=W+-`uz0vWv7>d$ku&ejmh5q%ucMWUh?A7O{;-xOcJGSquv^ZeHEB1F(l6gGVF6|vLp&+hb}Ujspp|qe=veM?CqxX zpF&9=%VC5j=t|A?7Cgp&QjLkjk_Dsuu;`CTr}!Hv{h2ZQQrVg=hCc6SDr;qWBlInf zUezzV4aS})rSg``&?S}{%-#M@8)=tqk-7YonGLiBrjuP9v$BvJ4g6n-3mjK~mn zsvUrNtG_~4WfI5Jx2##jCsU0p8<}E7)22N4sGj0$miI2bzjLi^xhK`-P&U|i+Sm;| zfY5a!OjN>ArT%VRi|Aa%nm_VuRpn;;f5@`-uxD%^wb|u4pfz^Q%$04qK+QX-1?gw* 
z%EX$U{CCDS{FcIcQkmRxuHHk2Ohmm><^av5;I!K1OAwwdqY2(M~QD-&J zRt&7COj_FWqv#m#Z6ZpY2HLm(G{CmC&y%8L=AA3DKK-1dLGu*KSHGY=q66-!dv3@U zzl;6oiz@-}HQrI%ua$hG8?-n@H&8{F`3C4$Rgq_0Sw@TC+0m!5lb@i^;m)k|#ifLN z2c@>w2qaMVhYf?cCt9|}okts{e+Y?^yb8Wv%hpL4J-!XoxIs05OF&`-$F<7vg%T1A zn@fa9r#Ak5TzMeaxtdx0g{eA9;_O>h>u0Im?M0)sDBs7BXXqlTi!encue^ zj-ip7V8*AmhP+X05{^?nP~a;WruNeCKoY5xosftbK2<6vy66+wUNDV9Y79Uf$ivhkBSGdvb9F*+R~rQ5=q7{&93yx&wqxz zF6#e}wdVi5I^`f)=a8mU!8ep&|7T%Y6IgYDI+8AEy(UQwQ?Z=Mru(3<&p$qJA98oW zSZ`3eJ^rsqY=Sfw=Qxt`)NdJW+&W|zf=v|!&Hios?)o?&x9;Swr3b_@Cu@(?@mR0D zdLDxkhbmRXg9(SvQwY_!*WDLn?=QqfH!#qB-ev4=2JLE^v650kEf8)*cAqlku@Sqi zkQocQn<7CuDhh_~kF<`o5QYe4K!lVkbbRFUfQiVxE&=;YNt87k`z$Bh+Xb=G)OVZ+ z`q7wXVObx{>#%k?PPFLuvOjWkNQzU5AKg;pZ=S6gMA`LVLRF{@3A&0WmjF$@b1o`I zhCaA;mv^%?d`-tshw%R+$4xsXI{fW&oVYqvc!vPGETzV7As4>34QV=EV5<_Yv8Z90 zXv#z`UJSR7%}#)h-RByvO8@o= zoE+?oCQx~N|CfK6ocy8=9|y)w=ECCNq~w-mdn4Kse3^bn zDfQq5rCB0-Zo-u*c_;(ZsJXFMavvKv1HsX z0e%1bTZ*@w_b%EYsbP~$=tci^>zPkExmOck&EzjOrNm}^WdJa%;EGx~;XD!dh!m1T z$knN~kf1s)(4zgweFaA1zHUG2KzE^p3$kCEYj+8-^KF?akN+T&<<)Zy(*ml@YL`6Y`#&c%ePfT4xsk@coMAkoubhhSeAbm z_Q~e!SkpJyw`u+p4>L>k*Ih4@07zJ3C>HEK`JPhcVluP)gC8hD%kJ;jpS1zpAJOTj zGqQ`7I^rrGb|x%7JvGh3D?QL@8ZOR;Z`;JJor|hSDlQJJP4wSV<;8mw44_naz6^-OTin|R3XC}&D#p#Sp zZAxp;3y$=Tc$lqpgC1~AYu!i_x_;av#%cs^I8A*JpE963g_cuN9WmgF!#J)ik9bu< zj5pF##kF_s747_*?xQ_L^+_=N=M4UhDB?fOKlB)m%=IlVSFNo+N)$(-bEPEiJ zJs$FKmfdf4N@@`;_O#d>npB6~HsJ%cCdwO961~=Bh^`}=Ngq|o?OP6Lci6NrGitHC z*ZMkB>qB_LX-uY<_g4`2D7DvGG7}QhgfyUr?gUhKJOz`(55BwD)c51Ol8ais#))fiIv&C&hSG;wZ03pO@zcnYV?$ra#_E}CdEBo02dc)Ca({Y; zdNmRH*n`Wu4BfQTE+TMye8D=gFk{k?(O{NWJ+R;iHGD<2v_()t#mPn|NSc#^XVGbi zY=ai$ER;hM_n%dX|JW=Trt-O@TQ}&wqG-ndU5j7yE1;-%2k)|KrBsvh0$k=}MTnDd zR%MXjlm|=ucE{6utHSW>e>2pL{_x?CvRfuckeD1jNvjvo^?jO=v8U`iD2V1jM%`4Q z%*S@j0Ey}-uSr?$#dTt}ANp9Rf>)y5xR=Ssx)S}Ilr0G0W_;o%C)vz!!S0z8_x zKujxtCtWWh#OJ~a=pTfhn7|eWX-;?ll&lvzC?@+aHJ@s+T~z`+%C`Emd2uDZd1K~o zk|Y1Npt?tPUXk59k|X0EScTRydA=na{Waaw?_#YD3Q&{hp?l@L|5Owrf(KvI;Lm&( 
zKkZ7^39`DJQTDB>0jmqhVe&SUUKj&B`D4H*q&tWGu(T`=1!s<8L40v5r2~0G8{)9g z1{1?gY*M{h>@`0At*NpPWtzHE#HbkRp4s`xp0@%C9d?LVNp7-7}&)GesmA|L)y~J8DdpA3gk{-f zysiXBo+9wyl^(~aKuskwtC}Sc;!ylAIO3GK`Gtl{o@R001gvvLpnb48!?tw1UaN3G?I&F8 zkXZ{rvr-=r>OwSrJ7rN!&y5P5_M>_FlE(j>Atl=XTCRQFv-Yjk3BwM6G5`H{-hLHY zfxNAL@`O_9>>oJt+H>59+t-2FKUJ92RTRG`j+8pFPPmU^mo#@P>{QKt{tLC~bZc`| zr)N=9=?Nr5#7w1VuP-7%0RM?e{NQt5Q;tE{snhB!T}qz&|A@b zO1V#_8wKwME!#Sm*z&sr5l0yRVe1=KbyB6oS+~j3G69BPDjaCoXPBbW2lB=w-N?6c zC!WzO@oLu02WJ%`$~qL*YF2XTp|nf&(K-ev-YLTfwag`e?9x%%4bcrDceIUz*JhGg zFJ#iE@y~t)OKEPYzj7cC3MyXEK$v^dwT$5@9AH#77$?MMQQ=!I**wQM(>eT|t7TKG z>wo_I_pp7bY0^UK1c4W08}Jau|I{50e_dF(wFWz57}j%v@x)>dGcQ-^y!ARV_aBTH+g6GB0~7e1r1c^@}amin<9^9EQ+Kngy9 z@yG5X+K^#}FaNSYMFmL)N!!s1jNw0SYJ)%5{8I6_^$gwnmFOZ0Jfx^Q+%ZX|`FXD+L^t~W;M7x6Kgwi@U^z7I zH@QVtd(~KfS^#mm+B+4bS^+I(>xaL6mpAir3)>m{1po+xuZWB6=|@#cbGq{!x?J z!Dq#$^!`JW^30p^qBjygj>$CRywAl`zreRfT5LJ0gm^A#n(cERcvI^e&Ig1Bn1f6m zKUUm`Clm=!*;gTJqOuQYT6c?nV0G?`AbYShwvzDtvV={K8`0WJ_nhIVDRv)$e^z@l zs()&(`TnNlCU?Nt`A*m}`oYKj;j(Bl1Tw5q4pM~+)_O#u6t;G{+%3+|k*$@dtzazb zh&Vg0I9Jjk?GPEruJkrhJqC5((I|qn1A`f80k2o_ptWeXm%%K@t-jm`fvG1%z0XLK z1Xyp=tMZe(a%;IvW8ZJzyUo*p!uBNi(16aOGBPPX%X>N(=ePE4O_xfRWn64tZ1-#8 zpS#(T%yc_Mv51Hg?P&FsCE(ku@+QYS?K{>y1)OH?qI~1^)dfUj_SVmz$Hbux=z|Bi+Lkb zUJM`rQf8y?NF|w~IsY7Dd7Er46Llvi?i7|DV?StQJq{iDY{4AZCkoiaOK}Bg5r1Gp z>&K@M^Lv9SV<6NLf!#+;cuc}RZ735(J~V2({PzNrCp~!|hj3m`yA#iV>?Gb2PR&gA zmvq!j8^2^28V(gxcmr!l49TuKU01-QA<$>P6F7_HPk5J4y}jAHk?HuukP69E`W23b zs{O5{GU#iwfZCivH>9QyTVrxrdxO`gc$_D}K|Nes#Duri@onFGrOD8_(U$wV-(s`E zXe@Iq7^URJ8kLq@`%6*CExcb{gJXc{MPDT1V-lDsQ*>!x?Js{qx`E|#etnaiXOE^)I>*o7zC>X9?&8<+a5CqGds}LUh zCdUp$w~sx)BJX(C?{P-99!bobQI{vSI=jl2oTuxHJja|5LYno)J;sXEoPeeW#2v?i zv06JqSi(1_W0&Q@tK$ifXAB@kW(taYb2-Cb-epQcjK@B_Bv|SGpF9QPEt|O_KtLcF z4o3N(XZ1YV;4D)H;xE*4?nE5!D3+JB>t@6?@6oRo$;If$WV)H?vb(a^o029@aIW3& zQ(uq=)>B#ccTUUByMjr!9q*{^&TvI}%A1SAZYI%-@MPniQWIUyXmK*=wOKEqT7}68Lq)ml9^W_N)2fj7aFzyLLJTd8E zd{9txnOqr5tx(sxLyOm&%sXPS@G`kEuM{*UqPxCqSxON@i+tv%1tm+bqMAR~$L}Jy 
zQ~UdWUjKd4MkW^ggO8e)t}_jLrpL3T9h4GK_ph5#<|JW?#uG2yutM31+3g*)-0INC zw4Sc{Mp1YrxMHQLS$ROS=Iv7T$vt5fTO-(&l*rx!(xKGV+jBZ!`YcSphX4y4{ zT|ss#g0YzmE0%J6UyMzEKJ(IEA?85Yt=HFn&aP6fVMQX#y4`(~-!sXWP}xk*`r&qxy2-%Et% zCjuDJ;^jC_o6#IHR!Y|H=4N<}P?4(co!>*kT8}O@#W3y}=2|NDa1X9bB7h?#_t?q+ zRg(3LTBq!LmEJ(Q8l0LFO19T5;W|%0R0{=!Sre0aij4y(A>^Yez3~GvXL1W8Kj)P- z*-4tV%r3Mbw$##8M?1}Ja;yVGR!+gI%bg4DEd?s0(1?RP`Q@jbOEyp8_k{r;MDo?@Hy zotD2U>m-R4zTc`fON+&; zdbxffOcHF>sbi+jVoi3?T&^On%o}3jPp)by3Qe^Y3;@@bSuE&=V>2jiVv#@9duYG> zY&P=4cv-FfiXypvy;c4uZ6vsu{8#}UilEI}HQX~9Yl&sclAj1%rS^iNR|@;`ls`Mv za%bXR+4fUY9sZlxekY=RhTdFTdN!*qgTd!Bal%r=AU} zxkllU+5-pIS?vusJy(xR%$p+0S9R|A39I$=RwVcHVlt-fQhR1kDo;q~ATmca`dF;o zU$KngHd5po`pUA*+5Cx$Bj5Io#vMRE2B?=Ui^3<+Sbijpz-Dna;P7<8}6^4Vy zYyUF&`_1_1d4+6A!!%IIF+05=?*1hnm_e49h7dLkLdBp5{+Lt&8uhMYyICGZ8BgH% zqXwxq<#F*q02nAucmz>2L51K4Lgy*`Fgyq@hC+5m-te|pOha4L-Phh9RcA>hSZK6D z_B4e=KFkD?`%{xv3_7^-};!Tga%AVAB`k&;sT;t0TSDWW=gI;^($l;v;wAo_4cMnFqg$ z*GHfJ#Q^!M0b6rrzuZq97FCz}(yaL-HTQTL)X28qBkhe}|L9E=0k-)IAes)qo&WI6 zWY?pueY)OU1L7~jJ5$%Wy_GOU@6xv*dAeA5hvANhLl3fu2oO1}n%4SmU#ioR38UZK zMy}M@IuC}|=FH4>gVG&4ruVCR$1PWbDUet z#({e0@f(Y~!Ec1pI8xp_bE1)x^v>@twqeKJhGJ&BC{uZp)Bt&OmujNC7!8S62mDa3 zJ!PUNVBDve+Pv;^OC@E`S3BK=*>i#(dvUIf#YIOGZ0FehY`*xwMP6BSQV`=OacoHbN4C%*HJQ|ua*f%aI;d_~a-sVrbO$#(dfn^Oqnz9a z@4B>$&c4?R);E+y7_ENQq@AJ+U|ES8a=IHqF%$nZRz2HmyvsY~KcL)F!lPtcCp4(&g{g}JX$yF-9&oN&iM!bx+W-y*`)0d&(J z4&LrwtLKn;Fa>Y)B`wYE@gc4k`IcatdCeD=&rtKp`tml|U9|bYR^oh6y50Gf!bl>a zC<$z5_dTO(xWUX*3I^lvFu#O#mRl=$X=NU3=*DJ=@TJEn_PJ=+{nK@?VfSLL7(M^* zGrtBRZHtdM5|EYTFe|n?)tB7XoMzb@`jB~Ev*6-H!^MY?7hbKK*9(=(daX}M15Y%^ zn@cYQR<-m2jDNF>yyH00L%-oFuuXdY2{0Fczh7Btg34T5CrZf8)LlWQtS?(mb+N2!Hiw zk~3iLT?MxK1aG`=WOQ@8v);OQkXh~h_&J7rg7N@6yuvQhA#+5Mwe8byaos=&>%DFN zn9|m)?`_1N-S~|XdT6E7jrtW2C9doFAL?T#8;hV=Eo>?T|C+VEQO}9wMmZ@mC3O#W z?${{#uLZ6VPc+jOYArFl0OgzK5vMcGyL6I;kY7tHU_%$g)>PWU+~P7}G+zJtZ&ERq zNZEf?=FiiQl7wn;ROj#+83c(}%)5AQG(6V*7esk1Ct<^RrK0k>IXFi1vbyvd7#QCws_q-<-U0h_ 
z-jbVrh3xUe0(7lMb2qXu1-$?fGozHnHeFhVPT&>pkP(K*Nww-9SFS2{pU?41??uaM zsTm_^OgL&C zV3rT~s9l?Z5S^p@`xDYDi&)(7l0rKdCq9*b#9ioUrohV1iE82{eH0%$KI$dzvlu?XE8DGuYBP+G*GwB%-iwuc&E-8npUzu~BRjr;jG&jBke;BkI7{zUThld(K=h#IP~$quo{b5U znosrD1NX^Bf>3Q&C&RIl?nNddwFAUcjOq_Vh zoVB>v@Y!bOqoh$2kyX%SW4w&}(vs;o@yY~!gz`YWV5V7H;q6W5Q`J)u;}b1I_f0pW zXF}@D!Pnnq6ZR975oiGU2Tq6$$w=_mH7iSh6tCmG0lp7jb_k~JkAZSx?ebsyUFRVu zV4gWHgBpYE0u1!3C8r0?dDjlw!+$*gcr3Zz%(ILml@n}__$jsXvtJ@~m;RDT|3 zUEwXJBU{ukc^)dzRe&08@TYFkb4K^}@cpi5t_Yz@O&CfFqo55i_`48nK^V#47kZB% z#UPg!m5U6&?}TCC&zBt2>q%vvOLMm50>TOk< zIjg%F%JHGS(#?#z#*yY@)WDR9pX};jA5|Y}lNytj_)}Yd^CdC@D;f^4o@H?*uCMY| z!eT0KXVz6FsJ!~m=T$@nddIeae?*C0CL50F)N-%ZcqR}_I_Ing{MqCfE5Wo=JeB--Pn{DGL=wSdD;Ex! ziB=+y9cGb?x86Ug&?<9KnkSw51}^gJZ=e1=&e>TRew+2ZM2lu)MVp~X5lFe|?BZbTe1Dpg9-M(RDQMCkFIc-ouQIWXNBvok=@(Wnp z*SUzfqVUE@w`0 zBIP4*BeD9R#-aZa*8Ob0Q^kXzY;@#?SMsWhS9Y2= zi&x*L3%5`=s&3?z$5-VNq^^`me>c}(!_K4&8pHIb;Gu{j45E8HS6A)$4|{{P1WPrX z0`M$7otb`2sb~{=7UhoFBL=OPZ;bTxo3pYPTf$N340#Ii2h1K(=JkP^SMt1%<89D zO3xC0erPg_^}n8RBNL#zyy~7k_YKXR%kEvKV3}0PHvEex{~%U`4)}bn>+Vjzyw*c$ zZmC8aZuuE9jU$k&EUj+F;B%~+z?7CGoj+^e6!ALHm9E%I0e@s4?YR~Jj2kR}tw@{1Qd?FnT4{fpG zgZThi?Ld@Z*-pSQh6gIfPf8E0x=;lV{!c6a)Fd`X*(GEmRjaj|W`yf%YojCuK_r%^ zw6lV3?XTAQMA`?Ilpd`uhC%vF3t`18s zqhPV7_~`{J+aq;E#*J`ukWsClwAFJzuz&%eI!P$^&^Jb|;gF^yZDn7)BRF!6J$HNq zGUNT3;x?*Sd^sgEw?I2yio9n5%+OVAu1&*48`MWRhyyk4Poc)ID(<9K$0-jnh@cac z^CH3R8#CI>5=P}o%yp-Xw5jO4J^xVF?MIbf$OXZtYh~rP5|!%an>_q5!7id_xzp%) zLfZ7Uq{7YeQXUXF39R|4uf1ZwQuC17TSfgOOQGSenCREy-Fo7I)J#mdIx=d*_9rQM zn-UO{ACAN-pE)&34PFKDQsSn$v~)^6wp5jMt*Fn98TLi&0qIyGpk+Deso{Lte&x30 z8v`MPlG_EMCa-|vldhv&8-|#S=ofp-%KHwFCj1o1`_6$KK$r&G&(yiO0F)_XPf))iqD3c+}`qrj3b_jDzF7s zJsKb)LNEZvtxy;Yy9>_SuG5AqwPkZ!g|*7lAGCBN;;sAM+Ppmks@o_h`1!{-c-IyS zA{>F<->?bTBRwDIvq}#4g?$APH&6g?+&lc05B=rx1Zv&+N7{ z?|o#MNNd{u?%eKPR<$nvCoW-^1)-G5E9LM7HtyY)7I!SpFxRYb8oAq__eKV#htUGV z7t)3TS+1H_pt^LTy;_;5T|)=`JKX5X+XYO2i3#RYZ}8=7k3Bv>PRFtd!9?qE=l55u zT4ao&Ye~G<$CxC~hlJGs9II=gm7VWGm^y!vNJ7%$Vf4>(U-J&ABVpejyr=wea!!vF 
zdZwb~j(0_SsgVU9AhY<3*VcjmaknQXz9BHYpBemYtWBt%`?+1HdS^i{P!9JULp+h! z$rDDjWfb$NbW46;p`25tN+QOUk=Uth1Qs5|#$4uZa^i2_MG@TTj<7WRMhx5{Qaz8~ z#Gk7f9+`e!ina~ctlXR!%)5UG8Ors2pbyJe@X86uyT2^Wp}8%+@Lh>mO)bap^J!|H z^J(Tayv+6H>3yKNSd1)(xMrbvo7USEpva)_h_BW*D^ma4JG6hSG zU3}&upvoulCNg7Xb@pLoJkeP|gnDi5j+cA&$^U?WU>xHI}`zg z(UAO)@5!SFY9Ivp8<8jLCc65?=F1fv^|5nD_T>)zc#ABrGFvFU>LQ1eGe~xl9NWY& zNr;GwK0U*ka<~^jwSMUd{ydb)W`Vn#X?*abNVmH@m$J5zq*Z51#^(r-%roXv*X>u~ zaar3On9@#OcH8Db^-n)o)JoKtGP;`Gxq&RW@hCfF@-!xhcv+Duo|kTZz$hvvh3ETQ z(;AaR`p5F`&i1{5i0E|%e<8XBpu>n0GS_R76uNbDG%tJ3J_0u!r5}poHD z9j6UGjelo*6qCfzY9?}ke+1^T7B9}(SLmuHy0*FhbTS||?blvJPW}TUz-R3fBa79w|F2^>hU2#Y!_Kj2wu%%kkYh>R^&j#&BhNA7< z@&CoH5bU^)KB_sY$bt6An}*%#QgqjxYYw+9iJD3@{C#$*1Fz{>rW(D~TeIzPD=ub3 zcC{|T*!(L|P+l%o73K6?Cp?{zDDjr%OYM3%m|4|9s-eiqrpL^)fJpKVd*-Jdhs}Y! zGaOSv5#VBlpX!q*qij<=Mn{)GaD*3VwtmA_B&fMru%vEHwdBQB-Wjh;r846xYJ{z) zob1n~@mWYO>q4ev=6k6Kg^==e>=1|A$4yp^kje=WB|ag~Nwl+PgOFwsUm?-+xxLFK z{_X6&L{+XPajbQnwKi2DSQ?_39DYxO4vb-%%uXsT^q8Z5_DWzAVh&_*>NM0Nig=4m zMdq{&-2RDcxp8ypNFu$*Qzk<&Lzxuv-_v=KtT6${+U8f-(H1V@b5nF%z z!FJ1cKK{Hk&#>!;{Q(=SY7z`~;AZdJAJ0qumbxbUi73f_cwzIV{B!+HcpWh zgk6@2Te}<)FolcyhvwoV2=@mu@mC4Y~W_p*=<7vGykVm;JS)9tM&*)bp&( zBLr?9)k19B#k^CUeMHl-`*p=0=*S>sr}NmUw>-+I7gyyw-=^o@OXVSqFbs&G2vW)e z-=>mF-i_TXeuL9i-KJLf=y7a|T(#-?F-(am_UcN4)qO5etx}x^W$MHmjoBSm42B|) z{G_$u^7MVRwTqKd@m`@(v6oaY+J3n#w+g%cfr@IcPtjRSVzGG149q_$5DH~)SKy(k z-BVUmMDSKkG{)=~KTO7*cM_6}qu)X*nJ7M8krB@9W0M+7-FTX$PdnMT51m96a4-En zA-{ceVBC=l>}45$dT0-3pHTh_p@jqm22a#pdnn(+Ot^ytzyAHQsL=7|_eS}3a2G!2 z^+|+$dgP9*!(Z!RRx`#pdtk;-SM{kSuewIgXBii!n7v6RB>&-`bcM#}TDVPX@2{G{>?=!>l?ST zB$+*D9zHVSx;NXs^^IM2D=Y6W?kh>^@<|{QWuDD0FAr-EOa9+vDF6mSrDpl%;+F)z zJ*%saC2kFF&n0ekqQRTAhgo7|HwkN}5*-f&wT*)PYp%^^S!X8ib*H)fPsIJsBY61W zD-*-h=gB-?Z+Vu6hjrTG@b=L@nH-eDIGhQeSC%gwKEyeo&9hjkayVRf@pS{&?h2#2 zY=ef5{rY}qnwpeOG*?%O%!TY?S3kG3-bZ~6c?qCAIxJ4>I$J=$4&n>$w+ILt$qi|8 z&J#DBZ9G`SFN#CwW9c zy9&?do34`HdEo^1C;sythp;TyChX&oUYdmUr4y$*@BkhN>FTNO@>=DUdE}&Qk~;Z+ 
zxH{{%sG|1W(~Q)BbPkAwG*Uy3v`TlEs0`g9F?5%xz|h?--6<$N(p{26cXQ@F*SXGl zzi0me`-i=+wVw4n>%KoXp}*mFp^4Yd_lBG6ZTr_Bj83Fta7}C-*AZVFY>P}+Vfy`f znFX&!hscrda~yZKHW^Pa@=e{ZL^^@2G5*9w=Neo7#>aCMzO21+3$1zmDg(@o&Kb5x zh8YzrK~u=TC`C`FEvWNpiRZ@&h8v@LBo+D57i5ZhwM*5K&x%=1)?MOJZ8%A}dU-Pa zYK#-Mv$W_v226KxnXPR_7pzQ4cfU6lENvL{SXPI75~}xzENQ<-laRWf8@T!CyN|KC zK~>Ad_d0T^cOWZ>00)a3?&plI{o0(TsaXh%r#Hc(GFXDItO^AO$#n@PW_49avUbov zJ^i#G`tt*4vPgOHX&RqbNbdq)<}AJAeBGGeYW{&ls@s`fu1mAe+_Vf(Ccu`gMuz&G+NW$gK`1l78;M_dV>> zNA=&Gr|EL&BB-T2T{S_@{F4S918Li6p%=3wUq! zrx~yzkU68o*1>g11-+SA7EJuMYxXmnU&B<*dgYc}jk)U-Q#k1P6fZpu7ITWy?2nPZjjI&=?NpVBF!g&pe4%bWu~ zzn6BGt7REE+~4hsDjksX3*XXuFI29)ts0%)qq5nZ0%4{KYb|+zb~5G%R9eSwiYl+W z%&{&IDiuFI#5lV&$X!o+Mr0v|m1h)r|+uT7l8m0=hJNiqJDD6ir0G|(zV_90!@gd&g zvF551^w5IGY5LstOns);n1dd0l{vt_pUG~tl=}kG#-V{Mksd>(-kSLv|{gGHinC zsksP}0v}`Bbz|Eo$hwU!O`0WY6WV(WJ+*N(a9t;fy(=)BQ@N$5B`W8=7g~Gz34rl; z{_7zZtU3+-!1o(`iAfYAbIS_AR_pq;jbrs#b!qp}x2Z6Cez+yJq&9ym;HWqpD!tb2 zWf@m|kWh$kNDgClsBF%68y3e{WhOR%_DGQx%VUhVjrb!dXd^K}K32lc!t`)c(8zNw znIDVB7SBeOM81s2+l(p&B9&d#0o#{DjNH-#g#n&kI+M75!)T$hZ^eY6fo0o&5V)Au-54=?fnn3`%zLLzSgxocc z8DWy|)_K5iqxQc;qeAr|t>U>yBA$4A?PTUwtTJ;_68}ip4 zoI8@a(9J|7caG=CVU6B=wEtyMh1QO^lca+jIUFp9Ii7$EARgZK4@HlqmI+lmeK*FJUlfuZz$km?{ zcdaKBe=G$De}&g`(Ye*c|KUP-2e@c6xP+>mrVkfpj#d5-$bI)JpNbA#Q@gC2VXK2L zW393)Uok1Gj~~voolpvYBaRHufy*>C-)?MH&KbxGkCaOlzFvU#W)pd(e}&4BND~^X z=mmG%w7s#*#D^4_Iqz25+bvq72JJ?fFLN}Mo$J;VOcw~C|Eizol4iAD#}2>Hhq*Ye zi-SdZ{lb=#c%Kf$eOj8Mkl)+*J6T#0;R<2uJDyDwKXs2&7V$5O`;|ui1F)kB$Eomo z7tUTRSJhi5*i~bCL*wFueCt7>|0WIR(|U5}41-i2&>oKhD{K45Sf>l`>cR@syV#)k zlT`)?I>S11y76r|3j!9Nwb}#e(pi@FU6hJA!}BF!P$OP{HpU0%^TC;TS#6<;^Vb>g zKla{9X}47gNJStipON|fNnjjbluJ&=QTEh;nDRJEW31fej+|Q^0_uw-*IdjkzNh)I zWITU67h@Z}s+&&xq5k?a8-FcXBjeWkK zH0|H1LnTz5UBBWv{6JGdDZYp&z=BP*L(3dPFG~K3QpR9XQXBxUVNv-8O!ZKG@FnVt|A5SVfN41sPz zHabN@vznY=ELP0S{mNU=$InWt_xk9cHXDMgOai~WOGYZhaUY8I{A$q>_+C4TbLrE;l_bZwi zll;0m@TH&ELw{_BnXbEv2EM|`Dmw=0N^K`nX(jsu{xWAn)wXpfoAr69W%P&NT1?v) 
ziuIByntBDT_Afcl&NAe+l4YUMn$_O60K>gSXvZ2gLjCyg8#H=_c!QR zSsB{!pcF;HAXih#P!i29y25t8ojE=H2jYO(DTZ4Ni)SxHvD^Vah#4n510$;#)j-lX zk&*+8_K3XQAcLByTihJ%^FC=vhuE<=b04Zs{9OI2!2^b`3XL!iDwqf1Jlm+t_YxEo z7NR(I|5jw08axhSl2P4ea3@p6vIp>xjN6qQu&n3&r=4j}Q3Sl15mI|~76HYv=hxag z&@{z*`zPd9`l8)*=30kl7tBH9c;huZ|19>Ra8GxtZBEml!VQFb4p*@}fR>Gw(IgM^ z3r3F5=60~m=ShJ7j+HUkPX*$#TcVB7?`7&pe6}K9wwv(8+fIa)wk#LcU4II^6qs@* zzs2*cGSH?%jK9wG5kfB0s;JW>XYQ08A= zp;4M+AF2DuP=3mFteWrVJ4KZ!Xw(XWVDBjn(k4<9>wwA1iFJm_{#^V%!~$gjOK@9D z+3{n$60L}TXM~7u-ZlWk!(&)N2I2EB{PP2ykIj7@9PEt>G+hIGEUal37N!@M&yQ;n z=H34+@|NnwFI=8xo(6$Nfu7VAARfWy^*7ni0lL1+J8f(zLcX>A zQk}SE>$zLP2GRF(dSa43`@>yQ&QC0NBAq^nQK@@*BN{2FV@JJ($dXrvj$Wm!ecuNK zTQ?WGEYY&7*wsIt?7qlGSlPqx`1^>5639i-{^f#WW0mhF=cVMznWV><Ptn$o%3w}K$uU`IYH$eR@u*-HF@0d~E{XZ7C&@cs|hataV*(XA)`Uz<`;_hZ6FHgiPd z>BH5Eqom)R1bMnQf_7@TLx@Lm%kb4q`zo#7>z(;u;Zh|>Fu!nJRaA#@?O+6fEoz3g z)C&~ZHTf+6t<7GXbF-^w7BM_KwG^fYa+{g{{NO%<(iqGg@yhH~>fQHVWi&hWsAhm2 zi{i2x^*XML4q~frR9cZ+<@84Mv>mvkW^Zr9K^TFsG7D^>ukpKKQeQQd%5;mhjrle( zlGNI#R5%$UzHFy~YPVOW>rlKw&_ZzhvL)n-^Qb;NOMjzGLJky=*3C%d5I=ZfPQ1>B zCXbW#Ra(w(ll~9-YeT)n@B|5SlDoS{2i;s33|8C{&Wjx5hQNZ2EmuFh2cF;f#`Y6RhlPPh^$er zyue`|yIAA29fW3I$`>Z+^34Wl2pN~E@R47Wr~D|jlhy&BbJ@Qr zi2#G=!sgN^B=7Zy#Uzoh2$gSg4-u5$2*o@MCiG$5bd=ylGEr9c!-o>@U$6I&2K;P> zSTO^W>ZBRMJ?e*qZOeFuL^+q%6DSXtthHhV5 z7}gqh7TsGTR4o-PF;@iOkE!F0HcGlk+0Vl6ASUaN&f^9bDHLaiN7!dzXelF|wR=l$ z#x0)0kG3s^Zb>*g)0o$%a-pV7~xzc%)^FCP|OHrRC5TmOGa? 
z-jr3!Fd)Q=AP_ISu+44OpY$^4?)^x=kyjzS+KVks<`>C2EO$b`W~%F@^<`#n=5F?F zhUtY^1zx-kCl4SMpjRedSMh$TLDBVYzw%b1s$SD8aL2%X))nvA;bqqYwpV3lEMSm| zna54zsc?$dE_;=Fz3|r$*76*6{>u7>NFo4)*e(3cyk^JGm+np^bMFSifIX(a#r-&mF_OrK{w7!6*I41|#j1DB ziy+DLAhvc#xp%#H5?Z^=A>5S*LF4ilf!1SU5oqksTgEBvSiuozHunD-tG00;Ojc7i zocnN|5fxZ}?P6l*zBlN?1*`Hpfz0x|gE)jR5)!A_@b$q^O!0UQi8P_)l3y17$-=@* z11cYhPO32pld2TDevbHL>|7>4{RMh0S>yIRUlz$0NfVgmo^oikYn-bNi~DRlBbL$N z-a>z+C4x`_W5CD!*I+0a+6DD9{kqTOLWf<{*8Te+=`1aX5Z^F)z;`LE`#M2Hm)6Vk zHVQ8>^dWESk4L`Enc+%ocADUv8~?shhs%SBE3@h<(FQ!Xk;p*B!e!SUZzlpr%tLmD+pUTDecVw1iw*UJiBM4|SkNEV*Re z|6^N{nBN>qK2Y>OS2StDU<|mK9gzC`bh+|qMA^rve6ynn*?YK)(rfGK*bwjNg{`x} zdWKqVw$cv%1LQfN^gp4LJTkkp^&gpO$h!ZNb$$&wvwDn4qiXUx$z6&6Zvh-663ua& zZ+33(#B)o2^=?%l>qgnjokfBa9e*V_^uzLWe8s|v<-onkLxtmmuFp`~QB7mpCd1ut zMX3);N<$tKDkAz-D_2KoUU_y5$P1t81GlB4e-CDDD?%o>E%7+`g^gKws_oaY!uzZK zn;J+Fk%N})75j{S>;*A;Uo~yVFYOs^hhjz*l;umbMrGnZGcJdtHfmx1la59YvntbV z=WsKtXdzDSiq?(0;IC~@2=dma8uGh2|EH15iKi9EpJq2Bs1Yg1ku5%>!P9>zeopN% zw$lc3FWG7Tbmj@`GQBv>k42UVn@WAa_Tv)AdE?T2Kuy3U z=x; z?iQF^q1mo_pz-$43mKea!aVwWYvIPNK}!#f_IAyf7g7XWYqNZ=A3}rLcNivHG=;T5 zO#Bjs-4UcaeO@0A-gkS}zZm|_v0@xd&H72lU0T{EQ`z9Xv+pABnqd2!Gw!_Q6y}*2 zG$cFW_1~@)9nA5U7TdR6$*ws{NfN2Q`S`vc#96Qc6h6t=_qCCV|5jA0e*>Si+mode z_#}X2SJMEWa*U1O7W_qpmuBa$Sz@he=-IzASGAR>aRD-Q4YSb^Ej!OImH)6$*A$=8 z2cFSt-Y^EF$@57c7kXwO@yWk zH2JOGA>hi8@qWHh&fj6_~U6@i+>sQI{$UY8(p% z_pb5Bm=_G~GQd9n^{a26awdGPC61FE}g4R>dF;96`C`gPQUd^XDv<1Wh#5E?1c0Qn z6QikQ6RzoNO4WToo>+a1*M=xN@n4d}@Vtu3-0Ge|q%Y)m4@NiFszo@YmEMtL@bgYn z$5&*Z>LmhW=>!eNFAX$J`Z*}uuD=F$lpAnw_P%7oV3~DRV`m8Zz;NVjq7|Po=nqbI zmVQN}fC>UJZv0eF9m|7-Z$$GQ+&C~c{BF&rj2l3S*W#FJrjL-wxy~!aJR?5H_|dPb z=PrrzY^)Pntp1&OU=vaDkp9Fhv4k*e%yFJle_-`yC@*^!uUba|ef%gG@B8%`BP~Qv zqNv9QrE&=HdLa=DyEnAzl=>>%2G%cBJj?rCCP_;&|4HyO{JVr69(e_`jjOKhbH7+V z9Q)+1S1^>+_A7vHiIn1VdxI zYn5-{3T`fVG*bb>FfDq0WvlGA&KY-uOy%6`4F*fL1spM|hVP`FnZh1IW{jU-x?-*U zlJ;e&014)d=fANm&UeqcLKkLZ1Rxvcr>E}T>LJB@NS6sw=-a?d={n#VUiA!4T(K(5 z&x`g!ir5qv+=H2($)rqK@GS5uNnZN*dUs{9vhLZ>3DI*Y+*Y`kLL#v@oeM-vU)*}5 
z9&MM1VJH+#EKJfj?&+f<_5Kka8}_1bOAko272@bK^~hwR*W*8%{# zUVo0tI?9*2EW9c`7`t+mLsEt%j*Mbl+X9jm=Jb!1sfC6 zMLovMq4x+y22i-!<7u+JrDgBX{bZ5U<|amwYK9A2>eH#&qY^LZNeJ|K9R487``<}G zk1u<>9RqjFzdtu*itf6wuoeyIT?dLE+kZLVX<70{+F$lN!oH_HwrnODnY7Za(&F6* zOJ_H~PEo#Jy^C^WEdL?5VujxZbN*`z!N zrS1_TMtL`4e#He9h7R+bX7~Q{v}R}0t;p%}_Vv&lgPP3_Zk<-$3c-Vax5FJ*84wFf z9{=_@y+eH6>uVghQ({P6tgDyp$$s~aR5kImh#QqY_=FU;dUt^#Mbx=&RWrOpAZML> zhwG#W_Q{AN6*#W{_RIn6NpJO>7@{G%R`IJA7r1(IjM1@^nROm)?-Ng8{~M>$Va&ak8YnNc%_Qg@4yJPOSNaS1V9UXw>t(@83xdJEQeOpxyiSL2)e8Uq zRK9#@=mU0%P0E*2kOseJS*(j!h!)szwrkIg*T*8bW_)uP2@KYCTz!1a%7J{#aWIed z=VbEd{#zYCK58L_uX*uibU9ewL|V@MXtFQzbOU#LXH?x^0 zb<$i?-sE*t3dCk5^#5tocOwVfdc~d45;5@sTdQYCFxn zkZuOp;BW*t59l*@QRiT96%k`Gy=4OKI&*>eZoXBLV^rv#ele0K9Xx^VzXE-y?U(GK z7<{|i@-p3T3dH@K!Zk!Rzp9LvzkT4tfgvmE99F7q^Z=g#hn{3nEH#8y<+%KTkVtk1 zcbRAZB;H~Q$*cT?NO8&D__lsqiYqf2_qVc4?IbbC3|%>tknFQmA-RGjK$@tWCq#WR z#f6F)zS~RvRQlI_5+~%_^($$ry3&>x(-p;bTYie0IqyQ>B*Nx(#B~#s>ao+2dq=7B zO%<0PT!w{)LwNIi;3O?d7YgP^2%7P~iz;}YCW3FWJ!PE5Ck?!a$r6QDyucHU(BCm` z{zh&LWuwsT1d)I+w0M7QbZyXZ= z%aZyA5+{SV2+w1M{iugFBCi;YVrYpq!!L%3t^E19H{t&*dkGhKyai|OQ>5Iy_Jo00`8Q?rWL z@Cqu zE;M^o87h#XMOJQRN4nl1gH)5cv8>Epzu=<5#4g;4_N!&vD!&8UgRzHZh}F>05YJ1C zgKk{>ht8c~>_C7a=WIADuW&BN_gLBy7|>h;x$lLz-L`E@9@XG7cy%#&?=XQVSG&q* z8~qjpi`kU}^y9@O#VQ~&5dM2- zMf<%VpIRXgGh3g}zuXwzL^~e&Ig>le&)62n2CMvzS$%dQb$FTC1-K^tC5ZdviIiFAb)D#EjwKwbZ{W@W-Wai3dg`udN(zh=3V=__f~sPWDAdY!=i~Rw(6ME^jWcYK zETQpmiMW*$+aO+IEBYPg`aktSEGig+(#M@>y{tu8P`Tj?#6l2t74Y`kx;G7*zAMQ3 zD>K<|a~aNGs&^%4(5;rN5X9FZBc6mh(-LSaA6Q%DQcBGNP@uhiGaTkH_@}(syLza- z>XOXeLnX+r|5QuaVZ;vW%(2MAbxj&Az&h?r^EyU`GplWNo~M;-*-kGVhzH7C4*j4i zM=QvyJGESa=yJ)aW-|C~WZhU0`f{Tuko>e;eR$h$dLJyCn9--QR>~n6w}#J0D{q

XiYPn54?8TYoAba=chW z6yP{JVx}wVhd5%N=N*KkDBFGOVSRWx8))`wLBVk~hDrGkbiG*t?NRgdP0;I@C1+kK z{=tjXv~Zefn?TW5Q{O#W3S)N!H9#LS2|8cwe)!|*(VLwWZ3>>}jZodR`K3^*rur~D z-TtZGr?7#sysT_Ei9%K|4t0q%SSa*4EH~8Y<(96t8X2b!r45s>46?YW<6Tr3fNzxD z%)spzD7-}V5ci|7(EUfo+3vd3yU=ke>81eUS2?!6LBp>NYeOK4j-19pyTvUvwVs*p z7e2)b!1>bbr*t;W`|%FQ=;mu(XCJEB%Z&~>v9&&g@HblBY`!0!3~pd5_`)TVc~H<8K4s1Db3zu{sky1Ud407-Sp8} zRx;Y<5&|?ymU-gqI$i-?U+5sH^h9UKU34pdr0*JY+i+{5vRU1c! zX!v~u$)yCtj?rtqjTUrG@0C2(v^-n0U9)cx6I6d*PO?sID#=y=U9pZz0RxZLz5VuNANtAw42dt4>|qhN+_q{#Qo!+{ z{~)pvSX}VmVt^{~sk-EhzpjndA0Hw!S7?mT()dmd+9A7C9yH##O~ts|VatgZl|}W! zX_|gkvK1@dabxvF#|}mDY5P0h4)&Ulf2*?Zi=)A#W!Zx~@G!nv4#ds*T`g^6P}~x@ zTA&$n`fnfI)8FAft%7D9AEq6fpMHpRoW%NIP}FIfKJ4i&&@Lb2_#ll|Y|L)|CNFAJ z*^+H8qF!+Mu=4b9Y1?^edx`9PM8eEXx&FUyM(deAnxJePo#;Ak{Jr=ohtuxg0;~Pt z&?EK1e<-oYU>dE_<&I_iTv$g%9D}4y;|eV8Q0?4a@a*bN5#%?ll3>zQEW549@MY4t zD_0D*t7Im1z5pe$L5l3qHs>EDnjr16CayFG{YvXZ#%QGUpMS5sXdIjMuyV*);-;+p z$C_s0PdX?*SkIe7YPJG4$%?e5~mzHK$ujM_7@9$X2GWEJ|X`+Jqk8lS3K z%Ee6AwZVcB&z7o0Gw&z&jxusHrpo!e_%BpEP8Zj~IZY8`8TqEN*ykfdRcxVGBp>l! 
zKW{Zt{`vxd`v%+sE4;qti@UJEcYTJgMBEwyWUPz-e)cFCn$;_eL!7@V(&;|>QvlIy z_;(dP>r`@B=(E>?9Bolz87c$*EGqOEiUu{rKb=(x41MNWIuOT%702H={4M|bezs|f z-{V}*#XuwltCze2j|xlFW0=FN@2yhFV#sas60+HJjP)S%Q`8p`1DvzEh-T_UZ00Z8 zRR;E8536(9Ql*ZSt5XEKv`=YqgR-`i@}0622%55k>4h=l#pi;qWCqHBuvVa zZs)11%VjCh>ihUjz-FHB6Um1X%!$Eh(#M1?J@ewE zIKIfRzo4XHE-0uX(@D43$bU!BrcV5DB{#d~>z=#41Zn1nKkrL7^j)a}nd&(3J+;8Q zwLeZ@y2rX+g!O;%eq9@P%twK<$4tTJn<+@GXXC18IfUd8bH9^R0ze+k3jMe>IY|8( z0nkR7FlpKqG|V#?TBwCkMc`-|jKflT6{P zpcCxpqOp7y7`25b_}931F{A2R>4Nr!^jUuXZMETzzK)5Wyxx|Z>?eE9%-5bienua* z8+N*8L-=pLP8XLR+x%Q8T>s*A8H$U0d(Dt4BRe8pcRs5@$o;l8X_`)563UD*(Xt0T z96FCarXSf_q?>P>Ye<30Hru}UHvsP@x5@?E3u4PpAO-!MxBN9W&v_Z@FOr`sN1|pH z{+)ljD;X@AV_9mQq@U~k!vad{zPljtNkl314%y2UILcJiQ1oy=HeN z#H*+O{sF4@BF2J2gDJ}S?1LBvcXDqik3-b^n@pn0f>?B{SC>54cdy({p&OZ7QF}FP z#?P2~2D*==Mo|uR&4VJeruojIVvdU=uJ)88 z)+huW=$Ou23(FN+cmoUuiJ^tppWxB0k)VU$bpoPkxBwrLFX&>hn8NAH6B2N+ZTVx4 zJEq%WE4V!auC+V#a`Wb9YY5wY&7uRarTeu49re}eJFW}D@2Q>kZok_=$KUp!LvD7T zh7Rx6ldHU&6j1*s=)e}8GVGa|qI>H+LT0F_142p7E*@)wGQwxT=*Dm=3%fOy;}4d2 zXKQR2VrNY1UzK=D#geqU61CV%2B!%hIqG{NpGnSmC#4dWx4Wu>SU9Z8J-03N-0AkH zE3Pt@8wiCNrq6P0m6`+nZk0{Px;U+a4XX6o_jEdFw7 zj90hr-9YL8qfc28I3B&z?owc>>&SYKx_B~Qqkkw14Cs{btZlcoE%R77XuRovy?pd^ zxN_t3Nc40w=D*x=d~;`<-Q&4TKi0Y=q5*MwO3u7Y<4GKyKdmvc&vr~v-S}4DI6A*~ zG3L4a!H0AF{X3P`Bv5+8`c6!T)oe!_P)X6T;h@&+)O+2i&f^~{)f@p4159P|{SL)D5z#S|u7JqFU7+!5X-uH3)78;S22?>J9P6@j}wQs;qI zrtmBsewx3Rvsy3y_)+0y&oi#|=t{~l60a7^?^MA8~JpTfIbL@Oi=A zzS2z?y*c_sc1f7gyWNyOo~L6bDY~_zwEzy~N_uVAvn*&SPYT$WhlE5fNER9wKwLzNZ!&K*zVAbJX7AN-jH0`&8ArUT)?Lm*gyn( zM@+U_6e#r{={45`&7%R1@C!B8gU#w*hE7~Q2_=9g9yVu?nO_X^K{>0Et(Y@wMcRY3 zQ?<5p1)(v^#ufIKznQy6m_pj2|J&~y7wBX#q!%{9~Rh79vrELJqMQzQJ4^&(| zeKuwNyApzwsmcXu4q!d&k<@#Q0}48&imVy=YIyQG?6 zwdfP==*0QJcQu&~Vk7SUT9$>S0#fdSq3JseV0o+Z+`vTkE&FLu%Vod1Aa7(xD*)Gg zRtD4`v7SU??tI=N<>>Un_jBW}U||1hUOcfr*67s4XdQ1P>{?(=N~gcM9p6V=sx<^* z<9tB6lt+j4cD$^wNdxX2B#iq7ZHoqjD+CxLqHMurYz}eC@)6Uz`WUseLxW4hTn-$^ zpo8v{ksrCfw^DA+*~Jk5?Oaw_9YMW4@s9CpS30F__ZyUAL!tddZDU@XBW-C@W$H`$ 
z{ea-;IEJ*>FA4Wr=a%qsbkA|ikqKII2!*^pZH! zp<){1M>VKcMMuRV$FVJv@lE7<{c`YKzgds~c9lJ2C zU_+mtRT=v|A=8s)A;GC2`{IbtSA$BwZOgYBgo@wUdC@}p3uV2KuO$7FCO9HHDOOw?!zV^1x|AiBP%PuFPJY69Vrag6Hb$w*1ggNCcQ{w%yZ0 z{EDYb*3_9C@^}KEPep4`A>=nAi^XZu9v*X^8R-qipy{8nxS@z&Qs_nXMOXY+c0&3T z=xcyYsr#Lc=}C#si!E9|$?cT;gm~0hsGw~+yUkW9ch32^Ymxn?3Y(Zi>*%fePfvD; z)nK54d%G1{h=@p|$#GC8?_UX{rqqDVO*X^J)HEFL{yotig(&CVZ^0^Q?z{$CAhyuv zWlTl;3xkE9e-c=%{ASFh{U^c}gl$Z!5r?ScN1mN8<-pX-%H_>-f>9PbOV1Bh3wky+ zwhA9w{jZMhnTd0TluS>*NlsWmn~pP2m#;czg!FbCrTS;*6nw@* zZYqy%dG0P9pFAMTmko_`LuQXIYT=k_>FzH+-Yg$InEDKeFIx=6gRX$@hC_b+Kad)6 zZlU~0f|I*)RbzLG>!dIEwa4w zTZ_Kz!Zh zQf+3JK^^T#qervK`qSE{$Jh?7j4GScZ{Ph-t6g97QRm89tsqK)6YS_9E#Gn%kMGPS z`DslN$Jo1QR&&^T8Z1|;7Bs4v?8Ey-$EI*`sO52jcfIhp$|_Y^S&Tv@a}EUr(`dVW zDg!HVe|Yx;?VaF-Q^c0C4eH**CHSZUZRZ(e5< zGdx4?_4<`R{;H9T^KhoX%abT`J3u93lCL_^JqarAxg$&B1$w*6soXX27KpT>{=LKP4z#1{5)hG z3!17ZIu)>yHMg<0S?KYvizZfJDd7gURzb_VjejW4#weX~0tFQSa*xAr zAJ`?VCaA^dkYFta!m!94q2C|viZ_~W3lIl}x({GINvF%Kd3(dl>lRx<`x4_Fp@lh* zqH}@7F*vwNj6u(zTrS$Wc2kDk)2*@vK3FVB!^7yL@D-&lmpp!`$vqDH%d`WH;$fKv z-Y?dPwdC1oh06`r<2UF$eRC8AnA`2%K3%0m1DV-P{i3a7!h=8Nyk`6FbwmD_>U`wD z4-b<~Cb)&7D6)$%@h86OmJQHPXXaLSoa)z+k^+*Y@K}ycDW*twhEDOfv9?LW)K-0C zdTl)^t;bEWscpNa{6(rhB%>DSKG^L&LX2SA=jkJiZ<6V_ ze|^~d-le}__RY&Nm67e%8>d*F_{*U-_Gubc(3%^IKuyVjPwSm7gh9$Bc2kB+4sAD| zpPQ0X;Yv!j#+6s>K5&k^d~k5IKiXN!`?ppUJizd!uj6=Ejiy6c>hUCgLGo&mbh-Jt zlkcd~tyW-XZ4@J&Y=xp-Qq$Tka^K|0w-vwSvHR(9td0koq0^o{vVO89(m4maYxEcA z+D$>freJFM{D4u}`Iz6CJ;1VKUM^QF)kOcryRHG47Ek4?weJ zWO^9(fG~PJxVY5?&D7% z?dx~^w3}JFq45)NY=$Jk@Evr}3JUUc%d3pe{%#2GZT#!z${1OgFQJ_Cnysl20D3J- zzg?oYsXh)R^UXT3ruQwzPQESR76anSHugRqI*MAZ+|Os#`E*|E?L+RLI+v*vh~Cru ziwTH7$rm`8NGZox)wK5bR})NM`0i^|ybV0H!$$|?1Sd{WHt)e|W$OmkE+Dak0vhnI ziR5@PAL>Bc_LBquakHU4v)c4U(ceE!Ws&6*4FO;8fFpnrJ$F_E1NGmI*8Y*L;}dlr zTXve>UFX96($QZIFb|dWRxJHAto>x0I~CyFOE&Iz&zT>M?WylSWP5knBjVe`a67Eh za3MFydP~eu3OUtswNHGSa(nuQOu?c4<+>Nk0=K`+u}&HJg#3v7a#uImOAEWx+rkMj0;QZ3ijMr zSR#QyLy7V3xSh6s%+b*SF)ByWdZ%JAou`#h!&kPgvmOy@BO-Lfn+t7PlDJ_P^jZ*i 
zY9K3=C9Cm|!$c^Nd_8&5<79Hi{gTH_#uJ;I$Ub^ch(ovURQIT_$9Uc!EzpN3PGsw) z5ZIoBru(I-CQS&W`Ojoju1V2c!@mTbwBmzFa_??)5s!3=(>U6-=#r zbX^PgU-gc}-h5;O@5QzKTf(s1HT{l76XeuoI?GH`PQb*5QHKH-T8jAShLH0)Il;G; zU`b1A*rnN;aW98| z(6>AJ8IsuE>0Q$;rGkNyd;dO_#eASO-|zwxSo-+e4E6TS^{ejuAGR@2J5s$#_-BF_ z!-1k2LED{YX`H;7OWbgOUTHaf7((W2M#*CU;e#8>=q_F95laSDs@J_O;PkD7ka*c)S zc;jl$fJwOC`P4{{d@D(>#(+@b7?HRI<_2-JF2C)j-SeJaj;}SJR0s zwn*Re(5Gb{|5L&aiUuPUAHHS|nv*y9GfsdtCWR#2;9^PrHoRURjlpe;2e3*o?Oh}J zY4FcTlP>DqXr3&xX0OW$So#J*RxDJ~{C!Qxs~3uSY0?Ev$B@ThCw-XL!$;pY#FtB` zkg*FR)`1*%l?S!-Dgzo`bg7KVnaa&v{zwBUL9;JXd7ntEJ8=lE z8T|>Isdwzr}RWvH#x2_xF!1H?1R>dV4%muTi0+E zK@`!crS=^wyJ{zggITcrn4#oZa#FIpQd6Z1qnjo7dI7bT{&KMNYJB3;%7(-}pj zC{Nz@)|VGt?{@gw2awc~GpYO=&E=vYjEkYgrTFf$@Sd&ET{85kzEJI93oHR}rNqqJ zxtm9uh}~vfL37qpTq%R>Kar$t#;ajlc&Qx!v!1*-j=eBOOPf~B5F>)NA!a|6N`6)!yaM{yXZk2Tk;o zldBf5Z7hylj<(0b4ZFqig@)WK^LkUe--k*r4a;Ats=NHAhgFqu#?nq2@1w{EzT%!Q zB(K@De;(tM_RL@;Cm+nv(EJRKQya z{`bo)XyXbMF(|BYNa5Y_P$!U#1~DA9P<5Vytw&Y=cURUuM#f!6^`a2{p9B(ua48g_ zhaIHN?HE@iZBri#v*@}-#Xk?Zd3gAuMYYu^%XNxc{U> zs1`Z60vDgJCc3A+Lk1#hu%rgweI08j^7WEzaQFWe-M-(=c3-UAH-3W%98nSD_?s%} zaOJq1$u(P2*`z*L)3-VJHv@(#w)ZM@@6rF3)#V;&F_0qT9!7HxkmlaeC&zXI)xe#7 zBgBq>%AbEV6gI2{c@e{vED@|EW9pH0aleW3nYHm|8J&E!FMHs`KfJ9j6wVkCNyWp$ z4Mk!=Q##D(3N1|2A%}_Jwyb3eZEJff%Aq>p@e^BcGgTB1dqp3}=A7cJ5`Sggrew4x zSFbE~plRxa1b&ux3j57nlcMmqP+wlC7X3#1fhQd4$}&S^`4jg!O)7H+&tX)_w;fS4 zGPvtm*mN-g_hrv$+doF~lVhYv+|BVVRoNTG_nTR~qF!iL*$!W`3|S2ffTR4hr3cmb zG|>mfR26uRcE9(?t$8-iDZ)g`oyL$I`|sR+*(&hdbdtoql|8Jhk8y_#LN%Ux-p%eQ zq#El``FD(e)Oi22M#hOkqm<%ea)CNWLyshz23!z0%$PoXp_rC&2zLg-cU#{-QK!|x?0 z#VLC5pI}YCD@O%f`L9{mt+tl#d zXhRH;Y1ht@IE=VJA*Fxc>n#O1Cqi;VKli0p@~8+oZ+BIgP!@VpxL%h)B8bQ&bvq?N zVLrCz`ZMbuEveIL3tl3BDHJGe9s$=pOEXj|J;WjKVRcOP}g=_F>Gn2PEU;c z4@PCViz@ocH1y>6rt*3or3g0k5kw+2k0(n4o1)r|E{xseP%V@7oGaWBzywn0ainQN zR$O}5Cg3-lKF9S#kM|&YRxVNGi1x$%_V`MdvHc?|+_uD1web#=GmAJ3wp?L=Jbt9&6d4PN_x>^9$r$Psir( zUXWg!lY^drYkQQmf!EqXLbtXr$N8Q_7T#-~>92quU;|#jCbDJRe|xy#K)gDYYf}WI 
zGzS3!n77b|dBFCC*^sx&HO_!H)@~p{gK3xR)Xdi?htbS`Q9_6tqJNDaZ?_&U4noe_ z*5Z?wr%cSZ02`b-z#RCjg0gK*$@%vCoG|H9f3IA|uB?hLzV4%1 z&-|Vnt9lpo$2OS$@9Mshcyyn$Zw9&EqlAkIKIE}K*D{WK<~7&e=XRZZ$<5gTl*zC@ z!;bikm_hgK_A9!1{8<|VSrUZ_OjyK-gF{5l53s*=qwPbd{@EUa*Pr-6tz5SHHUqQg z&+~^;Im>Rr^qy9jm)|t+Ohc`+eIx z!5((;1t0sfXQktL)s@lN zmDHgaP+LCviyp+nXb_^TY+l1|g4(DJot%4Hp@IK{c-2GCq0CaXQprmAv&=*TBT3df zr&Ra1*H~Yw~i*j992RJ`&EdKWN+0Y#XO0&$!I?s(!gu&oi+A z3Twfc%aiYQVL?G|&uJ#nNO!xGazj+p5k7wiyYeSrSE{mJA?_(LombZy8@5*U=$zNj zvaH$d+>K5!o~y0&D#BZUFmQsr9{mXR3zOT_`n=g6}LnZolu)%R(haQGCW z9QhI?n^-r{^)v~Ov`On+;@b@T72NtPR%d5Uh|w~K8*oFHLGJKq((O#qJVJ3HTIV9X zy4(=Ezn!2t_Z@lYETa-i5ZtlQ+H*h0pEjHoZ&Io8mDlqvqx(qQ2k_fZ5nQ!ireQ1# z2cnIV3|TUG0akP|%Jx&bd+kzR+NY0s^HSH<^ZthOC3IH1UN_Nwgy4i?f@ZGo+i)r;@&nC+q-J#tYSsA(ox&UqJCnaV3eili+ z;zAFN=it}(_K$S0`@fG7u&1l^^hc2VXFe}*)@Cp2i=1$Ul2)j$-=RK14I^b_gdJ6u zMs|~GTVFH_H_ChN8-j$4U34pRzS*%@`bXKL-(#OxTo9x3?+NnXifn-lDae$$0?c-E zz+nd>mqmf$=Z&VU@Bm1hq*&#d+nP(28o$soD_QVc0L`RIzaB8bvB_rAUe4c2-+F82 zZw5F(`yT;H;b6E0sd#Heu%LpR8uV32SK7us7p2@43bnhzu%3a${js-SVPIuq$*RZl zGrseZw;J`wnH78_vWz;4ffB*8EZoH#JWp8?{?*1N6)uMxV7en%Ce;F=O1*TZG9wOh zO_IuDo?OMEbNpVSDp3WMAuQ}QLZdVRil5v;SSdlqCrA2KEtMC-T5ff0N3~KMAm^1b zsb~8K**A;&1R{aZv(c*n9WcoS`yr&a`M(#x5vHlv63d)x;aYn0$lA`W+oBn~Fzs5DG4h z&?7Tvf>%rf`=`WVd4#CsY$^E73w{QGjo9hrS4VEg1&pp=S6bvg`#um|vix9z0(fJ#2Dg_Uh5}uVOcO3#PtY5t8J}Q33pxKBL~mHVTvL1Rm94HczbtN>3lljyHEp>#R~fpM=1r~F z30xay8{+uR_KFpds_}T({f!&mC%;HyZr{H^Wdk&ABUA~0zBh+wta;7!Hoopt_PPKc z9xJ*%)`c%041s%f+){2648<`Bq+$NRtDlk9rI*laY|(Jf zjeuW9TbtpyDcaGf*695bfD0VIK=1u5wq4|?h;$%yZh0Rc_HW4r%`!xn+(UR#ae^pU zV)})KIQaFS_d3-BdbtN|^{|~0+~sy-N^8x0U~PBiZ6;NNK2MfrCa8ORnPGkW(b9c> z%JDFZuCaVONEm^kzP|bBdpbVMR8yLr1Xa^)SZEf(9rP@q4-3Iz+WqduG>N43GWwn6 zZfP?}eUjILsSd(qA|?oem~^aozR=C&3>TQsma2#nWhm)ve5A$ZQvefKl%ls_yb{Vp zMcccuXrp3(JpBCiRtK`QxjPTriQUGEahosS@3r{^L->(onp~@V;SRnS#5L z?=b#)Iqo4QR6&r1B#_-3L_?*|%E``RFN`lVOlJ0_PttUuNW0q>z!1?|*%P@Gc2Hv} zc?Mz!^S6b2+yrVkDSQcI;f#1E+&ScpewFAuC@k;57*2;_U!kr(#a`&YH$#csk&H*W 
zYo;97U3{N9i=GkshD$ax=|00aM(&EXNxz$wdt!DXL2g$>(fZZc@TIm&{6J&kL6oh- z#!mwHAK|%oN0EPU`E6BzpQ}hV)@WpfB*#`CfmCTTegHinOc3xMKc`ghCG#-86n*r{ zz%Y<5M`Dhw5YcIsFp*qUm@$usjF{7A(Xy5YjW%`6C^D+)0%k9A&QNre;95+xg?mYY z9*8DtILip-E`!URhf#3PbLXtfp+~SCE0y9leMh*FrVH(h;u-G%gk#cUL2-BW)Su#v z>*ApLsqQjPD%#%5%+v{3yA=+V2hXgOHkAzSc%f=?CrXVm(TjpsAa}GzXNlC;3tnw|EQqZU#{P+Gl)L1 zZ+csoyX*R}WtnGi4J6}+{f-*$Fusxg5_fK!?a$g;=tn}bVMr~RDq4hY{Z8O?A*HM2 zvD%Pn7Lo}gSXN*tIj}o1ni7rd%}RtYzX5GBm6Zz+!)b|rHL7(j8 zh{(C)_0@;g#%B_Hv{eflmXSeNC3TxfP`GpWeA&V6#ja!4iycwty}yx zw;w5kKHLtowbGf842b3KK~^X8%0uC@3H9aCDLf$L_AnkWMJBcBg>9&w9xgkWdyVG> zo2;C6)Y!Q^-cKx!2!`+abr3NNo}=83_q#XA`~Z&Q)FZTe80`JJml zJ3@FEFmn>=8KJGcN;_T-@rR3leJ1;ZpkX;lKTMXUlwa`mdXel-J)*EAc1G4!VlI@$ zn5^`OhPYQX?(+%BeEGmMP#iQ?n(Hfj_Fm=1Q9VLCDY**SnushAyI>0NNr3S}Sj+b|*EM5RbzYr}{}88yU#&#yt_n&h+6baWI-gAnc$2Ny+41Z?t#_wWF!(h6 zY)VMLvHS^5Onzex>z#+Lk;p0Y5#Yn9rlpzD_JN&`T!1B%pg3D3cpb7vcd?# z(~$I%!y(J~p7Aa|b{)XJ7M#}Q{OQno7g$8 z1k4IL{Lu=qiF6+NcJF%f!Fv?4vf6O!_-8jIY3rfrORJ2F1Z+w#|Dp<;WL*PHrfw*-CQKj+KJS~La1g*A2oaO zCC6g7sWVCCIB>*F`uAF<>I=wgKtW1KeF&PSKxUoU99F3rWxrng-_p`Wr(}yCMLc0G zikde=Z|6N6hB8NvQ>3uujQIPu{E~Re{5*(4D~QyRcjRl6_RX|S&?uR?r>AJ8XFfrE z(;^W|5EI%{)jOAh z(`_LNNp;rcf>!r9!I~MEs(cy*`!T!1BSTUcE!&-)>MO^$PV9bLx+sNnT4 znAH`DG&g5$QXX5N55nAt={hfvax>)f$~@U!Bnp20WqVtF{;mFyjXatgj%{3e>?0CN zSx$Ka5gG|P)?p!?r3?{h*CNK8J63<`A5~WL=raKSg>D2l;g*U9oTpWQXiPx6x?0p+ z$Jha!SV1%V=cOFJaq01#(6Fd05k*act< zr>{Z(o1?_<(PQ@y6tc{>8yRXJx*dT z!-#6s6z4ozghIw(I80>B3`L4)te!YXIK>w7n8BvD%bPYSYWS1C5Jm$%NV;Tb%K7xU zg;7+zNYbx;KVZW%`?kURF`@e`|0OKDU5>jSRQ|*7^esR2WY*-ORY2AHn<-kb5&VWP z$eH@RJI6#~_ylX8Ci<{!`1f=oenattk0SAQcps1rf6PrF_1LZ|TF|fY*==9B%wK7z z3+1*Q+wyCL1ahPx_@s*jAIyHB0!nqB?mf^#M2a7k;UU~kuJaR(P7H`aq|*|`uc#$^ zxF4vqLRZF$`n)NAC&KPEi}kuQ_G(8*zoL!+X!<;#b3KPS)ct7Yq>*x)h6JN=ZA_4A zNL@q({_TBkS=kM2p=6JidD|xJlO&0No44`GKmJG2X0SEf+$Yn8ZdugK9Ogg*LC`Ge z|2Atfa>C~HgxUJM6aozaRJ{SrOAf+?FXSJDNOo&@+klS~$De}_l4r$@{u}u9(^dHR z_&B1l7Psedhf9h9C+#KemD~THaI2XdD*7#k;O#`IAkCeV__2xiUXg&1W~bIJi2kA{ 
z|0%M2qTmAjSFB^{=6CCgch5;q_c5xJ(_Ih`KZc_0pi;i)R!Kg?d2*Fp&%>iXJ0`F# zxGq5gFalBw;x@aUN>0XJunE1uvhso@M$X!Q$tF={47>v({V=6JYx#LciNxFmU!zm^B3Xk3_OW+7FiQjf0r7>-EZr04$#= zHf|DK;;|F@^``H)V^?IOLxYYSvToCQ;g%0%8&?_==?6DsaiRZy$bfwH6(MsCUxz~w zaoq9I1b5>XQzhj(m4GM8J5Kg}*5w?Y2Y60Na{lDyZ$PC+bzOfRD&=rXop`OkD-Nja z#JPXA{AhJrR(y#ySmfYU*XUU^U%E^~_DyZLM4G5#rFC_3Tu=3{ zBy+jB9RQ?BzfcU!??gbJfS*iriWx&@%SBi6H>_-L1oApqSg3n6En6z9*FCBuDzApW zVR2i8Fsz35CrOb<5qd;yC1bo9jxt#C8}mCv=0~vEDdO<-Yl#hWX5pyUzHiB;Vq)x3M@x zE<^8)bwN$7_+g*)G*Lv^R95CzDa)lM3;OT+DBV=T8L^k&9k1Fr{?OA5Snu1-zJ53r z0^q!hFaY;(n@<_F%UNYM|E*q@?%m2RDgW%HXAVOVU+meX4W`z7BcxroSTOE z1*r!%6{OPo=K-`yNc67CQ<`C@0R+@)WD-<(#J;GKNKfI=)J+WpdRp{L4r0=^jyK&F z^uKA3w`M0S5AOaH$%b-Mo2`4!wB)-8`z}o|B`YGOnBO!iRwdP&EeDjD=k}6wUje0h zPZgfF$UaLd7U$mR#eGNinH5fwP)E6;2#Pb^Cw#}SBAZLP)eg4uaNv1qzP8F&2iO}D zIH~ma^Fo;6(#G+vA!YX4K}Am%DDl?vk%~y9k3U`kO@5uH_ebFm!4a;z*LDxxc)|Nw zDC`RSfzL6UV=h6B`BLzHy!(F+s8gH}2cEJP0Px>0B?zlU?Ekbhp8xjUdYwBx%|XNQ zB*C@*Yea??IpI$pdmR;h%(v&VV9+i9{#;B6+?vw(l_zRSD&P2Aqxe3Ypn&+EeO zbepX=n4IfUy%s#`DB8Q(VyDvL5K@|{cZcAeETc+1=g*Cg2x`cE@eb~Vmg(e&U&YVpb1!ej^_h%Cr9YE z{Z5u9hdCz;WH0H^&AGP*?A7t|Z}MfB>v_z?R22zPq13>8*9kGmsp=$)+N35emB;tT z<)Y_0S1S949qJA1-_=okjcqr;2UcQsvkKuxSJCX{E-4S3BtdNW$0=Bxs(z(lX2GR^ z*UX21I~`k1IpQX{Nm$I7$M(^;nF({Ar#OnIt6fEI!UTC=m%|tpKBUsEVfSMO{s-px>rp%|0@BTgm zSq|O^VmG#T7@e3Co=-k^!HyiQ zf1{a*oyKIlf3TjAMH8?oA>Zue#x8TOvXRHz$&StAE2)}264WQhiLJhOs0umD4qeV_ zR&Pdj$_oXjO_EpKbO>qz>Apbkev#9s9p+34OS6S8@fvm`%Vf42M1Ahq1>l}f;gVP2 z4w%By^lDRZ)%-mLdTdp~dkcCFLkIlwYdbVE0o;?6GGBP9;-BXTP2AU#hu&WRcvfqZ zVOX-ToWlsb^veBy@d@ywQ80zqy7VuRA@a#yVG(=KZ0Sk?xb% zM2i3#Ih*p0f@+|!jybUZkroU6srqsf-v2WtYtI2Yj64Da<|yzCHLgn;u3#Ng*slzb zST1uw2@{S6mg9iC>ENDww?%B8_@oZ333y~`AvF%Bp;pS?eyqTg)GJnh{q6O!x)?zW zSod_1{;Q03#y!E1!zpUvpccwk1+&`tAfuGksBwE$!@#6@K#zU_ zi)@yO-q4W18NMLS+-n(*4RRGYtZr&8uhbY z5vtVmzC?AfEYf@mqHq6nU=nrPksi35`OteAn%}^+6))H4C%-|1VrXuSTR9GRDI|H7 z2^0YCLz&M$QlJkIu>TRpwoSQJ-eG^s7_*D%$;t^Z#NFM(rcRIoMDKVCza6l&!^6~y 
z5p%n~4nues@;>DAsScgA@1NC<{4+z~>)0`pL3n_g{*0d@k!_c)HTG(ag)-Q%2k081 zP@W*YAJ+E{|H~WKLhQa7pZEA5*kLrJAHVT zYZ3p1g!|G!;|JRYpz(X}`!0tN(>cxVt;p&EY9iSXCZ=pzO%dN9j_`THfd6ZTx#h<7yXjA{ONc!{jFxkxRePE6+s7QLr<^&pqI;!ub(@ z{Gw}$Y~#Xq>*kYq=d>z8YNOhL50@2VhMGoYD_*5%(w^*0GpDQne)()-9|u){+taoK~c5Oj-_4(*>WH6E}fbpP^B6#iX>JO6Li1+E_xMqA&G<0-G9b;Po$eKvoazv?&!i@}FSSCvo2mUvHJu-efo94LG zSwS@;ZNq@LYRnBZB9i7rA6;s=SkD)!DguaKj!bSF6kZjPXJ9NW9nCD^~l#}95f%s3tvbG4bK zR-zlG(^GoV-!DrOq<-ut8z25Ea8#KD_9}Q6qBz8y-8$zy1RLxEe{|;*5pT@H7 z+>{W2%6J6-dJ_*YR?#!fpBnA;gWqcsq0nWDPMg<6-q*jm zPtmpVmeGvBE3ER6Fn*@uN{aRlvWg_z+u z3j(H#si*J*1b#zYOheKw3%|{Za|Ks{00dcL6lQJ8CM^hY!Kc*-vJeWc5D$R1D|0aZ z#v4euuo<6{)fbVF1EohN>NKgaa<1DAJdZlqw2-CZ?)&Sc%`*&9BY&S34(P{RQcHr3 zH}VIe1DEWtQ!;)Eo(p4$etrs9*uW(Xy9>|Ro1UsIRMHuC=*N4!r$1Z~g;KQeZQ{De zJGAmR_AqQ){JrMnP%?w86{0wA9FP6xXFA8yq6cFePPCxb^3emb!*xfCnR)Z$@HYzBgOhW-!j(u)zASUJYYof1M! zvs~#MQn^N6{9ZC+KB8VSec!+ce7uF1Q%Nl0<~LLffW<|xV9bm~XsjN9ld&Muc&#%Yp3r=eXjrjDO0c z@o8Xwrw=Uc7FKA*#Gp^^2IRL+LSGxYz!?;Y$2s>kr94U7N6k9F1!va@J7-ADwP@6w z20h3qNn+PZ6ID^GiaZNjNIe{dObnt7MpV-j+J5-GQ`k7R_$#UJ>Rdy)^zB^8+pDI- zw6*X&RsO-N%t~%fAUQ+^ZW)!D1QD|m&mIx6$}yV<>lCKe6}8oaBhJ3}e>#Si6z*Ds zJXz0L9DnfgYzYzO;s?Cb+pGM;jTJfYNb8{!7-v89{>g~o1Bw!NiQC5>Pz^|zpTkoO z;yJCqoaM@gn^lA;n|keP5jcfPqz7MMVZ%)%8MXb#>6}QfmYs6kM=plXN2_4zWV_V(72Id!`jj`v8FHBk4<8kb1348|HngKZC0Wj zZtiJpC^3~FI~k&=X(;UeubR(#>fV*_V-c3;$L_)x&yn0+26CBV^hBH{RnCgD!?#cwjY_(9dzPJZ7-k{8qz(6t@JxI4b&i=)#RHRGtmk&prc zDfWyv{(2rg`U`+;0^*sZoWtEPiW-HoS@Yy7P9*_BuF3@Kk~RJOh1ACWPC|#$Bm$V385lMv_oBuJXl2Q6a>24l3w3%c1dA5|w`#cNYmEdP z98G;H&N{54z`WZ$-A7Yx#|(?J$4}BBpSOSap<`C$RfiG+-n@}wCJ^_M#4kYcOxVn9 z{R#vR@1l(2F!H}#oJ8CXe_X4bhcPRp;JUkrr{D5h3iJk8p;>rwJ z#T2kJGcu2K8qrU*p-)sNu6m{1Q!x}Q-0OU(j9QYG4x`)JatiR;hD}&Px?`Ao^TW>j z1)p0vE&6?fun_~Ui-9fi`vNUt#q+y@KvBYym@K9n8FE(l$0!y88o3YH4#8S9Nw$yjFZ z7hdK(guZU>!WMpy~7nyS#+rElT~HIjfT|ENCdT zDX!-WTpBa$GyFB&a6>8P)jrqKF-)U2WMJ@Y-0oPiHJO{X2QkjvDb)>&#mVTzh@rPdm3@}$%$lmThK!^XGuqCWj{P7W8Ijh zmDRMIyYErM0^+c37*9EkQPdwyjYkD?an}@*V7r02g}r7kPg=IHslKa3u9=mLtz_yg 
z>$G?2^CXtkw-sMRO=NPU>KXw1jn2w*rd|K;Hb-q&SYL@`vjBTsN?D1%TU-V7$JvjR z?jovP{J7j+lt>*j)(@PaskmWjPWCDk`O#S6l;J&cv~J||N%F4%t_>@4O$kL{!lnw` zBB;zC**3*cUl2t7s&N#sSv@wb_`9r_;JLp_hU2D%Yv#|&B{tWl(|tdjkN=zop5sbS zc)C_g&K%a*iJ#BbSj}t~ojCWM(axTiCyL;lS)Y8UAM_zoCt_u*atn%hgdYI(d z`<5%sX%(wn)8wzxF1MHz-Wee$QtX_0bwWx|8iO+4FA1I$@QUIm72r%mXH#MRX&qI8 za5Sh8HtxE>#}r12Q9i`1H()s&&qUWzoYwN3l>LxfiZHAQLGJ0yJ%C?nrJ>?Y7BkMy z0zb@15P$NT*t=TCi{pFKd7DA{<=a_XxG;UNjOFXDq>7kMH|8PL9nlZo;Ub){;llmc z-gjgkv5MSDnhuUR<@y5aWA>nH!B=3u4?Wi|48P}-Knq*t5o6md@vfErzt)ssec|jl zx$K?tX}PL+dlQ`|rUd*7k^ByG;s{>=9S;L6;J1wqsP_DX;gT6r^Las4VPz^n-l`1Z z7Fu`RupG6yowLizIPa{Ll|3tBo3cml@Fht}K=FWRrcEJ$o8+l{o9D)aEwkmSsJ{U+ zBxR)O0|u= zR*mSlqxNVMlXM5o;#FaLimxH3i`dD0tYvdQ#)`yMxy7?1u;7WtWASDC-V*evYU{{h z3@?IhpE2@|%+=4LFtNXzE?{rWx2{VXQ8{8$wIM?u%^jZ~i zv}yD-Yy|6djv72^11Ak8skH*^xEn?eq#KdSR#(P<^~*21nM}Mr`59o7%?g~6=KfZ; zl;Xs+Yp`T*^zf^)Bo!TvZFIr=d4OwW_Sm0un=(*-B5gxp4JNvtEhgz;mDXoUMG1WAmRd zTq?Ypf=A&d1uBdABUCl^IjjwQnr75O<(e<8rOTOK{bA$w-Ue`d=D(5dS?VivnGHpU z@is(R^fb-E?OLucBq>s)LCdLxPg5U52+YGo)C?vPvN*vajq?My|0z$XL}Pe zL>0-s9!$|@CcT9XHRBRYw9z5 zv@GwdYOkwn$L%0UC~+>(SR=|6`&c8bR@s6x4y9@$wfSfRjJ)TVpg0T!EI{Opvay#XphF5UVzQ$MG_ zhxL^AFtg8x8*R1l*pG+dKgZ*5G-vuN?v@*?yQWg8v<08?trEQbw~4~dzQbroTFPnT z)l>EIAY8e8olH!Es*+2fFZ`n12OL9(Tu6bfOugovTKeOjPZG0o0J|`Kml*q{9a@)b z_N$QB!dX+kwk64)le`u1tk@ljl@I2VT}?RMrlUo~ek#DQ9wP6p3|3kIP*e;h)R*(} zgz8C~$e+B&z}vl?1VMczb`Om5GhPzRQ6$0I1ko5`rEjgb@N7C)ZaUt$+GyxXs(l;{ zD=dNG2BE|!XkOMUS{!2Y*-sYNUhMDwC6+9c6odHN?N}vj@}csX3&ljyr1BM=>1oKe zag(*_ur3?#RFK7U8WpaqR-Q@UprBKF0mj;*3r-Xx3E)mxOJe|?xuD&p2AHed1>mc* zapg+j;ZU~zE#!|a$lS%)xFy$lLPyEK(ZqdT9ae$oF5F?*_BIEjL~aBZsL;Fe61?b- zhcJ@#XT8&T{7z$;`iFiJ3P!{)5%C0dG^T|5dbfM(I^Wx?ZHn-Gq9k}GwxD2=g%!6* z2D2KetmV7Hu4Qpw=kbjf!@ayrvBt*E#wrq|hxyU=GR^!`$myZ$c*SAmu(3eJjjXV~ zXRc>?ZUzRj?3G1&o|prd5A4MQalmBbK`Ltcvk*4+co2vRx1WzW2%pS`s7YUQp<3$| z0aNiFC~P%U_FeI%;8$S?9bf^!yvhkO*8V6?LtK4cvNyhzM-z>{5Q>0mG=VqFiikl+SqMSJ#??H{2mjK}<;wwvF}1!ivyqFcol0<#;^%M;`o zA|dWK@b-SPOSFw}ta@g2ycPvNi$sSEM&lxd^=V)2 
z&jQ(^uAKs`?rFNYXRi}TlWw~?N0?2eepRapCwAu9ovD_arrYps!k(jedaFUJl}ThU zPilikm1Vol04DcOvSC58cw?ourQGMZjqa(_?key?w`cSj*j#66+lgQ3jk6!CKIC4* z6--jAS6zel&JqH%a`t0NjEXtmCPLgm;jUOB=*fs#+Wn_sLKu{B$&w*4*O7S`v1x%( zLvjpFXCo!!RJ+?uM}!HfeI{S$MjNtR97;t}I#I#9!hms&GI;faCMVgG+6r+szO7F**)QO?e^Y`)2^+B`<^E`|kZCS5-VC9f9C@0( zBFH&J?YDE5L)-b5!(B3x#xqQy%D!rWMEX*b6FgHm!S4>Lm&JQGZCdZ`k;gX)xas7l zH=9zHljqUZW%x=fPphh2m;{1c@1-WF>Bit~H~O?;0&RyBiJ{;2^YqL}1q^tMQ?+uf zSkbR6D5m_ho#Y>o36$iwsTlqAt`Co}i+jo&Y*I;I#1-ECNTQjls^Ez;&jw3!bxc1e z@$K;y;kXx7^HH)anJ$wmy!wmv@^6b^wTB5dIARZ6i#=nR&lv{LiT6aXhUpu9veD{P zJ;@;XSAs0Q4rf*E$E@wdv^jkkI}+XvpX}EXn;DwNhiCUG3`AtcMn6`v{@EW86UvFA zmIwAs0vVoW1#>G!F4}nim(4F(_1y(mZD0f^O4c)^xoxx>Gq9WGw`yj}t=E&7M~X9R zt6ZY6VZWGXOcO;UW2HD3+)%*vosa{6^3A!C-qQ>#x;m~JY)jQT-gU}5aBBa{v?e&I z(^W25zr=+%DV1ys+BHqAKT$q@%51((U)78)C5+yzLBii+XJYb-r;yS8TX6p=-qj#u zuS4-XmUI}K|6`7WdWLS6dXu>wxqJ33samCFQGCZ%u^my{EGJCVVX$o+{8nqhE%mDb zoYPve>PxFgB99t0R{?ah^=s9P{loGkG;Q2& zcF_^XBYcibUX?KjLX^lacgH{ar@6NjgFh+2Z5-^&?Ar_O0*@B<%NzilfX^25?(Tda z`l3*ok(Dlm)*>ltZ}Jn9epU&B$3&gV^^riT1 zF3f%|_eDiy%v;nVyvRBIC`ye~2%blkv<`z8w9!+FU?b7C_6r_%-R9c2CDMn{*+}a` z&<6eJe*J|@22moB3iY%2Y1Py*ZavIbK;QTA{_k1&dThcqLV{JfviT9=A@fR&!3@#B z@%}+x6yRY*%105kH$l0PoAfW7=%GxP%)Pg|UsjR(Pv~=KfjQJ&lB~WVd?9qX+gQAy z8dLqOyd4$f?7y@?l(d-|Q`FXfW*Q@A{geR?E5DxToaBMeG`925MUdqWS9A2=p7PkG z%60CkG*y{;!?3LQ`NiPQT>rD60dZaZ-+NX(wqO3heKr1ZyQMjMR@aEDCZ?T?5Igzz zTG={n*ndwfZ7yYWGHG|j>TbDl5ExNQFfLwd&}()clN~S0P3r!0Ciw~Ize04ZxupOR#}Ffcnm|)vb0Gdd%PY^ z?5LE>kINX0!lT9+_2WtJG;R(tCUjH|vYg01Zl+s*yN_8n8yu+2Q7&^dZ8FO;t9v6G zRc&9`B#dNB7Eo%W2h?-o%ytJ-@XQJ!BW$980!8MG3|ATBV-j((JInhPn_719IpU2VP5*AdO|?+ zV9$i@?DaHIuEd5^h`B7y4LKJgbZV_GeuhPmf+&U8m9k4KuL1LMD)E!7kfw~uc+yLV zr@P+GI^?;ZSWn4ml_kJC)uBb1zp|=2>~cGqal@`NOZX*W!Ppj_R6(m>60bLG zaOSPB7E?0IiNe_ael!d6fuOm;b*Gw1)FZH~h5s}@vijuA)6u=g4of9GBvKG6FAolV z?OB2^96?VuX(ON5OY2SaY0m%21-k754aK3AJuvJh(4;U}-+?k(Td2&-qy9LH2wbu0 zg0h8JI>^f1q`$8B!AZOAi*mb-%6D%*(2Yc8ZQ z6eP}668(pZK^ft3xM4OA0jxtu7f2Ul0ij!zZN%<>t@sT|q^kUO^>*7}R7q#^y#k8M{zk_ 
zx1^;@MM5UYYPi1X@fiQ&Q`%l=#6GWutYAv^{`D5A!QJ#&0aOp7W4nH{Bj2b1*G<}> zEF3iLejq?3lx3tnqA@Y5HmM;Muhl?RL`wUgO7i~}mEK5i&Ypf9JshjtaP4gK@P?T@ zkG+Br1>jKzeG>s8-~C-BV*WC90c0&7xg}2@(C-n-FfZO>a;6vR8WDzC1_xVyv6cjwNzXXc#$`H`8--dX#7*IJMFhxFQ- zREj2MvIhBV-=?BTr-inGx5p`ThVIe22Xo^ktn=tFgNJA(k2%fso64M*!sh|ZZ1A=H zAXinBu-G_&VQl*l;JC^8=jwqkHDU>RKounUL$M7EqiaMFM_)0SA7yZ#ZCM@ZkR6OZ z5zaTN(l@$Bu@&zeG=04?r zG|&PC!Ae8<4lku)I?4q!=^vIy@X3L{jmF+zV(R=ZWL>q@mv?iKYZ(5ih8)1bQ3jQU zJQe3AAO%qF_H$NOs0dieU%ungfk6)@w1GOZ{FFYlrIH~C8`$l43^N5QAK!DiHrR2L zf%cPxH%==80J#xNX-t-5kwRvK6)RSce;hW24XFJx;2T4weJ* zW&sr|WZPwB;)hH~Xva1tbp_q!4@BDVS^=PKV1Y@4Pl07<<%0sO%({WE`r#c+`m4?A zL&PGb4RPb-FMQRsa~q!A&=55$QYW(Qiunw3es#M@xhj*P#@}a&!F2+udKK&^w_`5@DDbS`kx!08=?cEMEF%Jz93su7&19O+)#TdWOzh z7I4b5bfO73Ci1(;?UV|nv9}cW*S1uR90OFI6$GPFzo?#AA_r;if>$acRns0ojjK#; zD;0dPl5hj2%GWT0^ziLMn-X6`?Qvh(c+60w-rf9%WHHdl*2h=s&~bGNTuy%N9gl%t zo9^bh+Okni@_DOvStVG-Vx@DrA-eSqQ+%t@tSfExy#V7fcWC}7xLTG#|52;G)VT^j zpEr2c*e1@~p<`zkH9!xw>kna(hJy8t;YR1UY1|A#h9_K)|1Eu+Ngp(m_+R&+vt>%Qg zcg(F}e`uH9mF^(`?+y)0I3`ghb{S(C-hR*K6F}_-Ow|m^v&mkZPMQcs20t00q{by; zj*EYSBW?3q%>9;2=xN5b-FjJbwoYVBpC_mP89sMO7!+v&6SAM`1SoiUZuS1~h>IBf zPI1NvJFkL(zODv}bWjo*p*mBWC~z;!!G&Sr*~e$B#>s+Ds-Ipdh-#sI$ts$Hf2I6u_*M|NC>iG4Nj3KMkW7tv-+d&H5P`IN|qn(1B&nBv#m} z5V5>WwoLKw9QXJg*vkbV_EY!l4fR}nU=5l&k{%V=x>2t#^L*v}p&osPD~&{it2UX* zPevH0vW(LT-`k1C_j8bf&9HNnvCH<9csGds%U529?ombchUe#O;S1_$xhkq?J-0dw zxQ>|JC8;8d)ssKxprc)h(|!wH5;koNXtWS_TGi*Pu8+=O$Vw0_d0c|;Mt&;yo53|= zgwypnuo2>0e)IC`o;PdmMQmZ(OD^{~57}7+VB5m<}3YdAiEDoZ^4Vu(e7cIjf z_;L5Fe&I6OR+^A-(J=244O)05`!ocKtMmhAxztv{$EOLt<$TlMA}Q)#Q&}I?#>SpJ zs+AhI7?Glcy%D;JpVBgpJIRT|?dLFg*dUnN2%bczK?zQa7(S*$+BuREvLwh?^0XQp z)yhNprYkh!Gb>|S!OvRJ1x7~`esC^bBgCOJ%gGvFkZQG8_j`?~)=w5>ScJ_wPSCd& ziV=&ANK>i*$RG;qrBEwjrO%YNw#u4}bA9X@BbsHX;)yjHyckVUnYO;FL)B5gZ_8Jw z3`=~7?v3~gkP}L0K%e9)c`&mYCJf6H;TE+kvgjq29sg2L^n%BlOpJ}DyqH8!?>6|` z{|^cW41Y93rU4QuK#uF*h>|0~q{=3a;&ALKG0Q}swGA1r@uQ+C2R2`8u|#kYQVI{)JChF{9a zZZ)%`eWOnRS`7qndhW$c`?zVGs6RH@j{af}l=eD>V?xOf)`&*`1~y^#;Z;8ME~MUo 
zvMJVX|7N(E-Y@jNc3i`!^(SR;<2k4j&oxiVIsa$(IrDvMC`d*Qmq zU%FWIaI-LAEe+6Z6ldo^20JY{LU-J6DrnB$BWBWgF^0f#)4hTblt#cMxp$vjg&=O9 z*S#RfKnnB}UJF6H%4z;M=E`^s3A?u{YIB)AFjA=*i9N(`Pg+ZRw|$pj7$-|Qw>Q7- z`xqZJ)i2ioQ`2?)IVzQu{r#TqRk`&qh$)T%ml+`y|5HH%RDthn{I`Vf38{^6q|e?^ z)YG+)za-&^JN9Sx&mXQBGJ-@&Ro_6oI5~ql7De#11 zaXeGlN(pDx$`n|FlXbOCGJCd+s8K%~g3tSdBAJyfdy$9oJdw@&`@YjDqV!k$>F+y# z1Zo)$qB0XN+cJ zTlhNzZH%XEOj>M1&f-PQPfnjEeC2%9x~VQbsJ^iT0i+BZnL5m4=aM+scG~YOi%%&u+&bXZ-#wQ**F^5oU55%7MG&Ut{qTZpjQMraABF!UQu<4J7IMd zaw4#r$&?UqRveJ4q<`UZpo+!h3EG_aCZ=hm`y5i>;&H!8Q9QffW)^5xP1wwNf;9`8=g zco}Uv%uaB*?QGm>T6(n%bp2Cw#gA)dAQ@-*bjd1Z;xgi-6`JYtKl6GApZdS(o3@d+U3cF{iKqhH;lTV)`?PD&a%9LsbN@15S7W1N z&q-!~+E!0)w|=kP@b=tzcK+#CQk2f2onN^r8}Unq)(Lg23h9*h0JlEA2kk2F7LP2xCz^EVsU zPTu{u0JPoYN9Nk^n`zdZy3!CV*S8J_WRdLbE_5)}_0%tPr^DU?ggs6e0pZSXKko@Z zu%=i`M0QLNDryi($2!mX5U=ZfA6-~!cla-Rb4-V~`V!u>IK_mtR&&O_Hka;X?NY2` z!N~#?LF+!-FBnip0LmTTd$YVQa8;3T6A>Eck&4a2liz6n)|URC?TJaPRw?J(F8wP1 z$_9q;M07nxAFCYCRqv3r#D3w`a~$73f^3c!^8YCj-+le{`t;0yWnNlbx9?A>cdKcT z@E`{4QH;8ts*W`lAH|?N4TxyK(d?LZ@5K;qR11qgBtq?fsHGm;YIOD|?dm2sEnjMy zN)R4B>HL!p(7n*lu;Dpg{W8_0ZO50T_@L_TmjO#+Y}NA6l+lJyx}q0)RmX9fefg>` zmzOg|w~nZqo_ledjuX1vNl5J96Y<+MW^qOb#(I?;kF5qCi00I)b!~w?cCDv8thpR~ zwM2cf)+)OJdRp2;qv5+@J zIa$P&$G2t|j`cZ?7OO-MhHC{Xr>zem3?55~oY04HOomM4P21~nCAhp%E?MK&`=PLh z4fg&UICR_!Sirk0pCHKX0_y^&4$T&taIv6v+M^ECb6zQE7fmhMFFHh4j5X4;vGtmB!%X{K|X^r6Qx4IbFgWY z3q9mM2;m*-Wz%GB+Tv1`tv!=C&P|0T!JM5t#E8dK%a_w{YJ7Y47TeB}d(McZy*sgV zZ&&y?{K3p;JNDVtViHos907J=58)4WJz@`j>ra8v0>w!E@h8# zeA=Yceqe-SUC+Pe%XD0)trMhrhs)(HoF^NDh5Ax;7w)hQz?54o z-AktiZzbawIR?k68vjXxS1@F*6`G9-sHa;@!G`CJnMtxw8Wlox4?T!KD*uriK4 zZyI(0kK|Qx6?DiJ4PhlTf^Mx0N;H1JA0W~elS=H^i`fbUQ38ylp=}k)PkmpYc2O^yDwBlLNMUus zxs}yo5*etyW{O_;n`Tki;>sql@uu)yESfM))G5nh#XTm!0`LvHnov1lX~p>!MW4z4c40AGhemDxBP1B z7`yHR??&>66W6d805V%8)o_6#P;IZTH$8M6#hlD3u-TcGzRxQ>BlFjOLPvh5Min2# z3iVm#M(`i2c;P~Q?dRr#>8{4j!dmq=n5WtlLARZ!{x=t?-OCOg=Yx_X|6})ThWyav zzuAy|xx~@H88~gvQyJ^(=s$6(9`_sFY+Kr68%emYYMX+YaqfA~_&(V5I#LX)|9nVk 
zbnMcgbu%W|sBXR8cAoiA&u@qeac&L-!GTLuBa$$u?|dD@jpRB^Kv>^u2Fom={}!i` zhfh;(lXAp~D~F1#FY*T)y%`_>!{&6I-jP_Qo@)O)de?(Q#R?Gi_2iuWqQ}sdM?j}T z^e_?u{?M6#ooe5n*5a|6{i3(-DkSGV8f_u-#8LHURl3=HU>{lW%DM9)E8F7t3qozN zVB80aZj7X9I6%&X&9L2CXV@|eyc9b~87Omq4*C|-o1)srj!xR7PwEsEavzT>spzjg zZe3N@Mq^~dlQH(B_K~U!m}1ksNTN+Kp)3JH38YG^?E7M1aGkEbm~;@f4i&PX#)Kb? z(tLoQ*;#aTtMc$xMr@n5EO5ldJg%b14M6n1Z$*ntAn3H@?TI)Nsj}Q z-xWTV>`DzQt3FLuQ~IE+!7~asWwEQ~Er-Itk*fi^LseBh+kjr)PZIzerT|5rsEEO9 zqp?vxbsUeUKL@%Vx1bxkbd64Jwy^{AvG_=x8E=JWs2PZby0LsX$UBT3!|6r~2QegU zhY4q5sDrwL< zxGBBQ435oOe2y`tSk}wrzM0)z8AG)WV#Q=-JaIvm~t%_eoqBm0+lQ4 z5eHLJE{&G~t=wZ7#dyA^jWZSvK-VN!9GO=t2kq5voA%AH95tzmwZ~B=2_eZXgG2Zm z~eEhiCS9l|Yh(GOwHho?Aw-B{8BNHIfwVM`wYq0Gx3 z8&Nw)Oah~P)uI3QTEjug9O_5I+A?bGE~ft@Sruy_tzU9kL^K*_=|n!X9EgL6F_&97 zDDr$V?#xufL%4egyfpIP+8J_#@xHFOF~Y6yy;>m`9X7AVXnYfc=Ibw1{6-W;nFu!0 zWoc8)A=yglH8af%eLp|6#TK6zdlENBTE7EDKPp*vhTp9a^#6w;y_*yGvTn<(RiW+a zBF@RVk={dt_cE2*^~E}?@%X6O1FT`Rn>ZX%?Fd5=Bvx7u% z(Qmr%I_kzgeW~gu$U!#FG_`sf6uEXCr7ybe-^rD1KDf(jJXpq@(=O{z<{4EO(&xD6 zh@i}322mAvk@>pSt-YL>qYyfBQkHHz!Z=;ESBpIRtA~3UjQQqSxas}H_SEeduCjg z84QD+u}aUSR44RD%;)GNe3LTal7`{tqucLdTb)9CAh!+%x2-{Vs5HZ&)tSivoA&nz<+mdI64RqqIPB zqW)8yv>zd@@oK5t?@y#DR&b{zT7sKqIQxjqK1PR-z>j=w0$=rf{7`6~{fjGb5g^eW zH<$DDSLpwJpud6s02anqnHV%dhc_tW#!eWW+enIGY-Ff*i0L2t?T3KrrS^~6;Cj$` z7M|N<+@Ttq*+v$@8Z%*Y;A;mfD{l%s<7~bufDsDIxG*x9YX7MSiNl01JZ7#}iTbYR zO&!s2somC$K_F1bUj(T)e-dcrQ91vYi*AqLuF>&<*)Lg$<&xmeVcX*Dd9iX9$(8V7 zZ>Jz-rQ*)^Q?dCH!8A~SkLt@6u47g|VtD>zX2{>XC8i)nJkC$TSjhu))(BRY(oO$# z15i-$gc!kn`N+_@m|+vAOm+SNX|j zz2&K@9!k&tAMGMBkZEE1Fydt;!33{(hTADdXt z%}}RR3#bpq%Za9P-q7I0p-UyN!Uv1d`q8)Vb&fq*S8dRgZ@i`& zErjyHO52+=UD=vKjjaWIvU$?&mKj|76{S>lGE?N`@QMK5*pwm+@7pzv_@OM$)zO_w zqUXsucvjAT(X_@8nc@CG4NBzR2$WP%ewG5#e$&_L zF49(∾Dk38m%W#z%0|>W;G^%%kqC7OpqZGLqTObw>yhPBX9f{{5rD*JbmthU%rKGrejWG91>@xj2|Cwk*|A)hU?yYP%%4ouI?s4>$M zn~U4#wMx(jXg~E~5o0|R+RPJorEFT{zd_86Ix!l&Yicvo5mUl6z;a z$5?4V2F!zIFQ0W?3m2WtyVv1_In;Xx-Y6zGur>dy(O*U!IS?L40VPlJ!Yd^x51=}P 
zh+}Z4-*=8|JU@6}a(&8#=Wu^!I3)7jTN)br7qdduK@eYon|={^&nH+=Nk841V83t_ zY_DRoBHe&VVCKSh4VC7AVd`5H*CgRz$wmd=Mae?cK82@gbd>NR*m3eXK0A;C)X>lC{*cbH(uA zN^igLy$aqF!^AORLK}DAjC7&%r$3wUV^^-@;ZscH$$6DsaLR)PHVp83yc8 z&{juS^hugt3i=S@ixWceCqQrYNw?x{jgNzqbm;5cc-P2togZn*Ei-&^rcx(~&fvv| zV-1&jWOMzNe#?~5@L>THiuhhMtP^2Mg685_xcjEED(lVkDt-ac|ucgHVkk!(CK*NU4pQ z@aXn>3mYStno4e;k`G-#ox&RkP>!N8n_S+myqA`g{4Svh?OO@z+R3)70I$-`S|3gl zl70pgb;aJ9Ng`QKYmIU3LoxMKk%c|u>{!7I8?@1XY*djcc=7qTx#2x9#{VH@`3k`M zy{B_t+;GFs{sZGxfv!09V~3>ux0T4O)5`$KVFv4NMlcR=*)f`eL)Kuz^NF^)Yq&dH z?73_l%Yq;0MU1$M0prQ|Bp}$h-DBb3zrqO=3JSQC!br!kWoooWR~^j zYb-=Dg@y;Y`%`4=^f3F0pX0~B*Mkn)b-N#CLni#gJ-Z)QJjO+Dv8q*Z->UmhUtfk-;9t*vZvC66x5L6#~7_^i~gWlIL=otHr)e@8ZG4VeE@2d(Ucjv%j{p-UGNh1!u{00OG%!HC$|9mwV{<0h}GZ^HCz#UN@_7QR{8icd9;@-t#E zPNCQ^ZhLr0BG%4HHmNCDp?&(`6JG^a@$n(1WeFxC@rXg^kwSbOXIkfRn{nwS>`V^- zEeo50ssB^ME4&TuR|e3sInc6!mpFxmU#H#+JQ6oD-g=7Z)O#0RcZB7XX6%YdJd~=) zw;h1b!4L|jABpRQLHY}|*SPlxioSD?p1DaZDG`7?B)0N=|COqpsq}dw(?nd{@`!Ql zYwMC`a-4H+AR{$vG5!7Sv;pX-^cmz0`<_s0hxSD#oGln@L-H?ytIF5XQx8X0Z59S{ znoNa=>MZ*~q08tLjV<#LTIBM5u>gUy*$_FbozOid<#=<%SgO9XI7PK_1N1;f#gI7# z9%4^!1Mh1!{rTU@gBIkc2*tUXf;F%V7r~6(0ZDTxDR@KL7qvn4sC>wX_)-Sj)9O}_ zh6I2;V=_XuA&ea6rI=V>)TEt`&Ai_>gyXb5dW~mPR$mS>-R`$n`!J&mlXSl0i7cn8 zWppK1YbdwwLvb>mU;GuN8bfnJ%kwUfft-L%8R`znOeIgA9^E!B4FSemVR9=%tpC#b z9aOBQB`I+4MxzK9-hXFJU6~$YU<@Z}t`7bT*oRkFCMjsO{eO`s@3d#rKNv32C%N&AK0g{AzPbbd_a_GD%}p zEp|q^_aT=q2MM$3>W(nKc@O<_vdNl68O;i75Wsat9&1@q#eJ%!NL&dVTjHbiZ!W*u znMbtK3`8#M%4BVFB`MsG@-5xb#z%`sj1&`qsE^7qIS?d4_(l$Tt$uakEoHJI{dw+7 zWko08x5n^pvE!A5JYM7i=ZJgWio48~S>hn!t#MDK-$X>XithbHjK!^|qH74%e)#NI z)_&o!?rW?hT9LVZ6h;_75@{L0VAGZYge6IZh`ShKh5|{C8$?JKsuetMB#4x?FR=;K za$}Bi00i&rruCo~sZ?O?I4H6U8ejV+au)jZJgC~Rn3o@ZTTGlutBFK~%qH@2XHQZ~ z`i2Hb-05nXTD$Fp3YQ6SdU&Q>u;KlVA-cYpXb8R-nt8o6ke{qyti0=A=`tqeWsa6h zQ_a-zuP7%0p+b~lCKG@(MH~8$UHK2H1>h2A-K@V>rt$F=RZL$a={{d;6-WZeovMIi z2%0X(u1Ml6Hy-I8s3>72?+_~c&_43mY`ybZ<_qKh)b2om65uG9o0G?fDv_Pc^;G#tI6BM8 z1Fyvtt7Otaj#K$TKTim;y1Jmws+^@@?^*y}x<49!wHH5o; 
zmvi`@4O1ngJWS21D&-i z1PXO)t2l$s;kg!WT!7x#s%w+zHCWALmL9jJb7y2t==}ARef10XG`CN4pjlcs$3D>S ze)|SiBIb?f`fw&6_^?~@!A$UW*%dN9GqZls)q6D(v*J8N6;g4^BGG?2E<9A-^)M_s zvftnSdeZr#k}%V(O7ChBEo)tP%7a4|9r&~xxY5ynhkEwjtksjz!Pk1)@5;;nO&E_Q zXZ?NHRJSQD$F<$cxg)dxOUjg}@8Esb;|H@J%JavO!YB1q)~03)eb6CU3x#MoP1{d> z`A`sJG@UD-|LJwp>yl{$JEhQ4@!?RN%Q-^d0IpBOTzB}wNtUp^g&#amWJZ;dVOslR zL9qRP&hWU+uBO4?b8Y{;1QVdA2m*QBhp@Y62j8Ok@1RlOgm(o+ZY@5t{*56mJdeA6 z495Z1+10f7Ruy7NA1SQe9Q#%X0%r|G{Ln;3c&Yq8_j8~> zw;;bQsUAXWA5UKUgK>eDYLXWI90C_F0ZdAkHGmk@TQJA3&0J(^N+aJNAp+vz=QLs{ z!5^P5YLFS=%V0l*?FYe@ZVaFoDj1!?kT;_e*m!x;G3|5RMx{=PGdBdis{ArcESpG1 z+EtgLNQ z#OPiA&4xHb)_B`}CEp@RTl|(s+Q_}v@?vC*_AYVeJ#~m`+Zrp8Hg0lr%NN;=lXBIg z9yOwg!Tr|U_w0UDMUf2VQ$s9^CkF4y1zN(*L&%Da3M{D_u|s&%2B#maR44(M41I4# zTqc2OHhs>X6h>7kvZ27^q*8jz_^!>aGrAd_+kreDNMlK!2!esYzt~yZ>f;cdw^Ar1 zE$<{I?@mZAPmoZ=>5gqjHJoFSwu8$2?|NwX)ivfx!IJ^k1msFFhNL$(hCNX6#iN$^ z1aHwX){kcZFZ>sNu~iLy3RjmKxf0Ea|e;!gAQKr@$uW+EOW?!yDII>TjL4D z$cg}}q5(Ie;*vAW`UwY=zBkwIChlff8x&y|whJ+KY!j(~?-O#pshGhWevvF?)M>tb zB>2y>Z@Xd+qSs3b9!JnAhqt5=G_?UpR=i{N`DDS-O@o4cUzxC6u9*X@(#g?cEK^r2 zjGjEM&ieD8L{{MCeYGL9p=7Xtd1Vli{CQfWbVaCh6qxo*J{j!|VT?GBI!D0(rl!0% z9!AT$B$`xWsSXCT2ezUzv`5oW5=Q^gYtfC z($^=&1;}t9P?NkXQ`Jt6X5{fWEr0}hiz4&hvF8O28hg-G5-Fd z*?pcnv`qYhqt}h+z#%rNA@eB?fi&oy0k?M}$;Jrr>~^kGo^wO85W2irYLjdggAInH ztBj5fiVF6DuBMmHZFFU7A1)RBtPZ=Wo2w)pNC{@0Gn{nLAq&E2@CK0n80v7km1WkR z8?sfllalriy!n}CSz599ZMvhk(403L#w9S*q>xN9{bY4gXB~4Mh0s#)P)HQ$Pyucz zs=B%V+rqxOAvBgBu)G^4W&7abb=S(-@D-R6&+=%lk2-waU@DmXBQ$0z>yVuN{>nRO zEd|f|8`IP6RfW@oniI~Hd}Rk59Q1yt_&lz6XzktDxcSrhVkz>F9j*0+(~@JDL~8)f zNlhPGw&Wtmh!vKyF5QAao;9MB)SMH~g#oW<|BSrIuGx*I90{xJsEUcoq~Gs<#+A5S z;o1W8o0q;N6NA0L=2nw1$$RR5>_3ORiBwOhgdLweb>W%JvL&nBO|ySD^qCWY|Q zg5ivM^FYp7{OIx*#ehdtWvF^_)Rdp9URlzdtJ-09{-oBM!6A;>0U>>=!rR*_xLq z(c?0;x@fkx39;KyyDODW3&d+>L<%jo_9u;{Hvhy`g6n5d0S<=lgNa*o`Q`X^QEsn! 
zthms7l=j21cK`k{q&I2euXVs@bKe76YV#!$5=isjxU)x|8HxA~r-y2n`EK0x@p4%q zaM#!?GRyNtzsdSBKdV8QjMAFuC>Rq6nh&@X%E-#XeoA#fVCz^%Txzh6zieGj-2_t$ zdmM2XVK27_zUKb>&)*>Cn7I53#WX|2^B+`9jA80?EIg<6Z}v#~rTY zqT5?WoolCpk1WEEl`9@A?(<8N1@c#{A=^I)w)~&AfR3hONe!J1WEoXMkfwy%A3lA> zL}CWfOKz-I^5sHInFSuD`301XAxc7StKEck3kE-_13l)uuB#ju`Vi&D)Uc_$EDeSv zm)bPfK28>{xZkcc@$lOv50F)`QrA69(b%z5)jD|RG?F+pdzpt@y3#F8wJ#KN@wVGV zvaaBYnBgDN=$b;<{Z<=}FQVCDbsFkxCH}%f+!c-u>86gWJeDNJPF;|trcs>Hk%eXD zTE-K%Ts@Y>0&4e0W4Ch@K0!f;twa!?mNg#C7!K}$mz6| zoBP2Bq5z=0{nE`bo0-CXhBv8&n%9mi;+yDz;P{i?-DWW}=`e{}Z0-no1L>qdC)_EN zcGAQ#C-*P~o=q{-#n>KVVXRMLU2i-rfYIy#P2xD?!{j#Z?mAjrvA30dWgk7vQJq78 zMVGcXC(gaVEE$LI=iGq%`xXc#2XokxK+H5Z)&}jA>qo@<*(Pbv#d}xzW4ruoeAe`% z2*-mSw=o{&8;>Qc2K_&L>0?(lcC|eDoYE%gGe|BpKWZWQbjo7|)2<%3%bBuoISP;{ z_IKe{h=C@DrUqPKlPceKUvf2*#;n=iDEneawwPUlUa!OBh*it^T()U_I;vLpf|I5N z=z!f5k5z=j%G_%cOEmgMxu(Q!R@)jUZ9I+CR&QM8^FI7&eHzKHieoM70#k7~pI=T4 zQl?U;a}06H&(+KracF&}3=qg~45OPt9SObWV)S$Yf;|zZ^@nq&NqbA6fE}x(#rz<< zHk$<2cP)E2ez(5T?&aW^_3;5niRYt%p;p+QY{zLW3Dr_mhL&J-zU2DPjzg?3%K6*) zdc7Z}{Qcy5So-`Z;sUS5%wmh8PDjGMV?GD74Uu3d+ff!2oPV2bk^7dU;k(E*U6kq5 zW1bEGvI1xf59Xr|GA3FaO1oB+=SICb)rQl!l}b(E!^~uTt@}IS@(St+p?oJPx9Vse z5EI3vA@&FYx~{Z8*kq;v8qFfeF;9vI;##&fJOfDKW-4hPHc~G)g>m+9QPh7H0 zdpMsqHz% zv#6sR3?I8A(Eq#yW^eIQcdF#^0gl|Ok5CR2Z;rEi>m5u2rL9No0>y)zvBXF}No)C@SMcZYc%90izVjXLr z?rf(wRhr}nsz+~no*TTU3#`9YVoUj5Hs#7N!?^M>8)g;GoXZiSSmvV3fVXkpv+Nbu zdnU<=#mD7`eA^Yaqk`pi9>EHn^#NwRB>Q=I*x{AY3DA6RVVddQF<}hVd;Y& z<6S=_Ma%yEHZMRV<_U3Z+ow028M@|pw+CXNh(TUoT6?+q?-}GcOe%kZ(UFnGo2rbX zYTw&O;h)u1OV2A&x8E1P>7bu>k3YOvFUpb_i9~R=?FOB0OEBM5h&sh7=5qZlU;ANo`_sxx5JFc1@cCDTaf{3D zWsc+JsdqeZo87OM?|DW||8OmEOvv<(;1JFV5`Uk;Rv%CY-uBILtSY*_n=A|Hogxso z1U~*`Fz)GVew8zuAIlQc5o0&HJzDZyL^I$3=-r5aMND}{!&yk}zKDJFLwE_ptx;a1 zVhFB1#Y@R#I%7p1XXVkq{r!3e+c|Z*g-X_o< zFfDB*y$L)E-J#jodSnsEC<6KWE=%;(-z;r?adH5!Qr8z)>53hf$d-ipZ&Az}bjB&1 zq;w5j9nU}vD%YkaYg@+3^!`kZhNLWA3NZhztY;~uhX`vhV=4NruhZy?F9vLEi#rVX zt$Mq?KAZyUaRdHsd5n^Dm!6;TK2FVdZ&+orx1Awoq-VlQ{rTqK*H#@ucEU(2^Bb^D{?ZLKv# 
z2%{+MgGwl-c&LcNJ9cN@PQ0qSbXWtp%?Ua0e4hDGgCx7I7fb8sHZQ;Bv!#pMU!tAX z9zP&-XB!XT`Kco5A6M^##MTb=-e)zswAvpj{Tg9i%)GYO6aoKs8^rNg)J=LNSqjG{d$|?^ z*Gq>JmNK3@(!A%p0-O2u)idVFaV6@Xa3W3}qn68$8WYZ@#fl0H-2965sV+#csj&j}gA$A2=Y7>Z-$i$dM zCeJw8Xi}ypEx-YpozIjwL=EG`xx)WiIJ2w7ltFc`umZwhw-vF3;mzVxmZ~TuFMQOk zWO&e(#hlTcX4;B4p-Jkq*N1%uz(}EJ8L(-bAqUU8_x z!^A&aE|&aQS@FX^a3l1l0kqu6xo7}{r4`_F)d)sdDP$N+w#5SrLTsnwjK)IqZLW_d z{`S{(jrXSvemeu2uV{DY35L}ox*B(2xDl@A`vBEf@Z{EkXxv6ImZVi_LX^`FwWhA< zd7t#Jtw*Ib=}LS7G80yI5oVUxB&uiUuFxH9L_D5=9qrfeg9rJ;nS>^=uKd2{Qa*0o z5*}1v+aZU^qpt^XJ&~;#`dl_{U}aV1xc1zKC#f-F?US@Y6&-g=<^Cx7jN|Nky7n)X zdu~*kC-uXExi2j6q|g)bQbsz~9ArZtgh+!KOh;XwMlBWcZh5qO(t3*<=v#oP9I&er z*!kiL(q?l=zgTbIW>4C!YAZ?#@g^=L%4Z47OWbA0y<*{QIM^k7Yjp7Ll@3NErKvJCYdovi^%C$+?41<&77X15>YZG;1ElY4Xt5A(_wxhJNi^r&4)V96fdMroD}ad{O-*kK z=KbgVmFafX9=%WC&K{nfC#l)ulI!aFsr1A?ITh%}%!@SeVPbvEwrBEri~3doHxZS< zT%hm2)fOR4-FNU_iHc*{PYAqzk3No!59|=Vt5@pBB~jT)lJz0zQ`-KVED4j_>VJ9ig!MrI(VG0k<>!*Vo&R*T21RrA7*tIn!Ek&dLEY$L@o|0SlK%OdI2}q z0fU3rU4mHiYfd5ld;P#0j^dr{n7~kk+bx;W?xbYe`vEuzEkyH_+w(l z?!O=3n!6p?`Otg}_nd`+{WUsXcVC}?k25PMOC|d+=?q%TKm3oHcJG>* zG7^5IW-jn79ouHR%(-*UHtLFZfde0#1AP_Ya7D};nCRni!uX53p~#s^MpoB+`n7~o zEcS^XbAxwsgJaaRo-+i~U&x%Tvi8uN?|J*x+7no{kcx8*^WR2OP2B#DCO#9eH^5#? 
zW!U_8WI40*xke^h_!QQ6*Rs)Fz639S(tz7jZ`_MbY3F?zuZRWokgd-H^nh$ z<9Gt~48Rh+wOo-QBINLML68}?>R7fe0=iCP!d1X_*^~0Gkbmc{cssDnRZIaKF{-Vq zHmcTVxj!C;9))WSeq83sE(7_oDUCDLpK1<^f9&w=ohMB0*G@M;NPm9Xb?G{6m%J~^ zKmL5*SVY$z;KOD{e@MMAm=id8&qo})D7#%R?4{N$TrHSj)8yCu0y2G&eO&Gq>DzHQ zHZb+Zh;lW>&i7TGzd~tWGk5C$JG2n+fv7o*{v*z>UQNeR@fmrJ63kLNq9|f@<4Ym4 zCJ-MpqAC@e*2I^0a1>b#w-4zCU}C4SRt7&mxq?)uxpdRTmNp%z&&r`v)XyHgDg^hR z&&Ypg{8MIvS)QtG%Z68`QA`}?Q&II~#hM(_P57loAg+edP|aOVA}H5F5>!Upp+CtS z^_wU;@yGuG_CN{0nsiN#mihq!gbRCAHktvmt$^8_fvK*033YZ?4|^Q)`|f*{nJks8 zCK|D5c0_wqK5(IZg9ZSO*ICGzVhgGp+A|`G`le|y^>1sjtTvdX5=lc=jdS-y2M)Wx z8fc0J1pQU7)m;by;|k_;TYz?Wu?!2%OON0SCegtTvW4#^_6E*-E9XQLVZ_&9zI^M`))YK&$A!D(2cJ zt|g?tM=*1wLtU97s;JVUIf6+j<_+ee`pSp@oHqG%#_!Cc!dazlD-FJW+zgq@L^yzy z7!8}4&S+`G8CyEj4=}yUS}&jj0*Qp$%KWCT2r}dOjQhjsYVP5K1Wk#YaAIZ%p-Lg0 zwpro8M@LtcZfjg-6h=srQjJ^SR6Yb62;>kzAoPg~7xM_smPO42)-Uun=zDykzz}AB zXGA~JAOdLz-l3nlf}lrqy+HS2yPl&o zZX!@;%pM}J!|xBy`nL*ySLqQPVivKhI=d;tV!`ey?XWxIh+J0Iab8s2ERyJvRNGTJ zyU8k*j|L`dU`K5TOsc8BeO%LHfX)O}7CV+MIq#G9`L91GxJ&sESpS4Qp@;pZ$6+4q z!n0#`<)ydVwR_K3l;;TYd9pzn1U)19G+(urBt*ANPIU3;<-4o!eYZRt2?u!A-~-4L zh8#UhPBk5l@aR94v^0R`EPyp1E zTfsRv26w~sUVuVeuN3BSekjcEiP|_QkqG}M9pp?3>^G<{;)+*hrewflDsA#OwOZXFhY)@z94p z)IR_D&ks@^3xE8_f9#up_)vtY^xD_H)^`Cyv+oo<_`wghANrvmvKPJRMfOyEaAV&b z|2n5*e&N0Vb$Ir(pM4Cy7`uP)2Y=x2%FLPn^}qg??c2A{cjtXK!ey6TX0Lh8YmONU z;lWya`|Y<6Qr|bgE;h9J`q#hi^YJ?Y;aLClr$6o5YX2SKL^OEg8{g>j=fHuDnJIex z>tF9$?Dy4S?+8Zly#|w;x7u@`_gq`lcjx@j&T8h=#HKCwlqWpS%@gws`1&`$Wk(Jj z^mm?f&ppSU{>*3CdFPyO4Smae=wlzWFMRGAyWv}V2PK>u9635|H}1XBZn^aq`-gx0 zCu`^%2;RUjq_SDddxNp?jHf@{9{s3CIo%OPZQi`u{os80D_`;VR5qkNkS4+& z`>02{Ij4X9*Y~-=>_o@bty}G^-McNPcC0-<_xWqwpNu~6{2lT{uvUYGcLz32U3%%I z$31xF1;@qqkcT|T>%zOte|+|H!MoT7@S?$T=VHpH@_X&kXMKDS8$c#evuo~ontvY! 
znh<7V|B?+fm*7)=f9t zV;*(2P3pVt@bpo;=k9$0K~Dh?o^9X0!>-h~oLiXT;2YMDg!A&ge(kmP)vtd;?>+~e zj}K3AVthjN(0A_gk+PJIJMOy6{_{&;KE_1YfVIbmKc{ec4bFT-2jTEM2(FD=J`Wn{ zmxC~%#{0)QMDXaoS2kBD_|9q=r>Shl8Q$fEMJvxQSY@%|=EBf=2?`v>xf-8;<`3gs zgKupcR~QMzhT)aC;JjWOKsjEhheJs9I=1QP-BzksEW5a5F-=f3Eu!kItlHL2C9J8o z5{EWHuHSRV;WE*LiaH%_lbYySt2f=u4?ZOpg$IUyTy;P@V?qzQiqhcv z2(61|B&NwaVLhFhh;c3I6_pu*X-s=m3mD4yfIy=KA00wq)nwcnih~KAMj)XD569$r zJuL9?fv*Y}*5zW(^1_4ja5QP~O;TXDy~>5D3{!t8p-Hd7U*g3?VVJIUksW3^0uh)v z3n~xhcp{f|wJq0*&>y?mV}+cj#cn?mUIclV@ZyDn@qx2)e2=6>4<2$byeq=pQksbH zrm@eihb$U4?IlgJblPH*la|th&Hc@&4%KqiVrp|-ZO2R+yQ?uR#&Wr+axuphYt(H# zCmN_P5niz?zuQoIsh8@9_6r&@ZOse!VWRdl)E>qfihQwh&GIUTeu@hRJB2Hv159`{ z1L||LmQHA~SGdgZgsG3FgWA(?G_9#|hOk&kE7F$~2 z5TsD&ZeBDKJrUR>H0A;gDveJlsWfOy!IVX4hEJDFt`PV-QNQwXho2xA?tR5E2N1?2 zlm~w*Q5|S`wbefePtndmxaJx+Dih|fYoD~5-hYc44>jds?pB4XsdAfYGfM}8MjzWM z3;!ycHBQhdQFYx<%MR5Qb9892Apk0>JOn-no5mD3uK9v?NmjH(;DYZW_p_n`@b3hJ zl)6MTrtw|0N$uHO%)6k8dAJLPwc3DCpsF;CA?OE<>MBp2;o2W%;moLCX0`M*72VgE z%|$i8HA|25l~!xRF5NFF4sE6x(f%@(1%0S@R&7E+hcIBCv(|($=nIovbjimx+tv2G z#^sYl`)S2tQfw29b&a+3V%fH#DqxdyT=QS$lCDu}x@kY2Q_TBd z*N7Kld+!W(pFQ(2Fi?ZImGc0{8hqe-`+J{BK>2R+bC4v+=XD&E5j?9jM-~F%3Li$n zJ&rJ4kT$sHN@tG>t{;eJ<1o&(_<&)YoZFf|yx?GRin+MF>0-IeFs=GY{gTsjq@X@3 zj2HDnhR&DyI?sCqeO1%C-Ow838gzmaw$V{=Jrw5gO_gZbDS<9oIu!v0O0;=!Yd;aSgmmi@#}{KVku%mWPnAOGu;3DlC4BSi#MbS0aR0sb?;rk{)}F4Nv*&D2ccy?f=$-F+m%aE$ zf6QL{yT7mS-`Jo3&Tc)04Pyfl>wvN1-+lZrDuP}4goBT1 zV!|{)7#NHozYNj`u$(XmMpTZUgNSyc;Y_MnJYo4#G0qaX%uY$*JsZ?Y31bb*HoMcGtsmqEv99T~qf^ z(}Ss9hGDLD#g~?>mr~s#QNt?*ZAz%#g;vv@f}>p1gLz*myY#uNdM4C=6Zx#QL=Se| z^Wros+`O#BHEyy9Zq(;6^J%|cC2S04y{>U+4g()wIQR&O>BBo-)x%UY#BW4VV+7hK zRZduJV@Q~i-DjkBD%Vhd8O;)&qYHRZI?zS6ELnZ zzYUr=ha?b2QW2;X48J3tZ+kn`Xr~e)RecT{;1Q>n&V5lhrkR5JQPWtC9QuQ5u+oP1U&#PK^^Z zSs0&ai)|GRXe09%VGm=yptfU{tfIJi-nrg1hdymO)jf=)~WQdfj+s+>(I z-zJU8^Yk%*Ul}(+rEB~)ZPpm`c_F0Z(S*imU-K<)Wvw|dy2rFGf$JX8hqm}U)0o1C zPC;>tOKpkj<-bq}grZ#EnNE7YFs2zxF`ft8w>X1gO7-5Ub&9&h4(XpQDM#X5whOL<|*nfdrb`Nj>u=6M33lLUUr&)|CbzJQ=`e(@WRuq!YBV|(Uf 
z_9{y0mh*)9f@c+eI=n6jq=$-Gb+MfHyW`pD!4RjzT6`PS!ojv3u{;`^_N9{Q;o>vZe0QNWBX zPX5CKjh|D2wY%YI`~3w7E)cN%-QWG4ed<%6a`SH-AN=44?M-ib(;(G%4sN~mR@cmD zx8yKzjX64Ov;*JmS`)mFg#90CmB8=`t=h-bAP}d`p}2j!s5LBe)*NZaLn<%=Y9Vm^?d`rXK@OB$35S^Psh}z z$@~3%W+RZj;tDqj79Qul|NS1Hdd{0I^K)NWK4KSrpT#p*xJj_^DC`c>-WPcDTi#;7 z_XmHl>Ilt)0lbs_>%aY*{qdjvsr}*WUvGc&?sq?68Y?G*FMjch_SLU`b=~9sgeN}n ziFVahR~`4b`s%CgQIC3*ec=mV@b?>pE4>~(){e&Dppa95{flSWM{oPV!Ijx;ZHv8c z?@PTbeKlAn-Ls1>4S1CoT-dVlaX3{WqSG7mpFeoErJ{Aa{1MNzUZ-Kbc3nr!`Z|Je zeO0*E(Y=o1{HW>0vu5XCe5K*E{hKpztsfXDj*Z*o_?T<%efzdM^vynLv1H2S0?8DX z3=+PcxzA61S-PbW-M{u**E>&qyfkK|;+ZjThA{J2fBiS?AO88hz6pLUus(fBYr-%8 z>Z|NOzx@vr0;vqZwFtl!!K|r4hO#sjNp?!fYs^v@DAPy!!*IcZ?#-;vmP)% zV2A`cgWnKP8w||0q+Httb03&OYU|;HSuu9FB7C6H>~aM{oNMuU$typregjw?<|>Fe zQ5(4$v{A`+9lpyB_h6s*EqdSxTllP-&RR(i(j#a$XcDKC7}GfowWY25m`%#6ZOwd+ zr=mq#ZHwxIE4h7})zmH=*WWsNB*Gy6Ni29_i@S@5}s2(^DN9Y3s zxQmYqrOByI5g5&C2O2OvrGp{bSKIKFf@T7Yb9Po2RhORXM!#iLp9(tzR0bw@+!?Ow zlWa8A$8oD^EWo(OF?x(K(eAp?sG3c?uLyk7;Io7=!H#^!4h&|uGpPEYHN!bC+O~8L zQ(7gBD_A_Ki7|^c^bsh08P&}cA1&mqlgW60u+tx=JiGQQnz%S*Pn3!lotx7{SKTzW zI-&uiwBC=zB-4I5mmA=;0 zLMwWqIYHkb*n|F2UAMjcTGsV)H$OXPZstw2NN6r)b!HB+h@t({sn!(MIKW?!#t`P0 z60wL24iFeYf3#f?e$A`gl4!vg>ZpJEY7b2RCDjpOm)qp8sa}f1%o&;y_{!kJyr8x% zb=qzMt}Q$XmCc;Tj2$m-2&K@9!7=)h=1WQA31|Hgjs2d+?gRoU)qybw9?b163KtJA ze1_of1IPI-#>!yKA~eAe5A~_5@6bM>FHo88i2jV{5^=&CK}aMz=0*2ujS);;Ar!%^ z6#}}v>VvRjv*Iv$hvpUQ5oXIU*QJ}Taf5b9nfB>Ga|Jq6F9ZP483Bz8QlX*f(^FsI z!==fb?y4<=d;n&`FwcgQf9AlV)(6VUsg7ueAq>JaSYI?Msy+xj3n~{AWB3t5Lymr6 zyc34@X|)|+H9d`~p3>77Xdt2WhQoYJtRb9j2wz0?!;ILH!U&HM`i#uY!VdER6^QKsg4R%y^mL#T`CIOyBg*sW@=Zc_Tg z8ml`pDK~Fc7mb>Fu}^9ZMS#V;?yAl?(PN+TY*SoWarY+1?Uv$BtHkMbI)x*IAUyo6 z3Xd=}^fQ2!aA;aAG=0fjyXfp$7nFSXQ%|&(#uE%QG^PMwtCW8jUx;aXA*-=@NWRC!(0V10lpvx7*5C4 zv26^(gJAIE5QGq}wB5`XYZ$bIrZA_2oB%6&m~SQjN(sYt3J!iLD~`ZlbHvw5#+8yX z=P;i@Kd~P1vd#QOV<9NM>$NfXUG?J-gu-jheXX&y6D_GAX87((Xa#~WChCF| zVUD#p0|SECjR5A5PSwdh`1zpwhky77yWoNg+_b~qfysvup74Yx*iZlT zPunw|@eH>QOd8gXVe?-bNpmJ()22=SA^M3=e8T6%dVqR7X1K6ljei>If~kscfBV~R 
z@(Kve1#yiw3}(_W1BFHjf(HEKoGCy%1`X8r44L;h9#D^;{K=p6`qMA0F?Zj6_u%fC z0p4Gr2U-W;_(tHg6~I0?<}sjCXcCM2q3{ZmA1{6BOYKK~#0!*pK~~x9!PKezKdEVLpXp4EyqorO$otbDod3V`}Y3fAmLvS1R;^76`7+ z6r60V(|^x@{`2jJfB1)8fQQLB`i?q0{NWFG`cvQGyx3+B`|V_O3C(kT<};sJE`uGl zzxkWLX&?RQN4;*p_G@g~2uzYNcNnXfjX6_rA{u}X2fU$)u&_OhQTh=24EG^s`PQxh z)Ri@hJTSjl!|%NFPQO2b5&TyHHdkU5VAIrQ+jG_~JO8|M?J8jckbTh8Z#gK_Yc|4dv7`BxZ}>d?b3@cu~I3} zs@Z$p^ZoFA*!d?D8_CP>rW~!ek*{lEZg52T5(s~&cxf5X53{U6Y?0RZbLGIP? z>Cb+CV5;ln&gR#e2z$gM9^ucZ^)*+Xujl#m?0L_7uH(izIl>j5U+^zJ_k}M8n_o77 zm*?^A3n`nfC+xPF%k3KnA7|e@{3!dt%|B`jjgoIBj>P(Q>4Wgm%y(AsRkUey&ki1p z!R|aQC-*nbZnlSRI&8&6-R6(%vjcbCVEb>o*7o22E!%&`^>!dU?z}fd(XY1p=Fw>|#RSKHB~j4S+E<#Jj3%GbVb)l)Pn>n6hV4fT)*KiIDiA31El@GGxWzZ|wl zJ>p^Z%%?xqc5K~bOY?KSDgSYgd6ezkvEB9`IOx8OKYz^^?A&wC^|=e&uJ@UBO@#Sx z@Qu9!Yv>#>E&!be_l7^ohrfBlSQyvtXq7M#3bdOoTUuE3hfRKbMP1URd8^7;jgKoH z8aVSUsIkR~lACVfqXQoSZdOc3rCfCrE+M{EK;$59Ek0lvXBawu8v_X2m~2#SKN8S- z!|u`pYhFhwk+P+3+YY5uz9Sch;0P@+byJz0ccB743(9IkNrR=@?>Q3|hGxE8v8omg z>-6m6$qAb(=KMn%1_KPec|FAE+bv7vG8QAP^0Z+@>ZO0v&KvD(AKB`=`0JR=(t-=a z6&sv9*l{9`6L%Q6^-Rib%VGYhqI=k$Lu;g-O1j{sIX-4(J#gCt6)c#>XcDlizNxxm z8-0!iN_ERCEzD;8b~M=e$bzLh;mE$JQO1t;K7t1IS4R0^bhWeq6}57Asa%e2U6vawSsOERN{&63$-#Cs~wrP zhVB>CHkj$`s>g&|7a;-@RdEPh;*q{3tmnJoVaCJwZmPes>X(KVzLM%+Y&Wf}j|H5{ zcM!(t!z81=o|#*4-xisC))~W9rN#WwtZ=Ep?gIrARKdO!5mEKYs6O#Z)y_8yoY4=0Cc+4H+`ov~M%P)}n!BzhvwMd0B=PlXZWVBQeMKTh#$ zn$HL~*@4eP3vHMLeHRJl4YXYnUW8RG(OuD+8(PcAqyEgXlS2K1{}+TEl*`;>|1X+v z&>k~cmU&u0 z?ODQ)={3{Zg*M-~#sa=(V7ttT_UDb4?4~Wd?Q7%b+Z}~%Rue%eVi+hnj2nJd9g}eA zx#6(p{_t}Hjj6j11ZF$}P0W&S4$x3?Gi8DH#L9DuqCEGY%#U1iseR*yUveSKC3}J$ z&f&9)e7uBbR9*l$Pzh-!c$6h&$8IMy`*+IfM_ORD`1|!j-7210xtA z-7ro+59J-M2RI2IgAW+O3BIJpAR4p0N4R-0jZ@|VLLS~xmgX0&ikVWa8^nb;*Wx>s zHip-M=~F#3h0_x%-t!B>EXQr*@$dm*A%f%gV-O!E9WHxAZ5#$7!i0jA(GdVzLZ=GM z71q_W&f0C;w{6q0)pvTjpRr-0oMAw)wvi)*_3j<-z&zL;ciiD0znENl$U`2ostlNp zFy3*X4?_}$B_GSDn&wO%Co31h3XJ@>zy0lQzKHj81Y^T#hRdMN2$e96^&7wO8-{O( z;k5VlIQ#6gouQ3jW4-&~fm7a}`?;TUA;<6i-tXDJ|NFl?^ZrRsdeXY-@5{kQ^Mw~) 
z=-(+^Lm&DK8iZ*=AdJxJ7k}{=otcd}uD|)4zwzCc2wFONj>EV{SQqA{Y-w1i}b& zXd7b%;Sy~PX?Z4(5Z`(Snr8?Ir~@YQF!_eSg?ratd#$$A2!z4$hBv&yub}~R2S(^{`ZI9=8;Peq2&Adgpa1!vJA)PG)$!5;2-Gl#$CzgQ z`u^|#ejnGv=|;!s_+CF|^qq>;icfv^vv$Yb_t@mbYV(jGuxZx%9eendSGi#PEpL63 z-E_m5TeEpqu(5&jt+(7}-~7h6jydkU{cd~fJO0Xk`*(iNkFR~>8~*;cUU?@vSkLSA zhLsBA_vyfb#>t-D_vaBpx!SaAZ#l4h+;~T@k>*I_7Q3hR_2r}5nXyK@XZIYfE+6}6 zS4?^FD!d+^b&o-o5O_y;*ux&SeC*s2m{1x9-nWKL&V@i(>*Dwh9Bbj@AOD1X@{{Wy z8-w-ntUZPSVI><7*jP1uAn@O}?;gFgUg9*p^2#fGL&u76CD39C%3B}L+5_MWKZi}8 z2bQ+m%||b@T05n(lt#z7=hyU%TwTXhqHhnovSraIpH4Sb2hc( z63Zm8bTV`#W6MW286P?ler(!(xm7DmcEj{$`{ceoYQqCFZaSTSE6TKm-rlzXyc7+8b<@9+PCJ^nF|wzs|U&+S#O z{1v+M08UF^Kd*Xw-@GkwFb7LQw+0R|f* z0~o>|MCsT6^{0Eo)g$cn4f6&`gWw>*8FLtzi@{lK2Hz6Hc?b*_Iw&N>2Mpg1@xi3e zm9Udv3QV?W;GpJJS-_9WqT4rznGxn)!MPfrNAc_b!PP2!9Lsrw>p{@lmenAy&&_$q z@4Dw+%cj$Iiv}}{bvFg1#V|QDXOY=?H)G=(IVuIshH;&znV_2BFlA#Z51$EBv8bih z9(;6Mv1OAz?%dt>h^cXV^jN`?-M;N2t!RoCNPK?5(vhe=_(|-`7Y#a1gAIqOX;uRP zVMSbvaSTSVY97mGtfl&m=^>m@d97&7H3JeV3ny%aOxE)8g!^JCBy?^yEt|>s?)ADF z+=@p09&O81DlYUXH=FJ!V;?)Sv|zD=9Swka^+igPA*u)H(vs?{Hh0wj9o3z>P0uda zlgu+h^%=2kYtf=zh{{%^8MtGJ~$NKcFb+KKqBUhK${$xBqs2iyf|jP{2ae-= z)Jt@SnTNC?mCqW^_Ysh6QTfTq39G5xoYLZ(CswJs0Hv$8pk2{b**JH@%$+LgT5)Jt z^flgc8s`X0S_lhNUxZZkdei4BLNqiv7(Xr1q@@RJFKU)o9Ts5d)ifVer>OEGP@&#g z)s=okK!G+_R^u6g2JJ*J*Q0(a8*QHoeG}Ij04>zdjD7ss)O0^7y!v+&X<6Y*t9+cx zqiNDqxoEIe)SfZwuJSi4%>e{I!r#?{2%+Sn?q#%ouWi5?{PE!Lhg|@9joG&Hp;AO3+FlVdi|nxZ^u-ODL`yOej0tbPmA zV0zeQQj7v?>cXXD{_K{o@Jr-#aev8P?e5(7O%P_pXXhP3-jJz&&KdfkR!}HOjCDKuQA4M z71vzRT8z{8_1eh!L~R64*hYszrU3-$0zYth7rF$d#Sqp4tZz$m;UvMhX00$?Fzgt9BWdmrz>f(`)bQYg6xg$8k2}|Y*Sp^3cC-&2I%NO$Z~x}=hvR-lf@a{!JIjXJ&m{qOfaMkt3_IhfG^v_=C8V3Qib()VoD7&R;auU&_yO zaLpI(pZ@jzcIVw%H|{-PU%U33cKFCqpS$<&-)HBZbH49{{^X}VVfzlQ)&{(O;e9ZAHzw#^g ztH1hddapX{-`>7CR%hm8C|rRza@(DE4t*z{9N^(#Wdw#hezA9+UDf>SSywlu#T8dl855dPo~UT-^h?zZ1~&1>vg|KsUau2k(0|M*W%_+1!f zN%dtlTw8Sx*I7=UmAF9}{qX+a;Iw>SXU7%BE9^T$=NFYHGB63YvYXw3w1Z=C8N?0Y 
z3)A?`@Ol^@pfILda`}Kk5;!JKX#(_|f#r^g-H!{-ZijcIdNo(d48z>~ZZrNC)WfQHAJL1NCR9E#vgQB59(Wuv~G^jX?!Ki3d(jZ}n z>0ubr8c^kE%>6o`ow6&Hw2Snyk>0h_rm|`4Cn9!>=s26p+F2?OzbG}#Rn_P1$o_Nf zz~_tZ=p1L#JuM;#a&R;sOC()$28Mo2*Y~tquF(JkH?GB{olLrUo{AQnn#zW$iP<5{ zn>89uTTnTfSkw}VL$d_)JTPq|3TM>*oF>bJ(l977@5RpjOg7_A_Hi0MrO}3Y8=R-7 zs&%U?FHY{ux}O{?T2-{jM4~QGz`sQa3ah_hrngl7k_L2MXs(|xYD7uf(8sYjvz3JtKBLOr}S0zrKM7Kv{rM+?LE;4 z$MkKL5mDJRv8(nkYB8BmTU`r9*Ik_vI7C$rLXVt2{-f=tHAFA+B3$T~N>I;QBN>ch!c z+_t2XK1PqIjxGSyIG+_AkAg?_)Dz#eOQ31LQg<_B8Rf5Od?3uIC?4}uF|`4&AV{L| zRnk=5F=6En#N!FWFA&gI+LY!nnlUirdGVW6JyAU+T~m1Dg}giKZ;RGl&F`qnL>P$% z1O9C$M2C1%<6d(QK_|jW)*?(h;HVx)>IgN{%nh}VwE}5#T`!AOrL8y7%!yb{=@6J; zHy2Z0j3G>HAqZSj`&jrfNyoh6g&7ldBCl)Wq*YIxYwy>GFFp}>BIVnze7B3v2X!u}y&a9oiS66f-+4$Z#R zOQo}ICZ;t}&&F$S{IwvS z$nd_-j#KBA=Ty-JE?JESRAQT~`(JvauJ%WVYK8#kQ7@Nwq`=RummY48)A zLwqeg=lF;ZG;~fK(CVW+nC5KkKy%eKSO)j+4+sS)Cp%6RU`m$soDLm&hUS4_!~!@% zM@#TT4nH@N=KcV7%(Lq}JVHh<0v2}Se>a)=VHX-|{~P#EZo(N5}&>w3B5NLdm z2D58t64+>hAR+X_!kG`Zx>WD1aL;f2!TNe2pp(um}V_tvI>or-}Q}p&Fa$s0ctT)6`!TZxFt0-!^BbYdcN46g?;OKGoe@H!i4%^`*S+p_ zPKy&U`qqwj`W<252uAQ<4Q8ij?b>f#XYFRyYL$8W=*K^9AN=S??9-q7y#2#_-)G;x z{Z>2w+za(S)3?ul_LFw$C70W8zUtR(`>tTa#hHNcJ?3!7DLyX{ujV8%kqijXA;*J^cp z?eD#Q@Qri)%>rwIdCW#@d?N2TziL~zHEri^y^EGwwtLUF75-w6YvKL zr*tF|XLd8u>B79;VP|INoM(L5B^OzvTDEGXY^_Fp`RKG7%NrUS^{Q*QT=CEciFRE( za`dPz%+LG1(*f3;$3EsUwh8~`gE4W_O*iVj?izc;8~(!IK|=^ON8Rn($3ORqpRy-D z{;_)Zi8xR3cmOxg1FUU?I52CYHJFt-IQtiQaPkm?Fg(Jz!P(OTx>wT&84N}=RxKS_2FlltCoBomn4MFq2RriT zFngxHoKXFG*^E2-t}IoomCM*T_=N}Ncw4l_4BDLP+OE`;F7D>WiYhmz@+q%dZHQ(~ zi%g7LFPU=9864908!b!b^43%R{NqKmMO(&T8tc?oe5Bp6gz%56zL>00;u>adFXAS)WK0XA{N3zQUo-wOQ-)>=roj+Wv_&)gvh+mj ztlHgxX3CGoMwxQ;0095=NklmI^C9QEU; zV+Z(DcQlF6EIhD!Uf#8-$OkCre`Hcq2 zqUwq;ig<)UXhSjQ@PUHHPFZ7%wFMy?0;skgOp|I?N^QxE6*X>aF0?FU5>`yb?Wpyv z-fQbniRxQ4S~hDO;_IfUwlEKqia)AyyP~m3ZWF?@Sr0mdLkME>>7s+^AUkBbJ|MUO*jH$tR+>f>4Jp)?5N5cp*Y zt1gQwe@?ZSo!D)k8av1K`Y6;|p?mrd)@hFz9D_(d4bBeXy+M#8_z5D_?Li*G`Xw(v zeg@g#(VzQH{Q 
zow*Etg2>f#!~DI07fzlPVH-~sFlCDHlxHR%lsv;%Dsle+?_nD`LRjzKdI#pg0Cv#9 zz{ETf!V-42bHAfG0E7&N^^T3CIg>{S>^Kjd#dC!CIfsmu_3|C>`u;o+Tp@G_uNi~X zIrLQ##t(;wj}X?ocV7<7goOeYOsa7`d>;tIFuu@w`M>}7|1KZD^E)qqPgc+v!Fx7>I$G`l`zpP4+kPEFee3(#2gkShX;@)t&wa4(i;qdX| z)?S~Cy;p8LEXu$R2_hmJX(_@u|#r59glS6=xbKelY%v~F1&IoL#d=*WTlbc~IU9rHf% zojgJ~d;7Tk)=xiq`S`^j3N-B=J^7RN>}|iZeC*0zVN->aJ^QL1%g0k65}1dkG?lt8 zp7oAlnAJM^&+mQj^6~E<{NUhD0Bo?h?)vMO?|&6${9^f!x4pwY^hbYU zcYW-S-1I2V)%9?k>;U44w5787Q##_QK)|wIncvCd@S%ftv));O=RNybwr%Hbo4)sM zH!lru0IME>X|TS&pQ|=|_@HguvBMt!n5)&VXbj%!{QI@#p6mz#UNak~=jLZMHrmc3 zD-?>p7Ol6T4dG{Fz?=WMpZ-aE&Hw(*RhsW>@dTV~fq#Gx0P{>i^x!<$Y8mdh48ExK z!#Rx8MHs~72Rm~{&eQ`FO_nOm z^K9BpYr#ZD>)?Po8jTs4vQxY6v9Er3rweMf#p8BVxNNy09B{5wQd_r-eC zvX!c(kiIjxL<7u(AP~WMKbjkvSlkl%oNLWgG`1k0hjoo@#d)=#Q3*ZS?cP?ON z6-~gX#uQIBYiFs?5MZ=aJ{kmLYF|?2MyeIp0)h^V-6*u6e>B$9m71$ZE~$QtstZ~- znA5^kTV3&(jhj`y-Q<-PNrWybwYMf(;OrmSen;cRmnP8+^JQpC!4%A?{bQmVY4U0t z3l+8&(Kc$RoU-bJPy!QL`0j{t9-Prx*D&H%k7%$jF+;mi~0bF ztG?;3`pBn5#SSbk+QB9?2(;BEp_OnJ( zR{!sd6zr}o7unvJ#;@wF?r=Enb7K%OxGf+ugdoj&&keGNu#souN4|Jj5Dw#3;S3?b z=ONu(`)sVM|K9ZXPqr&BdxQPpv-WGfDfzR5mjIqqWf=JjY!ImVOTls*9Zvn(7`Bnq ziQ4EPPoy8t!7ah+XJY_^G=y|Bz&S{zAf^&|d19`xE5sQu+#bRQ_=&7!8pIK?92~|u z2(I7*f|MI+Bj6!Vq#w@9w+00)pMi)AhH^Y-c(2ei1%EEI3GFsCT-eNk2{_v4yZwgn z9otNuWlWp>1FrEF8#ZM4#(?24+#NRD-QC@xxDDNgyAF3K?hF|2?qd{pD^TF{{paN5 zd}*4do;*Lhrl*=?04&-XnWYhg5i5e@J~&UR1Y>W+{KHp!reMV zK!Qd{cO@Qn*?a|98iUmVLrc>}eSk!!Hm{PR3pBNry1ONxlMy1e|;I@W=hQMWDY2>KvHQMwDFf zNTji|cmPdx2rx}H5egQ^kx`qX+Bxo{hUue3mw15bI%NE9RDD5~CL3lOHQ@G_Q;(g8 zeO;L?fpfom(PjP32OiLFYLgM5geF9--lXTYCuk!_BoBRCaGnM6AjhMNCeIx65Cnh- zlid-GUBk8D^{Miz_xHrVX(+Oy*_f%!FMCf8bao}ODnuL}zIfBIB*iSF>L-m2=Z^Fu z4TBKVCyZYom(mh8bMztcfUFQlgSE8re&zRhuqi`1=jePbwd2$2r>9z1tdkGAFZ8i}5OUDJPvyM1l{`h9==HlMw} zGU0u;By`%&^$E%@vmBqPfW;w0R25pJk0%<8sI(J?7%6#Vxc9E(?L=hx*ELe*<#bAX zxXe{dVC59UhzYpca9g-I4&HUnu91~04L=H5`M_lW;hvu9F9R;&EpQt-^9^_#(^<0Y zNrMQIlz&CgW~u3w$jaU16f^%4z2^`Du4a0$9Dg#DY@NnUw{NK~cc&++4`g&IZ|!4?jUJeHvMp@5rkJBo=ZNLWTvY3B&l16N0tbY3fz`>-r^do 
z1u&%5Cy3;lRd_dPwfv+^Z?hQvnI1R$+t$O!M06CQTTVrLaHe79U@~!%=*hB7B_iwwb-a$o zPPuTQ9;Ki7R@(T+!`-WbDwL16sMseURKvFANE=tvI^5gf8EyxVD=^_Jkc|Zlae4F! z>Fe!YS4kK>C*PtLMn%RQq>wSSv&F)e(|_HFe{D_q(vIt3a}YTr4qLs+PkUU|4Y%WDQy!-XH=-C4#u1XHj*!`w$oq@)F`h1x zk_U(#K17r#%IsxMfEf}<`g+NpIIJ{hZ9#8on*NUlrD z1FJs~mNtB7*0hcJ6*`z)THw>mSoH;WV;>$kGqhB<~zMvR% z081aQAaJ{)Q&!xT*mtA2s)JI*XMHh)!IG@EU4KER^U9f&#q@GtS!wFG&|2dczK&PYQl01BHY963}<@H0CtJE-q~2y`N=Z}~c9>`}yL;5lYxsE`O=FS!^H zVIX7U#Twt`F)=7Et?R_{?jODAiX!fNmk=efKb?KrSaf+8wvrSsK99(v7sz)zI6=!# z{}hn1@D)A?pZiX^D*|$_hml2^LbN;o60*kS2PXQj?`FTJLb7z}vR`5)7DBiIoSJwu zc|etrC+}xEO|kN&kvC>w+Z`Xd>M#BaQi}u~P@NThlkh{4DF`b3&Qmawl=pI&=M{nx zO0qhpqd(>%pdQ>yz-hb_;(}TN@RV?oAeVUi=*nVJ62DKP^dNa{(kpY&FVycqN;ln_ zSWEgCK;m+yER1`nqO6cCj2k@k_O!q_U2-0ERp{zRB6%_J(M4gZ)711Bt{eMwoHUFD z2uX?#v59<_fd|S)+3P*CvP!yD>?s6uGX(PAP2A@y6zTSw584ICok6FCz!VmUGQxb8a>Yh>XZc%22%CIdjw&#`QF5p-0({Aw)q z2!}v9l5i9$|Fn7EQ?#u4Z@nUd$R-{l*S7)|*j8SBhUOd{?E1}IxfDL|h;Udb2R*8v z5(h%)=hHx4RC&&cosU1f7K$j%Bfj;XpMns_c_t^OR&u{T3_?{C0_4LBW{5=#X+!8mcr+MQrHk@?!e0R~-mh9f1 z4&nZyv1|FjZ}FEp%-ug1-@{1mO)faYr<5tFLRlj(Bhb+vg!Oh(q@(?JuuWJ*f}bo% zQKEJM`>jXV=9mx^*?5n0dTt*!ca05_AXGpE5_P&Qn@5y5zj(@!^#FQ|wM_e1t zNIq5RvcmM;TG&vM;{GbYQ(tM7q|bSm(clh~0|jF#+|>z>(Yy?}$Zu$3^>* zwHe?YP$OkRu)G5iHXQd4oOA6=0{>PXYLg4Y72orj)K7y(hpTYM2a^995J+e3f-g69L|ap@hd zps#%9A8^NC;~qsTuKrethGK`4Kp{GCY4u%rIlqDuZH%wJT3!M8yQaTDjIo-o4a?3z zsN2k6hKf!0WriYpfq&duxc1Y-HjOtaE$cRVrUCsCq&GH7_nd=2pH@wxgff}py2?g{ z$}W;^x{AG07kKCtFd?mfF&W}9<42NfGRO{-T_kZ*z8SyIPTj>s8)D$O1#K~c3JKWM zn`kCr_)3P4t-S_mSP`Yfm%X=*5SV)<`&($8(yb*N2C6TyOQ}d}^HVb;KHQE;@9#E6qXP#x=`wMO{75QK z5?iy{57vL-Mx-+4G}DEk`^~xRv4F zM7hYWow*Evhq4W z$g?JD!M)-pgFa0^*6tH#ijkJEYG7VRK|>MpgI5oh;1}{h<5Jdf-^w*Sr6alLsw_+5 zMY8?2)kS)DhFe?18f9@ED@N~YPdhvz<_?wj1VDwY_=W2N$%Iqwl_W2Ra{=g!O|HKP z*XMT923-3Xf};fqA}+IMuVcZ}TP!yQQpeJqzn3^S>2W)o=B#g^sV>>syQhg zL_8ODyg5cuh{ zIs)fA{cUBReuC^i;ec)2+_v8bH* zDu#*CsU~h{JNCQbKJBfi;Cp8jTs}nr-vBhE1p10VGR)@FhiSls;286SsWVK1Z2T~) 
zZD4Oo0&lw#xF-zbXITxW{#We`I(F+CXzoj=$94yv=KkmTHhvM?+rZ+q>wH`3&yhKpA$}iM8n3FiddkVe$vs81O}Y;9shv6YEpq_0%;sKJTVa z(nhiQe%n6OsvnMYS`&G0vp+eiq&%A{!}YoMyGqJ6S=w+(f{U#=i=5wqAt+`G%O zo@YT`PQI#<`y7<+jsMEUS*o2U?>qN}!FDl zVoWe{0gev`^dg6F<&wA#ak|WAcCz)_V^hU`wyxxQ~v~&GH6u& zqyQ(dE{OMXJt~S2i3}H-y3Bi%aG8T*Q?o^yocuEh)arppzIuq(ruc6n`C3}JE@P|{!@N`+kf}N(Q)4G2n2)@4u z2WCx7_gz0(`1rnTa1k`Z7IPcjl+e;qpAPc6`nvDB@AY+e!|us9bGG0iH0U(};F+BM zTA8Ss=AGjjZPsYB=P-Zjibsg_X9A)Lh1{EHOJ04D>iJ0f4XCE{$Kdxm4p8cyiit>wP&94Av$Vhm+I-uL=we|(``y^prpN$>h80{TC-&m zi&ljnRnE-vRx1(iIl~(F5@lf`lXbx0s2lT3++Pn7+T9^;l1=5t0%K8}7U5?&JMEwouw{9Fo^-#d|EjpWo~}3KVPl{ZV5)V>2Yl z(IzzUTe?WNUm;=zQi|5F9Lp`35(xHN09A?AHXn@uaK@jk+KwbXqo`sLt!gfIWgp=5 zu73=t!=#)2IU1Xv&zfxxU)~I`Z$%({sBN25c`SLvPI3ab&e|Cj1RhVWEYJhLoB64D zkg{0I6>-m~b(A9uQcjb6p;c#9DED65^mdFN3#FtRDYIb}NZ{nr$O|O5o?ygmuZ&OE zi)|sB6dF#8Myu5{5wp}}yQPBEd|pW&@*4w1ZwLE`hy82|_0kMAt!S9O%AdHQpgSkQ z5sr5!|5FqaCPTPNYT zoN(uNc7fNPv2t758`Bv(m(fHj3cCg60t$>1p*94Xf&vvS0@I)>jrlw%VT?(n0+(d( z*A>^nB2+ZqAcNM1>Lm!mdr%;cbTPr_&O2`N>KCiObheEk$+r1IC5D*b1`a zh1Mf~jeYdJQWo@-tQkIA;_Gf6nM!WKiZ%m{Jnx&}u}W;9RmB5G2W!W|sO_iEop$c1 zRYt?;u5W_)YkTEJ34wCh)L2pJ{abZa>~zeQdvIpmG$)mNlA-Gg*@^eo)m>kXX|{Mf zLMY6pLKB?Zn{SnaA`!qA^b&R2`=jy1e93?(nTeTOfw)Wn0V`Q(w z^D#t_khs|u6&_!AaR46gU|of4o33OMWU2!L*p0XQ(T1+4pAob!uwQG|4RNS&|B|z^ zMPmkq2{GL#I+~~3d#=2n+;#$*+EFtT0NJ-=QK=MG46JYoAV7AMC2>BOWld3AALln~ zIz0!~b0%R?wA~NExW+nK!KS~YneL<3T8XIkaa+k2olrwt5&B)C8K8qBqLw>++|jyy z_cPu-iu-^!(=Z^IWD$v!u=7h}mdHFB-#cnt--j{dz@Z4z|4@wG-aBc5hiL|6d;j^5 z0UR_47lNqfEN{{N^F%M028?OF=E}OG?u=sduG{!Kui6XVoy^Eu%xxXBbG!dXx(btp zobU#oYNPQ_oNan1&Tg3`2A%Al+`k?upY{q5^XB;oeK|0^)SY?}>A0DVJ!ON}mp2#R zPlJ1TWrQ-W?&X6wp1~OBuBB)|gk_t{%%x5%Tz9{VFi~h2tC(!>g_xM;mh69RB*M~CO z@SJxtXR2r|_tg@-4ZI0%hU?2g==R<4yn}m?6Di?UEJhYM9anNV$PON@mnB&zH1Gy> z;j{Wg@YU>3!y@lZ08;)A8w_i*N88>c^1tsj;3RRqAM~=1#!_f3Nm6wd*f^&^qPg_E@!1 zZM$y+Z`Wxpw{mLL8bmPYu7g1ZA8H z6E1f;b{GbX40=uGHe#_9eY@na_ttyV%JQo2%yQ~pc(d-`%i8(xvwvS>-d@)JvP&r1 z_)_;Hp?!XM9ikY?LtSM}!+);zuyh0+z=2K+l2`$-(>3ECitW;L%JMM}PlXK%by6Xi 
zZY(iG8}_~Xi@5^FfP@P6l?IRyXNMgxKqG^Yb*QN7M7pCj(_GpI&R#jTu=Ci69In?* zk1aS;9l5Z2-tmp(X6CdQe6}T24Ay8gjXiDV-c^++zHSC48HD*X8-T48?>`xFnWyP# z3y0Lk*z{O8f-?t?6q%NeCZ!^~CzRICX8xil{WN^|`b)OMT-;~K)o#;yjhkv^2jWA@ z$Q$CzNqDaG`xyJ)FjVrkm?+N{8)+O^KbL`uZ4I?uKyMUzmYB4HgObQh1JUx(9Z{?q zzzn3`A-$~^G;ezuxC5n#j<{o)93?#^^#Ppb*b9UoI@S%@-ZEnVf0`K3q)aj->I|JZX)B{C*!EYKT7ZF2{y_n;tQ^oJ8~W@48SWRGPc1_VQozV?ojoZX@1; zFqe^9f6ioa6{4XOFFBYP*TcqO-c!mm$j@nWI3cYzP+~fzkjf(`fvJ1WUG$?((YE%- z+Q-?Tvkqj$xLIjGyfH1NRH7XHiDpjdI(-~b%gcPS)G|pT5{!KRFH`TTu_7}6qgd~gu~6Y?8Gh7<{cIF zAvo_~*fUl`ANmRJZ3{j_q-nzG#=~6W85AzLZ`5K;YH9&Zjo1s$Q1W+)%mMy$^BbWmefJh>{ z`=-(H$_dW+)Va=5KT1tspf5!84O`TcbfRMa%4{oH;te;|Di88A?w4|y1nViJKZbNE z!fnv%RzkvXMrNhr)+O=rnamELr{dcsHhCazBVS+xngDms4$Z!+v}b1ihUPBt#*QV2 zE%l^#x%C6&)Z~L@-qdLmn_{l@o`(@FVxO~%$ilwN_kEwK_`T$}q>+Fg#82_?%a|}g zESj^GtUl3zpdt;;TgwHlZGh>lo(H}!pGq;$H~@jdb56T;B(TJyooS|Icv(vJl~s?) zr)-x#t4Qh$AgQOzzXV7U#CO7h9hWwwf3c|f;P4*zYCU~g1N2qWf5v@nNaE$cadxq0 z-LYCY#wlQL>>8Xz-6)4-b(vh4MX3J}Bny%cpC9ocWi#10Rpi{*ri+$M)3JVx;bQ;AoLmngL#VHD+Cp=geKDPcxBs>E&bKcqPN*`A@5;wG>EOO5wRR5_Jq8+ zwV!hYAJ+dcNBM6il;~AhOv=6axD@_aEYcwB!hAs~46hFH4!)ewugjzOAAVO|H?$5WptEr08PmMi3MZ>1;!0O475ss&6SrH33x_IZMCZM9%pMI-3 zo7XwB!LY-d+}&ZdbHoT5N>vB9AE!OIKJ29(30}-^IrqZ_&Z}(?`Ckc) z%tJkBf~mu4NMqLkPMc;_ZJzxioxJ8$?HrD;fuPaNAW;A2Yjx}MnrEMQI^Wq3$bf(2 z9mn27T8IlR^LYni0;PaW-qX1CN#5(B@P9Kg9%X!m1T#G(MK7Y;Mr~)^2 zwwmy6x^z9y^>j{wtvUE%{rj+B;hO?&y$>L>g5?qF3g^@7sg6H2?N@edVDpyvw4nuA zIDsYp;WmubKicr}dsHocO*ZgD&o#vRHZ1Yd-hF#t`6>Re7d(oHG{INd9h=2{wGXTUFy`UkX?GwOFPf=sLpqqYNxXoKDR> zs9a2Nwr?(D8TV~<;sG_wv8DRMV|`FdWt3@`fjdM*Fk^Rg?U3_^rUp+{?xUd~3S=+E z`rO5xe?vfNh{HIgX)JfMp2}yC`>udjboT7gVo2a9(WchEadhBgI78?h2!&WvSj^;m zGu(Ns3|Pf$`ZPJ9h>5STt|aF;6Pi5T^+n%W7-r^IR<|p{w(f4(^yR11o3hF)g3;rS z0#8GA!Ij&@6}o#kSVK?Ehl8V-NI<=%hW$lb#Wl{F6YV&;6EkKkI`GnBoJccJ?Y6ce zyOpTm(&l%?5mhQ;xIFs{N4%DLck&UqEG=VXBB{Am$Mt<2=~;idh=M$diJsVWdqG#a zsDwMJIN{{k%#j%1oS8ScW=7>maSH9tLrM*U-fWNN0);HhFv1@*R%xti^|)J;efO7N z{*REncU)}jY>^`@kKc)ZFm(C!%{xSkuupQ>|DtdIHLo=P)%q=!s=s@GECQw?4&|4V 
zlhQn2(@LJTGs&W2fPhsQNo|rjt$mpj?cRCC2DGH_W5#D4MaKyX_apfQbx=5wk~YrC zh}zd(@L|cHY2496N~MbpE{$%JUDC1WJ^K_|;q+CWmR1{XJSumeX_QXPoZMUd5F*q- zf86#9IJXCyh+LxF60ol0IfxRnRW=fzcQg zE`vt5A<(amS6wn+3N z*?$pgFO15?idiO&4vv#H6;Kq$aKDw=F2M4tS@lV~Fk!4>Shdn5-t@j?^2jIb{J3or zq~KXm-leZ8qj-IfKQXMkP*4R+lB>kC!%P=5o-~J<_e1 z?LmA=jU^BsqJNiC962L;hEzl|YBLckyXMkatlAFltd8*+{DPyBOgSx9jFt5iEi@57 zJwJ>?7mlGWdv|IVN` zo|bMr#LhR%wT8bx;MP<2dE7lQp1{I2`=7q#oVjZi49TRQm&|=Wf{8Q=h%eMJi((t* z@}1P;5hqq>VQ2xW^ebM>g&qiqKo(Ev>tXc-W%D4J^8E&c^YNP_VJBrd^aHP|2KmF; zn9YJY#actzKDk2m%px$wTsjg2YV^no>~N?8diH;$;#UwNCqWOqq4>BX`t>mHWn^L} z-h>+lzgKQw7w~eONA>%^XK@#(kTzb9FmxPrx~~_{#RIjSBwF1Pq$9(b2I@e=Jc6(N z*VPM$Yh%Wb=6AX9^JcffI6^nt%Ein(w)RE&P3qer>PqLwyqA@{CJMpCT5PxTF2K6S za)}~h(FI({{Q11kZH&?V3I`N?5qd#SY$+Al{q`Si%Dem2@D2{Ontg5)##Nc~X3@j_ zk0T7=D8(s2!wjg1?=`1|)3Z83G1gR}yKV50pf{&fokGEJTCYCP7 zBAv^VY-%f%o>q`A_`^(4ud@eS_P$5Xeha?+=uqVRb5CEzwIKhq zfipA}2M6jt0C1#z@yGjlRCGPn^C$2h9PI=Lh%9sKzCfs6M!H{8YLC`@20b?Yf`i6y zxpb6>C9qQ+0>ENIxOG_FnPF!-zQC#g#yNiRsLOYTx>ACD%-W1^IDkTj9NR5v=Qtvp zfF*GvWe&cmf(2;*TNrax*gs^Q1nl7aEgW5>7}DR>7MK#IHKy6W@uw2>N#5y_%$NUm zgOrg9d+uX5s~9#>#Kw9x>C{~wzFSf$XHs-5Kce%!hz&ufK*3a{1HE^q2uLkkmky^o zZMKiOVbZK*G&TfHykD8q>JJ}Kt?iH~!ne3H*#O z{$-~FNp=sIZ_|={a7Io}V1uRTAJ3%(zv9uOBxoNID)(>87|yc{0Y@WYDp0 z;;ts6(SW(Q!3E`?Yajc>(EQkYJiSs=S3~c5$gqaEf$qa77i6{}CysaUKe->6-HR8w??=!JUU8#FEoVAiVUw?Rh< zbMU*l=iH;n)Nfh4Y8%pQ;Yv1o9|<@r?I|WPwDp?~?G?63$dZg0H9%XQb6j(Ua~73O z^0%4x1zb%oragpxh#oRoRuvizsiS%-{7~``t+_%@ka+q^c<-kg`^Pu&*hBC+3o8T- zi3mj)m|TXer=qw(!m_}-lHa1<_wQGH%7Ylb%%X=&gYq+%Be40q*dKA zCUQj~TIw7>dcli#=bbB+v+%%aG(%k=GnV=~k&;f?pROR8-Xm@Mn8n4P(gTZ>#B&|97*u?}doGGf^D_km_iTB%5*}Z#QcmKOorH zFgKJGH$fJ-WX$dE7cV*VHM*OwytxxFD(EGuA|ZJF!yq~B`DIaAzPkiCK;Gs@F-0_8 zQ>rb+9|$+jTqF;%J=-zsxR2MxQ}E2xg+NV-Ke;Uc9nBk6&Sf3GC-MNMs`FJDP@K2b ztBIxCL)-9#xKRv_Z^mT-N@t?&RTB-QTH#EE1bX6O4SzaA*`j?OuHn?E#B58dHb{f! 
z|GlEe|L+wo`q0S14&Nma^$Vy~zOws9 z?Z4^M#MM)rV;|gxxLhGJmjWfm?w9P%(-#S_&fLJ6ca*hMIJJ`>bd+|swZoojehsvL z*19>SmQNR{QpDA(jUT@^zeF_sN8J=4YE$&II`jCAfiGO}<#YAJ?;3&9mQt+G)xXN4 z27qh?|9gu-(caexs<( zw8Rho*Vw#~nN0pS8_QSsCA)pP*5}QQ_t}mcf%CcW{(pgMVydBYxc_CXniq8ow-gDe z!`cSJ&&ten8J;;72d~yeiNa!@m`!%{x6$Dp!@{RQLcaf<1m3m4QZRK)0z`1(&iU4x zpYpH(K2cp$yTp;jRYIL@%WD(PU@&%UZWKb*jQj+kTF-=u464NeKoHrI1N&;KuptI=Vst z;uZ(u*emF|1C2>k^5+|p@S9&#!y?;xyX`h_yqMe7HypM<>UEKP#1O6I_nlYqZHK^N zik%RVA0X*SKFNSF%O4^B|8#~%CQ_10G1Z`Z^-#F`R6-IOtjR~nI+d0qp37!iS8Hot zpbIu!3Y_^_9cd_Pa)r&<8kXhDcr0b)&=YUg_B3T}$*?X}jZAIVly}DrV3= zk$3d-RISh$1VY4YG~R*E5Kzx$I*;kBmxK`lgtYplWOABuaGOT1=VE0gKw-6U94v*Nx9(aX45g;8xQqFop#vA` zkAOUCn<5jmIJ{swB&7#a(xP{v_@+juAv;(bpY^CO9}pXo|OmEGsi27QAZ3KqY zFM1k#y8zS-{%(U&MjY#>4LUuv3VZMoQgFgSeSZ62S=55Jq*&W>{bkw$;W{?DQe1DQ z?PjUn2uv=KAd{mt-=z4{$1>GdP{Bn7sOX5|MxHTyS7-OAm)acVclRBWWs+neMBcvB zHVShPn~s78ZW%(+Y5?l002EV=NS=f8txORb8l&Dvym%_@HI1BoA|e+0PC7lvfiooZ zD1|s4o{pu>D5R*a{y|RfLMg3>U8>r@F z)hOF1QHhVV+8dr4M>VLslQ2)2V-&y@ux{G!n_ZP!9_j+%y_~WS_ZQ}Bfx0);Do0F0 z;yt$UWf=5yT_*aRtn4&t^C6355@aaayFj1hGoig6S-eoAvi;P3Hw z5^UijyZx0jj_yZMjz`Lrw2jkjY0V8sk#?}AQE5TU_)om=O2lXXq#7`x_9&!C4e7<9 zf&sn%3}yEkU+{=4e(xS^EYxW_2hZ;aoSFV5Ho%#Ug1!tFIX5w}KAAslv~)7H)@RGuH!Mb@T?-|;(5lE9_3>*^v2{C71>6{= z+{)f{I6vEe&>9sOgl_-@2Tl;NT=3QY`8i5a&(Dy%2CQlHHVk~57^9M{z7B}lwvT&mY>Sc^UI`eLJG08sJ73`$PsXk``B( zx{UniNwpjB0U9^(k?cOTN96)GBXam+Fe;tWb}iypHlG`4eS(jLbZn~89|TS7>%JI6 zZDBHbSRNK3`n5@VLHiv;NA1h|h`0aUki20tuJ1T&8_o?lQT6}#WB6oyRI_Vp5+)37 z+b)PQHoq5k$@A=xT*46lb5%Ypa@eDK_78xo1~FSTo5jFxhFniL zx0?}5E@pnwJT;5H48wTo>RuQaqBFW@q~A)J!opN9N?+$&o`%1&r3G77+;0=(lW0Os zT99i&(4Bn`hro_rzXzzJoq5l6FUipATQ*>@Jx7pI$UybM(S%eI-URZw;8@#MVx{Fa z@5er0AHT%g`vT&37meXb0pp>deeb~1E_fv&$S5@BEy_xE*CRL1DivsoclFrl)z5#b zB_I}CNx5izqgN!%1wwHSJyjho1&H45zODpny89XRUMYV-ly|FktYzD~?GR=e)r5Xd zf}K8pQj%X>0MpaXE=qeXB<>V;S_s(MTc9t?3A(o#ilVD-`f$BbT435k;rtXD4KaPP z^&fDCW!~U~U^nYRjv7p2tI zwuC%~a5AuaN(V510KSoHe-K;+LLVt0e&B_smJQF59Id5=IA8F;$r|_FZ(YadqIOdv 
zi%nF}s3_Jy!KP7lXjV9PcbDjHx&Y%X9Qa5b^&#_JI^*B5$NB@)MNGOnOrsl@-1|tL zg%VWcfHF7!_z|q3%!?CVvSy9v?!N(-DkJELtI1^X*&GQf7AlpxeU(y*(Y5Qw6C!GpLw(krXY$;CpD;;DY`xF)5sxrguc zq*b$qF!}eQE|q=@V%vcqS%9#^^+ln#GiK8|r?wMV+zEUw!z)liFtRwQ`B)k>-J>7y zMLdfa%i~_O5M0Md$Sj!q7%4K>nifGtMGEx)UbVYqo{vkv2ecHhQ?J(Wa(A++yV(6y z>Y1sX(_nKa)lQ+6r&4n3-?UOc!oXr7aM8T@6RCvC|&6uk?%MOuD6arbcq z$(v(SQU+TN9`>LIRU5Sk3u`zv(AjzN&T%imEN)c>TX4D^V`8XctpfR)U>h1yNa z2uQAHquek<*VSS^4?l2<^vfO-b@D1 z3h~Q{s#|mf;{Ws&ixUm)fr^4+K}({~`_{OhsnW;gFtmd`e;D$i23_j+3{HfDP&iFF zexH6_{I-U>KUkOI1#yGF)@x7N7D{WLsBdF<`4pUpZ$|ebasM7t^~HThPAEtWpZ^P5t5?O&yC;KokT51j&mouUh7dxRo^_&etM?dQ;A4&={PNQ z;_%Gk)dgIHQN^kKzY%-uL}hKw``SdlTPDXjKtPE2V$&C1RlizE6#%{_6}eq5JsF|T zi*Yk1-)^}gz3Fg#vfesHIJbhOhWAb2y*7W0oBKH|{m^+&K}p$uefUU^7WJ0e`}jCli$hIz_=T{9jrTVnTH2M0?5PxC0?Jnxfti3EykNcyB##-~?uVd@%U z>rc6z9?LaSh}1QhAICo88q5##%|v`zJur-ZJCK3Fu8`*C0c*X@gC=u7fdpA0Rs6Yp z*X{=6dWVnSZi&XWb-Ilj!z321el^%DGTZlc5?K|QG+S}ioH`5O@FJ+3S^+Ty6)BMrd zxm0|kZ{v>fi}!ROo)Q;B%wEIOj!Bluj+w-pp!c3LROmR6LAuj~d&Gh(%aGv1^=K}xf7#7O@5c&Y|>|24{+I? zp08-Q>%GN>nT|&xAG5zqYx(0bCo^g4Pgt|^l~=yn)SCRnLgy7h|9mvuq~=x%OUEL! zt@;f7liZwmVaw;{`0MAYu;v3Zr0gn!T4N>=C!o+GJP3m(hl=n>P=v*);!S_602Uj? 
zQ3^O5Cj@xSk*T0qsoFHujYp$$Xv=(As-~^qD1iIcphC;BXxkieri{3U4X_0D&JFD|#E3Gvw_!W00fvvnOHxAcpw3xgU`5yd0 z=uX{%kl3`o^`+uQupA!9Atjn%sbQ6Nhr{2`WIurB)-?9w8fjQzGmMj3%}9{SX%qYs@SY z#^LjjPC~F2uEdA&cXp$V&mr8>ue#MakwZ~!GgADH;k-1}MX9UDnO1gGFDZ*3R16YU^<}7uX^pMjB%xgXk zfb_Kj#X+NGEZEN`=|vMox1l`|;tc138AlElh1>>(6Tcb&+NYY%4*`{%pjynIlMcED z)A>?nZd6jyA!3zP{^t39;i#+O!l)BD( z4;NofXN8MBd)I5;uK;QDY$ZOkA>A2ObTpoDMu6v+OIu~R(M}TkGzcM=CK%VW-59jT z4s*!OwrU8`$4Dce(NRV&-J*{jQbd#9O5@gTN5Kcf@?Hh=ZzEN9McrEHMv~#%P4KDQTZAvlL*`$oTQ@{Ws)OlfcKW6E?B zuay8C=*z~v2)t5UshSJ+?L~;D5sT?TLDsZN_?>dSmv5u0o-3^b7-!VoFe* zFVsTMfG6@)AcmgJaJvw>(TdeWwI+*bF7USjo`4jgOI_ZCeb1uvyY052n+Kqd4%3PG z>Rq_(ea8h0c*}L2VZA1JkA~{uOR}H6&+F4x-v76}{ofk*tr{PoZR)k5Hh|5l$BF=k z!kle;ZWrcPPGMqYBjPT@ozOGj?|vgQfeOg~p0>@o^nO|D3yneBtd>VTYV!LF^=L^p zo*=x>1Fn&|E`FJXxo@S-{9HHrGgSrRb^~ko=~|8^9)0}~EIr>oI_l?a*0p5kt`1+I za~SKjs5QeZ%#xh8WH3yFX%Kqf61?zg5yV+!fL9F*MwUAbnooQjbO&A6yV?Ab>{jIF z$`z;aRRxS;)+R!j-u69~n0qaDgdN^O^~cpE6PVCWl@3iv#fo%Ckuq7=rj8a9%Q;g*VzVO6Y$#?NIZNpPl48_7G z1pjEZ=D;4-QxSO6G{Y0}V-dglYk|oxy@k)1<6BsUCP>#LjwlN6`=GK+h^5eNwt2hM z1s_vVOzTnF_%$WIL`_u5hRh?rT(<2p+ zC%J)u6*2RyB|i!CLN-zsFhLO8yLcwL6_Z6?Des!*1!;G;06W_B$U@awkzQy5;qVq3 zq;k4nTv%Chr{wJO5#~`QG(}A18&TXbo7DAwpo0eBlz_|4MNLyZQ*eTKlfk_YqE_2> zM~r1N1aw!GzO82xP>;W*y`@O^zh(`!pHX9(3wbzGbM+kx`?yG9OA@@tSY@>YQ}wXRu7(LV$$Ad0q}W%t2GPZz z)22)uZCYPoL zs-0H4kOen;EGok~s{C(71l#VxcjH|+E89nvj&~JNRY7|Tsa^M``i5<54(HQWdvGXY z|7kQucAOH2aH#u}&s(*Ur9-$9z6f0XkvqX4us+icDGlEuxl@XNKW=iDRKJMqr8rY4 zrjs0?1n*+wHK&Gh<&k|Hh625W$&M`zU|FEQ-^OZG|8(ojaYl+%j~&ACNMJ|5;GZ?* z<0sY5FL|e+o*S^y*)JcSpX9sOCd27|Mgc6@tXOlxy|kHcP*3TFXmF)2>e2j@1?R21 zNuo$T2z~Wy(c?Y-KWx2aP@7S-Hd;KmyO$OyPzVs*y|}x3aCa+SilhPsfpeN7KTnDOss|nTV)3N zFv|WaWMg`*3|WgkWGO|L-{n>Rs~lXH&nJl7Gg0JdnmC4*A5|tC)#ev}m(Uo;^PJwp zS3<9uc>fD$VkcSu*iZgs^fBqbQB2vW>@2`A2?7b4)kg2{jsotOBV*i9HA(mroz;gx6zE-u*n%kc> ztKqWbTF5;yuQpub(iNfHvovuyun<`5_^JUkPBk?qBvmTBHMW!>>Wh+i+)%H?ab!#P zfvI1=*5*RIzQo?X1e11Yxo_9U;XioQZp+4&oGcXwZ-R5Nioa|Ue;L_&*;vo5#+$4y 
zH@7J}Mt#RPCy;bC6X>dtv7y4qlo8blqB4qr`O@a`UoK?+c3yv@7GMs6m4Jot97DrS ziX(m+{cw^z-s;C$$R16Vglc3Ho9nmmZxPk7$2=AfWBp{OG`EVJP44!cg^%o-Y#2{8 zRS%I1S^t~}*&A-&nOb%_3Wl_nDxxhn=@h#%&2?{Qq%7Kqq_?KCv1?11(hPhv$kzXGoagbk_3Jh zuyA%K$|yVZ={NNAe3#-s{6px6W`*Sq6E*f~Lcbl8XfN~2?xaio+MXNS2*rJGAc;mX z%SE)`=$$;%k3Zm%>4EGM9sx%*IJCl5`%+;V^K0E@3~htQ&50+eOj5&EOe@ZmgMPI_ zqc_2f}r>XXu8?&=k{i$$|)EYo0N; zoT129>RpK;uOT}oUJt#g+s%3&u=|Br{!ozdRtr{0DgA!7Z&hgsA*Qod?=sQlqx|Ws zc7aj)3x5frcMCHET1?#JFADys?GrpZX_}(@P+s!pC-KQ0(JVsRq=bhfdeMaDF2+WS zDpo2W7WY#-8zRBdr~xOD)kTx4zdJIl(Dt#mu9xJHHF)+QTa=Qx-+~zSzjw3hcdE! zI2lm>wK(u1x3%NDirTq&dsC|Pty!Jn+(l~v-1#op?Eq#*6ivYpRbP{9K0?`-KLW{QL*MdX{t#Ln`z(v| z=KH7r`=ixrBl>l)NSxR3TwE z!h0$~5ic9F+y>j^ze5T2;)d^RpzpW$>G&D$eUu@;w_jxGF;&pX95){F9Du8Zap))T z$^Jx(UTFhCb)qv-nb7rFO52NHTofLkm}iW*j(A_oSDf z$EKYHWpSqX9Pv*SQ%r~_~{*2als{84FVzKbI_c!r_sfj>^mWvB-I*CIE4S zv@`o-R650q3j&m5c-qhelMyMEdD3Kh(-b25PF8)cC~Og+R`IyKwu8<%p)#zT^C*v0#=b zhYSZ&DTq@-Ut+dvv~E17Pqj9_()-|ic1(^cUrzknut2&V^5TLb@z3h-)a`VG(vvNa z5WrEzM0a}!<(0xalJY142m19)QX8+2J?yAcAt7qf$EnY;g3sCP+MMXENGdjO^1r;J z9*=kD4n5K&Q|JK6WycMLFuby{>(Bq-MC$Wa0SNVv&TnmB%!N(4o6BbzN&V3jDS6+!0sWdX7O`)xP#o^}4@W%)_+igI?;F z|GQxQXZ;!{aNeM9*1)*@SYTFo)tg!J#qyzge+716B8cWnK7k|y961M_Dje~*C_G&h%a7Vxm zTAUUiwo7nyjeutE&rpAREKMmVh~2142F0+0o2X?oWAcHO6;|3ui7fM$JWRUa^dW^- ziz=gP#bAI5ceYF(BA|GV8Ja$v**)8F4K1?b7@U8PUOKcUn zsQ_oLe^q5r+3CzdsMGOHO3L#2Ud=)EKpI-G_K>xN*IWLkl1gVBcdIDfK#t6JS}{fTaQsY!uL`H`tg=B92kBe|4+*m}sB zBzq5{X!}GWfjedDf`)?X@Y9<{bflYXMzEt_DY{_s$V4uz=NdiD8L*mMXytQ0> zB%$C|Yoe(@gil8V8Dw)qnrC(PtU!!|Tm{T4U-)Qc(9l*!fJLoLDhf^&6In|ItymZ=xi3iB{${Ay zhC1#oh6;OsbpZD(01fOVRKF+Dp>v*AM95VvUsXvSrv<)6;nke~!sS~x;8_D#d6HLv;VZ6ebHw(Vj{v6S{g#lC zuAAIGY61R;rr7+PyoPHi!XJ|c`iy}Cbi4%_AqT;~JosZ??lE?#W<-D*!EER6Z}~bT z`pt-<3O?Q$Q)qzn`H&(0WkH*`PK~MnFN)YL6+_`Gqb%HIp^@hlUH;H2sk`Ilb^pnh z-eb6qPjEw&R8BFXpWUFd-0w00<=;iGW|!9ECTn&(8y|)xCM{E4fCiwX>pSJk@y?JebKR$jK2*}^l3Z;l}&ug5IK3R2ZR#9)1;nA4!`3Fd!MVTfGGj!C>~B0^nP8x-z% zL%NLtfT6`0C7p7`1oLi|tu%*;|9gSbTVj9*_!Z=yzfQD$R7kC>vUPW#U4RusQgHQ* 
zv5Z7y^c?AFpC5OaffzB+Cr#43G=j6|!o*d1Qu?O9fXd^YS~~AapN_i;hmotPf0m`c zY(e)h#`^<8cjFJA2gAiH^QVgtaRFt*la{C^0hdthcaw=e4_r{mrWTwZ38bnP14J7P z`zt=9$1vfeVe}9L@jv>h|13{=-2WauEYJOp`&^0q4=SHGI`o1> zov!g2{JwS3mT!=CNgX5%$%DP)k#R#n?R z>Fl_o3^tDuT_*%se+bFQr`%vGUOl{16$n=sJp3;IAy$<3E;4zI(%?A9h&iwB0BluS zR44Yz)g3o(`_P5ta^J`(%<-r2LZ3F*>kp6H=i5O9d>GiI14yl@C?01eq4P@LVT;cez+ia z+&W5T6(n}c&fupGHE*FxS(dzindMgBBwJ<)V{DO{S?7XaS?NC8N z)^4If{Anx@>AQiYd7Q0=eyj#H447Mf2e&?jt|3_J#GW5PK0-5cf;QyLK*W__8c7m~|~^D@Nd=<%Ao=WGN545ji*1 z&B>f3DzVd}#K#xnDe-O81=_~(dq2-?G#P>34Q!kB%8W5mH~C(j+%&o%N0Yv`S%+@L8i}5pDi9g~Y<-rdu7yEZIY1~}hi|jun$&@F zG6TN`9{{(RT9sd0e%PLphMT5pL2P;1G2BoBW7dI7B9Zs$PEd$3%^#GN&wQ` z$BL84O2lwxz=mt-+iP81FC7CIH!6wFDPo^4*Yn@0jStJ8zR(_|54-sH4lD3yl13194^XC58)2vX5=bgsQo`+*8jn_EC7GP9@E1&L*N`;D}t8sdv3;q zbhIaPj*rSID@v!CpCBFgvBrJFAfDb(uf3G`foE0u+OTd$rG>tq6)-xf0nLf|uj86{ z`zNI^&G^q1@2v0wqb!N<{Crb(iQ`dX+I>3=`d=BDamVq4irO+Pnv9IE`oH>@c?#z& z?A-~3{LuPA3Sur2K6v-QP-#J+j&}K^wHb%oP}N^t+mg}GK}U}WuRTVM zB~~jk8ygMi&Yq?U1}CB@6oNPXqajuc`c2?#l89iF-kV9s77_IoKA|L zbk5tLYFz1qCg1$$Tx2Ft!ej7&oeG3X+W-el8n)~`TbI@L5H8RQSz{7zbO!r0AAYdP zFz_dUbeiJXEWtc#fv=H3rNW~q0r^^lJUJWj-u9&TgNUd$BOU_yE{&kqj#0xOR&k|a z{=pgwsX~1CvD>nje<`y_6q6=knZzT>OH}?(fC3}qxF^1r5aG{clEf^Vm*8pbJgUI# z6(WG?10z+(!273{n(iX7q%7f3utb@J>CGWp6xzMVA*RLX^#LNy%0J+n)-Sth#fz_> z625_j<)bYeJx-pe(TP*ES}$nO3Ur(?q*hCycX?zu_C0(K;3&J4Kc_2e-rt}ga*WtP zsE0hUF*IP+(lzJXl<_Jw#0X3ph7jAqu+Un=A4+QL{H4GbYp1BX4y2>EI<|w^4g#|x zG2Th(TPp%p)8Toh==ht3NbHJU=^1frSvT%b+sjsQ8t2m#^Tb!B&1a9!2Uju2dV#>= zI0$8EV^^TLhUY9JNM41;CB_%<8lEcien8m!7Y~QNX^IkencP>GS0>jS+j>D6$Ix91 zMPVWGY8x3BdE6Hz95g0?Gf|qdsw06IF|_J&eb&+-T7{nGHr&5j7q-<_!8Kn-duc8N z`2I1>W!TAMMeb?25Uz95aI0=MNLkIfakO1`ZdUI>`gaAdgmuK#%-F2k2O?h%sUCS1 z7$j|oOECR&JE`>7oQlN=9ELeN;lPaKPGp?{?rNA((0qdws! 
zlw_4(Ac%A5FWw7`z76)O)8Z^zC6b`*X#RGbBFTFCUzhx~Q-sy>vPYzlc)W-a<%8R} zE#PMjizi;=EuKWImjJ|dhxwf>@>>W?oM(jFiITqZ(r=dcD~C8nSJ&5OU@1q!x$va- z7@bzq3Y|RX?t*C$iXJ7Fc8de#=IsAXi2Lta6!?b8v_`*bc1?9;rw_3zR1|nIbilh( zC8243!KZG*F0(g;@fN`2nKaZrR|PQpp}zv>oxl~-@I)&_m#{e7aG~dQ*F58lk{4ZX zX|vd9r-PSq1%2$HW0QPuEcP6-W9+m2XYt|%q}s{b>nEgd@Q=zu68}W{@=uA@PLI6F z%ly<|g&g)`5-tRa{>@UJqzaY6GdQYL&Gur{~y2?+Z?@26Sov@%eAAto~q=BpY$FwgFG> zn<|CHdc8C3KhNCMV^9q9;PZN70R#i1)AS$NKx{LEBQSQNBrX9dfbxWi^gcQx`IPry zMURvY^+;@iUymVD6;@jScbeY+w(4O*%1H6N2@ew05>1K-weR}j4wlfX(4PZ#J>fv# zaKdaCXy?_!ejiXl8G&2ys0^oNs)By9(a*-^;PkXPov{0kwRH3duF!=Ry$^z#=2#XQeVbN2(j_n!yTXXr$({x3PPyo!=RK5jZ6Tk9&(u8Rcl{Eo1 zHzL0VjzrdVocx>C%XpJso$nWn|Ml%~bFzR&g)8SFHH{YK%KF-hI>L$D)I^D;dI5`{ z#&l%Y=>WtjIBho4cr^q~jN1Aqe;C7?UYWW?sH%HD&gI2u2xDU+lXIypKnj!!%o~W( zaJX&B9*#K^7?JA2xSNTl3|=14ac zzVnsmt!1qFbuG+vP)T`EP8XgDc$kRAdZjoA7g^CI_T2d0RUP82}^hQP&vJEfuxgZ+M|VDHra|9Z&Vo*+C!9J!#-@O<+@te1cW zDj$#qQA-kRC8O58b)#dG71231g-a!|_uuZdZ0mN#0t<9n}x&oeV0F9Zf#}*gLeKs7b|+WIIb7<-+y9! z3YR@K>8D3da~tYvQRFwMB4n+8jN_9 ztAo41y+;A7I|~zT*UzKP(YLx(XO*$~C14J~si08gXAfi$g^jS-~b9NUI;lVP!|)ob+P-hbp^6*ZtyQj4C^cPRGc7U%Cv z=SrKG5+!?4h^2G#LQqTGHR9c%Q^^YxCl!qMw+NBz)d)nK-&4A)KFYQs# z3~aF&&?UJ+9JuLV0wtC#mjhmGf89$sUX=9{y0CuDhZAez`ucwUN-s`u$_|?k|Dav9 zP-aUB;O8PSpZYXwXtq^w#rNCp?_@8_NhhCSQN^Wgm1aBYFh0HSj_Uq>KbGKr@Si>wzxShhTYp*Ar8apAc@zy? z5$N?%GdmPcV?qkH&A*I93yR@;VItM}IMQ$>*Li0hQaE+qXo|Xzr1+6Tzl?Ep4iR@B zr^~wuWcHL6j=?05ebL`k=4U)X1xfogzQpOPbh(E`Ll3AvF+`P|H6~X7CJv=B=5$ue zGLeckQTLO}rvt&oX8jN6T&8}NWrV^`lJrT{2D1`nu%z1bw%NIYqr~!^g4-z1 z&$Sh|!SvN--y zjq73FI02=p7zZ~ozmR9@6GCLV0`1r1U$j2fsr&?uyVBiQW!sY45`T}RqCOJqI;F` zk(pY?ZzxHfI4Iy;#Tl;R*}pG?ILcsa=isZGn>D(>cMHx2JTNir6f~0{&Aiq>=7te? zEo?6;oo(1?K!82H4-MRua%ORX>;oRqimRo#+}rBtu&0N}Z|GZZS`UBvr7S)!qD3+y zxE(!j>I$}v3q|PgT+5L^SDf944)-=#b!F%(@cRBfdOjC|6s5qXHhsolyFm*#ADEQK zYm2^8CiVixQ$lRZ9QmE@{e9$>p^F!@AU#&?5l!t`9~75AQe{o*8BXIG|7Xy`0Tsn^0v(l|CN{W8jphj4ytUyV5h9CQihE8K&bMLfh&CiPu%%sc$6fqc^h+gQMtL`SQo^1cfvS_x? 
zC#tX;uPYbr-qy2=ZM20D6he+)wbAq2D2fS z+^YZbZdMw|n=85z-YMjnjFoL&L~o;#mxfdLr2%=|GEC8<`s5EXD z`Km?g%kLM|G9dP)p1hDktG7UZx24W^BIyC^02={cr@ZXXg ztCJ0?a8bI}Og^=^w;*@4bulyUoX>G5C@daf#dG=&KpKQu81V8f3mx!tLOQijGA`>t z3TW$Qj2%~qwS_P3V-{w#Q_?IhC)PR|4WsaKUPTW1mKn1H>>I2C*T1FcvsgPLo?^{l z%VB0ugbtJhYz|dKRKenV!_>4c z`$>_TYR!XmZtBRA5sqM+%DOl zV29JYX*yRQ?yqENusvS-8#L`;A>@|O`fOJAOkB9%Pcin`gm(V}QDoXIuokvXlhoAZ zrlnJ=3k8&X@{--rg9nn4zjUSb>MR@W&76JCGC@Sn>KMD(RQo_IgLnM=`DPy~VFc<| z6Z}Ye>4E$2E}AdZ%xcdua(O0Rv(O)VEh2BIX-tvAGQw{(yt5gm%(nL};2AGH!Ss!? zypMYBb3V4}wpk26`ZM3H&J2__MO$K;c`<&)@W0|6OF8|#=A1%AS<~@x8Q9gK(g}D~mz;`MG9Q zgI7wVzw`$Mug6%8!>ATkD-ZgD8Ng#iB-SP)Ft7RaX*RHR3Xz!>ozu9r1#O57%PwB> z;d7VfyZ*F)L4U3I2lC$LzCGg_lSu#6=DytZ>~UL&beu) zlN}n$Pc8R4CXX4DrN+)s)ME56Lh8Oh{L&d*Or|xbtd9{~rqLm_{$Kn5H*I8}kQJ+! zN7p1?i%X}&UKf*e79G8LY7>$xUzIb6dv-mQc_x4| zr##YF0E8!Ra&eb1f zu1m}Z9o)njpl=;!8I#R?Y>n9GV=Z(3}RJHn}$P#3T4D4C@0UIq~HZ1Lb((6TN zxwrf)F6_&9QU>7~+P~b28zsV#tBt;Osk){bjKE?UFN^khqVJ(-8@HSp&MmCcPtZDoof2i4im4pzR3TQsE)@ ztrkoHNlA=88|h{Cmsg5(P3VO4H3Y%-Bu%YotUuz9rr8&{<8E%4(Vq*h#g(`3TrQ)h zZomTzB=l_Q%PemYR8C9#9FSE!wa?gT&&rVoXS5N~>>pWsHe0x(@mVx^^`7JU6zkF& zZVuTRGdvx4{;RKGQ1adMtrV}mY%15+*(>}h_$P^ycwNr=_6k_)p9w8n-KC!9->O9& zqWWzbc(P_)62%hWa92K6vGfx$;}D!sO9|UaR99zjQ8^xwUQ+UV=*N_OoYNzCdJXPq zw9;_t$dYZAy)WjNW(hIuE zNmPSL**`(85baq^@kp)sDLiX-$}{)vg&kR)QDOuM@HEJx87-`umu_b@fRMB2K#mcT zr<}sNhG7GZGPi|(SMEuRF3-Y7E?C})db6+Hx`;Es>7wy^*}+X&<8i(4hoAcE`-9In zW?q)Bl%XHvab@WT_vP<3Hld#;0re#qQ`!w;?Qei*`#Bn$5UJ#v2FL&3Xe1^mSM}*a7Fq)k^ zf?Qa9>DR+UoIVHQIoE+c56_G!LhSz^=ApGnirk6ydsv})`FK~(e%#QeinRAV%m)SX z6p8-wVUKX=TMRiE8-R3vnI>c({U(arwrV{JOCGS>($iMC#QATuZ;LJ_9cFc+^pVstnt%}m zfHLZH{Z7}1xJ=NWEwg;oUK(BX8I``%zZ-U-%)rvqCB9#u4GgZ9Bk9<4abo4Nv16MRDxs8-_oD>m-I5MN`$jgqC~K1 zA`*pCJ%;bbwL6r8U<9s}SzL;ppH8uvHyI&Ypwkg|a≥@bW1K#p<#5Oy>4;JecidR%kn~h+j{&+ zcGm8bDcw0gUE40v-d~yq=Z7ZmK2D`qovClUCk$Pso7 z#%I?0``9B-V`6ksThYME|$Rr2(SXW9QrbLCsK zZrLiNS3XVOByH{O$#ieLIP4g;kWVY*)@C%n;_7s0GltX9%>Q7mG7c3mEn9MZk6o9U 
zr5f$E>@jWrM@tS|xNj}#=+|d?DQ!ynLGF|;xiP8V*@$aCRD`hSi%z!*0BgF)q+n+& z`K}?8zl9R;W}2~4pE z8-rTgC8iuLxaur14f)G+`@nX@978=v0!E9^s0Pm5`XQ3p((9e^C-gFz3FX`n3GDKl z7H|i`vuLJF+THaVS*;W+?~O+3l2`?&GyDZ2GcsjAO;yY_QZyJ4YM>xPtd6wOS`-iQsGUbcKs*s9yY?j~c+=DX{C^PqNVg}3hG zP~O23BOAh68-DpA5z~afB)o_^<^>?5@)*E*GWA7wQmu&y+}!oyGGFlJk2g*lM%e%t z&*@b|q>{;T=i~I$5oD_`8sEF)WD}?0t38YY^fTzPx!m8{#H}2S-w0zi;VwmyMwS3% zIAId0EvP42(oLgM`{wtq*w{*GLplB7aWi&n-RxS0LBp5XyBFP4o?zQ#Q$?Q?Y#=Dw z)BD&VEOo4$D9Ff)*Fk7@R&jVlDZ1v{nq^E?$sgoUz1@DHP+Kk*Y}C8 zBQICepBe0)F+b8p#?N>^b+g z#iLH-0=u>Dl|k4=*J)hhp``m%;yG`zKqmOD7o)R!?}vc4q3h?@@{O`b2tz<}`BVbL zuH)~&ymDD+RN`He`uGO3=X}ZbY4!aLiRCMR*edt~HG3!-R)5~1~20{UUIGX#yz+zOI z3rX(JMa0G})hmy59>zW42_LPH9p{?14i~lh@#6nY=;`6a?iqLwM^<2o_VLU*TG%Ql zt>)8P@q4_l6keR7SJq?YE(2No-q;t0OeEs|EzBiKOFnNZoBhkv>}9dY7l0{2*9=5; z0IK<951;n6sK)oJRqW^2e#!0Tv6o*&nLa*5Me6Z(g8q(}( zIqrC<*kFzCIOJGe-0{{~V4t0!S5&ta!I7oAXmE8J092;5INk z|BY-~a#WRW+*&$FHsedxu&w|sSpurNO`AcL@0H0IYGCrSJ(Ye8rDo?o>Q!Jyt8<8A z5V+SaEv3Bt^N>E_m8%|C69tBuPmD}Pg=yI#+GX`Cgm95k>j$raB{ErlD)EPoyF&l7 z;$Y)V>%gMSzgrzRBAm{6)=Mm8#uveY`cK3s&pD*dxf2&MUkg~L)hVdm#_Y`V-vjZ_ zwRK?+_wv4c#=+bgN9~H5YDhm7ST7GPq*=Y{AF8kp$U=JCs<}&^E9B?le z%>Sgik)3=gLKdS`AK*!OBw7;-|VLGm)ji-FSX zQ6t#GQ#Wou@h(Fx4h3?{PWh_u9X&}C`XDDn;lgJ7M10(8&Bv3l?s4Ls_f~s^JkS$m zfCl%{Pu%Y?T!@rn?drLWKyO)@QA(+{oBNRoj!?iQjbsja)$AqtL!JM*YBa48JVU}8 zRRW)gn3w)3kYi9@!h4&Z?M63+)tTXkcJTmalxtj6;1)Y37<2UdP7xuc`;qDAFwOB# z?qXop+jz#X@OUP=%a06-J3`X3QLLXvRu_~s|Klo)9YXH`*4YCX(C&*!N2AnsN!f06 zb#qH!5Bc={6OBlSV2VI^Wm>rIF0pZKdX)huM+(JekA?;oE#iNpFW*maqDCyAN}qIU ztJbS$Iy3&I20pBGUAdf>;MIK!(OzEmL|)k76qto#MB{D|CMl?Gy#VWsICe8gz7Ha# zO0G-3l%6UQV=4*yX5lAT|M!xOYi{3Y>!t*Mh`cfxOOPkkRXA_Kp8SyP>`1=DAK~{^ z@#bB3o@|F1?8?(h6~@A!*`oZT5Rt@k^98)N6WVD;02N7WHmAq0#_Cm!h)*j-Wc}Lc z>*1(O+dS(=dGKO(9JLvIob0^oJU~9)8ZHbx+RmyB7hhWMRMY}Fth9dV*NRgu+*^%@ ziC?5z=!G>(q?~m(i;t`jjaB6f(y@2`TCK+@ap9xp1peG8M{2!+usNG{tJ&gVp|gV2 zUP&5m4$qx}ZPb&`bsl;|B*q5ecf9cTx9Y9HoH(cN*^`bc@yyBniiS!);{J*PL2}8& 
zQ6vErwRJHJ4)t52R)320qdpZIyj|RN-W}UhbtW`2*RUAcw{xGamJbL;ae!gI^A*hF!L_x}4|3wzf6pJ!F+C^l)miXh z?fk@13Cq9QsEiLue%$g75}SasdT3W0DcU9j3-bjG;ZWN#IE850Y!wdzyuJ1z_o8?x zrIgggiGg(_`3^G3+o(oGhb}UZMBPILS9(oYuvqsO;N~iP@pvJyYF|wsnr{XjtF8=^ zp%R+RPNg&v>yZh&P(t9qgQ4XTOAXXB3Y@rynMr$z^)4FN)%NGq)h_+YE@^zpA2;Pm z*I6|7j3JMGuvy#=Cl#9X+?wG%Ts65ViLddWRU<$Y=E>Nb#_@(T+m*FJ)ywB8wXo8b z=s>ZS7xK2|Ow_9{pcF@m?;4vZcV8e`wsM;Y%VpH#m)S{vydk=p?kd|1uN4wGWU?|XhP%V>z^gqQE%xSt+Q6P?zqsCP;79t+n+f#{=^>z zUg0iPcy=K~BrcK6kE1VK!8q>Gxmn3mcU?}B_V7P;_e2w(zc&SmbO9fPE(#kL^OYk_ zA#bD^h}bV$%?Xp4cO@fG29i-pYB$loXA=wtYTN6Dp(#Y)~@MpdicR0D1uZo z_vSI2?Ie;gv}@&dEI`rFasBRc-`#;X)==;nX&>9V=O+#?@Ey*2`(KVy|GmJz!FwS| z2q_GP8C*24S_*CU66c|u{k>3keqpiv)hR%Q5D+MG4rAvTIHmmV#h~^%xM6)4+T$v!(pgtWl=GYGHS}KDJQogA%SS z!IZz#kx=1mm2n8HKKxmK!U~gP4){QW~>SR0&RU9|MmJVB~`5Ip%`r6)3$60I6JAe~1=qD6o zsP~kBx@mQBcL_{;WLuDa&Q)ilqEElD>B2Ldzm~rReRD6Ye4fG&kj|u+l`XlIR0LPN z$uT7g#9OI%LRyv+G`r6AaAB(Tb6LMF=WVl`9L`bdjq`H#8G=RupvUY)hG>!TVNSUu zjVO4OY&ZU{UCSKZxOAiZu>A05c0$(bCl}U~#YS`<*$97u)0ZsuLsBpkVBqz7wDF+0 zxdYcUrkc7R6+g_rxFdz^+c`!nFI5Yvx53EglQGb2#zj<)h=T^Pd^y+H82oY2Ao@I_ z(U;>k+s``RWyBL8HD}cG-XWA=I`QY$i#*{BirzWE;qO&K;oEObuUdqWO8K^hu|7^C z>jJZQEn+V&FEyAcYT6$B2#woy>QH-RlCh!8H(N4B4jT#E%m-!ZjgKHk+)HX7YvCBB z8?9;K5hCewcLnxg09Q<_S-DgDKEp&tVS&JOcHwEGw5r0!gsypZx2*v|Hx<;5+f|n# zQ^A)Ooh)ExBbF)nnj*Q7&Lu}X%Z*$ zbOnB;w9F~}wLGwmT$=9^Y`0qp3eNqOAM_sw)NiS_G0$(?@LxGS)A?#Lzx%D5aF1jX zfcA+3+a4onIcTq7=~jN6j%2c42zlNhTJ zzMD{^SU|Z6-mp#P3xUoNk_*oKooDqxJB3{d9ui-^<9C8QD9(K^a@#NIyLmgAFxtJW z%=p!_0}{YjQe}JP7|w%IfDmkN_~W0l>jrNVXj%}FK;u9dO5K*z`ky!1wm)iNX&fTC z&C0-_sO}= z>NP?n$57&=iX^DaiD3VOz?A7dMuF%Ps*b2H}N;w~F3KWxyy*`v>+0o2?KNy^ct zVd_##vEBF&mRZ)I6t>vkL9Aq(o@rV4o`7bTe7zVnQWI4eL_5!AL~%kVjOsvcy=8?l za1m}Njq6x;wFxqmTd$dRmibz209$u;SD2B~e~7)q;%B}2lOYz(AjJ$QH$nrGGRwb7 z!^s_}U*|l9B9au94Y4zjs6sUO;Gz8LZmK{jeau5zklkHR^rxh7LQJ_i*{&FBRCdeg z7%19E1h0Q+L1H&sqa8;}|A!GXd(V$6)L4g9F}7%M=W=t>bul>A`drLUt#7x^kkXbG zd@HMsPm3%aD^dJ|&gqt%p-30TdMpBhDwFaSL87pK7Eou`iGKF-<=552W*zwg6@Dei 
z_W~w*qLMymCXB+_1Fwt}oWNa8`SKgBcK;7kZyD59ynTPSXecg);39iMpxVyW%2MO}z-ueHYx$p8OnRCv}+1cN{*ZQpG85WNc_kcp~ z90U1F;B@+xiQ9}jtyTJXw9xPGe#`WprY#*Y#vm9J`-b|Tfh}kp&)Q0nzE8XwcNcyq ztaPjmM-yiYdp%6v$u`4Gfr45}jpo$C8sV`jdt=Y-hK*mO+nkH&cVMQYPV8o6EIYO1LmifR9PsMBPnEE9E@=}o=FxVW*a+@p?_h%u29 zjIVAw!Y@|FB&!Y<;BzBt93o6(nUc8mK3t`H_OtIcI02!#0r*8{1_*B2TZX%Kb@7lLo(V^r`TK^tkWHd8RYZjoo4&F; zU{JN&(0gtP!iJ|II-EtEY@xZ9M_Gun#V&kb$m0=bi~Q{3mH3++*@?x_^N#d$1=jiT zm9dqxHOL(P!|wC=;?@FVS7qAeSJBH|=nv%H%FYVqrE*ojWg28hK?ikBYOtupkGSK& zA#$OvN6hRSt!aHdN zsh0^!KP_-Du0ZyriYVe{SQ3(3pXbl&K|fJE;s2v3+F2-+isQoQ1NnB z#TQ)mr{!PIZ|@DbPR^e5%4Cm~UdDsqk?;3P18&+f@e0VVUT zRISUm1hrRhXr28dR-Ws^7o{@%9Tw!@+f7TDdp7VA4flqPDCUSExM}4<8qq0wfeSy= z_7CGNH>!juD5KjiCll=P_o^4+aQ|40b2poLz2P`% ziL^yEb5|{)g>T^jt&`Q1Hk?m-N>dy_sy+G5)}Jj;mmm|-^8jJvoYaMqhwQV>7ryyI z^YcshT-#yAunU1N;NjhHo@>FO*Vn{l`(~i(BPTn->!5XqPBiVvAAwI>)qroLWqB{V z%Cej{2ffiqQ}70NE;s_;Yy*w5Thd1i6J9KRCp3++E{|GJXM4T$tnddI`?4AL&9(0~ zWAnt65r z9AkCjbU|$dZ{h*sIx<#E6|G57TA&A0%szWcvr}5#{eD8 zKS4!4xZs(eGp}tiUalE_k*HVI5(Ey6g{^*Kl4sN zC3LiBJxfVhOHE?KIdZ@v&W)wG)nP}IBs(@f5c>XVx0V(4*{FWH#>f{`Rx2dG2@Nhp zs&Yl*ThPNFJ#+o+fzJ7ij6KHno9WaqalwB+twdUUmJdcT>=`d~#~Z`G%k9}el*AWa z=6p?mkjS^ci(nFv;T$0#Se6^*5pR@vO%GEKT@G&vIt?mqv5<{Y4z-0Ec!r1YYh&(^ z5r#h?tJ$abx0S^ZN^j@;RFJf_hTmcuO|9LI5h4-NI&u>epFs@xasD%YT+igxT`YBn zo$K`6EZ0x=kbf=EsGZjILwoa%q&r}KQWq1znI*^UsBK_Ga6{<(asy5I%6O72E(Y-? 
zUVtKWH_^R_NBY4^%NKZLpBa1R3{>UC%-p-Lg1lK5vr zs(MurK{WHmh!jo`z&QPmod_5aK8{JZaOn2@^1%Zuk@JMULUB}m_Jhb~O+Xwb{p?xn zN2=!g5$@~nOt8tB6ck%1&1yR6shIY?3!bMIxY4(N7`W2xWj6}CELpC1?&{GiFo)BK z@ZjO&m8g}?=^&W*GW_qYKWWV?<+d!@@s+L=E`Ij8kV3ArL9T1tL?tf}H_Nu*EA~}0 z^yQGxn*$6!!^wV?eh<9?6XjqVS#kHjboj0%6X(3 zzk88Qoj57aA2!3=@ZPWjuO<4+kK@F?^FGqy>MP^DTII#0Ve^MU6V8N8vH?Pj?^K`T zS9GDlkr@GArO8)2pt#GMheOTiSPY57^mM(OnnR_ba<`_Ff7C|3=Xvyp;Dx^zE+#Qw z`}4zs5oRd|+jb>&RN8~^DbFJ?EIHrxrsF)aLg@?oCpCZ4#y&a?nsAGJjhbmYJBikT zCDDq(>~E$YYi+s2jq$$Sdq`YYUpDa6_9j#=-}D3_m)*to3nw~8Ug#;4R=j&_w)0AS zn_!=^k5*(`LmFiHOy!Uhe1>ATL@{hS?V(~3Y}Q}c9xxA{nu46Hv^61+^`}Xwz)ngT zK8Dt|_ebYyWiM)p>E94W2PK?*Tci5rn#7)5Rh9;)d|eJ*k>qdEJo*2|Dwtsg*p~`- z&Dhl+SwcSNVxT(lX$LSS%@Xd!GLO#5UY;FUnMuVCx85hxc1BXn>$r|-gN>y-K1b}w z^x&Kpto?g_lj>yJD!m-z6x4GoQ9TUq2+Q&Mud^O!)Q2A z^1HL8t%*2knWYh!ySZD^ZXztnBm@raEUiDc?j$u?O&#hMXY=L>hpGW02{N|oPxaVY z3)A!eJ)#;;&Y7qk=AV?Op1lhcOUuo2&Va2>D)nkndgxNT zP+EKky}swuYA`Rb_1R1v-K^4bHNksihhx+g2!fC}xPvi<*ja_ge;QPsm30S=1d0d} z#!IJM$bO4ysrs8~00$w{#|(#TWmNFggOVfyy3jP$;-tHM2}PKQ+3MSxmiVouDuXN# z14tSWT}Jh?Px>EWlj;IB`lI5vqzqn1Mi8kJ;K&Mn?8i zCFH~8Z*jbVSj&=$J7onv?6=%qNM}y1Lv8ro*b9^Iwn+6|kBqVc4+|sUYUFynH;9Xc zQ77VyQk-?)dE|$QUIF&}zZ~nbzgndrZSNK7UcI9-QQ!wYZeg9LeEfUF5X<<@R^wf^ z%zHW4v=yipNk+lnp1r8Iq!Z$Z5qPBAtx44z zZt;DwV_(W1GAV^GmSwyDW2+$w6wJ!V@{E9;28pL0B&+Z`|wxgFDxdONVUCaAE>`yo*t&I^zcougqUt^G)vzXLr z&HVCayQX7szG)IGrt5E^=&tu8L!=(opuZzu?viRRTrBWMwNL(-1573EzQxe=uwQIe z)+S+ZHq9H=+D&(~I}p`1odBqxsX}~bTpok4dG7g<2JVgH)hl3Z-Rf;t{&l{q{?5fn zO5iz!6OdklKdt?y_0Dd@p)AQ^0t-xZt=CQ7RsFRCPLjPIoZpR-AG-if=bVLplsPz5 z+@{tXc@z;%E$Wp z(Z^T8{jEZfbFeWQv56H(VH)#3BnMF-ZTd;&ci06%q{g7Ymp?F(AUtvUlq>ZfKy%ny z6IwW2?O0z-`Ho?{9z>-lwi)RZ1)>uOG6~s{lCtl5kG3=U=0>w@&7G1$l&Z$`9hM1} zMf2E?{UUH#ZPwE}wzZh8b>qDSCf?et7;>I#EToQ`Cg-?)wmt=dIIv2*x=1 zsr+~l-;?~<%-(nD5nnYJb`Murcgu@mOnmt9gwbT!JKtL`^_u|-x8q*+%&bkdYLgpk z>IJgZZ{;nQ!;Tf09Mrm6wlxzAu8OZh4dtl;AIF$fFYWRKK&lo{N8kJUHmcedesV-T z8!sc5%Xmg(i963sqv?}wjPA7V^x+C$Pm5aVT{GPaN`Roh?VB{zSVbAr9v$9`PR{0~ 
zl3T8qT=0l?i$VlCBbUWtRme|vN_WhmudTQ_JgM@C8QT37 z%};}!BE-MFa%^yymDSsncW2CwQZ2iyO-qS^DpQ`NI!(;b8y4ygs?P5G1tBIIze9?b z6i`Kg0e-aCO?Bs^%2=fe?fGf2{(jxgFZYlDhM_Tb4gWlUUXp^xlSOIw!<)8+p2$@q zAI%p;B(v`6TT&bS__~&dg09dXfu4O3XBT)%nS_Y3glmzAaW=2fv<}_Byhr(3Vrh}5 z()@u-&eOylycXUz7;?xstT+*(B)b#L^V9t)pUC>0mtZ@hWK8Rw{uPyS`l-~&#HUFa zd}`G`^|90I-{ebUEjt8N+1|g#r6eb9y)F762q)_Rv+{s5(S^~c*>X{N&H;#Z87Jr7 zW@Fr*-^t-~nuz=3O9aw88UcL15%*UdDv!jb)UH`Xw!;VODJ=fSLc&Q{8E!e{>Fzxp&yw7UaIkA zSiHh(60hzT=N7TALBl>>uKh8b^wFy9%59poBGAlU9s>3+vj=0Y{i1OyzZ({Y($SA| zi-xaDOs>({$>^pas=YwxHAub!VTZ56hk*|?Pr^~5)PC=SUK#j=({V^P-V>v=waSK| ze@39$!&2cY_o!q1HQQ&kE){L3j3f|cr6UbYuNTL3&lib<_adrwM&-NvO?1=7cSC{6 zi_fx~7Yt1ZcE`9fzB3o=ihnZf&wXVLuq>z3f%2(Ev>MTyiNghntT z)j2~mHUjX*-3>ZW2?w@6YSFsc2^{IW3si96jIPfyUN|A?h$mF;+d0&VuV^KBIT4kc>I~hHJ#2S(LOaLz@s~=*jDb}@DO8|J zrC{d98R<5SicVuYdWBlRCYhf-GEkz%f%ZNu$%LB*$;Xgh!$x7G==_D!_%S;~{p8+v z-yf8e7_XQlHi`p9*_ zsLsD0`iBsWzjCqyO8qhl!~F`@@gykzC;elP)5 zCoMDXq$1!o&db<^5dlF^qCNJkF{t`cdN_3fP<1x1rnXY0vZbucci%Mze0Kv1?O ziY`iJ1zBruEp(9z<9Zq|3!=H^N(h&D#t}y`++1Wx$)50?NZRg9y)nGb(k(_3bZMgX zzH?1-pN1*NuPAs<%Pd5^KoNVbN258@WtI&Zowkfc4<@fA?0DyIOaNHhMFRt^pwBs(Zb05?P-!^#{(-Qeu4IT9aY!`9c!M=rybb&N8-G$1wHo z8;#u_o0H8D44lqCY0$3xnL2Du%`#P{#=2K1Ouzwxp{$P3RnnCC*UNuW^>h!Hrs7hO z8nAKSPfS}S<+tOH<#ky8I#c+o`I22?v+uhJHb!fE?=<^n&oUN!CAw9V&bL4#LQ;jA zVS+G3d^VRmx2on$YhXFvoBBp>6k?srz@nU}nNJjSS-$Y#U5qRk5&rW_H)>1NtdSE? 
zcq5fR`314xk`%FnBg9#LzUl?H8T#oe-y6I87$-00xU7 zgL8ZH#oxwpKI_9>+wL7cooMg_TUxcxJ|g)Yrvb9(wi!i3Z!OktQ(m;O0w`1d=op7< z5nW0v&W7S>euF#XO456;Xeg7sA0Sv>mcj4r=ZTD90&1@WlqHadstP|~j=87fhTewL zJlV`e=nvWdBGZOfA?1ueLS3RAZ@5P=#Lrde{)r^2lSoE6$IE>CzAu#f4COyIP3bXf zliu5))!Iu1PWy-Hj-JaRYT-N&#Xjccaih??O1lJ;?j8*T^n`m@_q(7}I-$Opup z36tCu=!vz-uVScnWR8rB-E;rKd{Oi>;)kV5KSIXn?`*YyTRC4D(g~R}Uz5`%(d2R+ zO-*%#zEJHT%Z!I@aJ8lA73KK!AZAe@ z!~b@8ARgpYuI+I7^je!Tg>2p4=Q0?F>6LOwvX$DMcw+K{N~d}@+0pxd4Rj2D=zNR% z4@-8DHE{z26W+ls*hN;;tP>Joh|h-XJ3o%#G9XQIWgMAu-tie=7RO~7YB}%ep{ZB1 z<+@GN6|COpq9YwOD)Z-4NvS2*7Ce-D=50Z#B_$IkPE13|As#3@5jJ_j)U!5xfkiN( zx)gm6p-TFpH-@09ptF%QO@3uJq^$C-g753JzfBIkxTP2hG3WQa*G*4Ty+^yxj}jjD zX2W4d^%-_Sh{NS^EAkfClu>HhITm-j zYpv(6q@p{c;JE5ju2Hpbo{jskm!&`zeXVH`f$P^)HC^CGhmSTzLgr*#Y$V{uolskP z0Frzyv7OMHT&Ww|@`*;9#+~#s)%F%AjO-uFR`TD?+BYRcJ{5%-(nwwNcAg ze3r+;=_+kn8l4!=ysthH5gwUO+3S)`dYw^NA44&^eaB!ypzkxO7e0>}JlRz?gJ!}L zo@}M>g|Tb#+3C=NuE45f>BqLEsDK9o1-kd2bMk~K^ePQJa0`SNv|u3~eZs{JgujW0 zaZP_uiiGfKI10HR=SAIbJ)kG9%kQQiycK5M#nzf?*5Kt>8U%#DX_FYnYE@Hs)V;#c z`!*!Ma!wmPQ*=h>NeN@S3=AZGIUId%HN#$9De}2E?7V$60rEW52>lapwK-duEn+W3 z@kXgw1~3XtE;$SysE>=gcX9;T@&hdZZ^^QeSj<;s_WLlrC#|uFl%f?wSQzAe4OSJ) zDp6e6NeAJf{$^CpV@sEOk|Q-n5V4z10g}rtS$8ufFnnsVKDVMSIC4}>6YHv}<|l0% zR=#7qfY}fslFX(Qm)!Z1Q;+@D`J((p7n|#N`onC}*wYE#)OfSH{+vG5J1DmZw7M8F zLZUdPACmp++Jy{0&WYkFJc0`pqnM`au#t~MqdE^L-yI&qV2nXcvr==gW~!`n8t zGSWXojy`$uG4A`#FZ%a^;CqY5(v+9Z8BO_rG1|VnqJ|lo3hiM$TUgOwEJ+3SG{1dt zeS7-;mPvx(q%{u6QArQy0sh^$zy3)z$`C{BcOD)pWX6c7x218M?gea*fO^Q*WY#c= z+uSSuD!D?*%F>*+*Tx038HoL^s+>7=)2TS4UUH_|dK)lWS7CI#?0kRAu&rt1qrfX; z)o>f}S}28oVt#NL6S9N5%J@|I`Rd5K)emjM+h30KgSqrFe&&xueZdA9u8$k0M78+1 z+-OpoBBz9nz-y|m-e`pGxMRpGJ^tHmHR>A4ru*NLo~-MykaIVqyE7Nt@)uSz0T*o# z*|Ce%VB)ri^5;^Kryy{o3)n~&Ip_Glge{~VVx0%opnc=CyP4oJo@!#2b8KC&d~S=i z&Vo)V^<18xpbdOELNUS5xy@}c`a$Ep90YKVls_n#+x;MWb;m2h`8N(Na(}%3jimvd z9KP&?8y!usdw)G-okeKB@SB6)`qmt2b*ek%Yh;pA#?Vob%XplHYZDf+j(yK67I<}Y zss)%nM-c9e$5^kc9mz$OxL)jiT?`08Hf&{h56bO2zeRkydW!S5$JA7*eFIYlN-20* zcdx?T$3ChrH(shqkSrH8rg72!N 
z3M{?$V4NqWl5WmuAZF5KT0q>-hy&Ny)*;IiACv@ru5nwNkzXEwQ{g{LN)DyK5(r{~M=`W;u5|+-1vK^s{ zanL%xDc{(2r9{3h;HA3FXDfk*DDo`r=TXDMI7@sD5O6Pj4FQ{SkVOIlh$5YG|GA0vNQc^ee;jGd)e6#W| zzAJbMN4}-Ge2NbUb{Y6K{H99d9&J5J+~~}NgBo831m;R-dTC10fp5-gbNMPLbm23C z!iIBm=#7?p?C0Jv5}JrWPokF03qrXYF~{PVi;u};)f4@(9nm{dFKmbP z#)JqBXH*Co-vyJ__A1(0mwx}{p;&X$W0vf+#j6D}rBI(t`KpBDYVp{wbE7+su83M% zn!gZjjLTz|64OOM3DO~;qn@$hn|Y&*Nk3@SG66~%U-son!!(*1FB*6!>y{O_Ags}* zDY?nyd%$|=*5k)^uNg~j@pNziCUrH>_A2DlIQ6NWq0dKz$~vvnka38J3xfozdsVG| zlS+L}x1C#1Hj^u-|s9GB>mywE!B2mdf^qrO-+;yh8 znFS)FPd#LlONtQsigas#fZ&Ggo_uH*y(L2SjVX=QpHOFm-Yk)QLQo(VNTAoRdAgeL zZC`TRal}#2c``;U@$1M-)1h?7Zwjkt?TS8+A);SJ&xixKavfg@AajYF;<{d>e0E)r zo}s){hpkC5Z(Tl!|InH8&D!p4uDr>tt>aV8^V&q>p$(F#y(Ao)AnnKSpSN(~p4&h} zzweiS5%nY5k%0hqwwaH!&T>|2|GCR}Aaz|2a_zuzzj1xe)GtN zYWLPlXmQ}AicXNGG4~{$ELaTEXulW;*A8}K!1`VAr?Md6T7qvJ>cb$$Vl4N+)_-k~ z*;9`wB3a)t#UEHiM(T&Y+DCimNO$G43k98+z z7TXFVA8X?iNjT#eXUH;^@{-tbeci{~UjIcq!*ZfQ#dvOwp)T&~ zb9;=9xOaSVMvOS#8hX$zJnj$iTI#_j3oxr2%%}O@And0Hhb_v?Cs;@fHsAfx)$<3| z{yfXsR&$uLz{Lh*k4L>mRL3o$R zT;0<$Kuf2QxF6_~=78~hs80_`$4Jycrh8~RuB&N>qwGqm(Gcops4;GDo0XV`CaGc( zrXKt38*y2;5}%#D3-du{YB50F{@|F}>KIOnRop8Zgqr;y*fRPxmIzn=5g)+EOO)Fu|+G51r7VB2?z3-e&qafQu z6xU5^7*bHB=EZQ$%-ZO1p?KNUj-&PWG#Gt{kUci<-U8a@UA!JwuWJeru9ryG14rob zxW=xc1cfhzHjVbIp6&2U!}82?%L|8~#Qg(1LX9SKUw%PLMY9$qiEOLgijr&I7j1x;p;g0dCfM&L1yW6eEc2u!fPwNJJ7=h8%<7vr>yJc3? zX|na!HDIK57tyvuwajBbANKC0fM-O*@Jguh_JaWGOS$hBX*38BV-c5EBq`}-c>53h z_XPRN6dt@z5}YH;`Cg3nWYO3r=Es|L(_GF4?zKU<@kob^`#tqIs)r`vzn40ido1q! 
z*6tN%g2~VPxQVPcyYC9uT0JJ6j^u+gx8rA*Ub90mR_(jWFv^Z1+IIg)pC@C2kV4P7 zPpfw$djm)EZCCQgkEa1`z2)Gxli&^#BTzl{%fangke%bBAJ{Vav~6^FLxgjz`o^$wywEx=(y$!?e5S5Axfcq3C2ja3agggCy@KXN5 zG`eqRL^SgCmA129;U8yLV}Z0%{zI+O%VA$8!p-jzYMYIg3q&6>LKLN`7DE>kJ9#}5 zu8f5{&RRU~;+O5d9c8Td*<9u=x9BW9nK>MbkGNcVov~*_E4atx+DQl%E{axGZS&rN zhS(DZ_3`U6Ncrrh*xLSD&YD~$$7qQNRrtk*%`%hNYr^LIbJkm_ zE*gx(_f;^(!%gzcFuSvdZF&C|trlvQ<`T5MyXEYtnIXIz%tIXMBo;r_icCT{WABXZ z>}#n@TJ*YkX~$~F_u6yS@uV-@Z20fCi~>JnCa9Svnp2HibZE?JxKWfgrZM)jlO|2i zow;*7aT|knr*BH`)6zEo`Ryn;qhVTUx$!>p)H>t1x`#L9h9zoz$oW_j)!3PMV9aNe zYr0a{)Pn`Z7cl9S^UBhzd$Kxu621>Jvh)e+;(yRX*4wN*utDC_svs4S&AxQx^XYWuT9KMz6_ElQP}!6u1(Rb zzx9{f(ZfHV9k9YPX|<`J64-aLNIt-ekO-K!urv0EMOKev6jPc6CWOr)?{(~Hcd9a) zat--g6-8GS8B`griIj`HkDa^9no7zJwr#`^^n03(I|Hhs4+3Eddk>{4+(w)6ScC9KN-*m0kZMJF{6eaOUSM z;7dn3Wv;ajy1s1c&n`*OW>MVQ96W!%k^Ivdg8??YQ~ zuDm{-oob)nH)1&(S`_Y+=btC97v?m*da&Af7#0dZ{L=xJLbf~O8)6|=V84^yp;`eJ z8`1m+Xji3Xq7f|7;1YW{1$x4(fZ+iwpO4y6Y`>px-?5=4X2-Al!Z--i#mn`y`Bkd~ zDVtBzoBqH&=yy&{7 z%u}mA{MVWQJenj1lnr96d|$qygUH#_JE5d8DwF{px82Z9;zqIRE z%y}*me(|z1kpy(Pb{w#?SmJ&iEsE{Yfmpd|W%8gI^Sti{KXg}s95>gzgj4tbj;i@= ze%+aE8d%cXM zi`skHWbJVOGnc$`@sNJi-RSl2iT)ya35SEmZE@?q~{f89%F|nUQ@0JRN4htjn;jtQ_<7CJ_w87juAXG6sngLS>l((ZiIH}Qx@}U1 zRV`hQoG=RGz^#p}hr??ra4CPuYo&1-gqZUpjkx7?3z!m=5tm$3JQN{fpcMSH?`Qp$ z6|jNMGf}kwZQo%wD8Lzt8fkA>%!?vrM2hM4o$R?w^~uvG3JQ6rO#5!+CJaw2y4r|~ z8AGY&-Pzw@d`zPWq!^*wxWRycZP=p2EVaic|D!~$NeP@(Ln(oRs<_$%=ny785)MP% zY!%hge?6+eI75w2Y-Cy*8+2w~qgRfFsdOPf{e??4AKp%PYHfG-rD4dksKsgur^&E* z#-^@UM?sqJ;Krcur|!E@q76;1$~v`34D8N)HD)We?J)esXI;Zdj-6j#Jx7!aA8SMW zC_i#`&N6L<1ePL!oO?=BBl0zhGT!x?Q=FIO~+0ZOhm zA+9tnjny$(lhICPf=+s0>MTJZDF<*-2!QHd-tpkqauO9HNMWtF+qgS!)_>ik^(M#g zXXr4f=fWlO-KY;OmSk9C)s3=O-^C9d)*A#7AzXk92qw$W!5Y> z&-w&s4wKmDxWi`M{{c9f7;u&yN^$I&;z%5mTV<4lu~N*Y3=}NN>?Oj?ZO;s%c5pj% zr4Om3AFcn@pL%$oKrkb%F@)1cP~^zQQ8z#tVTz)K7>aOaw1d$m2;mWhI$diV42@+R z9r%*7+EDdB7p?Cn3_iA_O}HZXJ_S7a+`c`|>qLIQL^Iu?Ou8Vlbcwh<5yHvW(`6Ow z^Qt0ETJOb>o%k+ylzrnOQPL2P+|4!L%eRirniWqkFdOf?2{lS_7+C>yPBadYYKBK` 
z#5U#gOh(IHKhySPgyAvM{$KPq&69vj%A8-L3ZI?s@Si$FE84D-pVa=FB(n6%=#`jk zd{2zrhyNRU3t)2?V{vw+U&Bi1{_(XB-rS#;M7I4XUTXj%p=%=(fOn@hB9 zUKjJDO0dFQ;%Fi^qpW*of2r#g{>>s!j_PtJs2$EZ)CshQZh3*VSU;r>59uj>s{;vG z>EZ?Tf8F1yvzctbz24y8F}H55S;<4?U;ULjgfrVWG8ggFz25SM=FdXzWc88f*JLu* zSs`naFV;WZdrD$0;rEC9CE$@y#QvJB2<`I^-NCWOpAQ+1Ki8LmoDPm{4#Vsl3{sAH z4O5Jd>t1V~NOK&D7gIq2FtC9+8-6wT+^Ev*dj3EDjI>17v+v6_BzhmssSe04Jc?AR zTw2OMF#HfIWGYDPdRuOnh> zf!wT>hq2P|R9QUUkdsLr`r82=djl(z?BCg{HR(O%yEj%^YiKk(!kr{Tim|=)l(06z zyvbAla!aq+Wn(^flUIyO(jl#B^!i*Goe)NO?d@@;a{^@M(mbs0LdyoxY*1lQTT*9D z9kZf#&S&UWyXdj5x3d` z=lN6-uNpZUL>U{kY1Jw4Zt;JGYsHX8DJ~Xhr9x^*-IM|N1p)?`Hpx%afT>X4k{baf zp}xfi2v5%~ovM$%k68W2zGQZzGmt>tC*GRS?zD)?R`|28qTpLP8nuy$7>O6d&3ZW& z?8jYPCpYk$nt(l{S>Zb6g%LDqmCj4^SkZBrwi#wQ$YI$G!SBLTsm{bg@km!ZKb%Tj zyPKcQzfBvsbD)5*Kzdq_3=SL2$bHt($YtV^YO;+3%~1 zDfKhfND}u*o{}?wj^uMw!Ybv9{w$+FcUr-HC*h|1dMVC-;Y`tF) zQvdfR8_SyA%ob8n~h!b^PMqI+T-5t$S(+z>vX^SM@#59)RcsD)7{YNKpXuSpZKu!nj$UV_st3P23KqlvkAAxjr~5?H$m%ZU z#<-UE$@}S|)|<*3sHAeI2=tfJ-VUTA@p!*(E|T*xlr`+P$82gJS6{Su$5|s5^hNz+ zJRm|sQoY~@(Io0G@$4UVlZzHAp37)0Z$z??B!EP4JRexc8dzyvY4uIflFypARCi2% z86|*{m{l>jdTz+`YksQ<%(_jl#-GnoU~J_Nl5y+gD_nz3Uaa*ciIeQNt^nDH0!@Or zZki37cSG2E zz1HI1m%HG;+Gc(~5f0@m`&G&Yr8dMLWLP9)d=MAFmR>J0sy3+Hg1*0f3M3R^e_f(d zG*_Wj>q`^vGw<#F_Fsv*rr+Rx9(TF_d9=V-`n!L9N)|Iv&Sl!3&p_C4xNXf;xlx@R zGCm39VuQ(8L*RB+D~i{r4~wZbd|@VERM{u0S>6RU z9tQdfF-p|QkN>G=0r_`2Hmyzqkeq<@!A;8CDCx)^!0v8JS2l7C{b|)`0}}(|skJBN z{PQBZC~vayLDie;j%2{d(Y~qbA*QP({BTt0#~RGo%uR>h#?7AyG;wUK^`K9umZc<@C!;&_o9dSE zZ;Ynp5%k{_+D(G7*E<=mvkKj34)KHk5XH*LxvB?c(P^;8y4sg-cjEX~bM;satZiE6 zw^reC^4f5yh;O1++p5P>?##W?v$VS8;sE4Y98g-Uh1+0uvW3cz!b(2?9F>G-R2H3- zM+OB}dy=Oacc}je8R8`l7l~M1Pw`sbJ_t@3kdS<=1I**OJuB<#FL2a9JHT2t2W83> zg9U|V7(tedHjA?M=Gx>_EFP~CjNA+_8{$J~t1~d`T4_5c?0O2d9pG@2%^h9}kqHE_9i&rEWdF0VPhIyKYRt>;M zTOYg>Evstx8k(h{aqGqVNB>UYM3xVG?w<^2J7N53WN}4*uzu|fZwe9qzn5}cW|bkQ zGj0nRAo*pzr2UZ53T3U;USskLxUTHmH)3ia3Xq3IiFCUcTVUQ^GnJ}Bchk>8bTa6UMqT>?TBMYJjWPBr=o 
zm4>rLQF$HSGA?r5ID_R%`O{$YIxt2+td>vp-QpGCfy8RRZ#SllJfs3=@~Z<;oR5;v zigY#db2*#LI`hK~W{O5jVqow?j6#edT8Rfp$?NUQw}hj4WT^Ewe>gR+7M~`=yjvVb zK?u7VnYQ|NS1LW`+_Nv?WNInrJ(!@ur@hl#zNXUwRPPV0i>D78<<-yU#jVT(eotrO zb^m8bGQw(i+|Vnv7`C{Ka$rm++#nJe7p9B{}Z{WZbb}Gt zmFsW1xxF5s^JNV#jB9Qj=MBy=_3AzNIQjVzb>?wy;4zv_a`!Xv&f7oJ!xV~LZGQWg z31)=MCbKq{mcW0eN>J-~glI%d-2?=ySd@{qH*8ExFH3VplG6kAr%u8K8YbQIx?XS| zbbt5nB#}_}R7$F?%^K>`*kFg;hMkx81ZYIBYpL`37*pS@ zHXm$+psy1%{vMNbPAa$$&1|0yJ@5c(X~~lYd19i zw(+N&PoFjpJoL`=Vz!AhOv!Edyosuq6nw$Dot^e?fRuN&6F6$dUj8%`I_dY%s`*uC z*k^R_+ut49_&dH0=p|#jPw8W-{Mr)do2C3q_tUCOueCAlB`T9PA*#4#?n=hv{ zXw8K0^NXN0>El@e_=l9I0bNZ~$TN}E)}`)YG!2EqEliio-)1s}&pQ3wHAGsi#i$l~ zA0<6&@EC3_q8UPVS){4qQ1X+78AWXMy$P zKITxFP^YP{SSxX<&pKcKsV060OB#D~<><;To7>vv$a^SpA(oFQSUJ>$XL+}_@q}`C zl>x1gf_7*Icj`3iz;wicH*HSC&(dLRsB??rQ6{JVH0r$1Meh(`g&+F20MS_L1+&F* zeuAJr7MC}UR)xeV!W6-j-{l4F%;yG$i8h+x0-dftFm18h<#Wzp?o&4pW!xJUXHkG` zfddoL)Q7u*CQ`ne_f8zkTa_?gm|{WxDLjMhoOoYSFbroe{Wu5x!?M(}B{ZUHhZKVn zg{GsQOA=Y``nsP5i)V-`eOOO{?e! zJ#?R~C|1?+OTQ~2EN_}Up?o^J-Lio3mEAvh9+) zPx6MlRVoF2Cvm6f9=gLPGGsQ6&yUy%lfXDP^n2gDw-U*LHFNbKDmICaGEqeN%Ciw1 zLiS!&?7#P~O0bE+qy(%4vK9@XL0zE!KGsDcBC;f2%FNo{W{@xq;<#yB?8KT+KCBO=rcLd*;$1Quw@r!qEa3 zYr0>E;2=@rYTGhw^%eXgh|RU>)h`Zk`qb_%0P^az=|;*+cIzH?OItxf0sPzZ$!cvuhW9}0=BL&! zL_|RH-?qnNxcw^Jp61<==fim~b+5K>xAq)=z$O3U=^b+TX!LwPyIB5wUyi6gb9_E- zvqvBh8P9irp6}r5qSxwWez6{Xv`<|DbFH^AZFAP?>FIMmE0;7r%ZEmmv`=Rd%La?9 z4`~6D{wKTsBdw2%ZOgpI{)di_rB8Tgee@84N5{t*$J%eLm%r=&{`YbLUK^`kbE&e>t|FNgNAB#AmY&)lH z!7cY1wmnMr-%s{664;wu{R1)>exdv}X- zTbZ&YIV2roDK3bNf{sV(SyRqb$E~X^4A)b3s*>!dag&_#CchmgB^r6aHEwE3`f0u! zCTwN$SF|30In8i4^=?U1wytvWjcQAFvgr)A_m{{=eHxmFIjHrh|KRHCh2uvCmqKwV zv1Al$yyr`{3wX?j?Lbrw`*;tsZqr!|;HBavJE2NSp)=;-{))H>XEz*IvrN7u0&%7| z)U&+%H2W#TH1tbt)8j6bFFhV z@fr=;(_k4J?=*>$)Lw?)8x0kHU%=v_w2w}TOFw8gGc2NE%o*EAIecTgRPi2PeNj=R z=utCN0H#hLlJa!>Q7k2R%@Rl@a7o3Za%-cy;`fL65dk zy8fe>QzkcU;qO~#6TeE2@vXi6cC-1`9vsXAK(z7B3@^tSo$(UAuUG==y0#9nof#D*J=lf>To^*1>3|anNV(? 
zL}&eHn5Y(*JFk0YcA{m9BiXbz4_5vC!exAX!ZDG`r|5)}U)jB6mHDR^%MSP2bXManmI>!JImzUb z9DbzL36*-;746MVqPOG!2RA{;z6|w@MdPlCM&t#3+@`nP$2hpKk2-GE$0WT8%)US91jBJwgBueow9RtmSv}CXIXG`NJ+BEYAg)SN&~&_D@*nnQQic z{HgD-fAKH=g|*x5kf$5%tH1iI?XUjTziQw2ec$H-iZA(+FR{P=*Z;bk{X%HM%P+tO z6aVlZ{zLoIfBH}Rcu^1dbARs7xgh2*|K-1I?|tukJ?;1Y-rwuL0sj1-|MT|U-~HV# z8s&KlWok=E4$$Eq~|l{2j-8 z-+lMFS+5xGz4u-_&^sV)L68D`*_VBpeei=Hv_JD_{*1Skc7dM|q10R6@)j?bLJF@W zczLxyUVNG_Z~szAA%zrDxE1i*3<1YocirW`_xQcVZ!A7ILWu!OF2C6T{>Juie#YYE z0Qi6Y-p%v6P%gSZl-Mz?e}%YQxFh;D0vES!KaAeD&pvehikEtifs}-dGd(aFLHX8U zu;(AsTg4F%Bj80QN?so%c$!~0gyc}b83qZOj)Sr>UM`7}MP!?vhdKhfaN*Z};o1p% z!?9&F9vPUj)0sV`ehu=qBg}j^9npeuMnSsf% z>11NHn3W5ThydUbts{`{|_bfgna#|Z*m|BC89aJhVDB*^_OY?H6QMclFVpZX4 z2`_#;Fqt*Q#FqMkU;}eyXqRL(Mq{-bt&*znW4Sh~8Zupdec~%gu_GE0Q*Kc^6 z(A0q$jTTK|iM%K0+7Wj*Tdq-ZRMq&v*vHR|+OVfh2q6h3s2T_}G#_YatU5i7UB(7# z2VdSY5BOw37{Hh92oex7lna_;jRW^YR^!JUR)wRYer7e6nB_v)fUpKOA0aQunA!q& zR(;20Sr+D|`f{Tyx{IY{G=BIpK|2HI_GmZFL?4k!jrIx;0v-G$(EdHmH4faH^_sU8 zO_uR=;wHYjTBrDE8R=zyi1vzJrk~VHu$xEI*qPSCjpJk2{-IANqoMVb2J>JlO2gGs zW1n~74koqm%`*az+Jr_B>zX;)V~qU%%KG{~sMTzwhVE(ZZi*(j5ZEmXMRx*^j}{*b z)j@d0x&e1bcyXxj0vplHXpQXoI?lTWjcbf4uO_Ew&H zY&CTMv7Rcnr~L?l4L*G^wU$+1Z>nue(b;GH3m3u}G>>TiOl#oss6)#%ra}%Vi1#=@ z-Qga`9*vL^|0WbTx2FQ&iOPGH*H<)-tlJWYIw&3=Bt%&;gyc|QO*5n_syOu4ctiBU z=LQf^g(IdHw^Q6>e+bz-Ot*!o4?PqJ;94c_yi%Da~Mp9j9xQS9kmpl6iM9J%e1W)q|-=~|Hr%HSn0z;H9XWS&5j=r7^2$2!0 zC29I8N$*bksPilF-oUIH8p2JjXD{RqI{ zax1v>ng^`5w`afje}13M^}FYjpZug9>-Sp>Kk_3#;#!IPe*5w-|8o1mAN)brZo!P! 
z2R`rtd()fVcGrTJZ9r&)kmld~n}6fa3$$gPe){QkJ?MSIH++M= z`OR;(AOGwF;m_GjOBT?+7fmG0vLJBFQ@X7#wMHEKS1>`*FVhodZXcX?O|jvX_$TJ=tn#b z>TwJV^CrAd2PBx1h+(K7`k3gb)~ahq_I?UQb$E)$VwzEgpU&hv-l)JfJ!c98aK=Ey zH1IR0=ml&qo`hnB`}DV$eUOFPs{AHtbQxn!?W9r72mscR`*(TmviM&0Hb_o}X8 zAmd~n?TAZHJ>@qL03&vm%Uecmx-ywLDn7*nBWR+qWMr* zQu|ibioeXQ)P~a>!kLU~C*WKk#yq}IFegQMzLu`4UpGVtN9PmK(SW+9%0%M<6BaE7 zgeBC+F+2hcB$t?uLffXU@xgP>0Kt|EgjTLagD@ncx@D~q1Y`&RYW1pBMH_}cCdB4S zTd4n8)j?}UXxy9{%z7OBqiKZD2*&AH^f42b7hN2ocTv^TeD*XxnEo1}y`*^TOdstS zv~#q2U2DbXNx1QkQY{sHy*hJ%wJ;jHHWdN{On_nP3_%F(S!o{jmEP8zRkSAaT5|{+ zG8&Vq+KfO_v+iSp*3Xi0Xm_lkHQCX+!S4zE7^ptD8Jm{s?(MhSbQ&h+(44`18)m~g zS_=qWFiTby?E?FO>Lb{wXl--fq4k4LB>GlTyxQq%&E{O_QEt?9+iF~^B|Fgi2~(x01(*&HN!>=8PF*#9mof((ln#`5<^577=!kSn5&OMcbF%Ajq zBLq>phm?!rQFtcbX-3xPT>Bzd*Zy1`_yC7+b0nHN3fc>L?^pE_&@jN!Df#335evAE;zw%ey z%Qya4swobUy( zoyYa_5l*F$!V3*AmyqETPkqe(mmm0_?c+~;#M=E+`*%O{Z|wVi=6j>umljeXOA0BZ zkiyFgm}TX6)pG&A_b}bX@3P-4z;DcLb6|WR^l|{7mHT@z@5LL4UykF4X$bkk89@oA zI<`%Uxxhu?pdVg9aJ0IZFZ`t+K@AYo*rpe->+BIuAtk^%H#&zG{TKrdUIU|m+)!$r z=V#z-A44hXZLT(9fOVPG(#=re8{*Q9WA}$}BuuMP$yW7>-N^Cvyji*1H~gnG%N6VC zg@7;PdHKssM$Qn$Id{2Ku-+2J@Y1Tn3xoUGc<4Sh7BHq;4ZAs=+kwi>_2Spd<*eEt z*h2Wqqp{62p7*}KXV;(lEE}sG6PVFjNSGj6)vDGO-i7E?djof-J=cLIMztvVdeNL} z-U|7G4RgBbmU_7yO17LKoG=>=K#* zIE=3-y|h@_9s&y0#Wx5}@79rm_=_0Std{ zC)pcXqIsk`%rAlvG>LRqSWcauYpqZ8@{EZ%%(9`4g`fh{VrrT0p`4v)o?r?i2s_q1 zqvhw{APu8YwAnrLo7KP)_~Ad{&(~Q6G4zU#Skhe&%`@;VTLAdwtPss9s6y zzbE`?Bws zOa->IsO}Xm1aoZqy}iqpSI!nSJ!pdl6StGoA9AjS@ye{O`5oZ~vl zx|>{TVOB1t=P3!A8;J7!44iGv8ly*&IlwwMItQAu88jEJ?Cj%)Ub$MaCeHI+Q)1t* zVag3%QzAHF$B{RFK?1~>HuREx4C&kSLSC{&90%eWA;Kx7{M;{p(K^04pQX1JuIRF) zdjSE`Ha+TXY9w%ZBwTI60P8ZUb*_)MSU1U>sIoT+8L8W4&(A{qY2dX`ZDAog~dy;=MtCFuS@FqetSLv*G5PqN(M5oU3MReqal-K@{=}cKfA{bHoeLW=(S<3mfBmojwQGnZA$gzldOa6t z{Dr^p7uI@&x(@GASEU;D6q_fP+4d#sl~mtSbUE~@?FLn>rRA%zrDczJ*iXLkMm;+yAvy83-#IYDxIEQcz zTYcQrHft6OHZK(HIFofIdM977-U0#1+{(S7weXYDtXnsmGqgqaR2wrzi+artYgH@j zMXfZNxbS5;Te(yA<#J_>O4;^BccwazHR`rMo*44TT3&Q!bGuxwSXp>)BACz%8opPm 
z`+Iix{WopNpJ*I)+HTGYhIK||_V*f=9gJ*MuUcz9bFB!N;vLNozBQW6g=iJk2Q(w9 zS1wzjRJ7J!)3UvR3sOeavX$FCTjg^$)Ih+vM|%Rzj7+)czBVwyg4rh*(&(nfH;WC1 zR-H`kNad;%y-*f%Hd9(w?SsKR9uDoWUbk$g=Z=gq*;LmUVfqXcSZLD}xfV^#U}0J; zgIO-+!ve!`K91NkdQn60gW#l~1IP84vs!5b$?6&{5*XO{D;Y0l-p+!?8h<4SE?753 zY!l}5Hc=Wv7|f+jbzqVUKN@IQ^mH%}_zXcXGJ<)ob{*}te9ZAbg1ILcmuUN-je{9B zA6GOaG$&c5GyX9B>des+pB(Co@X%k{ex&;cA0-G>5Zci`G*d8@hE~WPf*4&_6tBZ1 z)zQ*@Mwn51D+)0$l|c)JG0SCjE!=7cLLvlZhssAgWFqwaxi?nrYx zpHA#RZCL0o7;BAdwB3gfep{AmKTPE6S<8oq6^TDO?CYYR7XHr0AYGbz_Gt~mMi70F`n9q#$Z_2yeL#>HGUb_)_PWJuB^VJ(KJ#2&<4Y# z9sn+PrG>%lBx1 zJ=D4oj%rPNOxDKJk=9$zPNrk6zlrT-F(wy4H>? zN62Zjrv@6y^M+CkXK3ztZp8Hh4Xz@3Au&1ynjtmH6U4Cu1;vwB0@2(>4}cCI0w7>U z+aY9f+U{e3v-H9_m{3DgBAC#`n(%dzs9z;3k=$-2$;l+If0>*F#&d|KY zx^cM0y1{%PW6#{g5Z6TmvewXZG179u3@TcGXtcClBUf|het7!sn=6KMeRPDJcK0!y zLwbA$Znuv+$J7H(fo4bzX9Pth?U@`23W_JM1QE?`dO(t9ln|Xh{XT9WJP+Uaxo>;X zkGt=?Kcbn!f6u^6}Bow3Rtc`@|b_<^0wdoE`QEo+6zbXyWnSk z_Gj(g?|!#kxpKwF1n2O4NWfQ)JOhaH6(c6RlE4^(mwbL}ePFYu&2_~3(H z?tup$u!kOc$jy70w)Ao8N8EKKrbj zb0vK8$tUfZXP$8rs{kLp{AT;Wh2Okrc=K^j{2|uM0ea<)UYPZxJo(5B>ER_meuoJ` z$N}P4==ddvFL_}~!{EToN(EtpI?9*6LBpkM12i&J<}AG=X3>l?;?zJVToGTtR4IsJ z4sm(Hm=zJuQQn&7=De$qNqV^@zDlOnn9i)%sM+P=(0!{+F&zYhUN0dy-(F_3cC=jB zHKiA86_ua5X{$@6y!DO_?6NKGQm5@&7-(LMg@4f-8P@UY!gFK4>HaiErIP#D81%YU z)XUY~{hnPH9?X;>#8_oY_RQn^ZZfHj-x1B=Vm^1xjB$71Om<9B;fxz5yQ$rUC!Vpf z%JeiJ`To!j@&%i%RBtl2`ebGc)frYwR@Y15GMl&kX2Z%u<<;xXaBnCdb6c1&8!Lp- zo>TraqFtQK-5EG-M{;_7zHlEe2#&Bn$N1KjehIT=3SGESuGrzF%kC@XuKhh{sAFcV zs4=PuH-dni#?kAS3O4EWeW8{}D;BJyzF~9iqF z0uVGJ_7R|H>=08jKRJyVE4IkI=q30-bA&IMvaS&-paGN77>%MiX{rt;xR7c4dSFRt z<8@6Jtx-%;br5=}>`-m!E5s>y9SsWMxC~=b>m74nb*1NLORK0349hG+7#*uZ$@&^M zG(-lvMwqrxo54fh&~WN2tf?)G>rDGZPq-Poti~SmaG2Xd>k3nI498sMZU|ScShkt+ zFRPz(jdNc05VRaCt*dxT?O3Snq}w+Qf*on0Af(A^|0xOA{Kg4GYsV@#-*4HRK4>1f zH!=OT65f%@o(kVWWnQJVbE-CFG!Aw36(Q5`xa*o>Q{hFc3hfw#Zdt{1Zdw}@1A8TqxG%z)xKt#}u z$C|CW?H}m(iu>WII&Svw)8nMyxzUNE5XLn zs`~BRwqwbFF^mJyyN?l{Ovp?Gr!tZ|y(-+yB)b|JYCJ 
z`8=}u!oN$qZQw=sR*v{D0)CVI=->TEcIBbh*dO}Gf6w0Yp7%sK=iwKA;TP;@e&%P~ zQT*Tk`+wgZ(L?hSpZJ6ynEm>eZ}}Gc`Jey!s73(%wtUk!eUm%J=l=iR@BLmM*YEnS z?{cjdoYs^62mZhx@UlGH2tV)hK5wTl7x4e|Pye(#$^Xvp{7(1d^20y;!%^<12VeD7 zUuFOJAOB+);NaJV_-B9iXGggyr0|&wh1Tn={_5Ya2PL0HK0nA;@3Fl%eTQWW&uu#9 zg@9DZl0ph8r0~+hwgY#5=lc780uZR6#etv&KYh0Xek1eo4gOkilF#otG@tR^0>AWy z0)DGwt_n<+$B?YUagiTC`o~8|+ijwJ zY-0D*fz03f=l{xIMdPj|<3N0!41r{M{9wS`fPqzS)X(G41uxi4I$uu1Fy#f9&gBbv zXBHO}CLu|Wai@_eCTgg$!Nk^e z;W$wFI@(i&<%*qZEbuvlMg-=i5Ktfxp>HMi|HeXe>5ux*RD0UOf!0V*^MRl&FWmH( zH91oo5Q@R@$9x%M%6f+h-d9^j8Y4_rjYSjFS!2zGnC9IBv&890D;kk)U5y{zz?_Oy@2v!9-V1ORuc*2qEwRGZOAgdV!v)9&MZH-YQa0 z>xZ!|t4(P7AjrTuepA%iCpP8lZY0+ySG!TAGk~_0! z4iI{=?vIoQt9MW91Jh;TK`???=DW?;R$sy+`?N1ax)i{9>u z?yswdNB*FhMz$Dh!hfPM%+Hs8uN|rWQ?zk5wFA}btIk;W<_KZLe$#Rzt!>RQCi#YF z1fl7oaqQkav7tIu7mkj~>}%c4i^9X)Rce+W554d7J2#)YAhWDJGI!Fm32ABrLNf$7 zd9}N)`**5!INd+6@=#;C*Yb8uM01v9uhhfvu6u2MV;_GAxY?sF@)m-cHcfaFPE@1gb-}4pj-^I;V zsX(%iaR_YV173IkO?%hd|GfQwzWaX*WtEwnM;ESWuR)s(8fe^{!Kug#^=t&rl#dFY zNfX>jAGgbRsK|4z8-!U1yGr`8_&zTlP+U-f;H)&Ud~u$~g1XA`AO5h<@#lQb=R_%I;jzaa^F1*6QkI1G{oGI4>B$Ez zn=9Ev_g}J0^@`;xZ?Q~XzbW(^fcs-PJ+@5$7cBRw58KmE-_Y+L{XVaLfqmBR{I&?o za|0%}c;QRF0B*xS`$OLn#V&?lz=2!}DWvd{!Am7XXuav%EnnTePu(d@Pk+sNpZK2W z>cxf8$I?A1g%naq;iUlhsR#j*z&i|@EAu&@m)}0XqeRmf9@0eskByIx_~>RA@VhoC zgyCZ%-m8*;0Egec+k&BOh=oGi?_}FFKJ}?jxdZ>$H((5y1dII=-cGv^_#j9@Dd8gE z{Q|tS?fF3N?O*%#@8u^%{6fr&w}Zm?(dp?Snuv$kP%rwZ;P92KJ4sgZ8b<>7q7fe- zd{N7nqnHz;W7t7P^M$WOVN5f)e1#Zy8dP!H!Wja75g<-VrU~LNDA)bcal3s?(%YL4 zZ8D!&tv|4l2D>(ySamtK>S|%92Ya?!EX^ILBY{&NW(Z&AODil?R+y&!p;a;~>l8~? 
zzkck#JBo#@jZV6DsadlcjG=PL##JrCPS55r-B@567yMidwfLL5#;*oKg5Um)KV={P z#rv)Mbj=y^PZx4l)QvUO;>A3Z2GHlDfLSsG3$Vm{J=fT1s7y~cN~u|Q=krs|Lt#8| zLB?2f!?*1C%0T##6CEN0QnHE` zG@3s%20LG{oZ1G{9PJ$(uxBF85Za5JaG}2AoWCSGm>wIeUx?f=^MuxcM#Gu$%ndKT z713rs_&yQ;DmMos7XbO!f}*pW&RlB)O$r?MmzN87UXB?j81-588&h96d7r6_l0I8KG(vF1`e7Dv=gC|CfGGm=xQ zQ#=U5jul5!hqfUApe+;8W{xmHhq*eO#G^4YRT&gxNr-BNsjfPwRI4+8;u<_ zrG|a&S*nB3451p@OzuC3`O@By)mTmt#%c`EtSO34PHP%%uc~m*gwM5-a>4ft0v>$+ zVD1hdNeBZG=4J;y6Bes3v^Q$~&+}Q^uW9a8H=}hu6&+}e=PMVoVJ69$!PHlqvMSHo z1~8w5PY$##NQ?J44v&X~IO*J;3Q#hx%dw6p%E$ELG5}$criXDRKwQT*&0YIAkfcXq zl4cx|7KAHkaRkkVHE^M0n;wbV^w#mXY?L2G{Qx%c5c9ZBF!{I#9(>hxYt-BJs~>%> zehkfAQvy?7;kn002Q2Hb7dZxJZUaBYQ%#qb7w%bvIll-lz4k%7`r3zfzMS!*#oD^afhRAWe1&(B_DFBg zwbwrM#n!6dZ>?s@sX+WsjU-~1u{PWcu67JAC|s&BN@EAKM9`z!q; zmaDJ9q}c5N?d0n~1hW^xFMa=WYk53%arG2ZNa00*!qKhwrxyY+4OYJVCDGNV54p+} z?PHJX8F&(aOX$J-6u+Or zxveDw@cWuFF(BlLg&Y8Rn615)rg;qT{3(~L4{Xz*+_rE9XzP8EcRt{`+7Bp4 ze15lYx9N5v_x5*v{d)SRT03z97I461TMj04+BLW;4kzEd=7Ec)6eE7+Z|VN97Lil(vy88Ercs8EXF># zqe{uL-LCtDSk}w7nk}u+A6T0q|r!6SucnPW5#NKQT1_r zj^K-Nz~MWlt5)T*Eii|s_V^;xd|-+RC+nD%>Wc>N#uMSed=|n9@@Av4+Ej3}W5r6v zroz=w`bzEU&88Y3&86zi)kmD)=QPK(DWmr1Rkxt_z{EC~St^T=1OWhAHgz@wwE_AF zen#V|3s5rotd&&{20U$K?$8SH_UT|A`PvuLU^sWjCr4E{XZ^nWwHdN;s9YXK{B%mx zrmA0qp-+AFWy;2-HfPml#+JD$HEM2N3&BmbT6S|^AVO<}?PQ=bIL^ns8UiMK$ULF7 zgkT0|?wIasc$ZHeAcZQR%ONNWKfF5K7%$q+=LF;hWsp?U~gGBDNEzPj*M(MqD# z8bAD+z=%SCf}s}tv)~73rTHHSe?#T*-Gb&uqgi*Reg?@I!V1k30t|#Bm_lQ#Vl9}~ zt!b>$vcbR064P61J0{rBenB8L91YZ7;bM-2j{5+iAnS*@MNoEp(zcSurX(7DwJnc8 zKyd^slflqtqJarG_iLfC0&iLELhyvZ3O_bw-ZP zpe(gg=kAj$?3}~<|HfKt*Q$N$oDM*DOLvc|wb#sJ&N0JUYmPa_Xzc7*UwyF9oPZgI zotja(U`G_Tr#^<3S@qetH_$js1d=^jjXx1A!TO-CK+*;#6->vv>d>3doZ+YmX;bB$ zXb!>zI-+)DM8|$wA1cCyot=c@Xe%~;X6i4baS9rv8LbXkr7PEKRw-ny-|q^i# z<6&)#7%ZQ;^ETBh7i`*V+%eBtpHLf!v?vw_yXby!n-c7%4OPk7)=j*HUfwCV|WX~f-8{ zES4=gUAB%LSDZ@lix=OC+S9k5-mO|%ciY?N?9|bX~D>U8U>fO4$CD%5GZux;N?@=^?BqdRB38 zYCfB|n{PIrcZWf`bd}A9=k?vW`H-$;;f8~C%N?`%=xkWpO~lmFOT#)G4-{td&66UkL9Q`y5o$>!^@X) 
zr2j%rKJd{iPlx(O4(XRbTHVMKxdAUD^!P1`;G@`YzL|_o08XM7O4yq~!e!>I z;lhS(D(aY6nQ@#kd{Oz~&%XasZ3N^k2uEW+wt+e>+3EOZ@sWxAO3P_X)J^TGLypcv z6+Uc+){AcY0ST08x33Spy2A7NfJ<1mKlD@f1$FMxiQ0vf#uO=!r0psGC>yeDf)2-4 zO{d&&ejd9Tswnm!PFyjvTb}+!`^ejFvtgs=o3>e{?X$71s&{o|Rc^1}_Y?A!8HTqP z&deFAd<<s>ehRm>4O_70oarkBTQGI8bSY$)U93*bIbOfl+c)yJQRM zb8x9$Z0xh?=xjB$0qF~-KBrfas=!D+Hd9b9rNQ9sfyu;aa3p}3Cd1Lh%8EmpW1>k( z{+#IRk`RMH>I#VkUTu-wfx*Dorfk$TF~*eD86SwVR716e2L{Si@kl2eQ(s`P-lbqfmqbW|iSWYwU|GQcK2jSgUZ9naF_9I0 z2urFSFi&8zl-N{M9cW8h?xbj@R5phxqcSS_LTDF6?NJ%IY|e^`8!2z4RCGzDjOv0U z3`;R~Rxn_n*5GHOu%vL>G1M49f@Puw3Pxu~^{y%(5*^@o9E>yi4%IcHqf7B9??mZf zg0LBx5k3~`uI`NQx%#Bv?OMh+D^-8dA}u$Jpoa2b;|TS46ZI=<(2&UCqXNknHZIYa zQQT}e6C$tr!qCa94XAKqp*$iSsNR}(dp1{lcc*hJi&i7`JN8iKxK&3ajZkBjR^MfH zoiFB{@q?-``VzWMRj;n{gF7jFNW?7EUQVx9)Yj=>X#K>#)$ryPlKI3OI6FN z3o6sFjg+*J@|Uzu4AeHwN?+HIR$`0{74G9ld0--LY5Y*Pj>gVheOb@styZa8`qX_2 zAE@kt9o0HKDO7xpVS|A6i+RH)J!LbeG_q9Rk<*5HAz=g^N$Rlb@G|((6~(z<-Y|GS zVt*ZX(0%o}5#1x_z5@t7+&o|4J$xMpV)#N7?hfOzpTVY_Z@dd)*+K{%+~RFK6kp)? 
zJI)xsL)$1=I%8osTy-IoV8Ku^`GRmHkbKi#bT{)10095=Nkl%5=4*b!e&M(ONsuwj?>boZjO0ZolTBNhHP|6c!0jQn@i3Ym_H2yZVvZ2- z$MFFnPWSHr`27G*-7ht6mJqtz&9=WIEZ;#qUl4ccHXc51mN*Fy!(z9K9o)zxM;a+o zjWwhze4W!eMPH2d{6T7LsI_6D_Xg&mZ_Fy&Bb^(xxl$|pJU_gRz~$N)xE;cK{>S5-idji<^2@D<^%f_sQQ_!zSr+H?Tpf2J6HN<$9Bgft9E*ptdy-bnYwhys9Lfs+8vvu z)QPF2olu9i$_3jTO>L}tz!*_a+f4WpYJaZXv)jII&(g`7z3*KOSFx2<8a9_VhZ9@W zt2XJYp3{kC8g-kikJz|IS^)J^Bh?qRSsC9%(j-)!@_baPoiKP7suvr_;AZnxBhu>n zm>d`N5+{N|GNewB&cL2ZQR(_AjR*BfNnseJM?wSJ8ol|#)iEI{QBfUW-i*3^mjLPO zs;J+vB{EygU3#XbIby`Iq(eHQ12#R9 zv#D?9Yc|*{lNK$8Zm7Mbx}s_;Eu3>U=2dU(o0vYBv3WC8+gY%%GlMOil4uYuTS!3| z3^gq*Y__w3zes0XW!PAFEva!Q+!N}pFh0IfWz`!EZ76zTO9i_qY}P|xPOW!D$)v`w z2Dfjj!hk|TqoMIV5$%v-aMfw!snd_me;7G)wJ)W5X4D@V^ww1yYZ`xS)FYjdQypCD zMB@p@P6r_%RX<-UyD@w0tvKUivtdd4N&c3`1M0xixt!J1#)+<~%ES%}5*U&3{G8Gv z&4Cg4KuQM2rxtFN>$X*(xB4Qj`Y$xbQH|v)&6E!Y5p#ku2;(TBKFuf$8wOfkT_Oc$ zQ&as5vj^q?Hf^S&8@78G6J_DR&I(dY^d&~c6Izsej6H>qRF?}%kAxG9yN(vm<0_-C zaF~CMwCI`BTu|Df>WmsX>koVkL+f$5WbJOx)wwklmQmgN3g6Hi+11>l|9k>yJeAcB zwpVsEzUlj+#xRmcFo3d3gGALx^FA;7qtc5tY^pKK+{l!RHWPkq0>QLHt0i_^ib%Gp zzlW;Faiwbt4{F%D8fz!i4w#4iOv;K{OD8Z;H3mx6x-GhG+sUV_I+(bGUA3{H{#RL= zi!c{hFL{8mCI%a_oPg7sL4CP2CRsxxwPG-10Mz688c3U24we7nbv*QkX_jt)^noLE zhnEM3cZFZ{+~86fhvNfcVO(*1F`UhvX27g)<_dAaZ9k153cIL{QK0~S-NH2cZo=^Z zkvn!Rod?^vR1x-tn`8J$7sNZ@7LGJ?TKyP2`vU3C@B5Z_Td)5l`?L2S@n>LQ%WNJp zUjpVzrBdB5FDg6UMj(oJAVRmOjobm!M(Kmh;XU#PH^Lv*^FQ)O_#!uO7(RDLVcY_I z9jzC5EA>hczN~+gPS+B`uw@#;1mHLfrw4_1zhZ5SuCT1b+88j*0=01Y5>hT&vz0zz zm<{~h1&O1n)+Uw~-vIW!Q8|atjpjCXW{P@r<(6UlAWsy=^+?+YL}7=uaS5S6OtbGM z91jp@uoTq(i5)q1TDsZY88 zw%cyAm%sew_HEzxZT9Vd@9)`jp7{*Dccty~x8JVk4k{=$KeYZ*Pc9fN|_6^Qu6yQnp6J`n8r62Eyh5Lh&-Qo9(@GTu-=Y@XM zYy@S7C(UxXWJT^#G4JYw zrfke67FK9=ExB2YN6Yo4(kF z86Q7Gl_6I|&iPkgK6($kPsnUF5-Tug>DPTmX zldYl2i6M0s0%r&a2U08avooot*g^@6#!t0?Oq8#neqn$$qEx*`ZA@y8K+ZLfCd{HBPuJUdZpF3T0U%~1#hf=OlfeUj%rAl z%A@^t;Y0cYHCjkRumOdn5O!9OXwnvZ8qkG2PPHjypV>`zb}rsVYWR5my43|wpBMM|rv@^&MCa_Jg8Yi$~VIFF}^-F;b8LtClNpa<)$87ifS*t5gzgo~3P#@}Vnk#!hDA;uv 
zqv~zVYd%HNf5E5=B(q?`u#TdxXCm5QUxIa%jZoUhhB1pGL7KpaI|%h3UAJA| zv}Zi&r|doN|8C8TO;=5Yx(K8s=%*+tF9zhq_D)a_`J#fTs%}Ba?g+qf;Sa++oS zWDLW+jNl%Gh4+ZA`ibM?=K#*2fWYtfF!+FDAK!s-LwULd2HqDwu7fb83)2&TJ1E&5 z0XVKOU3B?RSg0cjgIDl5c#_aVS?&ze=m)>!43l^o_xrCuZ()2!heeQT#O@C7RW9X% zbgMtZg99VZ0ep@_H-Nw}G>Qs-qK0_GtaMDs&OE!BX(`1r=eWA9PywtfzmK?e6Y+JkJd-)}jj+E;wU^mwq-z z-t?w7`9^wVB%Cyyz&N!SZDT55`~t&V1=k+q~u~E9f0C z{y}-oHP?i$!@=Vp!b&INWg8UwzCz_-nsqbG=VI7_1pAYgogh2M@+z$sK#8 zzRBwP2H#hI5{F^yg3TsFD{h_)Lk_~YyXViVY&m$YMc~pZ?QQFwyUW`5ea_1noV(i= z=RRw*eur~?zWIg4xA7vWZ^NrBmDabZ%6cR)Xr8xhv8r|)JV}h?N+OlB(fL0M11=4Z z6@$g!VJ-v2v;Ojq!3Nrz!Lo)mtl`TY_-=t|!S^EXnCy*T3W(>une`9gJ8G3N1H)qV zTZlc?RYnXF*VEX$>O*C)M8F;y3>ES<8V$cz*Koe?;zspXaln+}yYX`HVD-Y!Ga;fI zX{i_W1LAG`>p!}mrdxgDpW1`2BZU>5!Qd%jD_q*dAek1vW-5-9~pUahkRF z+E%rR-Mr98kUrK^-Hr_x6Wbh&?VJYQ@zL1TZgozcw0Wmv+0n#~Y5?z*^R|Oubtz=i zc2en1_xg6Cl(+N310V6I>b6&}TA|akvC60yOV*#ytkCUSGbj48k+RqFI-_cft=CI@Yz8ZfhF%Nl;Ng%`1f zT~K|nKf%O-J}{&xo6Y*J^1_J7!r)LI7)me-&S@fIAEqw1+Xhe^hT0qBv5obnhb@z~ z1|ysHs4;5RDqio}Xli|xTN5#Rnk=*fX2TiPvnt%!8Nu*;SAE3k{;Kj|JUlI0ZRfG; zlC;Ta7^tgKnhmwHt@cclsbDi-^Oa8 zul#JR4>BoN=Z3@+QY6>}S?C^|pi=x|ZkdF}jQSt-Y%uBIkzpeQ1{&(HC>wh+Fp;1u z8-x>8WsGFXX2U8fF9H2UXBbz^$-d$W%7f~xX@6+>a>>iXo)7j>_LR1xHdQB6XXs_v zI9I;JOk;hnIiaw!`aiF+p_p(r}x2fkXi&29(UX?kx=Qmd=^L^zQYvbvvicZ{ z);M>1Xu`&tvop;@#vKwY%po4M`qvV|Jkwp}z@Vc~-3EfjmM2i`Ve5DR`}i(e$HTO^ z9F!)$juU@KDg}gTJSd0-qO_6ZN>OX=Vb2X9$a)ap!Dm&N5V%AeFLE7dBS8uao#D;X z+lBn2ggG3*xermvOIK9k7{0*2yd5a}k+d;@Fl?obFj$<)qBWHM3z(st zB-PVc>r+L~R?=OB4+wDvLHqa)rj7DOp}McO@erCmoHnky{@~Ys(NB&h%XNQ$xcP}s zR@Q|z&z-R!_};%CxSLIXq5as$K4yRQSAXS_3A{ve@d5F|7rxNG^;^HyHJM6eG?oFLjo9e zcC7cgKeOTK+notB9*ixOFZg#1hucaeee)0d`hG0guzrJWJpJn}TdL`i`^eVP-Mc)bFd%sBGAqUj8k1S`lszRuX&9<_qosY z4YH4XH2hqlJybI7BMBB0%+RoVkOP3g1<$Kv{<|b`Ap$9iBOJCiFdY9C3Ik zd7#0OBS#Lc6VD)|b&x)XK93YQ${c<(>tmT)cn?0B6Ryi11of@PlDQHjkJ9gdPzN94 z{3*mA_PBzOz!!wLBMgf|`HgTo=oa_`kGn#g7vWR=C>JS!Qn~D#FB@AMuA7}|uz9|> zv#YkWtk>=u284rh_}woNC+c$8sDtR&9^0^;WWsW@xh;|@MQ7}ME^Ei;3)>UUfga|W 
zojuF;`YK{(U7K4ObpVUGZK`jx8YrB4FIS3os+{-J=(TFu?Wv#|D6e|eN<~*mm1uXY zk2(?gyMv)`Za33OS3{N@j@+2GEhcvMa|PcZ$DRd7^K+w-pH^N>#?~q3ZKkrboTBgb z+-}OAvTbWprSu{K11B{a>jkcFlj+QDn4rQ7l|vXIM}h)1RR!UO8G)*;y2?k@)b4cb zhVc`bl-sFj>P0WBc%(kYY~E47a?$F_pV73}*kjL^GECA~ZqRd!B& zjs#FbFQn7~i6=I>Gs2CElC1J!qXTIcHm*>y)zX3ilO!#7qfytJbKa8mn%jLzs4uYD zp$>88LSM8!A{=`vOO>#a>WIuX0!Ul~mfcL|YSWp)*V(HbOIM?^JbynK4oM z6V!zXHxdtwLgz8dpND}UIui!tjD-^hO+)l(s_rItL4671CpVwjj>0g4KZC&_nzXf` z?24uvDgm``WwoiI`*1Sy@is>SOl4t2yOh>AQr?u14LUvRiAHVFq$N7Kq{?XOo3~BP zowmlAOGYv7yiI|v74~!68uQe(r!*M&H_@hYWXqXk*!StGjz~u#ZGn^!%pAr{ex~+n ztRn3JqiL#k^`N)tbVM{u=Cj^@)M^zq{;;9bRGLh0;LJ26R3?lSg~K?5F-X7D&X#a@ z)TRe^n^qI8lM|Jzv2sCkVMjPJNOkB#qSGH*qtmsd`fiZP+6C3)ik+5mh^?Z&=~JfK zdhFb`%{J@8sXB^&UCmk4{;4s-o3o5y-%UITxR9auZR2L+>#-cG}vM5@P zME`~Al^RW~pz_A0yp=^?RKTVC1D6DXWsvWxk43w()|v%Oo@~L#2iEmruG!f$R_N^c zI>-7IFjo}rm!}Mz4#8Rqvjiq%Nn^TFuloAQDS2oQQ<6_fSJZzz`{* zhqclFF5AX~_=5aFz`lDQUr>j@W%_*id7F_0Ac>;q}*fDPF^)2ah+# z!PU=tx;^K+{+?~!czqbMhBd5V4G$lt91y&ObPF)kw^%Y0oS2P6=dO=ip?|;ap1#jN ztd1Pr)HiTW-!luVH5#^Y^r#g}Wt;1pEm_{s`}suAOr*^p*;+d1oA8i&0GBF}Lyh12 zhVQovyXWkm{o)VXumAe5FO3XyuRw2-g>KtuVD>qSi@I3L=1#er%oMUfW%={8s*Ufq)A}Z#2FbptB{6) zf$>-!a*G%_JhxWsz_SU45X`7Jqvqj)KHsAcZ!;m1h6bnKb^9 zt=}LFU-T%BKazty2V)My7sYVl4oVFCQCJ-33cdpc?#G~(!x=7(eTEB$&^ZOWx4mbr z-KO>09ao7%A^lF*)#{WgMZf0bkxL1%am5B^!e+Lq7oKg@`VSOnAC+@plr2b{Y1is zrJ_qhV>Z>zoN%0{hIw$rgb44}!x)qWxELa(99Y@vxZV^ddkxFMH!TPIb~IIlLR zRo`4HV>7)#!UUSZDC+iHvL(xj8ujU{Kk!X^PWhsGi@GF)4;4IFO%&8DVdOod{!Yt{ zq{v)#cZP{*h3YBPk)evIsl{la{8L^|Rd+VNXX~8s$3*E)SEU^ z`l{**69y&%QY7>N@kd0b6nI4clwPc1_MB9Eo1#x@KDCkX!64gIdDv4~XnZlodvd3W zC4)`M;^HdD)R}6n<%ENi;i%|p632q0d^Ih~NSLt6p4YfZbo<`ds2@Xh80x%`Y{}>) zAGK=<=)^eFnvqldNc}b=-cMc?eKQ{EAN(+#P{Fp7(&8^S^AgFQP1Sui9BC}%U1}vI z+8|*ABMU~zkU6RGlPVY8jtTZpFj8MoUu32em#9L;9AoTS)w8JnKU%A}G!D`+3F@Oc zls$UHYMOsVUGytz{-zo;$3&~5+yk{!lgU-ml~m?g_3LSc=bBnGSOY|p&Uj)IwP!wt zfryH`sWT>E*cD-1(bsAV3`nF1=)-~53skWo`O_NDG*@~y)LcR2KcTVSn9iM1hNRb! 
zu`!xhqLR1mDNLP=6-2|4#`{3-qiR!nr26l)tgG|5E{h22@bX~NEzhWhcKVuaC|{{kS55> z??BIA4ujOR?l!g>-gcOX%)g%2i9Nla?d|MYr=|WIL~{;^;|t0T;-a#HFwz9~eVhR( zEOI*Rw=pU;_=`hSUQl)rx1WMA?4M~F^ee4(m=i+0*uqfZxyG|30@?_pq{u0%BS-b_ zRs@x@_;G*fW$Z_JQSrzeh|1<2Zw+5Q@OWUdk8|~;VH`?>Q z_dD#XzU56}>|+aS>6JCC;mZ%OpORWtyA=Z%EUE0pn_iv!K5OUhzfaG!tR27dq~618 zE)8L9h16QB8 z)@r4?z2h%_U!SoPmnP+OGk~NC?klgn(l^lJjFHcM?sIlKw$t>jg2W1L4ot8?h_nk5 zF2IXk^dkF%Klp>CL1YeBJn@N7^f&-Nn`6;N9ZVX&zfPV!=^JKOUww6$at&)(!y5kY z0jzJi?yXIZQpEd9NF-VH9l9PPi9UyE)zNg_x;c9;FH~x z_zyV)45T3cDn7!O<>`(qjxTWQTH?ePxr4Yk&Q*Lk2;sDEu~M>nqvmZx(gn3QYzo3m z!j=RCbsGT6z@}(H1GYbz+7VqxM^l?YGDTzi1T|0dM9QT)hP|%a={P1gQW3TB)OOQ> z>ZEJ)d7G6AHc>}*Mq{`6aYX*)$Q5oM%`+uj5B3+*gQ}eoBI89%5Hvc zM=2+E=Laj6OQzf?c~RFDYFshgU}){`IfDQmO!oLu%c&7;r0-}zrxQslwma4-7M!^N z(+CEEtNtk!ty~P%{tSlJna%ygEQ}8f$zz;cBhlI_4{3me(hp&T2zR+$bQ>i*{h`{8 zDkrr?w1I>P)kibln!W+3c8bQn5u!R}M9&l(+M-!k6FF5ZTC;%Nmb7J*x}x-1rNMwL zo7Qc$nY#7o8V#DXNWm0UUF>^c>>Oze>O0BiTv`C>71Wn4YE|LW3#94=;|qpWMs45I zOXalRcZSz=uDp6lYpRT%Dmr2VQ}se!3w6LgNk%jo>5j2}q+m8RFUD*}D;^q6*leDR z)n{ERaiSJRgX)7=x8oFUSL3F7V7mkZ?v%^o)}K$Uq&hF;9?(ZB(9l2rT6*R8%x~HmTYby^M>@W#1zFlB0s6Q%lqdtxDhN5pz z;}dC`topVknjqQ3yn-=AJ<5tJjz_KzZddIbs(voHB{vIxQnZ|@?bt1uz)+!Fh4aZm z|CT1RfRUy?#{mC8{fvYbc8r)ed5zWkhht|rV#CBVZpRu!NN5z5ZlIU{sc>&-%%xSo zj_QxBIZRqts6q_BejOW$g*vQ!uu9@h~BV^1dr%iJ2_Ikb^KzHiM z9OrR04C`1zrI^}=9R`?R%>S7_sIbk#)lF+GD}F=uDr%fH+kKzIjJ0!xoONMxP8Ti> zgqpaz#z!kvveB?>CyGeh2{c9=!aKjd8q0CACOY zyzIEaDK|k&eBqs6&nd^{+ZbFCzSTCa;`3-vhdcBH!6BX?&l0CY!k^pNWvOpsbt;IX zOr%^;)mq=sI}B1vice{MAU$(|K5Xx`oaxE?j6)13aJe=H5X2qUMq)18MqmYB0D7Kj zoWV%pN~iU#+1~aK0@e8m?lK=K}d7rV1BsuA$Z!%8SZA z>|6D@bCb}MW(=)?BOsQ4-b_bNoEWB&hA>d zUe~vc+Y!a25C-Tm_iXz$y~~ zK#RaC9HwpzX7&ZYFBpUJrK>rtz16NOC*MLVIFBA7PG?!YQHT<5sK znTNTevU$mv70a&j3YAS`b$Yf|b7qNI#_Klj_FUS8lk5|{#O+opz6lTWD5X1_*RxK? 
zI^Dh{)t79#bE>{ruUcv_@N$L7H|es|i7k|#;gc&Bd^0|ya;D{y*B@i$9o;w7ei$+Z zm5=R-z9w4x+_os0a1}$96so=SLP=Tej*Vf6xa5p#tx2`WR!VpS9wVO_xe3WU{uYzU{$$5#GGYEUG<;Qr->jV+0@jB|YIu zDjp;4#YEDQg}iUjSJjr9CJA;p5~4q+zfskNWJTYwGZHWhV1yLM6U(Z6Bu9`A=_pM` zv>B^x)RCdCYNmD~C4*Wj7%2tnpnFyA#a@NKHj3tKM8WjI{>?<~W?qd{{#5wNya1}s zBjLbqO26B6yDCV9FqRhTmjvp?R4#SgQ=gY$b_ypO_SwnUlH-vLRF6ir?96~dtzuo_ zA5*s4jI;>SFEf>ON>^I+LxKafa`R@(>&u)$`Ub|VxDF8+b?y z&DGAV!r10?)lzb^Fy&RBzUtFuF3Am(5;bi(7z`R4NLr+nzN3C-=+Vw0<*IJb045NI z`5W@XWWo<~i^Nm4+fP2T0jzkpbEC40Dvu4sDa@nM(CzKOEKRH4b5wz;j9y~lV>zL| zhmqp;L#kDSW!%*`n2n}^Z6f+dbv&;6<#+dNs(Cu3?i#Z>(Ggo0Nc6$%I-~ZLgsWRF z*j%`fz$uEJjPsi6M&BcKQc)jF)VCNSSEpNMtTCqY+R6_@=5#@QBs!PW#~EES#Z^@v zjJD%iGpejDTMgR~edenF5!D|_r-aI?sw^I$BgNe>n(S_D+C90NWk+2*(%rM!_BpE! zTXr;;a(V^pqVC~ubU6?eEru&_sdb#Oj#{d+Xp3}2Ni-^yiasV>t(cNw2Zz)5NFD@g z#aha=JqU6K85cW8{|09 z#sEBt{Fi!etkN`o57NZ*DNd0vNgNKLDS#`E@3~L=P@o#@gHLiv!C`OUcW6wT`G~v} z&P%p&KZm0aW!_P4*? zH_6tphBd6=D-o*Is$WP}96NUGBGq4f{}8_l5o0AXb{_}Sc=3G|LEPYb1ms_3jI8=s z84>X^BZ#M=o_q^F7J#%Q&#P7Xt+okRea<~p2wR$Po%(!`b4rvAv*7!9rBJ;7z8}8y zgEXFm;K;wMP&|RavxfOrza2bjEd7BrF`N-Z_@bc0h_KABIHF@XyPd9|u;xT>(KjW` zij|^oHf?P+>|`+$7i#Nth4p1)p~tv zD9!lDhRs!eJ)N;!e_)Gz+B$5$>&0nLFA;23PE;u*U2+=0sBpJ=TW!1M z#tU}t)NRfLfkD$vrff&`n&~C1&}>?s%^0;U?VAiqrJ3ucqh@1KYqcv+xFK^k z-^ZFH85j<&j!Tg2seJ|22h~$dAEah5`aVX*ZoKT&Rpn<7^?Tx;|l5ChNOd&Q< z9U}E(7&*u0NJ-_sG15ODj~(YsZJDYr8;PVXR3?VtQO>r(#=PP& zDvo^@msKw=2V7B-BLE@W$=#$E3Y_4`IiA&C~VU+&v0TYDrcA<$FVZx&H5e(PPb!~PSwa6{+Xnm^wTAJ0Y|TI?RILq_21E1(jUQAGp-yWkI>Jyxe{fd$ zYNL@=27$y!S!wc|yb@*++35u1fDJLm1)IC5(!%&V?M4+@U+wB=vQ|qUCJu7DOzMTrjSa3ftFG|hFTA1FeNo-M#2MKkU)VcM4qn3D5~)A zFUC`AXpS^BC)1(@^qS6sO~MqC9=VJqG#<~Yk5Ip*iRH|n6We<(kyFr^9)eeS(=*XQ z@tH(MG|yOm)YCep5vw|%>$Ge-=-J@HX{$~7lIV=YE}HI3tMgMM3en2$uO3M|9|?FDrFDvo;#O zVGrY;{L}#n39gQwNl3jc%LgKyz+p69g2D)1;tydpsr)c_oO!3Y5rr+`&^E>s2v*w| z2lrnpedwUfcoN)^|FAZ$;#>M7h@}M%q>)2qV)qUPCTbo%jx-0dt&|bRDK~2*|^3jO=(DSMfdEHhQvH+9jYOEGEICXXFK~&A;#qzpxZ%hl#>- zlzW_!68U*C=j9i7bm!tZ!Fx$uf@RHMS;NBtq*>RnhBd6=ZvrO$jvj-DJqH80l>HT; 
ze*IHz^O>*HH+92CQ`7g9+4hBPyX&qy?C!hn)O+)e-m}Y=&*(d*zG3Mc2k(+LS{%-> zxg1>1{>tUx-ZOXGNO@oVf^YDHd#C}s<(6A~!;EJyhheahhRu~n0`Gg@``j=+8;ERf zU4Q-c7ag#PfXzVO2YAQ1@x~kNw%cwCW7e>SHLT$)2au*fdL;sw1FVs#V}x0N`efXz z%n>9{BB>U>u~zN0u(o0Yi!!*Ql5!PRB|8AVzv2L1>JzQie7nT~hD(HpddA!GXaWp* z>KfHIPCuSoafrS%9|M$!;FXFi{)G5)IGv?pdJGJY%pL~0u>t>HcUN27iH<56&+A}bXBEj z)%n7X_6BxlJh7tk)Rcft)v^*Vaw$s>2iD%KSt`vIZ^Ev_Cc);08#nLki> z`JCOCFIhwPQ%NBij`U)%a5Ym%2XsHesivr@_am_muJccr!N!RV z`fhxF-tJhpT(WkjW5Z(JN@ES^+0>24!#F66CZ?8JxSQCENxJhVMD$k8PQvaH47q1!%EMXU=FWmFK=C;Z{uQFZQNB25R6VarO4CiGC7Yix+-v)0sF0AunB#bhj!V53N}7af#Wa>l&v<68u~FZL;ewP4jDFF7 zX%i&E72bI{U~?a44z^+(UsTsH-2s2_6NGVv-beD>AiW1g`J#C4%d)~i{89QS9XLZc zP`2C=PT=ONx9w?9{3-jyXMf1Pbl;ZKWU6&Hsqx6VaDchudBTi`;oC>~klKjfgJR{w zK=TcI^O+nGeh4)2c+Vk z`aS>9p8f+rV)>1OAESR=IPvV~+p~V?$L#nspA&|xVGUn?Fd20G`F1J5&gZ36bHVU9 z`lJ_I^+~U^*~w?xEMM2Rc1h2@h4uBVmeV)gmfpdR9XnwgwVEY!6&uyBv1?xV#;}Bk z4%5-#vUXxF1soLo55M=bcJ0aQ?O%QWzq9Z7j_>gBJMOr{FO1KB&-Z+fefYy4_V9-b zANj~f?9c!F&)pdQPyh5!+t+{n*B`3J2KbA=_zT|NE#_VM;dzt=AJ?#kHGKI2Oyt9WZzGrm#IH(Q@V$gO<~YDC;X5l1Ol25MpQxTl%d8%DW6uvfhe@|Q8bJG2C4A!aTLo-|K2k{h)1Ua61$xr5 zY}$(uQQtU8^b>tE6Yg{h;}bX(;OW9dLEb2#Zeh4X!ZBzk8_`={IX_$T$AF#w3 zCXVpYfl;z0&Xv30lmQi6Y~XQ9my@x}IFiZ{;jE+*R+|lMG9BCO^l2MFU!1gn9r?-8l2-tZQN{IV>mQa0S%NVjiK#c-v;HfRnDGu z1Kw<=r&Qt~fuu~*60?bK7%=$zNM7hKt46)$|Mdwweeah2kN>dg>ZUS-p^XYTJEpWHc&Hc{PPg(ldjH?XSOJXIj7uGrA`6Z8F{pOjr_qI6Zy{Agl@ ze9opSHzW5z1mUFktktq)v0zEz!Cu9*+qd+IV>VJ7GpY}p?`PERV=9Zy;j!9=eUm;+ zgvr#hTbq_|ckNV0legD%h68aGO*ZVW%=F@j4VRkQJW@NbX_8E*oH2w1#*kC-E#Xp| zvA@wp$_Mpps_TdeD|%(r=73R^GHksJ)z2I0jN4+_R^EA06JK@dDUYdKm>Dn!U@~%& zzNhxXaOe-mwuyb7`NB602fWa#oQ_KjB;0U46LKP2PDP8XUZe+l;e^R?PW@6$C*8Pw zij6Ymr$1(@E2^Nd0fI!sF7czWGpTx$v2U_t>!(70%8x2G&h?Tut?)gVDp~+&3rvMP zk{imK)p*Wwl3(G7@Toa26@qHAOjfj3-6|TlNH1X1gZTmDsjie=(Q734hG=$1IF4yy zM8#N9aZ{LJUPi!t?1)xXU8qKLrV@2mePBGH8Z4jG!le&@smAhLv??eZ>6MXiJ7b6O zB>EOJSwlK#ta;`}t2GXH=4!X57Go2Gjth+s?9ddd70XR0Hc|O0(F+?c$x_}@8XGW0 
zQ0F#K9wa;RjhZzzCX@M$8=SAIju`c43?d~4GpI!ws8PGHr%uX+Kl14Z*Rr7_!8-kipSinMbotKV$ee4??VHZC+4X6o;$>QvB25K?gk(UAoO zW~gSotJ_1(8f!pSV;gA>9&I_%X;b66MLOXaYGJGi_i&=VR$Wfzv!ahyg?3M4Go`!> zOKTi8MYGD*aqCsLtdXAD=>Geysfm_j@PP|2YUM=bb2oCBT^;%(&l0V5A4aAfl&Shq7;Dq|<2!ovKv(ng0E zzPJC{FZjuzbgAmnY&YI=t0#eh`o8zR%XZJ7^|rn7jc>F+`lCN`1~>+4SsSLTTa2mj z&dF3x8?k9~zt%k7k+|OSz5l>Aj|9eXfjhqNdAsZOPka3_hWn~ly~@KmWlO%u{n3wp z)bXt0@d?i!F^V2tNEfPh-HYFBU;5);2}2%TIE=w`^l8tqsos@#zi@jPatT;2C=U&? z)tcS%#&5At|N1Y5ArB2o$4=M{uYH4k>Q{a~40#ZE81*Wjywa|C{)?@3&z*MmuO8Xv z>>AeaHvxrPF2DZdulOd*)}N^Fx{KAZbWY!C<9^3BZ+u!9auE7Er){opoAR-1!jKi< znXGF%?Aqvo&sgsZ@3-d8IlJ8eeNCoH>pmE|g%`eqIfi#|kX zJ@7@#>pLP>z4*j3njx6AdmsDJAmB1E_`*Y9ch<0mHLPI`j||v%L-K?Bg8>YX$kyd5 z@O^}uvBPYwM8LN^%#~G%mk8JgMe1XfS%T{4NHYQI$5sv9M?k$`V8kU^_|`jf=1k}d zzSXE}1ZdJYfBt;vI~*P?4|^#vJ|J|B7C0+KDxvA z;vgUZ-6N+19>Pe3JC>NdgtH;x|11B~B z=sZqvI;Sec7Zu|`S_NPzyOf;UtUs`^h&)IotU8-p|LB&b#uLlv<>SJ1Y@5A-mCj$V zaieOz;Yj|zGbcuDhG}pP>Ka@yTuLQN>tnv5a+}*bwm5mh2Dz-;0l^@6E`hP=gfk~F z#@#O$EiHFOd0{X-^`+1b3sKoO=I8mW51^##G;7pc$|X@LyP6`T8qx*qkIaG- z#pRMEYZdDYKTL)s#=-MBx8*WX91=Qx)!8tNtCzZr+Rwnpuy{%INUB|&qQ^eRNbMV{ zUOr(77abGI!zurYCKh*lg0nEJC`3jeHFE zYew6y;`0ha(uB?JmS|rZ3@xEH?@edUBunW<5;Y@f)dRaMGv%kOtn#1^EZHAgRc-1j z47FCMAwwz%TP1w!VSwHxFLZa^g1nfjAF$m@iLXaqPlQ0cdE)`bRLyaFniL%88I=C(m>S}bm0Y@O?v7@o3#SEnkvT3MCGP6 z4!GwT{2E_X)vKp6VBjD%3TiE+i zcUe{S?TH2{)t~W)IyR(Bu!S-e9g$4IJ_mM8P??7GR7MwcpCipfKPepy$-L%HhA|=> z^x;tDVmgblibNICPJKOWYQjHM88D}I7#qwlrRj;@W0eU_P*KIbps@00!^_BN{ey7? 
zlL7ldGt|bZPHDM2N46|68rq)f-cve0dgdC7g|Wty#$$rHCOj}$N@{x_xBA2F8bKf7 zLeiqAwp4{9r*Vk-GH9I9xTFsgstfjMu(Om%t4z%uPM4>YZ==<r&1#2qxTm}Z0$3^}SJeB+mt7UkYZnL#kgPR}M@|Pkgn4{zK9%VvO>_xX?irIpJtb+y)Mt=iNJeNg{} zow@&hYwhiNdCz_BbL~YhdXatXV;}Q6@M6!L=jEL^)P+!=-}#;2@t1evxU@vrb6)U5 ze_{4=b?xo$*zWmr?uMaq(@i(|3pb1#{E-{|@T@UwcznVsE$v)a%;E#=Zx>M8Q`o%O z+V_7l40&__#tLmc3{F1pMRxWhAMp2~>t6X9yZ-CH!HzxsS=PGeE^FL)iyeLHGwkY@ zzQQualInf0OS@eA@>ki>r##&m71)8h>2+_itH1gc&b(;fd$&!x9edjM{h$?(9kU}( zewyvw@kKv$@r1wg7Q6C=FSYvhH`&hTKWkS!_l36g#HU#E&M(?^U-w$eRqJ-uOJ8o~ z6IWRE>TB%6r#`N_Jlk%0)3?}(=RDuv`+H~4ShifTTfg}&UO&nQ=WXBmon9{WPiwwb zuDsfA`o=fg#w}0qa)5ECZIzQ(>V0Zp-P5N$Yz=F8EFiOW`E@5FJm5>_5D1@T*lAhlU4OK|R;<2DyDd9^?t-1x)k<$#;ksvA z@%Z(YE!JEj?coCTEYy!!Qr{gH0pD(d;hA^&{cFb(`L zWB9D#a;Ch#i(H-e-kyIe`N7pgfV90{Zz+YscX?C>`{dw&IdeI~_M` zLpxKz)nM8 z1B27@oJK)aR7UBi!!deayuL_%umO*xL}519i(1|pC?(ONkVx8$fu}Z)GilS*uz9WG zk{?Oc7sKGF^+GZNiIbVitmJa8Dh8%N8Rmp=PI`UcL`Q{IK`(Myr9*`k3<=1+P&$|+ z@EbYVih=Zw>YWl!-+)(oUSYe!gOG4pepCJh8$Qa5%CyO3>I@3%<=j-|@2d@LI3p1; z$roIO)J$#m)4A!i&D8FJ$}FhP;9_%mJe^p7G_g`X=bP>&m6^?CUES75ZK)LtUgzvo z^{3t~^`mgGVTk0%kUFT$p=gN|$6SeFoGnxzR8|ccN2))Y$lc-4nQ3gcPkqx^3zNbY zsw3*r=r`)gn1E40y1v|up@iz!hv}p|Fn^FDNx|GuolxhMS2|V0$4XNDh(yaEExZNg zQ~#8-=6YRKK9WjJ<*$kcFuS^1Tr!GxhLt{;QTOKKKy8G1#d3`en~v&`q%Tx9YO+w3 zhSWhPhnhIm4MGVR<2B)i3B%lk`NSet))+=DmP?zcKWEGv%>|fx(^APwN`socJ;smd zgyan4x=LL(>TbLqn=%>AEf^*k#HTEnBdCa*FgHXiKIZd`bNW(wm|qx($A(Wq;|+$* zrs_6qwJoiA1QTqkb)wYmTURvAU?W6fWzi-xox7c*p2Ddwdp`-$ZKis=^pEI*dO6mL zHfjJEkIZM$3YB0B(YdSq^hZkg2g;ileP+TxQ5cLuCU7B#`a6vW%{gdaV}lxrt!&kj z*`j6p7pzO)=|Pelptdck`JA?_`hmH^L#hS+6QU=QKx{yY27Sf@4>zrEgMQBn!f(1@ z?xaS2%TETDOT*+#xg=e*rUKEr8bRa??gxLl$Jf;OI_|g7%{_3d8v2+-nhxm{7z#+Q zAnAr`A=<;(f^mg=3bRT52GeDtb(1jSV4khw3$O(K=;CMfx#7Njr$gip?xZA>?or(x z8TSAuFAlv6=WNtz`5c~V%#AglhP&IgXzkidUiugIg}Yv9JMBQ-LHt>TYOt3+=l`@% z-2S8XXCK@OnJYQ+;>Oe z`ajUd#bw(VxuW0IHU^>mgykLZ2cE;mO@uRo$Qj%Z{&J7gbkHCAbFaP`oFesfA-=$^ 
z{|KiC?Bff)4!8sV!QXfrxnF8-1pdGSKA2nq(=6u}zdo1`HNpAtZO-@g--XKH&*ZaQ*4G-g>KEg!}fq=RMDvD(G;(=9+7q347ZuPe64-*i|;-({%`BLBxpPzeezc=85uA&KJ{zAXgjxmTHhueD;_=OOps42 zta;ZRo(4OFUwqqtwa%&gobhtuv!Ak2bIbB zJAa>*j$G;Q))&E=!Lo)mtYHn0OW^y5!{IOi_@+T(ggcA_7zBK$Jrp1v>B$$BL76Z{ z9?JI-3>8#?#p|%jl;L|Vs%KO_dnEuk$QNC!OpU1A=z1{b3h+?#P%r8pZ+k>P==;}& zk8=phhbn3#f-i9=ahP!bIUcC2hyxqR{zu+mN5CO)k~+E~2=MXi?hxli_?B4? z;*9Y5e+*|FADb8G7!q$sL32Y=-J9-R+GWE9R|VD)`1Vb?RsU)in@y zGf6wHxJ9+>8@ToH)EOJ8D~{P-rQ|j*GK+*|R0mG{quQu1Y?FPui~1=mmF$Tx3@XhNI8(zg)S#O6a+hQX+mkTy1M`IhX31Uebr!!ViSul~9JcG&9pqxT%rut&A z-=!#Eq=;}Z7dA>os|g2;mI|BOsy9aJXT5<-iY!zf_DQChw13ROMDQ~+!?Dfuf|yIE zY)AMouD+u>PB^)vHe&cZ*tQXfCNq^mpJ6Mcvf1!Wd!%Mi$CWIW+#ovrn-T7!=)^{C zu~5(|GI8Vf*o8sT0coPdWaj+|pqn0>G^5ePr74gU(G0V;;@LoAbG)j0vC$6g*f7Vg zL=p+0jOr*l%~Y?n!aH&ogm)mENVBl{53?hi7ro`5i=O9T5aqJgR{JrgKG7WpSh7;K zIdoK-qHyJsDQ5r-eN$g!OJ&3S!Dv2qSX_ES^@kC}t2%0@YO2pb^y{g-qQ*g2v=~h1 z)=`>NyJuPTcSrH4e9MUz_o`l1m^5lL%&8NNh9wjRty6`fm%~OsG(g1_X zqB;%9qfNCD=?u!73J>OJM`;*k8k5DMB{goC6UBPf(3Xg8oA%DGOZTKyCQOH>Xx38N zg>W7z7j2}rp<;}2gVfVxuW3`!z9T=9K>Y_!TYqcAGW3mx$PtZeELCl)mlK-LQ`Kct zbiinSvei+aO|7J{&YFaM9+-(6q8(CfeU*tSwTRhTR9Qv!aZBS7!~HP&YQ4Up3@P2| z+NS#Zl==#WNS-!o?ChxSU42Xqie8h9e)%`1eG^Bwq#o+o6KbC*RdUrHcn=jvDTwD1?|n<*2+_Q zC2Z<+#Kb)-BYZ+hK#H55Al`8|{;W3b398|KIdOl>>OG44Y zrQYRjmVI4gxybhNOYjU9hqu(C$z_(#Dwgrto2BH5M>O^h!%63plIRn;;l5wd<2zrJ z$ZyQP|5HGAy@iW!J}4k7PgT)BCnK@EtC{iN)RI9nT>&$hqNgpTGG% zh(1wwUQcaW*>(qheGuQwWR=Z-`82`Hrj3TY;M_Ir6fnr^XPm*Lhpmk@+I@O&?ld&cY-7`z|5(i>_xOU(m>8X^AE>X?q~YZWPjtxj%3JP^GbJw4?AgM7#HRSi5sKGOfyiQH9dr zMJ(`6j69KXH{0}oe%$}`yr(H~9|!6?+FLx=o@(-7)vLbdSNxv*FQQ(=xMj1=N(83V zRB`NWNQm$x1lok)2iy{j~8xl~s|HI2Bv@ zr}Ab~5hpZrRi>=c$$R>SE5HNm1#vcPhNSZOjMxWRTs2-UVBu^pTMS@OqIUmxA?&=M zi$o#cRhI ziy>Qq4OrF{9ow-(+(Q~yqtDcd8(8x8!pyqvod4ZsqhzIx??j4RAu=EqsB!FEf!83Iq}JKmSyX&Rt@vG_?Waw+9ed1P{>inG z-48xs7XMgi3YK|Dz`rKVs`X~@O4NAmb`is5*QfNAIQ2*MR%Zt5oS&3>($C=@YMLHH zy299!P_5qt2EwEk`HXp-DO&4luRKIOg#$)O=vHwLZyxzz>Hr3f4OzLp5(q9WP@qE% 
zD^=}1Q>-K@Gh3xp{y&}`;)sPQi|Y2m8xGXf3vYLRmI{%t3y&D_$H#hRQU78^aEU}U zpIdp#>UBaC>M<$F6-4o0f?>MptB|3wsSGA&p*jJsMpUx-DTxi=(j;YW5j3!KUWbs& z_(c(mUA#V*s%6iWz_;cXHlam&lRpnN3 zbA@EhQ_9S1*7Xg~-MZfb$e?J${w%V#kTVWFQUC&{I2irL$Q8;?y=5<@bJ?mf|80RR zO#fpB&M@Z4WtH}7pHIN-V-z2WsFvv6LlNillCI>nM(48YDj&zH9^h}iNjZWwf_T*% zKIjMn+w&S@)FOnXwq;Fg3Hone{?O*>?x7~&v;p-K^$>|wz`eVu<7i93Ou)HlU@TV( z-$S33SMA%}IBXZ=tTOfeVHc{Y&&ykmF%Jp#j0eoqmU?I>k1iaswU$ZI=lvzMK=3p? zVI5@GP^p-6^5vU3-Vhl`~h< z1MWRN#H7FXIIv<8^A>Z^)o8{qgS>4(AcdGutjRF#0$oZk~-qSmp9Pvx3{Z=koU0T=Z5m!2gE# z4O6l~?}35i-erD+-^*QYM7}nOi|H0&@71aBrflHuCiFS*#tDzz0rqU-j>nV|{at`{ zDa3RNeGx;v%a}x5D1MFE8pr-l0dYf;YgmyB$NHg%U+H2eso=i<_AR*U*IrC&FTD^g z{fw{i|CN;%&+5gUSNf3_L{p0+DxCt{X1kw`Oep@n?WS_y`^biFkhcAH2t}5y9iB=2 zW(jxgS$R&SK>&MOgETT z4LI5nMK7woXo%Pg1GoKztJ0&{!Uo^bc}Qi^)2&$($}19K0gt@a`0n%eg%WzP%u65_ zz9nw_o#2F4Aq}bVR%#)CbuukYcUZ)NV!8d8-7=j)IE)N9h&Gkx2v)WfS)?b_8RfQ2 zHlfU_sCOf_R1kgZcmYZBgi!7nj%XAe>+*e_4O{;1+tBmZilZHL5_i$Sx&3BMF6vh- zrw0g+Iw*l9eVqcsu5Y9WPbuRapeIL7=cQrg2bj)APb+^W43CE|)pyDQGR{_KN9Fpid=hSxr{-rw_knV0zTG*`yyV4) z-d=j44~~KK*74pCZ@5c*l7mdLSn}42yk%QxqD7@?6-7#Z!=sHgYggg>Gic+Te?wxD zj&Qx7y2iR%#NJK?UUZ3NNLH1$=K&e1x3V(nxE-SjQ?{$*jT~M=5A~WHYyVSbWj+h+ zvE;&ZhN03$MX50{1{tEZV`{-KN~$Z#-RQ95*h`dvJ?me@kr9p*JNlK{S}@Ce*m%kJ zIF9S@ICE8wik`&P!LRrfLQ1;w6-UZfx-C9-vNT_RYdfbZ_Wh+W7)aB=)W_+EajM~l z+jGx|Hza{P+eV2a;8J0AEHxKHV7k{JEB(8h5)VwmXyt$u4lY;0VXDfIkc; z%Q)~;7T8&qFdep{F3dF;bPag{kL*ZX89A8Lla~mB%H#GMig{d0F4PRtk^{@Uyl3)T z`)|HZR0(sqDps}=uk6))KCq9?;Jnw8WD8(79u2AZg>N|VhfWNK?Qa{}+te_!%Dlxf zyY4U0Hv-?e!tqe03X8rnkkyYF%yn7z6vq9y$H856BV~a)!Kx;2D6((l+OKp-*}$Vg zd!W$0Em43G50w8p{mruONu{W1OT>FgbeHvSQz(8bj;|{l=Xrru0a}z`c4$fX^4r-f z=Zj^ZcAzmXWjY%F&^gMEG+YqE`kTf#Nbr}N{QPKcMSm0gsITUV5s9-3UGcEuy&`3v zI=P7xUE=3MY&<%HM;i=m&0QD_nJC{W4_-*LSpam-qz^vkh=cw>S8W9b6ZTXRn1lVl z;dREc+)+3I@J7)>+uHu#{URL15ZSfRvuD;QRHVv&NGv&Lsj?ng>bf{|D7wezRL&4w z+n!AQ(I32k`M$1a3-z%x@U`>Upm$sFIJfuFM0;FI*vI4tB6I)7>&valKjT3%ika`2 z^&9PtCr<1B<>Z08?*0koUt1ZeFMZ$Yd#`T+4SZyW>5-DCAyu`_i{0Lxjwu(!k4Xk{ 
zITkFBs2xoCT?jXZhW^zjP__H3x|d+1OCFO$Ggq5+{|HBst+DBE@(10vmy4@ixc}iH zg!Wfo++=nd_B=?b`2X5Xj8iP1#BHqqFgRQp^ z<8qzvHdIVNob>dqH2dJd_p@!US92q8_Gmv+3g7li^_V@XX;KkYu`g>|#FlYXL;U;K z-)O>>feVSq%eXD)F5kpLxwhEdHYf#F~00Z^-Mt754+ zeMppIeh$Qc0qQb%OL`#EvDDOo=|qgRsF|sp%1Z_o8ZM3S9ov$$7HIbBG#`T}K2v{| zhMI5K8AK2sL`P1&(=-5GeGYBvt2Oq8dZ?|d6%Ixa%^{O|QSSV_5$m;1J{T&M<}jQI zx2Ld0X~vbjBAruCSL{iQ?-7hy1%^i8n6zRQJ%TLTK~ct3(V0zjKr$eqQR1&({nh&( z(jk#cTcIa%iKd65TusIMQ`Z)-oEo(RaQ3I< zC1-vwq%b#w#xkj?Mjyg^%bM<5nNRywnqk4V|C55HhBmuP(WoK0f$}r3AU=$Rm5d#% z1)$Ze?Z9SA6xx<@td69p2p-tLntMmSBMJVSvHw})Aq=(!gts>NJC-4pMgKKJXnG(c zwnIamQ}1p0wNpJa(!Rn`=2NfD8A z#c?%z>Ue@H*EJ#LLPH!)5Axfe>PWmswm}E#OW_N+;7Ti6Wa^VXL zxS$br;Ei_&rlON$+e$hunC{9oSidu7-|@NwP$K)eGCJx#gaj4MO9*Nj@qTpdE^19~ z1e^PW7t;IZz|GE(l62K>Nefn|NhYdOxlUS8S0NIoc|BQ0QLzDweo<~_>HMYN;IZ)Y zIysHq5z~6vmw8Y$Jq1_G)-Jd?G`pSucV8$*loYN~NhnOEonu@j8d$M(5GkTHi_cWo z0OJLi{SY-bufdx#T%RVnOoJKzHvD2Z;|L@d+};k}m6PG(DCHpKbWqt7w39S;%%3DG z3(x{Y$T3#>F1syf*-_51G;a09Uc^IonSh3*y+!2UnF8UMKkMt{ho{E%9-YJ-R18Qg z-rhCUeiD@F>H4GW_iJa!W)yv=X1nN8iVHYqqqgQ%TXQaX;TFc)``T&fCf~@d2j9t9 zw`<>c*3$ZR=yS#*?TX&R(+(r=WI7vU7gi)|bXQ$*A8D+mDEnk*so^iCh(jxkw;+Em zA@7y(3u_+Ioc5$?{o_0Nc(g9Vh%n*3$_ms+tH-m^H`%-k%>`c-zryi8&ue#)HkfUc zqd$Kla(JU4su&sLdQ0H_-~YJM{>h(rYZzEaxPOp5;$2`-+aV3XJ(?g3{%JboyE*-c zgtLkRy(n5cq~-*&O^1QO0%nAN9I;A--k6x(;ryM+tS~TdF!T)AUD5Z>oXk<(K4Bq0y7QnVV@{5MAI@a(+ zxGR=qo*OxJxIY{ziM|7UyBoHW?bOnF+OxWct21O6N%|?2n(2Oxq#GLWCn9j6=XkYu z-l=`(UHTPLz-ep1wpeCFzrHk-tS1Ly)in{u{^v+m^r+=H?>s(WDDUcc)4ts|Cg}Ye z+i+he(Py*(Xx>A&sOb(%ZBw^k&!6yr`8U1K1v_9KQ4iz<+*MQ6*C5J^vNuhqkBEMM z`~$5*a*sJ~wh$*N7Lv*RcHKj7?bjJ!{*ze3X768Z8(o*^k&K>IT$Xt3PVMp>*`BMG zbZr}QFLaU6@DqM!UagXtbhlVyK6r^d_6{ukFZul6oZl*x_&XYSAj)RAcTaFJE6 zv%wdEpThoE=16q@7K1w5%O$cJ)R|KT{kw~b zRzehhZ#+w-hgGts-2AVoF_KnST+DBoAeJ9RuNvLP2ttCY`t0x?(!8=6H3h-GrBVzF zCDg3J9qcM2dga?A?PlTi{8Lt#1W4CPrrwB^SS4X=?{MdU4{aY}!_z?IN!>kMzn}9$Sn3I?`pQAOa@2 zs*Vy)o)WjlVWpNnB7-k;UwLcATr)cmZfELeiAxTVYF$p$_zLr*wO8v7Z<5hZgeo1_ 
z37KMdC(GG&O?PcVeMaw|bzQIMlxp%|sXFVNf8#sari~RX7@<;a^D#zV7h1b&j;s}* z_DdMAP5Smt-AS_PCNY2)Rs$iJEfNBjT-Qsw+xYNmuhy_LYFlwiQ-)#4`%05Ua3-U( z{4oOM$l7{K)LO;%K%AU}hxqzCnW~@TTpC{;tZo+~n0BUq#h1uvxVx@FphP$c4`KwB z1uet^!T=I@IxC1vW(AvWHb?oaXhcHQJ?zGByF+-wHb>l=)RZo4%C;OfI^rQ|IrvMA zoE+R5g{gRhy2wi6$VS%KT>m?xCK6Sic8pZ#x1pt$mW=rQE$FTm7Ez*HCRmTfELwGL zlE-SOWr#KQHg$b6#UY8k!tp%-!{kk6(*ZPmirl^ehi$pc>pCGzU2qSYhyRkd4|+%5 zAj$Pi*B2?lUN8L%b@(q5n=|AAQ=|KVuM-7gvHl} z%|2IQl76vPr+Sg1Z7Q2Jwy@C>jtigHoz-7D)RX0A1+#0A`BjKsofke>CklO$6s&9j zKXAsO9h7L5xdWJRu)8I?Pq4*fb$6y#xlOzTIeIdFqK3La-D&hkpFQ~H@xD6o%}aWi zatSLnDFGfNsB#ke-lq_zy1k`5zw1JHMjnA!XQdZqj_}W^vysbu@`Mxa9&;;4bLPVE zkLlpGIS^pchKGYF@RF+G!ScpMhc`6ya%s6!^osWP@k8kcj<^2&I>`n0#Z zz7?AN?6{8=MJ-`x zg1zUIk~#=80b9{W?3=G5r>}V5{vh4JeXVwt@36;Ge?7Ofc2DJd+v`dqFOR3KpN&)g zoOx~yBnubvs)rkW?))Dc^DiF%S51b8+4|wz^as>256I6^W82K}Yu|XO^f!G-95oU8 z|4dNwk|l*{@OZmAGfud)UbU$cA+6OtSlW625NRD;V8=kplU212BJ3U)-IxDP>i-S% zms6}r%Ez&MgZlnZR%Gn(A`-z|`Ga#Xp1r@)W2Dpu*PW@C?EZCrAGyDW-yBD7U|Iy_ zz>A-5fa$#hu6s~2QNz`m1X12bZ1>1n;Xou|bp<&ygju|n_*)cC;>bO2raf6zs9|GFx)nDRQnoHuMbB zIu$(5K9}?7<{#_nb{yN54beP$sJE7UZ;ONctlrJNVyF8R*qZj_fV-FFxT^Vb`uMtG z9eB;Kv?{xqeH9B~2K<6?ErjxD#eW0ZCrfPLv^g_=kjK|FB%qg6VE0u{t?)Hd1JEq4 zf^QS2V#PE{^&3D{$K@hs5`F0XFF#fh)w0WL(zGy9bZA1eXiGk*9R$}9kdiz2u!9!= zu6@;6=3)I;oidJ${SN=L36p`-=LYF^i*A*raImD&POBmvOT{x*YN7@p;d5pcPW#!F z4^wi>;&d&*_!RwCTUQj=S7@C+<&&CAiu*}e<9o8g7 z#hm*SmElGag+;|Gjd9np))nb8CV3&&W5sLKoL3<~Z`#4Yz38JrK^!76y*Z?&{KD*q zSb4(;#vGqCBm_qUM9`ovr4RBd6!}yL>=)Chhf@sZa2;y<&n}dWcg*(NKhs>}0L~mR zPQa&jU>%)n)*tUW!K3)85beyI-<10*CPgwf1>6`gA$$sA)Jx>;YmS-~rBsbsFxU^# z7GbzO6fgU#F>=-LAj-Jn=a(0AE@Y;3eg+1Jj+{#YtfoYzpX!_Jb8Jmc&^|(P=@Q|y z^mf)<|NcRxLkYR5FI6|ne8t3W^MjM-y{Q*<&0vO_q{GH>hhuW6OA=nH5z@7}gt@fT z&-Rr(Z3>%gt@m;grQD;*e@JSQ$Rd~^?1-1tb(VYBX`B{cSgQV>Ml%k^f4WF$|4V zwc^wG2Y%ZFhkgj^kB&T%#ti+Iul;);VJoIK{T@@5r-ckR;W=ABYKVb9v|jk~o0Yap zR(>}tIL~AzaqZ;eLMT8Z{T3RzGu&Y99Q!E|-ytBDrQoKmo)h|fh3TdCQdW_X`mqe# 
zm+y{+IlB$}X$!ieZ$*;CxSFHy%MuS63}hi19A(y6htb-|MVU;c3Bos4C7hsq@=Ysv8@KD0ScDnZ0>gH5$CwK=>i@n4uPWi- z?IU#vohIW-V)5uv)?qCc0mU03K~+(g-gTF2Ew)4-C%@6Sysm-L4>a5n{ekzYy%Zfc zi1bQ1{cfZ=qJaVb1(*?iggAAbp2B&Tg#(`=&`NawlRAV1&eTXh+3Rxs@6-RrY1qQy z%XDi`Dvp85x1U>wzOcuNnD>ukRtv{x{^b5HNH5*pPmFtmd_ST~&!Rmh;(Z1NkP~@3 z;AUMY_74>XiXpVm_mVk}l&`yXMo?2WEj1L)m?V@2@y}@9;V)X>O7JdFF#6J`c4`cN z=_*p*lXA>=0t}e3dJ|l}H(QdoV{_GCgsUp^4X~DTHSiaD_t7y-v__ttc z+6|hsYitLwP*S%i%(O>QIAq79GOQl1D(w`%{Fu6QE&U=L)_vsnIw$fvdsq`@XRh_m zN|r5=yQw%0)RSIyA=sE%1}K@i`>lvI)yg8w<$} z*0s!owH2^;ZV!m#!qmA67?#ez%eTDzPp8L%B6)i9;Sqo4irz9sV+s z+x>F`lVsn0`#h71_sBHa+4L9PxiWw>Om-!rhLV(6?z?GUS52FcN5lZ;sEktMJca`g z{x7=nG!<4(aPCo1!sy-jteSs*9i~%uHtjs2ON( zN*PJE;KB*ypPgCcx+5rV*l~Yv9bh_7FE0pUAeG~AfM!e(U9h=oIWOeMRKyJC;a>Mm zqh`O#-4%r6WJ@N;jC`C|y9z%@C$HMn3Ix6G5aH64miI_tT2tv4Ejm_r+0GW1wh!oL zetG=CzkFc35u1~V7{Xj!A2WxYdeb!yO%YYSXZA*vly&@o6)539nn4*yzo;XS@k1`h zl$$)9iom?WE1SisNTts910nFxiZLgz9f)*Ql47ZbDE#)PBU6}ZeLJoO>n1Ywi_}H9~3*D$k%)(gYB6$JJ3Ou zizI{2OIw;i3?n!&*nV^-Oh9Mlqo>wO=tHcdl|x-g69sim((H#JTs#dM;+@QBZ43yO zrD;)@GLcF4mwAgXj>Ff!Mmn)+h83bsN2i1wg z{$bwBbv&#g{1q?3T6;Y7c}v&_a}S}MlVU2eR#w=?-4y~ysWpb%@eVvSb?r-1q`Rq8 zs!9ve(wTy&8+J4ZizwZ3QR$R~N_M5_0TnR=i|U)ucoz!WnB$>YzoU7Ru3_Wnzqh)r zf!WV5ltwIF6?yX=N<|!;tq|ZFyzAxtTMj&Gv(Mh_gy?JUA9T(WMShXc@}-nD1YAAg zBK8bEBm!DMx~-TTniyyM(dUS0E^g6Vmz^Yk*zjn`vO4|q-T>>b0*ZU2I?%OzI85sC z!?-E4AFezRyvWBc$Ysg{DVtBYn)FkXVDGnelrVgQzI}jjbz_ z-yV?lm#&b~wOas+O@jm_du64a_~G%NN2rOu-sziZrCROl4>mqeW+e7@HO5Q&0?iIR zf1L7Ih0<{qDUI@9hDMQwsJs4@nmFFBySIu)*SG#>qW{X5#NyapC3V7OJ?--frwikk zxJj?Jo!__kKl%+MjN;ygHK^D~v^pvX#ky!OEYjNcYo2q$)6;gFEIq=-lRCxi6>h z@#d}>B43IT?`uh%5TXm-I$E&nP}26YsG0oMg9~m^x+okxs)uEN-0bh((x7*%oJ94x z>uc6;oX>AQ=2YDUi@iL5G57hLZIFd??<9-pp3b+N2w+Na1R=Y^T2`BvgOyt&W0wsH z;rRFb4_E6lBNp6Em-CriC~7ss{FCC$$8#K{N!I3U!g)~J9dTl)+!dvpglHnc@n{R9 z&YH^5W$yv*<5NWZ%7LF-Z3pIM&w+ytmO00j@(<%T^kOSoN@lGk^8o$cgho^XqV0xj zDF)te%ajkhROQdhRHJG)H{W(|IO@^VLCYLcRu|?rfgGUw1iTTvY#6wliy+jxsLi<1 
zGE-xgIXTkVNwT$PWnAV)uI;i!C%_@hLP{0r`)DV6V`BORM1)INMC1F#qqoj=QHLKZ zd$oGql}E86aUlGM^I}zidpqGl2~G-HI2x=JYC5`zB6HEeWX=rOzV~-fM>mnUQYvjs z3so<<(1Zr!<0LhOOlbkPLIEMr3Xu@`1QJK3mSh`6 z27MAhisbNxMtD~3nU7pln;*LShe!~xAwnTb7~9{zYHfxTiQ@%4O9Vkd0S;qV(wf+b z8QUNY}n-WXLh@;(WO7y!n z3byJPIsJQXbm(_DW{7JjvXY5MWfUr@GCNVK?6oWoX-sU%E~2gJzrrkg^r_T$JT~*o z65BHOwsj9SWt?b(aV>*HlD>A;0%CYq`{o}4v(#U0x>O8 zrJxGF37c5?81=o>2C&Wi>>Zn(VKE!_7~9B!gn66@;j+Ul<*F-$WR-h6ai#A3JNXA2 zs7_{W3wyr+H^e7HPy5QDDi3RTuuq}!#h&epxi#1?vZ3~Yt$vnfG`LyS9M#Iqk-I+l zogJz7e95kOP^U4djbMIsNs1Z$A)try@zBahe(@IR~+Gy`qqr}4D+}P zTH)i)*CGzmR8xoP-K*+}hH%@Q3nAJMWmpG%-@f~DK45F<)LN-TDWR}5YK?B_&U)3`@hb@t%mQTNXa`b_W2Hhx0ahu&7aY5=e#bpT32{R zYa3P9J%2f* zBsZ5B?w6wW_7BEqcGucJNNHmpLGcypfu_$E1F8Xy>O%P{4qVxvxvQ+`&8wz&wN=Zi z`Ldwg>8*D3me=RMVfS5AsQ#-pDK*-xtF);wtpgtn**G3g$5(CuP6U#E~7J?2+@v=SV z*hnLe)wIb@*#l8uS>U_Tv`PcBVqQzF)LDGxSY`TB=@pd96t3bzfV`|*L4-%#nJGdZ z&b73F?bk4!_aw~rem!T(YwatUZ=D=E>*a?tg6@rVrxXGy~m=jex3mZmFJD#ixT`CttR38wP^ zD>H4+=_q=R$a=vy?%`8Uar**RNU5NG^~Ht4D4&0uwU!DYrtA%YXHsr z1XqUU=9jE2s{^2S>=jC&Y;w)+gq|vG1#O}lOfX4IGt783lfnSNlRh_)A!>dWdeID8 zN*RHmG{^I>6*jo4`0IfTZZZsDoo2;W+mEUSWFhq=Y`VsWhO+KRs~nv%EVDKrpO~gB zTcwN6=hcLv)d1U1MK7#XP0*>>va_MAr2&SkHUrhHcF{cK1Dt?$L1us>rM2il)D4TTBBLI(@SJqg9b_-Y&Jw%RUvF(XTTN>t}QvX{nWzE zR#w{8te{6(eBHj7v@B&V>tA=1wa9yPL<=zy9;RlJ>RluqB$GtC9=*>Q>3DDi`{MAt zOe>E9Vb@JJw@s+FT^0Gsgt2<##~u#O&hRJCUiI8&!<->kSlkk^37lp%=#EtOg|_`? 
z%)-vWpvU)L({-KcF24j$2KV!0NPOI#DG9gbV7*I{>vC?zBx`C6|1oV<77go4(*{A*NQAa$2ub<=F| z-K?iBpsg4$+|q=!{t0|W z5+BN`n8V?E1IaehKPsb?JgPbLV0vT>q*kh-(Z)hBJXH%df7-cr2(=CGP+v%gCo}Z` z#7T1UGJND7c<1-R;g@RPg4eP9v`ePU{(R1?g(qgt-7DcdGq`m*{cZUH<}{y3U-Yc% zcS1ZW_}#kiVSdE@gB6X75Na&*mJiZFfJohi=aTWq`622L>*n?P3EB*EpKkxlIN0z1 z9WX~(K$wu<&3LkaQHdpUtW1q%C!eW9$<1v5MZD(}FYsv5IUrj2G2&J4|JH!OO=#bT zMnxZ0-<{r{CA-4rmKp-72#Ll18DX?+x%`6xp&++Zsmhcp;{h?}0$S!ZY&UO&Xe2YU z$|{ry7b?Chm&2CMfQJr^8r>|M>-rCWwO!#N0J5!4Py*w~d_-LGb_-4?`6j}L)k&GX zY3P^FE!)Kno?Nzo2E>^%eR^Ad%M6u0FJ(4+znZdR2S^UwI7SV(T^o5Fyf5hP2 z@62lZ=`uHsF^L&V4XRf56IR4z*I;wFGd~OwDG%gWoXOCh%5{CySU{aavy*-M$Od2h z6~*jNV)%n(UH^WtQj@rr7)NKqB_b>QT#eBv!EIix1&L7RVWH{j1Ou3>&;n5#!5a>uh%0=s zjm=|`2GQ|C4utx^c;_nrX~kea>pp!0UhG^>5cq}xg>)_ftZ859v9VlTT8 zGO2#_af&$4)?v9`w{@suhq>*B`&69|ZP0n`v{IB%`|xPR$7xYRFnU4Ub3kPrbY1P_ zpdWCVp(t7^{hDhwbK$4&x6t|?f!o{7&o<0mxUIo0rA2j zo^q|R{YQeMh2m5jMS#iySUdL=a+7sA7hpr5h>|4g#KxWXX$ue%x*9!~?%^m6fEO3p zt^=g_q>N7-=o_ccgswVA*l~>V5=fqV41=!&s) z**`b<^w9;i(iN?z4+SoC+MEqjFJNlcM!gK9ktho-} zA1;0Ht{wZpo10B9z^A}$m(%Xs6`mzS!iSr!qvb|4hkB~^H|}(9fmg+p*XnP0{f7*tkRouA%4p7q|7nC<@`zZkqfRipRn^3?C}=0wqu7gF;l6cL$Uso{@aui77Ef!htjo-Y`<+~HidBVI_dtT8?3V6k?YqDOs^Saa=?}lPyCE#v^9vc8Xx|m7cjX_qa zYFv(g;AGAQYgeaFk#>WKR5HYkfX$71+4K;;(ui4(PwA65HY_+Y8>8~6ih?P~S|%06 z+6)Q%-_^!Lm>Yve;AVC2l(LkG4q90mOLIw7 zPX(;?hS&PaPpt6nX>?kIHMQQkbS)iFni<{@?>vMe9eI}A3hTQvL5xd$8KLzb(_1=3 zB$SXFJ4J1O$)z~ZOCC%kS807$M!!h{GAm(uo!VR0CN07ug*jun7LL*45j7qY_2HrH zQ&HblDge&h-;@br^GN;c>kM^@-q9Jb$OnXXF0;+pLFb2X!CV3qY=oVU1 zU}@T`40<&<%!JqGl2geFl{T73$@|k8pLk(FtyQZmcIX=ga9)$^+8~8GliFyR$ah=0 zM?r9J)xDO`4Pg`jXI4BU*Db8Lh?7UlpL1L!CHq{6%of`w0DFa8%9h(?K}Ug0jbcPz zx(zJ`alFL1NY9(ExoF@Ltf~GS?c2WK;J`Z79yX%kCE#X7>1cslbwBMx%IL3bYXxTT zU!VOTUi2@R0JI8k>6VK*0@M~p`HSA*U*d4{1UQ|#Fs?0z(`-$FjgVWR~*)rU( zcEqm6ZN60+ORn#cRnhE-dm6v76>84qDJkd@o+Z)pGD;Mzxl5^aQa2Bl#dD-^+xEtA`2Vj1a4ym8BBQHex&<1IwHYo6CBD~gtc zOc6knzB$P4FdpK5faiq!*#B6lsPp6WueH{nA6;wy+eZG0vrpgOM>2!^7<={^e1l&t 
zXI@LVbhs6o%JS;1h3_c<*u0L7i>;dF((bd^w(u4r#K*io&q!eSTlWsZv(pGRqbf&3 zi9c}?X+0dKsbymS*6=psk4#DA?TO|@kxjs{iM(wlYa^C##3v6<+9-7&wjG5M7|*1{ z*Vf5Vzq3aVRqjGw{NfhCx$B0JJG$T3HWIEVF`Ff}zbMEq4Z-N9V z2rgM|i}R7>-^>L39_D^t%>cmr$hcBZo+cTdx(}Q$gT(`N`RcXtTf!rd;u(m|{#&0@ zP@d_6FVxxLmp z71irAn@)|1J(>!ZVco;E-?kKM0-lvyF7Idbwaw)GxShI&iDlFye#qtX6O3cT!!|-ftq>iR~@{|5R+%j&cFBUEjQiXt0k)q zfZi=-Z6U8UC$-MnZ?ifEdqIz(m#5HX_Q2FCmk%d)ru!(EW26IdB0C3mkGVi{+#}o9 zRXo_L(BbjgqTlb>y03%MJC}}pc09sI3y-hqOaG^hV)1MTuhRM3rKta*V&0{8XZ@qZ zs$&EBqv_ioi~7Tp=|6<4s=0eK;aEG!zAx$6>E}bDR_d=(4-qeV*=L&5$5yNF1yc3t z!~icRNG}|4pqCHIA3whQIXa+|H4Q*zMElhHxOREy;^^QeBH*I4_yzZ&JHa~4UI-536wf zW(RnU@K>t$1>fYf3PwK=f5`zyY#}D2_~DI0beMqi#Jx4&JR#*{Wp^HbVI$Dr$`$?< z#F9TP*|HjzRk}Gx;-=Co#}nt>FjtjJawwnyOb~l!a|>YzfY`1gPa<|&%*Smok;~`? zWJQyGeLjfFLqJeL?M|P785a3lQ!i(HR2y?-y3w~ki?Q3;De9eMYJ6-62|v*ijXbmmequT7>o1^^h|n9}r{vnd8w_?H7)l zNdj^R$!ac0kL9#Kq0*!ui9ZD6=mPt(?6OzA@tsN-m7de(&5zB^$gf4&YaHt5z4e9% zC;#RVJWpk^I{YbHzZulEq!A2vB&0QZ_fN+^;pdl-58;gA2j$y|JB|GIEC)(W65}Yl zCX%+q76*nSzF^D0wFIAw`JoO7t34l-cdbFs{-k)rlhT|@IO8aF zb4p%WK=yP)2uTdMeFdR-RLj;@sQ;E@?; z3?Wsra!9O;I*|rgz9+O5r2YowVTQ&1Q!zHDkWUwRuJyIhvW9)$sD99us?n7(AI>ZV zo63A;O|CbJjn9^5KV>L=Z=sn6h}l`pQRA0QMYF<&-Od%^G4zdQF+}}jY0k?b?3OKJ zPqMY2?_G$;i$JAB!xBXEXBV2_WDp(yp zyhbguJ1iN(;rxTW=4i%huEh7fQ z&P0NLxeN)=_76iYwSNTBA72!k&kMyr?MOSuAW=USB2K?$lhn+x?a~B+iFw^-+=UZh zz8-aLIPYRsW#?ED0Xz}*s1sy=>#gel+u`$JQ?Sras2m3SKh&y17taCDE}Md)XMQ(5 zs?U0)kNX3N*(~6RS%$z=qq~LCW9WRoBc!UXLG^is{l6@63ulWx%6~kH|Ckmq4YFmo z;f`|3dKQyAg4LI!@}p_*m!7?elnJj#=jSX-MPil@!n>0Z0Fy`G8ShQMSE+Mi77Bi= z!Cc3%-u|8|v|jsi(JPTcRSJC(rHuI3fnGQp*z4im(N)Bz zb8_v5DnBjwwEFSo$_VJhb9~pBw|5L}y9UB>q+XAKFP@5fSYE?e#$WQEV@+ITuqN{_ z^Pe(W5gn=iAF|_rv??!=+xMmZl~1>;B!REJZ8$4~<7hrQ@D9ZPRJIyU|W>eHl$rJqsQ>5X>_o8(vPuz~`K75fT`jhMLP}@e58x$i;6s1L% zjpYRt`$qtPMG&BFZUSqP4Ex4BpBg3ZU{awuitB5cg0Q-uFkbevTMSssT7-IsLrwT{ zAV~Zhc1pRgzrGpMY>W^Sq&swLOMBG~Oqj&%U^DNW<5kE?E2R+>E2|P_E*jjTLmV_T zA#wGKW~;D+N6H+OW|R@M2L zH{QOFeu>e^Er%)o*t4Sd4u@|Ho2I1f#(j=5l2n(|LA!XaieA8DXG9H%jR|Tmbczh> 
zE&mKXl1)OXL?D%5Ln-1bQRX|v-gv|CLM*CAxdVzgciBHMEmHayevtSsAcKxyu1ut# zZTdTB>CXaUY)l)osEKG)IaIU3(mZ*zIZoeub1r{Gwpk+i zIhyfD5U@Xtd;n+HXKVh>azh-6vN6;)rWJyY%Mc**=%oCARJ~~YjF!sDNx*@xNUy_eRpT~OFlf48Nx7}+sLO#5D!si3;8d-i$PYNI0eiwR8k-Vk=GK<}VQxZfs+E+5rJ|K#ZOQi=0wR zdqDflb$pBi(;@C?*wc{dj!myQnXgFxLxrP_|N7C1k&z9g2P% z!e#uqwp=jyc+fEc^K)&o@3E~2RZC-aE1I<24@wU&&4e5D?c9_)VKveDI|&Yx>- zokMr0N}j&N>S>PRoD$Ad0(3|vlBV+6xILND;x^;{51UyH!y*%%cG(gsr#c^AvM#u( z{K3pwJ-bEIY0C&g;KDkf!BVeIv39pOEE%@%8GK<9paX9yqx*L!Q(9fm`9AkruK1bL zOa$y6$;tc>oGE(1?c}zH+x8BAU0N@+>M@g|oPvKov~~jbJxzKnIRv!?T%GF*4Z*`c zZ~%<2^TeAC>n2t;zdx3sN9DFciTk6buR21Ms#iiHd3@(>RPE1a5~P@KLM-`alhlqI zW5h4A4;)mOKRSyWPRxpbN{d+jjSVa{oWvd?ruscbScVLk`;JGj@p{*`C|iKPBF1bp zE&>FaGO(!9I};LI5GbAFw+h+>O#CnJ#1g{gdwD?4J0x$K~=s{`1?= z8qqMK?MK4C&nV7yJ8_k82t6V9Uj0hk*5G!OpJRVMenLPE{);l1{xe{IlK29|TSFbn z8)o?*8C?E>Zo*=~Mv`{La}VXYf9$O?U)ax@=;?3}DEWMiuiIgFd>3%PqRswfC;Y5S z*}8r1*+2WI>+9C@NG;8DT9vU!!qh*}`R)@%&+88MX20LJa?|e6r!9l$&bG-ly@+v! za*h{@CSX$IXIW@gVAFpyx#$JB!@g&svc_cs_w@Jui|SZV^)+{S>VZ_}x8O}h$?iJ2 zf`3}1UTYyhFu33YqG`SNd0dUlO>G7Ua<(BSzJr!gam>9Ub`ej3!Fay$Mw}TM^40Z@ zUHs5-tNk{{b4P!srIuc7q%Voh|JuTH>uq~|)AOQy{NA}ou9g_8W`o-JF4N(A zF`s^8BJNav_c;^(*M$^cDr@q)9aBUXzdlL>{HpTpa<0d8_wz?lj!VRPDoLLzrD{hl zM+^G}K7P_^z3s9asAKZX@j@R2@eIluf!W;a&Gst82C5)C8^!M69`2O;*G?w8Rp(8B(TNp`#2PoVv4o0J zoSF)MTfV$s?0~$}a(Mag-ynf1eO;W6S%OT<5T#!x<9mS3Oh$B1MuG#j_Rflm<1pVE z$*vR9KUIJ)dyCSJrn~LHZnr0uGpX_IkMnFL&+=~r-YnK5d`y;#vAqt>ZQLYsB_{>P zoD+opO&t}d#1PmROs#;c9bFCaRx4a6i_{w7V_J2YBC5z65HK})Kf*_(;S0$$V{bbF%iuw%4Y@97_*2+NS|Ri zSQ5!GKRbYrxOAV*L^@JjZ!J8d?JCN?-`zv#_w1B+KW*cZRluQ<9Y-{yXmGC3z_gK` z=!(uCYZgiYrHtKNBy)d(ge^!~3|0`1?c&+AOl;<`dr@|cl!sSD9@KwGi=qyy6DoW1 zHW)w)FXnQQb@}qIgo^tuYdF)@O&qJ^QAMZ7A#KH;G(D-Bg~fF>3{T72x~D~dv$;`9 zYtF8Q$4L-HZ}D5@!OX6`t9N35v^+JO$`Ds(!?!gd58)ky-8CFDcm>+)bR5pS7~lw= zpZkj@zQoHf;oE9USz-EaWyww4#|J(^hj;M^yTRGeiK39Pnm<(z4up8g=Hu{!IN`z~49V z_D1?vx#@n}YEo5Y&MlnCUG%j^!#LrU?+uz|&i?#zU(}!U_egE`1F7({K!*&xtI0q| zEIXQCmE-4Ellp|e3l~xjrG%yK&wGsJ`wX6j$+K$$jb<{Z;G_CV59j;Ay1wP+`!$1; 
zq5FKIX|VIVa7_Z?c2`xx0RCTk9u`=yL$vFo`(!@0fiMi)U73du=OM>DpS{(#kH3~2 zj=@OV4d`tC5Aps}^O6OxVi^S)MQ3_+fZDwqTEsftY&J?=Y6nM^+dAQ5U@uUfA`^$^ zD#bRlDLF8v15RlaaJu=2w@WmJc*`)xKsY`duOzQ-V(?L|ME`;FZc)E!+=bSLTuo7qb;Kr7 zI?D!LBVJuoEXk(l>u}^s?Oy$?I4WDG67)E&Y60Tw3Tp52gdKZ22{a1tbGHS|QK+&C z6YHA`xyGRIEuVDW3maJrrL}rFG^}4o**7FN&2>1glVj9C&h?b{{<`YtCeSUZl(X;> z8ycKRbPlg%%zQJdO<$)~|JkjMsYTD|n5(>YHO%iRz$;vv;iv=B({?io!sBzA~l7mnDHG(QDzcgzSQIC-wM`q>6K(C_q*%m<}_a7;yDxIcS zn*d`sQc~`!gTXtLfR^$%vg%&K3O8nZat3C0AdGqfWQlx#?@RLlorWwfqAjIvM`dHm zT`+M7P^2s_@vuO>t1F!~*ZzfddTaMpgMhpXNN9Its$ky6Sj{=Sa(4XX7-607YfHY5 zCQc7uxINHCd%-shJ%b#JsoR_Uj#xLCM_)8Z(XmL0Z8sdMJ>l>$elW=Z~f_dCw zXOx|X%aQfN_e|SeCaCKV+657Byz}`x8fD)b<)knt|Upj4tY3~)0PU7wh zo3jYP)=ICBzkPCqSt|)s?GP`I#1n5m`4<;bey1@u%Cy@p=0Ptduk5)Xx zC9V!RPx6IN&X+d5I?r&8;RGI{(9@sL`PVz3$>1PVW@w$mf~Z!Jh(pDP6<0HMGkgqI zlL!0dP1Dz#1L$D=7PI~xX2-*E%P&*0;qq)sOft}70jO(aU+Z_#`g@g|?d;mQ$YYt@ zSaL|8T<#%O>jv(UZjl5;o2>X1_xxRbpF~;BJw0C-d&LzRi1L1b zIisB4rW?9mH|g_N^D|JDUOS|y7@OGJx)>;w!YdCylgeUd=JcEza4OewK$7sjz--p4DYE=(fl^lbny|Cs8FXc=*q$sR3oEdGY?SH%T zfjGKV+CtptJyQsXk^p$_$?SS6M>jzFJ3~6!9V!OajLI4c78)89jgi(G)AqQO(7VNf6{X2 zKIR(Gk($?lG!_D_rAPy=s;tJLqWT;h?H;761R_H(>eaHx>bJ7Xq6)@ekx-&t|Msh- zfEo;T}_%ncwj(A;k-a;&Kl7P5XW@9DP$(q^TZpOKMs znEkhEqw(J86DZpr#{>eo6*hh243mVWIQWlO~3*dqWg8P!M@jNmU?~b3R4SIeQPn#ysfccM@%^(RfH#({EZ2K=LW@b|Iey%9 z@_9vr{P260l^!AyXyfCVnx9op*gxLU3u4t>l^L1buE9cW;ze{rDYq6(1U((^WE?XK z0T~m%btg*riUo6#2Ice!9u*@V#q^1gC${6n<~W<#r7TGj`pSk>2E2UzMICSW$b`$O zC@HDzje<3=aG&a4O9zffv-w0`3T2BZ8wJX7J@3#DxFC# zzW(Z`Q{WlwT+gQ$zuQ<#?LHaK@0z!YW}V>FHEkV|;ZC+H3Yw{?4fmQ`x0=AEC|S2< z;__^*CAyAOWt<#E#;M)xc_77~$!fI4C&m;=R}M3Zi49rNIeVsRTyMxR|FF8v3AVKm zW5RKw?_h}Y?=sFdXfG2_;pdx3GDBX@e~;~zfgv$#y4aJ2p6VXr0pkh=?{tsmUyeHkHDyoVjAi>z|oaO42_rLdX~u{cVE|7ZPWz0Y~&Em;64 z;HW@%edcY9fA#U~bRJ`DGiE+AMveTJ zm9S%^;30CDgV`DtJ%5tM3eEvF&j|7M=SwOv%vCQr{9Ru-M*!3v7lSz;DR$o_Jh_YP zab~n~ 
zC#y~^_G!oQpZA6<#qZG%&g5&OinBaic1cD#0%BU>q8b0^;A#JV?w#MA_ioJX)V4W;*)3?wWb)`hL~mPkwZqdB^PM63@;Rnadw<^**c1tI#8@(8Myo!^v0GhGLuT$eaIlmqOD&my#da`tIHrDxEK>F*dxR#YX4$ zZz=g(Z^C!Qs$yH`?Liabz|!xw?^a3WUeP06Q9+NplzWI~;_qA_vM%ASX}GG#uO1)q zl{7Z}HtUq>G>5zynd;_=Vy6!P=*g98gS6B5NY1Cb?nu~vk8n|e!J@68QC8t)NBzC? zt{Q&gM=O3$1f0Bh)r!-f9d6^9mzwJdyJHS34r|H238NXBD`gsthKC(nDQQE zeN7dS)LJ5Pif_)P+@Y8vR-sIc9bC(}9`&a19#VIdndlnmj zOzQQO**`jbzQ4Ur(-4$D6Sx_*{Q*wBM2{f-3Nv2@5z8WSh_!exH)1jH5okk+MZ@!_k3Ae`DQrVfGxJ{t;1GS`C^ z6)>VQnDA;!ea%@YcDF~SB0VZ`C%pXX=ItvyX=V70P^pVD+o9vndEzhs+p;r)%02}e#T@JE}MbI*4nUWU5qy*Y-QaZv>k%$Iq zynYM3w&My=vWF}chiJ5(;azQ)Aq+2c9t{z;O>*e3R+6KRf<2eHI?R%E z4G9aM5Fq<4s@+@_b8CzhiG}5@Wz%Y7<1z9NbX%UXn&b>dN1oH}HCL@0Q{CH0A9tmP z{mwRxC!#}ifGC-_mzMaVCqB=4-Zt>E>~KjHIFw>|)C!WLR)vtCPSjGkA zU-DJ7-E!4|Q${7_!mA|S%?I|G;u@y0a&3)Bm96TSoDFX~49=TM_;&+<4f*pcx#a7O zp=;CL^YUQ6R;9%y8UV$PcC@NO#F^tb5iC}N+pv_DeTj|yqLmA$A)w#-FMF{eqfe_ z4-KoJ1nc%3Yb&;x%GXJsxAmMkzKL}}Wr4}H4y)+(FVxQ)LC=+DKmRwii+Cn4v<*R@ z2Cba+-ISXBw7^^;f#J}3_l{z(?EF7zoV{HAw0ax(fi@Sdv3CQi-E#%5H-x80s!zYv za`slPcZKdADK^Z1(^j!;9Y*hjFic%j2t4c!)6t^}K0-Y=E*}cT{2K+gUTD#C(fo~nV;(m~p}7*S4LrfWy5vO$05`6I)}zu7rP(62-jhVsnw%Rv0ZyznI)RUQx2j=tGx z4rmLS5s|b=NLiA#?v2wKdIJ`vla!=MZT#a^!qm3!%e-_z+t;ukbyy}XC zsW9?U{dr$@5Mto6aEboNef$7%l>gM9W!{u;)}JLwOE|i}I_U~~HJDV`$gv@sM)@l- z=DJD!qBOp9E(t|ZL#`#Rvp5l*F)Gbw$8Kb$N`qLw#FQV7*H9FKkFPDiHmQYNM%9R% z4ip-kH77IuNj@sA@+f?a>o5cMVgQ(@i-IB_PU|JGL5@?btCjeT>W- z{TwhQo23|z%Zjut;f+pQWy@yjxmHcLWNWiL;oaTZ^ft}$jqDOR>g=g2H z?ApvZAtfGrx_C`q}oCCeqeVY4l!$^`|k zq=cj_IV{#JUKpCV!9H=!sE;AuID@mE0K-K7GA9y*EiQjF^GzvO@d^}d>Z;k}#*+f@ zK64+US}V;R)`EDx(Pgw>2CwCS6R#Vu5*_hvkZ7jRshLFd+4RhK<~ZaHgbQy4CwwYG zkEhy0^aVqFibdj8iNb4O{90n~37IbUNh)pAXM8m?`j~|X5CRlct)^|1juTP1iD+uD zwYTM!qxc*yR!!I1xR@{!9#h&3I_g{@QEn}c@t+IFOW(D}$X-F6=-LMjha+MRs_qg( zP^y~21V4aqDc{~0G+iaHcpJ5(9pK2Jvt<`}L{hD6Tb_&W#^-SEW0dMsi^w!V&rl@1 z`ms&i2*YG-2yM=d=K9$&TMTYXz}QDe=1NlQ5QFUPM4Z!qIHKy~{&-(2R2?0(a`9%? 
z4;XkuxN$}XJ$zv24#XsT>#EU==8j~yqX|H_aBDMa?ouUpL(Y7^-Ci~TJ#z_r+}UNi zc3iK(zKH}~HEo&ko8-3Dj;<`nKdXYacvO;8UGzo{-U9L(;09Rt;nmJ!q!H|1GOi@j zXchdb`8o{?IJRV7k%sdyt^xtd6w(n+s@OY7h)uk9auoD z?+Q1a`WuKxH24RPnZZ8Vg@}BIsl=9JmyPjN@81hiUzLF(gpAjNTO8>JXSJH=J!f`` zeq6XV%O@<1TvBW}bqv^A^2MREI&b1c)0hi4N>sECF&(V0;LE@m(&!B`Bt1oN4K;>6 za>oCt4f^)u$L|gMJ(MkPk7J|luV&8IkL-<2L66k$Izin-nVLrb&(ukr?7PR`yIng` zZz*|sl@C@8xFT5p9S{TM9>lHq0l$!6m92Zd4N~RwON1$b#))-B(H{n>qV_~x4=udv z_j3104N0d3klPS3DvSnxMdGJ2JM8p3Z%eb0CMiZvaC~e3{9}7xN2?7hV|oQ)@Y<>O ziho_yx^#7gkKW=$sX}K-PLNMw<zA^H#^MdbxJ)fjNv;=%p`woM)zH?SyVolwHvOp7ko`Da1q$~@7 z-~9RN;t^M;+Y)CQ&tUD8I>=;afb}5SwU00mykz=kp0^Gl_dV|>6t2l? z?YtAEHb378G?c7PEN_69#)k-N5z7;GnKY_4cs85HBWs!#ucezsM-0LJg&iql+#GVW zs2gZqHj4^rUh*wUhU!*hX>a}2X~-Q zr|IX#Gz>;^heEsvOa$!b3wbmpVoW>K-1`-@Vr8R<^wE;MV|8qm&kFsD;^5-e%Y?^j zR=i9y3_v50L4?B+rCIs5pASugsjAU#L2p!v#Fh-ADxEV+QEq)MxKL*77C9o}K@l5B zthH*}jj7FMIa|N-D3MMI1g0kBa66f@aE=mrLCI+q6EfjrH2F$=CAIVp2D}<2a$acU z^aY_7uy}{5GB9jAZFU97RVkAjKFZV@p!TkP*{j$tJ#5TmHuAe4@{3yVFP|lklV7}K zOGlj$q0B++&P+8%?(uWMHT$AQzNE0dR}sx@U!~wPqY*EXbl}aEqGB z(s~ry_I#}pCq`eau5S!qi;XP%I;b#GGSdWaS$eH#ncUecucv7rB{X{7xj(CY#@zo+ zOOE>L?@7d^r*vGyxQNcAfv%;po#n)theOfhuL{vSSAdX0vl4JJe)QU^t^Bt1`>*}P zClfc0$`0Ks+jIMtaT^Sz_vS2}%|*=;&Uv0$1mq%@x{taWIANk7n~lFRs@Co(Ci;O| zVkP5fvf47*!xVG=0xszuMzviXW?R>gSml(-Vpy`p59u%uZcOgGhIPB@L|%Q)h?0T2 zeQ>rV5|Scff#ShorjMdrk*eY=;*W{^{%xaM>384XhVIv`d)2NVkD_3t@8zBxM4A&q zxVtB|3D2pJf&~$OZxYyz#NN%#7+(NKa`H1*2`>WEjeJ-%nD2i0_P(oj?RDl+RuTOt zxc=RyX|0u|R;t6o06kz$&-VO_@@jGU-gea5ZF3*;0BgJJ`03D;G>m7Coc?^4(x|`u ziIbu^er?RIYz)({j{R?}X@A-Qr0jzSDjP0A-X^v*7tjq*_CmB~ z_0L{FWu*s3>EnurzD>T=N6z4Mz9;ne_rf0WkW5;^W527z7T*i8^-b$cE7|AxSsR$< zqLEY<4x||6OSIMjA_of#2k>K%!@ssdpV5c!7MF!l1Mht)x}wQGt&*l-i6ZobS)>Hn z33(dZrLKbV`?}xHuf6I<2meIxH|FB?tXxKVJb+Bx%rx9wTp;F8H0If@N^9c6H5)bi zV^LCze0*^Z4h@|Y%^vt$KV$|zer6lQdC*8^(8H0Z3ZX|eRrk7P{%wx=a`*0)ef{@? 
zdC9-&LZjcJ9oyB5OK|_a2d;o3+2@tNo43&v=3(hy(%-{pZ!bvBo}jpG`QVi2JIz=o zT=t6!3u6m_ z*WD&n?v7Yo<8YuAwf@Yyoh*1hn`yc*2 zRwGJl`xCv`_QSpbCvjr>bJ?EzApBe)EAcHUHSajp#}-|n_Bw`CQd>wrp*>uZFfNAZ z=m_Af|D*ZhoIq^is?ON1p^`Qw+ZcFIeunV;4s`?(=0!a*eGwy#E?(D?QADmoqC2gl zz)Ux=xU3a~rog?k+ny@>k4hlO*j=`0OLsk*{D-En>`qV;O{r%Q*00#AVeDd+jduJd zK`+Bd$e@}nUg9Etwpd&KC*ZIufH&PR zx-KAn1l_n|oqrub7_jJhIJ%)p>Fmd+W~66o#=EGw-dV!#NQWfsPgeo`gRJS~GhY3c zSZ3(<^e{r_aUwCa;W~*j(BeTX>U-kWUXLvyM&aA&O7KwKAjbG)W>*4~? z|Fm($Jp*>PSC!8pV6)QECAqS-BRQFU40bU~DC)Ms2xO7^ClbrJf8BZvAgt%PRr>1P zz$%#{u=wH>lX#M|&hQY;{A09H=?oY%TBfY(XizN-j#vT~kCxS0aO4|$u=vJQY`Xr< z!4OK5zHjiROV3%)IXb_ur{ z*RL~#wG@(6-wA?0xSSnK=v*AKS6cpNVsX0%U4=_ozH|`Xp`w`Q<-M>cH|q9%j%{Ie}s_~xG4S^V5xP^l?MXHVn*Ls_)i3q1x}tA?i(Rz?(PQs>`Z91tk=Gt z!VS8NIXUwZe`#4d>NBuc@HE+Woj~t1Yx}L%xL$bsD8H?Nd6we#g|=X5XOCmp#=tjo zSEW${1X!(rh@JB*Ie(+XyiPjaRXDc{0I<3di<#on!?O<|*8HU$LqP}Cuug*l^WHsD1t%>jvWn&``>S< zg+1^%(JeP};X--8-UV&HD>04P40WM2+48&Si3z0*nV?b!e@>S8s7lbj59(dWx!-y$ zYcz(5&c4}awkW^Svk^Y!WTVc z3Pnmoe*N7)2vf&98X1AwC4yOyQ1_5adCR)6(V`Br^c!2MaX(6pVEDAS2E|_#vWx zwlOl2?r8IFyVcC!uo&>P0z)}I*Eg`>89gkKCDw#$g!wkw8p6T1*q=Ivs+>+z;<8}) z0^d;W4kR3_ki-cHE#?s7atA*yn-bspIN6%_|MMlV$x~N>d+d*RXOI~uDUIJXrw=NN zcD+|^htU*sP2zLP47X6_r=W~9Iv*dhlS(o`pax&NJZjgZ0ainDs|dc>wXo9&VL~#< z6Hr5rh%NAt+VuuWhQc1=;^=^A%)m4o)9m;g)NvLYo{6fhW<-N%YqbY7ZHyD79k=Gn zAMzaaZ|2%?=wJ>>MsR;^&Y=oNVCcc_zQV9O^CwK4`fKz9+#2hDA}AOT6YR5?Uy|L} zYnqyUYhklvMZoTw2P+BvJwg=tHY~~N;8Zvon~lDKv352oYBlXGLqBEC)(2>@cnR`( zZLA%Q2-m0Np7Gt(TF{&Ktj|+XbLw$CKvTIU96#)^vcSioup-g5)=G2^ws%-rM{?%Y zjo8rLuS9B1s01A)C!536l1q2Jaz}6jht&$V?ev({RxON;DOV#qZC{QbZHlK&O=z235tI*|96Z%zcn6*(ATDE85TW^#E2*t`%;*7uEleGXEK)R zFt7aH_>ZU!AbMgq#Rv?F79`e9qeP-@2yT2+bDvpN1M!TKZ+k} zH4pk0Fm+p7Y{U3wuN6z+j2HGpm5|p{|Z{j>Cfq9t@C+^)>lj4OV^a-F8DZJ$?U0cH}5; zHz^T!b^9wT&RKtK%$UMInhiujuR&e&KUe^Sx@~Rhv?opcn4Z&y{`j{DlK{bhZvfaH+&lhUoA|6DhL9D9FOw`g$*j$?d;i!B z;0uCk+)8EC{A%Sad%P%XlL}P;;k1`{q0$4X5+9L8h>O}?YV+IAW<-x_bpJOO{`Vpm zHA&jL&HDX*bKtpfL=HAI3)ViPuuzGYGe3+18d6^EYW#{6 
z;-|ySMGX=rGx+K~I53;eGfZ?;ov+7NglB#)Mu1_T-k9IrW8dPtEOrOt#aubN;!BeY zb_w5L(8SvKG&`Jt6`R`Pu!NG(c^^kagnAp6e-W`ewlQx}e22{)QgoYMT~|CuR=m2n zYmCMvm+qPg6zT-@C#6m1h}k3=cb0^N2<$9kg|st1yk`4O3E5ezqs^+DxX$W4 zGMKC36)iGWE9*^si6}aRi(3zk2r;84wz|g<^CHVI7SnjLTWdJ~iRU_^vYtFHxsnV?H z&e^0iu^M0OdZxQH!w!CcEvcklApBer3;sOe<&MUjw$}bNNG`1X8-PtnPM{-krq*f#$a~eczX&tkuH=tD zFaVhY8wd#nbp*sy^~TovVVZXoZ6xCTb#O&)(Rql(y<*S78JF`QUxaZcxA;^*q9_H~ zB1J6MnHanL=8Ek}gafbmF^c;C40;FxH}h?xjJ5w?V6#m26H6$?oH`5>ebP!4&k`J4 zZYNyb^y}!^8TsCpJN;+cdMNCqF5@;wgu0Fy$F~F^8KZuKc?0VYsNIQA(vH5)EdNJ8 zouq-eyJ*CukT6l3=4yr`{Ep3+HgVdow2CL`RsX$3VSr%Dk<>`!yS{G!Kh~`igE|ej zW!)dO%DPyNXk%_XRTUuntUn9-g$aj;s-7~G4s&L8+}mO!qR!vlAhtsAFy7c zDWjTN)97ola-7c=`Apq~I@?9aY~gKcgZ9#-n8)%h-?=%u#@*%+u%9adh%Fz$1%bM?8z3V-?wFZ(r{YDUhI1lMHqfDwl z>~WNxgSl4Ly@$Dy64jmiBdN+l0F5M~LAy`s9aBVQxtr!|^## z$&_Btg)eE!pR#UPV~ej-O+2&Qc4#&l zL&CU13TQCU2!vp7Q7y5YmZQaaaHk_iQQ92fs^p48Wv0G365YffrV)?>Qi^=!b8~k> zMygp2-4Qc1jU_E`YwEkqvF-6mZJf&XDKu@Up3GsQuE8MA{wazKyB{6%5Iz^F0ejf@R>8u8+e3|rZNWapQi2-l;^QT|TJ|^%Nzuxh z8XYN55FAJKWBgH?=aNIr9uRrL*?#9#I1Q+!MPyN^lo!VM;z)3*tNMx3ruK_VWFz9h zvC>TCNMDQ6xIs>m?VRXmxfR(H2rIrNcZ4AdwdBd-c%=Qij5#5%M zO4j_!+ub_pDP!-S^C3%QL;RDTng>~rYU18C*txARH?+(i{<|4tO z565;F=+`DUJF3T<_ZD}Lh`+zRA8l@Z#}uo&v(M8L$1+8P!7jtN#BOG&v+tu*)!U)j zM>8ZZ-DAIpBh~S<8MA}HOd4ghJeDlkV;ZHeeS*19gEmL(d$nP!q)>Xuy8_(H?{UwaHFJ@C_GFJ%H^ zsXIT3j}mUmVHLXAMyD%&wQn@+0L$8oHD#FZDY6I$Vs0CAf^NBhF_pK0MB#7ecSUy- zJXE8^MwEGlECeiak|m~J|8RUp$$*l9e_|qseQBY~@Be7QPOv+6>r3z*rl(l^Zqj~O zlI2XIiqq8fc2eJJLD0e!#5U#~5vYJmZlg z2~T&|71d>Y+E<_H2{iGeq^%QGg@_D&4D8X@k568~nABLpdnXa`>5-zjx5DQdRpft2 z=p6@+e((Tsk6C^CL!s4@TteEjg$_zdLpJHfoa{Fv1#~t<9m)VKIPWZ*Vbg+ z;yXnGB$R>Gp)pF~9_uNXxFjRMJB~7pKa@z3x_qn|MT7qsKzZW86lEFe`_~141>Ch_ zqAzKM6gD0k+WRnmJoYx#&PMPF+b7257~)Up)5x_S zB3*QS5t#APiJ8Qq#_thf8n~ywl}!|6ltN0kEDa)98I=(W=~6-{z}?jH+Rq!&`3-EL zm4nmnfjbIL1NKWU}Wu7!zA1*_LttDe~!gT$JI!H^bhggq$^NcW=Nj9*F2;d%sN9)hh`#~-)(ouO0_EML(?1W8fY6f}!abl~zVg=5KPQmZ9>o{BNIRXP& z?hJN1l&|i~j6EVGsyq*_Nw_&tV6Njz9QU_xmRR>4QH}A*vq8njkwLZV4cT<#O3miK 
zFJe@<(`hi2+gkTtl7EX@YnQjsa|^SQU@F236DQVyTC;43RpbnkG;|!#>}pT!du@9r z2pvhZv|8;p9p>gmTS(6#-J?P^6?e@y284^1N7Ho=Jhi;^-%B#QF-R$~b&3d{1`1zK z(z$ih{&{^ku#7DHckoAy4O`Nk5})e#xZ}*5ix=|U?T#}OfXQB$m^HJwhYJf;MW;Dk+;cOqQ|MQ;vKxaai{O|X| zC%}l^12Eao?q=)(6kF2hsSdEATTB#lo zk{xF7HpleC_qEFB;2=mVVO6bw$c=b5zcWV(SY0awg7EP7|SYugKd1~d*~mz-?X&$vBcP6 zhwta|sX9Mx)xQXuW`p00f+uLF8HA@5_03kAG``Px^;G666JA;`SnWLyb3I{g`99>K z$H5bB;81?rJTWAu7Wz2pK2DHdH^rn-pSI~b0N>J4b3rsicwJW`@AXU(^+#H0H^c_J zuGHQ3aAVzm9dKKQ0-vO*F@503k-Nsqey;E6=-~C1aTql#<>Rquh8(&l>(l@iT7o$y z(xD(7De;iG0CYGQwWR#JqH4xv8dA~r2U|y4OjKb2^0vn(LPOKl7^)oGIY{;O;`3{ z`GV3v*^tmr)xh3QfU)`FnnoH(#3zJ_=(Iz@&_3Ce5;*3I%pUYW*BVV{_YFj>nF)INCpbMM`3 z7!Yav*MC8dLjGY-Q&J?>6q!)tl3j-g=`V6;(lb{LGm=)4Up-eW;RAHV(f`I;Q_}0% zz{LpXG!<&dp2jbe@GSkvU>Kbl!u1p1%ol^u7}|w>pd;Ezksy06R?k54=aZePKXPKp zLvOv*7JD;lqWC8k|E;Pb55QCRm4&syH?rn&(o;`g?6VRCTa=s$CO~|H z{=_!a9n;MQ7~gQwJZbX;4o}#ITX?aLy+|E#v+O(4y;$7dd~VK)xf{tV?}T072|P`j`IAs%Mdc7~^Rw3}$OKL2?D5;KM}!zW5$GLht2_|5r!U8x?|`#^X`52IqyF( zU!HmH=epOm)^AZ%z5ufC%W{rylpo18pN{Pxj^%>s7Rh}`(*!#n*km*X0cH;^lUW2Z>t8$jOyTb&UaMQ&s&SD`iPi-*5 zL@QDCS4EchJ6m*B>WFEmwZa{%+_yvNKXvO;W6_X)zj7n)EpN4>xAH;G^seY+5=MlS zFeYD`P=cFs4UdmLOIp*dZPfPQS#{UIPVUR&lAw4BQkPI`-E-XWRvdWQQWU32jc1-F z=1pB*ICQ|6aw8uQU}kHT8Fhjt$)KgeE1<=!xnI9HoqUJ;+2j60Ftka(`;sGo_ek*d%LIi#j*@FGch ziE2d-FTmrj^oz_Ri>fcnJ<4<&^X$SRurc+$V<{YenR20djJ5NT5Q&6OhF+bgyrlDm zxYGx#l5s8;Zvb;fd|Rn(oM;CkA*Lf_B`ogh`SrwXiX@&gq)CsT%2|qmp?l6YC7l}K z%0n4FB;Q69HqB;h9wkFSLiMiC@i&2r);Sk8j!zO`=oga0KELs4T{E{m+1oUP>y66d zHw%%y)VDvCd#%J)Zn*WhW`$^h2K9V8QG*dyD1$NA%bQ5ohjt zO}%h+@M-J?w-#W_rOMV8$!miCwQrPRh*f?4|kC%6sgLb%#rO?ICGhV_etfN zYm^omJ@mBt$1erspQ@I>c8>Bcbp?iy3*y4rVxPa*T2V+4n4kV(m1j`0hUxfmt2gwh zD2mkG-(_|Q>P{dEnLW0v7}^sd<*I_91E5Y7;^oj#TiUE_FQKTxT~Sr3yVi}V#4})% zuX^%NMBkV+%9ZC~%#ODg6U)ugU#70~3U|gx59J!X7JphMGVdp*`?7Rd#(S4k@*>M@ z)dEG!UHWtTNu{5X2jF7XRF~TV!+wOrCyP|9<4`Q{Wr|Y+E_}{Fl3qcLT51+d2L&X3}~2ctuPpJZoNl4R797LnOjl)T`sMt 
z4)n4mDRV}jDx~0}Cs1VD4MjeVLfmYxC{<6ZxUAQnh_~$SfQ2Vv)s@w+)o;YH$za>S?J1_VWl7N|cl63Nlh6i6N<^NX9FJDe`|IZPLJ~5{v zhxeA_!<1IcHpyXR<{MUbto#xI9cDbF_sx%sZVSkOLZtsB$HDbj+YI~U`ZWC%>bd5z z$s=f)f3GGi$+0ue?a-E&|2_I%jW2OM5Q1(n-ibQC{yB6{sP*DdalGSW_s5TVzyAJ= zuOxm-<82Jc(Aw{Dve`p+XlOobG8budZ*rtPRVud@r1A8pc_9UmzIK4~;69%H!at^6pxT9F&??7Em7%D@6ag(OvE2*;f)3n9Gm`;RfzevUJZ@N0t8lbdg zl=rKHGOfstX;qy~NdKNQDe?KcF}&}){k5O3URsD8g(7IneDXWU0wP(@n^_@G-h}B_ zC__fxNi)r}wd4*P|HGtxN2|p0UX$#hZI`8-NAhp0L_uJwjc#_*!kf<{`z0Ec!WHv- zes4S~q&Itp#5R>A!yZJHd@Nx=SMv>jX?J+x!)5Z3fZ)5m{6obCfdZKl@k0iJrjM|H z#?en-RgmUV@9bg`uu&)2U!x=~GKE(8 zM#}vIax@?31ea-HCk-GdVPtfdp1E)6;;Int;uMyXGAKba&e^s}>L4<68{!#DptzrQ z05ZuFT$+r~)LW>jjRx6~*E10&B_5KW79@9r$KH=w$z$ep`{1oW59NQtXQ}lQWqfWN zzW>!R|01d@U7r9Hew~n~DU%A5&oIv_a!gj_^f7jzBoO7X`N0N*I&{&N(Oxv zvvq5Li@3mtz2@B(Vs2u_BCHI9vgzLrG#~XuCKkER6_YZOb2NCTLc=IVRkdsmjR`|n zef1GXEXvJF4zOJ~LIu`(htPqbhC_Q&IsnCnatVAj7Rn0h+HH`84Y5SIf zl*XVW<{F3!7TV=sPK)gyO_1Zp@|tJd1=pE%;K35JjC4KT6kegSwdV=+_Y6eNxqDW- zZgrqzAYh@4R!w5M2GgtLnd8RG_X|b+0YS4(!7dKdOLZXCwxVW68^USj73Xu6bHF2g z8eXoAr^GJU3vv0BV=V431i!y1v{m-3;9`0C)|~=8E=9iF?5@-nhf>9U7@xQj8xTV@ z-3w`jxOXsKo;>09tF)PPNJy_nWFHv~)e3M$aT2GQb9(8(jv1d2rY+wd&lnY73VVxq zb9=9m2dsJo|21_R2x-cw=DxBat{}fe#;(3NOsY$gxJ7&oqWaPntA#5t8Ld%!cnBp~ zU5|F_w-2T_!?Vm3*x%wElMvT*k+$~>f+e{<7~In3RR0}76b(49{7XBL-U<%`i=^9C zmh5Gmb`3j_Xr4P)OSHoGL0Kg-z3h{fh5H7QXjm2PspNqoy_JjlpZ_p-Q8RLV(o2=<{Lg ztNg&~gqK+b8QUxMz;62WxVK^+tD+Mzy=USigNN(-&HJ8CDTj@PkzW_k5I`$?9p{Gq z?w#L0>&tJlnllQ%XH^|taHJ}^$u!BRMk$!Xou+BH3;{uSP$Yjqwe5(*pVJ;;-ZbZE zGGCh4n!h>5gqc+2#;F;Xsw;=rl%UU*^JVdLN{>C;q&mGVtd zPqJi@s4&Gbw$J^iIE1mzR4nr5>qzPHDg<}}N|p^`w`%n#>GssIcDT+Y^oB}xON8eh z!bs0nR_&SQGNE;yvKhvMmDIY2s&-`omO(WEIWnshJZ9wHsJh~dF|Q!#vQHWad+`YG zNT8uyQPzv~&X7>r?g4mEQL4eIUsKYRkDbS_z>v&6S#eVoTw5-y&8!;nI%pawcj+gx z4iK?IMXa3TW$C=g1IZbM6;32p#T14S*2y<*T^ET^)ZygGcK5k$M4%|3_BAAOPYkqM zmF2s~@*wM$<(XqG4HRJ~`VDKC+fJ1XuZ8Sp0&QL;h|@fI%~aqyO|be~WKhYlhMXto zVr`czixe%M=vK4JsHVlbpu!fOhyCH#&qgj1PS87k${%btd7bjgTT5$(^O-wC9duq% 
z`!;_*dZAM8{)Na>Xdu}kB`J=m`LXzu^CvuJ>gOsiiG%VSm2saOOfuZYY%1jeJYE@? z-ev#w(y`_Fgm;{T$!_@D1mJ8jAWrZEbHvA(2_?ND;I&eoX*Y_dS2Nw^d0XBK!rv;M z_c;9uLj!O@a19bG}u&R{QGgI(+cNV{0@i2vD9YdOO?y61x2^{eRm$ux0D@iWTqaf5J2vzf zj!OG+x~s0pl^T+$rTn(R8(ljkm&zdJo$BB}9Q!z9H?pAS($APdzd_{YN(59+U;?ODMgMoy^y5NjwFdoMd36 zK^!yoS`tnw*4C*~1~4fM{Ww9fh2Q$c9WR6L3b=0j{%5mtBKQBQh@AheB9i``3Ar~} z9xWC+WKT6)ct8vv_Fqj-e!t${R7@YU&q`fBy6GI34T?s4wOqUx!GmlIwl5r08D;Iq zI!G&o}#hn$$Bqz79FP5JW{eW?kZtlPr3SVClq~eNr_P7+ouI zMLcA;Zd>|DcTX;aCsOyu5YO|{iskf@y}SQef#5YT_jz2bFs&o$`Dmb=8L1z#;?f4W z5AU)5K^}~VX-)To+O0c{`b4M2;WYVf6OyLV-pkw7sm|@jH4j|Lxs9`sw5zpG0Dl32 ziOAXfowLXNMq-IQ@1rbOnXa*f^Nw+EtpnEK@M8N{2<7-Q$H-IPH2@o4Q`BTLw{Fi; zSHwXBD}+`Z=I>B0jdw7I3U-sCCZ}PRW3(Osw-QF&Pv0Pu?JhhnLxgivjp`szs?*==M#Har6$@fa@ISHfW*Q(^N|>-# zl`MpGjlA6_wXiqF;|1QA%37_y0dL{8RCEZK;?)&%Vyq}3MV|!q+rr(&TO?{1KyGZ=6jR-e3m=6mX4#w ziRDt5xBB|YK*hV#0!GbP&fIjhdx=ER3i!sR8xCiHC&C)A3!l3M=c2SPiNQWz*ky!i z5W>}d=Vw+)JtfIQuvVFuCw+e1UJG>G>lYkww&T;xtxWc-ttJH(WY(&>6c` zT&E2JIZSF?tTm(zPU+rBCw$-<2Nn?Ro>%g>IDBiN@P@kS-fJx!ci#ps{mZSJ5C8jJ z&_k>w*-45{Y>e-nW5ukystSJO0-AE05$n=7^}O+0O#dtCWy9Mudyno-AZ*a1p>@Ga z)Px?zBx>q+0%Q#i#jU9~jwZoDJ-+iXr}eOB^g$FWEGew=!~oi_dE6f(Ur`?zfJYjq z9ei<=LbFFuk*WMU^f!gxvbRa;APlli2qclrm_?QxK-)rDrX>X^AID{bk9V(g*{ zL27%7S_e{zZ;MSE+xjglI;Q&B9L*$-BzKZrtFGszvUw|5VgWpZJXC#^am zseS^ZSb+~1A4q=bAm3(DjXh2Vh%)!%eyMLldivN^c$Hs~TuyL%MDMobCoQ@guPN$a zl5W8nHSU$K`nFu4NlPB>_lHrB6<+?;h&|0aLl-`@78%J6atWBG6_TB1ix{$Yfqxz& z2Q4KIL3I1is`uArz}H%)`it`c8s+9cxtK{M&DjXyJ3?|dQxwt zTzx9xX)=BFS}icMok43dr~Lc`V{7ATzB197c|ojeN3OY~FD2-l2e)4&=0EBp{{N|q za{r|+lKyU69dUIdOhqKw$o#lou@QZ_JRBHy({y$ko<$|gFRR&lP<52Kyl5=C`0T_Y zNdbpt9~Wxm0=*q$PJ;BY9IFL>)n38ixJ+9&+2nQWXF2`1m#BvHg@cqX1^TX%#d7#7 zgzSupmSY#u598T6&+v$IeY;h$h$c|w5(q6}6XAZ!x8H~?8ku1bnO$m_4?GMV#aGDM z@`yu7V_ zuBY;SO?ek@~ybN$KldxLgBzub;s@1ApTiHg@y2XD8G!0*OdleQo z?*gDp2>M-?Z()ME@Y0X4`Xy3nFBP380AS!NjTR2QQ__s3qpKN zkC!g-brmrTp5Tk&i>DrpYLjT;PEX;c&LFutJp6pc@OpwqCkt0)#)DHx8`O*$?%6=j zjsJH$06exjN?1;rL%Q*8GJ}qe^5n@|q%z-@0K0N&JsQn`b%F)e#3LXL(wQRmWWwbh 
zD8q-(VLU6NG}Hq(WZmRh7cVq4_#=`239GUQqe=jnE+M0=zTYD^SbI!AsK=ea&~Y56 z$y>Wwm#@_e4&jn;x2qt?C!~2xpP=@M?3v7XXUz6{CvwY&+>#%5i7V-Rg|)zfkEmxf zX0SJGn~|};5Og6aIi#ZzdKjBgHLpjJbCAzY z2g3|EUrP=Wv`UIjl=N$~*ESzdvaEd}lMAWxqbnhy3elM0%ozgadT!c-#dJEH8ash!SNov;c2 z%6)9?AR4Qkl=aTU6ZY1wj=Wi8-ERZEW_@fq0u8fR7%{(zRR7|{DZ%INquUm zq924jLx20F4I>*2WDE(M6s>Jd97T%n=_ibYLYWm;tt#f@i+727pf}&VaKOszz@K)! z+KgP|ZbB&FTjGQrxG{WiO}fIU#+S>%bn4XD@!Sa-+QZ{685O`WZ5HOC1s{8n!mVhy z%=n)<^B#=4#(m7Z-|`9R6qbCo1vM>pUAOqy940C0y)8TKQ((q_=4V*Bw%*nj+f~=r zzn%V)Urt>5p5w~pTvl!BF6QK9c1d$`{aU{)UnR4tZ>!J{BGFHcUK$&cM4h<#8L=2- zd-0B6S(4v=3Kk4J&t^Ob@(IrJ-EKh7mXt|Xp(#T{N4`zSKydCdJ)4;%vqbrg&=Rmk z+*gkT;_mmOXo*_aGci4AJhlCx{kJYj#C$L_NA(YGS^ny(uC2?L{735po~Lz_!+S9h z|H!Sc^c(!8WsD)QecLimtG~;FRQwE&);F&1{H>oEo}haKV`tP{5G6!f9zgg&4%U=} zSt9>`Zs7X==iny)gc=-eKWcy+b!zK?&th?2o3C`U%BCA4RN&{ z^WhSi-o3T&a*!T_*({je-7Q~cBrV>*Z9PIqv`<5?{DluVft4)LN1<1KD-b52i<4%{ zv}+THY@HzXdbp)hIsgW!wKZF8SRx*)?}mGLk9^z?4P+@bkRJEseFn4_X;Q2EnNlpc zmmHV6LtJ47=Tf1IY0f`4l5upr?z$=XN;s@$g_(}aH>E~<%6IkC<;d6&RaG8}ZBB}M z=0&7uzmTqZ@GX0eiyGNL2jE)1t!A!`>Hs!e-c6-ie{T9R)T)hi7jM%48F#^HRa6?$ z(31zI#XdBR{lR|tjVH~kt=Cg^Y}a@s>p3S?2gQMYCmDqadc{HoJSS=e`}hd)(@3TE z1&W=TU*W~gStnsIJoohxs-Lpkt*RvZ*s}3?JMx$^Ldhlc+sD$GoPoG>-XI0`WN$4FR(&P9EhT zGbm!Bvn>0$=7<1dK}%^c_VnXNHXiCJO&{l%E$c5Tcst~MnRksUo;l$47;vmBbb1R1 z3rioD|--}s~rvcfO> z%GiQlPKxR?3Uv4#a!HGUg@JvhS-(nD7Kw#n% zOvwq?yvF2yy$0u1zJL9%Q66tR3!itdc89r7ful=hZhCIk1$2ZCf&3G}6)bk7hgD>2 z1%X{*7V!&jQmUu80VFEk@(n4;X=R4IUMJrY+;P-X7Qyp!2Z}p0ud5f{ls^;+a|mJF zpNxlTpC1T0=hZg<+{~UgG+bz9@JJ2=PRc#T_ghDP(LH!hqLwS5Xdn5Of;%hkbmc?E zVofl8N#!_tkW3R=7A;ZSSlA_;HfLv`QrZjkwbCUDy#BDI%|2S$Y}#}RckZ17V0`V< zgRwi7FejIs$(>c`HqI4FGx7RVd*zwdPsT%n4}L^ zRZ9aN#ZaJ@KTtG3Uqyv(QO(}zC=|&$jG1QGYRm2{xp)JBU1XHF!ZwE-NEBwW`ybS8 z&DRrnxsm7ULn)5alt&|KUMJF-452NKK`2~DDxOamR6ksU?-eh?Z0~;|?|*h$9o_#v zlVXAlQ77P1tmn_0P5J5!I0UK4XOi|=19DdVQO|mHb_}@#=zik@a^ju`GmC)+R&NZ) zD%(@X($Y5vETZ?Y)+E;J3wGmHOU}zvq>27%3Ax%(w!5MT>KElZ;)_yC<2$H+ zf-mKl=R3sZbN@%75m-mbA6O@S+U>)m_weueos*Q+e>{$l)KB(O_BuCj_XUq`0)~?R 
z*Z7F`L6#Ey6b)4T_ri&KEnxInIm>d5{NBSQWV01+*s?Mg`G+?}im`h9 z2KMG@yvr`;WGjI3sCCO?E>M90=47S`DOof_-ub&RR$o$t${OTcO-YS3QpBZU#KEAP zNMQ)ECt}|yrA$D>k3E>3Um|B-^{^fm*L6Udz>Vqp`3J2ETd^lE%dFfO`FZ9QJ3RAP zfiUHyYyD@Y$;PieVQ%zfuqS8#z31lLSo)y3M%Y@!ECU`yyLLD^$?~a>%!Y!%yhT+l zb`OUyY=A&MIzzL0h!6WOUP+XLBCGKWDYi(w!Dp6wMDIctvxGkv<0BC?YYpg~yWDYe zK4+DH%2g7)-%g^@pWV@5B9E;NCh>xX2q643@MF8Kc5Cv!OAO>^#O?FXcd$=bK3r+u zL%EiDtbe07^MOXBazmg7%l&(0e9Xrc%Xl$n>`blFdroY$IewDC`nQ~wg8CN%=PLb* zg&heqSOZ~#HWDwWl}nT_t+M3geT?Wk?d1bm1N;i|*yB&fX?<3l{?Wx@3WSCpuNo*X zJ^NZ$#FJGyvB^^Tr%XY08%hoMMbp5nCeHi4`K_7OXyIE!-&SfXmkz9TJ|RHf!kD5L6_RtL;=; z5jS+pVCakPJ9dpn5sB5%sRRxQ4y-nn1K_Pq(>i zp23x;!2i_<{>ZX0%EC2QIIrwUg*PzV3eM; zHu=0XI(=_AfBii)CcdFnQe3o3X$T&7^&W`I_b@xK4KDaKNP~5f-t11XM-Qs;LI9{@ z>m2^@s@5!qJ{rkNF(+T;aW6*rBc}Cd1ST-!1gVWgx2;v5Bchb+G4otV(dEQXCp zN@t{<8h_XC^yTMgu#PN|=<6M*qFl<>Xk8f1fzePkEb2!4*R9U3OQz^AS*h5ooA?S+ zjXfPn1XrXA7m4R!!32Aij0}bUp%&F0_qEk^CvSwQNbO64vaNWNcN`K+wU{Q*QYRCk zSy{etCGw0;5o;os;-hnk+h9}cG!N~;l<8zyz=F4CP&rLRNS>FD<0;6BW`rrs!&W+2 z)#xB#awf1l8x`N!zG5Wn;uErYG~c=uG^m`9f3Acc^S^9KHGZC6FNxf~@5$Sg=-XLj z;iHsKu1lxwMN~aGG|Ul?Cl!kq4Ex_B;a!b?r*X$*e8m9TaM-t#G_CTcY7-7=aQt+O zuw|}Hde$ZiRD*DDcJj*j#NBsG&nz4vV@#liDlTxO)Egq=xC+-{T8$jIP^;y4&_Gd1xOf_qN zO0osNJn>TRr2`zTzx`zi6`qlxa7{M~+H{za3mp3yenUx^irw){uJqeNe4?zz`#sUF z>7QHkOW`3f_3Q6Zu$&VCB-jQ|L{#*kpV776Y9R5t2~b>MrnF|nlj_=WZ6oQn_PKJW zNxg|NXaU1#(JiX7x_aSceLyT(EYiE>JuZe>d3Ke=lu z^ccGo#PVNqsXa!Od5lawKW+bhI0ebyf=)bBh)O-QVHNWnON^|lqDQxCLp6mdX~aFL zeUtkUdTGSbhlaXO%^meLiwF*DsjMe(eMt442Tgk*NV$wl* zSAQeea`IHIp>=vPir_F{nn2tU%^lt+c9P6;Py5jKwAAM9p-t?kZNV@r(x?vfpLNK@ z18p%1^sY>cv`VUb3m!&h{cQty%L~Nfho$T}{w{LxrLk7ZTd9IXMzp2q+BdzR$MvJ_ zSUTyKzTL=yEjjo%3euZI1P1Ytufr?V^hwm>y6v3yuG>-&GM1X^@)Nr^B<|SPoZ9`? z(vy28MF5X94LrbPkE9`e1!qklYgeEM3#^qLB$Y5|UCtJ-%Yy4K!uU=A?2xg?_j0tH zAJ)N80Mrg}lm5IX?A>Lhv8E7$4a=Xb03Oc@<1OO)KP|`Rv5&UU)0T$Oq_djO;Ihc? 
z&z+1`1!Ipz=0ELjn18Elf*+iO!cfJcdcg^(QT;P2tc@FMv&U=JF1uwL4BX z_dAN?v$Y7foFq;pAhJ(K7LkC`ZKyTTtEcM)w)pR4Xqc0+geRh5WIz=S;U!I5yF!QC zT|Fffv1gW!9}j$Go`Bvp)WFay9O-(Wrb50#@QgnOH?1 zQA2H6Tm}wHH1+1K|7eOK*NpO)s&k_5BShoed#TF6HQEUPl^~}K#0ocIlUHVT1P5_U zt~wmQ+ucF{C+Dlf2%9RXa8pWytd)_du`P}k{-~>quzS&=?@z-_R$ctgam#iP&(QQ7 zgI>T?o9yrAr#ypurI90TUt52Yh^{Yg)FE}ZSy1@FRQ4tlCMH#43XYU;LyND=XM`4i z{q-M4rL>L>3CU@Vn2_G=lp8SPzG=N@wUzkE2XWP1-tFpMS$E&USJyOoXT&Lvu66pV-O@QKo z7!h;=t8oB3#L)ElH$VJsJgA><cC$~mzQJ7z(l*d>9?H1y#NER3J@>I9vLxjHaQby4ZNR}L z(97@oRmTuzbr){MTKp4kX^?}62tzjWM&*OF7|SQ!D)qp?AP|7OkmaAL&gg1x5~o^C zdY|wP)>h&I_;S&_bIS$Zv5EF#p455YdOSq5-zA#WWmJ5`}<*q+oIAUn_^8+@#7#LMaN)!xyg{8t$~uVy?9IO!kJ7v98>2 ze*DL2-gaDU53MB_n9-Wg37E?`;MM}$Hs~UmJI+4^D?q^;TC|q02pt&)Q?!D1^}J8k zltwy}O?s`K1P$>v%?4c}ZEeT0Q3)8LNYC%iuCBLdoEkDGFs(s!XDt?K`Az5j0nXz+_4 zit%xA;hMbT(QmYAWcPC9K{|w{k+Buq#Py5ss|p#ycS$?XtoOaRy~nm(8Z3{Anrxa+ z(%TMQoA1Os6It#kASsr3V>8N3g!7WjXD=(=>tV9na}~|bJ2oaPG=e@qzbljk?#B~L z@Ny?AAY!~Ncf*_(U8$#MoS2oL46C8t?SwcP5qY>SM_O9|?e^zYi8LN2#}mxhB7H%# z=H~5x`lg}2+J2w2&)okk&@^YM8of@CGdpKVMDl(qmEM_{#TkT+Uf(~OLQA)$J)b*| zM!;c1WwyGF1tQkCc@h3a(YYo&H=x=nIMd!=fOOP`vauM~3%N!X<3>}jCO}_6w?bjA z2<;Z3TAC`~K08oy2CrBrs{w)gc^decY=O6rZJHoHd64|q$ z`d(f0_%F@6(4Tf;y7^@V8`D2aK(3mLm_08;#xKUQkI}HBhSI zR6Or@>@)bSEFhswNqabEj*%=Y!SC)Tn4qI~KU&U*UEtqUsR40RdUN^5Y13tGyE;ZQ zbRMr#`!|^!)1@6-)|_*WVGt)3U7C(W8Wsk|t0p#4Ql7=BW{lpvN3%N! z9u^NWxXvf)a!C_8|FRvNEG>jktfwtc2nf5TXK_foz6fFcbGi1vHK;>)_ z58XBf4p+R+UYJ^35)W=iHxn4c_~zAp!&iUvn*F_rvW%O3CEsLK+I-cqFeP;;t|3@h z#=E=M2$Zy$Q-D{(Xn;fZ!=Ub>Gj&_b)Rk!-ZCS>(>&@#pk#PoSp*ViKTaY)jJ5#Us zogh!dW<MHkrM)c7Q(2|Tpkpr>Rk7cy4a0ou9 z4(81&<-H?{h}g~Xx~WpvtjucEQI?W&x@CCtrwG|XLK{A;SO0Ff04}#z?wiY5L?oQp zr}q<&B{1kfm-|+K=%t8-_xaFN+NdW_)cu1NfGTy3A~Qq5JP~az1a%gb8!=L4d_!Ip1!2}L?%OVo^;in=AaF=Bk11P z{ybqeY77KDod{U6WVszC?paGfgzh!x5zdC@+i7SAY|F>fF2=2HCI#jjyo1X4YCGTQ z%0UgHS;A94jedpAQ<4r2*mhN~zcs_G0yyF1}(5Std2;`Cy~OHgTgcN? 
zBQpZy8*sf};QFA=iBslP!7GJdDOz%WSn*~(EuLc$Og z%M92jiyNSDABo}n)V#f~ETTnwP$)x#KLB0ZSIyS=E$_jxx_x?V(W>Ee-&S@Z)}LRm z3hAE$}`iXeDo>joO1%==wq zZ{2r8Q6()f-rE?=Z8!*+^TBV#GzEpM4FJZb8g8DK-{OukdpOc~r*v#`FCWeXWLdXA z44xm;!XON?H|h@SX48 zZTb^Ym{kJ(019d4fkY#^kJ2j=p$&w{Y{E>p>vvr@A0%{D$$c;^)_etg)Cd;i63)a) zgO>mSTzp>)#KG(C1RJWV|9BHPM0CVIUtlrGlfh*zwuD-T-G)BLWvfG1Bp-Dg{-NZP zIo^BanXmdfc_CtO)RdehS{Le73wE7f<8h2_yfvcbqa9S5$lnT6{X7FRCcsrBwM*I{ zIt;=S(_rzd4lCaS&W zF@DQwrEjY#G#G7zg}^zYVQ|zesG8TvaYRES-bZ-s-;STwP*7ZwgG>{MwHqaB&Q;FDD!3FD^ zlQQ_F!*9!Sb!(j`(3O)twYZlc>=6!j92bN`SzUqPv+0=zlgwxmPT|RX#uJI-!oK8L zeG~F`#a9?Jt27c;(itOj`KBHgQf|6-l+zzzsGfYh7V6~4FN_hC(|pdW3V4Y<1oLOz zi9J}a@Ki(N(Kzrb?#Jn=YH381oOtfr73Dmonvk`euC1U6GPn+d=_i0{?;2hq%Ly5o zaWT9Omi3~MY+B22gu5bIdnNbxyC1&}9XyWadS;JzixoWg+F~ z-*|BV_znGQPc=VUX}11#c)@I3-3ll9Q;`HSTA&Fre_p?RUYV#+2dL#;%_87;WFMsB z$zk{Bu!di@$@j2YQLpwHtO@TP(dv2xz!_zoXFv<}TQZNdPM1Ar2jET=)HZLdwxBR1;Pd{xjOuQInaX=QU`Tgr=7<0>Fz6ZW(=R?KH zoUCAsJPp?6ZB1L^(1960|NFB5EwNAcft_)z9UYh~1M=OWo0Ho20}@^=ZNsN)+Sfs7 zWLz}=`l-O^RWM8H%pnbsRPiwc>?= z%#R-X0@8d!ZoZgiWHhjG(@aE z^0qF)YVWX`}�?)Tr5Yr{&Q7ph&uOx+k9pG|<(RN*H3)IN75NMH7?Fw!(~ z3@*xpZ}HiQhe@Brz#$M1YA0Ds4&}U~qAljt%U_6kZP_a~gK%mL=NfrEXW>s`NMh=z)cN0&YF~#OdUc)8L=2ks`BloR91eBsiZQs3?$GznJ>;~bGGr3 zI{Z;X^uq?__KP%k5wh))2zpbVME^eAfKa}MT;z!L89>?!Y`aP5xHqSP%9GAAHt?L; zN*U-pUYS_^U5m3OJq-HP!Tb*yn-^)$5Nbm<>(AvZndS|q*9E%zlA$7Y6}mrVR8Ho&#PZfaOxL<67E|kOCCJlIg?hF7)XKPg-n;w&Gh}Vj2{1UV+ujG4pZI3TxcC-tj}{?7 zj%4H#)X!RRDt}BUgg=#U7`l<`+r=7UxT?%`Sz9YDIqgGsf5VQ5X8%s!)@oXMHQ=mq zAG{K;XDs?g0ZI9AYI_rd-JpjeD>#e>~UOncRmLB{QG;EVi5x`b+k99d=}%#mvPgq^0Jl4W2`Atc?3~S z6)jR%t%IcRn&dp17Dy{s>uDbUvJY}s7fD>0SI_mlH9mT~k4b$*>Ojosl3DA%_sz)B zQ1D$~MI$9CURrS4xfCBFdL!JDud<~$J)@8AIe_3KDDqdF_c21deoPCs?_^hcQRORW z&4jRkrzfZp6YeqYf>s1V-*Sl}#PTru)+%0fw6Zc#)ZsUoIKa3wYDV#=d@7Fn8P2>4 z^FA(65_7La1{-rk?k)&iy~?P~+oqV}N?l&6n=7?qE-&W2$({OO&s1R%ns2L8sG(NQ zRC<_rrG8e!&@AB;D?*>p9SNaKUP4aWI}PTO4)XC>)P{d0Rtrr*kWhUyl1_q?r80|8 z*#8pP?$SfcOX=!ZCv}@HV9#_1C)HgQV@G^9hkgl%mYitQgu^agPi=M!>-vH%n=+4z 
z6E;n2X)1u`Z}T?0V|j?d!yR z_s2*x|J4rn!d&4>T_rGOdwCmCml2`pi7bIqgQ25n58IMpn~Hky0drs!9_Dbxw* zE|1v@NcOSPhis@DRjo31xfZBDwhnh-u6aH{#74KHkNxu3KN<=Vq$XGa-*ARj7yy@U z?SBTr-e-2?_?09L^_5?`QG4FPZhi5VS7ju4#*7lA*I_e!tpTv*`;{W3-=#?<@|Hm| z$^^Qe1S=#2@Erp7Te{YNLz!joqEq>DmW@9cLQF%CSocmnh+51JtO7w#O!*{ajadp5 z!miZ)1BtJklUG90=A74R?K|{q+Q1(!WuGNABt;#AHM+wX$EzPBt1p-7ZL8Y*mkryv z^GatP2e4zpHcFpp{c^{dt#AeBnLo*Xk3&ed+1hx2T{NKEyqWF@D%B}R1hP$*g+&PY z(ktO^)W|(|AYUr`ccTNV~L-7|!jilP)-#(wNk- zHWm;(>=p!5bJVzxm{t1-G^y_8kMw4@w!GNHr(ryp=#nVt7%65m$%uCztU6O4R32n` z8cIC|LL&htBS)kiEy!7oKV~@hRzr$rngAi@-<5gYdtbe4?agBv+5nGR!oRhe4HoN( z=rG84o;i^B^wk(cKgd|wqQ2_sXZ00u#zwYI6`V35!U2{LyVbAsJz%+pPCTF7UOWi z7qX6r+J};Yw2QrKn3GY%hnBklYeI6ttfA;?2zAlhbkTOW`klfsX+jbt^O>fTwn?2I z)B=8>;v__`EH))dMWooTD>Pir*UvQbQE8;RtpB^X(wm^kIAkTrmy%Zl01yyia)}@4Y?>RmZq~TcGAIXQ@!Zn)sC`GmK7u<;C_y zCeW}+rcAft>BzevlT1Cm5S^AEyFrLcmj%&>F4y@JN9LNOkYnnL$J=Ysx#r8C+;2Dv##PT%P*k4k zy2i3MiYK;smqx6n5N?eV`jrq=)WUnQ1oJu9i_ipIpkPWpJ2m$!e)U!;WM}U_G=;P0 zW47X#d*E`yOJ&ThSEH|dW0z(7eb_F!#qLEkY1OA>HXk;pBfq_GtxXCptsgq(%}F}? 
zBT2v#MGH>7>~v#Ly5RpE-KCcw#T>vwy1Jm1^eta4mWO{$=I}rWTneCAK3OQ(ae=$k zx61gf2OkIea06WC4#pCZvDEv1q9N1B*vgq(?+b;G&9nnMPZSpu={O$q6e1t;4k`{hgNW#<8{OfsaTu_8y+_T74o!v`uJt^bFqw+w6Y zkNf^fnII(%!bDUWq(!wSM$bHgBN6&sAUd$;J$~5@AHBVOSL% z&xBYYj00bV%E;}sy@5U~qOMmwaU*QCG1(YDa5`6TT@=g9l+Ey`-0wUibEj3`g9@f; z{9!6eHrMhrU3{XW=ACRm@gx>a=*zENTNHs|0n$6{Yz$B)%hm1k;Uef znrt;~(+Ml6Dc4NqmR3DkI^fYj6rD44z%gf2&p1gTcxW4^7 zR8qDfJUZ*9+suX$DVp}-fU{sbHY8p0OLPP?M4$Z$w;mT*#+|;9`gq0WEuiAvGsD0U zwZr#p48_KPk(h+GV*60@06oVw{6#CwGk>bz_@xzlpi zB_baTZPB=1>#)`+`AH5)cWc}kV%%3e4Z(lZi%IFyIZnFio__c&sXYyuCuJjaDU#3i zqg!@Ks9Nj|HdSPOIhHH-!q_2v;dx{8I1u`bIJ z&nQto1zX0;7cKhl$8sd(61Bzz;SiHiUcv4ms4#oxYo;_1V2%~_mMi-8vH%m_+41w+ zx!S|*!7csIY|AI9Q~Y+%Eya0LjA)Z^5>H=&gA_JnLvhorC6adbt{ zC_f!Lo>leWClG5<1iZif?(&8#tSA+_1KA<3Ld2|Uyw4M4Uco0x14t$c$;9$GcsTq< zN7(4_roeG92a9unJ=L|N3y!F#Y`eS07O*~Oa^H6@V z9D9lk?bvmhTKRVk%5^)$Pt=o6-9A>fg?;)#b8Ln1ZBh^FxY?=9uvzfQZ$T-o34e}k z%{%UOnfauY$zR?vfy|Q6|2zoxCH!2_!Oc02leEM2c10-0lZ9hRq;T&o&I!QybQx42 zCb?%~Na6(j-om8iA|b4@oET2EpKfgKIPm;8vVVWlsFNSOBn#xS$|VPxv(aca$k=a_ zhcaxG8BH}qtZ5m_p+770vIps{$)hQ(=?h#}mD>xgif~Vpwv{YM z2sUn@Pl3UKd-e6k%Xn5=3e_hG?H+V^Ep;HBZ=qQ&mon&oXp>K!Cs(Ex)(=${n@+pR zue~dCUv1pxTBkcL?eHoWt=rg@hlk8|m+lgtm}sV?b(W=CQ-ZS*jVR3G!%E=c3&e*^ zo{)x8k!`a&^NCQUi`=%W(NE4$BL@YRCY;qFkMJMEZyCpuD)Z6B4my3cm)cnHZw)9O zlft8;`f`&>6k2NN#odtPyWMAZ6oIh%kH((|y|BjR+4kmGJvM<{88#UZF1lOq|2TL9a4eEn9VR{w!` z_S|Pn3mOQe7#cJYOT9aFCOtU@mLx7rqau7`EVjzCGRn6PeDp9gMTEs4l>LKQpD9mDy?Vx^G_K!$RgzE1HRN-2U~2hrF7i+W zyAX?~weMqW-BjSMtMuQCJ4(%PzHCyW;dN{!o2L>seT@MSvPzLT|BH!q5j^qcv}{)z z?vK6O2HyT^zao9u;lGLr(JF*oVa(+AxTL;GFhDF}GDFjvtPej7L0hh;5ifqBQAZRA zQ<&H65@>qN5=nFmG%Ru{=Kx=~hkwW4B60hwD)L)1+b5!jz>1{lPf`lEG@S^<*VIz8 zS3sJ74zsGkg5K;SKK?(}mn|TDuJfkCdG5;P!m*sx)YEjc2hiPxnr-3Kc<+{%cW=%# zM#2&Jn*xv)+y&U3$Buc`-0a*USu~MXAPywIl8uv5|_kR_E@EDKDEaZtE%G5>UnKXT6hs zzDun<@_jHW*?!}ChZ(z8vC{9^Q9SJ7b??YJYFn8ajUDTWWjsx{T)e`*GH_xUC79PG zp{t(onUI+f>VY|0PExP&s!yKkf%~SV0iXK2zR0X7w}+_8Kh#M2FS2ptInxP1_hxz_Od22v7$AcLQk5yyklB&)PBd*g>`G(0Gg75L?9JA|ZH{Ct22; 
z!}m-z5{8ZAVdTpEoAnYeRkBG(nP^A);=*j+E=cX}Q&H=*c$!!C%K;^jRatEbXRt=% zKca61e>KLZ5=*j~=K~DyIEfx)1A;}W7mH(*E-%DGSCyOpDFclLjxNz-!=lK?X{sd# zFfCu{_6gh@BG^oHN z9-hHL7=u3iOpKi!IBT!@OgJ-w_-9L7=@`Z8q5bwCW&p6MUM)wlL= zL1JvCwWR|2t|6wi1);ivm9DtnTH{rU7TiA?OLWhBR%cp1vkZ?{5t3J0E7K=IuIqD= z7N7G_kuUhfvRctSLoOED6$DN4y%55!@sDlQOo^CCweaRQ&X1Z^>vn1hlP`X|mNPZp zIDCq@RQvZ!?E$9((Jt-g_{RM}m@>k6-C~6*z0$xE>_welLOg`8C(ajmVnK93R*;`r zLLfS-3DaeNTWS3qvK#lMiS36zs*bM$Pn{DT%&%u0Uh7@$Kpy$Jiup8=6h&bDz=)gwu%Qn<3vy9@xXY zpQgvzj=|oP5_b~|=t4KP+qubts~5&M){9UaPpY{vy>T_kWSeiVv!@#6HOf|Czn}jW zfCoBN*!mOyrNk>;0lHe*@z6eY?OLGp_C@p*R?z%3>D4!W)0!mB(y7##TjPzFyvi}h zOGnV@w(3INxOlni_whNKq4ik_By2{B*Vbu=?UK z;@ZP+8;S45IZzlMHD+N{;9=x{^at{gRF;G)JBOj)GfVlzuJR!AzXW!MCgolt72!7m zJ$n!Nx4Yz+3V-;Ipy8)z!7a&wzjquR>Tr*uu{1dad-SNt$|q8Ziz-CjLxb}j2%Ye7 z!&Gl??zMXNxYcDH;^+Wfa?3$;**CoN6+s5XcRYKK84#SjwZN6-T?<)JN2CZe?vkT3 zuXgs^30_9SVdsUI4WWj5rha3YkZuv$HWdeIE(Py2|NHZtD+K?V3OiWCpy#SjT}Gw{ zL6OWoa*7!Q__Q^W$tJShSGLw11?fFe7q+X?J&NMxAn&wqYMInS8bFU}>g9NIM~X(? z5WIW6sQiCK(@oIB>Ao>B1hpUYn|JEVJuep*%X{s4Lwa_I?{s|lbSN8!+IyNPK zOp#ueuLnkCsQ_56ClV!FHj@yqb|2|4Y36|WI;>4v9lgXKu8!shf*MrBoNLdQFs1hU z?(;T0rywmhQqOS%bK$yEck@~cvkVm5LywJ3$=|LPj;naMgjdvbx>|V!X{2H+hLgzv z#rs2JkBVjom(>^X9%plhmRD4lX*Vq>1bvvZPp>1=qWPPJ&Y4+xKIsv{di zeR2?M&a5DoJt$T>f8&0ojW;m(N=v{I#7QZ?nrK*L=E1LYT|)Z9f()$fO%Ks~omJCe zW2nP~qwVQh$^0Hf7d~r?ci10FFlgP#$4BfkO%;1tU^H8yrD#VAO`UxtcktNxvtaeF zY?+Bfjm&E-Hvf-S3kq;;xD{SEKp06LfuYcn7Yf7zjnY%>bxHpuym}(DgYFU41h!?_ z0bBj2OT!os!NJelNXWWCW&12V>6lfk7WL7kAQRwowLCx<`dbq6agT6L#l4(Rr%ERK$l6c+P!g-Qql zet@<1{`5!G;zP+KH?*q5V|7q*5zE%iJ(PNZn#Wy$(fQjmy&|9HNqYE=L8* zBsOLFB$v&2SRo51v4PW_^jj^i_+53wo4I0Mx%IyX`rBO8EXULs(l6DLk)wQG$`B^9 zJd~W|JP8!}!p5$dOLqQR8?==13pb@^z}cKCffe(b**Y?Ul;`5nekTDn&FZnOL`f-c zIXT^ysYSUv6M<*~;*VBsj1_wke>Ary$0r4mWj7&V`d?8km3myTqt>;Da+|6Wlq)J^)(-N{dzkndICO2y6B&(1@)qq12^R6hj3$t1`} zEZE&YY?|7W9DS4h`^BJDV?Mr*MyAqodbvk=$zWLLQcJn*?KAshkddv2@^y@+EElAC zAol~7*A^mG#PQuOcyGfe0}3;`;e(;Fo;r_!!vt493O*-MO|6bt)ukEA@JKKRMx@v* 
zKOUUbSYow9u%8anq?pdUGn)8ykSamXO6fEgGf+|C!Ejg~AW>=;>M6v;Cg$oPLRGn@ zR_5&hCu}{*UlBuocl3C~Gvml_D&JZ?8$S0Sy<~C$o8-A^U3s9o40_1;0n!jFH5gQ4 z%b1DGM9rJQsfnZZ-SVd!B`V}XsLD(DJ|MGh2T>rWH z-PA=WIp(+23I)49a~&XGQx_ZSz21nj$aJL|#vvhi%ZvL;lKR)c`1uaouP;E@dI4VA z72i;t8Iq}=I#r%wL*e<5*fqo5B%EW6%}Rf;CRpy`3AJ4EI$G``I9PjjiL+xL!8)%0_R}#qL#ZJq|2CyU+no{C z?eTtE2(T*qINiw&A3^}FF5PZ5u{ zQX_U_)7mBHAgHt5br(V4Y75Ho#Cu&1i2B-on&51-VoU<~PLYS8AWBl`)|wDIMz7q8 zIJ!9w2LTx$nf}OhO?q_Jb4tNrmL*JmE^iwbyZ}4$I?OvKT@}qb7&<Zy#^G{lebkkS8epiDawb^ z;dH2oT`p`gfan#D&7!+|)V{2m2Oj?3^73>no2%C^aJZ>ZB_%P=;Vw@bUXV3K)K#!oR$P7^i*z5xbd~pq zZT|7JJp#liY{u`)e~s(!w<(Eco{;RmfMeWH&Oc1X7a;)^T9`?tWM3oI%r*2mlb4l3 z_!VF%f)t-Lm9)u^qg1k77n1Y^#{H{M(8(5Qo$GpbP0iyXZ?31YgaP5w?X?u5WMLB1 zv^_a};jZ_fcK@CvU7C=?>f2Bz_5_02=WGmuuJYQafK68V{I|^6fMw|G=HWNjAt1ij z@gi5?kvE`8QI}Wn!ecat6jep;V+)8y^IzG zaknXEHNi(*;bApw(~CVmMuLWz%4CP*DeAR{&Dff9{EY#nx(WRgiKakihHJHWdHVTgXSNBb&+`;R7#+$tAZ} z1a}|`e{liiFIXLu>db7(wt|9tzw(gfb+NHN(yIK+ft}cae~2Gam@J@*z6_xLyv0;Q zKnjMK%yKVgwLG6$CRy_MumEut(lz4DaHyoo@{# z(5anw*kT?>`mF~6jZ2*YXvWLa)&r&ke45~16Ps3_iu|5VD0+NuxTE8fV3gs$oGvi} zRBb^XpfG5yCU-y*B}|MKo|Ad@TCc|D$J4rpwv4}NicYGJv^1``Lh}pd>aBpi1 zvC$jw?|47p;{1N~Iy$&)_zjn>uPEkm57{AI#a)TKyS+vz-=8(L=}p3a!sj>oRdG>zFRiTTKCbZdCH*)khwh;{ZcN17 z#^va$%~il@V8(9N-x?H{x!TOE+e^?NSBQi$23_{%u&q?!HWU(hpWOlW$|) zMySK~ocatW@5mIwE!IObfjpkFJ3=Ck1kd4e;nhP z|Lwe3`RcQo(Ybfz*Q)w3(Z`J&8w^X@1GpoA+g;s*Z-Fz%`Qm1SZwb>NL2sMhXj$*~ zd1(B0nGA6mr{d9G6+%Gyb&kKtbUlT3(XbmHHzPn8re|}O=>1pteP)pD9Q{S8vOVP{#4R(;H+t zD2664X2AG#Ze+d8<}Wk`+i2(A-_}4;3Q}BYjTStXTYvlBrDtwgYfD6NRzO+<31OE) zzF)@`WNOK^tRbZR9Yf?4g^C%4Zk$&6^Do6~O`!8^fb!xm0B6mMpV^3*mw@ZXGw~BZ zBAn$nrD2-1SfM4jJV0#>nS0mPG*ycSi}Yol0bvmV7@a z7#}pnQ9y4jjvZz8LZQs)`_%HHb^2{S%10!Wc{NLww# z(|3PzkbN}u*!@||2ncmmi9iy&GG^+{DE*TH)He#yTf)RBR9(pkzPW1jqJ#Hdmi#-OSNJtHWYx`gpxOy zfD^Fn97442ZXmd*E1Jw9mm)KtO`ep8)$L30Zat82>#5T+P6D@bB~~`o3N^J(K`3Vi zfL>kWG-O+}HKb^Bf*+o`*j(kxQ+1K+n63pIC}=5&6C$eh(E}6fBub;2czTr~`7IiC zYuO}mlsL2b*Rr&`8`3E($B$#H@~(@9=1X}`#9ftgD;1Z$wxim!g{odZWsGhOlPxX@ 
z{LJwxBW6cfr?b^=iqXE)*em(X{E)aiKvt`j$@drqLyrsP&DL9UHwr9HkK8?y}BymYx^t|R^#5`K!k*? z%GibF@P@r*H0x2EOC!o4m2wk(HkNQ)A88XWe$ukdMZiE;NKfGfn_@{+*+ppaaxOkw zGwSw#FN5VWH)GHxAXE|IN*|QuPvh3Ps`R`?vNTC)9d{#n*s!>>^h!BX(-+Tm#ZJ=2 zC8%C<0JAQ7)iJv=C6qcF21q`5%R6{AIH^R*(`J43j>-?K(ITg<8y<_%Y@CSwHLkRC zh;gK0H+nHw#@6(A^HsF8Cl$!hWkNYC={GwW{7O)1Ex+C)QM~I@<|lhTW~C& zimS=e??x{urTsh$QD+Zr9tb>fzRSHk9KR23zZ9yh*%3$UUrwcUfTc>#8roZ}1(2OZ zUb~nSssehu(LXC-Lm;QN`TW*0OuqZ$Q0Nrtn_DI{SDr8a>e7hY|`Z%m9>!MBw_YzaPGXN?z9Y;z87;^Xvb zle#KzG^|_1wLm32vI4Ggfs@X!8@!De{|6%_j%1Gce~fzPKW)2ZCP>flf&|5vHZjjp z8i`Wbo8vLpE4-;ixFPa*{&@H@zg=P^Uq&BvCvvCE43-nskbOR2NoQB6Ytq_LxSn^TmSU^V^e^26I7uGOvOnVS z=5Fvy?bp@dIC5opR3F=FK{u1Mz=bLKmFgXWTP<}d>For>He81(Ps|6EQnJmjq<{+A zDPGtLAzddgHoWa>Eha183>@Y<*E?ucw%~*^ZuSwrCuZrs`3+nw);Tf0IfM?3CK_VN z;KCH%98m&IN|$h35W?In%Y9_bT_V+z(cK}O%oJG%s9<^8SQrkDvjsHw8J#VdP$lGZ zo|dahoRjk!HMc(AWNBrkb<6`l*>T$V7EJS86I))8szxn_x1bmgBU z;BloND6NCIZ!}1=dJbUt$FN!_#+OddJqd>h@NRZyJR8|G9>Tu-t~xM=vHvT^LT&PbmuuH)?WSf=N)*f?swO^|hK`34quEv3qYrR$`K5 zzxw0J^>N5B;~(pQ+-KYlQ9#2_A3`#CY~t#%WSx`Mz+u+}ZM%G$5)e%Hoc}F$@R-M6eSAMPdgr!-AC){NX zR%aVoF^jD96mJ863s0DoS63Cwuz#~q)u10z*&xc3A1>hrGpze2G>6sGS^m=VEZ$kx z6D4FX$a8<3E^Y-I;OXlX`9Z5zbxW+)a2lm zRMLF^gmk~657$W<)+V z8L?CCs;A)tMK_0@Q8k(?B=|j234L)lk!eA!VT|k_;f@;MFq}JWL5Mo9o;FI^q%n!x zE|D_T=Aa39>!YMVZZztp1BIFWer!={Hc#^AZoYJL{5mJ51eG$-BvBz-1;*{}C|9`( zJt6f+PJ~hWEXs@Dw-r^aughv1?Kj&U)zztnRJwmfbG5j6xFoKqpm~e8_D)()Z8Yd$ ztsI#hhcQ%{&_1g1s&9Gat~&vboh|NybH9Pj%MC|VR@>Ip83!swm-fBukwP!@6sk=3 zRW3EVA`*A+&m>J^NWBjO{zaejaMD>l<{2a~iaA&I$6!3{%WGCVZjWQ8K=xo0*C}#1 zBXnmCh4mt{%w+*ylO#U8jYrAj6KEfeVPaRk{z7&@(gm}=9qX2zO5s6`J~hNex2AYp zX)IluM-*y0xer8Go1S;WA>VRYhJqXd;a^Thy^?HyZx5367z(;-ki6t0lDvZ1V= z40xzAFu(FJcI?nts5{HGpRR9l+{a~F7YwQKLzb!(to*-JR^kuX=FRlzv@J?7RZWM=@bCT!3(1K!~fU zAlaR!ah|tLY&225o8}+C=Yf=jeTpT5cF>bEv;9(8WfdYQV=clWaLEKhjvized%lv%GKG?J?x zP)V{lY;#VTWz8=zT|H&1b#{x*;%}*Yq%Y=7IXVj^A$tQ6lS&g&gh_5mZ;Xz%Hs5IkQG>UOS3(x!yTmJYys;EyG}sLE z9i#N4$HV$@+u~uwJSJpQ6fc>urBW%}xEzxcb;*Waa@n4-8HJNJs*5v#MBu6_+dX%` 
zEJYg!dTI)b1M`#VNl~a@K!c82`*aiS+fwUu1(ixh3YjvC#nS7FPbt;casjU6YnKEFYVcw^Y>gmar zhJW0WFy#-+6!Yp3)}j%)0X6YG(5H=R91rF-dC?MKPobUa=d$c|uQNx)lJf=XiIvnR zs1K<0*vgW?3g+n%!?MXGBitw>m4Ym%5&aDzK3?r~o!=vx0qn@Ka(|rboxZF1Q{s&* zbU3;W3io}~p~avf{CiQNvMd)R)}NX3i(Z`}W`L^VmBfLqBSQhve&Z1D7tC^zcrKEi zatwm4oYOJ;n3B_@!ir*Y$+)g$eBtBRP6stZ&UmwLf6I~6@2Z)3o<0J)igcfP;)Be# z#jmh;vG`;mCuJwyFAY?PSd*H+r=1SaJ8?BA`XDps)D9EpoR98dD7B><0`>XI zyPUzpdnt!&fAjLHSoXY`Q(u~mh-BgkzsEbS^>N6znX2$#R@9Ug#^tu$6Ys?777d*B z+?88lKL&~de3Z;H0_>n1WW%eftChklYdz4;IIFW$V4d70Wk zKX}-X(YaAdNFCaI-WsyDuY^6zc|s8~ODczO%c(sqI<97@>|7C6e^$F6v)KT}P0M7e zR&c8cc)-0pX$@RZ-WZOOMC%VrA$&k4rcz%z#OckfoLTw+S2@d6&leQl>SA~A8{3k0oEdU^^b?~g+p&K14;SaV&$Dd z&1;DS4A?8qt8+>5==^_Sz4jiZR|X6>w!tem5cm%+O}Crf2+(&eBFXxcs*$yC*R0GR z0^8fb?K}C|Sz`cNi9nUtM?LsS+z^~e=x7PKG_YIdFzp#OPRbfyL7mqu{hi4tiCogM zJ%fJuyt`zFJ@6qVLULK;aE4p1_0Imj3);+LWljse>MkGjmW~$tL*u}UiTw-JW_-;| zR0QQDR8(B_zo~Pnw3VnQz`g`KK1&>E5YAS_-b03rE74Hn9ltJvd&vB8dDl?A&zll! z0u{_ydId|!1GZN;s^TlU11gOQ@Ti_}K9#?baq9>zNCkWu&m&3-# z?y-`8d%R&OSZ!2)NJwhYo(tHp#4mV^JwcAc63xq^GbaHe#4T9gkL_9$TJM#hnSH|M zIDb3N)Dwq6!Z;R)_Wd?zebc?Qu&$h$QiGx|HR1XmEbadCTJkm$)}ygi{V&gX0w<1c zS3To4%)}?%f40S(@*qsuLnx3Tp+yMUMsN($w~${Wbp8qh6Y_$DM3K`zmU*Z0uKB|2 z+wsDAfZ#`ObEW6wYZ8AxzX4a}=(f&$#ye*zp!m9~LqjduMAZA`v9FAXk-WL1pf-A^ zB;GtO>`TaMd0T5h6K;#leBzeqau0@XnlFA=58ytZt{5rzB@pDJKsMIMSD1UR>|5~)sG zIVmED3P?cW`lF;qdh5TSt8Xis`GxIG>u!z2lVoL-n6v6rPnc&uIWfPN{{{lVPx>>3 zq>)ca9}{s=+UK3D;}C_gAvYIE6>yHa92w5zCC-{&bVCSD6fsv9D7Ui#V2YHQBls5i zZ}iBsAs=zJQy|wm_P?P^zLIHwQSTNwY#65m7*jHgmpzEYXaA8?oA0;ka%yH~YH$|J zR;$)`7R1zo#Vku)=j_r;3~z+w<31RO!w`$I7e7@-GSBRwi?>2iqbP3AU#n;i|H zzC~5-LjyM-JLUWO<`PHLYA+f0m>=Ebm7@ z2;HA37Fj#rhDi8r$N65iIQ!>FoK1Lm?saqft#&R}VX(^4z*^`DC)qGQ6FG@c|C}PX zKlERg90g4MHnrR)GOqe_L7rDIh*-k(XJWGCl=ndv_#2oQlr7BiE1$TZUK2`oAvS&wpL}x({$ZFH@XX|LO%lW5=e01xGy?Y5Jb~7k z-=;Hnu5db2<^{2GYDr#jeI9j>AVM&Xo-Syu_Zao5{^l(0udRzY<2yV$yit6zs!`jA z(I8Cfy7vXd4oMD3Lb2E}i+L=TyIIr-W^=AqyOs30-l_ehVP^?(w}OTQ?!u?->JLSN 
z&KnQY$Gs!(FcX&JW%~Ha&X7a79I!0dPq+I_*A%H62$X}E`E306@8I+n`J@%uV%)_{m;|pOx^l!M=c#)$zBK7<1pB^R2 zPqyt!k9>o&Nzti+dny;IrxuMG0c|G}a-_`}HDyR#d?IB?Wl(R%(cH?FX@C>%>cwKC ze!Wh8Z5mSpuTr;c=fk8FO6fBDs?9q6>|ccQ!=Xbn?W@G$PAbrQVZU zjNU*FxUwVZnstYMxVyQu3_?0yPBb4JSyetj?vM{BPU!3)l)k;c)!*$c;$fagAY0=a z_?O8B6s|q~yCNE4I@R)IwN{4Q8F!{5$x2;^a$i=edTpuD$I1!P?!Q@bazhbA zAH9&>P=1MeBKzP`hq1H$9gTpG%;Gw9;fY{LvgnLl_(e$0m{p3%x;eqyP;Ddn_1O}U ztmap$o^NXs92UJcuH=ECalFZ6PN;~JZ2|ox%}^nw+_n;%$YJK>7o%!(aGmNhV85yr z6F!8en2V4OuS;$@|1sj{z3S)LDN$DiS%ti7~CYLa~t_`A)FT)|j7*W7lA$T6FUM2@ajk#*~jx-k$hji{U33 zj0fOgvgycq^35-Q`S+g=J;t;V)T&He`V)2&PYr$_o#T7b{%An=s$gQCn%1Rz!TSz!{gt?`JE*vKqT4?lyFKuy8+I&if6M+s$9l1=H_S0MT zkBH_TVI0Jo$GL~*#}=ov)3kC@M!k80AM$M`LQ*JjM!Gc)ew+nJ+HwsHSwiXTq(;T*OZKs z)ncIe4OUlUOBWdDRS?O+>(SAnUq{A%aa+P;^M{MpU|zk3lG(iU#b6|Z{UZ(XQ80s& z!TgeefHs7V+7QEjtN->^@PtR%h_bjZ!AIKOsT1bW7+Y)6eZPE_T|f8Wg285=wPrp7 z`aP%|8B4X=B2YKe4Xl#8(Ijhs4sMP{g$#A-F}&VU@}CFstv#9Bx+d=IGTQZf=Jl46 zA4=NwF`82YPO%0_;G$xg)u23r!+ue5R2UZJQoB#hr$3n`8u@~JkyL^>~na=TIF zIGcni4oJ`xA7VT?lLtE=zw13MU|in+oL)J7Z>kU#Nv{?}0BoGBN7u)9&`JAI}b${onm)sxBuy!GQeRgbr{26% z2ujK=+2KZP+IPZ>N>;G5vJHI%>5*1XQ+6L@zU{8FiI!U5T)84$)5rQX1H|JILz0Av zyDYK)zR$WP%QbnH?Z@|P<#izn$~i|g+Rns2{qZeqPN;vXTwu~zhE#GhbSnxK8^QF~ zs(D-o*3wjNF-o2Sb{f5lZwIg!NjPm3ZBFZo%+az|ett_ypKJG^IU1q2n?bjmm#v69 z3G_M;m2sKieB&z?Fe;P^bLJ6d%r)~BSALp|5ibT{b)Un^Ww9^T&EiFM-XVrgC2s+!^RQ@kD z4f@rmH)9obdENUK6@955sJ?b+(yKRhDDxrBYFz(e zRCNO&Rm^38D)#y(g*8h7#WDF&l;d8XV3C7i|pFh-1-5X z-Hw1VRKVN*@!kGwmm$NJ}w{p10npCevm1 z%;#YL#;0tvyE*1ylERVjU6RgcV-F)O)2g2`&*39m4yt#}xxOx~Fu9*l@p3V;E=!N# ztr1x(7yN!T*JZ(!>T>-{`~nIPzzQ#Wp9Y$#kvvjHHB71`ncWm_;lFSiwYJQSOSVhM z%9BiHkT3YRI027KDMscbXpAkm3h!^(6P)gbeM7xf7db0ChLCAn0S~hU&0MCGVM3Zv z)9|ysk3+GRTp?4lbKf}ADqg;;$`APU*C0W%)EU#qEfO}H{6-CzSiuj_dj}$R+6uG6 z+bj|G$@NN6$)DHs`vMVC2JEWjRLcfGtPv#R<1VrXo`M;bmA-{z(bIUJyX>Zt6*f?8 z2*M7U>py@jLtaGFQhRv6Zz_T$D8H+i-oW3`?YcUl0pV`EujH(3(U>=*3fmy?(mvXV 
zwCd>)D#UGkPFr@^f)dQIh~b<2xnaUyL5gZKjQx3CQS9sT&^Kt}$KSBI;fi%yS{wwX4Z@XYFXsxx^wT)EGNSov00<|9%{Qcwgxy2(ArI1N&K`sazh(U$m z;8?w=wvT?C3^SNu|L3TB^wZ)fFAuo8fHV6d{@~O05O#Uxnjs|v)o78A<}E(A5(Q@^ zOo&EHxHxN~DTWuV%2{>{;^RfdHVTqtP0rv?*e0N}cX1d|5NsSjqX~2Ub!dJ#(CU%w zN23$wWpjn$GB`M;Ss{4a-QZ}jAJ995!A^s?EULPCUxd%|$sr$?rF^O0Jtr7u2``oy zFma2%QOdO^{(4U9R>QoQsb{RaMT+T*^Cv=9?`o?ZpC~fTOVO%x?_N4u=dV{=41v%7 z!P53W3`cP@sLOFi7sj3bvHYm3?qJ9E8FDorYam5Xy2PCx?)8p-{*8m-hfRYcVMn&W zzmKdK?2IZ}pdKw7APKs4C1&zs#}Rnw?7#8>vySG^72Olu7rP9u4pnC9A6btF%0m@? zEZ!yrAt7*Iv13LlJ5nj{f&2bd-^8ptFzomq>-d>MC5HL{)0iE>L^|f&Qra6TZ0}xC zQy$}W!!_WqCVyAx$3tq%8-8;N8B*FqF#H?`i;R+Lk6(VmZyP=>W#{uTGnag}ox&Ad(iIXAAFyyhZ0 z#rmn!GFXW!I zY*F>FgNKoW!z{#Yr$eq6FTqgCwVBtI$DFZsk$P5@SjUENE5?{xKW(jce`P4c>%6eaW(TqS;bQosdPb2hddZ@CxMtH1SL3n5tM|6RW7`QL0(q@Hu> zas1|lr=`wkYGBUDx*OC#>y(Kh-5P>?wjUdciB_-m+@+3{7P;_i^O_K z{p-xBy0|6^TIZb~Ho4>%^SdlJ1Yxb7aj#kg-S9qG`C+0_TU+44hx^qBG<4qlehqS% z?RQ*lEpecKDe|zrTIYN>>s(*?fU3L_w}44=o&~NE1a?iw7@q%!AUcr!fNW@NA3fEQ zLd4Bag`;UO{*Oy4(^IO%hnw-Bj)1ctIdxJGI}dGXou>l~N6^;e#T@%eKls5Rx&wi} zed0(fm)*LVD0aIb(s7z_`5#%|1T~e`vIS6{?7T?3|85*~hg^x`^p8}t$qON1Z4`~Py)UKpzl~jCDD2vri2#@&w`Tn?U#8|n22!N06|SV_QnsQ3N$L~v8;Ur zSDNpx_{O#>2vi0Nra%vniXuhNk;2q0bw^bxyCvD!;X)I7X5`4!X7W+WTm?{vdnCnS z?D(+4Lu`Tc$P&0**BWHo8+c^eY0yM{$sKeGXHxpN8Iie#wDdbT?DZ_saLMDB*^FA# zrQo4H<8f#GBSR?1QQ}|>OR#54W@$>Z+e`OxUUL;|+4}CG z!>|ZkN3ad)cejXBUhr!7>iO3Cilr)`1s<~;vd4lG!ao%GgJU=mVb^Ta{&xm9k-{`| zp3C1IdX z$JRy`OgS?ivH0%j!ieoV2p9=r;bYYJp6CDKweoZSL#?Pw%X(bSP@kd$AkW%>ES{c` z3*qeBJXEL(BzwSS`N-h8xWZ(LHy>j?ofRPTWjVzZtVXEoo3;-u7+)^eZ`lSmGp65& zn0*sA>S15PctB6k7#e)sGER*H%l}d;X3W(TN@UIx3*a)U`V$NS>{gfW?Mrgf)66np zZWr(+CL#1Y_LZC*vXX73_e;s|mOQCgno!R)dDH7IsS4#+Kq)RgveX0}n&YErJ_xTZ z&BmM#(?L$=&_-%`;TI~^5~7<(t4GFVBw{>9Dlv}^nVjUL8JWVAjz}ZC*qF?UY7O!8 z0OV@n;K7+$=x!7dH{Q|!)b^AWM}n(GgQrD$KLe(r=x3 zYqgWKYIfw_k3tfkCYI8e=I8z?HEjyX&{39TjjdPy&7}n-%e6EQ>v>t`1D_UgECtmh zZ(A#QztZ{M26)clV&GKt!EEspihkff8^VS+`y#m=% z@rmngQWmd4C-Jjso*z^D2*~e>YS9^9gu5$lA3i 
zSHuK6?Y@u0&AqOL4}Bi)&>_$m^V0j%d^b`NbuUxAvZxOJEYtOe`HuC*=GNvx;tD=o zieX{3uJvy7*ru&7l`vS1?uahC+dJ0UYq`MNaZ&}a#7Dy8>SKH#mG*vlJ-X-msNWk= zNx`ztEAVlUMw{ZqWxbQ(#M|!uSNc&jUB!7ApXV&oJtIE2U$$ozr@M%Y@BPr<6`UN8 z)<@6LO^nae`NOeHJKV<@Cu>X@OWS(YW9%@lWv$`LbpfKoS*zYf|L_mnzxg-+#uXU2 zv&E&opm z|MZ{!ll`^7_SakyLOKeb_4Rf8p6~e{`^8`UMa{#%?RZc~0Jy7y&VTRk{XP4cpZOX4 zhHv-=SH{Hf)nEP9_R)`i)D<`2_&a~+@3`WFGm0bPC;=#8P+I)>kN>#+!+-b>T>*jO z>F0m`=j}6}`HcPhfB)~jt@IUmyZ$E?24?hS$92HP&b}yzkI$cX!tnI5D8`dFMOd>G{WxAGh0XyUo+D)+Yd3{Pa)% zwEd00@i$ya^4izF*3*gmfM)`2VWh0B>Q)>&e4~(Upn}0R4ytKU-0ow4J*(3J^9hYkpPK&?ho)q#aEw;PJFDV zwjZ+cpl4MC^4XMa=H^NtT4f$uZPK%~A#QG_)-M*UpGJs~w^^oW{mnDh%~kFG#z`Bf z>}=MvJ3qW;KlA^7uKn=$eydG3)~(Xf^W~al^`n{{^*e>Y1(w--!J2AISsz6Rx&e;~ zgJ5J>b zs{g&djk;YIwhvJ@scnU|y6-s0RTsg#$Ewc*>k6mG)exSs`rA}`MQ!Y=u0Z+30Lhoj zQE+SFPX{@-5b3CoSd+|WQyT^aQ!6ddeMvMS81|$;u)W#b7NR49bQC@8L|i~e;ma@d zam?=ae&`<`ZLALvE)Ry*Qy&rfV+|DOzEWowdd>yYSQhlu*OKxV`K+z0ow&+ssIIEY zO++(-#bbPKBBVYs7g*8hhA6TIJs%kSI=~$SAY2gzeP1{{7cl++949gh; zhj-MDg6Pl@eXwq7Aoy21`bx*51pzP0kG$xORmnitLyaH1!`Uf|>!@6%WO?D9ikD~2 zrn^cbUJZhHlo!P5Nr~Pl8gQRB(>y`hk{3M*!tNFUsS8%Lg6xB?Bn zP3N$)(!5mNu8`A_pUyoV3jNPz(^gem_~0kr%#3!6=Go4k1>J2s-aKd5HXji!LTlrO zZ0)qQ)=pW!dfd|Kf{j%d3e|=_ew&lX`YSCS!O{zNUW^acA<#QLnz}+0cY-xxkXAAKtmPIK0rfNIN z7TioN)UGiWSn!a>4*|csH`JM6_3#o_A6UwyG<2cHx+gv%E)e7BmP+aebk5bPme*QQ z77vK(F4#d)iaWo&%43DX%2?2KTl1`}#x_Fn%V1=iYEuW^)!3&-Ln~{1Q2OGYED#SB zg|nf#WW%w$9Gi%ay}a-Z+m`BgJ-|QZvzm_;)rU1kL+g`<(XGo+deGOL@U&AI8`R4# zAYg4Ch67gsp-e<6+8mE8+izJG-|!c{Xq^k6w{|EzLCGd(o@u>}Yi+l~RrEl)hSCRK)N7jt$q=?2`C2 z)9F}4G#+b?Rj}~EQfaMfi_M1R_x3C|(we04tclO2xwP$QUYyprsaf_>(Wu(qHru%< z4iPM0Sw1=V6a80ueDTNsZgfyubh%P?`6tHLX?CNSBVLk1%O_%d(T!g(TjbURxa{0K zw&9A2yw;8JNbkq(UE992Yt7x33*;Tu;Xdx)NBF$P%W(RI{#>Pxahd25Pg_2Hkncqf z`nZboFg}FvHN6vJoSseQDos~$>XK7jFe(yXT!}AA<34&Semce%<;BILv`6b>R4#tt zs6JkfFQVzPBBib3WBv@ZrZN7FwML|H(by9I&Nm!r9^u9lrAnyvfZq(AcF)>-8eg4x zcC2|$Z86Tn__$uh8U0au+{ecV#ANyKhwoHd8}=)|@?Y$`zxyw_aQAQhtsk_1@DKjJ 
zuTh`>!WZnFzw}@1U;fO$wtx2@-f6%78^2|ryysK)AK&>a%OA1{Jl$Yd>#Vk}pma&^_2`@6vBIM%SZE{1V*xC^5ac7|=l3!eBnalQj*;{7m%rTp+kg9SZlS?D1WOfgTn!$4@WFlS7nDILfi^c$ zmOpj{3kr6uP-q+Nq|R04Pz=Pt`NSta;dG%7uCQLQjDjBYoj!g3^PhiAeIkAeikf(g zV#N!JBKiY9z7ZZvKN5Iq0@f>;e977mCS#mH0tqCLKmz|OfWEL-KZN;U*8P6cp=*w~ z{PG(+`pAx8zo=Kk9Ut1ueC|owF9dkO=yE?jzDQYS=d9h(@7Q=4x%?uY!(5L{3t3x) z1Ive9>*TNmY1;hkgH{{&Ej1b1G@o@vMN1#+G;bPIY?!NCI#;vQJZD?8jGf+X+Q}em zC#NGj*GE}1ajOq@Sa zLj=uwzNb85S2V^m7kJ~ks+h)|Oxg;0oEoJ(zDh70TM!Pd76evOI`K#dW}ou*=)+6> z%^@srcP(8AY`oX9rqV+Mod|aIvBZw}snY7IhYyHcv1E%@)8kpVJF0%)##4g6&uvIB z>3q(WF%$I*p+s7Jb3wY&aSxMGKTv$^s;&a@XHk^SsDo!;9wbmatNX9RDHO(v+M1dsLy=F5zB^M=U8DBRcBFs#_dUJ5E`*+ zI%*5CAh0^XB^U8E#-cI(!}=zraY57vZ(vBEh|1FCeK*+w22r!{xPn!nRpo2M9^qR zzVfi-$aK0MpJgguBj7$36pLKettgJGF9_=COD7Z9u-LFhe@A1|vvj@cbaA&aqWM(q z;zK*o(a`fKde|9^RZ2y5(>G!cvD3LK8UX9?ui8K$d3c%N`UCMddkwR~8;rz@<&q0z z5p)+-4+0JXpo1%fZV2@?Pj6~nvW>lmtvqd7ekA@bmaV^atz9Y|w~N`TrLz?)X?$@_ zfpyLniX71-oM|mUAtAo23P)MADyyw29rG#fy0XrT(?ZqeYbUJOx@Z%v6TM2&-MBFy z$7%y{cIYc^+m7Y(J{PhX(Lv|A_#7n;^K>CzX70_^7c6>8I@rtarhnoktbcIGgVL|0 zIjyf{8|c7l4a*#qvREil9v6B+sC5FXFwrSnDn$ICHW4ER*K{Z>cXdzDc9e2hHnn|i z$XZW*Jucjntd?!fv$Fbxau;PEaa}@{aW{FQcehfpe!8UfqitcQrFk>;_*N*favD3< zF|2p`DE-t&EV<^yH4shJV5ihV@4g>}@n`7rePh-J@)XjmNf zEP&t6-e>s-KWm-r%q`32mE*R%b)7xun;P~Lzw!I_l2?A*KKy&nwx4+O*Vu3U)3UYI zUu(5(RMcEly#uYgC^J#`w1l&XMU&cv0=gs`L2F#d5l^Y3iH!?BT;gF}g`&7TPwTm~ zuR*hv_@=#UwTl=jgQR8CJ`-Ihk3DSQe@~#G*y#yvodNYMoBo9>{6k6TcTl zgZo7z6s~2}+ZDdv!}r-dbjNkd6gPrCRbiw2u>5#TU2PA;EB~SKwRT$I1KIT)qWwceToTOcKiIVyR@63JRCHt#7Qk#S01--no%7H4^#`aTlm{s@v&XbFXO~%}?L7 zp?ApT_<#eP5$JmL_A$c6^XKgy@A!B2zyFhes&{_g-uCU^Vb|Vpqy5%tGn=L+HtU|Z_QyW3tl+W1qqPI! 
zKm7n6tuFv>uJ}Fq6F>12Zbia@OEFx4u@3*-&;6Xcl=`tB`!T!amRpujT@EM#z=?8# z_X5@zC>#J>f?+Wd*Nv47FMZ=VgO&P=r{~~hpYnkv7!qcxXg-iUKN&*QakU#Ckt_ zuXnpPu2rmXYSX&2(5{+#;f> zwp7X`8|oQcKdtK^_6CA&cHOshjqtoM)yIrJil()yP1NU{sF^3O!ffW&2_?~{BV5B~ z%Ln_NI0~cWk?Gb^`)nAlEC{P}3cQl6jpjDm3S;6Y2ExPbYgF7c&Ut#?-Qa)BYCDnx@ z=YpeYNcJu23{ZNNLA(xLYfPT*G5IpV(%F9@aEU?x1eN06NtKk~82oI$zO*SO;rs>~$Tyg+DkF znC10MMRRdF7f%W&inS^8K)zN*BLqlTm2vDhE%xpMJHG`PyZE z>Djc>E~-7#e(07txTHftLO%(fFXFl|pO`PWgBxp3P!CF1#0c!IMVZ2S;R+nluOwO# z7s!M6i#Mnbw__cxW#@E_LbWCQ#J;Ji&VuN`+{elTT3yfBXHvE$+Rk;qu6cyJL9Bw{ zwY+E=2p{7EO|XW`=JMkAg1ZWw&C}MYY}#MDYEe(!0@b{D|@#{1jz?Z+Cmw<8q)3xeVtLq7?l- z^!~pR-||d?C#F0`T79i%8(SN;b!^Kj?xJU5J&n=!r6_QI8)d#|ctsx%;fwyR;sjRk zJxU*=yEyOi^WIL=H#?LNbgkfwa#!)ic{-DD6?`1vyaFF^8GUvAD1E#VAIA#LgGBv{ z%3gtU1z&Wfzxz0&w3goQ8Gg6u{q$&kWGr#zS!>j7&E1Ms+{&e(^@!gApyJ)g`WI^5 zV2t;6TdvUHnluEcQEkvCI!AEOrpF4*`)+g3KJ>v4+Ry##e{JvjrC+lD{@?zawRd+l z=Lf#V9|l);C-Ag|&pmLjTMZnBu73MJnsv*UQm_oRx6MBPX)B+<*UEeM+sXW%-gTjU zX7~)d@7#^{;HPh}owHlkX;!U$f6AJln_2hau3h-;PcCbCY(U$u7Hl3R2d=LGVgwzA z*tsA;f0=7P@*_Xuu{$`3D?(fk;J*9rvw!~2|M~yJ-hTi{k{b<2$`OO4QTIo^U5o)Vq5!^wN<}h6bPmMNd$m0YJjL1+Z8w z)?gRA<9fQgyveG{s=EI#vZ}kfX9m~>7QSMpIy1lgY<%&3RT=+`h(q^ZO8{V%ct&{N z``%}-dey7!$AA3CJr)TT7Z8>fZ+g?44(sMa9xN~Tjfq>UB>^g|0xpl0Xam<~%ldH# z_PW=-&hOE;Pek+;fCUQ?tZyDmnUjQjF94#G^q~)Z$Sr(+?8ko0L6ZNd{pYdJwZ=sJ z7vi|U!`Hs{wQd!&B<2!#WNZ4urs_@4_2b%lWld{Z)0)<_rY{~kd|F*Tq8>dOlfs7- zJ>u(h^*J^^fsgGa60#Z61nHYSl- zF0g7ew($seUlUi1Zs-9D3y2ipfPOF+2a!EmIc<~q#M0xI6)t?tilep-M-9vOTNaG^ zrbi@848~U7Yg&}gTfftGmpiRS)8_rYrJHRV%?p9G6C8S8nqbI0|_blmqn{%aBRA=&r9uy2gzV%F3vav%zd;wL;$CAP0Du zsvlT7@TL>4umCwGK^&AA_a0cTZ0h4ddy55EniK1V^6q;*2&|t<+h{Pb8SSVKyxBb& zh1M32#wy2~_-VC2-l(~?$3XSqc8a&VXBt~hfd{{SRhM{fhoLC%z zj>Is@iqDB~?C!f4vKc#*ZhnJLo4eZF&|K8{YHU-@8x|!t z8d=osSpsDqN^sqq?KM0)Ltp(Lp|I6Gk0qlzDjH+J9BLc~?T%egpRty5*IlZMei478 zC;6T$?^Jxl;(=iQW6edRe6wy}vYT_)Qz)Ef>Pxv^b@x)^%`@Uf#`2wu8t>F9wXy?z z<7CR}suMR`%<;a`ikgF}>XQID070w|g6CHO`7{wo6VNk|3?GQEN%a9fVnxzY-+NerX?zp)Z#Ep+ 
zu-mkJ`?B5CJa6@bN31ZG+>WOHy^NYxXJCT~ql!dx7R0Af)GjAa3N6c1GiNU?rRJ7%U0RSwg(T zC00>ss6-ra(;N;|AHX2iECjwUsP3_j>?H|}&0PmF53D)$Z&E7RR<3A+%9dT%xMcNy zM{+jy_$>5sG8kE<)Av20qVb*ic0f=viL<+$%^HP%SK(L@%h!qxTLr}uj#}+Vyj?bgq6zU9V2@|Uf(-q zeQ(tC8^Hk=tv4)Ta6#m5#kAH?^!j&3Z(!Ya&zii5pkrTqIqu|^?R~O@WeOIC{HEf? zhBbZh(*OOtKdtX$EDH7ea)q$OKqjw6p9o-?^m}ZU`cB=lq-1_4wQt+0J&A5)=XS5N zgA3c%ytHZikM7vTzf9WR+e+5CU%%GoSAu*WkLZV65uPBdBM5$vVjo(%>go@VtR{Mc zu3f)#)<*ZoYw7K8f4hC(_kEuOHUKEZUV#XpPM~@CLA?M^JV(GYEKmRrmVgV^6vS!y zdw=imx#E8bQs5qpd$`ZSng$RBmt6eb0oWk^3;e{D*2`c1a*x~ccnL7+TfXI69OR%4 z0=qB!MPG5HMZfu6jc|*`*l^4BSlw#TXM*Qb?qx4~nO|Scg&Hwda7Xr|Kl-EgFaE{9 z@WA(vg|0Ow_zrJ52b>~K&64iA>n@L(bIUEa_=`#e$6wPI9<5`qtZ7YaTGN`=^u0{2^neYBG0vu~KNwmx4y_X;Z4+Q%61saQ-u}id&t7rE#(DjqjXIX? zHC-WCs+K)iG79_5D75@+WE<01SvWwcs^eC>;~+p@Z6D}|E|v=$YIB#4WeGHIR#Bgb zO8}Tr(hvXkXl%*J)RIBk<1rklF5EQXHYqVwJEfv4-^(cGCy~E7y`=}ZjOrpz1mHtX zb>Myo#XJB{Q}tnCfnpcMIx!u1)1Ei;;Y|`KKk`E?CQlTlqLqAHSO-uMS}19YM-tUOHRPobz4e+Yap0dSCk zH->Yn15jtKe&dn}aHy#MB4-0#SHyd)SWvK!+mZ|MvR~SAtDbCU*MTCyki5o%Oy$+L zk?QrB7HSK(PV`qCcS|g{MAA9dKY(pmUJP^MqsBf^J@61KrLxActGPmsD(WjSNdP9w zsvG(6eycAiuIE~N^-SQd(RzS*-M-Z{4gg3jTIdT8y#>Zjed1YAE?OiWr!=QwkX7Bd zyc#M0(DKp11L)6Hcd}5lNqN%-rMd+Lt^1_f&8ciUXH|_Kw^4v7Sk(~21G$`NodWa# z+$Az8A1lG|$4ZYh7F?3CC*gW6$35jE*37_#NHV0pBbOcZeZqRxyn6f^r2~!<+lRO= z1Wd9W99gwkw2|a~rv7Y+FIdC@3QQ%-tZ@K4 zJ&9XRa)v8HESD;p+em8(c}{2_ycABYr+qNnJ+O4IYdy6quDeX&@~scBz%=T7zEQDh zDQ_9=FC+Fs^&bnox$4Vk4Gq*!EY+}*DWnqWmt;wE+f`bSjrrM;Y;2TD_Vicu?Q6cZ zYk%#3yW76$`#xr8Z)w;)@4eps@(*scpZ_N}+JkQ^+I!xjy;|d&54Cqn?zIV6nAV;j zh1MN()W1;eYtKq%Y@&G_2W6Y4^VX0Iy2ce1^4nG41I-VXC3EdR9hDgb zdACx*UF4100f;BnkUU~RH&pvr@NEKYZdC0V%6C!ah=)^1X|L3HW|^$!VQwYK>O^e^ zT4T6P#M){kSuSdh)wu7+sc7y%=}uB}n95qJ0JlGV-;MEg{5Q@V=ZWt^32PlRU2d^R zSmun&kW0t-fBYw|!qb;+Ub{UgPFrm+zJf?u=G8iujN(oYYluRr;A;fm_PCiEXb;2! 
z6zeRkOjg@GssoBUc(Tpa_KtGLxq6+RTQ_cQ*5jnO3eU+Hd)D8~oxzH3p}XNOS8 z-f_nri>u>=`=L+f4fr)Zag^CQlD3aWfBv2~*z=zJ<*uw|??5Td+yY!IV_}@6-Q8Vx zCB^4z^s#g2?EQ}%*r{iHwZ2Ph`ps6f%MYKk2S2iF4_wmklrt}|?k%sh-SU^&2M4#> zM?bJ@`wum(@YJW;4bOefqLxogk38~-2YO!u2>>9-9sR@F1R}Ny?z$T71N*%<|F)+= zjrP8M{g;2EebYC5v&ZYeMb_=N-|ip_AO}Fi5}`700lE)gs ztr+*YrhfYQ`@jGD4n{yX-+Z%!F$B4Xa8Jg(EG=sY29FgFF1WCmVNdz)@BVJL?f`fo zUJG-IMGP*^zWJNuu>$U_*1II?CWZ;Y`9IP6W$xiEAkkm@Yk$pMa}iLVF+o_vG#U*D zQ~0g^13&Ns_R3ej(s@IS8TL?O$*kssaIr?8Kk|`}Tyso-XYdA|!SlDh?QJdx9B~T= zPbl-XU;DLgjYFQ-zy9?;CM=QQ1u_FDM0-n1r!{?k5Y{WD(@(Ms@BFnz%9_@+rZuf; zO>6pMpu|u7;;-_=z4A~U)ADC45AOVi_4~_Ax;}Q_6LiA;MP9!=%(S@g=Q>^FIU+K~ z5Bu|T4_MgRw^}Og07H_8;C#*|-L8!+vgu~cG8Znn(sI^p89}i33Y+ehCX)z_fa^s) z{72!$724%;$qrO+pn3+moIgnA^ust6q^v%QY)^SNBh{-keDF^@JsTylpfJmiCx*oe zL6rel5-9t_v27~vr9#1a%DqZBXd z$39jA1Rb|=8c+hOS#R7rHL#3fKpQ?{2VNuu< zSRMu88*u1Y)fhW z&iY{J7AshvkQbL6rA*pg0}aWm57rG>j^JVpYm{DZU}cSCTvY!x7Ql<5KB3C;0D-F^ zoF;gD1;19&}#kQv_G_Ch42 zyjjiJP~#|O0xQu!tt%{B00tT=n+;W$WEqPSK*mfmWfSpoF8PITfC|LHsps?7RUY0d z&Sf*!(mJRiiz+kBq%4CRYD}yn0HUeZb2XQDw`2e(1C5RMn)ABGqKKFxHRQgcx#P%M zQ$J_bij6MsTa*v1q6ry*MfJ6=)Zb20b1D7}07p=Oi;rd1x1S0$XOib+;2^{R&~7|Z zxdRVCPq6a3=5C;NGkS<0Ykqp-cU^U22WL zsgD!Q`=nO!{b(dUv$?YNiDi>gy9CL0VR=Jw?;eV>kI7alolkAAdfulcdE&5;A`b+|8Nz6U5&==_U+b2&7$7W3gU6Hp*^-#u&CAY zcv;DO!BWkR_Cv`*sjNLUY0W~`Dw=clzJ%Ilj1x^)?kUf;WRUakF-WXp_tYPq=s%}^ z;PNoj;=Z5jbdl$X$QXY&F3ImNGO?E~73D1~j99B1N8kd-WfR`!745~D7Ad0mn!k{b zK^K01k$>5qCy~b0a*U)cFAp;<5=5R?8F3$mJWn(i^ckf+bA!t&x0E3BqQ2EOuf|i3 zG%?Vc8gvJ?b!sEN50U?g9HS@b|KjqBF_QO+_MkY;|9T$Jy7&{9c`Rc*q-A@49+&3? 
ziy`)dtkxgb%OZzuE{a@pj9yuMc&uZ@LNd~x#b>Jh3}q(w0hj;mw|>QoKGB2{^yASV z{^1|`kvH#AJYS87fxz#QHGOs||74`-vC+4_;Ct*vU;ZKo4seNuG8-#^s}Tx*f{I_u z6;n^o6d(T3y>@qZ+e)XOZlg!;xBK4qI{k*!ey-o1=~HKI=Q%I8Ty4u9dElcqfBk>8 zt>*pq4gcu>V`rcB%tbAqn6UJ~;sU}-1J{jjeB&GKUGI9ArvY%Fgoj@H`d_j?`pe(* z^HmytWVr}GHA2A;c=D4!`IEjz`AzymKlDQmq@djYe4wxV%CB@Qk^l6c{?j7m+Ju{~ zfA|moq5a5@{Kz8Z+Vr>o_TTp4{m2!-3Tr=q|Cw)sk%vMh;%ghZEb_+O}FDXSLDDMwOD~4jR_UX04pN^;D<35gHEN0>F|6W;7biMvrM+MwHWU?k5Nvw^$N(qC8lqx>d63g*7b64zmz1)zz1jG0Veom|eg z#uM)oU<+@pcBxN&AV58qDZJ6|?#FZuCF8<(WDxUy!Q(C6icUMNZB*GF2Yajx{>H!N0&oh) z1t^9m;$=~DG1HiFwT6NmAOtXtzF>&}*nlhzm5#e8+^Z00hPQ#mNC$!d?~1xcKCzC{ zBD0LzozO0ADBa^+C>?;TuXDf=T%Zj!U&I61FKug`sNO-tWe9grMFNIPUL34a9{NQ; zP`m@^01%aH6+dE0B*Ad2ic}`0dBx&pYD29*+-t!@lAO4>s*MQa8nyLHsEalPziaa|V9BAYe4u+(EI;c9SPDQoZ1D5^zYBcXWN zR{B&t=xQ%xU17C@$9wNdS-cyMtTa#B_x`i4eZ${q+qeEt_t@E+FWaLJZ`k>Vx9wm4 z?I+uxzOG<*zpLT^B4AZPGThMzPzfL?t+r5Q68~t9i!bqLqK*->1`woHE=o2d$&1## zWGDw{C0U$JN0MP;5sj_VZP+j?sV-EkPY`>}Efy!qoMv{;EJ^O8q57{r6l0)wtoi|V ziQkmbeJl^7t-98d_CU?i7BZl*@2JgGr)z2TrLFWze`uY}4U0xY-OpG{<4EKK2d=Zo zlFAYL1&}sWofEB}Y@ zWnU#fanY9bQ^iT1^5N}c0_|_@Y#nRw$}uiFxHv9zLd*I&@lyUr|5w{we3t1@T*OoT zoSTcw#kJ?~<7$iKIXRDhs_a)AWBg}P+p+;&Eyvi{Z-}Sbzr@oxbKLGSXH3W2^J=(U zZFBKirb9<Gy_)j3y6^vB48T6ogc9^xmMKec1zKCMeD2d@wPJxj zeEx$Lj0@V{iyq(|Fb|O6YA&TtT8^9~ew%D-A6B2e;9g93El`#_XqCVe;{ke7b^GV@BDpv7t!E+cb)8l=~aSyvNY!5}1B#)8CvAhC>H% zPy%Pg7lP^ImIXj!q`CstiMudBfk`fBqfXZn(bU~Ph3Sly;gjm&?d#q^b*T=lRC1Q| zK>n^+Pb6Il3V4IkJQ9CA?u2;N*Vs2z28$JZ>jR9SSmZvyfm>>*Z)x!pYX^ea<1Q=% zV5L5|g$c|k7Ck;pR{SZJOWseE&_sL5i02b5F5nG7Mo;aiP2T?BQQuIaVts;@!CZ5I zvKVDt2W7a%g#`zS@qvVFL;Tyc8>#@;vYNdR^9_n^&OKG_1BK8GfjVW(S{b_4{ z&Zu4H!vNeQj{tZC6=(d!o&hMzssC6i%{S_ruc@WPPrw$y17h&(sm@HfXa(Mc7oUoH z(8h%d);TEjaW&<0sXDuAx2}irW*9oSxT!Q`Bq2V#-d6K~LYi0{?Vjemep>R@vtoPS zQrWcfsvr5RcoA7!Jh?g2e5emtV-T~(Et^o*qrg=EP^P=ZiTc49k;9_qWCoC_KAFbV z)u*Pu1VP#k)W3q-z+G5Za|w`^RXYGZ=~~&XaDvXjEx+dCV`_|BEUgLgga>4RQv#{i zbRT7YLHym4j9*gQJ@tqA-4tIt0G7SJyI&KRY@)RoXd@z?2y)qIw>`*zxN)5W5YeTF 
zttXz&)W^+i-a)h(RxIN8yi}8%mbIP|zAokf58=S7!=5E)6YGi}!-J+PKnT}77w<-@ zJHP@B08jmeN0IuT_jMIH_=gn@R%ru`4Ik%NabfwyIzna~bX0j{0GY4p2w*qRdZ=t| zXq|N(=u2w)aVIy;2i^|AWJ2|4#QzO_aAAl7sV+HYZKpQt*1f!MMb*=-NajkLmcICq zm9{snDY-~#Ec15P?w^jWHVEB?AUvy!=eS%;Nxm{l>u8S4T4`C$CzeWknyb9V!+x0R z^z2Dr-L$7X8!MF~H$QKE<1_4`59aM{zpcGOed;6kSP6#WJ60v*O3_Yh4ny^oeF+PiQmbRl?YeDhE{5fTyFjY; zL+fiFtZ5!vLD?eBEiN2c>&cAfDmAz3!>)DaQ!9u^8>+9XwtE{@o9`XCW!9-|&Ms?B zo*Iqpa=m67m-noH!x^hQddc^^11tcv_h1!KP@fXoGXaPufbnzo#Jmkkd8>qx4b)DE zY^Akcc5blD^|$(W-O5+qiHj4oa_?&2joe#iIbIDp=hg?{)Vki>ISN92jRG;Pq;bl{ zWBvYLO+S_o;sQEMSfQ2toLqB^`a^qIjbRNq)|>(yv3IaO1I-Jr$DF)Cd?dI&@j_7W zJ1y@mP7m9RFOJ&doMfHE*<+qA(~sp_(iI5{TFKAJ)yC+g>s6j|cG#w9QB6$Ci(`ok zT1ktom(1#taZiVI?=Xu_9@7|?Cx;1FCvKDB7Nd|nib0`xB{&$ zSUx}K@mR4yKlq9tvJc<&5qI50?2<_PKVTU^#*$VoN3KPb;p64`zSj2+J;AzesNeYA z?!X$Yp51cebylgC{64>Rv?0Y6d;+3g_AbC4tRqn1L%6>JSXstUx$7hEwSW6d|Ju(_ z(h~`=@H^r=zw25Qw6Usp`5S)axq&3VT%N4vS)OyTe9iIj z#^MK1$?*D*&**LT}>%oS%-FXw9H|bLc2bRtU7U`HQ=B+pi?R<8_ z9~372%XWG+ajOwxfkb*>Xy>z*X|!F57Y&BCIS8${vuTOp$O>m}vH(B-dT=71z@=ou z%FVXNy#RcOvVrBUJ7wMd1Iu-~4j^347i_Mw>0ZZU8sM5BRNKW=%KiVlE1zV_-DJ2T zm9a1`^~tRv^nm3tKQv%hs@LmQ4o5aq`-zzbsfRiM7vdS-MCu;BpEkamf@}DUf%gGx74|1~Cp=Fi4skNE+U1{!~nbllr7!GRSj%ZN3&XVGHa-{ww=N3#tcjGphj8%8W2@z4DQ}sWudWT{s6;QSku?F~F`4uA?S*8p%1#F<3DulY(F4ueL8z>?#s z17Tg|Z%!v#&$o*=koCb!2Fs#|~;wu10JIJ~l#E)tZtEvwTwU+_NV%>|+#BSQo=B%Uki$lq@ zcs8w;Y<|$PulZlvn&hE<>C*Mrqc__l50>p0UY)Z7fR_3Nf9L8;Nn>GOLsd%<`mW~h zluCK|#p7MOnA_4pqzZ+wXb)4Y$mzfrM_&&mw;nwy7Tsf?7 zCDZbJS$09kE|*zJS#59iWVOIm@*mHnWBmW>y2xWiR_hRXp~Vqel<#*3?ottVecW@v zMKZ#7P)_q*Dr-+y`c-)f9oDNqpY#})w8$L)KWx*_NO#`vrM;nN4OK&v3jNpJ;P z11z6E^msv*mwd^~?AbRx&))i%Z*_N01ZU?(2ZG9D+4A6n4|+V3&lX`}0dN7ha`ECt zPs3WBm>R_QSkkrJfqiVs)N>Ti61%3dZc3b)3<#HvRa=@X75>gpE z(a54&#VQvs*)(Ogm*4U@E2T-tT~Q6Pft?wI9%!C7%TRQ60TS|oRg)@E$lH8(&qh0E z?1r??=aCKT70aEwU=6j8VtJ?!WAHc`s*~XLd3DRDcxI035jmxJhE{{qDce8{wO^GoEfxEBbO?Cj2YCdOe^|v^m**K$n17bI5>``Qs zO35;#iMva|%PO3^6DSP~`QXykAI&#`pSHE-$<$GrWWaHZr(WqGT-tbSb* 
zqjNgajfr@L`z_+r%+yA}QFZe6w*y`3UxK&vhoQU4BJKxf`HVH}YlP}E6(zG7cNaC$ zIEdE}sf|g$?;r&rfV;jDKj;(r0F)>fh#v%3C*B0H4sglFd`~q-6syVp(1DhO<~(Y& zEJ~;?jgKJIY4wwFp@dJU4gi*v<`*Cf5M^JyE2=(Xq$H=Y^*|z*b8C{M+9lowAjWlS zr>V3+_0qqZ%HnDbpbanrJrNcfSgT+eGsp*-Z?GD{brTjJ zNy#*0B}RqSwrwz(+vc?6uDK?v7ezOgKe#r)J=3_;_xVKOe^4%11~+-gjM8W$Cti#s zKLC};i7T$#ZObVQ*LPS?;ie9t03P6y3UF(xYe1PCa-*_Xyc9GqxCyK1nl+WynC7XB zyV1xrLy=7= zos-OJRJ~PP)L-DPJCuk>rywOD4bt5b(m8ZD!_eI!U4n!(NOw09(%l0L-O@Qj3GVs* z_u1#{b3a#evA*l`uJ?If(Tbj82gv|6gTfGHeJqx#k1Z_SNFT#TqJ{+UOvWC%hEbW0 z~$$Gz3mX)M4I!FwP5YpJ*S1rYao z4Not3y(`i8;oYj>D4VCulT7GlwLO&*$?3tR#O|U%tj~CKB%n(g!&&x0pX>yU0tGBc zSu4AQYLD(N$RN13kd4Q=m-Qk!%@y4wdcv|%K|Hzfkd_`3RDgK7EOCW?4^{iCVrR&l zNUvr5*OP^?Ua&X-FWnv6xuVKr}CBg@ZxX*C#a$w9D$h4{DuLBn=4+tv~femXOHmI4Ja@0VQuE|S}46l~x zb4Ug^1Az}E`X>6Vh*I6Er3%Q8t7)LSo8;E0j=-*#G_gvjdbtETgKeRCr9T0&ZHlx# zH+TflE%Cgh9n9{&iKZ%mJeI&;c>#Y0zjg z%wEu{2{GuojV3SPOug^r-_=4W_w4@amG|1ITf6wm_|4cSS=Rc zV4NLS9|YtkX|3XEI=i8^i5AR)I)3Pdy461HGTX81{W+E^cAfZ%C$O z5uV?_TIoaYsxNnDZNOC*(DhaiI4-?Zw)<(f)GRK~F&GWmvYiZZnMO*Rrt<9qU}}vJ zojvs&IsLg)>a)nimB^(&(T%{J$=;OC`|_KDv~S-<*<<^6xJ*U6)nt0WN_x9+4gLCE zz28|nLK(U`%svz{uR6EbOO)$U_cO%XCzd4Zzk6@M`ON=ahjnztOuHYHN$Jvg$xl4v zOxRHP{DnyT3x};b-23Q0b2ruFI9GgR_-+n8y{hLaN;E*E1?N~VL;&Xn*JY;8klAd} zczCFPh7!8m#F*5m;Wm9eJL8+0+_^`SAkUOJ6GPfF`hiV67f29PN!9L!w1H`_`urI2T zBbabwRxUc0izfqJx|1RW>_#R`ax;%+JFX+xM?QKIEhRa||osyUn zQw~8VYA>peNfIUE*|e}&$mOoNAkY-+8~&jPsp!~{A$y>vua~OT&9T8)*vmzws;)EL zLaBq3DM~NNdjGzfDETr#hVWU_hddmpG<4^D73Rl7xO!wh`(I!75WNCZk%Dk6SkFxY z*NCJrnwK{mNx^D^b?Nw`1qQ(?SD4R`9)YtvPWOI|Q;9;pL&B z>8V>}{dq6`H&~2O>~-*(dM?*>zHhy}q)V2%=oeUhIcFzJ!}iQEb1deE9Swj6UQcN6 z*1J^2%5yUs8?CL7= zS?Q9=2!$!bjzZE~_Yc?8z)B3BF8ENU$Df{3lPuK35>&m}Jj9p=QL)zby>MGmzOi}r z9#bVqg2CJ^e{V?Pr}*o2Db~nr(0YOV1%D>7Eh~UqbWd$}z*R0)uVJ%Q?WFueuHc`P zX?>LwBwWW#l_5@bE%Eaa*|XeEOlQ6>UgB=_Q@y48{SFyTua8_vp836`0>xLPMm3J4S6y)q4uNu>%sE z<0GC#B|ds76!AG#(-C?Jcj_MDxX$j zjTMJq$6TfF>f5~|wDsK8ZvqcqfhJKZ**izJcS_O z7PjJseQNN#_y;``;kwma&K7dVv*q$eMz4?X+#c^FE9(Hi$>~i8e)}o0Pm9o3wlQ?? 
zGpY7An;`T0UNTqZ1>L#8=Qua&-1_lGj^<`!7dLss?+#+Pn?ro+-5n4T$KASn+FV>$ zWQkt?`lYxE`c*a2s%w8wnc6V z(-oe^jyb)5N7Wr?M^DtTZxy&b|&-6lWRvZwoqMVpQYploHOj6u2nYBhLq_ zBmIz8(MzoGOCWEPUA1OB`xaDfW!@3j%&yQLYDr_2J{Tne>g+PhQuU4Ko}ojZpyJxO zUG5=Ix2xQ29cSsjaK_ZGU1G($50ZCzj-+3{`76%Euo?#vet8aR_;v5=6WKF-teYaL z^xSqf=)QjuU&yE0GJLnF9+T&-r_h>7QB4!j3U=_e?r#0Y)XlGJ5Io%0N3$jD(nUE= zs%k7)-E>r*m{qElO_@j0TsQJ7d56Imt~(hCuGu3vJ3BXM*RRhWlFzb?WtwU)vcjAp zI+M()K+~`3KH2pfpIV!YG~&YW-71chhWE*S$8u7lC)3>l7qN z=T6-Z)`^3m9^>Vjr6MDL2_$vP0A<aJq=)EMO zE%eOad06Php;1#*sb3StYva@jyUZp@pA!FDKd5i;-xYbKq#5@A^wV1@P`z@pMPJu+aNF&Gi5NtqaFWXBW(ta4=si32rlE`Rkqj_@dzhSFO zShSX-rGulo+c(08{#1XGOAYRq>QnK0qO3$kbF%*2dzqqKL63+B^e6PX#hU_oJa9k! za)X2_LFasNkB^L%VLOg=rI9HRhl83xd`6pV!7S*6eqnUZ*{wsXZXE(lNT$qw{nKE~ z8k~RfDbDCO?ptI>Ac|w4R4ghuIitR$K5Wzb9gYn|0ssbb`RSeim{ski`;~u@9o)VWgJOgOBr$tmS5eDa-J7JAzfQ!r(6oA@w_u( zJoN(JPwQbV-*x_G7RD{EA#FHB=`hxlACuQBzfn+3jh&+P4m)8sH=@o1uyBu3^{v&% zAT4K}2ld#UXqY_q56r< z7=oknXydWyg&{(ysfadV1n{zr9#(#gz$rEFgm=U*xaUgOLmo@IahHCUw^ybs^2y}# zs>)Pn=rLRTa$H)XIv=mAP_P&4*wIn;xufUEBAXZ7m&7xf5#0mCqIh$UZ{e$eWN&_x zI_tF*2xcJZ{t=l8IA~+>88}AD$4%D{Hg!xbvMA8^+^VQ($BnG2Y^v7SkPVwLio#Nq z!F1n%{N>toTigvUJt(7nxjW(4{Ya+odGQmbHLMiC(8~+3q?g#{SXs)(5;nqYro17u z(GUTQ$SWLtXbz9tl2U44>?yaRQzd{k)^hGzv*uxoB)kT?1KczG)dOC$npCdXmW0}K zyvL%uK$(=^gD{UELnOP@g*1%X`k148g4x=h$n3i71yI_Me@q@313?j_ifuj$71LRn zRQKqnuU<=bQ4XQhnV%(~@T5vD!riPo7kw$cqPJiq+Dt*why*+r5akX>2o(AG7Ot z=A1vWDYB4R`70;d$-}DIhLFct;l@vvJqm+BOENc;-xyJ8`9T$!m#;tJU^W?wySFA3 zTz?|bizfo`*G?<_1ANuYlr~>j80b%cf~JvKQY-3|0_)8=#2fK+71CK9Yf%1f+_x^u$}KJl_ZQg^lrB8Qsh-Y(># zQ;<`K@uoBm$b`wZiy%I7*;uri4Za;ey2kE^Z)yB5CAahFOPaVxnCz>o_*SV7t>$jT zgpAR%V^Kn()BcZya|YrVSh4{~V%$)!dcZ~w7S+`uU}8Ak@Uqx=PuaxU>cCxM1-6%VO+_iZFG;T+c*}@I`j1lCX6+&!6 zvReLV=SAT8pBkjTz~&7j!do&Ru`NRkN3MAQbXSd`m|{4-M$!;C?Dr|UxU59`Y7<0L z_lU*6ww3<|X)pb2_iIPeq5bgm8?27{=y`&n`o&Ow!qo^ z7aaKH@7nc0*~b7rgoR`SMh@xpzWDI}(rUZ`_3Eb|9Il+-&tFUWJdF6edp$S@y5BL1 zszfIapXk5D+huUi^^Y0iknv+b0vBPCZ90|9<-lRjL(MF>@44@>RDt7XQN~C%9p_zR 
z7BxLWw~B?!bTPhXukt#2jJBKghJYBxg*sv*p;n}%L=G{DCGrg0>6 zd*sz%7Yokna{&tH?iWk`&zhZ8=*>caXv#KtSkZ9!55nsj)8T#PlrMU(bWF=r;(0!r zcTA*Ww@I4r8_%%{k`C2a-#Qn^0k!_s@c@*lbNlO7TGEC*LFKlICEr`h>kG=_-A{>K zfeX@9efJC%9OGGw$0YS~-Y9uOHv2#C0y+&FZU_untsH9nP>*q*0$RmrUWU7&G^YOD zy6!AQYecC8UlMUb29ce1`iGOI>fL7<*XZB%epZJmYyGQ2(BuWC18lRJWy9OAJ?Pqa zV$3zWTc+wMN9-K|Z_B*$Z{vpIukmBas;rQ&FpIgJKm=Ovt+>wTG& zTm#Dk4q~<*+a5Aw(c3+zxr###@M%RNMY)lvbBKB7N51LBEMQ;aU5vtVR4K&%RKYoMwD0=qh?KB(IYd-3&9b+Lfyi>vz}d9 zbznEvYQlED`>KsrZ`;5u9)dzfOUy9vjfNQ0=~6WnXRD~PUdJOx?OC)M2RR1CAMaa^ z@$OtqgkX~V zh6q)EIKXQYh>tUDj_tStc>9={nf=Hs2XxQT=nNR8bdo7l8%{2)Z5G05NS>VO>juJv z##LXXB|M(zVp4xv!29+lw%nczY%xiJwEB)bjZbD#6e)WyS3{*p1bceldc0=dcz!N> zk1K@5H0#(&@9a)%SPiV6+Z?Y!PwUf9F)Q-DF7#NQ4rP3SpDi_hNm?KFb$7<%_N!l~ zKEH11eiPLsTkSsbH+d1pME;_e+a?pRoiW!AQ9e7;S!6O*FVVf%C(>nXkc!Ai>~Jt; zbM_u%Myqj+WhuHJU;2a*A&$!p8UVMVmKI`MZkYodcYRL!buWj#5INHQx0uoWBYcWq zOgcHaVd-U!|2LXcY$=FwCTLHiAsmX>ZJS?>nn8; zv-a?8JP?Ly!&#bAl_1#bm;fegQT^qr=Tl5o^(cLyrt#f2!}-+^tRl=!uUIZQq{xze zk$i|c4vKLd;<0#;=Djkx00DyVB$b~OgdPjJj~Qn^8K{|Io7;{)-Li1%Mdz}e0^p&VoSsTSjyGIkGzi&>n42K`MtHO;)4gn2ztz4V%n+j{ShD&G_M)fx~g(nU{SO z^piLGhdLWZkOl`1mDnRj68Ds_o^cUQHV_@`k2g<*m2%}#qE*~5=xf5%Zi5`tLhE1h z&G_j^{YqUfa1Io{@MFht&!j|xVR$+Hy@g;@v89x2NccNy8A&d()|M(JM-H)AKbHk! 
z_l1pF6OF`HlKHEfw(dgT^q?<#2`(!e*QE0ebdqPn`dQqSP76TS_lq4495oirdkygW zzk8&+Hv()b;X5aV3*XNWyJ4g7Ko6|Cs-y2ks>GIhb71~8Gu4g4nB>rnSv(_?_9MKLe!&4%76Rx^dc`kwHh2EF((vxY zuGN?8Urzc(f&LxDZBm=a0f%eO@Beypv{P2w zDGfYYdag&dLUE+|yXM4ZdsX!Q->JCoy~*5me_Qgwd7gQOO>?fyuiVI!b}aF~#w_mo zvVWWi-5J}|2Q(S^|E%#P2*$IRWUxy;P3tr6(g}axiJ|Q znIwAm0WcZe&-8q$XDw2-`;%Y|AqMhvi;k_hx>l z)jx|%XzyzC=E9aPCz!kz3D0?}^5fzy-_i*baC~ zUM;f_RYAy?b-~K1HVIO|fcMz1fVC-WY%2N|;~zC4RPj5e7suknqHViJyqQx}!N?NS zDVZW;cp`pfJ}e}$X&8&SKVo-UN?TqtiY}5Ou>q7Cj%m;MQgLRYbjiC`60|XEvgVFC zYP`LNaro_Y!!KI`(gf#=Cfsp~wTz$17%9U2I)JkRuOI}|ii-`0^Z>qIOj8}HfFb2^ zpMAj``3^;@jY|e@Um5!DVFbyBP^QuupJybCw#0Zb7n3x3(@sCp#bHpG@hN0 zuMIbg`DNbBOG2y|i=X_MRo}k0ybXRSo5t(?KBPW^=2)IwXtYlim4^l^FYt6F)A^{= zAo=0j+N<;2w~A~V^V5+Ovn={H;W$O3%gfNkuN!V&YSEzgH-$J`3N9q>p`#>sYq$TQ~3_?_v81ZVK#~Sh-Q*2HT^13@O|0RaFtORroj#4YIB# z{$R%+WHjdJ>pI-xc)nbIvQbFb*xGZILAJOSc=XSp)s+h@17{p1tl8!g;wYc=qvtQH zXt(A#{zz}o-yQ#Yqvj;^x(F$FAgkzc!g@t5XJC1jc`-9vPnqJZQZ?&Rg=_H**ZDPj zB@Y}$VnChd#3dZt-6$>20wXMy(4{G5^}`hx+e5||pxgH6Yl4-}H#%vCeB)VZQ(I{q z7At|nh?*}ik0;Y`IJr3OEYe)I*{D$dhceMQOyHHJY+%!!St?1+ zM!oz=pyrdTPQXC;wl$Me=MLxj+V6P-zIAe4Ij$ec{vC@tMGx;29YAY{{i@osXJE5! zzvuMWCnK&OGiyX++9wBeOVjrn+|^IR@b!^)m4)ry2XpAA@MW7Jy~GD23%sLuBzwhO z>^&z;MHGu3UBCJbWBF?HWnNw1Z&o&w%6*xJp8j*pQDonXUMH5W5SD>_*cbzD^Q-bt3&>^F#FE_MW%j zj2XM?jbGkxQW6Ht+#z1p`6uW}w5YV&6U$JWtB7nY`7Kmc2N9-w)G9FA4WG=t8%**3 zaY7Jx)hvE@v3oL&|6gS8^r$77rj2arvoUXye-W0s?tWnH#nSRd$rMBJlP{}+%LSV4 z?+cwMe*nwU2S8$?k^k~;x&Q4N3VdWRaV6~V++;1l;r0p{YT@y)XCu~ce>`9FICAkv zbIHW%*>hOTeaYxW*#AFr*Cr7HVflY_61>$I`T*AP;oW=mz`rvgT)#Qi=7P~L3gz(D zXTnR5c-Se@<4?A6OOv};e>pI$2-Z!Zcs-F{^}ARu9np}4e~4+KU<>M&pnoh6_`6sz(g4=u+$TO8HMlLD$% zo2hf^qWE`LjZO$}fI9DXEP1qS_)_crKUnpfL)N*A7m1y;cU8tLnEz0n`*rx@uBvr@ zV$8yD;P~#s@)nA7z)kBZtAAE}?W4HUPzr2zX*~=;`nq-YZ_3*NDPNZT#IcWz(Gt@~ zN2t^&A!=)b4@!75j$t$ zRZ##)rC?ZtpA>*Q9;g+Gts4xCA-oK3&6BjphBw>9_&Z5V{|?H*pyO@aVt<{b2U~N! 
ztCXw6!IS8rTrDz-^YXXH>}%*gGhi46CIRaY?-OzQ-1lW`EtC;Y{WEWhe;Qntec4ww zu;=ptWvfw9CJ$#`gnEDVVzGdwWUQr=&sTC2 zt^EYf`ojMDR7`Ud=S@~xMgpttSvzJ;hUK?=W&rE+_K4M%gg+BPwVuTS5>*}$H}v&+ zo9R30z}51z28S+;XkC94q#(a- zt-1I6gvwa-TrU*@gnyK>O)RoC2T49BSAnogk|nDHTU$suo$Sdkox&dG8Opn-dHHEa zSwgmz{Rdo`by%UswIHkFL|zWm1P0>QPe$fHuVQd0u6T+c2JK52jkkZZYnjY_jv4;+ zMTDiSI|<5IHpMD}A|V-}8@ux*h0Qh|!m6jnfmC3f`Tc5!Il6rEkwUTZCHb16`4H)q z#X7gTzVfcEyrUjRd3;52Gs%9lEVlis(6l}~i{L3tBtM|YM?o>dTvf7zsKQHGEmvz#}m$@f>{`(=0sdq8zTqP1eg1L%O}2uP5^O~^jF zTVkwqmBq*Yf?-c{id!Dz;C=&4nbs&WP$KGO-v>F&wRNl=SJpWZL!tiUHM%m$R!Msitv4G#53tet z&5dFZNwEQ4QOwx`GVfhK(x9a7nzlqX-vR8D3C`+*NS^er%N?v>#t6m^giU^Gc*vP< zm{<{Bs8s#C{wF$eSO%kc8unt`zkZJA);}+rD)Lx~Y9FJz+i<04_^(0vAgK9Q+rhoR zdYB4Hd2W@6klsSWNO@1kB~K1z3B|_JBq+|s=hiPUZW?=)Zdk!jE!!X{wXW6AROajg z?Mkqu(06yraAm5&y0-Vir9&RJSI8%qOk`;2%gRw)U>bfH0{_^nb*u{sf^quBm~c!} zPuZfsNY%DRRkP5#{G6UUC(X&-eI{ZMh;`ctv% zvb@^=kkbhQ2#bMgK!)i3V^E{~|0xUFDVtOw_@T<~=S30roBQ84ZDA}ge&Q;z(d1Xp z`~6KlNJod`k!MzHnEH^yezT4_1jDj^hoorz-7_F)uU_#ny6BAIFfHaIM2TdbcM=O> z(i?yE@-gU(&&S!f>(LaJ8hjiFU_|-J?qgkY4Thm;j?(%sk}FX)8)R+$0E`cZ{zs86cT5o zLEltwoMZCyv@yvcq6^8|!}{0W{qI3(cFdmQq4cjM|G1PBSaUnhZ6d14Vyx@dNPK2y zvsu^1t&|5TR91-MaMQcq=tk9Nd{q~w6bZZc%HXo@aG}z~uF*~PyvAdcGjlAdts}{v zi?p|Gu;10GJ>_EW^(*u_Lq(pv7yrDnP!n8poQ0?zeh01+UAEOq`Q-e}K9;(Q;>G4P z)-rp)`ys=bL=-JPYx&(kjNJ)YEUkI#L~2hira6Lr1ZdN;s?`Ze&epU`kBX}2r2j5R z)!rb|5c=KIbssm!t?@4OH@A49*&hh;T0fZuc5g`_N+g~?nGBnwzcc2+8}1>_MU)C? zu6*DS!D3lGGBk;yTFBdhrrIqjqjeHObHqW4WYcOMU&$&_d^-|Px3RId-s(63zEt7fk9sfD zx*lUfR8EoG{G{s>L-ULHyfyvHUVbu4yu%@#o%tmJdWRY2SYz2QwJv)|)i9xDMX!_e zh~67sSsewdaw(+)30BKhDUUsbPV$r{4e`(NJ~Y^)gT59Gp_2-{mIKzLQ|R`4kUQ0S z{Rw*<5c>qAsy{{MdDG00A4*=X0(t!?YuJq@#PEX8zUN#${FoCJIcllkW6cNYBEsJZ zvd{r5u)F#w+KP-q6TVj3@3Qum9FN+(XO!l12^x9PaM7C`TX>`*;-|Z@TP5SoxtKp1 z=svDBX({b8wGY~l3|dvm3s}aUS~nx160_>L5BjwR4bo`!>Y=yFd1@MJ;jdacf7sAZ zVm-hVY^@`;qf~^dWpNnDoSe~y_jXZ`gqahiF*JKKm05Oziem?MPw8$4!W^Q zBi4p*cooy_5yqe0rkpw}GbSgr{ZXxP;G)2{QHci(R}(bG0*+*zHQELfs|Cs$ip8=! 
zRi&mb_$HEt1&(@F^XLQ=F|zsPcLmmoa8-HT*S8)@vHWeJ7UJ?>?Mqi%%2FEaJGY-) zI7aLOh6e z-eBL&O}ac93A0uw>CwK=en{h~KWrF=y{-KGMZ#J~X-Wj*4N?TUb|deP?JXSM^HhIm zpDsf3lh>&@r)c@ooeJMspdY-`Srrycbm_a_Vv5sw^2-GL+z7qPqnQ=0C$L0krMbFk02fYIWx*q6{DZpQ#P;+&^@APKy)Ih(wA z=Jq>WcWP{1gOR{DaC*iq%1o1shf{ywPL;>AQ*=%OOZIUE}>B(3iFA6fu~Ow z&jNNyUvR1nFSo{;#rKByY|^LomZ6Ofr8e^#v$EwyEGz^^*)r0pU#6kMw4Z^QrK~n40q2U<_7o`A$+9=1-;=(uCFlm$+m8K7yMfY9$K8!Tj-6 ztgT(@C_8IXl!_R~V-bBhN$aKNc;FYd1d5AcLq($4$IzlU-D=7I8tBrrY>>ajA#C3eai1dd#Lz4{1qI;>^ z{wK!sQCSIn$L%sgY5ID{IWj@o9vrX?rci0}T`O}LFhO~DR#KQi6- zer;;W_a>59h4--IcLQ2R4oDQAJW0oS9vll1=cKwOZ&rB^b?=q~ZD6)d$vemf=_R<6l~T?b&X zcRzUVsy>mIi;YUvo9`M9CJU9!=}l+l4V&Ts?3guMc5>{VSZW`CtY38%{JJ!ttgn6U z<6q4nr)oK9Uj_eLtAI*s=AjO&*Im;wxIwl~!L)cEjg7i@y(j9~nW z=lZH3zS{QRzkn^eb=^a}Ge1^zD)dR(eujA^!8Z`&74z9!`q;5p5K4g4i2mn;WqCjt0>sQhoYr zre=PK;Q8#&Gi4T*pt9yS%i&o)WPTnrPQvTCFn3lvlMKa!xy;>w1Nn#8v8*LB#rVh; z0(evF*Y2T`He{tedJ?;<4`yby7O5vwL+9w)L+H-eop}19dV6-w(C&)`{^MNtA66NW zyhTrn^!$+m1ZS+>h3Vj-AmX5B_-uIIpGH@Tf5I92WxuQ*3Vn&~mPmKi#pD^XzuP2v z%J^j;i37>I^3|$!c8dCy7>2yNEd50$It6N)hDWdXNbbrsVOKv805d|A`w)5GK|A_e$kE4f|1#ES{P3qQ1%@62mDRBxU@0dZwdW1?Z_*>K&S&{LeWhDg3!K3Xo!X<5`LJDJGL z^6+8_Uz8F~zv92Fcb1$r)-qm0ft6~;R?y=(W-}61FC6Kq1pMG?VQQW*I*Og^sGKGR z#VJ9NBAf|*v(c*1opWSN_o*{n(Y8w-ci&7pZnBB3HQ31FK81E>(=dwEGXYq=%cKet zg1lKd)M74yY^DGSEsd@E;X@$P#&c&=$q4QFizbh0gxuv-m zt#^t7OI+F$Rsm$mBGB;>BRf_+P7PU5q|2>pV;y!7tFFv|+pgo!8#~^9I z0z_f9%F2Nxy0$5l3|kCG(k5fHSw!w>S~V(Tdgud&#^$XX@$%{#`YL`F!qpuiNh~l7 z)9{A@g^(Yd0CwH5E%Q2!$^l}FidewDZYl0~=?B4H`Yp6nX}b;09z6!cg1Hq^=v;DG z2J~|e*Y~&Qszbn}i8$Vtjy=auLq-OW>OxsEs+y^9w1!so=pSm^lu%A3vELlE!{Ac6FU|=Sqedc73V*rvy^_u07yJ9 z`WU(4NMPY6p^`#=IFm>E3Bn~>>+qd^UcVVEtI z%~6{UX1DXXu+P&L5|l+G^65*DfIo~1*bkyy$rHCt>DW!vfNmvT8>FWz`+!Pt_TQ>+ zCb^`vSAFso+t)6<|Lh+aoa5y5+)GaSQ4_{*to+77wm!e^*Pd&5dmBm^a?>QnOOadX zZof~n;XmYXC7za-=9O>Kc17UMO9oAEJYO2Xew9=4Jx(8fN7GHqAKOI(rfJa)EgzJ0 zE5iIVH|fW57QoF8%l+L5t`d=nXa1LoKbn^wCgOWO-$L_lj%0@Zx5Mj0!GQR`R&jng z!PKb%$qIGS`|sTP4!H(EN~jOskDGnk@=fDk0z&VJ4Mbb0x`xM|NWl4D$rQGa7BW|o 
z!zfz$Iab2$XSq;gsn|lLDme=2oPH&WI-&w`(qVN}j*v;uyY+B%Co&ZG{4Oj^K^;Oq zW`zqJw(r2@{Yo_!D!aN3W^UUqp;~t*3LjW6sJ*{2U)ZI4~*^pe4keinyLv`n^x6 zE@=Uj0$x=dvvlvN!{sjJVLt(bzepS>u2BzxLes&>?+}Xz0^l$9l4Bhp~xLcm8G-Mfz%a%D5DV459e{q9W19b?qLyJWb3Pv00Y>; zU>;sCK@2DWp#1@c*D9i$i3>t&6)3O_l{O?LESx|h<_!L@Zfw#|OWi{$q|N`v za4pv?gfn9Ln5sck<9Wnf?@-q;Zb1%OAQswA-WxI$BQq%=uh+Wj<-8w}GDIs_0A`hO z-c8XFWz-163s!?-r_`>IFKa!x+IKG58pvK_s{UY+K&nau02p-RK9Sg?e>1HLv<7KU zxcnzL^kX5V!jzOE&z<2rj6d_q zA|wVzSH`ZzJXLi+)h*Gk-V|S{c}&l70z&rHCX${cn%XBydSX%Iuls9Dsxn`& zU2UboM+kH?Bxr(gImvyx3q8KDdn=eqFZ=dQP|7OfLk7vCfKH&qFh+3HwS}F;W67PM z6&Tj|^EN>!7b1X{=fDLfIpjuvf4#F9^7uwKMWb_o+76W6*bZzOt7GT|^-hLfZ@1J` zx448KQqvRJE8{fDz80H)8S=LxF_n;M2(cO*e;HeI?BLc?#^}6))Z}!Vy(8qdAek0j z*?56uRqj{XvC)4 zR-2y+llIYR&sa3kAM1o}3h%!rFF*YlYfC0A{)8A!{X2fbO}P0nY=+(TZ(gM4LNd;$a1PYobRy z-lqsr(`3x1(s;J&U)cc@%vPJ<-I8h7q^Fu&t?8&%o=afOB-pU^rlf2GNth9H8mmCi zXfppMwXtnLWgy67?dObzV|jIgx7vw2*kqGMx6d$nZJklCy9oZ~n3kP2T9ikb;(mRQtSmP=L!0X z)a%!oe)%JpkX9O4H;{Lj6Hp*}cu?EYDth?@?{m9YYz+Ub*C3K7e-5MdgbTctp;UB8 z9agXl*i5O^{`ad<;goaTb9KGA=6?e3xgiRUmPas$?l6LlCqdh8r+F)lt|dm~Tgnpo zze#ya1St<2@0lWK#ER?}XgkqW#e)i~zcvl%ix`7loqd~xG43x*KkfXbi|+uRbpHdV zEzVE4%`tV#<;{}b33HD}fXCs!5Oxx<@zhG!dh3p*1BnJx}~ zJO|gNl5FPQ6*wPjT6JsHuKDEH0@h$N$Y{f;;OBg^nBLx#6pvh8d(Cvk{$q;GU`ZNR zVeIpqhVP#~#6qOUu5a8um70c$8tU8OeHG|Kj#>^adg{lWCp90nJr&=26Dx^egxk0T z0EkD&T@t)fRw>BOQ+j3#tg-}9zQ(2^#7x88RPu4L2sQYq`J`LAm5d={CH3HalMOhs zxUkq#2ueA!O1VEKcWRsmZESQ`H5BszY2J0d4lWkM=b=bfLH%R1!GLgilpI&G0diJw z_s^H5788GZ=i|hZdd!4X#eq(W zz~LPRAd==4V}^q6D^tb0Yv#yt-r&lDq4aUoG_4$pNgMUMt8LY_*S(x;?-$+I3LOc1 zN9y*_?i|}RLD4}Jlc-nB$fKy|5o%HRZs=A)9ps%dAN}wVAs~0iHW{V%k0%d-IM46=uHYcSGkr*m-(Wxba|TOWoMEC zv)P5VnSM%mfH!MbN<75STw5c6JTlKt!c-r^k6%d~nvUWL(A|P4H|`Lt=(YfMCI_j4 z(X`I0Fcaeo_{z|!V>nf`gH+v&8J%Kvd@2q7M@0Al3IX0FB3Pr^`c_CFXD}MzU5pnX z+G3IxsI?|-r!qmYvu&EgHC!R_Y<{lXkU3+$zZG+h7?9KGsd|j4vbe&sLHO2wb75u- z|3(@AsqR|ii!7`r9+gvMkVb@q4+z%h(3|9raVIE&*M!sq&q!W_4Qm| zaqGZTKHVwwS`JSH`e%2^JfHmY>~EoGs5YGZ?eqK1Zkp8~tzij$b9bacDOhM}!keY9r&+qG1=yLr)vA 
zMK$SeQzm3={Y1ywnWO>u&2g zjk}v{giBjRpWpa~4wwCm^r=xhonR1wjDgnPTJaY%}}z07b6U=K0ou9wUUQ z7J;u7(pMH*J`n&|Rq68o2Z(pe%Y6ikJ0^DT_vl~jT+^2$!Vb(>facWN1CJk+Kj0V9 zF|XgHvQxd^%T^+ZmpHFJ3fq4Tlx{VqE6tu;@|(rTs~`9;aMtcYBnpk_lpf`5mc5j-==2Pm7I+q&D-O)A2cQ7`7ij{raiAvoT?a`I+ zKwC-R*{$!(0q2pKlWCv#P8zX4Tu4;snppEo?4$I_(ikwi9IqG!|MEaIrg^L}*J(5? zVv!u{X@;&zXl%xR>P_d9>_ifyyuO7%Lu2_@*!OoVa6eDZ)CO|cfX6!>vy7%<%68My z>WJRqgO~Qf@1Y8e*r{NnW*-)K?I7^h zh~`TiN$%=n`klzPe?|KBKAkqrzJqXbaOEpIPRYQ|R*7K$4TNos*1L1H8nj@-swNvAYQ-bl ziwyX%Gi}PmrEStUYWbc8#p**0Xuq!ltyL)r`mqMdMb>*aYniwJnv*t8Y^xHT5!$RK z2fN}ENs}Nyw%~rd9us;!VAETTJ7%--bGn>uV}xU&B8?QcNVFmO7~NT_9ZD6y=J8ue zY~6XK(u>sgb1#JYmhpFEW@+d_K8ERDSFW%(Z|d8E&qITY#)U^)3(kBV`T5_O zp`Wsr{26v6VCuH<>xvM^(^Y4}b~IJ79G4Taq4=tNXE!hYk*edt0FvP3PG1eNM8BV6 zN$IY-M)cYdUAoHkm+DpN{)uWnS1Tc6>mOQFt2LS;#!zaurZB^WO;H^m=2rP1d|6J# zPN`XuAbvaa*L|TF&q#^F;whWKT7WLp-r?hLZ+?yk$V@KW zfqHhtu^nu>w0X~n?=6o|Nab{-r?3{C_2F2bft?tAv(vkVgw;&GUEonmb~rS;o2J5L z(1q(hx!w8h%{~}fd{hg1SV%jYL@q`l9_n?r=V~~M8G_n9{t#!DEk;VsLg!PdpxEz6 zc4Tiy61cGaOBCPEo8QQ&>&iB z4E$e$l^;5^aL}nbIujyB4gW;dwr{A+HMi)Wt4`htUDYTNRdMrB$cw1aLm{^>&2N9R z=p->%^5|MQW(>I>+d1jJ_sN76ch3ZE?rNDBcYQRTc6RHxN?HD#st{QIt8(VX5l-JiQS2bt^7JWi{1Yc?ag&&TxLHq zsjy&GXUC|tc%>Y;FT$x^hswv{p0M?iuHEEuyV;qYIw?d!gkS7`k#&}FO}=l~rvwue zBm@aXLRyqgML-&)ySt=wgdhk=N=SEa2-%h!6i9 zEkc$~C88Dax;OdMG3H2nibFzS$ZOC2x7kVKEuZ=&&(wd)G=wvLzTf5dF;vNnnYf!M zOwd)^^FLLCR^mTC;OVzd93buSkbSR%MfeeKT=lP|UH+x)(9K;M{d{pyc=~@jhq*)Q zxg(DO4Mm?K#lk4^JUK_ig^-lMWMx~PYr94@Ht?UhTmNXosH>+Cz`xG^#0$Fw76A+B zABzP0eJ6R7;KI*r(6D+Gwzl|rjj|*BOgA)1hjD5)rRIAaFJ%+ey|#i_xu>=p3z%8j zm29>MFd1ZmBLz0!p(geC3}KuuiVNyZN%udjuBw6KapwZG^5~;?inOs-AFC?MVV~s zFJpy$1EGH~;SnmJuI4(2mtimUiQVJoI3^;vUz&Y!k_gca^C?;S=bjH-`+8I)Re5KZ zHP0UjI=-EI8+-ahM)Q>FZ}u-j!k)d|ETK6=4fd;elpssYrTKD23$tGytA~a>FkY$H zc20Q;Bvwa9csDDiHuEwH5-7E-Cmvpc%L+}sGT`|$2i;d#Jw{NR$eElHe@1liS{@t3WlB| ziO$-po;@q{*Ln67oM|=HFp@E?{iXDC6Y-g2nV5o7qBHQ5paeg)b%xd^^%e*FgPI>- z9qdfU9?Q;|c!g7Di!9Z1W>&gAZS4CSmjC()MxghKxjCE2d*nN97?IN0vVpB|Dzw@& 
z4A6)ARiRGwoyz#-0R?Y$Kvu==owFxjzWW-_&7@dDrk0_U(ZgXFpu?Oi@8|!U_p^MGt zTa7Y=4+40ofs$oe=Gx4LZS-G>U6T=pmzDX5bs7o!x33KhMEA-mWGQJx-3p8AcYVmu zQ|6|0$YAfOk0Q=C-=lmwDvv%nxQ1NPKtv$Fd+tDWxT;spKK$2Yk~+zBEl&4MZ-}J6 zG!e#9GuCG+F}aou17J!@{0S%c&Tww4-y~YWHqdfuwmFTRu+CQm?6cY7e;=!`>jl!q z13*NF@%{Jz`c_B_BzPw^WP~m+38LtK;XVr2uS&B6lw`Mo<&fL7ou2$!*EyQQxekm@+9mh+xITXt8`nl@xRYueQ#B@;B zXX$XT5O*7@oR|aCL5)fHA6-!s)TfK^jwTO;_5Vyb&rdQTa%<#Z?K-%}WMd|I=b@@& z$b#G#dDu+-Cx$92>I>Om(jRlN;IEfr^XGzD87)g1sGKGN$PXN)5hK(h%Wtzq6*6vT zw0vNDt*+s-YyFQ<<>j#A=PL_StD{NXkQJIdEg3Q=KdQ`c%&mNtiG6*GXeVSrg`u)77a9scj}6B1$`Vv;_ubXhs7(^D5jT^;mUS`;jK37EH zX@aoR7G;$%wwAjViIpx~+I7ZxxY~nozpL-U*27@wxA49*!K218rTGur90D#@qIL1=^I6$K+y^2& zTp2)}apShdhTk?jH1wR*WSmQ^8( z)bzB{)ABX5FLnny#%Eg$wB~CAI3waXF(-zJ{93bgyEwH6!cM}mb~Tf|^riWuhI|h} zKSqdY=xxElup8%?3x-}yISWe3%4N5H3B5=>_2Eu95u-*GV zW0sf=bsZZ&jKfV7hyO6#mUV7zJ|G~G?GNYK<|hsJ@eY-1XE}VNYzBO&X-8$!x$1<= zUR$(UBW^r_UCNIUzyDY7Db{Nsq)(Dv9!ipmX?|L@m7eh~zIy@N*+SE{{RCb2#Zp>P z{H3v0v1^N`7A+!U0A2rd$&|H8(dEfoti@_SN)7s>J3ftAeMG{^$0fvRl(@9mgt@}R z=;Y4#kI$&grwv-Zi*P3Izjf=Zdr4o3*UbJJ4EsNSH0&po>+PpFBz%9T5JAfG37JE% z*w^JCWW?mcps(NVGro*bM|de?e`eJxfvf_Cv6`+l`&4vMmy&bZ0PYHd7xzJ_Sw=>l z_bn6z&I@;_1j5!A@DU8la**&KOyAfLB4ry>xtwV=&WDW}Xd9zC^DFbncSaOn1b=kC zN9%4-PQX?8u$&IY-Yx_{lq~~|K#LFSG+pBZ8M84G!c!gvKJgGzS%UjA-BjP9nI7hU z54+Yu2&;~ys0W-tBLn@gP_a4W`#v@egVZU1r7Jm7?#-E|^VqiLQ#B1=zp$+V`B6u_ z1QTTSZi>HI>3OPoface;)f7I=y$dO7~FW0o7DJ3qFF_B=aMQ%Bw!2 zXH_TEkVoAgR9Sd^{T68|L7wV@K;fk<4y&K-=Mpc2M&4TI?@YN^K-Z+fjUqxm)WolM znHKBG&=VChuiJKsZG)EGX(cDcVQOABkzd7$@?hRs1B}mIk=lOaKNCUZ9 zb;oSg1z%(eT7;U%Nhd*wNS1JDxSd*sZS*_z+3;I8qBi=i*JRHIXeu9_OQTO$q!w7W zAA#m0rMOz|T#f#aQ&CQsZcJwU4v+}DKawrpVY{U_FF4omNOwY~oo7G9j*^?Hjb8hB zIuB+v0IE(3T^W25tyn#WwY5m@{F^VT;D`d;nOP9ihc7}sQ^LVJXln$TJqWJj#eK2x zIZ$*`y170kAj^2~ha7;Mp0Jje2&L#<))|34tMP`h zHMer~6=m8d2Y>*SXV>60AixjKg)avHo8w;k#m>W17WiXEH)zU&$++Y3D3{{M{dpAeHkM-j_h*jdSbJP|aHkGX6 zhVygy=nOW)TW3^jzqp=froA)mv^X$baj#;GwKY(BkJAUTW*fL-7A-0htno?xBECOs 
zbWh?H$Fpr5a>+VU-3)Ij^mZ3Q)hB9&N@4|3P8S8XTr@o88q9t%*vtaG>g~?o^LVYJ zM&3E>f5CMQIcH>=5TJ8Ik*r|C? zjm%SKM+-?_X+Afz_a0VVcodW<20Af3Vw}+&>G5r@U(xmSA2Dp4#?@t`$CSYRX;V&12Gd2iST3uTF>2~7vJs?F-cECs25#Fu%GsT#(VSE!$%N(`X4T4 zwap)vQ}tFBz_alh4GD-u3~ zULFpYP$mTt@&ao;$0q#3ku^b6Fh&;>3#8F&3^ycA8q*9rxERoSX# zP9sr%vI(`Q5BIrC!_T>@Rq(2h`>V1>T5Z9J^hkRq(}50!a_Hq-aydVEM76-vew-6j zkFVt`Z}Mo1;Ii)Tt(de~_f$}`?fvp(+iGe6tNQg=a31w=ukjwIbNO~rgj}DgQiV*I z?ADzm?ShW|KOe@lK3gX?7;3&`v^I5$S)U>l-@`#P$ z53Rc^ajZq@`}ND@0fqeE*HN}2PE6c{qhW71w;n&!?pjs*HC{ix_mEUCdiS5spWFn6 zbi${plY%mREJT{WS2Nq}r*qcR%R#|D+dU&3)6JUx){D~_K-a44^c~VT2K@zc#q<dcVU$Ca#}HlL+DeX4&`*j3rUlz=V$F>$z%-e~%akkPA1wNZG^$>A z3v?@Cyt*}p^`Cv_O^S~eewz&{prRg0xRNbtezeTo5PWTxvg64WLLo#!9sej(JA&pl zZMv#KCc*gZO!x#x1G#`+dV5K#-`4&_eIaab-*Ob>_9a?Jn*movAusKA&*Nv*qq|{u zjHz6S#VxetP5-=+B#LCHzQL2dm_tD!a71ZJEpu3GD&|Ql;*{bsSR#h7#Qhq#%ai

Z>1aNLv|ED zkV4U26mQi7`Y1d%*fJ0~XKHr6G?(?-%+>lH41eI>cdyxm+pYnmP>9>N&;&#;BMq!;Ac=0SSzx9;_y{P}XgH+GyNVZtF;hOx0> zm|EfOW#@_pQR-P(C3LFqNc%aC_3C&$Z zrLVj({R1!!Alkj`y?D_t^1V@L+NAAHWQrBHHiVUlaVIg`Q0fWqIPKiA>9+k*|7LUZ zKDeE*KgMI^rR&9V&%~Kw9ffsO;GSB9AC^iWsmmOV^}m!w`2v?{uJis+ODLv#J>t;y$_#A zR737f{HIa>fA8(da{8o@t@Lcv9hvi0rdxj{ijHkYUo85nXA==qE;RH%#rhzU`|53kT&}k{5m9+e~!D!Ytp;)Wbwlq=J%v< zxpkY)NYKMDhQ@$pT?x*OdsXotDB`Gy5{d-|Qi%Im!l7ENkBnUt)7~8T;vpzY&5KPT zNli$tifX2>*|g%s$n^Vq+C~yt%{W#95}^4=7*}IU1V?*e)@P=t=gmoogb!p7zj*x| zvRmz^mgHa`l?C+nQ@9)1&y|CvQ+I=&T7(fG41xLqia2-dr`!hMT8wC}&)NaPZc61EI=uC==ZwPIlg&xE?vvN2(3z<}3R#ISyG^*7IDg@bK@&C4 zx5z7(Q7xmdY-shhNqBbEEK~cn%HyJ@2`ip$p&v22yzsqqI4qb|D;BJ4Xyx%Z|C=*Z zTxp#|rNlWbt8%BE<;IFPTu4ul(@4U+(do7uls_<>I%=mB@JvIg-Sx+yn}<9z0ZR~~ zH)@BtRJvPBZabe)S6<;kiscT|97D~Hi5W+hhAhJX*E0g1 z!rEpcVxDj^rP6X0+Xo{;D%3u+S$)bjlWs=ML2HBpLaz6QAJo9=a_D13fV#4zHrX|e z1DYZQ)h0=kv08AiXa9f z)1`(YN9AF654vvchF5WxQgg6mX%_A<3q0RZ_@`68&?k$~rzb#_#Q4Z|dx&n z+DZQMH#Q?|8PHUi1<_+LxbjW5JZ=|gG)w<9--9jv{$LewJ($6vc2%?EHOP^um6%-s zN(JlKAtGQa$Bn5ECWaLV4z(+PAoN6~G)`Id0!3DrG) zyJk4O(lo&U(HOv|03G=imVBP7>NN33|5aE3gKCtoIDJ)aO}r<4_VJyx6xSFuJ!ruH`l?mvvyVWymCi_ot5wZTJNTn$g({BK2KG#x#%TkbgoKQ!$o?^RNnj72lhpw;aKA zLMK1C#2PL$h{l}_2gM2VBSl8X_j5&2v2O+<&je%z>&rJqrP>vfnq&q0(CJ$~h^x9W zJk}fR_bK{n!-QEzO7aQsgpbZ*v(v~IG$#Pk!5U`UOP}F~dOZ=ymnj4CM&sbscmi+W ze|PTrvI*KSHf^58czzr`Zjbr8##QD-%=ZF-Hd_gBNrf-Luv&{t6rtY9$BU&i=9ffm zr-yz#HF^H68*Kd0wH8I-*%TSg>%n_W=@V-<%-0ccvyx!M>DSP6r=Y>E5J-N0z|8|x zRIPue)Mc5>1vtS=EuZm!s*6{qw65&7(J9#dT)f)MZ{YBvR|Z4JlkfKbF8b2>ug^yG z0l4bxmR~ZGuTHuT%B!=%hbe#ZttpmOjB>mtqYdtUvsw$JoiG&G+s~A0V0pH{k_pKx zm1Mzjib0V42S$yl&|uL9H;_wO2ppr8awr1&3|asdH(ca!QwF|H^$n!tqUOo@7#Nb6 z*3y2K%kvbtuz$Y1yc)QQm|r~@X|O4_jR=_+Y{^M08bT;FBJudczca#P>>!WUs*qoo zbV~yIV7GTo<2kbX*2~W|Tnu!UoAXDe;E}fCrp~GOoR@%BO_9lrcDv7Ph?4auy%8q5 zppTaiX@#55m5e_F7%QdiHQxDbAE<7Bq1GX2{!}KZKzNvCx0(lulGERbcCuE%TK7Xl zYp=BD5`Xg)w{Pi8_mH!f&!8?DMnxZvRhHKrihJF7BGrZlJqv;}>l}>L)-IY~jnAM!T{o|sh25GI5BZJ7r5cq1R%u4_ 
zoqxIEZW`CqUYIoaS(4QDH!oaDt?R;x*%TO$5T-*IR>LWmN~0U~UxksHm^jAW5~8R2!z~vi7xrg1l(jvfk9>e3@h z1YS0=hwos-rBA*{EHfBX=|Ng91#Ow$`4M<=E@Y2H6osY^Rpx%p75ASFCt6FlSC4gK zAb9oqLg7h5_RIWotEpIV(y6{nBjPY$yjLJGqnb%9zbrB(*jTDsp>+!YqY`M0adwxHm|c2}0H1*5zN9v$dfC zUZW~&tCtF>$HtKhv;>?^1MeFpJwZN1`Pgh;bNlcmj!rdmG_efO{L^4&9 z(5!wy9j2Z){$50Ucqk4pEddu{wGbjsP&(39=%i`XM zNPxLa?JTyleE>zj3(J5t*_37E7#n*2$s-u*NBggcP_i(Mez1~1eN|3!WA92hBCu0` ztfNh@F9Ux6qFPLC;gxxv$9sv&C8dN955(0_c*9aTW;k#TvDEv8W3bbFhHi0UQ&hd8pCjHLD7Reja zhrH;)LGXaA3H|1;r*Ui`>apU>@1-gfnUns{m|aW3Ar#Az>R`oO{icAc8^jA~!(l%j zDe0W&*5>V-K_L&md~5qF24m_w8%gdJ9M%}B-f~)^{P@r2=8M zv|Zf1EZ@a_5w_u>q|eM+Z~x^wMpD!RSGREd(QCjj$|eOZ!ISsP9>Zt?73?BLJu`UO zAY+l+!sF+f_d$I3Sin&Ty_v_HrqIB_d;A=R%%n-Jqv%AU^sKsO)&!@eA%M#t*o~PB zv4y#%tjL>d>|Sx=5~JTr{oaHJ0*PhsqQ>_$C@HPMC$YvKWK_;{fh5-&w!n~`YM{r> z*$qQpVpAZks}%hnmXS@S5qx%PzhHq)F*bpp9@jRG`bkNL*={A28|?o%%YRES`m^gg z2(>A(*3fM90<_BURh&6!=(nATUprF7Wu^Dc!GDBY4%swh`O)r-d=OT~VRtPkBmVTqMJCID zXC5u{|FtuoBqf+cUd(MKhOitL(97^o_*8n`{G2)A!MCe3Ic4m}A%`8TP}fon(qEvu zR8)S^LL>cr4Fl;7N=c2r@BC!2Tgu~HmJ|0Ili7+hxd#r(1u-R-4sBI&1qcEn&aZwt zA+?{ub#qO#q75FE7Wb?)OSTqXHwR_fa_dP3#hHc%%u55Kkv1>tTpC?3oQAW0CeID9 zlId#F75&J2oNT6{)g0O*rB%^mlf~%Q(^yDW8>DPooyc)RDqiE~@0Djj(GE)ZoC>|t z`ZtEBw)jfW?7wgje`Z_|-Ei|NwkOqQiMZh;Cq2B9Q*!pEWU9`j#l`^s zjzk+z0Q`lgPu;JLw#&YK9dKh32|wrZKjQEufKd^jg`X1|J_X-ddzasQJT+^l4NybS z(;6s2;Sr1Ey`VyJUt~D7)YB5pLV8qZk&MxgAkA>8f?i*hG9hd*cfW<;a!&+L9(pG ztaG8c95pY)M6zibv|z+!!?&CWk|F9Nmv^4-+)~dC4VLTceTceI~ixUZ$mL|xHy@vsgN6H>eXrMuMH+R zSp}~@oeBb>Wc(Y|{>i2U0tSw=Cx&D;WnB76!CSH9E$kn~^xO%0{ye%`{o5L{M7lZX z(;GEPDEr*4jewT`R(ITkaAyy&MZbbQ^rZaMsR^@>6HBa2)q?MCENpjD;p5IN8+yD3 z>8?)jD~A>v-Z7PH&lx7oRbYLLO@^$j&)n*mor~f%izeW`;J4*kC6>+#R4_jFRf5S? 
zo?|yco)p@!^WG!|3Lj!tuMPVz1RL97Ui!|EuMewv^yCko9N5bbr?k4*0_uLqm1#GY zoH;Mk><1BGZ5BgjetYu-eO$e(%U;>}5rK|U>YO#lOk~+fd}NPnE%)*pV_(~@`}3P^ z^00I4lrgOT(|aWNMJHh%yWz>@_$N2_!1jtFjFHtC$@OGPRyC>hYed_}_C9%56sqD+ z0IH2~&a316k0@1I1Io#y9Lz)(%<)>IYz{KI1Xr&7F^KuC%krip2`h|yP5^;@QNj`5 z|CcvTbs9-c!K8~kk zFvFv>Sxu17%|A0unD$$Z@BlIBJ*K?MW<%h*kB-*fY+frU$==N7`!XxvtYr=p$O9v} z^1+U6>lx3p3#_INF01W4Mi92pm33Kqb7==E8h_dkf6HOxaz2#DA&G zFEYOtdgB^unb>)xaWLzZ?>--T5eISG#r$Kq8Op-Kk?H*4e9#;txcL@O3}BDwT)oln z9IJ)q->5>;@Q)j)T^KVzxj*XewtU@n!a#0j5#ZGj3I%LI7xwuB!XNROHz>kS%hzf* z_mMzP3_H?P)V>ty_D}kQ2;gjfS#u&e109%Xo?o(i)pb(?VQQXmg31ho7G$ofyBxOE zmvOLmw3)>Dzg2xk?yKb@_+<_LQHw2I=U#2+i1itvAH1Gr@-87;GU0A-IyKUObTpTmMzAsfS45Q&OxA-5Va+e;^ z%A?0bA*CpTbn9TVoX?+XG-C_b_jUH$A*Jzx11?u&pemz-r-Mq)D*v-RKxt~a?CMkaelx0ex&QoP<7kQ< zmDg|CVNV`wFeIbXd}W`HdHw*40r_rdaaZP&`3buONkeux!Ny$F&S%lW>tkzcvZy~U zSrbbiDOd@fNpMZn-4*N%bs9xDkzwC3I0gNA+-3@0-}4UC1EzvX-ILpw2Bi|(5T60~ zSddXk336Q1wUVNFT{+5{A(o>hx=~iD9J3C2Tl3OGTi}>mb zJ8X95vdZG2{-;Wi&Kq#nRvY_u@egM9Pyx?Y<}G+w)31Dz`Q+faZ3}mxE?9)#xXEbR zKv#>ejws@g!sN-X_%fJ&et>asQQ+G}7V?tXcRViVJF1lvoMlv85>J@}>3Ib@#ZM!GYW7ZYWM2Sp6118pUh0!5rhIGlpPI`_6oR&gPb~jS*9+)oI4i95 z6()2iEsRh$IQsEGjBG)ZROY0#kWF_`O#``n1_hgh;Gru9#NtJ9yKY|GMP3xTuZqJh zH%^i%?y!?DezYAtRSdq9npuHPY_I=&l1|01`HEr#Dy_7_pk(%?OZ%Z96F;Hm>yL!w z6Ys2+NaG0zZcmjyzSj|6+e<`HhsM!;3ovW%&{_CW9X_@r4?Vuu&jDl+A91r|`6+S88O|EvI^V1!@{n7gxh|O~cdu^c~tO zyIw3RR7&OGFXuOpE3z9%-VG8(`j~2V)yFDT<{b)nCspbU8EZ?HhnO8cujDm(XE{gA zLbMPuotd5Kv^w1%d*`C)&JTCdx{MIl(c_+g1-psB@jmfuSBcOIy|$3u7u9K5zRQf- zt6Of<<7?nqqdtP+C;juZOfBk)s7K_BnNr8H@7X-Qu`@Yz%J^s4P(F&Of!}2J3B{TO zn2>b8&R7NMSl!RpQQl%u8ZRG-`w&V`4k~Eun-tEgpRNABg#aXtpd#t_>N{8`Ly;&Uk(W0ry%tH{drhek%~dNNx79mGilcU`zVp7<`mPr&&*XL6PNwd zd`Xf|;&$3EV<~-D0*JyK8NrM?1a$b%OWzRWP^wC5Qzg#I0G9{~2^i!0M5Y2BJ07%l z0xv+eal)!(ZZCdL20x89jXMpm%aij(3`Jkw|J6sTuV)6d+Y8bPTC^w=sO5+&^$a^Rst zJ$Z&|TZ||pma+wyKd#-wva^xY=s~63 zsIpNl)o`Tk41G05Gfj#4or?~afrgarK{AX3UfS#+shJ%xrvc;*^92Sc{pCH&zW_L2 zgXi2sTU}8uIP7?eURT_O0@33i=)q9^t~kT|kPhIwzom@eEkWIi(UsxstD=HK!o5cq 
z(l9?_cz?;p&J(C@07`LGc(`jRPd%+B&NjOQ1m3;Io)#a!0{VJkr}K2HsUd7@DRnNr zDjor|>^}IQSF+=xor#{K^568CqIIi1vl}`MiqU``Yt;LY>|pTzz02C{MEf6jb1OC- zm7I?QmBGqwtgOAZ{DV9!Wv1CV^uc7jOOcx|M==HGOf(-@d0J@-=)2em=3_QK{0nyf zIUUzKH&D@Kwgnm3qkS%Qvhts~RhrzhRtWAWgc_dkZ=${Rp6+079xWR0`Oh>pSUT|_ zOdh%S8h#_?i>|$Gpq?;shlX5>ERx-xg#I%QqorNDp&?@*Z(o;p6MB?nv?At8aeagy z)O(uSWP1BML-Ze!s4QUOb}G~geKCBi-K8<(yGwD!+lG|6iX?u7Kh(INb8h${b7lU? zVZzPNp9LL50Z`*klR)fpNBpsm$O7ONHUGSC9lIORg*Eow(hrGul|0+W^xU3$ktgK< zhDT9{x0fgSDEMIz+|eA9c$+9EK;&l{_5_Vv47E=ViIc)bhxSQdW!+j8`;Jb?oaf&} zsCk$NQeRFpUznqMWUd&=wCPxDCYjDx=NSokupxU@)}?C4y@TGu_7hjxw?8|O@EdNj zU;d;-k*^PO8XC=yJaI!vGbBu=LxuJjh~lZ*Yz-F!yLy(R@nJB z^o9)6#EdJC%fo|5`E_1mMtVbUyo4qFZIbc3F#u+-@m|e}XWZhbSymmb9_j?7J% z+~}s;2wq`y0=ivIJ8t1@aR0M?(B(eIyXHzeY9_!Yl4`|Tstp--Cj;= z+xz8O*tk)kAfAW3(h$*2`YzzW*M3r9*T&-(8m7Cjy^aw5NtRQ=1&rTgo>E^d~^wo(wn zPf1kyq@`b1b5s}HLf2s&VLY5?tyZoD!JKnys1rR1cj!R!qdsCQXU$Hzo5fk`LXg}1 z>V&HHJ%)Xzp3Zw8HN;Ffj}(N?$_eE0wb)-|(!i>s^^uq1&Q(>HvJa!(UM4UX+H0sh zy`R3fZpT}9!*VLJuCkTPrfBV4T=HIw-;ibR%4ADXX*e6zfoM`YmghbQ`9n0e&GJ)d z#^>db*gDmBez)*qif)z2Y?d*ho|~9EPxjx}C#f28N~g|Z-yE}tV1+X7F=`dHE_0`b zwaS~2tlcQa=uAB)z8gHOA1|psqNcIGkZGJ%1N8ZpY{_Bii-^Kq`TJ~E(u>#p>pHCn z(XHshb4H10cOJ-j9~Rr5QHGwU#EQjcM*{XXB!EzfCe*YonYTh^_w-F)-ej}AFV+~$ zH=vz39L^^9{(9i|eY2vy0V2*7i8=~z9A@Y+eDK#l8Xp1FlZ=FDkeKS~!oP>qp@4%cWdwXCPinTA z;NvGPxOWJSaluOp+JW)mhm_0gGga#bV!E%t5Rq2^u2t9tebJ`P zikoA%B^_9i2J*fq%aPKXBM-Ml)9-}63{xDc8I=E^E+mYJuoJebfN!en&iX22q?dJykujOqbD^!y{5Mr`n0fP4)mq|t+m0Am$_P2(`mA)i zY}gZni@&pEe}CQdXwNX|(IzsCL_;1QE=CzG`wVZ%VuQAM{kZk^Q7yJlEqd6U)ji{u zJJjDba-EbP--#b4{$1?~b5}x&l?#0j;i!d(tSvQU!BjtZYRyZZ-NmWBtB*xUOlEcK zYJQMJ5*^3#2c#E|++)>c^B(J^r%)4X9Qp5IpX79>Vae&{8 z#ZT10BEU-k&NW@a>!&CGCjb>PcThd2qE`_<$k7@1LS~AY`iED)i0M3|`TlR+zj1M_ zo_E9sn};@Ex+RM0a{kRocP8Kq|C`iooHzA=5peqxyq8rT#OrbVBG+Yt7*{R5Vj@-c z1|Ro;Hcx+}^ggNk^Hq9{F7bH4l@BTLXYX6WK=Yq27j*rPsbQ+CMDvuo=f!QVKOefI zUHYN5zi0aAJijZR@i{E+D4*r|_tWXu5YC);zRG)mW7Jda6C>XHtyZLeCOYUZ3CACY zZ&mQ?vsP5P)Wx$2qZJADgRKLk1_|JAgx}&PoMs8Y^}mt;qtgq5y3`sOXNa*|qgUv~ 
z05?JU?rp@a8vK1Qapmo$blX)pw$-P9rgOJ;aU0t;pN0#g8YG~Z^16kG!owd}!KBV= zZ^9xN?DMeOz8G6`F9UP&45;5NF4T1b_uo-@b$hLH>lzQaI=MNNY4E$sza4zToeYx5 zx79s+Qg=rsxjgh14#r;p>FRL0txFZVS7-6&cpZH!(#0O46uVp226u;c9@j2NbsgtR zT|1F?(O1%K$laUk`c6|3DT2lIwMwf* ziS{KSb7>0fvnivL167$%5l3E~0mP=nb3fJgv`#ZKl4d;WhDJJ87HDu+FppCgLqWx3HRk!&fr{P#so-6Xb zRjtt*?X?C39an9Q6$?JO=Nm-Z--T!!V(4s+T3l`Xv+s)ZAxqZ|y*rRUPkckLlNvi9 zVL-B;=LjqlPuaZZA?ZKEC?c2nVU73MI`8|Iwkl$}f>$AD-W0St%~v1pB?#8yIisxN z{AN$$EX$%FnmGAzSaVnK94_2FO~6ryQJK7F6OvBtZ4XeW+Xf=WDnZFRkMH1oHXRjb zl-B(TsJ^|)BLt}k!ag`Kz-Sq|@BrjK~Lpawd1`|YiQAL{bE zFBHpJnztP%FGZKgL+NgZAd4;hdQQ%n_Sk(8!S6Op zkWL_3fqP~y(a3qfp9}c;-2<=T+Vhg(#_asge==34smt5~)=CmV*>*-3Lli1I;-Sc( zJU5AKUR-SLxr(wRU+eiEFQWQV1YIq?*29d9_^25)eb`3vg{)f+#qsL)hTqn&tF{1n zeji1k#}vq}gEzCUS-Gbj<`5K-5VrG47R~c;p)@gPccRCZKebM z0S+IlcnL!0ufYEI&r4fITn-E@w^B##ll0*_{lS6_T2R*$Sa_+OHbpx565(B)WS)%? zJj2IBmq2KMp52YCo~DP50jcs}-Y8}GenB#fLODoQjHst^#~_Gx7!|bx57!!#ff*72 z?+wf80T?RY_RasYongu-pJ84I1&l@?fI@v6uTQIyuz8;sx%GL6cT%*!UQ@-9i75yZ z0trO#MJ8_3Y>idj_URElCz}2HbDB5^42T#@trd4zFtL{Ph?l4Y`YTAq)bA+oJN^w8 z0E1zNpXTqO?v?uqgYbNZ9~e#FyTHY1RKk;oSDp|jhTIx9?R#xNaO=p!ewDBFB5ViW z)+U*ffl!?&QPGO}Vqbk{8RtU8z{~PtA@y~iFgn>EpK#Wm^|YlBCLH3{Ajo|dVWi{? 
z0s(3ZIwVWQ2gC(Av^tA~fYRlHOrK=PSeVFxk}k_SCyyP-2j^xKN`PP(HXSzqexXsU zz^cQ_IOPE}cDmj2y&&xQ5vi1WDtlZ&^#OENVSM0noX$~Icg$37{$TDAM&n2tjU)3jmxc9OBb+?kq>xNzH`Z@cF z>E_0c8e&)Laa|ETKgT(IM`*Sa2i!q|E_FT&;c(N6kbq?^SIHz@C2$+?HjuKhrr@*r z1=sx6^P!9Ltq)~^0_WtWsH?wzZ@_^N&1n9bj|k)7XWX!B@6!9~g>&Ksb9(8nloNx5 zw8K;)7o8OE4vR1|c-|eDHa~`jx~7crKuFS8Q_brRZ^K@*q(srSZg3(p4VP&BgL{}` zT)$RtA|3n5xI{YP&4;+Z*5IYJZ50v-+-#V6{tM)dU|Z6yX{9LN6Z4u^`J(0>MpDLW zZ;?o>%EPMM9P8nl-S3VNDF?%oVN5!KN|6cFtlaCNMa#;*VU1=(;nG&dbJ@hmQRT&w zIX3Nl9{gd2hHms6{=fN+EbI<4DS{$&^0FED)gXW)^wB3B; zhfrMo)>y9Gl|<8Zdm_0z7fp@clZ~~4-7+*Xi@Z+Ak0PRsd!-hK;H%eZ>)K>wE~ zUwMC{!)bz14OkwyV!QB#uB~aBBIs${_R-KlM<;GOKY@_zb4{>nW}n-^{!w1!OAf zoz%NS4Dg{rq4e2lSoroQO-_ScvQkSR_+8rIXLhY2W9IF$5w%NV&9E%$6WNWhhr`ToJmwNe{;EL`do^R)vjrQ3GvdIz_+ z9OpYB5sOL2i&bqmHp^=l9LAW8F4G2R$Cjh-{$VBH;1K}V2yb1zIn{Ru``ujfVleRb zv9;qR29#?TRtGmTzv|X85RV^Upv%{2X=ijO9OXN|?0c~0m($QDlRCNJ=Xz^lLqWZxc%$(GTFn$H|{U3;$G z?9n!2+VEKd=vByw^l8*ryx{T*f2yQHL~cTs4nzEYOP`KMuHxK=LFX~g^~Q4`EsPT>D=}8!-xpZl!oLyS*PJ$T z!^@upJ70*oG~T@OGl;W9eKlDpBIo9(kmP^%9UR3dIvJed-IvRBSa`hugE|=g;&ChD z`45-SI$QX$zKn~6h%IW>eUeLC+?kQ!u0T2z=+Qz77XG=iF+h%_G5ULcShOLm}826V#G9@Y~(dO5jZya5_dw z{ttWi(%XaLOH~X7Po<>%FqD1yerq%vweo--A38`*3st~;pXf~9`9*o4<_e`>3{-n& zN&jv)69J_Y-DOu5)suq>+Ohrt-cglqSxc23QB^RFzU1*{Z#9PjR)D1t^FSbdJZ*J`G|sBr z`6ucTnLGi0ieLwS`Yyy7!Ft%0 zL?&yzf+bvH4Q_}A;&)5OVoHNEiJ`R2rGtH@P0PLB zv<+J=pNw`kf_fWy_5@aI)1N}`ougpvn-t7I^12WGmqZ!gS%+z9H1M<8N3mLMjt~>@ z{H!~v8%)^A<|H`Ssue@ma&j~<1h+;V8G>7s^O*Aa_WZM}z2&pH@g2y#%aJuOCU42t zZtkIe@YwAl|G3nCZ~34}$ZTg%YWlq=s z?`XDcLRwLtc*^)C+CE$sj@=LLX+j65OPvmeuDi|F;2LV{@Wbb>8Pj^%QF2hu?tI7k zH_Tr4?H^=>6Lf8(djkKL(2`nqq)fX*uY+#9|ED?n?lltDq3JfBJ2f4qWxjnwl@bHnDlakaHdqmtDDM+^xvXq2{b z$XJKN0(4E{=8tdLTO=2&dxKxVhpGPq!9YI0gL_{2+ljd=slu&Is~qgvUOKQ-`cXZ| z#qLvh!<-<;ss6y3NO2e`=5w?e48PLIc~Z1YU2bGYBM?HjMNs6X_$0YjAZ>6BH}4*tobFtn7; zQQ9S|Wee5rjvf-4;mC&NiU&)_Eervfd)u29bvl;UJ-`XxSjRO`sZ#d0tm~?yrFLmMs7(@NUCZ_w4!EJrtg4PiF6XgecsMJiGagjC ziIPV3p#Ubf0?)4{$p~-9XA^;2jP#Rfo2woIr85>DdKha~9h{FxHW?0Vrx4g!^N)+8 
zvc}XGuZUqmfORaEnB%$TA4M!y1t>eW#DnnifenIVzTJJz@yx23l)GzUTm<}2sGWiyctag;P#@|x4w>}$atbyc~c_S7}O676uQ_woa8X|`!9{kfM4-?hTvGO2q(dKm? z8z;E@D9M&pnXcq0qq)PXrl2_??nwl2srn=2OY>ZmJmhOtcQ;nn9z?vJiP|IxxoJJ) zo@}l@MF7#_#h5)-6seAFjpMTN4EI|e17%RJ%6fX#f59sZ0>`PX& z;^|Ox5r|*JOVMw52MPe63aT5>mc3xoYFocl)*dc7fhTGQ5Wb-L0LyW`m^f%!XLG|! z%3D$O?764K%SJwLO|@C=kL^NvLupMbOCAAu57h6CQq|JWf4Su5gi`$)w9NDI@~+-H zI$xGLP9LwHB|VlrtO)>aw?xo9!?GeUcM{agO1w7r5GR! za}1c~zsV%WIjsS#_fFEU{7f9=K2fSW*!<)>pJ|u6p?%jYzt+ywH>}+Yt=Sz~qdl-j zYhVYRf%eC-efa!+_N?pmn?`Hx@zO``{(yCN&%1TX+Ja?G*P_Q`!7?A|8$+Vb>C>oRNDHx7wx{+ zz1D-xV^sop0oZ`0Mqz75vvgD zsj0oD>iboj=zIIISg?42{1g2n!2H>lewD{NIscY7IS8?8O#*E`<>?-r{^Pj^gRo%f zoB(`Ou9p>I?D?p^KMQ@{HMuNZ4a{kob33(ps~eEl~$Fh#t)SEm77F@$sgfh#ooT3eMrij$O|W_iWPc$Yd?oXxgOLv*h-srSxN-7!4^s^z%kC z%5l>@tP`eZPh08S1xpl*ZZ(n$NB(0uL7Qp^WqMNWCYo)V=L_y0$KAmwy*ty`RVOa8 zdg?by=4`cU(P-=encH1lUKA{)ANjn+o(wX0=CT8o&5IA1*azatygzieJh&<9Dj!N_ z50o$7CjKAx{shjR?5OWV)bx3>S*tJK=}4J08XAuO;2h$j#r7J?Dsr)FnD<5hu;`5Hsgmp*v21&1u|G@myo(!Qm^Xmzt(p5do%C;e&5J@tLk4} z|5nRLs9U;Gl{Yg_o;6OKlX)UeM5NMQ&r(utRN2L$7CXwD-O?_kQicd2s){!i-g=|4 z?>KGkAJ~@4?#!k(>kq7 z^wKFljCPp&nB_SLLN`-ZE9Bh)H0+5{bAEndulgYjm@S zTy#MPPVEb#=b`AH@xu;$%(}t)LXZX%l!YYyhM+5_YlI?b@Zj(qjV4TP;WvXm2XC-_ zL(u@{mGE7HFAoF@ZeEM$s(VZE(I6mB>g+A*s+_eycaQs2=`c>>kvTx}O({LwLUHbn zpfAJtP#fHAo65>>pZCRiDpzm@`A7@n_(;c~#s&!us}RS}Bbx>zr<;k={Kdc1R8NiV|MjzZ z7w}BgXZRmUs!W8K2(5a;8yX1g=8gnMaoZ8@aB7cVo1mnH{ak(7+_4?CZB6t#Q=F3U z)K}R#+AaLh)AR$+U|6F40F5CGiD|shFYTRucWRHe4D?EWAQUT(M;2(@<5LR3BK@! 
z2B8^O*1Sodb(CL4&t{rKX2qN(x&xaQvbL`AgDI|t(v+0Xpt@z#WX94eM+mm$pld1B zF<9ReJ*G96=bVNLF^$J=M?|SZ^*{>_Ekqdcd6mOl-WR@NqF;PTCBc>Ghq?)hw-4EOsmYc@HlVxTtJ0D)wSF-pQVch3pL;KE*dC)T=ayXE+(87 zgkv?y8dKp@JvH5|Yb=YNgda5i5P&CzYfOt_#xAGvh9GOKGVq6$MrfpX2zD-QT(C3U z1I=>@+idpibNP(jsyZ|#GmE|QjaFRWIC|9rYB%?ciQ~Z$Fhegxxdx8KT0A};>j;iN zJQ)KP{^x)HM{qoyK*4BIEJKf&@neyh=vB0hKdw;k^35&0)CeBe#?#6JPAPNo_;{=% zIR5Zhj8k!XS`LH(C&N#V9f6)W0#3v@rH!3-$2X^7=Iuuj+g>>F}1AUA1H8#7EprzRv=({puAN$;0_R_O?%|UR!zc9G}Gk06_%4047S_xQI za3grC3s?|b0Q;Z0%gq6mg#h?q?ZBy4Dfe? ze*>Ju6CX2VFxE4A#xCk~KBt$5>IgGP2TV40X~Xyrx&xalZK9C3ZnN#oKWC~c&!oZ! zLFf!Sv|w;(-zyeCmha@oBnJ5>=QC$Kl1F?raUn}l2Mt3LVU$Q(Noi>h%==JyR?VDw z!ESlXbWD>8OXqU#j{>GJ8bYiM6T%zbLE;LBDLYZUU|7TC#7qtjvtdZ0Z8K8a(R)xg zxHbY>88G3|XhBP1tUh%|`l|Ox^}rMlueFZOXxYSd#!pO6<%}wI>u1aA%Z{b{9cMN+ z)s9#?Y59D?oqW?CG+mOJtcCr)jlrqH(#n*)z<;O`5F!m!Zy31X1!g?E=atCHv6%(K zzQwfKjUl3&L`=J++MU3pQ9Nm37VVsv^)kYbXs0-jTO*~i2kIN;jiYT$+Kkm#i9*h@ zqBl%TfkT8Om{o&;K88lswv_4)-NAxE;E^g9{bK@VJ<=V79#k(Odg@g+ZJNs2=D{T! zgd_I{fnWrDVdg5PK0~XhqcjE5DJ<9S84lPF6#v6_-fIu;cf9SGii4?+rb$wLQd3^| z0of28Ry6L=B0$sQoZ0}hy`-b8XPAeYCQ=rN?y71BCYVOTR$cLix@JdRS#i-YKp=>@ zEhK=`@z|N4&@O#3Qe6C%pcMlh#S|Z3BxtpOALK;P1cCreUk#KV8ZRLLJpT;WW@MC-vMGIN(pp#g-TWo|{4Lp@@F#(cMBnV@6A-ZjlNd7GmF zQQfd{v0{l_!381QUCh{$iX|G{)H#is>8lH=v=4 z@~*2N`h|5p8(F-wXJzG?RT#7sbbFRi*>lZ>IJ`H_4-=IQZA|stG~XfANT?hHVBiQm zmz5sDZ%^^dqBs1{ptVwh7U;_$uyLj0*J-pzl+I}zA%*DBL}&Oog7z|+4|<{_w5zUY zPN7dQ*@*eSjPQy!2{e$LGe^X%t8swP!Znvv7yK*XH32hRIGe92Z_2|VJlbdj(W28) zaa{u^@2`mYplxgOk!VW!jfRGqved!8=DEP~ea)w$uTE04Qn_wnp=#qoN%>7}Ev|VE zfv4(*Kc_6dnrM^SS%4XMvmepg-CAh65(M>vWr9fH5P#y{C{CEETcO{wN=$WZYMcv4&;>ql&=h1&%ALPexkRvh7e`lIFP{M1FD@4l z#aad~&O|#A6ahC!4^CeB`MUA|!WNf;sI=qpqVw@vaGDdH<@LTR6pL=^4^5=FXaloi z0ev8vWKJb+l$bUHx?bkBfYZu58MM5Nu8tlYhWo|Ix-a4S`zY4&cwAq9d+a1QdT`y< z@qQH0?ty4?V{_vu$pRi%-r{t;sz;YxE$^b-A6MSVHlDbTVx5S0{C06ZdV2JLEBZ96 zc{X3ftXR>_iXk|{Y$@L>58ZQDWFjohx7EznH@EDd7g&AcoW7y5zJXvkp4nieW2kSU z{?H!0(zI50-)^qwU0W3TU?twa_tRFq_LvoSuK2F;vEKWsa@n6BKQIwC=tSm#Rwlw$ 
za3lCaOoaJIph)G<(nQ$VSAU&d``Cy58{>-wtg$e8h3TyAm%hR^E*}29-*IzbUv9tz z+Dm@?XDnoEytacuQ=!=2PwS!G1?;NaeY`ruDwwxt;#hp7)%%y*F|Z_Jpu96SjgCtl;Sa ziMM{oJKl{*CHjwM$K~0_+9A2lvSM)N*`ApMk){UO*uVtQ)FsuC|qN*C{TWG?_F&r zZlExj+Gs!EST@vZ5qw^R3_R)Xt2b?&@}=0mVs#ut-ydD3TNz-WB2_;eSwJ-`jfAVc5&TO%D)&KSRQ6m zI%)f=XK6CCFq8BJI;M=!PRR5FH?5Y|fWchc`>yTVAKiJc?GG^PX5J3`bRhI$&5t>) zl)|^6Yr+x>`{%K!nd91W8l1V^jACBcUgl{%FGl>dT?o*B zk@Ci`2xA$)Ce>2T7>1F!ROk@44nU6K%9bt8tZaRkf)J4|6__Qwb|; zEKJo!n4F`Ym>op;r{;J+ao`|+qyPjG_&UKvpwBl7$2{PgXmAXfA$U^zvue{gSF<3Q zwee7MaoAPc25wGls<_ONXp5jB7ZXil=4?FZiT);P+otlC^ueIHSM-S>;|(U=Z3exBiT7essXEqpZnF(&#|+v4lkAa;+lE<`1_wbzgfa{`Rmz<@{Yg%qo1Q^L|xQUfd8nbmDN{e zy>kntylXn(e<##9>>`NLIWjM%@gmwiy^SZ!JAOV6r?-(Tqu^s{PPXaf{qZurN=MI# zdc2MPd#t?E`mxz)T5YZ7yK?vH)Q1uStiku~1eH#c3ta^}nhYYZmV(zyH7Lk;`GyRX@&K6}yb zec-ZdtHx9M#>?HOF{;t7?}%GkSM7~kdAn`YwVU<5u{oPqsnM|AgFQ>0Ip=j;9*zAy z8!Y@StOP78xDh5s)z4F2r&+YARV{5Kit!DN$ zuZml>Hn(CSX0Q1=vv2-$T2rUBHcxZbvMn9AzxWs8_VSlSLY2iQo&!smK{&O~Jl40z zdOLQp+ptURw(aX%{aQb;{czyd&1TH1Mfe8Sr#}rY?`w^niQ8H!l9c3h9gJpn=Iq?= zy7Tk5b17i?dw=im+0M?6?d|PF0bgSH`JexJ`}C(jt+h(m-u13`S-oDjhaY};@$_`~ zu^;;}t5hnVH{J?Xu!65Vh`XwvPsRM|n)!_#kjq_=iv?)opy8Snn`bsGgS~mhTP(4i zSxFX?z01zT#!mj)AhckuVyR5VGC}Bq1hiE0y2gh6M5~sb61MaKP2QGDT0b7Q?VUYq zrxMO!8JDre->}r4j`cJ4;@h8PYl*pS&&GB&leFw`Y@I8YEHRn8rbt)iW`n@;qmd@; zu{(@rhdTm;Os{WYq2R&?%+E|P3A0gmlUH5U5$E{BT;7ENm1@OudNZvleY#e$br{Zi z(_pe^l1N&0y>4qM;hFfF$6(X)dkaIF#RTK$0M_e@8XBCiv#`kpl`#B z-fTK@LS$9XFhc}08I25>_VHF%2%VbNfYwqz>?|KE?NpOHJ{_>%kM_qr9`oJwt#02J z8|h5eO?SnnbK8RnsyFB?ld|+^;yxSM$~AFR-ni5E1zBG7kn6Ur zHtE^CamC_;mhPLX^UwnIaR&zHJR!X2EzFceV?}Gl^ER%nTQE~zDzB(G%FIoJjY|bL z!Iczkfs>x-7%d)l@3ZX!^FL@n;3tB0cd&NOwFWAK181^hHprAmMdx`jXEC)quDl28 z>tHf-XV`t=lwIq0U*54#-gTd~R6b@=5MIFCMRNq9%340_f;&EPFkQC=y@;N-3waN( zSIhcH$=fYk8?Kdt{|p$oDbWuC3fGiUouT=j+JgU_jOq?l6&Jx9zCaN6pap^^1=)`EeNzDpaFyy2nVzHqS3s=j^?C3^5)kbvEu$k zOL3@-bbZ^#H{D@j{k)|%&RUQwSV}l4BBWG#apjRlII8v_Sn8;byV;Zr!O+5(U?z+;`(KEslgqs>^!i}&eS`S471JP1q>n0bjq&s`U 
zx!S2TEztmG%Miwxj*-SHLPLBr!K}ys4uUlp?KoF2i4FseiJ5569rA-4;R9wo{$ysV zdr5uCd@;JHIahh(w`m=R@#Lquqq4SUyUJsz{=uivd@#_sVW(2owRI|@-!wE0!-0NP z9(na=TX<>+*Z5w+{{fl>f#NeKS-aJMm{`4Re32Wj3Z+8~}beYaaK z>}u?HUE}FYB4u^vG~uk#?b%Fed+mSJ^dR87ctga9H#4NB- z4WH%K?LbFccn%bcK1-?WIa*_?CO(5=`tWBC<~@=qBV<*dDh_^R=m)f%Fu|5qUGO~x ziw4t;LA7K#;c+yb*q+iBg?}{hI>n+THI`;*f+5q9_rk|3>d(f5RSo z9|N)Fwcke1i0i2cBZzx`1Tet)i&F7GSBp3&-~T7eTfYAf%k+dt<$1_5n6uz`dCR-K z-F-J@na!d*8RfEZ z^7=309B-rFFP|^kbX^lz$ zljSYrEXzDuo*%?sw2^E6ql8i1<7J*KkH08obnVGFJwY2i?qPXLnEU$%%}esfFV_u0 zN%OMB(%5wd9D9pG@StW4&A}T9@9GJ@^Ht-snBk@J!8*#Zq1(i{OGvt z*=4h0+^)>q_Gr7SwdT~*JrT6~Q_E+s|3=f2*IxX>r_XoWum0+<+WX%3zQx^tAF%Q0 zAN`|$w784l_SMuH;JGu5c6avN6vX=G z+Ty9Nfqg+v{8)5hvz<#8%Vk_!0cLG12yJUTa;7k*s@%L)JZ5Rlz#BSSDesI_{8~&E z4|6uV^@^LZf&sddN?UrZY8w}BvW*wK&~ELv>_RGTx2lY_gIy~PN7i6h^6j_Tw$jE6 zIp1j3&1T&s6Gv_|vXmyO84StZzz(L7Uypt=G8crCav0#f`P}lOp-pmG-}#E|wZhQx$Zlv9k(1}PEmz1p&MK-8&g#3(mJ22Jbd5#?^~Apf4!?`a zgPqvB2WXQ_y^NdDG!hO=sz3b#0~qsYeVDwIullk3-ol}UHgl#o{!J>%C$ID!f1YuZ zNJg0Y6TPKgW!vUTUl_Kn)ofd@)pjOqM)ebd?B;aJ4(2nb83a%WT5w(snG}Qr%uG#t zJvVdKS6|^U+nMdE=Z4Bk^aH~T(Ns9>$KqB+b4cMZs?i)kJ11Q!S)4u|j$Lyj6$~w{ z{yl@hWg1v<+_mzgW1D5s&|LjG89L*1ptR#y%KEdE^|B@F)X&;pW!-`rS}187iRST1 zg0N!>Q(5KWJRct~6_tS*IQ(T?-ECN}cGgC{u9Xn(sEr83%A%Nw>Vz3AcHx8j4fHAIuZo$J(@sV8*?=*b z&)L~R!OH62c`9R-V#z8h6CC3F9P?ml)p=e0j;|5SxnYi|uLD0OXzAp2FplvRqyi1? 
z4}>R}`a;BpU@^l>O+?8H#FZQELT5cwsEWV*DqMIv~H7Z!3Dd4${Yxn1?5){g&*}1etc+$ zn~)<FXCg%0Rk`4fo!|(s5Wu310xcxEJr@Xq z%MtUP%E)WpX((*3Sg>I}Z*??QG=FVsE{UrhnA1b(hDKRlw5jiH8@JmQR*LEg{OL?= zme1IpKBU1*ML0#Ei=efGU#tE=<45>VUCXKynkVsIpm}EECg?CRhuR>cL zlu6Kat&OzWi(spV#)mO9z48f$}WqdA`$kAvIs?Xim*$ns0i0 zRtXQR*1BR<-D7evapoD;vy$~Sj)&>8bv0)R$<|a~%|$=V_3dieO&8Ah4_x~U4Mj|U zAs|7(h=3`hF*&=o=h|BcrgMYPwW`n@Dh{IeMoM`z{|+?gRj%&1Biw7MTf5V9`o+&n zS#j0~)BJsPMzs&96592~T4Alf7TscrO( zqbyHrG?(y>YS(bUQ-?DT@^Co`KJUmK7yu&gV@sG;8 zzKxG7ZxMf4CYQ_dxZ!lXjRZueMR^>HG=$xVZkA=9EYE`v!-z}pvGR^VOcM`2T1Xm> zo?z#>j<>%ou-k4L*vpkQBSiuTbu!8>%KzpTk_1=|Z`xQKG;0@n6bFGxMa-`-_D<$m@@9es81wg~3zIZkm zVwUyB8(rA^_(-E`AHB3=#r3&uoGn|b@?6VrzFNoESaIVOR;a(oZhQ7^cGKRAOM%nVD=Z4z3+3cdm$ z@s_uL=aCN=-oQHTu9YjL$d0z2%{|UyVXfbY=@YY&7P#TgMc=9X;_<{D z>-KC|$XO7CRvitkn@wA&sDaJgG?NQ7hG==r+yo5{#9_?BfJYF4Mn`2ZvaOqLvs>yL zcE@O7$#Z8d=yj}F!k&B90@VrU+=;7Kol%R{My6DHghAFDfb0#`@ z;PV8tO5}yJe9Y<~=s-|`t8q+5;gbRZ8%)NT>J=+htX-(9-v$;N_id*9V~UrlR;{4+ znfe{yD$Yn&o!a5Zs=~Jlb%th6k8m{3&Ut((;CF#nJ?6>gxvUGpYA~D?Cq7i&3wgzl zd@f+tr=l}vlW=uSqheA0t+;3h&~EoDq4?z>utXS$20|;11Ir8obuCO~ zwM%uy2g(c|F2clE$G}pSN@bLP!nKSL2*RXh$1U0mag`B}cm0ATCxPf=;wJ3q+nL&z zgL$q#01pY#Hhx|vqNS$FopuNIiAKx*#5ENFUUVlIk}9L(NmbR)>ZcMRhIEf^7tGsh8*G{C+3kB@=EZO_p-2rPbZ#pwqh zP57@t<7A}qi1|4*fuKX^4}l-1t`Jfo$iU&g`yx@5(AQ_LC0?X2e5uFhG}%v^Ox7)$vX(E$Py{OVv*uBWnl!c#)y1HmL_z!*QK za@{1}aBOFkFB)Q9;Sy7MQ_VY=N1%OZ$>5V|F1n>Z@egANj+7SdBac_x)c6n`sl0aB zweJ2^D=OcqaDXrZ6JHb2FIs-(USK7(#Z*s(dI(g(YhLX`Ypt(4*KAT4bClDf@RhAq zU9dIS-&1~~>p@$%2`$#_Xbk3TTHdfhu42=9WL9!M*4&`ByY?RfB4}Lgj<3}$-|D#U z6W%?R%_+U&s2)j`%h*SN$UK0S4<^?T#w1mCsD2JL?_+KcUpZOfbE*Dwl(`C%kOw;Rp6F;iZ?sxAz4Z?NxL&@CqtO_>kJ`j((weLO7F8^n@P|TRe*Hk4}p+iFti_oR?)1`}*?8FFF%?>9l*It%>lk*rc*3&rh5cMwvuD ziBs}EEHgTJ*y53&J&0pb9zUnGk@M+vvnZ30^!W5f2~oh}w%x%v zsJ>bBUKr^eGt{^p4u&?;d7$%dxoY_{XT8kk!GX2W2GjM*MA!;$1W$DnVJld{3ch@B z+nfHft$eSn;L8n9*7wSHfA7p*_v*Owb+e(G zQX{ZW?e5y_trE}MS~;%uT-+YG*t4?Ur}R0YeD~skDuP&^46w(<6rr4``YjQF1zWLn{91v&F;GEE;kXzn)sdX 
ze5d`;5B-q6{`IeSAAo%O0o=dxm9Mmyz3gT7qd)qi_QDsw&`o{)zz_U@$G`2i+w2pc z_{8D2;5UBbH`cQ}qV@43k38bxZ+`Qe?L!~>kediQY5I$}Kk*YkVc+?k-)Z0a zt>0?j^iAL7c|QBu&$f})h;MnzTkQM3@B8c-&v=H1pFMll{`TMg+x~4#+*++>ci(-t zHJeR)?6Jq}TfXI6?00|ncRlS2Rj4aLRzGG@2ND%bsdw7bq=z~oM- z{9uZg^bE64=~l}oSq_OIedY{%T-~pjp7Kc^ICU%fhKR z>%N-q@d42phLN9{>CBmv?Dxe)8I0qh>e*Mhm=PPHI;v-*L8u?ZDs1GBTYoTcO%1N6 zYBz0|=1R7c5FR=QmJB+URz8`0-kFXg;T0201C=pSUB+rJOkS z-AdiUY(=zKwJ=|^e!gsTL)jUqTg6DC{8y5r@p z;@~LXP3S>us$-})EgderfL@hXLC^44lfC)bHV?Ws-@odb0%&VhVSV<+(GMSc zF60qDQVHd!b|OH*FH=orA>4s+hJdsnnn9}t4IXt`TnI6a2Wgy-pc)fxk}U(biiC!;bEx&j)) zwyIU9kBOe6b(73#?w$2DuQjdMx~#SamM@mAS=_X4Y1`WQx=m6g3xwyG+MCt8V_*4n zL?=nj#i7PA;}QQ%S(RH>-&`K_EjbyvW?5e0nC)|4Hi?wx4bhnJlF%H8S+Z^+Z@F^O zroqS>#dQ}puB9e+wW_e5`d)KcRk*6^`32D_bJ$39hNdAJpXV@nsBu2-G<63O_+K!z zb*1ba7PtQ6VNnFZ@_ifup^D+pqqKzRaK<^|JdOS$Ak3rt^|CxJ7D3U^f3sKjR*Qar9AREUgnANqOhYf@$0cxuepW`2TXbvA$z-Mxa``&{(;H`qhubH z$6o|oohZ*QkIFn*UKC{+kt-q{wej#+;C1;I0dY}O4O}3wj^8*BzHu9)l+iChsONXG z%p3J1L3)0}HZH?%&_+T%uN$@TWO*K!|ESEzmlpxgct^4!gumekj`ib#(k*3-ggdsFM1(yh97517hRtL{X_;^}ib&iI<>{C2^f_1wCB z<|?b()4|I;VIFJ=tn<#FJ-xDQ#C-G5{DfV*|3UlppZu%#-uJ%Og%eA7$2;C(rBca$ z?&p5ae(@K7(cbp9x7l-^^PI&~v)8`%wf6Ckf82ihr+?ac1b^+X{Wbgj-~W9-pF4NX zUi6|Dc|3$LuYUEb?H~Muf8ah|UiZ4!*@X)i{Mn7*mRoLd;mhCu`+wj5`d|O+)@U^B z@BZDt>;7SYulbs!$3-yn8^7@z9(M&R zSix5gXr@~%oCPpCS>o>P?E9p=vAOP{zIgU$(GkVwmwPjuEZfc{*H+k?VuEUBLvqe% z4d>^%(J<1I&_Xy&#w@JYA`OL3-_0Xo{~t|_tz^P?x34EMmbvM6Tdx+avbAPwnUrm? z3o?_n@{CQu8Jq81v92b{jECS1g<4^EOtVFsO&IxD|AdqvL$OVEbCYv#l+wNqcnAuxz<#F_jCmv`ly^Wurpg zhWI^*Cu}>XytDus!(gU8N{62l;yRO2XZ&m+xWVzbo1jv>aT3SjQ)lqnXk^{_)Y57b z3{3oGzm#OqV*7-SIf4 zMx-`_L96m$K%*IgDJtqR8;)ID1OZYTL5sqY8+99I$~ID;RIff{@##?gfSHb@wfcQ4 ztIe4LOiQ&(`P0Yv@fhhouYShI3XD^nOzRD*J`>)RFFqyMl~2F(@`Q0YQ++W{M*Rn^ zj{EC~tKImJL6Cxw2(v+BNKbgFsV@;);YVenduS56tt$`AQlW8CDdt@OLcDgP?dF+q zXzdJQ@MJ^F2W`ttT9)qYTe{n{#4xZ7OdGUT=E6f(FY-dwM)|s~g^NMQ28Le@;Xrk! 
zT@{sq)(%egdGYVeXVz)=E!G{_gO_*hzua}d-Tlx-!^cj{g>6ZveHhu$CR#9S>YIA0 zXd49bF)iTg_V^D+9r|1{h1|o58hym;~Quse1xhO z!jYVwZz+$S;<5{TsPQ|6k*522GHz=wfD+y5KtO>Ih>so^^yqDX_Oa3rQ)!D`xWlIH zeG3~qPFFbf9;r-^5oteC&`!x znkgMZD>sqF5K+1Sx>Yz@R~zYcWHwCcH7kgY)m6g5MAw+PLJOtQ>G&Lz5q)omf#sT4 zEq~>6mTFwIjlFBuRi9*PXKlK9tHsaUs_|M?@2Vx#QM8Nz;OGBF-u~MkoVA~S&%SG4 z?e_+5+N*xLFj!Ds4YqSg=lVz?;TiCag&;R7+1ErX&L43 z0;hD+dW;K|HPoEe3pMYk-fEuXXQsXh2E#}jPhA%< z(&EAyW~~qyPm&0?qmSM3u&;4+;QiFs7-Fm^L|aMCDRZ?0|0kH68>;>Zwc9oKL|+}~ zQsrc4WG*^qzSNw1S$H0(?6Shp)WYfi74-=kObGb`rNy)w8a)HyD$!H<$}6XlU2a~p z^8=0PaNnw`6N2q#ykxCR-P)O&rD~g&*W8Qg$fEEBol&o@@Yw2ymRI|k1JTBuisqp| z=2I8$38!ePG3JN;fmIt#_x;i=)U2!b%UW;GHS;zRdMOXy7x=cpS^i_1&!&} zQkl1^5AIdI`@(tb0I?6B&b_})1A{E6GRgs42`2^YAi95#cXcF((DSKlxQ zT=q3KI_-{)h2v$qKTR8t&I(`Rl;a87h!${lEn1jH@I-Aqp3f=$=w}_Lw1F~jL^nJ? zy}V_<5j?KEC)vi+o=4>!f$w%yeq=e+xH}DAap4hr(ev-n*BQHpqx>)2+_3)h%{q;a z^_qIe>b)B1y&Ad(%SdBXbL?7ek$24BGb_!M6+GRcbpB?$^v8c_fAUNJ*gB6qY-eBf z+QpL^XCmh4fOc*-L)9NEG4hMV!};5=Rn1d~y zCfZguZ&_8bl`l9mJ79sI&R9}qAwa=MR3e$O;dE|OwH-5D_)p0V0!vkD)>Ph@N2-K6t>H?>qrrL9=VTSaNYdCsPZ zlx_6(EiW7*h=B=?xwQ?&fyp`5i_SGBnCgI#C!zF)c_Q@_%>9Y_!A&EnzBq3OKmAJ4O)bHcr#}gUVGIMlq60>KzumRI zD7&dXgUQ)foK7)s_4PG3Z8XefT=SqWye5YeOO2;0YiOzI$gy@``UR<}ZShIsz(%S0hf#@w1Esa71UvvBLm0kPzBUi26 z?YbsWuivu_JJm&>Fs3qU-$tqEnki`Is6noofwl?4zKm$^timt2S-hnCdMT()>AATR z`+@SpbXY~to9f45v*lVx2xAZ?vF9balB9YXg1&& zA8luZKlH1AX6c;K7{p|kYjmm2n8xZVPo9-rt0-*;s{c&=h(8i&)lFY1PsV3PV-mq! zQuILjyvoJo6eh@qse}tQ#%d3=a&URF7OZLP6Q`54V@6tCtL-R)0rosh2ThP1;)PH-TeN3<+ z4DAS~IJd{VpKE&|XwvvVD-c1&u+!7LB3e-#G}!2OaD~4onEMmqZ`==@hB3D`SKr{b zCtEH1SPO;!j;_OY$AYQKhXzMO*K&)uM7zDfov4qyfeWk(VfnAvgY8N`U#pVFgIylor`9(qMcA}$uTb|ZBp%==}2l^Bdo1! 
z{=^hxsB3g1=*z5*^hj}Zj{pTh`o13;1TZ+2Ppa+Ao$>kDDjm`5q^)<-fz6;__09hJ zd26kmx4r5a>*cDpro~9I*|CYn6LpBIp2hl_o8=p+{7`gSU9Vd*t@%p**VjG$w5RgB zxr+72F}tO4$ugCaon;*AD3nWS%JippHZee&f2*FT9_r_-^uHN=h4y@O||>&5Nq zZRGmo@pyQAXcUsuxT>jK7r1O~T2cLowkf8{__o>GJ+QWD9CL1e+BRMf$M1E$xZU45 zutK3=X?=Gb0S`|eOE|hd9vZ=9Y#+7U|~`d&Wy+c3 zWCbgDxr|o5b`LF9+?W`X!{NcZ} zh+%f~o4(05U;Hu+ubAEb)^E3${H34q=h+%>Q?nQU4}ZnZeeKt|=1jU&wio@ypR||$ z)X&y_V=UBX#i%!*4Nn}{&;}jbsV@SJ+QH{VL$RCKjK<2OHCNQ z-=7E&&|qeaw3LaX`lWfYlfdS$Wmzj&!3w^z!Dl+AG(wpGmh-mWY1u|8Z(+M_nfc86+gr95OSs9HKw%}7Tc6AOgp!!Dv+(mtZ#QjmaKQv`^5sz~@L&`QZc_p-#8g_Z{E8;m{fH zyfXSQRMjE)*g(pUrU1TAn9dQT!1yk!UNE}d9L{j;=DJ{HXQy*FXEaSk&bv+h*DP%s zj^3;N12@HE3ZJUKz#R_n(eP=jZkTE7>x_Slfc_EA*=-MTv5OxQQ|=pvuYke8%}gDr z%yGNp!V(;U1K<&7_SC-#(-&r|%IWIaBp6yuc&Uk2vcf}Jw9uL8#i;g}@P}^$%tfhT zmaeHERkv-W8wzijTRO*%YV`q3QB0|+C>IW(0fm!yOcRZhahsKLu8F}OjNW8ul~LdD zm!h|#CAtSz#PpEDQ_3SL{898ImNCm#tdn1}$uzBW!pmr8@4NF}d;de1Y&Z-omChQQ zGjjQYo3g9IOm@KqPU(`O&x{pSe;lb_$Yl(6T&0k==U1!N4M$e6xeHU`(1_Z6pfQkA zKh%|O8V2^5#vDR5OoYK-!VDAI7x>wjYn(yn?7&Z}YuA-;oOTO;bw!S4RTr4|!uejO zYoTZXO)F?;qB<5-1{zB7`le0rHL`zMxM;e+C475SMaMCi*sBSRDp$WeJ zV4j<31X^~0u7%S|KJQu*Xr*B+EK9iy`p=5iD(aiK;^maTgUVzMsi=+61PletXm!;- zv@_|>=A>)!Z?eZvoJTXq~;ytV6s6tw0LXie04|M16xa5$!Oi75zrQFwL`T1sPgAX-VQt_auG zRIj-5>FOD{hp`&am$j-*MDLh!>&NC6s2-R`8*r^Q4H2a4TvexM+Pj(~dbY9msO8%G zmR6fCrRx^d&#Rsl%jK%BL6*Y&Tfc8(wPUkXvZ=;SE2Z|MamU;%x+%x97ACV+nF{}E zPehj4hO z2-iK)&2p$52jW~G?68wX9-^yGW$sEyad zVbGtVjY~MpOK?=)@z7&;H)!KYASeo6lyigf059{jOdJs2M;a`(V#coQUiEk9ad`8y zAJanhyd@Hv`L4?=W*hacRlh!N`>m$+nqAGyJqtPmyQ=x4arS1b+HK>)v4fYghD*o3>0P5_s31`D{D5 z=QFnTl2=%Guy3FI-~Nd`_#fZnCdYt8ChMae&6(;m?(lN|2Uufu(346KKdyTu$fwGyVHQpTnH}e^@Ryu z0m6>s0+%NX2L}iCE5Gt9_ESIgQ+{B+>pkyz&*JInfG}rwch}92VFv8CfBU!neg!L7 z!B;oHi}hWd>^9!q+VDWaj+VAZXQsZB;gg_eSNC@8swOxBx5g9O$>l7}r7h6)gnTDc z+bI@pstt66mrx{Z1?L^98Pft|hOf}Nd@wFu9-ZF(HpkM%s)?c2K2 z9E2momq@1Hv!*80^cDbQi?Q=bv*bqjz7Dlbu+%D-w z!XgGUNbD}h*NHoo);-z;l+A8`9Hb*$8nSD>-LZYu1LiX8A9j#q9t)p1LAPfE!WG{# 
z8S8{Yw~L=rJLag43K#hDXsDiT7~48b@5e@NU3ChrK4{uFm9&oXbkj|$hnop1=H1@} zbdy&(IPOm?j5_l=XSe!jICP?^tB*(v(<82Mn2Ejl%$lWwGvp^K8zG19^j93(fWLw? z41ey06MRP?XxUdi3Qm1U6W4orK5b$PXQUuXQ*utVSdQ7Bnf_}&Ra zD@{dPsyEDbgi%$|2?CC6f9MSQvT%+z!9lL(O#Di3&(?zhSj;+ft*R`ebB@ zQRu!(0_D+3W$oeN-0m67ExmEUo_+frwtnGO(O}-qT4hvrO?4TnPC3!+Mm8%tO1g$o zrBZY=UHI?7PH0Z;L35<2u%X&hQ2uq%>rC{0wyt_9O;`C1@;SRL75N;YZ}ONQLhv*k zi`LY3r3J4DJu>Q}OM`)};p0Ylgo%xbJT!k`99L9F(2HOyp>gZN6{QO^MJsKdv(BUU zdVArGp(Q~bLd6T!CWL?pt!An_ZNLvjR`^GI3z|fu3E>@WD<~dXDwr2!=L*j3`7A@g zfRJS_`WVfGQ)ps5av>09(P!}Zrt0&!+R)V)#&=6>|3KqzZcUBLl;}De3|*^4QB~Ku z(}!!12&V(lYC-wJ@M|mmmgo?AL1=;bzLdr>j@Mz%Yn{n(HEM;oVe7T-88gqPhBKiXsXR2iwY8MXc5h~`?&c5n^ zua~68##GPI`f0R#s!PiHn(x}e7v{oX{qh-{(0r6s|6?976$b7oy`i$85orH`MpIQO zS~ID!CcNiTaXX_n;8zNvM_jZ&afAq`8uJ64H#N_W(@EDLL@-e74s2Z=!N+w+_{Th9 zPW^{=6oMTDuh7*ntvONcPAmU$*tg`U;~Hp;@impLPIfbWYLE+h`@%z8eY&Uo(!z_6 zIn9A+do@H8C4?tRhei?tQT#ulbvRIaC%LTkm0wBylUH9OEILqGeV!FGuPZN1ktJ2f zHI0Wj^DOOG81oJv{p)%sfuk7oHJUo^V1DB)9p5MEyl7+4utM{aRnTx5_id#?3Y=)P$Is#*^k5l{Ha4;Ul>;>C`P}Rqu-4S;K>)k8`?<*^XlgTrb1@La==9{BPYHkH3V& zG!D@fzl$s!mT{Kxj-Nf=BJwgED^bVGL|}n75svAjsK*)k(e)yWLs%p~Jb-09zh5Nq zF#SAH8wouMkFNM#mWiOZTrJz?X5F^8H?3N&Xn`8{%_X~Nxa{m(^8o*I2&E%9Ugk-N z@_oFp0>R}}w1PGba6I|(^I`b1f`TV%Q#LTo;tNygv?6 z)Mc4Rzl$sm;V|!G=i`6|9@-^m*DAJ|4(!99`m{ZC>A*tGCz;VDd(*3mR;;|n63L~B ziDS9w@|Nf5JLUF)bsJrM?}Thj63`>9X(b^DAlr;540blU5n6;HS-&<@D5v$w%*OewGrg+WyuJDM z+pSQ@*?~U(AIj!5*uuuZkZ?|m!+lJE zt!H?iwo$vI6;IwL`^iM@WM{c1%DqC_&Sf-7!t6^SWQe&>hDxpK=8%T*gcXWKyQ27& ze8KBjoJ0)%sq$jC|0o=~-vyYU?CQs73-22=TQCiV#tK>^L)91U0hrt{D$z(`*F4Nl z>WsgI0Za>p&s2X*?BI)mUEMIt69|jcH`qj{-YR>T-n0Hnigp-s{}n~N|G-x zF|-Iesz*-oca%Oin_EJ7VV^P%&Cy)R=p{6%l-$fu8fL8WM92oaDqAVrB$;rB_ywKE zovy{z{*vn0==I$P#YFudXScZe9BmkUFQ7qyK#uYXDi0H2IAcd$ls!11$_O-xkv~=CGjmk?Ko3dSPfKg(sA5r~@W5{yavdg7tEBH{q2TG;CNb zSWV9`E#|@;g{QhOf~HoOkF;oQSV!&M**mbyt&aL6Y2&nTgXW6*yO1wgX??@8#gZjN zr;HwUzvt5l_rrle3V{ly!fGm;KH1RGS04B_f?*EB8k2TK#+7hJUl5mXtEq60zaF%# 
z5ZE+DO9+$@UJP}eR~bT%Gs&yUr>*#W2O(U6!HI^tq?J_$)xsNPT;{hxrfg5#B(5>}J6S1+&IA%?)+6$F;J+x$2rgaH71J z14b&#&BLj^WA!1<*9*F493WUkV9PV;0W)CE5X_tkhfhZQGr;^q5Ql#nCcLid4E-QP!Yp1EGo}+$(Ur=bLIxoaL!zz4|}zxr#?jR3?RgdYaU7~E`C!IqFu}ln&#*+f+*$= zwPUJwfeO>+_2M_I+;-ImoDo2E4<1|3^(?neSby=ywjob{9Hml&E= zY0Ikqd!=>DSzzf#!{RC*lX++<_LO%{M*vO`wyFIH%hNnp7^dIOD}GaLn5uvBdiJR3 zf_XWA_Ik&PLPd05TySDkapBjC2b^L;9Jq@@BJgWH^yfz*{u@2hKNp_q^6BFF;>J(< z;e1%$;)gO14w^n@&=`rrd2o30h@Aa;5q=z&x5vvn*~UY`!wuW0|HJU3U%!fO4}n`2 zYrGE%#k>_ocX^#Bq7ejt9nJ5U*&OLz3d5fF|M4uMJoU?ebaKFDKC6sO!N-DW^H3YgZ4h*uxhewYNY2VSDMzUa0xz`5Mc+dV%cOe0rb? z_4J+>{-=SZVl5l$``}L=C|FzXqw&kX#?HO%?LOa==kjPZo3?x9vWH`;;hE2TW>l#Y z(ARgyN)Yqa3-O$2G%1=%=*&Ao2hMtHXTHH2mw%U!7(etn<1^2XIUh-XhLA{S?D_Bd z5xf8Y`nN6^IS$oZpW&uodzX*Qk>vD^`K-5pk6r%YA6fJMdu{#sFS2VN|A>VLdv^0T ze3RNU^Z5OptN!fbAHLTf{=MI^s~V?!cm9dzarSFo>uFe9Ac)!hwKl4av&p3w<+Q&af+lJ-$AN}FupJCn}ARsPeFqOzbbd!aff~1_kEuWUw-Fze#buXiD;7mPUsQF;N%{7*0Y}F)?@Cz`)|4I&TkPAu?c3}vZ+VNo=}m8PArJY#^rbI7j0fOMpSk%S1^^bv1a*wO~kXQ^)d;Ibp!XU z(a-0sI-S~cp8G<#72FwgeAj&~82FB=Rz7d@cH6R@uA9{ZUT5OU7;7YMQAWcAj4y&#g9&e3yi!ie>lrtWjOFvXGWg``FPx z(939DFDHZ_P2mI9Qc`7h6`mKaV4TNCp*we;x7ya4PAtsg$7F0lKX6|u(`jV32jK~I zW=CvSF9Mi=FvNR$XW~dWsa4?87Ytf6n7xY87V;4ed9~uFVjm$!JZ>Z55#g2hD0tKv zrfPistc~*}OJ97z?M`|>AdFB<%)NjknAoL4-iGb2)nFP5wFn^&MiZ;CL%rRx`sRiW zm32n-!0GmUH1?hN&=gt*2zgKk#lboZo=Fh8?**9k^Fd(KG=c}wq{0w%VKT@;b&cn< zkst(O6kgGIKu{7YPXtM5VOXu=rm4!R?@aib5GNej7y*yk?b=1E`&4al#yzIGRBoX} zyZTmC*`4C16{l+Xpy}yT;n-T>OJgP9>ARV(nfe-2TnJ=h&5r%<0}tB-p`pTyqKisK zA1_7Gs%SQ!%R5sS9{~u*VEv=PF%No?Bl976)iX5<6H!~>Nw~vYgVU?Z0Cxx_&?<>5 z4(1@#@xJ>VjfA-t;|Pl|g=C^BG=4Kc$Cu`l5r54m5Mnc0uS+L^z>3jcJeC z2eW}OfZrmR>Uq__t+@D0!Hg8XL-6TQ6ps3$vDjcFx=wpuZRHIEKcPJErJ@POg@SCW zfmZPgQ@rKbqW_M~drkXuZ#R3_PkqSV|A)`F-}s*@kyZv@hvhY!rqu8AxYs*9SGk%i zGGWhs;J8l?(V+`RRZj#F;YjIpMyOEBsb5tt+Gno$0c7+KxH-9m=G2zz zb(8X$3tz2n-?i}yxtujxt;nQb(AU2{L=|t`?^#c68Hv{CnirTK=IWQS%EK248g)$U z2!4BG{N1QSgu~fXV^?)Rs|0g`+1iF{?8Fi&8#VSVHf+209@;Bo(HH&|&}5pZu8gyx 
z=qjr^A$S!MeY`T7^U5!e7KiGCCLyN#X3XF8yW*f-Q`hsy)Hevh7|SX33-f16a{yW_ z$zIRnY@}x7HJ?}KLz{HFmMZ3b zV}Pj91^X$bftK)p6f2kQvdYP6&Mq<^iFSsXyUr?~w)&&3{#<+2pR-v)v%KO9`-ksi zhvV;JuGD_$gvUqWi@U{%a7Bw?aUTIOnTM~tyW#u)WO>UtZP6tfVC$P}hxte8qBzSi zZmuhj3-Zt(WgexA$^)(|lemiqr?xSAaCjfVQJE*p^J|@tmw8=zy6>s4Fd;S(&2rCN zzyb|nOq9+O`I$>hkJUscX^jWqx-y9tWv`2$%{0rOO-nLYsXyr=PZoU2XD8Vrp4g2<- z?R)J-ulyf0KG7h4z-Hrn^sf4>=AUai26~_DYmUEW4?dK&Kf3$PwsFhL7pcwO^RIr* z-u`_*u(&gO=)ni=&X0ar{gm>ZjoV%}Gs#r$fy?Rx~`1sV3 zNagL;m;M8L@FPF1j|$PTj+tIqbA8ZiYS%Yol=ja8_>Otr_x-T#f95W`_KA-yLXQBG zUL&o0u731`ix921Uh)ddZEU(ZFEm!pz4~?bssHyc_%nQ{BsG@}K@2O7l-`66%|H>x_-M9UBk&_T2CNA-nkB-e=v5k9vM9 zSix5kpuLiyoA-&>F~(T$oiSGbz)k}Ska307us#N-r}1ekHf={JZhc&k6UK0wP|c* zg|KV4Y)0R{C*el*0S-D7yX$VV-~5g1^FIwf>I|T#2E6|DulG$v2z7u`sbqiW@BAJ6 zrC<6bd*qSNZ`N}KD_Fr-CM4eWUElNWXkn}cGiUuAUzBs<;hgz(E(q5#8?~DjyZC^e z@Ad6kF5|n`5w`p88+P@NUSlogpHdxiD${p-XR^)&t>~JC zPEU&u7{HBo+ci%zDr=T2*jf&I#h7u**(exTLT7fQo6=3y2AIVC@z|#EgpD%7gK#r1 z6fCJcMv9vnjh#8$QC(qF?q+H>uWi~WiB12EWhN85)@)dOhVKS;*d<+)VKAOJ(;4PA zf)q@z1*)GjLUcyr-&Y(oM)0?R86lX?GqinR3@J`)5NKhk{8Ywd{hW;xDXVmMB4(%3 zBS`5E$L>HK6Kq>53lm=m2r#_^QyQjvsQ7uc2dDD*vO)V`JQ}$fv97{kXmSiy4gx23 zgJ)G2>YKo{mFkf}Fa)!EI#n47chsIyc?czNnKsF!EU&hE`-CT$VmyY1^>fu(|4j zrfy7h!7g@8&C%Xcb6{g4pUT3dSY7z;Yn*gcUR^jlE1GEwzlb`r zipTyFoc)`|UrBiEsXvRtc_7>+2p2Bq!p~6sgNY)9T?oCP<^D)Cs(R*>7Hunhmdr%| zn6bkDO7`3>u8}dj@;U2->T8t+Lkh+)LN~OD&?3SF)MT!-)JgpVLz>+*Xr*Kn7XcN* z7&NmcI+xW3p7RGSyU8o%hhHE}c)5QYg+bFr-9xtsqskgkW=0W0uc5v zZV=$5&OgI~)`7lE+iu?Mfj-M_6_6_&hPVc-u@@dT*qMi6ybUzM+OO365+mf0$ zrW(t`T-h?kss>8QLe+INReiyyt`R(yM2q-WLHIy>5mLuA{t~oV^V?K)Mu>=jsI3F7 zG5nAu*4BKELz9d)B0$Wj9c|smTV0Fk8YlDksX=gzc`9g_a?!>biUv(|i*9yYX9P}c z*kz0|#xU815Dkr*VrAWC`HK2elrZR8zrEwT|FQpzrbs9{&8RK(cS5v_u!a6ctITOo zv_C`WD>_iN>T#u0Ui1aRHhf;qG|uP6f@|i)G`{q;>OM=_svmx^T%$>Cno(co3xqA2 zZ_}E02Fj@@pSY@bwMq^1M{TT*jm z9zm`04>XS!2cb1J#xAJ!_}%hy)z)>@`O$LG=W4X-;(7<9iY$H}&m`xD6YpZ@YcJl% z9?m^cfGDz`qvy)QpA#>-_g{XCeMygaVf3 z`Oi@sJ?8Sv@5wfj2PdKcQRFDxpI={|KU)4Q%RGTokG+U=8cw0@yyyoFm&j?p-esD5 
zx}Bc4ogHaxe#nTXFp-9NG~%DuMlbGodHfzLlQ16goI8goa&%tS+fT%cp82nSqBFmz zw=qid_%`xzk(FOB&(U+P7iBJ<`7b{w+UW6g3Pk5<(|+!=cP%tn^#0Bltfu+O>EPIr zOr|wov*F~kn%@U{Z%j3>>n%FnS6)56j)vCQ>)5p`WBbfQw^;GqH(!_jvv=KT_vm>3 zi(c&a?N-aKUb^Tz5my>4E4UFn)iqeozxMUE^|Dv`28**_^BOzz%2&Il$r8|D!N&^W zIHgbOyK4W_ce)e%t(U$+>$8^S);HZen1kl6>{{J5QUH9jl+InS+rIf*ZS%!1x8Pt; z$NqI1EQPIYyW{QO<6%R6%U=D+2ciMKf)#v)0op5%z5iEM+AAw~O2OyTU|GVQpN!l4 z|69yn|9X9YjU!E#hp&$8Sr@X_?oBP5<|Sv==}!$`C*6V8AseR)LV!{}Zh5Wri{qZ< zM;+^2dBk=edBDQ%HQT@XsC9Q9u))rKHo1DYowZ%tiU+olo>;xCwUfTXP69%mo#w=D zJA;SW#{;jG_x}FecCLN?_M+MIRDrMk+OPF*+3jpaou>}Gd8{Wpk-l7!U&JYT&tuu!cF%y=CsWpo$K9zhrfoWy>uNMCSFPEVy**nyf6i)JSjS2w>x?HhPbS^&KYl#&^}3}R z`|hL|Um{~o^e_6h2kh2Y{5!k&@$axd`K@#A$AR7N>{7)+I}Asde=s3gUu`5+w(5?r zhNWhOYrDXN#GFntZRLE%&5>coC+v1DtM;|_4=h#4*&vp*3XDM23yqO>II;;0ap8p> z+f$uexvY)Ve*7Lxw9uR8%GQAsqPC;LI1PJN?zbX;1H!>bZCewb_V=4MP@XVfr)mSt zL3X8MrbcT>8>wFStSDf5NA=a))^~PKW@;-!nA)}l;n3oZ%Pw%h0X2d#gdpsQ$GLTK zFw~Q|@5%?aY2~$#wu-J1HsFH=O&0J~NM|f2+?(2y2!_tg!7LjNtv3sKOHOCTJG3tR zVV(_t2s6Eut+K7*LkljoGSVwP*(_Gp5z=v@e>-=e&R1;=TG3 zUmchq!r3=IHxO!!VY*I*C)%UB9H@>tl`+Chm&(g$O4d#kRQHaRg0ApVv0Ss`rjzic zfdC9`n-R1*7}&0=_|QWa?cqk-nejcfhn>wA)vvkf#Qi?Ns3&ij(m1&)bIg#9UGrn6dLTH!yi`JYWrd57x(SEDDALXv zD1WpF`g-QyA*xGLr=t28lT8R&2oDu+Q~ihlXQ+OH&WlQiAST>6Z^FKfu03pJr>B_< z_z+59ehvrxXha~aLReB%zLK3e`qZs)IgiSwH2Uk zg=Livfg$~l)(M(52y^nH0nAPzb|2fH_z!S*UpfvvTQN#svZ?(PC4H@PUGWB6ZWVTv#$6_Fapl zn$KIPyhft$AOE?g-F#cie&HvdWdoJTn5W*%HJGuQD1AY=g5i&8tYNxf^HkP-HKi3d zBdkx9Z*4Ym&5>L`GF^yZVW2r=t6I?qao=*~l52$F9|{NEXu%{^5151n#br)G;DRr! 
zqUH}A!MCqo^EngRz^exuY0VfKhKenkV4j+39!iO35$M1>>Zy)Crj%bOJYhO;g7B_f zw@F&#Q@9O#T}$@%{arx3qUK@*nfNTBuA3v_tycE&f>}beaS(vag%bp(P4y?5PWZ_} zn8dt=Nwa>Z=RSOJLO&EOXG=w|6T%?0h|o5JQH%Mxpwo5}X|37BO;V!Cg^8_!+FDj0 z?y7FgRVnpxd@{B`bwOYoipZIb}9J+xaUmRVW0zd{{HsoN6*k& zaHcv*dB%hcEIIyuyLkWmy#f}=pXmM173K%gUfDXcxdJI@c$c!k zg0Y{`yj0Sd{X*Nwl?U_d7ssA?n-=-qppDUq3tgSs#>KN^ut*yHlGg%m*pJ7`P>AnjoHur+#=u!;Hjj&0^mdCUGI9AZ{(RwCid84kGW%f&MR2K3ch+F@z(En z$Gew{;%JfV7lLRKV?7@IEMRfZk~($o$85dbwWem)omrT#I^kKf}!3 z%4Y3A3-){vXhj||9C2!z9S&{K>p4UFpxttUI%B#jw!Us5+uZaVCNO?5V7|vx zuZ+ro0UoN1gzD-nHMKETDm&vEp8zo0TbRQcjcquex*h($URGtLgNYmqN0G2fI8Nm< z7EVH&2OS$W_pGYAB~w`&7uPISuG_p&vhieWGgSx1AU;U0fp6glt(06g?arS2I^z!o zMkPD>5fBt|3V~ssOjuYwYw_X0?BI&(KeLR=&7=~pnSs-MgiP$}hjE`S7yU(BR9@Jo zPs4l_E?~0e^_-pZQy5J;V-jeWIGA`-nvwc8uDlUwVA2WZGmib;F}v!E218NLE(!l= zFrZO^xuUpefL;7BYVjj8!Gu~q>skfWZ9EvbNh$JxK|fKN;anJ}eR}R@qf}3{bTHL} z(5R?*3DFMq#FtN9^=hiLak^-0oju`8{Q;w02O1ifC_}igBbvh(Mpra^QGNdLhc4TG zFtoPXFjo5zo>f&Be38_vRo7lZ5QcU^P5qQbAfV^?1VPi`LSFb7_ANDNTU_m??_+8s zLJk*HB$75w;V(w#MB2ugoTXr9t6n8hRRjDoMwCCFPHEu?r|>x0hAt6=VNMHLr45Ye zipC3VK)6yu03=$BD;_5IO3DX+5#x>X(N6xo$J}|pQu_xKxI()L4XCc@s3@9co`|W; zJ~SvAg(j1#BkgcMEUE`))S#gP`A{8|LAXvXWErTx@Q;B@di)j9{{`Jc1DGX4BM2>* zjQRp?B22jPA%oTff$;~jDcxiGEjSizMir^?=ZhGbb! 
z@1rm-V1{$$Y4m)VmNS<;_n;`;b2z+@%8Qal&ksTO%kmEP8&+Lou1oO1lU-Ndl40-H z9=!Y^MpVpUm~!RhKMPo1D(uO$@p0vyXyeK9{EEwEnTykP1&8@leG{N*$+yaNM$h31+o*egCeZ!k z+o+#Y+IU@=$I6R-2|K*Cz1@R_7Ek`j2aD!~VzJ~lrEMIqivbCJ_q^oCf5yUg z(=LDDj~1aTSiuUG@XWXTO?%|Ou6(bo;He2uR*MC=bTMYne9p|a*V$|sv$OTI{nLLo zwHLiGuBBSce(9Izwy`z0XWg8(Pk&}^zx2P&?ZY3|T1m^M3+MT!T4W^Pngfj#)}+40!=`Mfij+o}c(v7p`Y-O4HDi-|DTicy^s<%-1=x6tePZe^Ud=Sn3D zl;5yav3aFs@BG<+X!k$xV*B{N`8GS(Q+v`W3*stgGPP78Z|y?SQiHy0RV2GOE036m z2yXCqk{yq%G}lUNq?OieYE9L*p1=|N#P+i}JD1N{L;02RSvzR8ET#ApwWFeX;nWJJ z|FKfhovq{aIg`s-2!l}9BR#{92>wA}%D}L$!}Jq|`s1O+!=9xPxHQ<09u%zgu_uf>4Ksuf}io!8F@(&0S@a^!hNXw zQ7>@DJpv3ED>%JR77L;~Slr4-;o!st7Yd_1{MVpygGsDLqG0nv#kTe@T9`?>a018g zbM+mXK}oeQsdOcFx;H!4P^KTh=OOE0zD^||ki)c9R|%@W(nMb zasMpJ0}UJmmIzmtzFEK>%yGwv>GgMg$w)w_Q*4@8m!-GB7Zo~Ia zP4tacN>cO)pqa9ME@Q8L!~Ir2Td>c5q+nC!1*XXZ-$iAm?Wi7emCXm{NbTYo^(we_ zU*E}G4$9TuAq3Evw zo)}jOi(_(2eK%2=jBU)51sYsgwE+PVnr)$QHPe{F1lw3NKUDgG#s_02Ro}E!c}=9J zF*FLS)3{t!Dl9E2bz;zSgbLD22nR9+7!)9n{CUhoh8wIQ}_-_ zTYC3tT#ehBZ}m>Alx?86(^f}wn#L>hXt`tsm05I}*VyHHI$JZ7hDNY_b`*O7#8tRI zQk-R!MVT#)1Ij_@#`(y}z2-rU@G2{P=PAr#|UrFX1(>e}g^t z$it4?m4IahH-fJg0SlYQ@YB+`@3V`L6|7(dOE~+A@3dj-sx==xGQYQi6+D&T$vC?| z4j=iD-fO9;-F|bz{>d-Q?V-=b?GOII?7iE%7 zX)%Twn~d7xj@Y$ew^~i-u64#EOQUU|dX<8}ddlBT?__f}>-C&51zRfAeR(>u-DW59 z;{p?PJc)LntBqrwQ^kVezXLn(`1x=lK{&AFcwocMfhGEF;W%x%%DPR8HPyGGm15Gy z!@hNet1>&!wSq)I#NPZwdEG|kn&rZdRbvy|SDP@?gMbB7JTTDFTtJ9{5MY?gSh72C zlS>)ZkDdJN#>X){+7rpX-p(**6~8qZTdjy0Cd?%zd}ln2{2-llhAB)Bm>N^jNiUtU zg2K@fogMFvp83Kw{$MC}wz#tX94O41|^4(GX zj|wlJzV8uhs{ZLh!FE;en({`_Q&N9*RHmwL72yq2Z8h~X4#LZdyRZD7S*f@d3cKTR zR-aR!v!g$*`;zj&9}nZCkV)F8+p^qrXz5X4g=Eb0!S@P6goJ1h(?f9>w7G&c6KU(J z&u1}AY9*~9x>q%PXFi%N!EEBr#_^9q1$~UE-aS3XJknI*qqXyvos4X7>0uuSBjE{Q zf#Xwopf!P35k4dsJ2=!=Qa8ip%zEXArUDu=m=D`5mz@C)e?r9NLX)=ehZX?>3$!Z+ zYExeHg}J(%#ueHtzFS6lpuJMmb4>4$Klf#o-%#JwvsoLeP6%>FvALU#L+FAgjr%E4 z9noNcmhz*CO;s<1JosFhYJM22&x?h;n>Ry)VyH2R5j^~;IN~;Lv2kP9I_(2hJ?X*? 
zXovhVqIDSlm?NvG{Lg&2Vx=<$d-dx-V}JDT?y#9?kDWbfWdvc5;@6T(5=CYBrxtQ>Y#iJ9=3v2Lbd!&t`VnWFkpxKnzBe}U$Xr1BpNw_9pk zOzrBmyDo$d)sDFEc1`)?@2Ri(IjJ=C2lF#b;baCO$i(}?`%!&5R^IHuPime?X^z0> zN<5X(xC$)Z-LZ^tiJ&IXd_>zP8v7|F&T2lS3a%x8J^HG)aB zb=s8;*J`ZW!22^PT&0?A8*6^X#NlMEYVqE{VtSACR1Y+)5M*Kgj`q5VH|0Cj9HR`) zg1+TON;eCw9*2faY3T2w=C`c%G#_O&m!nw;i8v^)>)yp2!oOehcoEUh(Z3$+*W)b0 z>FgzE-1AcqKhcB3(;;{q!WWt7w7=JI-y~(tZ^z4qDSy1YWoVRV6iD!O-y4U|qr2nf zoor*2MHJEFME`mm#W_mL^&(RbQxHE6%jeOZ!!jKQ`5!Om*tI{_>3AE#MYTphsb2}u z;c@s;B;I+YIprXfDvZ2`v}i%g{afG0!|v8;?SDrFbXX7zF~$=h2-5dESU_ z7T2Cj^zY?GaXg0q9cJorb?46r)10I6j}U2NYu)dOvlOr}CzSQ>t5&N~IbX!%?oS>G zSibi!{IKsHeDtA*Rsxn4JRy9w2v}CIf)(5tZhQ03th85F@RWxyRLJt+{c-!R?~7>- z`{dK`8LCew`@Q$ht=WW~6*GOdeNo{4OGCSPD{V_(FaUm85V-t*{>9w(_C9|-pK9j8 zRFSFQOD_q47X+uODs2G-~e+*Hw6;UgV5C{8PVC6ltC*R!jMoMoP|Z~wP<{O|T3 z|J_em?BQ*@Fdtcech9D!l9}R6vRUhPdv<1P(;c(M!q9p!^9~Mt=Q)}v1GNq2b6Vll zlil6@R@*Aos`XS4gfN(hiEBlMlk80)som{Ztyr+8?gK66t16H7Wby^~slu**G(}KX zz+pHhnEEQaT&r2C)3rn~Z}?v53pX$zag3f)+wyU(2-Rk^QrzT@aF`#4dLizaA#fRw zES4|YtW>u`sb-^0!I`cp^~DT-E2)fS_b=I?-*W~qf&c^wFdHy@VVdX?}le*>1d*Q#av*9z+^HRTaJ|~S|EeK&0bAllIj_LPjFg|qiX&TFksRM zht)P#o+=N2DMhsn{~eidWJv^bDrXji&Rp*RIzh$0svddegJ1=1mzi*oQ5+ci?B-{8 zI)Wq^_Au933-{A0uQR*y5nhRqysT7SG*vs}CbY`!UH8v|kZ)2IO$cCqAq*@|r*@#c zyGnQ8)jj*{V^{5}`Uvw{cLX!zWB0g&Zif1sjh3CHdyw?vr5B$$`)Yk}fpfSd4 zN;nva_EMr1>aQ9*vyM619SlMYk|%wg<;YSZ5>R6p{;|7 zyoB&uP+Jh7q%@Y&iWAfL427pP^-V+h_s3I<^#|5h931E8^}N4vs|^mW+N9GEjuT!N z`WoR8!V-i{UjB6A?O|U0^apbG!mk_KnL9%J?1zdj5JZJ~q%YQi>KzFjQciCODP54M zGxH8Yuapk_aZC{2sL#uqSB4tXN%hIP=BrHD6+KVY$2n_T##$QFZH?NT<^%*#1JUnH z^+Mo+*{^m}bC&A7uKpTnoTA1ItsIPoLiF6$%91 zDOxs7m5*7vTp?!-&A|x9u+EF-RG>CsI*u`z7EOer`A|5lDgIb<18s~1hH*zanJC_tUUavWnhQ2pkc3;*ZC*tvWsL zw|=&)c`0k_LBst^6*W&Jx&!N|?v#rVe1>30?L~-(*}c3d7tNdl(IkFl*cd>+W7e#s z^BCcBGOd2oe5*ExS^h5Uj0f<+X^rEOS}Lb>#tmd31l>_XeO- zbj9ycc~PtMyKLhk-i_M0ypB%C<8XKV^P{I+9eZ}XJYW&`sLbe0dAmCYuC0POti#6( zIIWF<3tTim<@HTe5^d%+Hkb?1Y;m(<^tJ9qx=#NQoX8yTpp)U;FT!*>ddgKaS4FKl 
zy^TjH7I!E6@w#j7j@pR1IB0^oItsg9g9YKrmqNgD*C+o(Wtn~d5C4eYclEvW$U~o7 z30PL}gs>8@tY8IS-LTSLS;5l@zEB~{p9b{lWbgmux$W;oro*rX{ilI@9t-VR=d;dV z0<_fh4UQlF_|*R3_oFra=Yy5@$_iGnf~O0_-u?5xJck*GDH|AFI0jv;jU&)Ce2bII z(9R4jMFq&GUT(Ek#VzKA0R#txO{l<*U#YEm}uYT0N<*o0r_xvA!!!lQ5?hHLW9$LTKwUp9e{tL|z zG$q(ejv&MB_p8nwc59E&?ufgon!&+=GZuRYi^9-q>+9Cu+jHCf2x8W2HQza3E9Gsb zcEiZmD&I0{N0>;uUlRln2sKi1<&=Qlt(#-=) z7xM1RdP=+1%j^P2)8Ohbbe|}!Ezu4sDX*CN3+)QRD#d~`*4c@hgb}GaW!Bf+#|u7T zDx#%Hx97~#p4x(`wWe^K8;)(9Q9j^SeZg*Xc4&{;IWD|oP6%%y>>S6e(6}EsvvQcz z^MP^}P2pgDw%@W5=oP+knwryG|S*F+b9q{bdzbg7?Xa1)izG%%>&-O%D_`6A_Qm*BKmJckbmc~I@ z&kE(DW!2Y+R$zXk?Xamj;32M5F4;`+H-~{uG!8I9lu)}fxxBUa4{TGzCzOpXX^>kT|BP7 z3Po2F;iA*jnAS7!A)23U-6DEzT73VS3#kx>K_?y2+PdljPvJ^$;Ewq5e}NDMp&;~( zz%-{k&=7(illiPW*yqD(s{DC|wuFhs$BctMeu6TVQN0foUQ?c9wM)b+I-hFH%-oa{ z0zWhg5Yp;a$INLHQ)X@+PW=tzuM-58$Ym@md@zrhaF2!v-(lblZ3{HaFw=wSr>W+u zV6$$uPTxlAgRtMX9mXT|Oe&x1wgruAHfry?S++o9GNyh+dx*KJSS?!&0fX9rDY25; zk6APr{+Q(Z`QQ0{`_Lb~#D44lS@W^k5suv72#rwPFgHid(dHOvjIOCpk7``tOQolL zOKQW6aMkUa+FMe8Av{KSivSB@3R*q}ZRkULk}J81vx#UDzc0mvsIjpdjUnA%W$dVp zZXQc@fhM54_*COy9ZMXuE{=`$=k44v!v*0G779T6vfTPHWDsexT?C& zHAc{0fR#E{eG#&Nm%Q4@KENtwqm<`RI6#=DLFAeq_+}bwJWfRSb@~?Est=(R{B)65 zO5Hf0sJ|6wlFnF5b5U9O;bdv&1KCcTcW1~b5N<^X5w6f zQhfvt1C5D-Xbu5xPVGhm34u&axI7SU(1JoZgC-GLUk$}Qt2mu>$$Gh>ZO?ly7(t*K zSKPkJAE_)%rX^JNaII?dtHQnJD#m3;ap%G(CN8%{Zw<{sMYREe4BB$Exv0MDs-4w# z&#s=?wDanpGs^3t=92qF8@Ijw+by9n56%$Ek95Y@hnrl~^Q`KS6)mcX%nFY?b8#4^ zwnohI1*6>~M9+cfslpEv3rMf)}N&*M27c^8Tn3h~Nf!!|CV*Xc#?Wzi+=a1A=ma|Pa+$8tZ}##74!ZcyflC=Py4{KfV0epGR%(v530 zi5|-pbVCKCSG-^Q#s9^f+xnIEz;j7Kb{R&pFf+I+z z^Y;Aj{RO+{w|>q>-Tg)A3RdvchcCqG{TB|g#1R*_E*RvIiTSiuUOE|B={AN&jNhG~gl15-{c?1y1!^Xc48 zGck?(BAbInGl#Ax*B-NTJ6An!Y^KF*K5tiGi1q_FAH!}id@XE^#uiqJZZfE!PJ1~o zyyH1`a}c`Y_lNO=pouoMzi+$cg70`|7j~-Gw?VCH^J_c4%X$D)P2ud0PPW??&tz>9 z4lSHbeJ6gW({U&CHt1U_m9=ZPCG30t+CQ=T?|*@P{MWwOHajEB(w<;o`+Em&vI)O0 z2n0gqjW$KLQnsD()cw2QX9R{lLW{B5h_8-PZOzT>1oOBRCL`;P$G%H)n9I4-@twiI 
zZpo%>fB(R>8A{cvWx8G4jYqrhr?I%(U`Jb}tLN2Z+*&Xg@%s`6?!-G$saQkjn9AQM z!bF`boSnBN z;Vq_qE+)`?i0EY`Ix6OKc5x)U>V98!Dz$q@--`!%aLG6I8CZv zz9U`r9H{K7#t=Kx*M+a5@Pl9i;YwWxIE4v^ZxXbHI@z?&DI3qxhDcdk<8Bk75#vtj z5Psy8S6^im)Q96i;O3kdja~I|N9AEoDXqNYsy{-G#BgBoVc*Xt+RQ60K1HtluOtfke1%y6|tGR`}-Uva^@}iE9!kE|C!)M7M86dlasmam_+SbNvz!=7*^y42WGajvz-Hr8^n;2K>e;c2M$AxIi({=$r2 zj)=;)a<*n)`}KF*)#irn-XGD)fM@EjxbTGo{I+N)r@V2nk5&lUMF?C<%CDpTMBoA2 z5A%d$)sy~kQ)B9XG-=YBF9xF1X}>R88L8iUHc!k%&vCn!%2}AqSwB~{POfO#@yuGH ziw%{JKN9NP*BC2`?gCvSh$#z~yTZGhb}W|N%wRm8GWD*_I|nu#1};2eoFE(C4Kzk>z zuw<=b_%`aQ{G#xYR$T(+3y$&mx2du!fOP^7~!iU zXw>+aYfR5}_Fd}`lY&FdXUy4b04Ov&wxRm-4#V6W{){v`Sw(H`V`5BoLAc0#x2tCy zYpNRp!s)qNHO}*nR|F|&N|Z_kE8>4f@dM2VUCp)l_rlDSnx^*xy~3;mTbz*SosDV1 z=$a=w>(;{;*UJFEJa+hV9Rtl%f!;@3Tam93@>%3`7>g^wL%?Zt>rbwW$%X%Lj>4D6 zGM-n*k3~ED4Dh=w@A@`ISIZ|lE8sBp(L)bAeq3MX@$wFH@>m{6f8ZQpSADbYVSvXa znXV7ZTNb(qUmn+&$FVGnaK63h>#Um+EsCZPx+19V=zTQk4J_0(8zEAf_l}o&qw+YG zbvj<&$u=&3qndE7bV?B2MR2@4uG{Ud*RNKudHC|+`BH=BO9^PjoP<`Z<=V3=4VD$$ z2v!;_D_Fr-FPwSl+g92uD|lMMpQR?t5-=Th=bdvqcg}2m{R=A_fl8w@brWF+9j$*e zar^y0oZ7$rH!I=F3RbX!uS{?gVN2!>Ho{9~^Z;WP4sqPtv%VJdFwt2_r&2H)qlCqW z?zJniiM?RLF87!n==pYkU>6jjq3c;9ZZ$1n@tD=jWh~d~IMb_CDqE&pv+c>)uByyb zw`=8I&jw*=XL?;54(jYN5=Ib^K{KE1+Dq_|Qg=4d?6CeBSo8QmbUsHpV=Y>Vvs1 zn9lnL4ZHQ`3pQ0-tNEOpGfI-T+J^IW>XuE!+?*CY0E2qB*>ZDZ*&@5uXI4<$$$0Dx zi<#4sVz}n5Ze$s#<{x=PF~~Y+I~UaK=iEowkbG6OP=^jKY%6 zY}d2V#7$zkHi2-~pH9_I)lc-07w%!aqIpnKeFHtCZx9T?$i}P)&v8Z_S6OH=!IZ{C z5%nr4jir;GHeSkGVmNk2^+^4K`7nfE_>%!AY2mP&R6B|rwm#?>{%gQpKa;XyKl0PU z8XBhMPzMg^BwF6L2g$$3E?X( z`b}rEqIX1Zk;$O7n21^F*HrgMl|N>+;%YDc9PmwomKVZ>(F~_=YLmv#FbHiZJZ2UD zn!@tJMPK)8m9m>T!v6{BFug`v9MogxYBU>JDI6$|meWZ=&wb}RjD59(`Xy4LKZW5- zA)PPTNce#fJyE_04PZL+44O=$Ws*)QOx>?~qeX)N0!<|x(1&Uh8b$cJaNz^vQT0sD zV?KUMYA2?@lIpua`7zg^T?L(>9RgE$sPkGbZ5@Ph%r%M+EhY49uCTPqLimMyd7O^p zpdPbbv+>kI<%Qg`k&)(vwDLPp*qZL48_aYegki!%%!#0_jPOS1EO-#@l!Qy_k1%*9+O3sK zky)-mXl0G1{Aj3tA6t5V$4b*sbH>E_qTO^_@#)^%n%9$}6$GeF<;S<{ 
zJe{*tZA1N{IgruNYgicctOkzNMjz9{0b?LkTC~{^l7lYV0N|t^L2`ld$9$l%RoSSy z^LYgJ2tpCsrNIZZru=9+nl0cornCWq3Dv!%>z>-Vs|W~Ry#Le}6SSms%~(fEjodYHPUmNOmvlA1=M;v(7@<7^JJ+rgEr*(u<0^+k4RibtwSegr z>$Vl_xe&Wc`AUbt4s9S8b}2qW)*0_6r7__*C)z>#tg8NGZpm(Ky8kNjoT>gr)qg7d zOktE)O74WeP%65%8G^>M!Xv`Jrs%qG)9so!%e<1Ac6F$JTnLz#mwr9+#e&aW1GSC! zHi8#4O#pvylXjVuYkv~Kb=SH915#Pn5^i@iy{{hGeJHwXHK7 zk85L;ewjlA$IEvrp&s;guiJNl3*&%zPuNBRj+OVg zHhOUM%z-eEA3Zzv#ILCXn{u|c*U7=PW{#J4=SLy|3+77q^v!@t#xM2pxrZONvfdTE z?^Xhq72F6`0+to5;HwkRUb*#6Ke_U~vVx~Ad^vvw$jyO*ox<>Hzh#D|OdQqj%Q3^cQL z**&Q1k&6Yc8xD9-qL*qA~t1^<|(ESP=#N*Cfg^4*C4Sm=Ctc$4@-D?5urka$` zyjr!+Xlj`kwe4HK>tERa^;z!!DOIe0i`y$GlEmcnI+=pv5@a70F83vWnY4XoMYon_D53u!-_2v!i%Ab0_ar zJ;R()E0eQYG4ISmnADwA#vS&vZtJ&NmPU9W9A?KO*P4NW?T)_HR)hqYFJhIKNkopd z^NNdkE=-1@!Bb79ETOu>V4Lb5W-nR;qhi%E>t}7gd&zK|%}#AJ5pZOUiLz*C>eLcJ z-)8NGjRq|jxXfazcWu)~xw7R86&tC%HN6Wv!awz77y4XBPB=t!B~~rFc_R7*e>~J5 z(^hDxAb9c}-+Diy0Ry9&9q(18K|83gJYbR|V2EY27W4*@vuU-ht2V*tr%afSS(vYh zh}i^vU&ni)LWns~=$8W-=*nH<|(nqUduxkl=)TY3>ViEok?k znbJ@`2>9s7A$U~1N}@SMw3h0DP}0rbss1svm^w`x_gWT&fhDpU*Wtj;ov8zCBpR9{ zV9Dk!OlB>WFWEekbyIW*DbP+q7=w@@ok_YGIymb+`b~7{ruI}Pc6DR6#vQn;?78Ap zmUvNH(O5uh1T$A)0znzRKVYCE#KK1fLX3&pg0KhS5xf4;WC0J1*NJd4QacfT;Qyzs zk2VprFNgLx3qPOfIL)}DDah3W&7 zeXKg7&4W3vb4rVOF&wzld_Ex&8n}r!;VHp+9 z*Vxn7bl2&cId50E0GDcok7>-VQ~h{0p9^PdBPRL~CKDIUyOwZ^ z{~Jv5A$UxP#-U%dk?^I2i7qtWFwuoC9sKoRmJY$|Onov@KhFD@P)u40VGC(Bt}sD{ zDLCE{2xZ;upw0-l8TSX4t5y76XnJoUbRDUFXeNdEf~7>i1K~BV_ROnAr^&3|Eq(P3 zb0eBPXf>`WZBO|zj(1fyn+0xCS_Eq&y?46hbt|sHyk3H?{APKMfSEj#OIsly30w*~ z5P(L0Jyfph%liR9YtG?BK0>7vA*V%96sRz?KC9J=zmGh4k?t@aaNW&GSUx;T zp9=mwN_5~3{)G#TtJB(e=R$*Ju5X}l(6{Hj_+_?n=IodB80!5NpNv)lmK8i9tOP78 zSix5(th85F@Ra~x4j~JGHp~0or?t91JvY|j87>~&07|)pg<;Ho`#;R>*MBws1v%aa zR=!tOu!0pl-5~MS?|jF*ec=rpU-&N%-Dk23#!a!{Oa*qw?lqbgqzV=!)9#C)J{`Mh zs~(Kwbi&%>v89J&%MM3|qxN{KW9K$EEtbhzF@bLevu?Fw=|>;40~oo@mirMYWRf-r zhweWk-s?M4t}`0jdIB?BS^@1HxJezD3H!yoP10$rsBYcS%!;qQX5aSq->~2Mm7lTU 
z139}ejZAxuH2cG_Tc5$i&bYq>%ppxxZrJTQGdQ6Wfwh4AAs+;yd%fjJ^fzD$QhXKG=FCp#ItqkjAb;JX0r2b>}($Ec}C^qlcraxT3z)^!kkn4VRnwy?woKKOlP*PdbflxSgk4H zon6&#+Nic|nNrn;7w^;k(3y&KCp+M&Z&z(#$2iP*%-z8J&8aP^xa!zzt3URgDT^7B zNwH#sLe=K!g2hr9XKJ%3g<&3|(jfd~@;PUwvlBKM3@wJQ6qSdNg|{!J)%wZ{#$uX& z?1#Sd+nKg7Hm7sfg29@7sQgRmluZVKGt~oyrxfNI61sMc47H<^O1N1aOzfm|E~#%Z znFfwZ$`4@@yWyvz(_yw`*1lpz(Ez4~uBzUcKuT#<4gGdw_zX$dCDs2^_djBzxfb5a zb6;^WISRuT`ok>TG@WsCV`bqUjSw_>a>^IM!nu0QwE+skCH{UeYe(N=rVRX`-GcuW zXahed#kgoe&t2G{vC`ohCOPTArDy~bQ>^RpLBcNnOg`(3X@nFZ1627UfI&b5lNY8p z-#cix43rO!$uZf)?&5L2VzYAH#yc0SH0+CLg}<3-R`KW$ga%2qFP$$cpR^@%1=Y7` zsY1zp-AqMG&TVPbfm`CG= zIQZ$nY+7IU!jv^cqu`7&hj0ZN#WWT1a8^&B<#QPqSY!tp=elS9aVYU#B7JRhC5K61hGQw9cRUY%RDHWOX<9;eAm>^9qu)z zGRhM{dzJTr@ByBnVFbeXY$+=5bTQ`wUWC`}(w5~W!eh|0P;IZOJfWk1EnaIjV1GOJ^fOATBe(Bo-vJdS?Q7>YDpiaPQ~0vEmWt{1p0Rxj$? z=zQ!@Svxy>R@6!=UjWO;U=jT=?(u-bumv1@bXb4NJG@>T+#io~8Z6`Ta~Mz89=3qR zwKs|5WPZ!=<+Ib`0E92kI`($#eW7h6e0lb3{T#;A_3}Be zxTc5{!c&c*$fQ`o-&OQ?SMwZ4pz$^l-6L@K_x^EM=ILksoGfqod;v?G?(ar^SBm<+ zyHOjL8Z4G(ehh45`;5|S-ux0C2Rl1@5os>g_+JTFR&XO&30PLJg0DVUX|Jr{D+#__ zLKZDM?XJ7ce(%4`?Y{fw_TcCErqVa^zFDtlcJY$g0}6ZZ@0%OAdH^`)H9id7pX}QtJQezzmO!@;it%8GRb}Wa3 zz^0{wjrJNA%jayOeDTkKvv*TE?CW=K8QXXNkN?yjxbOM);a~q|TkA~h0*v7Ly6tv* zSbVXO$}LVN)@rxiZ-V=Aai+KyRVojrTPmBgvKHKeWWuct`10Mem_$fd; z1>qF}7Z~9=;c_n#vm%UhrOzmho4nGqQ2m6~N?L90t8OE;3y1ELQqgiSwsk*JI6LKM z3cIFj9EOhyHFttt+I`eH?$k*%c0srKcq#| zF~u7U$Ihr{cR#{D{QvY7ryXgO3oOtaxUAMJ&%+6N<f4s6)YNYp zQ5}ta)fXNVxJA1Te@#v3QhkH}qAc~&T;d*wRF-R8wYrv8|KUpoGjemy`7VG~-4Onb zG~b)%JTwt84Tb>CowsWa!50u3cXiz(6BXnde0b4m3^aEPGkrkQY{cTb|O`CRcGPQIJ{^6=v5cJVG>-1rG?Bz(BQ zgfFRG2ob|UXq~0NMPrM&X^r(|8GCyNKK{$qBj@GOPxKr(45G~F!cQI!_yx>w|BdcB z7IpOSWxMqMr)i_ZB8)5)%2gD$IIb%X;B>rv4?etb;HuGRDN4+0%-J_+WAwXtz_F;~ zQJKfXxmeu$<>Fp&c)Ls!-5e%ahH-yg8yE2r9_I8tQ*bS0HpV3MPSxD3e(qwDRWv(B zJ5d?%EXr;1>lgeyzKzTBy6v9EpT_5U?Xb+KZvGRU6n^JNKX@cy(fsmfB4F9QX7i!u zGR+Yy0m}+*1S;D^mV{)tEc zbLC1TjQQ_@lfso$p=h_h`7M^zx9#@Jzs8ym+-v7w_eNi@hfQpHJRZ`;lD+6hf6A_Y 
z^zq|;`{l2)-j&OXyBou^zw7&L|1)abF*x_?*ID(}+pY8H!;6qF9vrW8?HPC2`g31k zoyQ(oJiQ*CZ2MSS-}dLf&7Sk!KcKeUX_H_jTwcKnzLelTSQgh2_(EBW4qrwuV7X>q zESh7zru80s*tUja>&2#4Owa9E-IfJGM+=s;^>SsKCUQ22rCl>2W)o*FY?X_aX|!yw zQn5n2V-J;zmfkK56EHin&%#$rR3#=aGH?IDSWBmR5Nk z)gx9YsIFRFsmxhEZykk~hC^qp;~QY2JchbYmdegJM;oJ;%efyE%t56)9rtb0QyxX7 zpN$l^QnqoU>9+XunT+*1T^C%i!`W0OW@PX)fuLhpD)~a6XJ|rH>UC$jQ)gCw@nXSh zx<5$5BxOhV+_iENF`Z{~c*>Rx@X0c_@tIp3XNhZ%IwO>BY>h_VHW=PGe`lwwJJ%MD zhP{E|{2w17S;d7>iMB{ubxx=+V2JZ>gqe>3XVhxgEa+NohTC!UW|9^b*VNv1n-_(j z@yw?2Xtz7QS28+dHmR(bJ)P&poYNP0gV}=sW|YFT&)9dSHwObNB$C!tov|qo(={HB zTq7Z=b|Q3`s!o`hg0BN(e}eEzb#5wO%*r7oONlo7r44uJzt!7wdP2Wurt&hQi5(~& znm6vST{w71G2&ZI7@AIjyD3((v=ooETuU|Y%;zY_@1 zwsaqxPuP>r5 z7D~YE)ft~M_yB=fjsFPDD=FB9gTPIq&0zS8_9AEVGYbnf3$sNVUwhb1R1L;s_XC1J zRE=;mdEAA9pH$7h+Kwrw zP}jvm&PJWS&lj_v@>jfsaNJEN+*Dn-R#qC#Vrmx-*hAr?f(C)|+#SblTv)Sw(6Wr$ ziGQEI+6`{e8t4asrBp|RQzB=dD;S4prezots#}&ZlFRs*8Hhe=3TF%-sP5x#*Jn8T8-xa=v1DKfzoDfhXlqW*Xrs{?Oda67y7FJVnGaqllh4O^X(D*@H zXQFba2#rLCXw+Z|ZA~vt3u0mJ|P1BAJWviX|m4RMjm2Hcw zAE#kp$)VC-d(1}7eM@MLXHJXfOBO4yTWoF961pCbr#9$oepP-#ndZW&Z6N?x-os4F z(zH+E35`j#)9`UpP~Xnf_PFYfmJ6m5W6U`kqnI?rgj`JY92Xuj&xNKGCgXB?KMa&6 zF8nZ75g=hEZ(C_G(}yq&b6sJhZ3UH`VJxX0HN}VKF$qQ^Cn{%6W1*!1RKk{-W&)o# zln=hG(vz_ZtkY^xGgH&Y`rOWjZTF8f>-LRZ|IClGFx32&cj3oKb5cP#oQFf3sScQj zWFB#oll{PIqO(BrOkOk}Q~A`Vt9JwIpHR>65u{n({m~#et|(nUsrQHa3dZ*xY9Crn zk7-Pn&)uRL2&n*V;>4K=FuI7s{m>7$Jo4wH(;UDvB6!p&H-w*gwFd?~S`U~4jWk?( z>f^p%L8`ZstkokY;)i)0E6NhwQ>3EFgKq2#SwVgW90$MIH%y0+m60F==05ubp?=aQQk6wM<;-L!jA#>i!u*k zaTyh*pXkEFJ-cO|++WwmWdy(DEP zu+;c5OHOwzF>6?2-m?U=Aa&RG9fH^B`pF*$yF2f?_Y#OP(FW?Z?67Q z2w1$%FMgQ|O~S^(4TUSflWia0#>3ckdUc`ybVK}6iaSOE3YujS~Z44tHJDpVUx+a*{}7{&`r#&DGrX& z2f_5AMhmhY{Q;~Lr@QseI64 z!KV!7!N!BY+cuk;L)WC&t*2LqexOc+d{lIjm*8OC*1d0{4Lto~wWKlDlez`zY)xT}t|6Mli4 z)Dm6yYTNExqqKL?8Q3ArHfVh^^SJnQ8L7^gFhe-Uix{3Bem;U;*U}S2JY$<`T&E^u zk+A*SPUMY!UYER-y(drWDCs(%KYsP1SPA=JacIAx7hmiyaatSCQB z72$sdK@UP7gbsnm1{xw^I^jN45~86pG@^Kebkgz~m-NF(X?tpPd4PGkqE$n*nPS!v 
zjl_jGe0dT*>B#z_V&y46|C|Waw63QE)3O+>mE2uvB0zvzP zxR|`71Iof7K4b9xfsmu1Izq!0wc*YW$W}osam87q6WJ;Ha*AOAsRIp z@T)34ZN{HaQ~7TwFZvjPB7R2@gicjfc_9$MnS5qZ=c(#H3PRN{Wy5C2(h7H-JJDLT zQgSneIn_0;`Oh_V)UHYWtmcQTa6YzVsJX3u;Qc{grSe(VRE#Sef%>H1a)D4HUsl7` z^}K4?QpGiD+T+06gkK&^6vB(b6j-y}v#Q2MY&>;AYfJeOZ>&0^U4`%#fts8D(!9WU zc1=U29}hz7=^aqgTzc37Kp7Sa z-UhTEV@i*(B`(^-q$L8;p724PnNxF&lS0nv3_mpp^ZE)8bZn|#_+PnQc%t7s<&u@v zM;r7j_`B(uqP`_C;v-NM7R-0Qi^7)7dQY%;wm1 z|1Zh|j+J>@d6;hOwtBX8CUWra!OJLK%!x9Om$&@!f4{k|JmC5=Be-E3Pb-gSi!!6@ z=*+z*2ac7=DH+!SRkU9a4ZF`C1Z<;;1$qZB`x`T?OwK8d%Vn7)-rqm4a;4~+O`hZO z$IlB`ly~PxmKrR1_e;0FwQafK$L(D|_(FT>i|h8X7ZvRVcf{;Dw*>aga|d?o#wEK@ zdBD!*@3M{LN3CL?wBq<;&~odWhXR&oeA{>UM%Z9~*S22vN_+Nqey`o~rf;_Mul;)8wA#ON z*~3fcF4(Q#^sRnAxaZTJu6Fw!w*I^q>U%Npwfzfz@W<@T%U|W23%eH|^)feJ@M7y- zzGPwZz)BZxw&(xAkJ$FhUS*kDJraIA`mmMG-(=5y=MSoWuT$MNtho@zy!gj|+LF1P zJ@akvu==x~>(*(|AUUUYH17SZO^2atHo+%JA?HlT(97yFJ^3iH5MC-L-Ic-yVzS z?e@3FwMhT>_G`cXI{Qz*_^5sAvCr8bX~GCwEejI~YpY(EW4WZ2({{P+4CO?vrgHl> z(qwbFr7|?J)i>5%NWl)vX#i8Np}0xgZMSW#GdrCdYEwR!_Z|88d&#IieNC+Fv`b;fZa7}~zhb=3_fD?7e1wS@2`TdO!T zaiTokws|3MYl*mPG4xdD?d>f~_XoB+p1AokZ1%6Ktiss~Hq5QrMtk2%-G=W_hZ(q; z%lfYIsmkkg`mO=M&iHI9?WV4}$`_vsn1c$EFpQ_J2{IPW1Fb4?_}v)<&MbtLlG8$S zqWWRR2?yiuY&x5^g7WC*(w6NuZ98b$R{N@L%)08cz{bg}9cE`pev?!bal@Tbp#&l_{IkcCVy~h4ox;EW7p!q z$BM0Auz0U)mG+LeAuF806i+LDNBxsX|Nq(h6KGActG*N6-#m|Z&aBLuOGBwNl%xVA zYXD(^EDH-P0S04ipfT-dUVB)6Zuc{5dA-%$tDoBj?B|KLF}54yi550PTVM$^fTU7M zs**}IS7lY@aPv;zJkRg{+wpz(-n@CURGKCDc4pj&IC0_(`|NY#i~Wzicf@07G!E+a z%;OjAiF(t4qZRd|1`wg~=u1`+eI`g<_d7sSEQ#+C%j?Gl))y#EX8=dy4{?fyA=RZ9 zb`-i9T?gRci-Cs9C3Z+r`$oTStGSH3I-BU;Mz?DR;xSfE^rWrhh1E|FrMURf)qOqn z69su!WsW673C$Cn_F%Cy74O`#LV3#Nf_`j-Y@qfKbK+ut%_0%WQgz$GL0q`er?i+x z5PZtR^%e?m0*o(WW&l*-Zmz)G(Okmy7qT(zwk%xVvv_0AQajJtq_MBD>sWl$mq1J{ zf(2VRZe84wTc>jzEnoW27o0>G z_@9sr^A+|)ZNSQecpU}F5<&PUgR$ksr_^v_G06<$+|FjLp*cW!@S)ZxfRs_DXwg{S zX1zU|O@q*1Wm^Z<&z)2*S_T~pR<>~x#I5Aa4`j_l2<>lE&(h+b_oCv_z2*TSKTPN zGm^Edm6}D$MH@**vZImJv7SJouW?Es1Ii!PxTduM*kV1>M>ZL6WJftk1N>ke#Z?%f 
z+bj~-{F>OfS+#-YF4kNCt`YI8*JwI;fJ|UD1Bfuv+S1*9&LZ2-*kJF1O(hGVOwLkU zci42}R;{^Zn+-z_1Ok9!nbKGPh|$BC=T&w_YkE>`n~D#+2%>1hx<8-e%x7Zc07`gl z2l1(|xlL>(zMSukC9hfw0i#CMy4FvvP4R?{)LuaFis~pTZ&~-JSBf5=W>00XM#Z(& zI~@;5PfVhz#>TA~B`1|g)&uXy+6Qjwn9+D6s|~G-#9RV^R0FiE#%zSby;OAmbk&Y9 zb5{M0D?jm80P`9;MmbHKnvT}aA%N@e<;_9*|D*)W~=LE z+uT~SjmPTi*5}858ss=xA5^P0te~+7>O4{3e77J@*%tK$`CfbTa!8ymwneU!^&Q<8)OozVMISlJ7jm|?zG7Qv81HqLbzF(u$5}`mVd|2Y>J$`?0@pul>mPZ`kY4f7~*i-#jeq^v&VZB`s-5OIp&BoUmS5 zd&S#r`+@&7Pg&BE{%8<@#qsTnq2f(v{X@FrbmrCf%#XhEbi?-P$8aPTch3!{lCIzU z?Qge-{@Ksl13&w}x>d`i4}HKd#d8I_{Lv5Dr~kqK*Fh4jQl9=Vzv^zWKl6|NzU@E! zkZs-fZT9T@f5SfWkAKzU`t#P=+4k$CzeMRz zzVF|wtiPwa@(yA^#25vH`Sd^hyUG`_m6zXR5B`&%Q~95>wRs*~o<0A;-}dyza~E{) zYd!rmy7_J2?d|^L-}|4fy?xn_p%>G?B`s-5Ut7|fD6hkOpB<1ZN&MxQ*W>w%`aIFU z5Y)d)r(w}{*V^%zmB(W%jV7)T#GgEt4?9{&)*EdjKEX$=J?*|C;@)4}RDR z<$?XFZ~0dHrT_5>dv<%GmvND4IOs^| zTX0+DLE}{h@gJ}vfS)Ln6FMINswf{{w2#Ik56a%vxDs@kuiXKF$REw5{A=&{c)7QP`JZY=G_Cd|vgyGI;VOc>CaNI|sxFjCQp4v})3X-{w@=SbI3ITC1sXpIMtP`c+q4sT4&)n)(`Toa|0?(UqJIT}lpxd14DF`@A15rP{rVr6*j63tmd233xKc`u$5#{;VnTx{W13ab-h zIPfzoqjE;=j_pcL)7_Ru8~c{-*KD@)oJBh|%S?JU?5jVtSNxi0^EOQvZBpK_q1v>% zan8m`tX;AWR;;2VSJ`|V3eZJxd6d~i^}%Wd3nDCbOzrdQjH$-9r*;vign;n4fdaIO zrBW74r>!WSGMBOP!W~yTmbPupmC&$a1?2?JL^VfHhcRXjrVTIhiQs9gxD>1yCcexyPKzsQj7Muz&Zzm+X@tzQ=y(r|)-A z&Rz0}-@Bcj))%!|ZA4Cd&cOFv#$&I{uu7}c?3~t&h}wu%2EYLMKnB0frp6uDZdtVf z7jvxLSf!0rChIc_e_U+g%BwHF;s7_LIRY5h)S6?34a=u87Lt5-t9usfw;k-G96&*U z-?8Kjpo3Uf6UkmgJiwhB-~r$h{EKL;cebCl$nFKp_V+D18rW?8jKx=OwpMP_8k+0V zY{A{+0X$*hb6Inmcu~mkiqcp^8?}bJV$7+1tREh`i21@8Yn@5YH1EYTFpgq2WeKf+ zy;j>+S4!S))-=En;=mNt*P6<0BvZDdHa69dbUN? 
zo0FDXi(x6(72n_{vNY1%j8_`g9gZz783DX5NdB=7V~vTaJpjb4Rm`o9WMxKt7qxL9 z8Hlu67Vq_Jthvq{b-);RVR}Yo)LyGL>UrmNqJUprE8E7Ibq8FSZ?1s9 zy62!vrBZj-V$e~2N3{fWZGH15&-2dny+9w2%HwFBZ=QDiZ*br7v!KqC^_}cvaNkjV z2YVGuX^n8pL{ACIg8Z*19V zd7jrx4)RjhiTa@9bsn!Tm*)eNvTbU-0oec$0l0~!wZnMtSKPfAbXce8+CCl;YaVV( zL*bP5x+DAa15ev4@4d|uiKu=4xx9VinS1QR51p~!`AEk;_&bl-BM)D+#~;0H|Ly$` z*?;*@pRf;o@L{|8wskv~eM0M_esig;Q#aMKv`ksjl9sfjB`s*F$$ z$kP`BT*`Vi+nj{#oHmY667Qc}BhP8qzf`&*xFX7)xyeC=8xrrWgT96J{vFl3AUdT5 z4H`A;?(e$ij9&G?Vp-nwt|dT^^WXl*-C||fZh0DI@bM=fDNyl_m)Kq3_XBqCU;gWM=3Cxq@%%jA zuAIaOL!lq3VdF!NeRzf*98d)tGw^#j@HTzpX^KtvVtt>S?udYf3MTAfB(RPR@v1;*6vtn|G;j;lH|%2>uXUeZ?0S9;K0&bo9;g6{A6tH zTHP|yh&8l`w$-P^#-=64Ba4;GVnxU*DhGf0Q!U!Kh)O6=h%b`+1FPtsQM>KHMOI~u zdwuJu?uBv_P-mj{?KPWL)rPAi+zrK*mewLV6tBms|3GQaSJk4?&{}*cFdSNbbJGf9 zIDz5$lAU-k<5(<+8KV4Qwc#LLvhi?guS}Um)IWfd;do+exarb(poosB&Q0AnMZwzd z`*jQR6{#LZ(}`QV0H$D35)ViG>r`Td;BF^19DD2s%cQJZCjk1`+lB>8J`uOE%EV>Q zSUl?DVk(jFKc{&?7RBO}1{e+Qu!mSJ<#- zGHdZ}-($vvJ3UpKFpxN0E%6}1BBLq(i{Um@n+O(7P;nIA6N_3Tzh${z(^9RfTXPKk zt9J1&5wlb_ZBfYy0m>h|x@XTGG+kMo6MrVdfq7gD^}DMvAvTT!J32=3i(55fJODPO z(`j4P*d;OfwgCe<-uo`~0LSR{$}YJ|Gqbsz;d zEisMzMafHzxEUJrcz`<8mCdp*;L&#$sa#(x)Q;Z>clbvz)`JpiyBSqk2 zs?+13C?8f|fKz}}G4*FCSp^vGu;#?Ez#3aVo3*CKwV<(p$N%x?R_yAf+wI+d`HFw_ zPjKZ>azwPjQ$Zu_u=Vyyv` z19*$7tpFf^8GPMNjHNSt$u1ra>TR2K>sntgS-*DC3hiAhYE1}f9iOh=Y=hz%>!gY{ zEtH&xNzJXa$|XQOf#vxoXWN84x~YOSC0LuSJsZO_Gv$DmSp)tmaYF&IOVL77v&kRO(rdqdzo--rWq03&+@n>=HM?6-VlXiLt z=ebX-51JnbyLlNL&Hs=>or_y;*hl~W0)1T2q9D&5>`6S0r;DtM@)vpie15v9)AQb- zj~&gI^^FxDYXFy&<{*HaTexiR+rHM$MiYml0UeePQItB&HVbRb)P2oH*FN~$58Io* z{cd~7O;xSM4c$Ajn^vm!s(a7tH`Kr$c%WvFSKe$F>?iH{#>=c(?|Lkm@U(ka-f7#C zmb9cLEon*6!g^)*v%fV@S<;f0w4fIPTqfP#{OE`Vdk6EQfHK$K@*QBgOMu(aRG+gk*OBz6=I3#+L{)tRTZ5a46Ib$w_7JID-8~;)Z4eI z()y{q?X2Bw{q>ve?jOtAkNoI|>=%FO4fbn4w`=>mR|4h#d`rip7Kui!+3ng#E?u$b zroorpjcD9mBh7|GtLx>zRW92Q%aZZfDzlj-l^+*c#6XxOQkEO4&DxI}4R={|M;c|o z(E2Di0Y+w18}A=@@ca}?*mTqv0!mGJjIg5 zRUS(6Y$|1$R?}A3*K7!zRcN_rulFg1~I(>I5^|9?;d!pL3?fr^%)yKYM 
zf-mc7_c>h;Np>sZ!$c3RQU+IEF-xnSq~sta-lGWKBv5cDYz2+SfyQ|~7IkZ@SikR} z3W|N)ydg&YynfjBxd z)$i?9+d7h+rnatlf#ph3>H8Ybs_Mg1Wxud#F_oJ>*tW6GiH{Uf9Vq5qk*#A~74vrpwHf@w$u~4pPv84E> zwvGB457iq}JxTGMd4zj7f=)BWSdPGVl4sWWd+f@n1z7+=hmK#)!NIqq?p}%&E)pHwKi#M&GRDSgz&~~OWm?!wZ1q>oS zO-N%+9IdeSxHJrDE{3(%0=hKRCQPsg+5?(|B=ZsReyA~;0DNdp5H|$dgB9g#u_j3_ z2p)e%V>>FX+aQwA+EcTUWTv6ESH;5>mCd~&t;NMg+g-g4^vp|%SH|4YdXblGA(MTz zwWa*yT0`rxo<))$Otp09aiG?%uz_<}{{MU{uV zVR|IhK9T-z?veZkSF6rnGISxy|5l^8Y&@L${f`YC+`FvFtdFth$-G+$O$igUna-OmB0cXtnb z%=h;yWOM?)!7U^fCIr72491d=xII&Og?;G1J#24z+dX#I#^d%9Z8x1;u}408(Vjkd zxpmh6pEgO~t2tNJb7o>2n5n!otyh+`q!&kP zdfj+5HnYdIJsF+ZCm;NbUHm8i%KDYSs-yCIe_+r4!++t&UoO2EU}ZrBn(yrH1Sh=j z1xMEqu}}aj*j`NVdGs;FK!FOk+~#pfp8w$c9c1D6?{QEE%aGK{swGQhzn0xP8>CkP zi;?u&x~Ffv=JmSg3BUGYsPXJG9$N;Ar86GDKcMS-xN`SB_SmodD+gn)e)41U)Em?B z{w--qOZwVUt668?%zae@fdcRHfUAsrTu|jQreYN!tSlL zZ95sa)n3n2mgKCaG`<= zDe5-ek1`WwDsdT#@t6%O6|1RD1f2!I38Co3N+zB3_$m0wd9ppVi1wTDnZ?v^f?@-j z6jbI&{OqdT{x!Ag_mCj*xPvHDl+q~P)zm*`R;9_N%NxLGn1YgzG zNVjK&8DFK3)V_gu+OS!tX4ziLrct#qnYDHzXQPP5&cbR7F)~!O+SsGMPS5t%w=A70 zSiX8W00!WP_)mX{4xN$0NS|yx!7pMEuaR#X}R&dbU}90$x#8J#KMxZfyQzg4+3kK0M+_KcMXOm4qxR*Rd!fqdq8cKh4LGV zG2E;HGK^Ml7XODfscftMsk;*6OMAB-5pQNF$<+E2sf!y5glu z_5k=8o8lGT|LPnz)TSK{Si2FR-A##A?o6_cA|5~$c^EJ!o{d@hOx9lime1J7 z-}h3texMG(-6$ghxTUgNTBpM5C$fWUIp!V~8^{*%cyN~nFoNs9r1(oR(Yr4SKbkdKMh>a5y0lKdcV z#NLWZlo<0|Oe?(jz?y)yk=So>m4hr`5vNnl7s(s^QUAx0sbxD2@tUCcN>iUl;iR>b zd5b5qHcO>7Hj+J!M?r0AGuD#*n94*Jv5=x{>Q1WffPEc}2f$!N>!aoK7Ots$T_Y&{ z0C$FJHx_X`zt+VoZ+GGQoXs0FJkln=mA{eo-du(ylr&?_hHI6tv4;1O*>GZ)0&U zf^z-F`SH9yFY~C*po$y!F-TfmyS9%3acxnSjyXImn>sPl9P9braL@n>N;pv#hjX3= zsT}+12@0+*>b$bL4$qHgn$r#X=vh5GSLSJle|7F-JSPs-$4aHD z^(0~iT$7$!UvM7Na|^4Mte&0OTv|U7BKF8GsOyZxrF^?O6R z+_~2(-4*NYecImo$6x7jSw8*1v)0Ibzm2osrsimMRGUK9w{x-Iv72vMu?HX1x{!H& zkbcTm-QTlrv+iwL2Dx9-jc6I-G zf$pF5dmc9h*JXEp@Auo;Z~J5JD(uonf7kCh^R0i(c0P5#>jla$d70mjE3eGP)?tu- zfS@zq`bInVmbcl;%U)si3s1W%va@e^v**PX*dlMDSh73S#*KSluRgs_?T=Ycd0tfi 
za(NH1pI+bew%}^4skSd^NlW_bQ0V{o5AO@!G5uxQ(~gd{KRi?>Uah+qFWAmApS9Pf zlJ-=iX{G+qec7)K2KKC8r1=7TR;ycliehoJ6DAdDt*V&t_*lyd7(g!Hl6H&J+ zxHKABQu!mjf#tMaK?yS$S>o+0_Lu(0-?U%-wLfkj_}RYgUcTU8;g44~^c2WjB$=|Y z%I@R;u)1e078EVKX!>4t+pYHQcYe29%7puUw<;LNA~w@8u~rD4j)HDm`=Q#h5sO=2 zix3ta1g9UVJqM$)`_4}m3)TkY5D%~%DCct?69hL}xaX;a)qnkP>!~leeOeX2n#$|$ z#MBmddlQLTUisU48l_YZU%LZ%OjQ=onRcgbQ{^MBM?`gCflv_-);2b5Cf-^iVbAYY zt)aKVl%9Sop^5d>+;XGg)(FIgLBYvb5l>{&sJm57v(vS9z2UJ4S`G0@?Q)k`>L*qq zxNz&}rV>g$jW4lBa6tvozFM?8E z?TP4^FZ>DaMZ5Zuko7daxH3agnbN}nB|Hyml#O`rcc4tifK>3_Ls4qPmr*hU# zBrQ6bY0QTf4-?osZJ}|<(v7PQCSk$}0D>Z?DPFIMe-({8is;W?xTqfqZ3!|GZFaR< zbwFsM2T(*~hWoL+^0zcjW$_{|8N^)^aVuVzN&7yfYbZ^#N)Jnb2(*ndx-Fifd?vO; zNe@F@=4Eueo`_pbV}peOJ~OdM5a-;b8L@E)Uyahg)sc*ezqEfWJ`$5Uv|1fk z(ob~`2r-Q6myG5d)A$V*wa2l00L1&{17j zTGV=7i%1>;U;1jVyM|Mru=pbO3$ag#KSF?h-0%Tp@;eHE3%6u|ryb2FWR|b>aR-Du zCW3Sm%O|EftLj^hIjQp83Ptr0`viGFu5mXN*7!&?te?<$CexORi|--jMV6G8d6E#% z2qfOoJWFc4a+1#Ju;;F305Kb?dsX~m4kR?Ex}msL^)vA%$wowVwA9BbW2*580lujJ z#4kzp`gWD^Q+vZ2yNUAj)n-87u;vG#DnIUsL4x&9LTd=Fx3COKiB|;v2DG7K;u+zh z47XryrN`B;qT0s%X#i+S-fNnBKlcCb*xB2A_7DE@Id^{uC3;YKP1^_7EdIf%!|0nj%sdV0Z4o#*4$DtZ!Pi3)V7rF8^Ax+pXhdMpcn3LEN9ta zQ{&%p>$E+MTS$Bx=d!k<{?Ay0)P|78dm7hzrf~r%i;Mrn|7q&ln%2LH+Fw?FEevXy z+9V#=m6lZ>-112G;Cc_YdI|A7)dwZ_ ze%}7%JI~t_k6gA#DsQuC>hAfK>*>iyKX3aNpH=&2_OXwB%-->icg(L`OM82JF853N zqe)RdL`~@Tj~3z(Kf<*|Vrjee-v8B}zW=ZI7bY`pdJO5eisq_*uuXyxQ%hRXlDi#Xoe#i$Gt$2YcG@ zN6hXc;Rf_#tyFGAt9QTB_Xm%B&iAKLvb{-z zT_KH7)Q%>$vby5d8L#bjtr!VgVmJz1cj+aU*b2i+-S)z~@T6?*htu{K{?c#RzxkKn zZNL3byH-`bC|Ied(Wu*?wr};_D^}gPVyQyWvTIvb-a2RLLdlhf&t1M~XIIv&tTu;N zS1s0TxDpT-Mg%2p)pV|UiI-u?r0wfC9*+9g={t?4)s$yLeclg;4a3Ns7=ddhtX+DI zhJ!5)^?R(mDZc#Yd_3}d+UjFodt6mSig~xpAwB{us2GMp(}9wRJA*7Az>` zP4(io3h-t~psHrm25Qr@`%P<(Lv}tDvfK4|b07l+tlAbW7A>OjK%s{v#%M64B3A+v zG?_ROE!DZJ_Ti>03gCe|C*4adj+Io>D)punvO$o30+_{>j`adxvZJJ*CgZO3FO>-H zKMi6*sO$nUC;+W=-$3O=qA`!3P}G;TFTU^=e0U>U(fn}G1RS{mt`xCsfYG<2R- zyOPQaAb|1~tBy#gXH%7*jz~VL`<5hbg!09+1smzjIHU(nFP8MU2;tSdMH_APUF8o( 
z)|gH-##8&?=O4HI;lvgB@cBSIO~vDunrRHgbATqkf~P%O<+582WfO5LsT|_8bWN+v9! z4XXx}?sT1zUWxW}R1dDM`0@nBw@zJ2PCSvcj_bX?W$BmNkA(%UrI0bK25=QO zlzgG6X1y@YSrmjQ&r!(tMnmT-Dl04x0O_zg;Ol4R%S?Gtree(lD2N4$TUqEDi_1j! zPe(mV4>~p(v@M3UMHJXiKXz0W(JrRgdL+{}j%#hvHc1eaKIXy2v2*~e=qIx3XTsKd z6$`75rdo+RHVv9v2dM!T&7>?U8EFFobo&k_p-coY15~I33?hGOvpQ%Ql=|v7LF$n? zfFCSiuqbgLNMlF;3V-(PjI@p<)>fPkxWGz_k8RCMta@+*HtKafCKOg;%y$5>S+1-l zA??8PxZkwi{(}U`-=$XPAhMPym9Dp63jnpRk%>0h0)7I8H$sA*y!~J1Q$8LS6HnemIC#yNi27m|OTWS+AlsE<~ z$CcEJ?c<^i$NT8S;UX{l`5{@3_i=y!Kr))Jte(kU)|?i{r}5NJeZM%FpHuGq_^?hL zpX90M)sFPwhT!^Qe@&g7AMW|HNLZu=`y2H!NCPx4!Kj zt6cn)WI(G&;=DzhAFx;4-q7#IoPGTFpR!5$PisBZnyLy;+p3iV>&yd?E-hG=bR#;= zf+dzHSZ?KYR=fH^J?E6BP0P6!=U`h}uq!VkUQKZxa5lUuONH{zsXk#eK$%cV1Kc#~U_04&^yrg{g(vIJ0M z^WN8M?&s~<-~LUHgZ!n?>fQHPcX!8nmHm0j*PoKbvfcL1@3qa>-e;w|US=2m@BeZb zY=22hTGBTbMc(zEAN)I7AN8`YKib&&-VXxuV}B0~+2FaStkG?F0C$w?`{|^u>slM7 z;Gk!{qFzvXeOCx~rZfLany=fjWLeXKx7P01)4GS44RtM+O_WUGnFqKgUdc`*WjDV& zYCrJje#?IOUwyaT|9^HZT;25-;g%Lq6urFb@HO!mC6)GrX3Z))SA64Z_e`N=D`#)9 zQ9NzAbk;3IZtZon0#7WIPFuI$aL}QbOj&Nww+PBh9w=%9E`jvsZ{bG6#@(JT=sgs( z>Jy4@0;ltpK3_Lep1Z$MoxNx{h_x`8T0{9!I!00n8|fMr3-q6_;whi_A&Fd0ObcR) z0HB1`cNDGBO4Y4;P+VtKJ~0_8lbG$!BK9r1rBSO|a%IKhxE)hDP2w(S9Nc}6%78aL z@f#OZD7jHIro`_-t7Ydi8GmQQeHO7vP^_U$jK|}aSDzZ{ODGb2ksnq4e1(ox17k_~ z1dR6B9cmNc$K^!a6}9BUdVw$C;kAE-KO9+F?dm7R6`coe-1GvgHlt5Uk`8%w) zec-?a))+DI4L5DmSj63jWt5jTV*x-A>S}r2-2fGb4U6^LLF|%<`Z$hiOf%M=g*DCz z8@Jl_dyhP6v5@+tF>wnK@f_tiu8au&olm9B!^5kdS|X_*7E^b}v>uOGOUdh{qPMxM zys50NtG!qn&?f?et5nNo(ykB(;EJx4Y*jJ}AJZCl;)O6i@Oub9m6p-HGxePqAIQ*N zqvf$i_>v!aj)Hnf~2kf5?cc$?jWRjq&krU+=OAJfdvv4E-}eO68Y9S?yu-CK+{-1SWt{( zM}Vm=ZrG}7*Sh8tKV|?Oh>gJyo}BvJR-TA>8PYu{FA2`>aX{dM?p;_z;IamJB1X)3 zquaR$YQQyg-0wDfP$v@8rGinV6lY76>jr-5y>uPiWd-W!onIu zEI3#TkrRNokoeA*KL84VSi}g4t6u;R{9vuA-^DXoyYHZ1poS3mPQ2Z%tvqUtYl+Mi7{#YVQ$ zZm18MS1L!G*5Z({n&x^uO;CBQlX`Y^t3)x;c&NT%R%?E5pl6NxuX)8h1?caJSAF%h znOw2tq$3{A9PGhWAOHt*HKBPt(p+Ocui=4AsvpPKE zbk4Kvg7^|yS+&Ia8AL81IxFe)zuK{>ieOJ7$9{3XpA*-EgJ9QFJKN2UZEda}J^zpI 
z=j6xpIyq)r)OUDkUgxRz`2|u$s>qY)X}+5u1Ek<$DyOjv02i%K;uRne;AcdmA4TvIUkb-2K%f zTwS%VUYVbrPJAf1|L{Y0RlhlRKJ^Jd{!+meD4s7^^SNjI-Txbfa36N{{y%WAY4jlp9*$uzJX=oa%V$ZgGgcf_A{;1b9h$7t;Bt-?RF`p4IoR z+STWtwtH^5)p~`pU5=#eQY2xWSi;KFk>w*1t7`E@nLbRWELkdA5tn1FmIt2qub{_c zSG1vg>uK>u35-h@zU1fIec}P2NNk|M)dK)GN+=&u(Dgc9k3GZJ{kXTm8X=lZTS0GY zGqr!HdI+qXReAkd-7@jGCCg>&HX5$% z)xUwB<#6yn60NU9w%mBn#Le03LFY~m+h z=c5EfIZD4>>8bmPrGh&u02362@mSp5iscG9w^9Je=&Qdd7ssjx1$<58Mv#3JWLSw1 zX9Pa-@FqsfRAsi*PQHpys0`d2;esqYP`lz$>n7tCM;SaBS*>{1@}s^*B13om1|Tq$ zT&{PUHqu#o*tN(sYi zsXUbVShrzGg)GGqk}Z_>Y8Sv7GKA|OQ=X_~gc4!|$&ZNV>MtOT2WD3tlTgI1$J`ZJ z0{3rm>(A79jnhbD3vfq}bATU~H{?r9(0rTv4@Cf5TrwfYSp2{vCL_xKj@pe?)81r0Q~}WsZGe)uEqmv7U~Wc)-pdrXS>(oU301P3IyAad(F*X2nSQ7yh=hSChg#j30 z4TL*C37WgT!?Lc?YWp=nz=+xdIFypCW6{J{`^XxA0$>_z3+@FoQJo`;;un@(tV3A; z;AWD!HPGCTYHnvV&*Q|9(LJL`+#Vy%RN1(49BbTI$0zZk zWKZL{bH#)4r&Lc9_h0Z?tZ0i5faFbW@EbQ_SfT9dJgH;W+mY7lvF2r7>DxR%G!G^! z3y=hWhL~YkB917(n73WEA2|tE8#Yp2z7A`bi$2GulE=LExHF7u{1{V!AY5mipPKlL z^;g*L$Yiaxd0u0s|7z#rS**k0T>HgPvgePVb2z`o!F;EUzh;&Fnu7_%T_TO=zn8bT zabBLEdYQPi3NCp%*XPIl@%`a>aP6>t{`dH`pg!n$om^kkM{E|>*-S3upbHf)0GGC8 zonyCl6wemr24%DHS9=vsyMuK7(cx)*Y^l6{Z(ti6tJm{%ejNNcejXej@8ik(uIr=7 z%k_PbmwOjwoW@hnyGRoquMfJW&Uqb&^^t{3+WRT#i|h0AYwHa10XFdrDU=G=C>oDhx6w3$%M<9HSRqZ7oyliyQmNS>k#I|nr1}&Z z5imRW(mhu!+7qp*g`;7+IUTmF>L=LMwAb?h>O-|V(eGQdkhjTX>_P5vw>DIsoa)C4 zr=)XufQu`>Y)5gLP#c@tXT(e5$gOs4rxNPBh zin|`ZDp*M--HjMP42o+kPYBvi{0S^r0GLv`PFxWbynLlT)_x$KjM6Cwqxh;FH$6$! 
z7ZNXKoxa|6XI51?$P7SCLu1v;ZCJA3v{H zj;Tq;3mP-5F9>)~9EOa>GA}+5Gl4Iz!y3N~0HUs8{nXWUEE4kKMFoIP{TQj-uEveH z53NSSzZ4(<9ODF`8A7Fwk0=x>uAiv z>Q6i#D8I2tVH~0=7b_TCqtP$Gl}@{BBejv386C+J{6)(K@I}ydEUXAt-%#3EeUD1c zQT*dNt0S4tsO?@pzz^-xF*1*vtDu^&yIMjo;Xs?}L5AAm1wU6SNRHy+ zh~zkscdUN$jERcPs{b>uUwh^CvUDw+h+3?V)2ERY$9)^PqF-$si{GlVqq2q};`>Cb z$v&R8R!Czu3)?6X)x1=@G$tMOH?())Ytpb!ES!jBb>d))M9SNS3qY)I0Aez_j?z9N zS?TXrt*)^H5W$t3gKv^S)uT=exI?mu3qlC7sc9JRaOESY<`*)zYd z@lajFKMK^{LEV7HN!?>UuKJ;=xk1biEV{6gfFGANcW^-$(J{bKZe>H|XMzCvlJiM@ z-(An))~Zi?=(A+1t+vu9Kx&VpqkT{PAchRV;du_AQfKV~42%FCz*~(GF4Pr6AL%^<_+Jl=wcg3bLz>Of5cYvH&7e&>s zknYcGULjmqU94p!U09w(r6NWBlL;avxWp5`Zk?0l*`|J(Dc;lx2f;Ms47^ zgAiUy7c6!5mLmnWQ*d&0F7lH=c7mgTn4T)e=g%T-_v*go3t7o*@-T5x{*9jhTs=I5 z$gd5E;CT9J^+BHNSfba*=Ew6oPt-U6?}Rm3Is@R6wVXC&6YDU7<`b*R$CPzevIBX2 z4CwKO%*#93M_gfTomoG;en^~aTl8^p?M8j{lVgb|`?$EyVL(CJHK*E+^K`MNPQMiNq5ATv%CM z)9=Wxm7x$~=5t9I<^i&+b;{3+#E>1?UnZU6FPw)^~3_O^Gt%AR`Ssy+N!J?GB- zP5qt;fY=w>uwYpNEKB-Av;a$!@T(;(Z=8L z;}+h(V4afv5i`Q3_Y(;#HQUyW$E>LQScRaZ zvPR1W;jkq`QJcOaWZ(A}f5SfgiC5Tz|8Zi`hpTpFFmylub(O`!Pk?Z~{wMwhN=Fp8 z!B^J=PzQL4_}9;TLGNys)IQuYRVo#G>794jy55q`>ah^XCT*)#vs)um>!$)m+}>bj z+ffvG#JZT+a60xct9h5&S9=Sp8wIIe?e!FjxKh5ySL-T2s`RPaYW=>ux+2&zuC`Fl zHAm`0t>N!x01SXDy=Kc@FSY6|EAn+{yW{S%XmdN6w7mA0x+A-!Cs(@Pwzas~X1Qc- z?i=*IkK=U4I@Ow`3wfL37Od5F_d$!cgi0lwb=rX?g!q+LJCaI|iS6!=Y`|CP5j&WS z+?oJTgZ_3jmL8vBJn4t4&j-!_%-{dfVxe#hUwS zoA}iqSzn`$3mufvd>NirKJMG!*|SewxMa27(DIqAMS5+!a&_Bkdsl65`;sL!=CNeP zl3VAjym7`hR#qI4%7)bbXw)rqu+~`s2qDRHN8@=$_e~rG3fyb~T%f?h`f8&3+r++@ z5ThXguHxz!X#hYd23_WrPP_@e=m*^3D|^0>2x*RV)&42GQF=`E!TYS5R8^j|+Bckx zt%agn`DUec8)wTF+kV=A0ikF_{%5)mnVl#f%Hy8WaK#6B#k>Ht!a@bGB&1^i8|D~u zi8^qf#TP66XxQCnp_q;-AMVa52WuCsQIHqfhmyZ1KJfc0qP`Qb9M?w73&75nttZVZb!0-_14st^b`GXbnBc*Tx(I%`eSh;ny{F7Jk4Zm zrg_u|X%1)%a6=T;hU~JsMAU!47{(FMiXWF)RMCEbPb}P++YXi~Efd#IRLS=uC_FB| z3aT3yP{;%FfmIIjGnr-VZGZX`R=lZfPyT`C0rNt6m{$N@%stjj<{P{-mBAeEt4}?Z zQE9cTiB*OAiB(Tz^^EwFcg6Xr-?8cL6`N^nTap1^05mqV)vf5%F5LbB0%M7U8?LNm 
z86cRoleyjpyw!a_$pr#m)VCfDV8@9i^wKNA=E>AAkyg9}6+8hyam@HKm>M4c!%5 zVbt}0Mn+oyV_6$Ul9rXMfDFe5J)3qq-iE%`&rrALRz{J0)<%+lWVtFit!a$5)Gm+T zqkD;agC*QZ`~V;U@He%s2iT-*Q_U%{-*S?(e!J^=2l<@EI=x_isDVAr2kwa}9Sb^K z&AAIoWJ7(-h_AR81PDimeJ5oo|5R&LWaF%AVb=0j6*&d_AT6NaNH-rN4O_6E=i1vn z@VHDl_4)9k=%|d~um97@wRK)!UvPX>=e6}6-RE>leNb?$J9O~EeO&AV_kCEnWakzx ztSiR>7p%Gf0X=5U2|6k(WV?E1m_+wR=7 z&wlz@>*oKgg`&i~;Xd71{jau}f6Cte&ezzpPw&_hk8NvwwQf&WEfn7j@{D$MPww!Z z<6Es-vD&__>v##Ud~;LkCHf7r`c>LC^(e+H%ENEOaN}~Mj?#ymLR|=^@g#8e`Su+J~bTLAd`p?v|!K4lEb2=-|tGwo)?bUjyTY24$b#pk0xRm4z!O!r{3a z8r_G2P3b7E&XotG_9Y@SWGY1>2kr!^{N$z9lJ?SnWoxTpYUaM#`+eq z%5ZF@LC;-+5!0mAY}#Znw7%Nj)#h;-^n%XMHx%Xvsy|e%x;ru4qT%)`8;#n|Xozml zT?FM*39IynmPJ9T{D2Jr8n_5TxqopQwOS}{=h89TitsgV!kYUBZUvGq7OWMETSNRN z@HsIiu(%kZ+|&82-}mE5rK%Ugf)&-~a9rnV!$^5i_!18WP#~r)(HpoTd%!s999KWI z8D%!gOTe6w$|S}F)+i`M#a34m<5p_S*ZYyEO$s@Qiey54!%0gwx6NubS5&r9!fU(>DhLH7 zU*Hp~;xYC6kt@4a*BB8yrau_C8?U(fgq059SF72uVY_MJTE#kziVc;PSzWXA#!Z$e zty(FQvj{HM(g_D3LXv65kRay`?Ewa8Kj2PWguQr+=gLAg8bvsN=?*2sT`FX#ITu)pD}M^rQWo-#?IR~npj#gspeaS@lko)3xI^X zGY?R&`v_K#mBK(}5&Rw{JN!wCClE0xu$Dqzry740?Z^yqX#hb1-TEpMrG83zna8*; z1AODF7%W)eGuKh_XViA)b5?U6a0*MAMGP0LweoX$?&r!L@P56s?@DVdNCml_aCW+dBT@hD+E&HF*G=ir7b^i^M0`5AaX z;e=$y+o^Q~@B<5`70nTp>crvbs~yBo;RmWK*Tqqkzs#Eh?G@WHsRPX!sX8;)$0Rd?V zm2Q-hMx?ukZt3nukdkf$Y3ZSB=^7o>zr4!=DP1|@BP{1=1>@j z(kuu1qq_5V-`y)0Z2}v$MT8QX-T`?#RWHgPV;ehz#2{_PNDN^RWjPgW0m8>&)`#`0 z$~89R39yL+vH}-rqY!#(MaS@M3Ke*atMlbJjI_v)>1G3`4PLxp6$p+VboAH!to)58 z0c0OU$KWrIH^RO6SFK0Lff>2yK|5s=>A#;7PDla|t)6VJ-mJ!(##S91o}ziZ3%&dbA`q)&QZ zDvoIP4L1g6`vJ);>zTRUYwEI2kyoPyX;g3Ni`h|Mu9JuJvMGg`TA%m+Y`GqL)W14t zI~S;XZfy-@R6wA^m6t{QHl>+Vf8K1m&G7T~sl7e6AP~G-vJ@jtVy4p%1f12lZtG7f zI6LOSD`r+atJAlJ9?p2|^YUD?I1Wojl7=+B*(%n}4PKFId(55*is!sq2+Ol!m5zAJ zF5?2f_x=!_I5gcR3o<8mYlOo&6L#rJLB&(XvCdxPgxPfD0IE|?E|FuU!ZR3Gnd8fd zBatmHAHH$Cl1Yk%gKq){8VpZ8*Q(J?<_xCk=G{gN7^^h7Rk;l1MV`Hp`6J?@AcI9U zHC{}HtwFbSDXP9tGMgBBy&W%O7z`T*W>G!xUN%84o~-xvcO`P!HO|*kK~|{#$q9oZ 
zXUJ#^fYf#^#}6RgEmyd&_b#%~SkXL!m&}e^ot*htjTi9JL47rlams~ks{zC@<39EL z_dI)c7^|-g+^eR^tyeK&|Krb9hR`JtS@OHRntI%~^|ngbI|E~+PT*$I)JpvT>))@$ zvd_fU(iM^g3$Nn{G^$_jK|h1aS8qbQ_uA{G>q>HFG`IMp6QKQ|4Gz-@K69 zM%|4t7*^S^5wAjtA2Y%UcY;WYMKNWA;dQbkfTa6FM~|Y+W49XdZ}oP(#o=u0P8eD9 zYr~EyOTN%TYg;wIiBltdQlPsI@HvbCZWA*Ic0bX~)X=R2UVEXQ#XxEOIau!R0=>AI zKM+Qe9jun(VUj7hDddRLhwm`kH_#v$Y`leo}!ye7r2`yawvmY+gT1y1})9`y; z->GTw1kCl3+^X3tur*p!+wmJIUmS5bb$2vWE`DEx*G#2UqdO>@h3PBcZGU(k3co#} z0WVD`FC~4F4cZ~-Qo86U;_C+gG-jadOdpEx0f9#6mwKYK9db!+$4_SEw{=MZPfs~= zu$BBWRN>Q&8Ri=2=-@HtRDgk2+<~AvuhxPVL@E#2J*&~9edaW);x(_nt#?y;p5k=o zJh`8msdd9ogS8Q!DD`biBQXOPyTaCp%&b&YykExNkBNMq8jI_YHY*=gF&qcijSb1HszoCR0h{H`^)}8h1@zoVpb2DAHS8&WEf( z5;u5q_I!-6xC^TR8(d|U27qhagFDFsxH5Qw&h|wT-#Hd6r#2LhC{j}J$=K(Ksg?hX zl)lxYQN2SI2L8!y;8>{^W2J8Qge2_m5C=evU!k}6SJUNO8N!mMJ;SZdV0Az5lE?WC zmKY0d33i%q;YE1!yVF>)sbRG`+_5!n@rk=!&v1dwbXLke|^CY21 zt&l9QFB0Xl6EXwa`R_%GjB7AObd^<0^6;ASo9oCK6c!s0Ifz8T+_aRx%}xocW;T;D zE_$ZVHx{z;FTCy0J8M&j2xD@cBXb%0#{=aSv`tSP0BSH9^hRJXZltx;}vpm=n5 zaC}7cB#QZ9`xpT$%$cXu^*WBg-8t$1t$V1wHZSf|b<8-41r1B8_3M%5C0FegCQsq} zo%-6*#Rs9e<174;dd92X`qp6;rB#k} zHvwbfCaSG~`4ufXQ^0V%=;{(ogP^fUdu^Xx0@=Pp3BP?i@Xgsu{b@XfTMK66dx*x} zhi2~G`WgyGhyNqUB{`EJok@?~1|p`dV0sU!t=d0MT%~osg)cnZ7I+ap3-2SBY-9uy zE2PS;dOF5~w{rZ74l4!DHD#{QlQvr=XB)#(notHqjDK0y@5+cKdD@H^4TK3iq_I|y zf*RprK@ixnmM>d{p5N|o35{Va#o2vRFzXH-no<~`%=&vSc|wXimN7Z5k^s4CNK>Rz zM%Kj7$z&c%(w=~&qH{$W)OX!>f_6PEJ|#5q1j-aQowz+75?f`tXqX4o1xnSmN@E{K zR!deaIV_G;NAJQQj64hXUaqyv72%1+x}{u)1L?Y64_QAHjt=J%#fLHk3Kv{kEC`E{ zk&^nu0j;Hs`SM$hWe2@O#hU0Inc9n3sm?a_HfF*~pIX8o2M-&V@?Ju158*oyeeDR1 zu@xOfS&5>SE8Z(ogeR7u+}-R_;Z9EuCZDcM*o#xt@Idm{sv5id0_=%(dQGN@hAWN5 zj31ZRT+Isk{ZU_>p$r9R@^8b@M$Y;jEhWAgPbvB0ZzPe>Ev^yk>-O~^L%an3x{c9D ziJP>92I8W4g-uP{%|aY?S*Y$2@7k9*T5COsDvhVwT;RJNT%4p`pK+J8t7enq3e35j zR%qwbVeXkO%P?X3D5zJ+d9*R6Uu#>Pr~iumphPHsLQ(A>GsH{Ua^XqwaT+QSXOU*n z>@cJ*+pFWz*4RqJPVIivJ~^+v5tZ2aiL`KKLUuX$>@Q+HZqFIM@zi~Z@h|Z6*CKXg z2IITP9&Zv;WTS395mDg?_!SPNU@_MQLm1!!@SX0#9wgq3$&_=GN+FZ`^V|@@IUxYW 
zDY1em5uvJM4w+dKg=xB9YigW-x~KC2lH&R^4q~&e;iGasY^km!bnY%1dk*rteY%Q$X>_6zX(77nf?LU)qe;-DEAKA}=I2 zW|aKawMfqy>T|BBVS1210O6+MFObSC?|^nx*L6K9) zE{$3!`(qL=y=l!BKKsLZ)yKr9{r4*kZ~xroL?z0vdPicgj8*g#6|w?cneV8V4Ru8-+#$im zsa>z;a|Kaeeuw!n(KPA~R7N?t6y|$rW;O}goIITf+U$Mt@VZj*s0Vipnq{>@_IN4L z;bfu%2&T;XKkzugN?cV5SD&Ged20Gy_PCk*@I_sn;fzcYi!JyY|31&h5pRB$Zdoi4 zXx9gyWACC1FW{7mlc+FB(rEQ*S!#$p`~=9l2VXj;BT9;@kO{dUOqO+f>QGKY-7QD# zN@aVY-oNeN7)l#dCTa_}4^*vO8hv4a0|M zGJBa41{YEq%3Me9bYF+mPv6fjloin<@P6;KW~YrJCRtR0KPUG-@XlTD#dlva z*8NRv)A!rgXsllfFHC$oDl_k-ghQ_HgN$AF>)SaIWQ@^JZhnPId&8C;H}nH_(Xs9J zyxI25>*{XB=mvtUj$M*ReOrlKD1`zv6TN%tizHNj9fMFE?eyqq zlh^cjC9dAkRX@2;o?WH%>Fi00&u~*w_o&b=d`zxZXW9kcV&V4_;bo4Fiv z6Y?c}-&kGp`tV`og|?J{>2>R8qxz*v3#6qf&gbwtbNr-(Tg&=jjP#iP)1RiJ{fjZM zNu8CQ=g`n$9;=X*nJvRYZsr{{=(U})$3&LRMX6E=a;^9i zXUliI?|k@OGd}%SiG=lHHzg8(!q#JwP04Y+`xJ(ho|ruqHJ0s!+%Y)$THntq89VlueI*MJOReD}0TGbiOD<$5Vrn!%`51GTJnhU0g+ zoT1N{KaSEq)8Pw+Y%HjP+5hqNK>7(?Q*3Z$i9GPy{dUTgchsWLPWz)Mw6lmR< zamX^B#k8nZ+9URu>>hen3lN-GvkeKF%?-lZp5u{Wtx3Sm;^787?r5wShs!Mj_v>hHac zM$@W}fe?dN{tw1;{x5KhrzaojWRyZLdISl8beD@H-)(wo@s306|LhEYhywCciMlM6 z{Ml5OTr1VqQSTc;ifq0~@*d5^pkeg&^!e7A{W~5=H=ga!?>euTpo#&}x0Zs5H4dtM zjduB;?Y%-^uT`3hn!%k}M0Ume39H~_0H84s|D19e@br&U5d6mAXcC5y{|s-VR%T1C3hCa)<*Cv^39a;QyD~}bA&Q^36+tD35QxKcm6wl?wKmz3AMyL1dt+xFsWkHl;-pNYiQmKocoIOh z!+RI+UShB=PJ1vBoQInM^1(MfHBo&u5F8Q=f9BE=|1x&nE;*92`2dgbg?@u2O4b8C zsV!HI)V68Di#Lus>5t>>)22b6;zk<5x^{p?i#A8fLJ-or~ zjfvxpXs5;N9P-36j+yh|83AxT;1Z=wy=Sau{x`pwi@?8H6Ah;W*Cp%dl4#be9RrE+?J_gSr!Tkh%BGuN?~SzN4VF=YpzG-`z?NBiNyCcGXd zYdOyFGVxqf8v#-;&uv+E*9w9?3K!+>vpe~#Gu4dI181$nNB=sIO-6D)GmrjsJ&)G( zKqlo@Xw(d`39owQiv4V3t|Q$S$&WDIzC;aaTSM1o!7+pwLdh=*>A~L=JZ>v^Z!jU3 zeUqf?v3{N4Vj}ve?)ebQc-1hbRyr?Wb><{g>T+~UNs%UEzj z4%V<*6%n&C*iXuBp5MQ-sjv4jMXDQk6Eh?_KYuVi--QVrr6%H8U7e@tv$r{Zr+Df< z@gN_&=S0&W_@Bm2sDN>hSm1ZymsWthz4-C$h69yRlJ#c-=rM%1${Zw5x;q4RT_r(a z2Z6ED>-ifSA=|aS=he45YmXH6FRbu67{FzRtpC|ykxMO45#%tr-vJCgL@z7UUVQP> z6ll&WecBX-M+Mv1aMgC&o0NVZjx#R3iw$jZ6*2rZMAe>z;hkNM*<)?|5BVpM?y)ev 
zd?EVW@ifKy98)VuQeA7iw2@d-1JxhN8`08rJ9?KoY^}oSN~2;`eJYXT>dIS-S_{m) z_r{N`B4-O>?NI7P?LMtq)Oe(;S7KV9f01nHZK^PSl9)C+>j4WvNNi@U3m)GV^9a6& z=E>?K3|z(|so!v`Y>0Q^$Iy*v(b+34!%3gSJaeneHjai|?GaT?7#KU}Nprkr4G8A& z{6N5PuE~t8!WV?O49kM z@+*FVB?BIQ7fcDl$EHcHX61W~*MEi9=CtOtQ)h8z;l%m_QWoPFGTlnD6M8eb5-?tn zmOKKypq%qe+l!F|gF1`19Gq~ldc<`8zfC$3*&w|>BZo(WA`R!PQiVm6?LeV#;YV%~MiT<=pbv;r0Ss-1^xDwgRz)TYe&eS^Z$bmn zErT^-9qNFdjq|80)WxmtuEw$s5lDn$Z*?&LLu_bi`bv#P+ujDUM9*EU!gr6vUt7WP zC;w;A?d1Cx&vg1#lKEL47-04~=Yo9#Ilu&fP_&kKJiJoAum$?WYhK4cilxpEpm<5M zfmNvv*Q|y~taAZh!)}XpH&BS^fUxl({wVA-TgQEsTrxWPHM^#{y2@=uY1556wxZfs z73j&xumbAoALI(leNK?#Y4YbO4 zpH{jx-L7$GH$GBMvyTOt|9;qp`DSE`3Iy&_z(WDKFV1qZ9dqm3I=4nL`{XW#Y9B{$@2>k!Rcy{5@gI>A7jmbaNsz~?!dT5RpYD_^h2`lY=4owD=q z$nc-Qc{?qWvS2rx97I23o?k85yClU<&uT|uh!?clcL1PTwxTP~YJjrKG0 zy+UN?;SV9heCw|c66b^NOPj8T`Tq-KZ0D4t#h`%KGsp<~=G{ZK-Hmapg5?RaUX!Xb z#@9cD%ITe>hkM_wfNzTK%Yhp1ezeDflbo~v2Q_Qq`RzUC<#&3L<-BH^lB{yo5&l`C zS@oirIrWXr#UE(Ehiew#);TH*3(vV^Y$iY_o5|*QOPbKSL)ab7HQo?7vZ^icRH#9IxZLTPx5H` z%e0pXVNFAy-W}7u4qICl7U413#qzKd&+bc}LwhIT;OoY2w}&jRJ}Yz?6l2U`JG>_A zmiiaZ1!EV9>RXC}se)cxtE{}NSUS0~$=Rs9)3@P(%y+fG`}M~|j*^f|MjP-_{op9R zjEQ5C_EJSB{OeF-;`hbIk;W#pbqu#z?Qz5nyp*f#7;HQ5CT1dpTNUT-n8ka8!vA3u z?^h}^EksvsslbdxZ7oRl1?j-&Ek$dEDk8rRmuf5Sa_6#ZPpDkvkhcOTyL`v0j_PXT z=rOFUt6DM;9O~=&eW0~8*MuCnOd@(<{@4hkP2H~fdL0(GJy8-(ha4)qBs zQA{7cB4aM4e2S#F^l3ka-~*dI$q{1TaKb~c?-9ZHYRkR_pE5pWeyMlw(0!-Bs<4(f zANrA;(Do(khJaSf04`k}Nz;=lJ3w9b_KHLf^me0*-IKLGbZy9u3m^j#w0GN}{8I+> z?KJjP%x=%4plbuN91JQ@daVCo`vHw6*(DmH#zrsoB5PLkQbaLUN*FY?E^Z9F@AO9X zM|1v=Bx_)ieM>VZIu{|s3Q-dWs9(xmBz+|{?x+3&{%7bgO%VN>?q84J`uBg-As>(| zw?Z}ARYk_sDFYMcsl|&FTDeF*Cur%DIQ0y;ZbVhwvlF``rkq$sm#>7sAEb_pD$I5k zKqO91)$*yK-wcv}6_h}j_1d3s#uQ@epA*k+us;4Ruq<*OQ$_;jK7t`kyp49CjBRTA z(Iy@|X>=QhxgkXw0we5|`0URp2;n07)QST-?9Y-VJgIp4VE^>nWHW4R^C;?mSly0d zVIy<&4hkGL)ctih-AxOqQRKDjQtip0X8(>{p736?*-m- zyc-%8a|_D9a+x-3=d$e&HG>MGHis59|LYCE4QOhH48Az*$a%{~HlEOcWgaoaX*3OB zVbK|{q>1M)+YwVgpeNAqcLs!3cl7Ia0@=1MBT{ztbYN$vKz^Mevn8?*=fGVNVc*Dz 
zT8omKo{NwhY2w6#_S)VO3V0*lm}ZX0Dob;Ugg9_ZjOy0N?!>pUCkpj zI>XV`)JPs~=sAiBT0e1;Gy#6JMw1-rubz<8Zu{OsoLar_PI?>UuX~tsIcdvWva}g? zJth$D^8VXmi9%>}vBNNDGQt3VyHm?_c6_8H63W8@ zT*1dJy}p1wr#C-Qr znS$(yOTT&m_y@4FNc=GjKC&=sCdc%NrH7a82?+$5W<~qZ@Oy)` zjF_Y~sP9mzHcw?X;w457<>MxS)C%MlYM~%>S153CgD2kHa$XDuW1N=OLT2*=G|`FiE>g2* zsZP(W&Xb3S{~IK|d93)7z_*(I^zSp#|5e1+@d#q&Pa{GEvIY<9ylfm-nrysRGOJF5 z#=w_tj{$NEppdLVqqfkdZ;Kko`v2=AXklvzXKP8iyJ!FBGK-DKC?>RTTSp6xX*vDX z^kZIz_R#WuPa!#Qqq2`+Rc3c&ubYOBoTe~8Awd?*{ioiZ!E8#aJHun0i|@vu@Am){ zKcOv`aC6>z|LH>4L-5ZF2hzqC;cUjvt>K^ho2FlbaRuI?#pS1WQ&EQUy#DrZIrh=e z&;B%sOCNoTre5#QD1=C1_Q;I;P*(7u?Drr8Ub4cO-b&S<1U-Xd^4x3{I<0>z_4tW> zga?I8KLWE|l;VnX>3Ss?u-&v{avzLg**;OtjpcgfT26j5T1^XOHS$~M>dkvU7;V#Q zb+R`r`H}5^R}Wp8Rnn!Sx=*zNgYY_XlP7yMQoM!;HBT^5jyVB#<_nuF`q!8BGF&52 zyvwL3r?f<&(qyu&dvwz7%p?o*S_YabmG$f0*i>L^;71#y_(H*iE6uZz@*OYe3a(vP zfj_4r&-=={8(|2V4nuZSr`S6Zgq#fmcIGz+I{ z|Mucj3;x~BQH9C61xNDa<1PC-xW9Do=F&LHi42k%vxW zwk2=Y+hnO2y_qvz4?>fCvrJ?+STb-fy-HVVFW6zZcSC#s@|?bED>a3ERl!vV8}Sct zjgf5p_Vt5D{lbSz%aeTr+w}pHw`~e>Qp^e7ZGQ{g8|HMVz*g4Eqt+mQ|C}AmK znR8Vmj?^PHDS08r2iLr|4mgR!;QEs@qC1iT$mqhpUB;ObX+E-KyH_;4mhp*NXmGva zrtc=}X5+wEnEu@sfa0M)XAJt*s#xJ`?JKLKqWLfv0tb~C6#!kp8LH7b18er@%8%`8 z#AkxvihiMTljRw5zU`fqo&osVFC-IY5;uiyW16m!*cH3@^H3QhKn=$W{qXsH50ns|o6(udVN|VMFGxpApeo4!I@@i5X*F1)xIui1MWX<&ETL&I7DP>c#I>ot}E{ zSc~u|L6mHHRTaKeSBlcc4du;{KrGvmNU?#iFa>{OEhPv&v0R}5|I3Q)j*pfp`t_0p z`DbLjpVs+-PO`(&2QUZ^{y_Djy~Y>Ieka=(@rZk-a1nPFrtteAiW|4{hIOpcY2 znMrd~?8P$2bnDB5_s0MDp)1`Z%fI&r8U37HM#OhsZu%U0K!grqVhU#bkCaeP=)#J? 
zsYf#FW?93GMeDWp*FTvQdhm|3o##rXndu$A-5b4yb-nHF53#;Eg6S++(Srfon#eU3 z+nDzW-;+7UzK+S;5o|&iUY;Ve*?r5oDf$Bw3I(`C&dGPcAb1ryn30LRv72H+AAmquVgY5P?%VKr|c3plX|4>DwHney`JV|u4V%>uUxK1o4Af9 z?pN$8VJGL_n|9Myu3PIp|7jrK^z9B3U$}VMc-?f0UahKN&wd*7IVMHXLCWn0NV#1v zB?&2D+=>5VMf-OuUk6aR#Mpi-`)aZrq&y2f!2r_FSbGxIP9}|vt{BIAtysj{d6ml@ z-(u;GdtItz*6f}<$zUAuA9*r4lQm-k%T7BEI~`45l_6JP$lP9d!MMJR2J<|S-wMMZy$!+{0cXL0!4+c5Gi#l=BMU&3)ff z`&L>BK-7-CsJfPqPE9A*r}l1!j{%jM|2Vnzaam*bU`R}6FGz@;M9au{q)k$eOZzE^ z{qdxTJi%&UZM>MFsIJ-=GcgFovs`^f#%p^2h|+an>&F!8xEH{?1+vlGlQ>wM$+|sv zfil+hnwpdKnX!J^;uVur4ggDq)Oi73dc4rW__e7arQU=ISxrTnMYM6?dQ!<$6y&qLN8K%3BH01vE%Prt73iOebXL#rK^A z(`$B=c=yDrCT%s#;csqui8N(~?T0ANs0ut4!A2wTBPLAw4`_4}9h;S8(*)$l-QNKA zX+;k}0*V$&8U^VYF4bqXf!Dy!N=2{~wax6lmYGdg&R!g~l?L-a_6|-Bj&ytK5wJ~{ zR(?(-1)br5TQYu=`Oe zfOJ6z#;(uLBJO2j20 zBggFBIr3J2-2tcj!jN8pVg%a=oyDZd^wqCTU-^}sd*3yi ztDC;x#pPzq16v&V4Z3K5Ey&90dM9`)ASeDhJm!IndcA z0#+tSrMMwaC2dfzH&(bw+AbrYfyz5Da1l=k5Lx-e#2!*}WVJL_JO+g?O#B`B+qM+v zw&J#Qk+KC=Lx$zXKSrjIELK+)*S(Mm`3{e_kPpY;?ze9q?YhHy9-eac9x1)3Q3+Sg zd0tMT$Q-uUqIh)9sDt_!|mkF-5go z^9EUlZ`{?@)pq-ST&o_EPpC1v^5EIDw>@*`bU-E6>6sxLa#4K{7M*<;IRAhw%K9La z=YJe+&VM*(9Q_c*xu3j4&nnwOoAT?wNVYsKoYAf2!0hm%?ObSBhNOMr<; zvmfirRTGh!dU|YgO@c-P^85yvtl0^&)hUvmF`ZQ zn61mjam%Zkx7N1duX4!U(jPh&$K@JIR@Lba&i$E4>myUYPryJ3!Hd%J

KY-igLP}{VuQ-qde(ok<|zH$ z$fejab7QM&-CvhE^JXx$^Xbuby*R3$J)-NveO3QX^2d}fYrACgRPFeDDyXnL8#3Lp z(3pJlu|<4p3HnwAEt$lh3D&SsC|`ckjx|PM^ztnz_$tRF;xw_=T}Czl)}&|H{W$Bp zucVRhU~`bf$CNnI)Rh0_&#%&8+Pcr?bH#V>#!MeX%+@8oJxC`HB@xR(AU@js9WG!J zQ~d(tQg0UfFL_r%u=<3X&%@ggr!`GRNP_ii_iR^*o@7aW3&YyVg_C(nA0h;wYtdAF z+1;;?mibno;^E??=#h{o#chs8P>?agJL{HmCZ?$LpHWj5i z@ZM=LVYJrYRz{l`-AXA2tF)HiO$MTx<+eo;u^^8VBsyWhjH%R1>1L)^#mlHEhke(p z84p7AZQo3JC)XO@=Op88?mF)(L1W{CfZ=NohBUFYVCYO-idR_O_ndj$v|s8`fRx{hu-)GhuB4vQCaUuF z*qxctE!Tp4h8XPdDzYk!s=p7WGVqsVa^=cTDIcXGxb14C3T(fUT~A*@_bIQi&%^m$ z)Cg9|+sP_zZ&LlJ1o7N*`O=|r{J5{0-kfi|DS5}_p!k`?3zab+7v7g6c5h90DD=1D ztz!vEB&|~iGXc72WLLL110$^-T$`-x-bcfxvy(P{WLDDggk>&)a3~<=xS*66_9|-} z-W*lp@gYNmf-m$mdp3~co~(~7`2f6}lF zdhmLwc17U{^T?26<*EVRnSeLBj5yFImG?*K{san4%o=-9CGiWr6(WSIn3HPSp`@1j zA}(8?)L1F;cN*6=%|qAZ%L9tR?tR8Q_TI-21j2v_ivcIOBRwT@%uv)KfI)YoVN_^! zlB5|KQO{jeLzovGM0^oH+5J~qo6-AlIz$`nwAO?meD?ZLsIFEI^N8Z{@+&CD%4a0k zdLri!#3iAeN7ai{Avl$<$Y|L!J%hTtEq0T_05jRBrlIuq7Xn1U1WD^IM`=d!r#|&< z;$sWAMScH41#dt?@1l&M^&O`mggC-f?-BKT*WgEHaugGQULFj)HMl^?zm|Qgi{ZEP zJ_s%NnfySEzbrhN)L*F)YyM{~W)=Qq^{0H7OwRYoM$Bh)eJLeNs9PPpG|2PIP6X;A z$w|>BPb^)y+ouCA#=0^ml>QwUH&YS17HKT3V*d9%kMKi6Z)1sI5G?x7wvX0l=d3G} zZd&0#_%{Bfx&o+x+)wLvN=JLwx1?BihEPvRK4sE+(aV3m&ze*=d- zU(bt}83g`?rxwEHYhPh7q*<0)Xw+PxvDY_}Tom<=*RTL<#8wqrZ2xF9kM5&2Jbx_I zo2r)HXlbqFRq$Y^0_IZaGdM7cAu-_da8OExOK`ufeiQG#mR;q0Qt531e~g2dW73ch zZInI}{61$jZ5ov++th%eWC>ROD5RHmq?DP@Z;1JVQJ*+fPP6Z%-eHGWCiD3s6# z*5x@TV!GH6eVu+z@MW&a`eb&=wVt8duwCJC(Ed<=;3DsijksD<(5l6HfF=y0r=!Gw zX6ieZAD=;enPrZD&Sh1j`_a2WgRaPIO95mKT)`a^2uI=%LR#-q$_(tege*EeL&=VvVtw_7CvLU!WR zFX(jJu8WW0vtbld%o1M=08rB(XTP>|qsOoQLeli7wz^pdz1=U>-b({=Wh25ajXjC5g> zZIiX57@)xAz=>~n(5$eTD2W*TF<)giz3##!X;)S`qT<3hF0Xj`ElJc!Ql=`3Phjv9 zov(xsCDtn!Tz%AV;%zBwewP>gNkTp_;P}rbew|#y34a6{%N=iVU|nZ?<6Rh&soD&5 z#Z8lN0kvLro;Fr<30psOL-Y`TJ)UFwhtWw&uPxEYJ+wf(X*6DyBcH`sF3uUFqVTv! 
zj-Xvu+(I`}N%HYUb628@L|-@bLlTw&Y8eb5k6s!TiHj_-iERqA;ii7EPIzWAk9%G- z{~kJk;k9bEXwlHbnf|2>DJE?M_MpS%bRqr#1yuz(MB*3XwBmFqWFR!;$aN}C05(^~ zn7JKw^UI1w=S~hrI}gLQc(OP+06B7KOh=zo2%IC6eAk)xK10@~hfmS@)(HuJEp`81 z@^RU`2j?xsKJ-C!7|RzFLP}FPRWb&C5VBZ?BY_7~7 zpu_gpm%eB!_XyQ44~9p!&f~-Xf<PWB_x03#tOq?n|0|K20 z5Aq2W!g=jZX$e?5FNYc4LBLFRi?_dR3`^zq(wOzUM+~)cCn|PWb?kjvb$|I7 zPZ2s|C&_Lf!LEJEwadC|o_cFq$yc(D%rfh~)Gs?MadJWOti1TtwN_4>`z*MuwT~Jk zjwT=5ERt(=Zy%pcl6{|rz-tXF6=s5`nM~*7el;!8En`=L2%uFR;+IS^)1V!#Z@kQC zE0HC)CD-mXq`B2)S`ZNe)-55epPRz=Q^w}?-C9aXvh74(0S=I|b@laQmcVs~AOC;Y zz*cW9=oRnYGREV(HmbQx4tutsBXQtP8`F&5{F}F14+2~*7lLW6+f6nPy%ZG#^}d;M zGj<5GSi?W(!v##9&-YBjSGy8Smy2dL!6nV`pG|D&{6MgwsKP2Qm5o20xA5!G09$~Y zLAY~<_&tL!@i?*|<-hUb{yJdsVAULROR}WpDx4!~UqQf90lTrR`H&i~j-80)M4T@D zcCr~COPg7*V!+>>)`4_y569|Q6B<)%USf>eLMgYPON#GAu!qfD1`?Hm0e)TPen3_} zm0qHAMFp2atmxZGmo|X#KI0Peu2h518q)=;CmoW}09}2xd#(B0H?A6@6DezK?F+$< zN;8T#=}os0iAot3qQqd!TFAmdLjX}vdA7L<=?$)j#LVsM7WVgGdDHc-m7kzmR2j5) zW&98J#&BV&B4c67ghZ9Iv9>{pFipC-?Q}QoqkVk0>R!Up8e*{q#@xgZFo;gm63u#P zsnWgN!mL*pgZj(5+~mIJ0Azn6B2u<+MP1AoqpC;9m(@;_QqwB{IZeEqzf{lz= z4%WNXa|=k7sAu+neUVJ>T63blufw+)Ef6oLS*1_CnJjf9ahmt=%D;I7yG7RIGwp{& zb;#ZwLRWaQ1l`N8j7L8I2ECSHVl^jiwzc?V|3#gAZLNRk;2k%D5?j02Q4IU30^~=2 zK%Q{$$$!ZGEB*(cBM>Cccl$O1#6H0E0+eUz0o&AiP!d+h02{KQCQf^h$| z=~3t?cQx_AZ1lVcX z?}HghiR+Q+-qHlm@@4>%-eJ7uMkzb9gdcA!`ZJiszkF}$o3Ja*bNEX%vT_xTZfM+nZ|q-yTqw`zVBMTU7^<=V-^_zbOcy+(lAMFdoh4@o!y@=SA90U@o_CG-2XwT6ub1H z5%v94;L!2aYk%}SLI2kxTe-#9{=HLb`Pwvvte5xxygku>=R->Jc&vhNzA0X`9vct; zfZk3$Hi0b)PafJ2Estrb^p|~VlWRe2$(0{xc@4hnU0^n7Dy3DpqZD?z5W%GZ#2zr0 zyb=fH=P=@+Jd$?IE&LjJBl>u(qPCILJaHfFQEZO16e&H`71{Hz<6JVPRD2)q7jx@p zub=xRZ2TOPbv@**=#L2)`PRk8MCyJGn!PbQifTeXuo5u#F?fG3joOU_C1wBw0UTnS z3SkAPY;O$S_{9R}-zav?0&-A(Cs1!N54cHsn+h5d$Hu;?3^u^|7iQZ%^ubQL3hC9T zoJRQ~HaaeRD0IgoeY1$K|NTJyUkD>dDZNj^l4qSo%TjoLX*`P--#-UMvWb|C+8x_^ zO)gd}^s$W|mARcriZE3g`fiBjw6^Q^uxAX5)+W}|h!iNKFK6{Q-Z|mfJzf6dE^60( zdDZUpglKQmJtNnu&*?>9-vQrTpMM#%S1g}o`bxIw=%TWyu`ZLPfUXYDzg)*MW~EE& 
zD+iJHIn&<6l0E`qt-~anr2peTWmD&tZpHV9Mw|I|?rl-x6-i#7!FA z-lhi{!d}gu@Ai?`d{=KP)oX{%&0rYZPEb55X=`d$nq~U9W;pUb_O#s)mMYS@x7N2J zN^QSh6)E@(`t>~?wLRARu)q%xR_92Qtv-uki?Ww1v6rcoK8epKAbp)flbrsQI7peM z+$%Yd9zdTotXbH)AuPSwajm^+{Jf@0`J%!Em|s{d%GMLTGt=L7&(i15@Q(td9{OTX zJ+I@x$%7n%tYP1tjgCJ?2zd&?GFM+#N8iRW=4oO3E~;mGck9l`UVc+dI}bV;zVrNi zS)+V?Imw>Br$uFQtGDp~024v%zB0p>D@*y3eAa5af*)m{MWU7*jon}Ut5x=sswAwp znDlsL)wovlcF(4ro>gN>+Z<2rS-l*c(e=G<*Dg=SrX<}Ea^+P|5?!4Jvb@N$^6!Q+gY&KdJ&87V-x1`!pkH^%0eB`%mg3BqT z_4W^}K2V91nKkLN>Ugf%a)n)^Kd@G(YxQ2=6{sj;QSy1 zL3_mlzM^)BGXiGo9=^(7E#>UY zpyO@^P%PKPV}h-(sQzvBg}5BJ^vP&UiMcWs4~9eWP5lShDsFO$L2CGilqjKXTsjz=c{!Bp$|0v1b^mh|0mRr=Xyg|iqdwz(l2O? zhy}EsOxi3Oc55DDIcx#|B;!`dWo>{`Sh9zrc{3KZj_Qx6(mpnMwJR*%;tH+R?s$9# z-1hL}0xJa%e6N1C)K20J4JFeRjd3;_vvqgXB{|e}lrz1)LwW9ZAMm5Ax(eZ_Ns3+3 zTv5N^g=%xxNAMYI5LXc3JFd2gkF$x| zso)ECiOfK^sq zV_a7MES>SSrBAXP}Q&OKne_<(f~ zt10}(EmbV5erVnq^Sar#cvh>5WD85AQ8Z=!MAEvUh{eF@|Jm=? z6cc|5Y_EBaCD>>@aOGuMV+a6=`$b$964xdzp0l8gt2LWx+;I8FSNv1$!;FLaMhv5t zwygRK2+DkqAP1_OKBocQ)MtP-6hr`_9@_;FgK<_m>ue@3o=c{p;xob80ZWk2lI{nj z>U8^-D`cH_@E(5Y$zVwWPgthNuulS!jt#kCddwIzqj+M)g8>o{Ff z-!zx{Rn0HSI%A~y3OGaG7@wr%FQPH;ONMc224LUU+QN$kajpQEaK$yz_#<0boFQ*% zEP~WWtYFfrYoz&{YPGDtQno~`uIJ0l^7*{A^(;(D?wG55M3B~+QmfQ$DqduoO|65H z4ZZY)wJuX90L^TuXNLN_f-6JuJBjN_m5~!anp#V6X;_RT?4s8Bs-AU;vv*jkP;hIQ zEI^Uk#3bOac0RtoB8zw1+q+g$+f$m;^Bnq1rXVXx{&=4s&osf0Uk}c7Cv~X2AWetI z>zp4^-%&zv-P8Rp&O}N+tPcvZIk6)*8N8@@oqp{F`sj(r>VvNB!x7cBP7SmkY-|L| zRliPI6mz3K&J(Vw4|$k9;1FziFyNjOXrF6)C^T~{z%Z{zm9M}{4BYXY# z{aIW6*6*E{^5W?7(@$D{W!3Vf)2+sS^&kIzyYG!}w(j(Kd*rc?%~P+XFV#gD0O!yA z%+J{0{F{Gsp7PZpfRm@5ddhzHcYoIbmY2QkW%f`1=|7!cJ)-0JzE%Xt^C$kqpRhN+ z@s0NR&wt+j?Z5rE^D9gGW~S82dAt3){`xZZ%96eyeZ#o+%B==2y*6*t@tTg$*(|?m zq2__R_X_PkGe5p)A%K(MRwv%A7lbNoLC1f^@UF2S-t`xReq%v=H0k?!n%=;Iz0Q~J zz?QV6B`xX8C%0hn*Zjj5evS{14qukLou+M7_pFgjxDx7>N_{#Wv-~)0&3MLQp{R`_ zAuH6YmWqx%*zdT*SJrXsrxLz^&&Db>5!Z|O)S8`+wIWduDqI|mEuni+CZeE^CR5f@ z9k}AbWASC>?`V(mlUM-6K_H-eCmblr-2IQXQ7G(gyih3w0j2c_(!IE?!sq!)K5J)6 
zd7JbGZmoe8NJiI)`GBh>+>{Ypfw&?BJjRDUU%BHxtf_RAUtA|}zAMDGNo|-c8li=w*AP1HXH;NFMuHI0V7bf0IoPGKMza3wC9U_IF90EbdC*Nhagg3`Je<*XkbZ z8*7$nyX}?(^kGH)WxV-%99KL|%A3q=sCG`rBmWv7unIRhfH1_(zzq+3f_&qqYM^nP zXxyjbZ(e<vM56bm;EMnDW+n%Xm zu`}|o^?Q-HCDp=dI_*KRv0@sEM+BlSs@(*(mWa9KOb1>n?@P)hSMDZ?CACwsAzt)U zJ^}M_Z3GZ8W(<|i*S*N)NHR;H_0?qDmF#Gac)fR*4#`mOt@cbLjsG*h(kr6+BoL58oh(Zu5h<RK#UXOA_02PdG&1bAq2#!vhaq)$VEXY9@fP+p~ zg$6G2l8nE23_z5S+>UgAO!7kcNtJ;sHr!+}ANJE_N%hRi&3y-k`noqEzD70XShYp9 zR+#!nJPz8x90B~JcEBZo(~|D%YTl%jw_mBa)lN(ODb6&{3VF>ztq%YSs=MFrdTb&B zs$+G9OTrb+`GMpCU1U-%E>IqSs| zT1y)4BC*eSsBKt-5%?Ye1b2fO^|z;vv;Jt(yY;{jOSO7aGL*7`)(ZfyfsRMjy1Oxo zWyRBKO-rWcC~Jgxikm0a?x|QjT3ZXO@Kjf+-wS|#wPjUlSGCqM-wVo*6%Rs7pJXDYajXn__L6AGUaC1Cy6=rX2T{H^8!dOm z$T82hxcJX><4L`9b>A|%w7a7UD0qHD%EGA*=FfEIj^7s$&-%slA95f-X^Ry0eoJt9 zenlI*!##9#PjJ9_aPfGZ$Lm{MU)1Txey*#F%Kbi14K8r#w7A=|1efP2toiL$$F{Zt zS80nnLuwOr!#>Uvj_T9yaKA`Ds_$eUDb<4)@-({qvz^K^20|ZSi_t;jwk9n-p7M|;;zIjSIEupIZxNkwr4SSQn3|qcu15)Q4?nw`q42hxYQ@^sMO)ZQP&Om0H{`?2hfZddfDp zZn952bztwjD``*e^=weS-L_uxs(C3dj%a6bbxrI2>HT>0fsd=7YxagWf46=5!PDbN ze5n>JSdG2m4R5dyeBcA~l&=oG?sc!TYPIT?DYx8mi>;|YSg@QV@>TS^?78Qjo2PvB ziM4QRYs=RnT**HC@Wb;fOZsM}o4)mjmewmvx`w`CEK+i-W@ld-SfsrCEjIaQaQbc>{#gPQx3#yg$e-6tZ~(5!><1x1uy|vVOv!zXu3f$Ktc`LRi+B5WB^kGBG-Z47j7_6an`(iM4O(_48@C<4{2z@it`~WH z!V{ALmtc`h%Hn#d!FmE$G`M}aP5E%iQj5pTv3itEgQ2_2Y3t=TqDK`9O_Xfo+0+s` z=Bs^I9)?5y^*u^u9vZklBAu@-0b@{>5~G8d6Mz7>l#7PCWriXv8dllr9SU{bJM8t` zhdyq#Pfcrj^lRl#KW#= zz{L&y@pu+t^+(5mNqkks7yhKdbKdn)GN)7yTyLmtD77eLrEL%%!vDvPNg*dQwzr#`Y zA56LzS)yOKf&$5m8E(Z~TrfEVm+vT8eX0ZS&_+2NuIP;{U? 
zd6dJdm${q~UvLwblI)C>2lsg?rLjk`OdJ|49{?|LucQX}+;nBL)&_tm;_*~8*VCD_ zCG#1#RslpIz&h&zK%)mo7e4?cQIa$gCI5lA(y8iKAah3i6CaX*G5ll|Z@AAbLg=^T zDyi#O(si`v02oeGw~3DnCWra5K zQ@-!*)>uX*H!;o6aLmUbI+YA-T-&3eTfV3XmSz6Jf1LoRMbt;E3~{5f3b>$LwBO@b zDIaq`j8a$qt!NGskR6Z&pf!X{tB+Xu0C@H!AIKSNApjD*qECP`omN+KQ*xpCiCZrK z#y;}2qg5YX!wcp0*gv`#V2$wr$Yu<}jH&8r$8%awQkIxBEhG8v=)SV<=Y@fF4fzZK ze5x)0_nGEsQhnAe^fJR*JK}l{;Kr^j-gH%FMso~V!Id8`Pqdvjw;6A3Om*TlIr!aF zy=qHNW8#*L>9oi3;)8&M%EYqJV_B&m8bYm)5o@W>$RO(n>qJ}gn1?G?GAPNXYUhgD zUR662T6e-)FR+B{6jp3>i=HLAmd+$C$9k8``8tJbGTec6S{>_3rsHt}mgjxhrtCn` z&xrf!d{0M$Yy1r^_)(DN*L36w*FOLK$gfj{=L(Xk2AY>UKb-GLS(H1k^YCww?)fZgaqeSit8tK|Sh^n3XmiBaA8qmknZ_7(;=tF>$|Rx!GROtaY}L+#Bj;D9E>I3{HAI)iM7*pz|k%2n2);W zWPOW1;(~0X`MS2ga&3J_Cr*pI{co`IGLQPGM8Dui+OF;6NuDlTgiUiRwqI*#EtOn< zmA7Y}eb(~&4S{va5@7kprY{>{*}iP4Y|e5wunfWFZmw9exPJFtM|#$useR0zyV9{% zcWUcv8QWM5<`FShc6RG}y3Q<@$ylr1v_eR~7W8n7z5Fej2B6JS1#Jx*_(pj z{HG8A%f{9jyXV!fv(G>J`&!?Fpv3>nU;2A?W$%*xrT6|7d-wNzujYNz0h7P>*Z!JY zW?j8{)h)3SdfvbL-S4&^`>`LhcfIRf?$YaXpZlEMbkj|4y}}&)?(hC?E0szPF08Ju z+E4w|PuZXOGk?Y{O&)*zaqIW{Zg~O#f~Cig|M-vF2S4~h`-z|Ui9?rO-1oyj{KNL= z|NNh~@BGg1^f|;FgI)+=`PrZSS+^W%wOXDJp!c2ce5d{35B{LtcH3?C$Rm&VM?t66 z&oOOy&wJiuFMa7tz5RTsf@R!K|MXAmS)a1M`d9y|mCI%O_HX}oyZ`?Cy-wDppZmF= z^ER+n0_dDMbH?jl0xaMBg!RhitG{O%d*zQXEx?p-1R!O0^F20s)!VNFQbNP7g)e^o z>jtD8Kj!`gpaKD?EJ2keEon(h`pQ$}d*1s4L6Cbbvi`!Yy}vY*AX=mmjB(uBw`i~L zV8fMU-llqZWP1Z!2~F(wVasZ5y~OKB@~tT2^gv2$!*T_}r3|jX2$D~rNE8D+KDv4_ zDCwo6rJ8V6b51Wd1hih$G4b)_^) zdEHdPVolW^janLoA3fInSZ0L#fmH?y`MS!;DV-DGB z=?xt;O5jeb)f9hbR!AqUp%=$|w_{O_!62WrxY){<{27fk3RbLOaD7EQ6095m25|30 ztc@57Oyxn@oq*?R$C}c3fou;39%LTNhDI`Do%E`eMjab>>Ym=-zi?~R9gHoc zeyzi&@kl?y)K>LvLwp*kEXtW`k7W~lud5AN@rbx64sfY#zKn<00Cf||D}WjR7fN+^ zMI_mBiz@o0vU!C^nHy3*Vy<8X0vLzv>GzL*jcI&!9$h=@@pVR5FW98fcJL%2UI$;v z!_NQ|!J1`^{HXqJvmJm3Y9~BKwonL<2R$1CT#fYd->PXw$83_xTPVM3@qEd~iKJyv zG>ZpV*kDBg=;E$nR1QBj0Z+!Uh%10m?9x3rmfXt506RD)jE@`-rN;A*WX&+~-Xgnr{?ipze0Ym_zusE0&Hg(U?BDEclV*;?l 
z_#n%D?NOGYEKM^e@JPI8PT@vtlFe9K{SHxQzpr*_1?dpP9KKI2k42pJxN`&e>#1%P z=x)-myr?yrl6B2vwGr2afHJHT#N-*OPR50a4ycq-e{ey?y2g)Iw?><{=>R}U?TYGN zu4AwXLJ|J*4>nP{fM}AQ@m3{lv;6kxltJ#k>ef_G98P`2iO5 z;uCW3uH`gF$Uv>#@iI6bNS2suj5jieykJQa7mxeheqhO>cE**D@wjDb$Ts+Q&1nrC+j>}AH?Az3)VbY@rGwXkTS2=GXxoL)JOI&*vEO| zqCSc~%oY?PI;!umY;8gBj-~nei9XKhv_1mFUkkYCw}aNYuH-hbaOwJU-eb7%3=3-V zdn=W?Z4sM?_A2Q{eLUU>4*iUKJVoRO{0aKP;kAAA42Nk8fMu3QX-;?SWv_m%t!-OPaIgQ(Xo!+MPB!&x7yvWeVrwnm#i;&X%F@OaPN0nZtcvxlov;PmoM7J zP1n0AThIbvx$~tjvrj+x-wpwm?|l7t=^34|UwGgD&z}F>Wd}B}KK)1k=pXqWkmW-k z`j7*x8+tbV@-P3g{oe2Wp1t?I@3qU9FWZF+7woae9<#gezS}_vfCb(^{>I<<8}^A$ ze8PU=7kP4QqqrHe&MoxwSg4>c>|ELcHU;U-5UTYuX%@!-~49-Af>3^R-=9Zq&)T!i+<#n zkARfRkA7W&6f9E~)+tLsWl2j~(vrS5ggJ$Fi%m}#f8o?IJMBYrr3mpXP&(&&BkQM& zmI%kJGsRuS#A1V{HS}YDBObLxxoFL9-{P5!6{b_SVBl--_;BQ|hEP7E0K#QVFA}xw zZqM48v_(fjyogJiE7sC|#I69`K;f5GJ!{$kKU!)_7Vv}S>IIqr_a$qRS zSI**9M%Qq)gTn!W)AOY=ZjVyAg00?lk1ImE+t0fTGunaj5oY2h3PNFr!WJcCLTw^I zHwt=UZ?NZDK9TUi)F>ah?{c$kHSw)B99j#7N?R{~b=>2O{EK_WZ2O?%Uw-QiMtx}8 z`BBSm3J>j$=+NT5uGMEzdwdwPCle*NJlc=MZB2adXu};=qS3VcMCC=p7F%2OFSv7R zKNdL=$-qoM0USILV_j{fz5{&3)UK}Dh|=#s*NHL07yY;~8HZ99Nfj;KIxV&6XWcb#KID6LcjHm)d=IF_g`vTu~iUzi@lBA{j$j&6n#NY9H1SE0UuW zY1$KrJ(5h@)lk@0aV?~>>yn4DUOsX41z>hTJZEg%y}rjqF_nd4d0MI3Q0+q5?|#`; zCsr*004T2kXHeYYA`Vw>D11?#;`)gnL447VEDZUnq_i;Z$dtdB%Q=7o(13MPc>TPM zVo96qJnKqd0;KcxJTl)@UjZyoz9YY`?AAHp2E4~Y1TX~mOb$3Ochy(o8|BnatZ&Ax zijB6Pwf-}Y*sQkW0l$Z(4GW*W(l67$K4s4d*Y%H zU*e3~h7ynQMY+ek!L1+48MBZ7-plNVe)^LZ0XXS*Y+Tv3>Fy;n z{eTZQ53JX$s$R)2vMJssRSuxUB)6h+&suWzCYJ%zdR0&xyw8$BKm;sPSch?GhSgT1 z-n62wsEMmF6!18WUcCSU*QG$(QPyXuWF2(ze>zc2{6MrBCu1x3t22T~~b> zOIB7?Hm>7LV?b#5vHG+w9#48b+m}q0C9|wss>T5wEV^*jmX@rRBmz9ZN;|EY}lEt z4F`CLS#x!J*LJS%S!KVbXNIKE@1rbctcX7hj9*~$R1tZC{o?uWmk)PbKT2@7Tkv!~ z&VH|_1=s!0qRx}`DLo+aaCv^tWr#FC;JP;cAFI#LwJqx0->o()^F)2@ zuc=dq?1JmT5r>O9{g`BKnYSmnL4PQl^yBrBaJ)`!FWATV?uLD2&!#^fuSaXn#>SeR zIlJlccTk=-B>US}82f#zR_b)Yfc7^KTf#k#hUXp;zO^xo)R0miQRpZerF`}))~ay 
zJ#SdQh4Xp{ow?->d&S%Sv=wf7r9FHwvx^NrD!_5^i>Hx(x1MT2^CIcDKmJ=b81~&F zh2N5Z5`4%8NU*-nvq!&6R0l4=#(ExLrGbmQ^XJd|wUcz~t+)CfU#~N=f1#C(E9yxvETCickve@FsHr^=~o9l;^#>P7cz)JA(svEfhsA2xc|U$e=*e?sqtc?&;vzePXvuPy%1e$t}9@;_PRH-6T_AOBUGJ@*-# zRxY|uU;%irp~qNk03(YHFl4bE2SEDz!F1Y&%j3m{=ZwX+ub1w{Mhun3wxlI3X-P}^ z`cU|)Uc~qI4*Uy9y%zI}HF#OpVjjFWd#a_<8Jp=PVJBC%y<|oYgpjR_`xa@}ECtXI z4qGD}vB{MkkKsUY^~<_9t#rO-ovANLX=9t4+%mb9P2;CxHE^(Q<`43POUlqgWs48ei}OhM-sk z7)h^fT72tf8}D!1@ZhT7GuDRkt)P10A65`p!iXiV2u4Y_f+A2nOHzL%>Q*e77;#?o zt3t;p&v8M9l}2-ndor`O+J#%Aj?PD|mi1=pht4NS{cJd>SX$+yctL55OQ*)Z__(Wwo)J$tS@v*#Kk+nyw>Qd+k*_leIFaT3KWipTYM!WG@EzG{#t;Cd>n zK27?xKo9)s#F{!!Pi7WYe}_8fD}TnN8PD4|m9tFkip8fxo1&O!T$PU>LAcq$rPdJl zJ>qFhGSO@_?D1;D75j)G76~gVpMH;q0}rH))d+m#3tij_p}04-x0Fr?*L2TIH14TH z+)CozKrduiLh+?YT=&y{tV=TatX1@q3P>`=azeaDDO?cWvD(2c9f}&1`os^xMbboN z60-qIBEBrfjT|q$SVf?EnyF2IZt#+LH+(gTykJq$Q#tM2n&d2L>4Qsw1&qqfCR4_j zHVNfTX+IvTUZx+)UhQ!sQKr8QeY`CLSd_M0>p_VVv!1K1o3=1lR!vonnD5Ejf{b|NV_wuDZZlthqfgf>rDn8@Rk2n<7NZyEL zlsVYe7^eJdO8B>0+n4UJKlA^pIv~PY!+ghz#@bC=v!S)GJZqu) zRS%SiRa2*t%EG{9#wYsnKJYTw!9Rm1=rX#zovYM<(SfSC6PTL~I zyk&`5r8ZAh3>GjU$v{|ZOsLVc9+rXSq7780X$sQY8%!;Saeo37U8u(d3OAHtZLc0wF0ieHaCM9Bf$mzQ9eM*wT)Ffb9UXf zwl*xL^#HeI+gJAN>du~3DmCkMfc=hc3wHB^WA!bb{~TUJiyL{Md0tKz=UgO>J?C81 zZc*0a+^@|~7ImJiZ*d*s@I-x#@1o9nz2P#tvRVo*lg|F|5BvGSjrw@9KK>l5^LTyp z8-k79ocwTpeqA3=)(0K0lPkyS_MGegvHF5P?6|NvA%AfHJmru8pWS-*tG%h7CBX7W zhp^f_jc|*3n&rxZh{M+$M{TvBcNG1$h{XX6_`S|EU~I*Net+Y(GE%lm{G4s~!nXJS zXYWs-ZAq{CPH>-jp6>WwzLzgEl^RlsW+Z_I5RyQM!hj9fAUt)IDOcg{TDX^~wcPG* zo3-q+yL)xn<#xHt#sf7_cG-8}dt&(F8}_%qFCzB;+j~!m$>`DUzF@i+$n(X}RL_Q0;+O?25DQxV)SvoO zZkd9WhA(R2edi8@1qdL_-}`%i?{ot!0hosXD(*(O3d21auE@xPI^3(8D7DdQjgznuXoPk(d`q^xO8Yg*Hq zzGzBc)`xf%*EQpbuXts{okTeFXcMO&(Y?{w4ksDQr!sbV7+7;2+2!tuZBM2a>BIgw zPaJ@S&GdmEv3gW5zIBv-)v8;2pwMb4554)jE16AgxRN0*Mt3o_o<4RQj49=ffYFO) zJys^`UI3d>CS%Xb=z~YshSQn9Xz%M6H&w)?a3z|~;UH&y*E=qzX?c=sAeQ;nK%B$Y( z=!HUW6nqRtLwmSYwwGF98{IwI&_^UIq1(BZeLUZ?PQGeK#Br!sEr2h&ZF|13n(8;v 
zhM#6)cn}i@g?6J@uwWFr%N>;Sw7FkssEq(VPg`*u_XU7kS^N1?$rZ228{pVnb?t_c z;l=s-WbXWB<@lgfb}JcE9k3MGRUcZV>syqH((zhZe__+Reb?aM{s)L zC19RRUwZ5o)f=TKie-X-^RidfhbsLFD1sFXKvLMa zbz|i(?yqn)gzNx%5!(h=Ranyy@1|BQTSxUqvA)}=`AgWWKXj!xpWBOmpqIyj#{}7c z*XsMJE1Rd*>K<9Cebb8V8|ssh5itK`Ydkk1w}ctZpCod1V|xP87k)owbe&~#$IdC5wX}Xw?l074WS%ifT#~->fk!3X zTae7*R1G(3DWyA5dvQ${pzv4ufP2Iy$%+32z?V>2p#6#J#25nbJyyHB;!(f6VPo|N zre4I@NokJFq72osMx^GWlr{1Nd)GTJ*|)vv!}dde>BcffkXcQi64bG;IblQN18{CZ zY^H;o`VhEjvs<6H#o)wpgRaI^*H=xb5};s9RM?2$K@mo%ol*OsIZ7rG#(5+ z?p0gYvsW%#Uv=<0j90lq-@RY|9p7PZ`-z{lSN!mww+CPS-IlM^?c3k{Hv7+h=C9g+`hWf_RtUeiX!9%% z`bq#S_oDCo&hPZtEPvx~{0;l%U;bs=-``(ext(z1b@S#;k6)5(KlM{T<=+?2M!4X@ za)#I@xZe6g?s+yMmdfGb;qvG_aqW-%kw0R8>u>!n`{)1spSx?Ydl7ZR(gpB`#WBRF zS<^Qct*uwSap)@nr0`AqY(NTI0;I5gAwWv>6Myp*NJ;HIehQ@Ie*Q0>0V$t;&l*Ts z)0)<_rZs)hl;*>?TrSz>)<*1NL>txtD2i^}*wc&pp|v|b8-`=cm&z7Q$Cg`+Y-7~3 zlVaYw<$@jR1AP;vS|)9El%T`V-7(E;RsYZ?X2~smT=QU>=X2H{PwZqk^i})Q;mB8p zPqrF1&Pt?YbmnIN=cA@5cr1dzth{xf=5veLYuIu|BW2W|>8;&!AgKb!lCYcd_prOibXo0W>b^1V|ndkhEGt)V_DDqTfoOnQN3DrHwR^U)sZ z;WpHVQbE5S?*OIZ_7y!yGnRhp(_T-k3UFUW42F{SQOFV>1?v@6%W|rRE77%&1q8pn z0LN##rt_?Xrz{l9m0aE$#ZqkDk|j;xf1>n5m6xm4ZD%pIM|NxWz+zx8)K;xjY@RAuH`}tHv}w~z zSDi1cvWB@Rk+ED^pg?4GJLT*AQ1uCCQyVuf*$`#r$qj!o@|ZH}iY<;55m#xtd5R=k#KGyP?;_%iD6hKYpsw}+f&mH;*8_J`0A?q_P%knW3swG( zWC7rYm%NecnAUZ5k-I@F>OS0pxtk%4AKbH*_+S_Rak=I3X_ST)l=DjuIcS*c>`6|5 zR&=j;)a9DWpl@-pg<_e0;svXs4aGg64Hh5tFE4r8v8iMLtCh6I92QO``c`dAV&V{o z0xJXJ>A(x*F)O}t3|N4oi}*;hu&0;D8@AEgw`zA!`HpN@Ygq@trE%G^TbCUK#iF69 zJTJEz2(bf%TdF?++i|nuFS_DsN#}a-XfknETusS4FSvE}ebDct(U9OTTuE=u%N}zj zJ?=?H({7ajU{%oA+>#6e$hp;;`Zy;Z(1tCP&VU}ZT0Bm4EwAHlS@Tk^V0bOZ6`ZLZ zd9Pc}rh~p6Ph!P3pl?d^4xmwC5XP?7G$x2igVhag0@IPoQd^j3cxfL^Cn{s+fEX^? zkY|8o+8k*gu%)KHA8Q+q#Lf%n$fhs=p0*Ws`_bsbQW_5HL>=Xmy0eNzyGUwd*8cXYHxV+6E4rp1GF1uensWN z%aO(k;2+?U$9q!0^?Mx~xA$${-M8%F(-w9PEj5c&$B~WHZY(TP<(hcju&B9XLt-cG zT(xp@OY>(@^_}RWKx0^KBBmBRfkMfe1BQ}GTx1bo8X)sTm$Q;jeN(A?&2MVYQ1!?m z2kK9(`vyusE)^VP!m_Wb^jydN++1@)w|T{~bB*_2+XKi?H7}P{kD~ewFfF6~l%7L? 
zILN)bIn(`->Q|^$tflj5%@;Y{TOJIpp)p!g@l){`&@zym5Oa&zH#3z-y0-cs;AF_N zOgv970TP}=jALR!%?=N(qO?_HlGrMeg`#8})52(tYp1PTCz;oW-TZImEfe&&bs*IlEfs?d3snO_CfS z{w8;TXnLD+y)9yKd5L0|M{>yKCb9@JXw5Oefr_lM_{K6@5HYqbn zaXZCVpnJ9PPCSi|{h||R?$pNf6rYX%^1GTZe-{2sy?l8`=i+P3A;=9b#V%jDWS1`O z_*_l^d;k|*iQ!TVuH>3vPlgV|zcm+!yN zp7-#>_UiBbQTx^({b{@J(bwrcqGS)f@DY3C_k6#7*V}%;9=iI)#b1f^{3+;N{<)-@ z-ide@U~vI)M;?9j(d89=W9a!*uh-rBgl`1mvM}er?QL)KW2}r<-EQ5BD2Fk|eXoE0 z>mAfd2&2H2g5`Vm8gp!)>7|DE6Yo#mDDM7=y$C#8QDZ1 zB8NN#CR3}8^|;W>FR@Tql|J6ywo0dK(^A2L@z~wHr1SxgqL06SRd^uVzFZ3Q);S@YgT8Y zfaN!p%UdyC$^o`RaZwaXb*Z&hY^f=$B7FzyTHQif|NW?!RC6qX9`N{M=j7Etcq7_(oW(y5Yq*BRIHRvH>x&ANybN)eu-~0 z2!~dzRb44v*GE;;`%(PY`GM-jD!Z+)Z=0im>N2-kx9ivE^(~uk?pmf)apipGg-4#!h_3U^TIhm>Rv z@C^4&v>RpmMx|u;tFE|ULf$57^LD-F{Xf>n#GEQ}qqKlaxPl^|bg^hf z$s#N6v-6p|6~HQq0PeUOYpYJY+)+=JqKwB@RC|cE;g&1P1273o8}C2$1E+544!QE6*d|{jrD+pyP z7BnitI5cfmwrcO7r>K|DoP7rd06_+b^CC3f%Zm4suQpv{^paKpqimE&A zo{$-0B%x5QhzSMte;9TwZ*v>z4RhSsvTSq92C2N|G(M-D6AwhrM=ur)D8V!0FY%JF z$YBnkuDnF@%Q#TwERbyBY71+i;g&w;qoGx!p2}5!=sG+rt1cPwk+v|l0hux?i7f%4d070-a zVLk%WoDwil|2+sk?#=+*YT_}$^BI}EENc?e%~`8x1L#8* zLzPSXAY3Y{+ucmP$lRzpGv6F`dzNYJSgzi5ImkzWjfb7MZh%Mnt*Tz|6N{6D+6nL9 z|L^so`9Q^f;6FLCU;h^+kL|+PWSkIRi1`nRn#y2a z#PSH>EzmgtNyaAg!lu#(>IWiK~iFaSfkI4#}+mJwS9H-0>q=C#=MAkP}+bikgpo*QY^5sRgw zWTmH@tD0x#x!Af2ODf_J1u8dFE8B_s2a7147gu%f!Hd4dvW-oZNU8B(JOtj#>#nq<9k-={84wkTT=_3l~MJXI9HQ`$_)ZUf$_(vLj;mi|xnc9KXb0p+9-Ix_Iq$UpwCSl6&0B zAyddRecn^wd(eD63%MhwvCe(e1v<@hnOXc3pIa?2335*iEG%)iFA;A`*PmqrkpIBD zf8C;>Z_l>(AV@$3F5|s}q~6)v$N} z+OOKb`;}j@C!c=CI@65hHe^F80=0pJPrjw`Jn{m~!Qm@Ybq!aEUx=!e6h0~vqj&-@vW zmjdvBrN>*|@)mo?JKo_z=8uk!?CGbUzAZRCv>Lpg-yivrAMrr+Sckx0%HTXe4doLH z2J3GE&=XGu>y(Q@?uozh!$17P_V%~G-Q%qg)Sj_;vHW(s?dNcR25>|kSFT*~U*;76 zm(?Kl5M@xufB7%}rN^Ry-ul+J+RyyV&)5rJ_(FT&fd{<)*RNl{r=`!DzWxa7l}oSq zL3{FDe}8#qOR6zwfRflZ1b1C z)uwOwF`MakvGb6n^iBP=Cg+!tdMe95L0tT-}OK~Xv$xb@2Y^MyT`)8ldxE7mf) zpA}Wa4{=MARNAcw$|Drqdg4T5j|rhd92}`{Zdp+u1vZ~ps~gx+I%Nkso_G5mcoJ)d 
zr^3)i`W>-?I+d}iuD6wr704*3=V}Z91kP5Jx3@{%SAis53Rd-RUiI6OLz2vhw^q5`ATdo z3x>KEH(@CA3+c44Vuw)3QYTjFqrfE&2*K=$v5{9jsTZs1Q4q3v9#=NFEON`3eAcZC zGTF3MxmS7SGAY~HXu8WC2e#CQ4{mRWcXQh=6>Xr8uWkB3{fg3rm>`u})%w}24b&&w z09yKZoOZf)q;s3{mwsJdN0IJ7|dUDroS{-kF&t4&)pTUM%6Y^pxYj7R#A z3aq8N4O&+%OigU7f9!$SB}}eN2HbEEOUE1GKwI;FtwMJ zZ&-h9%_U>XBNES8ISx3|Qhq3Gc_D&VGu5T2Yq-D~t4-X?%0%1-5xBil%)6qcr@mnY zIj(|Gi~}5^G&%vi;)P5vg1qpcygyQ2$SUr@?JiYfn+y5K(9b2iY z+A|cjDQa&%sJ-`IHDhl=tlE?swW*qky|=1Sd&XY1MNuP0tlRf`T=(?{0oa?f2SbS0$Q!n|Jl$m0Kw@p1$@3E@VJ_JDcuegrxjTc|L~b9U+a`1w1SI& znBlrTTQ#`6GCB6}*)~ch%&IA%K+&)jjzO$vm)ud&h!W;84`7OK9Q0@}3}HX8)8QFI z+_{U^dkg=&p^X1I^^=nqZsyFTDeIh&;mxHXq1RG$^=l*vQM7!SWDWV~^Og+DhJj>5 ziwa=*w_==a*9bI6lgp~|xt8^pY#*17F7_tbnG_(&-9*sU55^jBz)`a|T{ozw;1z>1 z^;*b4?R=7{IqeG5n-gj+T`_np8oV7Au2&gpDD&K#?x%t;s!{dMn^IdErL;rU<>7?R zDJs-lnFO|;WD4BmCw!Xw6!8S<)AECx(7m6!MqY`_$}WYidGZ!)bQx6ohGVcHfx4D%Bnw?|yo6h_>8>P3CZo*YG<-IZfFo$g(d~^u)-d*auQ~^+NYd zVX6R-PCHMBCA?;jfZ+14G?0s(k?YaBU1G&SpmY6SE$gFZ%L|A^eWj#T=ceeZFN1eF zJ`7hMExa&*7RSfXdB9lyOB!jvsMO^oXL&$BmRG|Gj_Q(*SWWDL9Gq~|?)%YLhn)oe zH%0erA%iUZv(EPg$|to`I#~!m&V+i%%t%haue(pqqu*T@!h8lrCnaw%=@kN6S$?en zx64a-M+~VdCFRFU0A<jJ`PKLGC)+`k)cURjI~L(284=ZDTurZaf5WFTco{!3Ddp*3i^L^w+Xn~NJjD`M+{5=n zJ-0t@(~}g`<@SfqY+j&@b{~A{B_uFqEg_2b->i20O&8ven(!n3IEyOKSq(H zszx@Qrqk3pEZtLs%*?m)&*XVFlg}IA8K9Ho3j&@PWtQHegSDE%@An5VEn_E=CdlH|g>(0(>q6x=IOk zHbEvl%mh`h=vS5RpZ0~E_uNCTKA+wu|J@Hc0WQ^cIyU90RI03vAbG^69*BXVV<$le zRva0VTM4E=Ebi}~n!5h#)ij^p6-&81{VTXn@absj7{imwOBfMYi|4lJc9D9Rl>+^5 za_;VC34j}>BHSL?j?A9Q^MbD=M){osxpC>48311*YpIl2%{_@`Z!hdIKc>DeRFXXq zdn2pyWePD%uQx><3>w`tS#a?HQUT>rdpK99rj=Y?}9^xi)6u1rHH+yF=BfF4ig zvyq5dRVEk02h@7~L*RQD$Z)57w__=s6kkA0iPWV=^BhOhRj^L>!<8_0zuLzI_HJ28 zSrH+s6?A7ZaH~m|6UGdomMIym=bKTpzTeY5>+m4Fo+D4yXKB_0&u)$;)AAstLE)c6 zS`U^)HCh*UOA^RBd>#O2NcR;IxJ&V5BZHORB}Ei* z7zd~~Uk1T~uzyXNiPCnSpHJ8 z)0#QRco)Mn&Dh64!Fz!Jyaa6cg~Z#!PrPYdd)-X)_UDw4lV9UoPrCSo#V-jH5 zQmF3v%AE^nil~|K7_c`N-2te4X-vO$q*~A(Ny8O*_qi*4;gT>R_lC%oyj9&ADMT?} 
zr?xSnJ3i2xke9N)Jt=DNBa(4X-t489kYZ&f&MA4p7v>w0*{E3LgBN94bPmum0#7Ri?iAfQ-a} z=+qNb~}pP{~b;H2KTAFPk+fF?@CWzk9u z&nUZBXFf6b=moWDqE18H03qNrVQ>9j#Mzu{Y5TNl(R90-+G%2Zi^|PPXHOE&kTlt# z33OinQNzqXW|W}C>H3+<`S*)W94{9oTH?ofQNVP}Y-61x=emBvCaE=k(T*DVVt zib_t@xn{dHy2u>^%57B{g~XjHMsr54DY_5V6Z|7hZC(uB6Yb&G`B2+vJj7bJLATp( z;;~ffwCDU;7h~FF1GKmy7V_7EPI^al1+8%6vq0RyN%p78e3Lcx$>y}?8qb;M+pvzN zc21fGaaN%!Qc7kJ%*jf4GVmQ*cwu;JtG+DA(%XYLQ%XFgeGn8_UrAk=!#6K|IU#8k z8$X!a)YP?0NH%fd0x`AfR*gYluW_^yX1+VyZRj~$$C?>#xHzM$#)(g8FkV7z;plS` zpS{B8gwe#X@cXPghZwD=yO4W{i5btF_dBsK|3RQ`o@kd3>>qf*y~>Aqw261S>5<%Y zENTCkO8rlRv(DP$#nm3y6U(NS*73MBs?9qLsEgp|i~6IgkFw9+eFq3seVB@S)7p5w z1UPaTq(2hWENJ~FT)?Y}yDjs+okL~37{G(E$G*OwmJ~(cp+DM#D(ck1CnMYa=-TW% zX;SOG0uo!^_$p4Y>O=sz`GHxs$0`Imqddyx4i}HS|H-ZO50O*kIHJ|$)}(7%7(SD z!*$H$r^|xJSy$4Fo9$$t$?C(Lus6?@!66s>8TRv+yV}+%p4046Zo&}K=Ki-)Ty-DM zz-C@%_=jna-WJrTXYAzHhYsT(4*b$2oZ9C9jB|R`~6p3>K(98+4vMR$jS&@}c$8*yR7Wken!-yj7hX(UElwi^tW+ ztJfx7AS%(80g?m?BinsJPwf)Fa8A9Q3_j^d+<(ddUR9A0xJc%$Fj-6!H`r=b+ZLOy ze?R-ZSIny4m!Kdx(*>YueZ>ekBTdUQwi$2T zVad1N?7k(%jj+H$0RoQn0DnKtK|A-Rez`B3qi2c5 zGbs63<=3|Q(LtuKZlKNfdKy`oB}|*Xu)dSF`&J8VNtoFR#~j-^4)Q-wYu@t8zuET|@)QcLC)gcv2qSnmni?A+ipLll`}W@i;CbbV z>2?VA)7_69HF8vkl>ic`e#?#@n8{k&3`5oT^KFeY0l~POX#HXUp_WD&Rd~w- zTZTqpmEH=PP;IRYQa+c)7B@aEcBY+!|6DH=aLMJ)Qv5g|u_?RlZC)i@o~CVL#e|G3 z04QH{jvV^UwWJI>t_(gjla31HkU|Byg+Thw2T{h`e3_o{+|~$hs!XogS&8C&j@7y} zHkA-}!p|JF-H5pO=0#(sBiFl4kODX{70iB1^9mg}iIkmQlY#3otcsz{SfyhY@{4*w&hzPnbZztEDY9&5^mAPx+J~(^juPq)sv!2| z&xmUoJ2|A137E*x_+rKov`5)E<@KT$_lWhBWi5wv@&!}qcfr=NcplfNvS8UWLQCv_ zNmras0y&jEfL^{zoa1$6m0;^dWo3D%-XfqdG_%iil9$WK|M z_q?sCI>0sUob1#b578rzlh~U2DG}5B@pK`y`w9znFm(mtvs~xhcr85>^8{R-r%-BIQXe!!>-_V(I)$&;{1AtT>bc``tD6jcRDvQTJ`|th1PZOLx7i{;?`hOL-By9U9jtnePOY3FyGJ?sw3Y$C zn|%xHjq(}E5B?4E&!>u$dT<5fphK2c;Xyd3@BNDY`-Imo&A{cQkRf}!dt=flNuy{HO zU2|P+wd%Q4>1m`}*|7CSu7vg_Ng}Dq^2NKv8%lZ7y7M6v77uevId*-=%NXZNvRhUas?g;M(WJTpDlX|2at{7Se8uq{6UN?)x2p zew;J|XkaaGHy44ml5~q_RN1%XN1XyMKONSyGvG$nBv>F=G_$VH!Qp!REl0^<;A?d* 
z<$V`jC&P->+I&e>T-9}oBJmH@Bp1GHU+XZyX_M^CqeVCRyf9Oj!uQV2Fp<1vE@tzC zg}#qXcBen`04F7vfEvvT;*X$6sj+x7^wnc}&yB}$Z z=yg&~o}tRGKM`v|in;tSt0Q5n6RMD&9kDL!%>4uTRAO zWf)d1xmS|fjlBF!Xc+rqI1+7t2hhNai=+Vg>gt4LZ)+<72t zw>=^Vm5DqxOySbSvG&`<8?&h`U+cHbB0zUOkh~dZKXzoOYfYtbxZmD7{lO%8V65yl zmT@Acp94)j)a*HY6X!E7#_lzD+=d*fn|TxcBGW)J|B!H;|3o(419!3Tk!_Pt739h9 zKltT@BZiRSkO}&G*2R%Ix{nh2($87(uAg6;JZ&l>WYf~z@uGg0s%r5#O-V2!>0~ze zN8sz%KYAniMZ4gEuFWqihQ+_>6c9s^IrDuEZ>rJST6_IEyL=7gTNH&+)bh=+8brD7>Vx zE~S4GS<5m0LCeXYPN@IR`itEkAxLZzeqwtf1y9xImWGlxlB2;=gph9A7(>7F+k9eR zae@%N2qZfWg+VXLwbX|oKBAkd@Tjpj+Kpcsu1Cmn(&rkVuR;d)t%zuwY)_a#CzyBpq8A&rHe!%a?%F{56_W|A0}imh^xYLGHtUn~Q>i z$dI=I5(|S&XJ*U(C}NAGC#Iat3VV)yVo^W(^G1|cO4%cn%#8Kani?#%^lS;4%_BYk zfV$uWt*(eQRt}0noljR=?7Bs_3C{UK8CZ3wt@N<{&Ro}C79aU*i|e=m^B z*>uIfypzBQKl$%hh{5@~m!JLfXd5kdZ8@qlmAot>tQtZJQCfA?KOFe2@Son^KA-W{ zV!MeTY4SeXFF)QMbf|}sBx!`DolRLty?HM$zee=zorjP#3w-&%CpT1J;TYk=V)7Fw zH*>A2%q_mO%bIB1z9-O)uttqCQ`_=4S9lMhKx}L7 zWpm$C^##V!nPL0#Qz7L|qu z8ndIK$w2@617bfwl&6MIe;^j{h7{~gt8O1ifiO>#U@z!h6dMRn+~ZG{`^o-vrkZST zw!%&>@#zH*5N!I101EMywG6&gu49(?mDJ7rbK@%zJqSO~gx@!!(L=s5x4zvf`5tqw zAMn~C?*^as8K2RXYI0q4|6f}?9OFo#PIIv`gVwn)cx&jT3bVzM<4PP!K*R-h5E?y-cis}FJ zOXC=s;1J6}Idof@P^oQ#y!xNeUZM5){swGI#vb!OSe3p$zAj}7PV zmZos*Ihq)k9pInruMGyD&oKf&EC$<`c)y5r6wBKo?baz+_|2e`Ps|8P>Jx)LL^8Y1% zlQI0OJ4fbc!5mV=x%=!y+@`T)CZl!gII-(ugTl8JLUu5u}0j7Cc& zBI8Q>!m5d)zgs>3%Mxddposjq$oy`cw8h?KHSyoE7OcFLmrxYl7g&{~EL)Ow0l7=? 
zS!Rr5cvH^#z%C7$Xihs``En)6@qeHpJ92&7ghNI^&d=V63}RK<-W2B||8PDDXy<52 zc!X*&{ibokM(zzj^y5qVJy^<4McGe{+7Q2^vI~Kmnm_>VRubn_fHhgKPK)HM4`(TE zn547o-M!*!{)~A2Bfo}mllFOC^6siGwTG)=o0;3Zguaek{RScWlLNW~s4}0GPRnqM zJAp!>L3QbI_etn9?r$uL^h>m)jFt#)%=A~UBO=bHgS+onb?#XlJU5AB3>T~)pOF_B z(ne|*%s`!fG7lAcQ2%hB9c`~5y_&}LT)M-D10xYCM1;Cmq%E6}U3D(oQvyacKC;7Z zpe1-X+*9cQ$n#Cl6mWPXPPMP6?dZN;_)?!?`UH5Fl9{Gc1j8b~Ct2wG z&hO80wzAT4#Y*6OlV`}K*B_9dN>8FnZoUmo*rf9oc3hX9S2PPzMs=Y5``N~7mn{A^+(v1m~Q z64XUJU5Ud{VlBLqrujYoMo)Mp_eN8}$jM49{9eTTL=t$5F?4f2trdE|YA*$K5ioH0 z`)4FJpsT6LZlNGh`2q+qo}fz1MlORot~#TubSv7A$}+nVaLJl?dgg2DNv}YQW*D3s zxR)*fd|h_X@_Y%-B^~ryW1RVs_Inlkt@K{%_JzsL$8&Y5-fs!w`tKRp?6}Hf}0}#hKpj|3s+fFB4@ep7shJ z8wT8k6AX-xBH*Otq`KweK9{ZAu{ekCXB1^_CZ3jzQLG2-5`-3t}p2qXcr^Ij;@|2^@7>cWpi7G)oJe zu!9Z2rlnFok0U2Z$A8|bf`q9)>u)U9s`uS{pQk&L}m-&URU1~asI?zSiiZW{( z{dt?dpJyvF*5IJcYkgFQhh9wgM@-^WF6?~l#8-IrX4jCiwGAW(3!iIV_Gel;Fbou~ z)vq|1XU7~GT8@No-~F62>+n&6c48pWIF_t%xRV6u*256&O0{7lqilp}^{gj&^Hm2%S*%OhyBuMLZ$%=~5#X6}CfnI^d|)|ExN< zsL9m~TQ?;M3@kgn+wX*i9}PinhcwWT3hKh0|E?3$Cuy>#BwSjVG2GEj8T1LlM(QAJ z%6l;y#o7-xKxXl<{+2Ytc{Rw3-!#1b@4^&}c~8%y!)Y@u0up^-;h#!UM%xH+26^x2zOQl5+urnW- zZ8T7Ct#IxPn1id%DOLatfR6wH(wkD}e(N&|u}VLs`@>j`OCEi?F)*g_P!o-fu%?tX zZHp22lB&;v%BwnInfclTUfPvC=CcHU0jlk3&Fv}>8vPg4=Nmf1GTx`c$TEw%Y%6i% zXuN~o5MfSa10enBBp8K*p6rM+u-Oi)i~S?*=Q$9k33 z^rV|9%fIEpkVP4D8~F=q6>$vH*bhLywwabo{KCkHc2NWC^%=>JUW$jC8>Y9JX@XON z)BMalx4)ISXW{KT74)}bZSe)MgJJJ%GW{U&!!O+x3&3Be%<%TeY%=3rD3)}QrmVoN zhS$Zve<}~tb#5`%^3Yvq9qOBjN@;D3!+D;od~_-pmlt`(S9%nXi=pVm^74htqQvD<7p@@V&W4Lpi^*&F#)SCmrgmasjF`_ku=x zP1^V(pDVLDmNYbuoJq-3?O9UAGAm5OfL{&1dJlJ>a%tq2CMR!oZh4JzoU!$UbdPye z=#NC8kMD9G6cSSU0r(&btUC`Av(K|`%XDHyT+|S|X~T7ZC1Yu50qGYIB-_&BAs`dH zaL7ihy>%?UJ6WHo5?8}=9mHN1bm6TuT(sfaov0r2;QH{pU^Y#?vxkUy*VB5Jr0iJ( zs@_RsCoO1GpFOx0n|bYhkT*=jehY5BViysPcefjbFw1Rl$DoN{|`NnQ*muRMND z?g*}mSPccwHej_$l^9LXg&Z8mIt(_l$M_Y-=HCJMES%XFE(blM1Jy6r?{>;0>%{r) z;zM_9+V#$)?uVpo*4A(7ua5xN&Zu9?hBdEOAHVk8gc{}F{*tpG|GC#hwk=xj-Nx&kUj$sQ(47a07;UsNK0v05^C1?5k>Q?HM!D9A|;T4nrKPM0Lg 
z-HbrB=B?QBw#ZjI&B_`rXe+#g47=D;cM((X;`uu6OYZX5ug)%TNmneNEdTuAdz+SB zB%E(zaZQv9S`4Axu~GTbxQx}?hv#YS|>j6X+# zIKJAn&b~BYB@vsQrQXPIGfp{N)~|1>ier^l&v^HF@FGb6OW{CO6QVwIFq3?2#lhf| zEmAN}yMoYGe^Kx#Z?{ef-(tYlsTb4@)bB4I`!6nnV*b(0PH_^8cS5MY8^Xv(t( zhKpB+d$NA>=`3olFPR(63%`-13(EJlQvkZNLSlE{>vhE^3cn61_XZFENU*?Z>e*`0 z%Rpa73S;i3+8n2Ar1!W1xGWz9+cjZTy__+qYL&f<`c(wx%xvEK55$vhiuU4P#;do^ z&>kbZuOb#6bKB z3Mu3>6zS*TM!p*%#wP(3c7<&Ca*$Fx1^chk>}nK{J}0R=x%3E0Eo0hz|@CqY* zxFROyx^@S?QKH=$nL92%`Bso6L0A;gJP?3Jj11D-PlEFpov|&0r|?>pbfnN_U!P(x z1R6$cYJcL>1y&@rCL$8yQoEHH1`1DQ`^^G>jTHk*5v0yD82W-t))p@M+CM?1oqZfegZQ4T_gAQ*P(#tfQ8456Kn=NI1e! zKc@~I;{X><_|LOC0L|Tt(+EALuO5Au5sXzE3nRT0OZ=bQC;*TC;I*4IyE>!fIN|8h zF6$mwqPW;p?Vq9*R5oW4Q07UAqi{DOROT@zuBG(=aINc=3nR3Qy#Hcv!k1u2NXEg| z6eO9C`ta526-Oql61B~q%==-M;=?YFeyfWe(~v9LFYoX7tEn1ZQoJ^RS7-V3&w@n;ox418A$fq^v=jF4bn&WLq@rK{b3S#sYH0k<+SoB3onpg&`LdVVu7_0 za@+w{F|(=xWVd*i@%N*bj*Qng*WqMr!BYZV`(0?gR`;iG*O&MkXg?M3&twXcNXPFk zf`5*r{#jQVLZm)nKt7%^GK(mh8;TJA!=gZE)zOpEO?zb!Ge-CfhlIG`y z;4Rmb!vpMv|Sx-5r|7161o^1`?u>i zRc$xhrMtaw&Pe=hX3$|_=C&MdaVO#H{InPrUAkwK-=n&Ib|EaXmAM}gVnFhw)UG$s zn(d=}_Vxa6$cDuy3Ja6rhliFCz{+b6rMcJH!tdUY0`q_2p8;%O>t!GrP2TW*KEYP*A5%KzEP!C(o#jc+jQSa$6Af_I5`T?}SRI zNXc4*bFNfX;wjJ9n z+2qaR|Bhj0JQTKBB%rd#H%AWUYA6(kS^@(Zjl|g)ayM%~-~49Ft!|GS;A`+=!?8wk(_XzXOW__v7su8L@Yazm%61wlgWZqXd) z5rjRlolDfrB85(G#8)|w?&`Aqfp$pek6rPS zi{as!hROT9#mgTywCQaER7>`k@qOeiLanCk>_D0^&29EL!H2Dlp3KqZjB{C{#2$Z` zMfD!O{+dgG<+T>Gn-6@z5S$M+7SpMZRxooeE1mzwwTfJLo6|&`p7lcZsPdRne~tr~ zHYb;-c$;Jm+@_RZO~=7o0L=Zc0c_|y+990{}&_85Y5D$VgN-TQ=WOr%EPb6u;<|o%@07SrD z>T-rh$!#!+u`|_DNY69xkQDG*NO^*Am^SQ;ILuQON6Bxa$5Zu657N^oV@f1xM4r=?_>ve288pi_S;>(Ek8~JuRj6%=l5hIu!`UU^v*ZQV%hdd1NM7l(gZuWJH2gWVrY`SZ_TFEgTehtu_bj46croX(<)Or}d{||;8uDhQuWtkX(hoY@a z2$U@2r8Rm6>wSHx>MaGRGl7(cB_JEC?bBZQ*^agAl^m8I>qt{JNqkcSnSf6$6SOV6 z6bqF7X>DsN+^*wI=J7gGd4^?55FE>B%dya3LloTYn?4l_4YKlHX?qdwZ;Uu{n(IdB z=;%6D#P+0N+W_z@_?1fg(Au}y=-`CFRXU`Bp4}WXPRdQHmPE|Z_QfN8ZR6flx4+($ 
zkM)06A|y_#fDAWfZn6=c;{C~Ta~~nYe%&IFOtF>EcuK#Y#{zh}BJrG(SCA}nryha{>#%Cs7j@= z`=pgL(0luF%W+V5L3?@XvUZ^BL~Ilh)D_T7Ij3Y7wZ|uETbAI&jfapf<7-SKk5G%tTZE6Bf0qRE z(kEz%!@T&mXm4^uxv8@1)6)*{-V7Lz+$)v1xSc#Cs;R5A$1kKg!P_`1xEJBMXedDa zkhsXwXupS{Wz>YC!qLd4Te{DK{#Lm?>WmiGaF!^duqhiK);v<+4L_Fav8s=|of>N` zLiR(Q3vU17WDfCG{Y)2luqr|9D*k;Gx?gf*KT5B``huR9V~3`VP=S&_NH%(;u(eST zHjN?a5zkNo=>L;EjAy&pkbaNDJe&vuns-$pWOAc~K^szHK{t;ZwX0PuT&(FZ@0lir zcXn%@OO;>ixD;#%x8j{i_2rTSF>>lrCPPT|Npx^;a<&i0OF0Aa{XKrd_rcoihISQc z7_VP^@>+op-4LU`HKR($N4T8HIOd&!M%`Z4htwJ-kQsx3!uM|>7jW-ER3-07&-US* z(?~pyK!yOpZbAOUa|wO_7{GFoiwu-MN{JSW6tU9-|2t`q%6dCF;NhP}92d{oq&v)^J( z#U<7SEuN?gMizI%4K;?MRE#o3ql$(EHqFs}CI@x!2jtL;GQHiJG;6PaunXdeHhVSN z7a+Vji$h#LWDDw0rN#9B*Be-ohxJ;gr`#3t{`gnYa!NhHuEDejfKng!#_Pvw`BL{u zS8~YrhY=f7E$Z)e5{1o{RlHiu^d@fz%cV}D)2{t(saaQ8QnK^wAKV>Q5E`6;pHqeO z|NgqhQr>WP(HW-GS7bf+$cYRm_o>By#aAe})lJY=1QbPBRJ$RvJF;gU4SoCy+2h2^ z`FIlIQOy(fRdBH>lf6C}^3_-?nRKn&{%FWh%85!v7t(WhC02ZAZlY}vp4Z6;Srxs5cwa2tbkJ@T*>V70HWhu_?t@FmLLR5CF$7EFZZcsmZz)S4c$=_KtRSP?gLF(cFhbz(WFb#ghpHuLlhXRMIrsOF>4&3XjW}s{pNd2d!<>! 
zNjU<28ceOo-J#02*K-wGsHb;WxkzgiqA@BA1_Uc6W=L)U&g!WY`}4H*8;y>6%`}|z zhdDoGkXw|dUf)~WZWryP1;WX)6k zJTjjstphR- zLpO9pJ67=?DTHSbjeab{_EOUeWCp?wpo?9YbzP9Wh!a}sT&>Mhk$x{un_A{}E}oQJ zXxK&I*e$8z&n>TNZ(j&+I17QjlPW+>Og`NtHJqo4PiW^#DaSO}d+V>?-hI`zzrGFx z`;}q8{w)E}l}B3Xfk|eMuce{(+YkJ#mQ9@v4-L=xue=qC{S2Ba?bLVb9{YmTdj(T% zd=IBasBc>|29whLW??)X9VVkBXJ-4SPfjgo!2!?itg1QMUhreQxeM@ND*7)Y@LmEs z=Zu&PH|z3+8&f4eSXveh9z2dyUo9_m{uC4xs?tU?tq}hBPF^9>k*=;YHO;ocAptnw<5zu6L*P zeSVo=-y3ixPaNa{AFoHd_A_o2ZWdWiI%Pu%p54NP9AHM1n&2h~$$_gr+y9d+o=-`V zC69+Zk2Pnr$vC-J!iE9!dD^)SRzu0)UZ;YPG3~&uH__<3=8>|YA3Wvt)PZAbAsh97 zrtm5nO>9-YX*GURkL+ZfEgD$vBW1!ydC}(=(ubJhT#UI*?D*Gf9+Wr%93-XcXV^W z(kzW_mpCJ?=VGLP=sO5%5f;|Z8~xhGK@{E9yGEJ!pN#=}3o`zkD2fJe|Ev2C@}el( zS8`99)=gZ>MfuD9yc|=C$}5VF3DlfTFYuqwyYRSM;pcMbSA&VNU+JVp#wr8SrdTUP z;BLMA7CDhn-f@(rUM&BeY+>Y~0n zj@+4XbR8J4E#;8n`cFh3+I83sKbS8Px3x>uyl**4ETg5MY+3R35k2dDO-7;Q>nw8hwPnXg|hXO)*Di8yp}RJMtu?_psh0f38%=ZcI=8}5B`gYmJ|Cqn|HKDEaOU{}kXUh6 zB%>fWf7#Wz-r#5dFgran5PE_KR^>?asb0yk%I7cS;X7P|#88JG6-%jyy}st%baJ#h zkCsS$5lB|opDEHCh(eS8EEkSuzZdw&1w=q1eY7&bJUf<}Pv_x_)>aWG6QL12Q4(X& zhURrJ&9!SImZc~C8C3bxr%If89>Z)vAKIHNTVqwfcyts7xkiFecx3US!p|d1h%M@) zC*-a{yuJiASE=W$^2%cX`+c|_v4^+je?%^{@z@YuTTJK&Ya9BUp{*{EX%m8}*0<-F zUenm;d+<*2_h5h~3p=!eItiYywkt3#LI{A01IV;W?E=Bc3v-njtMv?eHEl<%FkNlL z)1z#CQ80ketG$8m@3Pvik~pxpvhhYLBY||_`b1&WX|1}W{0OV4!{*oWnAux)!aaeV z0_g{ENzuKQRBh?blJpbONyYN z%@of|*n8nC$#?HwdE~Z|wxQ)>7&0j6s8iQR?-5&C{%F>0IisLgyBd+g=qDl<|Wf3G_!(G(YrRr@zKwN-k zM6O7G?PlyjT{rG10K_Wn(gQRyyME0eJ{6Nf<%tne0RbC%c(ezE*+ewccWF{}NdGhG z*E-VJ!?*c6O&Y6T>rK*ZX>lvi8h=PEr^{z+yo(4M440(T+fZ&exnP|)F?9OL^Bkpt zN8^9o!sn5lQNVZeoz)dLJ_Ul#HR`GB9l*}6og??coc zB6Ehq&fPw~lbl;^?wh~(dmeFphe5vdBcD9>;y$rv0PiF+^-_syqJMwFQ{`%6A|{pC zujpnbOID(2_D+8I_M)WU`|)EV=V|kWO58=w@@eV&xMNiC;=LYD0 zou-7y;c_}h$0*d^`^-OmB8=maS||1K>fCkmc$L!Ed+SPFTsy z<%e?B$p!GjLw)kUb)eKk3gL5V=-<0!LSN6|gA;5;s?dNgF|<_vCy9qSoqWPm1x*XD zdyS{`C+KPD$!tMAi{zPBD9*Diy+Ls~0N|X&>N;31zGB?C-5xFGN9FYVGsq0Gpf--+ z_@B7nz|Em}XNR<80 
z5POzXU>B{(I4K$W$mMsRTM$!o?lvb`H*uCPpO{@!IfNvGW{qA%5oOeg=No$PHK}vI zqLRo`GfRnney#D8!I##^Fgd>agJK6J*7qJ5YTqPK_Ple93co?t5EsabEZBCpwlL&Cf=UrMYPlX-tyutT;v< z7TPGR>x!4=shU*3{!4S|kDacL;4}Q@p0L7qr8Ibl&FiSVMPPUS_A8wC8Huv4RsN<7 z%RsBu*3&?bwW7jVGU-rOHJOQ3_~%!xkvE%knPx7qU_~RT8Ht_a>io_0W(K^!Rcc*o zYA#INv}8wDo${6!g{J^{6Xds-mwlx!%D3N#z7^6*>=x+fV@gLa^!vWT2?J$14);-2 z?)!Ejpf_@7D;RUoYFKGOg_;18IL-!u*0RY)!qvwoe@$FSW!}i7Vd(th9pehEFV_K$ zh((|5FsuzUt2{ecB21?g@}}*G&N{d7pF&ZRHsydj53Wm*=#;?#Y(zWX=IpnH>2ozi zjlFl0sMWFTYZsD%OWwMmb|fw1yjq*rs70{HyVb-mJNEBH&>)zaL>SM{W*Ofa?bktKOC_VbUuSda@blR8}Clanf4oL=T-3ie5@*QilROTaNxDEfmAPTH_t6Odei7G3~B5+kvo zgBZueUkVn9h?}U7m|;HHNHY#BT6}C;gZ^LC%i#i13u@QCOm3}p#WnE>7-v!?yiF8p ztMxBN-L zvb?Jm`HOVP6!epT;VhiBxZ?akqxbdRdrtdSbA!A!DwW9K_buq{_SEvfoWys2W1U0mGo4CIVrjEmNpNEsP(#z`(GoBYcMg0${phk^qj^)3mh&p z>0~tQL$a9R`CM9wjCCsa1*mTSe@wkqRGV$MZXKYM777%nMcYD=1S!ReOMw=OyL&0_ z5?qQ~fdIu_gS)%C1$UPcG(gauZ;k)oYae8cj2yj3d7t~auWQbH>sOi3myX$B^90~8 z&`)axs5aLtkV4Xzoy542bAir9TxizNZqRs|^V!RA8IZA^uG+v3)gP0#iH?ezz9F$K za8@J60v0V=g_upn@4+HGyqbf*ETw1B+CvG4!h}zJ$>R>F^a5xHe#1H31pU(Inade{ z=xQ5ND_3bgr+05u`x+jev6_;-WWN zl3hH$67X%q_|vp>XY)NhZyz|_M;FNWecNfauln;aoj2XoSlV#7@{L}LhqUWIsmYDb zn2XC08)O1zXCNo!Ja1C|j|HKd7dFZ_J?k|QFr=ww>#Eq7+=*ld} z=p&i$GRsUaU(rU9m$g;wdu%FaRKU+d5qu0TPu*-&Ez@(3GUH2x_8Myf=QnT6wSVjC zD_|O+S9ArXqZtH7p3s(6q6N|2xjC*U`!M8@0`JWBG=DcVjXzj1SWGCyn^7F_F8mJ^ z4OLRV{h6oi+neEfKbg?5D&+C}Fu((B{fScJpieo0I*1z3Ec~K$33Y(rt`3G|3a4qv zhlbytu1sLD+v6(SOI{?A-1=c6OLioU`AZT&WKjqe{hRKD%(Dj8;@=xkex1< zx?bSHj6UG7Hh+cDR@0kOgZogUNT&wO?(o`7hpDkG?eCiwIy4Q4hOTf@gAwTt5bml$ zg*U&+1~ej&wpWAaK%N0YJ|~6ynAg6m`}PjEojpF0>B`Hw;T(bQUB-l8LjHp9cx*sH z<%lOvO}~(8)M3zg%mC#dC`ux7%pSm2J=1PUX2LzAvM8p=#F%}${YdZF1kf@@9qid3RrNm=8a8HMn!})UZ)8Xu%6_Gr(~!yK z{p?=p8Lc)S&Y2)vW5VHa?&-=bSF)&%#>o@+dqFGql)nGy|8__QEQx@r)>~z^ZAX^K z(oM(z;I4~ON&>{Ad1)1C=y`V4Qi;66_A~(&v?!mVFluN1Wa~|fY0rI?sT=gVR0&!4 z0l9!41-fQ_4#?VJcm8>*#d6i#@5_!NAFD@v<{M0kei`6&keuO$a{^caX)Qxy1VXS} z|6yPUt$(EPf>;aw?pq_kyLu%1xdsW``v=GCLcAWoPCh&!rcUfe(^witr!B(A7^F#l 
zW%Z%7%|H0?8pZbpj}j>I?Rlbt=lm+dl&&7s+E@V6j+dv{b8bwC;~%E#&c|Q9^t!2% z%8XT&NTZuX*UKjCa1vCtN)6}@;Hfc4MH5TUIeNIq)C~4j6z*e&b zxGyJaET}3pgk$OUm7)H!QS}3n2mgCRYrmyWpc=asNve}IngU!-|z5Z58kR- zXN?&L20(dt$w>@zer;F9V0=?(pST88j8w89zBjKR&N+MqEk1Mr9~u>sFb7GI&{!K? zZUtdjJn4j8RwF?;p-D}l%WJbxf#u$`^G-r+*T8|Xng+uKWY0&#e&ey%p6QQ&4;I!x zWziKxT%_*m`~DJHhwPf}5@#?tm#|hCwp)&Bn!JgG_Ig@KjD0pMZo4SC{m1~GOEh6F zz@JirjZ37`U?voyfSN*|)FspZ)A`9$yfuHPm7@*1r}!FUg@)LX^baux~lZUdnd0sR!clR zD|1kh+}Q6Omq)PY6-Y>mEIN*85NvbqIX31YBy+C&X!ocCo}e1iSrMkUk#ngP7pf;A zj5|3uYos^rBTgi1#-dn)!(AM1(Z8dV6EH2!@$+KXVNY!G;-4=qOV^F5f2m$OHm`YS zwN!%=t#$Ik`(M)UolNvCB@bupxD;^rg{tju+M|L@_m7V;^%TCL7kh`Jj}Fbs6zX_J zO>U{F-^TQZlCp(@8&ClB71 zKD)}^_jwBDmP^e1iJ-%pM6>j?GVeX@Ww8VaXV>iG#MUX+*4w4hhXs$hqZpdIX_w2h z{sbx1oKa0Q3#1tLX%)Ok&(Kdt9!`>B5zWm$unC45&Jw|`0&+#XAjzdZp)C$J84r05;<9-tr!1NF67QY=}Nl;0gGBFXI zsDwY_B9b)!05w#WYE&GDcVkJG7i7Eq2s*C6T@xBV_oaUuRMryM4p3m6H|JsN)!<)` zDrt6Z*9+OJvv(Md@9k+9;g&4^+@Yq=7GF|>NlA;6c64d|%B=JSG8u6IK3>tcZ{vuF zj8KcvsC5y^zt~~#9DK;rxvR{Ja~pZ!BP$XHfb3+yw&45?)BXq&WsdFq{*5vxwb}Ch zPpKvyhw%51R*h2w`z~oOopRdGaJe5lW-9U}))m?YfK6}c+_c-YcM%L?U~yk0IPjUZ zVV?~^q3bQ%$ZD6+uqI%{xEG#1KNCAuv3-H7k zeHUkB^&nb8Ci01RuI62i$H%4uNfBd&YWa#Ik2cs#Bm(AQ0wiYpym$^qEYa1Bbjn8B z5B{pm7MfVD8EC{20TF5B$}<2CL)>RERMh zQs2Ju{ybLsIH<-oM{EoXMA{j=8D)#0sJBsE!D7Z||6LiC!UTHv!U5j^{hKCmo8?an z5gBG)>(99P(gYI{DDJI4FXI4yg09BhWW6WEFiWkpxHVP^RMFL`aNXx9C0F0H6E(#% zEP)mD#}5NWbuEAqS?YHv-{>32l9yI^JTq!sQ^<;I#wW!gouI3i<}&Sq6=3 zzzo`{QIhiPCxe7*bL)Pwczdv&N!FpnN{mm3vhQ1+w#gIa4#HjFpi`keZf5<&eboJi z+^P8>GHUFTBu;V)a4Y6knnTJ}jzK7KtXI6FAl2lfO1h%-j#-ZsV$`jvLWHJpWCb>( zm>{_)SR(;pqUTN4=`AArB)elMktn@3Eh zbDU2eXy&>T?E!N*C3cBmM0ro$TSbD3D^)6b=XN{EdBA7D0?19U6u!ht{I_aUO*5YC z={Bj_i{s33F!1lUwtFHP%e&1XW9{@GtfwiEy(D_Fn!SVjXqpSh0`AS{B_G;WT-R(; ze}Y$C;VRTA9QGY~T7tE98(@xC6?Sj-7hdzN`zt-;Nq5aNy^{OB`aMa(0X}TC_2vEO zInQbW!~OQTx^}^|^QkM2%rprfi=$=%o1V?Qn%u{|1f*A%b;~y>DM$lKpdRQNWYT=K z9&k0K-qOXo=Gsf$@Z=2y9(RsXJU#*p+A1A29o| z>(yI#Ypt8m>t|u)O#i>6fC81PfUB{! 
zyZWoaac=`K0xAOlLKA@6Yv)Q7@Dok9h-hE7NN&B;5LGt)N_d<+Xj}ltoDz|Kd#~F` zQds4&fE~Fjh?XGMYPgf8d*liKj6!CiUK-Qt-R?UJzQn6VMC>~@|Dy`HY2$7indgF4aM>rKNwk!5Y`j^~CfehJWuseah?~73(_LMLmtj8_FUj!9~o{?j+&bpvr^9Rc$qD$ zSsT;OY6q94h4}iLbkA-zL|-<1)^`(#u&TAf$m>tTv2nr9i`|io$pe~3)1Y2sS%#=gQ+`vTRZfCUEdu69(83N(60E!;)u)#ma0w#Z*8dT1b`W51;KKu7nc8?mN-#`6RF z&}hLLxO}Gbeiad>w#Df3BLW{bw!a*?+zcrc6`( zISi8dq!*!KPhH%`sG_)7;G7PF0XXJ{OjBSMY6{I)9HzrN=Pu^6j(K-K<^a7zo8x75 zg12o;wFhp8UJBDV%(RLrx6r4GhF>OCWxfo0FUq{#kUjqS(S)i8=)`7CSea2IdyN0U z1d()PMGk{Q?uOl%Zxa9|YEqTq?>)F*p`6TYue?l!hKM)e4e{aJAR@cYp5lAm!67<@ zc6^V7lUf!SzYB60mQwfYcrm-O$kP!|jkvJ+yi1q-jMWZErdW5#PSh@@8_iv=Wynz# zr1w)Xs{2I5*+HU+H&r>~N6-gX2C|T7#=n)60IR{`moa1AC!yyoM~^Xw9BRTF>CkBY zPY8U{4-Ma#VaztT&v`!)j_;pf8jD;{CpK`Rze0mBK_dchfv6P!JKy*70b@{4kRQNA zdMMx){TCiWTfG$xf1RY#RKXYHbyI4g98T@aqgdHv`M0=O-geqj%dd3Q4D)>Xfw*6e zRJmEG_AzukCCe&`-xPX*cl<_J=~4!`MI>||0wVSfxm+dG<3`Y?Octq9zKw;Pbn$jg zNv>?pYDPG3*0MjBbJ8Jr>T5E4hfKHhE`oYLa~R(jGGWypTJn6&+YS(Qc>8p4m4&#r zng3pHjP#rgTS~NAov)HE-MQA{RiWuMo-Pdm7^;{aiu@x25r(8}!OI{Yq1@S*QgvuT zc5}UnN_4CFL(WI0VMg`aG{ea`M=qJSR_~%Wr5MwRx(k>e(X@VWMpZBtg+6H%Z>+wm z=K;*bY`+Zj*1Im|ga3>|;-7}MVbK;r^**cwM68!Z@JfSWf;y@m zfKKUw+FK&1u!uq*pG_V4o@qQ5Ij5@dNOxrbIN~T9ZmcIj_%wVdzD*#44W_FQ=&&v1 zJK;Pu3Ob(#gL?mVhmaAFzKsoTfnBGg&i={;~_)vu-9|B2{lP6s30QKUYsqh|=BK=7M>ZnU6kj8Xs7! 
zbVOXGMxgD+K@(3#DWoNYu`1TA|2Emn7@lw?S4Ja9Ni-Jkoh=W%1jTYyeWU| z_lXTK32g+yQDu}p4jJNQm|OHicjVwG za0jId9@#;lASKZ!Q+?CFzxR=irBSbDMfyr;?CP|&cswqi`*N@)>?J15jmN*F-(itg z&YwnRT=d_6k2;x=Z114)t~3BgV}K%Xj0$z1X~ahI^0lAQJO*%R&PxfM6O4m$?Atw% z#eJ;f6oSTF&w~TXZrE$c+PCuxuBUt+@fw%;j_U$A|9iP4wyBB8n9(kvkc7!o)jQCC z$@}jsVkf-H@9>uHIy|h8wDo=_>R<3RpWpH<1lZnrQ&75Wn&&~M1b7=c$ z#@d6~Ci(XzHm^Wj8=d-F4E#knP~Z4?+-q^^ESttF_}J_8q1bgn?<&rx6OZO@!WxnO z>h@|s<374oVDa``_(}f;S+qJmcR4DfZ+;a2>|5iD{8MWWHrl}BFIY-?4;b9~=uES1 z&eY$KJ+^76ufn&nzQkpv=pS9tmF5X)=@<0>ca!6FfO0hQd;Vp;4+jTcvLjGE%&Z~P zr|0dD=Fptqurp5brmi@Wg>aR=y1kOsX!R!iFtF>9_;OV77zu~>s?}zV z1$~zThB zv2LUN&7vYSNZKlRrq~FmF{_9o?@mYWBzAmvP6Bx1tyF@AI4R~R*nP#qf!pF zJIkTyyEkUr!8-x{`w9yU&}5Qu_${`~t7t5-B?xs#tb2}%B--HL=a2&lf)v*kQ0SDd$gdyp_uaw^VIC$Sf>LP zWdP29hpQ!UphOJrKE+O2DXbUL7}>McWtWR8OOj*43Gow2!GrHF2<(j()6%IfgGkLi zQ>K30T2s{H)8`!?Nsn3?&nY2ffvSf>ZO}t)04wqNDMmQyxV&jIhnxVLg;i#uvxxjX z@f=;EX^J`PyBBwKudJMozGqXT&953Fl6eE-#y({8{|8lL?Y9@XV~^UPqeh=F zO8d&mupExmP~+3-G5i9V1EUn;uXSkA&(T^qsL>=UDz-jUWP)D}w?MW@!|_Dg^>`g* zn9%1g3`1c=q^z0<%u0)&O^Nm*BY&#KFod!?zC)=@5m+28Xx}~{OQCH5GOx7u>^t|y z7L3sd*OG|Br^1;Ui6SGEGLQztZ`ZTKS!Yod5plW~;8tB!>3<};9FL&4WL^{-#4qd7 zVlHP9Vn=bR|5jIcepN%md-*L|=5vWEvjtY!Il$#}QKR{9D3vl$3S;lx8ATO%@Pbz1 zfs>o#3y$)pM{7}wic4K4*z#vj#9wncokA*V`9c`6Jm9snECSg-~$5-%Zfe9}A#N+x%5mZKc#F1B5LO6G6+4%iI6j&*hTlGR)rv(nls zuHG$Fo_ZyG4j_3o(G@4GAon*@CW5z8g z1jjvU?Bn6EzU><$;VO@q0&mS=EUf;z%1OK7=H}Y7%bvqPI;wWEp#WYX;Pg`lsSjLl z$iycQKHnGbL5M@Ts;bd4u{eL~$z~IAWV6@fy|>c7(LXvHbjVrX*O!n+dA#%($#A?r z>wLtBTUkr(?7MU2ISJ^X}5NZTRSHfaWRA<-)7o3(o+2+Pnq_3!XtwE*;F=KZ^N^Lxd+a zns0-6P|y2Rrt5xuMA`A>cHVf_?ev}P)}Wb?vvzB=@R=gTa*GZAQ;hJ>ExmD;KlnoT zcj+g|ZQz69N*ufWMjQ$tj8BgM;#n2JSb)J{0z!0{Y zueQ+9-c2tRu5@RNw{mM+KjoXuQ`m{3bHA~K0I`de7D($|Vs+(>T;|h;z4kHTGz@;l zUZLGs6WMczHN;h~B9=5};SNz<+T@Ww{gG#+J;V=ed%nqrC}Y&uAxjH!RYj1%{VE7M zmC))nnRet;@wY=V;a;^dM$rLTT=8ad=YVg%wfzRX4(u2_@3V&bGWc6C1e1$}-H`OQ zLflkV3Q3;ujk?JA+k+&+2y~#7-wuPgYiT?Sgryj@U9+C2H2s1oJkj|4R>N?DRJhtk 
zaY{W6@B-ja(S?30GK)s{!X*lUjcpGQE3pzFuR9FG#J4{BuX=%IXFX4|AMtJOA}<^3 zbvd)}awY*=NCVrOUS?DdvJZw0XVNs#6Z*nYVyB1_7=#qul}F>t zx%JoJyvI7U0-I^JMzv(N&HUX%mUr2))vaRy0H(1t@e$NS&K7eU{zj1~p{``E{hc?# zfR8xxoYGK_E*&=^%C~2vKu3N#Mb+6>8~HOH*9KrV+JvHN@odv;zr91)8T3MKRlwVv zPX>8gnW?*=bU$TJojsYi{Fpl#m4zCF!cqyaS(Pt1g40c#ike7ensfr+$sXO}qA0IC zd13@knyr(2!s0J?&-cDFXP|PAsViSZXlJv#RG(b}>GE{ru2<2yEVhwITYcvJlYcpM3zV*1D3v{&Jw)Gq=M}p~j=Y`K3vnLDf zB_TS~wy{x;|H>it_cZz!0nG!W9BDLb(8&6D*R04zx$=XxBup8(a))^O4zb0h`nz&L z1KazuldAkF)D>IC*{4uopJBW0gSUK@{Vz7~!tg)qwd)7h7U6}aw5XM)$B1_AQnUmv zuS(80E+xqIdaeA-y~_=*@y9WETz}wtJCs)nP_RAhg;lpcQ4Ai0d#IIL^!5T z;xrL!%8wW_Sq zzK<-rL*vd=^)<~usgWsnFFkJA)GjTvcYhE(ahcP9+0nI$TOzMPH{E^~@ezL%$DLKF zZ?a#==(AU_E_lneh_&4^u?i@e`zBhR-#epm1E5+5I~abnKv#DxYKFV<=ehInZf>75 z3d(P2U(tS`ef!jUozLfimun()2~7Z>w7aR%1~Mq3=ijuA={{4MS3`^Yf9f%FG^XDB z^7lRChbfnrIVr*5d;5_uLK@D2`05WmW#yOt@_+6!ZR5{oh8FT-!+#{>${YpWgu!ty z9R%67K}$Hx@6j^$9-R7$a|XPolElVIdCv}Y}BoIYwqc4;v6Qcc9Q$4XeBYj+9idI8*SFExZ%~(oHfUa@}+WM zgM;FsqQ<}gAv`B@Nx3RhiumFn4CooiNr9pKDnVhF3AfDu75le-@2IOI zG9XUJD;?2|b1acw3@)g^;nqKxK8WmQz4L(_+rw2cH<}m%)hPW##^zR>4OAG6R=qpT zDLi5%sB~g1PGkHhFcuWG(i%0z7KZ8Ct3rma92Yuzp?UN*=FV2iFdV%;ac~K-83a=z zY8K|IW1W4bfyo$sYhn@H6io}4*qKb1FI}V2Ff6%4JP)DB%{~`UdE)wI%xasn&_%64 zT~gKlU2_P^s7K7hqf%c3tV>~D>O@~;N}`?Mk&Y5&q_(7ajS-FoAOgup;4>3XS;**t zUvs7;>U9wN_u3Y%0B{?u#RoOjRvCLMo^=hqXd{`Cr@smRSGWqm?^zTr3dfYT;7j<2 zuyXVd><%TFYMPygM1-vE6i4tI!+-(uykn}RNy zt~i1yY7p>zd49o77 zNimoEgc_t?LF$+v3WZL>u;Pq*f5r!Gb9_jT0tqbC->X!~>&6S8Ybbb0af*4;M~!?# zbfaT8_7Fb5R*tY2^2d(-gGnB_BTb=65S<@^TbZ9&R~Zmz-kY7xN2U7r-fJm;@gzhLlWi$ z+GB-uk`EQG5+SZb;R{nY`0+lH$Pe|HFhkb1NT96DiSR)JhDH+snQ_+D*6g@LEI$IH zW5bB^+)Z}CNPY|R+7x^S{i+2cq27p%t$RQGYzb?oQ}dhhh4Ob(4ty0AG7$Kq@7;_P z5~C%uL67MRuUV{zt3;6ihhpJl>OANE_jK4t9a#%wE3~H3if>9wRmntMI+LWxSSlIr zAEws7R`+Q=nDIqOfczlk83`NufZ8#Y^V(^+#!N}`tIbuGA>w1k=#=g(!D4s8=|b%` z=fNq2o3rrs(Vj}@XbjB-G;LdxQh#?&{zmpp-m}-I!S8dycOy*xIfXC=aC*WZmwd0# zoKsNMz4zF~IQ}NZx4q`!fTf}Ea&Q~v{&wT}%@(A}+jiY?Hg9-I-+^L5H~-3#Y-yko 
z)q7D}8hDS^r%?KQU1{>~w4=zDzXrQ5442I9bL+5y=Y(F?C;oGYu-q_Y_#8EL z(U;a$P9GyTeUH;vg!Og_3qGz$CqbIDsGFhI+vzvw9Kj8=1=G5ZZa<}~V|z{}LOp6H zuOGB@^w2AK86Q9ZnJVj(e_A;UHUHsV+^B!G$p1sKxyN{2U$?F3Wv=nBR5v;ETQ{Fw zPFwn#%Ib5BW_OgeT=gH{Ot8_S(MVwMq+!fEwm#%r07c`$N zt*!e$Gk6K_uxYvG5oCOvmC>{Dpt)ah1jw{yBW3$(9I4i;r;Cay{o-P|HCu;dL1;#M*2Al4nWAfa#kQ@lfvd5stbN_ZOD^(KoKlokBhq#K=tikxgw}vSPf`+ndEx!>%k5b;f z+3?bxq-JIGl2guIejJ(j9SzByu2T6fddbLcdyQCthKN0o zVGI?s_+MV7)-;chA!s94e)`5((`QpTRr4LyO=X!Gv$Jp@*H@B2f$I@cH58FVEm7%Bh1mL`w`N&3pftJPtT5GF0)AwDlRPW`5FPG+m!`&N{J8^rWf+@v z_Np1ve=6&BpDKO=1^|zd^)o>FJ%|IK{*f6U2bWlx@l7ckJe=q6_v&m4AU;GaEiOtm z9$)%9y2}aKkc-N%2AH(CHD2`6PqeaSr3CfX>!>K}_d>FO(w>Wh#~5JobdAK$F_Z<2 zz|WknQK@vu+nfba!E(x@GqF^jhdC?@k_<>agcy;D7?gRJxl;xQ0^gxPn~4TEgM{#M zyMfj>ikty-u1M9VZfb1T9iULf+fWju?@LDKV&|%!?P^2CG%e@hVSGx%RJW67NkapNv-uNU5xvcUj(d%f~9>Zu*vt3shw49HQ(3JA*^q~CTNHIzzDxg~aHDlG0oT!VSoLkf#{?NZ z%MZ0?nNB%&>i`nxu4Ft*TEM1s*8Z&4h?#~HS|NQ0sUk~9 z4wcUQR5JihTzMGlAMl9QKdaIDVgZU%HhC`a%C`PWSsWUdz@^_p7aSH?z6c~h4(%v| zXy0&~&#aIn_V+8b`I76x+JTMP@_iP;xKfAOuv)D%-jDQWqg(fda-Of#**^j{=`dPV z#{7-Ci*FTdita0iFx)XJI{9Q^Rmtfj5f6d4HQAiLyftjiweR%isD3{u9t6rNd*z3D zJQ|6G&6VF3>^_`CZ=2p^2XWf*p2&kE@Xddo8HCDZrf_LV6#KPZToLgg=QfxaA*P% z>o%jT4KNYuM`jAIT4_pnQAsD{cuwL;<1T5wD*dSES*TJwGAa%{Xx#Lt)$$_kmx8vx zKfYr0ni)zif&OCaj8AZx2yfr&>L zG?6Q+7|&$2rO}M1e68vsz}VH^z3j(Bqc+8t))rzSbEGcQ1*n{t9&l+u3x4J_r8<_L zr%8we;R}$-+wF0qd|jyLd;+!e?ntEfNdbGT?UjzxZH+y5UAlJYFZjnh*-TLIm*{!< zr@VF(pDPBRsmJN|Q=b~1RwPSZV-XUGXKuUn7mA8Uvjw=13;aAD_vxVa9M;!JM=rir zd*AuEdcMC<vecR`ftVIDDk)5LVS7etnJ2N3<`!%@w%)NE|D83C-m?gXk(=% zdBP$0c6w05R^UdUo*R6q+inprb8X&HAW-3rJ}a^^E^L_C{(!rAtc=&Ba?*T=)Ag}? 
zy$oR^bb@q$dU<^DbXPtvzIG>-V1`;xebXp6xp{Oy#7{7T{{R4cRGE&F0^$W`nFoVl z9BXZCRK|_(7hrw-8ddg!3(M1;4qKryQy#5?m&_@xa4GBCx{Rtp`Fq*$$BT^W!KFvX zl-Bj7%g3ea%l=jC1=mOG;imv$+XemmqXcB_)7TUZl1qE}9x{q0g_lX^i$yVERfQHo zN3rHVm;j(Xt83UIJ=ChgOewY8dFgbxZwB8)uKSt*a~ zCJa@l{aOvk7Z47eO_TGIJK3fQozqy|6H(Ze%?u(zHlI#9IvL{-BquwM9Yb-UKs#Bs z`6N3N+~ZC4HPpKOUC@&?=$!(!oTH|Gyl_dvgK;`+wBdDe#Af^^@)K>>a(r>4QfFA) zH(O?Rb2hc(L7lzSRrULVeq$LjRI*n{WA<{0-iK*1b&MsG)xENj zk3t(PP*E!JYwK*;-CdR+#LY4lZlDidNgQ8NK)xOU=i*06V``)8L6q>aD~jKe88-au zipPcSINtUu^*g2UqLuJ%cmuE(x61xPp1aCxa8_hf*`ZlRSdc=4^He3}1ZLT2__)q8 zV1FCli+!OgWK-ZpA6b_xDLd+EcsHKXScN-;yL3!b?DKwULeS4mSM^Je87-B)1S%&U zKwLlL({MbP$KldCjhKJ4a5j>EP!eAwO!O4$9N>{%K#$Y2LfiIYXtVJ!IQ#0cRMtc6 z0q=DmGg$elN}Ad(fb5$H}11Z`m|ap9(FnVm>rqw+NcNH%HlFc7Z=Kw=|1v!b^u|Bbp>Lt<=WM`fu6O zqD!YNwLiUW;NaCRTF6|iA|{nPWCxT?S{;gfVD+~feHIenPeO~yj(>y+5stujrOChD za0qE&?)hVG*{(9fIN4wjVNe;%mbfC_;YS9O$PpDlS-4kumOp_(kia?wx zM!2iV_u8~oaA2rV7J&egAZ3d~np=!7{asMCBDWOF;$if45zlN-owL`!XgvN9v_a9>`nq}Rb=kArIzCqT10Jvq){UB0l;gOIj`8SDY#e(IPSByG&0Ytxrd8b zd&Bw?-9EpvS{0n6gt@+cb;Q^AH81;7IbFjNmDZ4<@A!U#ajQy!N^0hd#)P300`cp; zGs}`?BoD)_VP=_U0#l8;qlyEi{UUFkjgf%_Z~lxBJESKzzgNj%e?kvA7nKr7h1+d! 
zVZF9tZEFy)xX)tkEDmh~aEbvGBEr>hc*2co#wkXF`UEn|2}9OK#z!)6MiZZAPn{zlS|ipi_(;NNxD(#r)TQeyN!h z5X>_A&UrrL>B-Q{+BcY1Fm5kI`T$G4Q(nP>WBPsm+}x&y=h0-U7dEmN&su2|7g4`4 z<*66@TJcaxlH`gI=z*EGtyMq7_DzB+TAuI58%d5&mI-A~lHBO%xY5u0gz94jft%H2!`dtIj6$vnPltlgy;#mOgxgcZz1u?H!@~zl z0{yQ7f>H(bbo^$>Ua9+!6c@+^1nfb%>4Us_Ih#)+>gOY5K@-6mL!I4ZNxqPYrwhN= z5#o+J_TL%k)a*R=9@U%w;hpH@{*4b+O1ZrA2%fcqtsH*-PF~RHNbzu!Zasti@1bt` z1fl(8<7s=gV%~68YaM3-wemR_sUXkyJ4~yF7u_VgX=N*xN6yVxmvI@cI19Cy!^v$b zhx$*2Tr_U+T-2WN&KdXX=l|5BL*A+TtiQwJ&b%N6A#q4Swoc*Eg-dl!)#4*ZKaaVASb9Tpmf*Hm~cu zgGk+4i5K{b^`b6%iLFfd3Cm}uRMw{TNf3S&&dco4e(gPu(RE1_Wu?<=?;h+OpkYjxOLlZ=t;GM$8m^H;Zjn9+ z+fB9~oRyx$chQeG3;e>pck-T7SF2AHE{zJ8^K?D17en+On&q}9(1a4wpf(F3nUe5c z9U5%R>m?tz`74nPMo%>MijN}i=3icl<>+t9W?rTobSIN#)#+{9x5NJa(6NsiuH1F| zDM)+@tKteWEO{{_hzZ-Wht1MRs3l1z zymhj67Xw&xXa|zQBmD#PE=q2(ELVro>r+Zy*}e-b`Ar=sfIMAOREy@kth7C8Q@6(BY%IPL1@wAxfHQfh(iXN}z1uiJNmjwKBi9T5W|wWPq4U)RJ@`x-Zxx^PpQ#i zT{P~x9FLR5GEqfWnF3{{uI$_oNu%fmX{eRK+H9UhB&@>wTa%%pbT+-?1}?Q*cYLlC zEYp0YaB)60FS(0GveE>kL_Ah$t})M-o%dhj>$k2qYYLj__{pVhxMR{ zA){u^d#>G8-R8vW#N+s`?c^>&eQdh3Q}myWovdXOmf+29#(6wqyye=b!v5(O_^Ocs zeNkKr<#qOHk6|n@d++;deE?OTwL-lMyvmVVS4ln>bBSg)3?*s+`DJv8@xM1bbs`&7 zpS@*fPtG+V7J5|@M?%^&^;T_n6)bh|>j#yS0VSz%jbfi)MXd}qPc>K3V}fviEaB!6 ze-^9&6XkUVfL@|S(Q)byIwJ(a!G!1{-+x*A{eP~Q|L)#QANQl5ZpLpfypW<)!bfrK zw~?-iJ;P!{`JEkIIVq1Qo@*bx38$A@4ZTMk?X^gwtcch$g{;$f(5jNcY$b=!BK|VR zCZqVjn56QSvQcB4-+&$H6r|rBGf7VO-3a#MkJ6vja%<`WQm`o^iE*Dx|Cx10-g$gy+;pZ%VCB~prJ0@7?e~afyxwl{IsyuzqZNZ1(85(f2nu10ax99SxTw2RZ_T<3sz+1?YS%C24_kY ztu%_My6@cV7IE&ebT5_scBql=nD2gehZNxW< z?|7I^@hjkExqUIs&!Cx*-+PbUyN6soT;+8efYdMAhi(Wi!9}6Py z?&mb|9rm%VN5LU(jt#nR?;PY$lE*ZQJ3zXQ2I_9=>SYUej&rGzg+$R_F)E`aJRiQZ z@0KXRaEqEsyrF(XFRO_ea!C{!;t6lrzyD?2zuQ|JADvOzVq(dfKA8}BI3DIjd{MHA z1+l|K82q|9k`kJ}D2l|QN=c*aVM7T9N3or&wT9i9@=Uod=v?g86d#tSB*8fquJvDU zC7xHnV{8m( z0(ER{)?(#C$r5cC%6 zI5>xQ-xEL2x%Fqa+;~41yihEDp-?rg{j@IIt0OD{-s_JxXWAh5L5|P>og z>Mq){u&lH7J8I*+XBr_GykpIn35Pmm#Jv|;&XBdj6_ibBi$)tD(=hgddw6Tb4>NW= 
z#)Qc7?Xrg9nJ*ODati>`Yy&b`|CB)DpP!u@JV;BuqAY2Rno%RZi0I~|O}=&2!_rZ_ zP`$j?7-43`-@`Grw#o||I_k*X|udPmO%I~!{7GiV&E)|)FDPaD2@_& z_{Bs9j5hXu`jU$#|5;fEW>(s_ppeP{XVQD%fl!Gg;D*%Vb7dT*Qw#_XN5b`Guw{AZ z^N%#uIFW-22LQ9|KGR`IKeude#r6XF9Kio!>#U;M3cqcSwYa;}QVJzVON%=cXwl#S zg1fsDw78WPDegsr1$UR?8r9tj|eg4aL zw3P9@GZ3UPXz~p91ngyDZxK$>d9KWN(%E`G#sBhY{ljzD;i>)M;?Szu-J3ewyXMoa zZVTP>TTxIcoQgCP6}h|X<9XJtaCAQl%{@wBbMFOZI$V(B^ItmbSMN>)n^FZDL1y4( ze%blLHgiqQk+h7LJWMSsdt;|1rE)#}@93qc@g`7B%|V?S152LNR;I-Fqwwhy#9==( zoxaWs#S9>k&MaDZ-HGeHTlySqqU-$hWS*p%q0@MRSXm=uutR>tl}1~oKR#&)e(A|C zZ7x@JeD^b8nnVt-cnv_M@5lgn^%ioAK5OgNXAyOChX_%#3v$0u_DecD;+9aOxbyy% z%0v6d&Sa?pgp!Z0R$fV|%Kw54wzFP>p&>~*JAC<*1-GXDsvfp zLXM`+z9Ln>y?ScVB_mxve?s#itrfv@1DGBtnBf&7<&cm&5$&vX zePG}665(Vf^_iorcx{SjfRtL)rOh;EzU;>m)n!X-iB;McKv~%GlyL4*B6m3P=p_<%K?xhRO@7J=wHmTBAgVd{>{T$XCHI@_)d)H=F3#gi`W12UM&NRSPMs%fX-#I)<*e1P#YtX=KXc(NsVf(=FA@3dAd)&-5LcrWeM25fC0J}3)R4LkMuE$ zHe<)2yYoklLpPhZPsYkYL_SGsOzMDBvcP|flOJyqfhz<$@ zeAf7LvA$HOQ8Kf~{}c7*)oL&|a@i3E%cwzzE%@(Zg=2nMS2a2n=XZ_*ujKMD{m~b~ zsnvxc(%N=$z8g|CBB*_!*j&d`Q;oNs(Z~@=CF|yO*ubU{HUY^qO9t#GQr^2|4Ds`D zsh}Qtg@=yHjdQcXJ~~8qkA`RN9vShmfSjr8>wu=^RBxS09+j3*jf%k*2z4R81N=*R zxF0;_HGEb)5sOT%c)^i`f2@aZum}&-je|`5MMRqQ`IiSTky7{xZ0tndlJXw4U%g5L z_N@}_r^KUqy4|6hEF^S6puHDZ8J|AHE;U37$9LDMukT!F!ubm?If=QQcLYta${WBsSWxox>)pSYq~05_OvvPV2x`T8YH$01sa1$|?KQ_6k%R zrg^a)qtg{^SjfLMD*^5+l3$eiB(>g(W?jq<5-|`Bl*Wsi8)3-!C%O7>g znyA_~!Kr7}`vKD;*>}mwCM6Is5|>#aZ#(tzB@z|J_HeN`TRu;M4854RDZz<39!A`- z<)Q2}_jA4uF7!JY(kX_9XsL~*NT$|OFPU4DhKiuaYnSRZo}W)CMK=B#?vBj9TpCLSjf&U!E!opZt+%o)O=KFUp5r(^MM zS0(YsfFX9I{kV?647w>vPm?M9fc>O#p)l%3xA!|_JX%Pe2=uIN@Hw7`V0;wb)Ki2n zDm(${C2$0^Bqgg!i?i7Ide!xh2Ic;JtB39s&Cg#C#y#-3Ly^3f-Li}gxulmAE4o^# z8f3tFEvKJumWE$ax9h)2wmpeo3El{ExRpecZ-njNs1F0U{dTFo)m*boPoU)*+ctM{Pk3;J)N7h2V;L&yZoxRCaZx|p8yOF( z@L{*Gx3{Q${N)K_HeHx+m*i9C2s3{+lI?qx>b&J5}yvy*&rLLSAoBc z?{Qe7ELzW5P``Y)aJ@o*IY}qbXkNPUxc}|OQ!=7Qz(2$X$%DQs6ZlzVLd2MNR9nh~ zZ>^sPfY>C`#^|rCWEY_thpkWTSFBPCR7&jX|K^YfD&$8(g0gi;R_9GabkcH(W9e6+ 
zXP--6EB26mWX+`rDFfl)t^jJG}l{F!fy^E)$a zW6*ghVr)VNw?HJs8E3Hq{+t3!>Fr&wm0|}cW&H-X-R(VrJR%?PDpEx-}pgOr7y^G_M?(d zQVF2QR;beD{$Jo)JL*Oqvno0Yu-z#8W!HTX8ClOmh_HtIBW98msA07dtKzLX2mhZj zU~;e(Jjj~3#9Pv|Zb!8)@Xd0M?|}%QXz2d6Ol!pZA+r*qC~$vNAicxuvCxE#4_g5} zie?0}EJ;;?Blg9wPXzfYkA-aaoaPehSX$iUTffj0mo(_aAvxIW`QAC}SlYRGCn~2a z6B#RirTHaZcYr0k$agF2eP){%`w{$M?R5qn-OE zD9QkVMuE(dAIOF!p&uR#5Bq*R0SYgSO7>MgCX&o&5kVPp;N&O(l2wfsYj;7^U}nl? zS(l!2N{BUnoG*znv|;V?p$S(k&-cnEd}9KHvG-|(`K zBP)tjBx))ekt z#fw#{UhchAVbMOU_B`Dx+)#I-K}}aKqoc8fYJtRx0wUTdjDUWATn{PBxcG4CFGNN)Y)n~ys4T=h~7 z(6)cpXCg2L7JJw-6CoYXq`&mGmylPT9A^QhA7vuKc%x4<7;?iZ+AwR?0!8e z4<`rZkhsXRQEj$D-E8-L**fkTYq1SjJVz~jOe@!+y$Agg8xk{BldbdFX3$Jz5Rw*o ziku3(QsE0Hm&O}^MEJhMW1X72|7SBXF*y^w$cAcP^Xg{K~(6&?;^FC1U#M ziw^3GLLqH5^e|%L{zsqYOt8o>EB|!tq*LGTJNop~p^eN<0a0_)-mgR1U0ufLgd+(b z8bNd}UQjn7>*wF^z!~~acUnh%TkMnXzB-B0pD-tBRt5fg*B`^I`+f8zec{hL(1g=< zwfw(H!+fGXTSG;Y=FjM;c5Y1&=*&BPglyVR>VGm#-$#7il1IQUFqKXR?br&;GaLaSC2i+-TqPrzN~TBY}}St4IE>$ zYc5qw*!FSV;oYcM{=E)s8gKDWW{V@)~rR z^Hak6r%%EGvuDdUk@ZJiqgx3Wh0OG0f}dS({}3Gx)cdt4M@yvS+lv-RSfQSBNd zWoFrS?C7D^`bf3F*prh^a!%Kkl{N#=t1+6Jo6<}rQ}ZfBOS zodLb937>_}ho7TK(TP7QU(p}b=`J}0?|t=29)6!le`zx&QkhnU0LrVl*I-AciP^en ziWOr=_-V`%SG~Fx=@zj_j5t1yB>yK1T??3GhY~qcQin}2r_4|U-GRoh?qcvJN(Rao zYB@|RzOfYDJ&8!en0_73kX9a`|263_e4f8L`60Tz`bKX|7|rLT_@oG*OwAuu|M~~#kV4SF-(fX_BFG?n1(M+sZOY96Sa4G0 z_;=VRwZ9^=T|uT?B=+1&q!+IaidUO^-R1UP&u3ah@xYmGaP?YVT8!>! 
zTK{t}q7w^=pVCM@&kLt#C1<-MVsj6;l@KWALx`_V&NhK`ifr;Hmy&qyyGgRuQbe)7 z-YuRh9zU&h$q+mfXmxbfu3WXX^o8G41MmGBC=q%DK?Qwbm>&srj>HEqq@ zBBM_OoUw?>KEa*rt=5&DnRPVZZ(b7>ZSos+O!qH!phraT;q2?(_|yKuSEr0GzOsG1 zVYp@jjRrwqXFi!rB_gZ=;C8wz-L+pbP#rN?{DA7~2dL-o`3 z*zwov3(Pssl0C}TlKq3RVSNKbFERH$)y6jX9bfMuA+O~?Y2i^YRobTNzl(r7N<{;; zDb{tpTlxL5BkrBbNsA`L$!mHkG9!|!ZAvJcvu4P9yhV(9mmm38>x8M*dd8lhm&axN zt5Buc2fAy&hAEF#_1gRMleS#MKubI6o!wocXo3Awn(4};=>Ep1m~50n6sC5pN*I?Q2vJ8;qSeY6)50f1`hD*SZO@StbIs{NP5CM*Jrnkdl1e4$E8#4+@e zZt<7K+xnJFq`0Ok7L);TwbV=RVEZ16_(DK|)`yn#{J>c*VNGrp9{~8|Ubh zmZ1QOr6QjSx;H@Kvv)ujZ3^uckXsg7u=n2;%_j0%+UTRd6C`U|6mtdwu=rY+2D2b= zf3KBf-o|2<`R|c%#J#R&{rb9@P~@%55Keqb{dIrkFC9-uyx=R6kKY!)`2e7Y^7&?U zmmR+`=iMyn8lmr-7juD1xvNvc-HM;U^a+tK>rg7K+!-L987mf5bVr&f{IF}rrOLQI z+wt!A!8rhIPtRdBz%*^~a4~v>i$?7RD=UGy+`7J|>nIdo;8&=`dsE^3js%c6;-oku zZX$k4dR=~~`os?Ep!7b&$kYFAbC%PEKy1_+9X9vJ;|!okZhYPe#(rk-j(~Jrv^p*p zTp^zt$cs2ogRFj{b)$30Y*=8a>`^KqvAS%Vmn@-OZ$-LVmHog@mnE7Kp{a#n{^aS< z5%P=Tr}5t_y6;cnypv;yGY1n?gd`0gxU$UUCbN$Sxs=$Iwip+Q-?(20>pcFBVP>{k zs>ZH+?v#Rit=%l!M9elUp84UZsBFQCnd9;=D>gsgJ8TdL`K{WNJ&S)rkb;`8v}FYi zvOJXKNW!OQ-5d*}?-K%fNO^@OjQv8QUphHw^IgSTpOG!wocktLdsNR|H=FoT9a*6d zg)68pz;s#dbApc<5x>6I)Tu!&^(dgU*wj%H&`s>JYWrQEybI(<__4N`d*4J?04i8# z|3y+&f9zeCk+iIs9{6}nfr_MT#jL>^=OGSG`igv{$g&!KezVvCTUr!#5h4E)6OJ8} zKxSFtb2`h^7-B^4za@fLdt(Rdj-CIdiFS@o)0q?Dp_05^#y()h8F8Bhc&aN6WRo|; zdqTEYpT@?f-JE2XayhZ#pwwt$gcAd?>THqXBx}McIP2X|JxuV@EpSL{>u6m7U&2&> zPWRcHAbO@=;2MAkcmD#=HW1y}cZB*oW-b?U_L z!uc(k4=#A_p2EXLTV7of!)Ul?4y=07-2gC_3f|}|lr8&R{_3BLXzQd5f@kqal1Kqz zs6SH6e=}59--Jmm0iD0z;Z{m+D$cI zRbghwgZGJ+BI@bfPe{iVcF0kH1h|q*c3A5rgl^ndx;uOlbzC;(v+ONJv^`klnSUE- zF8uiG7(pgp!M)d`FUXO0*gjnPX7|C*V=sG z>Gn>w8NgsKbglIdX-s4K;;uw!w?w`Y%pThXuZH%y+b}ZUC|D$KW7}@rx|luhKBptG zQM)-+*vB?Ms1E{0X(9VB0q~z-|H&(S8LXGT7X}nw(D+M@YHzn3w@5-%UYv)#^kX#< zTddSx1z1O$&L=P2<7VsRb%2a})ypC}JXFdF34m9z3tlH#bv_!e%R*A@uKrz~i&C6t!e^^gFdT>%Ps*J_!;=q7fX7f%wiESr!7 zm%ne9!~E*oi=iScyPZzMjkgi%3-`5gs*tI_vOGryuM=OP*0hZm>ij@a*((n>|NEuN 
z3*M=?f&on(N95`mgm%UC(HXwFS-qJhgBP>se8c1-OBk#zdZ;KwLhG=dHKdpiDWeb5 zhbx;X?f>DU&3_3FD&~~6F>)B<(y}JOrmLWqVunsAqvXmnXJFX*EE4%U0okA>G3QGv z!XWmOvZhsQtgLqM8JJuNNzQFRiWr;N{SQ}`f0_&!tI+c=_7Yo z9I2RUT%#6vnDR&CIbG3K2a6Tz^Si9V6K;~z?(#vf4|Y8+t$eA4Ws*>N89X+l9@Y5f z%btZ{v#!Q})i-Fn!GXNx#*5#FnJ((TrO1t3uqH8FA0St+MMo%6Xs%Q?p~ zi-s#)%^3RP7YNmR&t7emgQ&l~RK~^(PuYL^)aN9G3hXOJI%-I;p3+{{x-}IaUcCcY zW2=i)*8OC|MF2iHnZk6SMbW%S)V*;!tl2=NJ&IQbimq9>dj=*ykkO~QA1NIpuzszS z^(soy|EPDPE?UmKKP0yEoQNCBieE0&SUgf=$v`5*c>(Zmp>Cr5dDkvHO<;$dU0UMO zeTei8?JM*ITq9Lx5H~M$P(yoZgk!Yz11eIojC2n?)_gZgxfLnzf%6W&0lav-qa0%f zPps{Y&_41ho+`#uf%nAXt%mMaZVQ#T^iqk=1#Xi>Z`d^KXYEQ{h%;eL`Nf%d*f8~r zzngQ3*wzDM1g2cGo4)otJ1hLRrCL1_*SeLn>txvwNxpTbZ z_+jJZ*l04$9G?r{y+#!JrWYp8Z}yxBjp$&>g3w5X8-_JRocwe`y+~=!Q3jK?Cyk8$f46l9^cB-kmOkJS#xdR2o;&2wIdlO`Ci4t9=#*3|&; z7a5pEyX(3D18?UQj;vdBE&%)$eGyX+gG38XiyBncP3+UYuhoW(@AXu^f-}u@+f>XN za!-WBRN_EKTh}a_#CgELGcJchW^CI)MI-`x&4_|?V4b!JZLSse9{;5mUK)lrO zx9`l0#{CRhp_rBLJ3T$Q!^^)uJm&N@O86y)qVHWOa>=vcyB1;cT8t;uhc>CR*ruQsdXcu)-?`wxojJ-4h)t>{u z#=X1`^WD#v|N zJ(~b&)j@StVl%=hf6+bho$OfafI4$aqQtEq*=dsFU;R#2SCOMWwAINII?S_E>f@P4 zA59lG3E20MB~@f4{k)bVjF#4i!dU8--=SNo{c+|XQ}2%8OAJ8>)@aN?j)PdyZ>mch zo?Y!C=R?80NO)sDm3QKNt4Wc{-2Ps?tpaoHza85Gk~=(Cx1b#Jk=j`afUoA{l zdvu@dhx%9&20+S3X6}$8P?Y-^^og3I+nC(5_X?O3A3-%5m=NBQC6wMVyX2BQpQ6t1 zHn?GgDd!(#H|lPi+aC5BHQIcBe*BwpokP!e-Uyo_dK5pD!74f04xTS~YUVPRSOw0J zU%g8t9Ohor&69>t*N02IxXS%{iD&q`2|pC#A6cw(gmp47FDqwGjeaxMn(;9H9&p~t zuKm?}pFN_rtJJ#ok3xxy!{m8%aa%oc8*&|6zDmFJK&Dck_(juF>v6Chw}0xp_>X9@ zeQ#0fG(MAHo~BrYEO4%St(x_J;m9@I)cAb(m)SwIqZtzvw3zyYS4p~i!`e8cZCN?kLUuO&TuY!mq=~j&Pyq~BuBrk`;klzj2OYK zbBE;T2a|7MlaXQ!I3}r2w0JgHP!7+?<{t)Hzo$Cx3M2l3QTAq;LGLvgx#~FiU_?YO zUXmK<2LaIUt%%8!(Xa+ldA0$=#J_LOaY&b|-4g-zRFVm{g)1zctu6;;fIWvSbwc|< zUGNr)8%aG{h4~^yYQvq%IN#-u%vK}ikQ{uvzF^II2>h>>1ytl^<}_{ZmE}yL2~x`M 
zsf*hC3hAqSoi8Ik!=h6$+sTF6qxs#on>B}NIajF!=3J^#-v0tsmwJ)>EKST>!Q932;P6F+Aq?CcB2$LdWwH4|%|z>e->huU;7cM}NW;-#6j@lUp4iZMC}qxH>krI)CRKuxS*q?N_u76ol|j*WMOO_-<4lrstkj<7lE;cGy#g_62=)m2cyj+F^nywCY;J;k`0Bz=s@hCgKB)geR}EiL5># zf-$Yr;#bN$ld(hILrkY}dFREXk7cWyS zX~aS*ul+o+k`Q6t9u3arNTr7M`=9OhyoJJd?+=>KnY_1F{D!a)>`<`Zzw#O23?%U1 zwfzb>|J~2_8M@wdh;`dTB+oJJY=XOQYyzk78Mkt!(8C(N(?NpcPRYTy5>27)!VCsb z#cB&OZMc2?E$4Gd{)v!%veJ{aA5HeRwHC@PESYB3)pUQxYY*!EeZ)R6wmUoKKtIp> zf#@4U0NQ^atUWsFX*oYF)Eo`G5|OPe+ojC9~8>H1d0oy#Oi?6 z8nssNp&_m%WAXt~={Q_+IcR(&N4KO!xGRQTOXWuR$(S9M=9?Ff0b?6@R*yC2O~)WL z%2VJuc@>6|^Rl#C&+3S&cDWX+_>6(c(NW{8rB-xAA2&{X`~K>Sd%xev44oyuQMY9( zpN$~q@4Qhitm&0NG2CW)?@rN;1moyq)DqFS_z#cB2K3+;*1OBf;1^DCcRm0Un0GMhG`u7rZ*(2BLP#nA*6-lO;=t!BX}1* z(|Ab?=(kuChYn2_X~qz#p-+;$%q@SC>eN(wD%zL6iE%=UL?=o3C~8-bh_m8h&|}yq zK}r|Np4d|i0Fx#2_5PD^cF1n!atCS?pmvqE{&)t~SG=E_5EkL1=nB3I`juC(e1*-l zAkw<0fvVFO`U=BQyRvbG9v5QFW4s((*0`F)8LSxEa3jbmko!^cAC4_-xu_8TacwO4 zp}uM`t3pD#xb_u`!4wC{PZkx}y7<^juIrKeTKcx&@ean+vSj$g5x>4ce!OxU0Z*z0 zl_h(J_K}!vldFf#i@~j9-C1^r5!>CXKHs`f)QDFz=Tk3g^s7$3*p{5Du7z$MiSOT< z|5$qJV2gdP9&3aP)E2s~mU%h=5;grmkpi8gvqauOure0gfHD1x01B%58AiG(u>({M z^!lYBzg2t^x^9M!BN|iXiM7Ajty+b+zmMCmI8L<7;b%^e)8mPKXxS!STgctmm$p~H;vaj(oXeX+U?SImsmHjguEFjraspCF$!4mjr`Jlo&O zI{oc2?n6Ci>+&R3Jy0Y*_YIaMaV)7d*@yrSsUlb_A8Q?btHX3nH+tzCB<&zWr?x$F z-%oK1CQr;&x{$)SFG?mG)iA05MKW21WRzqoBYygws!o{Yj~c~4v`;U!kVeplGfQdV5i8-T1k!|Z=u;(`rGpJet3{F08y{vp%+)hl zts{x-I434O^?K!IgMePIz*QdRWpJS_OKt^DcS?9=&_s-DZnlT>%WvFj4T(2(??oDs zUv@N`s%mVjDvZSQt*TVGeS3=(b^`KTxvc45zzCucS`u7fNM~1>!}$Cj7I{qopL~7d zI?W)}dFE8_CdxD^^x=K;vN}$xWm;yNUe{g5Y3VO~{8YE2yM!B~R|jDHkPCh~7d!_@ z#L1;cN^I36VD%9G$bZb~h;`si%@=@e;g#ZaMd#o$b?&%0Uwp7{b9{f{91Ulb?Fs~# z7OS5@Z`_NyH@~js@beLiRvfnj%-7!192^Y07(nddeZOtYFKn0Rg>(QQmF~1j?F#R` zXaFe0YbSX(S#yl0VfQJ>w-5ck_S=7&{&dMHp-M+;nSr~n{|?eO=3T9Q*DicxN!7fLgqGc`UG0LP&;C+4@Nhg4B@MZmG+4Gj1c2a z_`|A~&E_qPt-}ksjvu3MmM{|=o5SMoUiW|)#dLbbWTR1);U=dQhR<9}Olo@ao!EW+ z@OFBJuF8A`xRSan;C>$;UDDn3M^?NCr-Z3x!k)Q`w83S@zI36E29?AB%RjB61=sAW 
zW^nC;g-Ngk4THDl(bDM7A!x_0BS+*7+GY=|=r!?SgFjGd2L3`5IcpCbiEHXh{=t+Q zPhHD4Q<|!}XR|yo83V6CBYOI^KLm~G=Oc=Sn?r=QW*>`VmN?KfRPuH9H*#Js7Fq=p zJXY?uS=DPUmMwcjVNeMMH?*IZgaOYiY75+Y^^u*rN`jfN0Rw}e;9mXeH~)C-Aix*{ z%+02A*R(PEnxdRWg14cTh{&^i+B4axJd4n&9>_NwC$8rfQ zB*YK#;9Jc;{>XfXtqEfF(u&7tc_drcT{JygiyCv6#t#*Sa=x#naEn?!CHuj{3NLLN z(iRwNmW(U(xt*JO->Jo!2o8@UdVb>&^cp*QfsOe|`AO~(b?JpJqT&?5RyGct|Huq$ z7pI6pYAtc>mOwG&)gZtaqD%_7!Mpa^JSr9zZ`sD;F6p0D(EUSL{&&@q#g_mr7o=S% z3gXJ50AP}Tk_6g?RnYk_p~9@ovMhQcY?Pr98I+~CpEY0jTA`rnp>AhokqUCX7E>+P z*rg|}ni`Ij>~`_6?L-?4svgZe^%f;<=C8in7-YgQ9tSIBTNz{ZPqE?esFcvzPd+zC z`L`6_!FCyHacpH8G`gwKU$9)>`;26y-ETYRQ8jg0z!f|aALl3eh#9T4fD4+|xMe&0 zRGD#9Kh}}*Nd{Owl07#EkPyhr?ZY!)ojqIMnW-)eC@WovG3u$LI+H@O?e=oH>`CDb zV`<&m7)*bf#3twG)oEG4LLb}iBJ;^%v=wDP`6S=-#lu|(I%npA$gh0mEwm0v{{_p|>2EOdJy)Esah1y#io zPmJDCnvO?8V})nO+h8foV%zv2(1vS50f)#MqmXV9D1`g#D9ZdXmUn5N2oDviML4xs zUGoGCT4P5oGIm3%hS#$oakNMHTh3JZrzNF}5L!zg48NZJ=-!&AgaPJ!{HH#rdT3>w`k$ao2I$S=!PWLty3lRhiDWGa)#Jefz%bm#~wb zzMHwh#sXJK2nP2ktXZ-?CV`Ne3~6x(#<6OIgPjzxODGkmGv4r4$3>*4PDD_+_36S7 z(!AQLIC}L?Was2);eNXCV2*L|Q_1<#OrLrq>W($d&IzVQlDT8qZ(h|Ia{;|?lZZ#| zk`rB@XSUWGpNkVn-)Fwj>6w#A%`YM__0h-cnSuhojg7k#fdq5I!rPb1bw(XwN)mhO zRN_F{&=aZSv?Mc$o5map50~^AS(%(d%=kwjD&HY^6U2oLrbGK-9>#3?pfW3`H+xd5>@FgBp^oo7jmtSKZ zlJ9R_aeOp7f4Z8lE{l^o4S2H6+Dg8zRWn`KUU~9^Ew()_)TZG;oN37bKKP9_5gGjT zZ=_*?!T(z{Uamo;^?^)JB-_71=4XD-Jr%9mOP~%LzKXpyTXK2AG%Ed@`l-hF!FhJb zl+s{6pGTdD0ioWAGe1Du(I%0mBRe6fy{(?$tyw=?W|geH#iNlyzE+99c>hOCROs$j zQ4(~v9tacI?gp73?gr_2{__r76?4sI5hqAge*E)_JmOUsn3;X84F~^U!8LUz9_@8V4NE2DkpP2OsD+zKyKK_cRNAo!|IMR74n9X}n7+gJYLA2)}X4k)h?;$jw(6?m%mIK#<3dO$r|#Tb|jF4D?C`| zr>2-m2uW*5=TJH(2zzm+j$g3(rO|rCkMoDl#IO^_;RwFt%b5Fc}DZUl@a>0Ywf0YL;hX!)opU%`oM zbR%D}-5Co6Zat#DK9}6!lADi3*ORfJ&aTCNOiHo?dm6@Z4Mn_Om|^GL2f>0GWpOei zg0s7`9q4p09hGa<)XhjXZCPk6^Vd+Urs?a~u}xROHy9!KO2jgGA2p+^H6bSGA}0t8 z-dJ?hI(dwQ*eeqP2B=6pImT{54s`^@;=mg(w>p_l{G}HKVNYfJ)#Qf+SL_7CO|oxv zfNwWIVBRHkN;WYgwscH{s4WKUKm8R=Vf9d69I;(8Z{IZdFScT)zW$L+sof;Pq 
zA>%+AHb%>YdG;NnRa8w|8u1n+1ELR4WWl0PH44f4s=Z(LhrKzbVSCNQAEBcy6aZJ1 zeIVCi_)1wyT8x^nEWI&GC6V?sHMCEJ?!%gRJqQX1dngJ0!g3b-qKKJK38uI4vxN{T`>xNoaRI`bXrqrOOc0tz|iWe!J(Vpw4QqUklJly&17*-G&@=eBnA`3wV#U8~wD}#uGoyWJSCW_*St8 zfQ(W{ZJdIwjV^)5&HZ-)zbVYqdNY5gI(qr6?`%eK&Uh-0dPv_HA|eeTHHvFD2?)pN z&mq5Q&gSM0QAXBRIyzu|y5Z$3V)U<4Z$=Cwp7%wCkVFY2F|o-@{sD`L_*O_cY}g1h zLsExjQ0F%9O|JtLomE z^-#>tvG1-yV6CV!PXku&WY}C!ndI6oziXD=@aDy&4v;STGyO-$_#ZJtLvovOrnt&C_!W+cAZ5pt(BI#Muqky#^iNqBGc-V_dAUM?cf{*NubQzD;{Kz=xhxNbF!!u?(l~??gWW#xRcS&|?vd z0ITnwo@)b)yHk!tn~y<(H`FW?9&Z@QUP&xh&R4ffja%5*GR+Xw?&&4hzhl)050wVD zbEe#kT$n*^YqaO=3iZ3@7K0hkqQYWe8Y>flZ>x5dHER%o zlTx*}=k=Qw+Zc9UOC4i@3i*RE>6{Zs9($SK6vPEn>h4AuA1AkR0XbQTaPpkO+A`)B z#cIxI_4ZDI+_)B=4DHuUVFNpePu%o4LLwN(v-a+b2qN*jy`4|8)Clu2WdRp5$Pa<# zqH->l*8@?(7QDpl;Yq)?v@a=lWz4{SZ%EddsAdc?Lb-U-{PoaCD(Mc2@aK!Uj9d1(z#JJ%cYb@yv zEjj*#9@@s35vvG*t5ufKg9qkX!6l9a)|mfx5tR;JJc)T5YaIySFj6;A?i_JJJEz-B zJ*x~pvf|0}Qh<;Y!2jmsS_|^Le4f1|o}1>z-k$XsU&H{|3F?U~AOsNZm)uUX`{iKd^XT$tIXGb+3J5R3wK&P2)9COs+T zZ>#5#4BMb|Q2s1PKiavwh-@A#y_lxoG?WWwcArJ1{%c*Qg{cw7ZUzrUrcD z?@A){M)6W)^$K6ySkpzu-ycZGt2X9Rbgz&QJbn7r^P~~GMHz3qg6x2{iAhr7fIz~B zjcJwq++3X*OQshGO0kGbgo+AHE%qSLd?6b_^3JfUH4GYfXBM3RbmQ)Mjs@uwraggo z96}}D6=6lgCE*}!LhQesK0~qW`f>fmW> zq(?7mTPT;zS2o)|rcJlWj|>zgZw;7IR|_16V3SMo!*i(?twN@=SaOuHOEEW&tP|;K z)y-!Xq`_o=3U|4PVs!?AU!nN33w|)(G;p)4l!8ou zX>DxjGZ!9 zWUbCawwm~TtF9Q8z!YbXjx(-PYqAQyLRauN=KyG3B;pQ=xb&kv%poObBRyZ;1) zMb58x>yHADms*bMIUbw01j3!u6H!JsE-pUv(f_cAjA;2B_tZ}~MyoPY^9S5K2@J=Y zpCYI71x(&QMC(+J_>rwx?o%nAEwy+XY^fBQs@O}IJBZZBJ}h=rAaB& zP5Qlo`Of}Mre^dHtM^B@JFQvQ<%z#^4{Vtvse{ zKagkmRrt-EEIlM=Rwq6CJ@+0zt&cW2y{f-k#`mCXCNzKra)o#~Qo7yDDS3OLV17MX zaC_8w1_*i`7ifI2$MMzN_z=6I4M8}&*=9`Co$iPD+atxZ-myAc``P7RNTs$t-?Lq- z!X$b(N>3++7Xz6F68=Z-Ao{jiM{Lr%le_Yqd&h<-HzY6G+tu&&9q!+s^m>Gy(8C1u zhipAAqz*=Q=(>b_vsH{DXV{zw&hu{TT{x|fczD##rfeYRrl z`Cr^Nj)~#I$m-uw>o=L; zyP{8jAYR>`r2AY{;sd@ zkh)RKi?K`Or{u|h*+pJ0_63uieRB{FS0p`_mRFZpA|g9tf_*-Q6L$1ecHhyZ8HzJ@(0cmirzf 
z2iLXMn!h<$aH2?K+yquqJhD^W{xF8)FeuWIuz6zzsCs8g@>19NV8|*O`TZRKIUfS0 z3Lj9F_*<;F6M3F5`I$D{1zINTWU?jl;GWZd=LakZ(kY`BI@6=oA08hwcul7-ZynWx z1`nztYhU;Qsm~jdii&l|8jvLsfq8|_0DEI7%g^1c)?E&HsnBGH`@pgcY#$X8RK#2g z@sIO9rh$hsP>0K#r?H(98PNsTUXK^ii%VJuJEeGkXVd}jrV1rGHUX?#F_N54$=Z~$9W~P ztZ2HAcU(658Il9HkHyZCcj$0<9~D?eZ%n^B^G=D98epNdG6hIMC5&53hKcJ+feS@1 zaD*&#A28mB#x?wgGLG3fcn@`edxzgXk_?6|I@2tF3XmlpRBJeA2$Jv5b%>GcEObZ7 zPW+6&5lX;-x-Os~kCp|}H__RSp+hD*6Px>0$Tn7MeRx?^H>>mKe1rZ_y}+Viek$2d zXxbe}V;9Aw%2D9gejdl){(C=sCQnNKKw(uniF01kR(AR0{204Ol!@kN`O>=Au^&l% zK;(H&Qfk5_M$Qdx^MyM#cluiQ^7%$zucJAWX7y#I(#TdatrBA8^@F$|-;v%$S3HT( zQ}_o%$&wSujYg~D{gh>5&4;Jnx_MP=;3u^h!d*s^BPc@3Sx=^4yF&2Uq(_b_jTmSm zZ-c582a+*)8)Z(a!gTXmDo}%2PtGhdBz^8X>NrgRXstKuT-VbXAfQHVhqLA#PMUF& zKQ%Ynok+9zS8MvlR`a{>!fn%0+#=+^1Zb058_lQ?CGl+f?~nZjf=KioprPuMW4eb8!PQf!?U$x4ivGerTt!9i;M z7g1a1OYHS*lTV;&MHuHA)p^buNoL3H^XSG>wL7VJT`05GmG*Suba<0>Vbz!TH2IS7v7x1q>A;|SDy3ZmHxfX+tFtwcl@Mmg^Jpu zj@NbC&8>8<2buXy!H#StgF|g=5Wi;9$evgTa&UR?7*rrLW--v6x*I*?ODBw147{(DBtW&+)q<^gSHV zgjdzsC(jcXp>jLu>pJ|jD10TCmB^=FL{89OSX8HVi5cu+&W9d=q$|6n1JK=C3NDaPt_I#Rc>q@>T*p7w zU1CfY$7r^Ham;@Ak-45<4`D6g1<=``!n@&v;f2f$erRdg;lbDBuf>mzqLh9b{$X{% z5{x&;sWMSdj+th?du3f$QR*5ybu7I$!+AKfQr4Jol?3B5B!h)?nQt~Om~a~Nt{XT~4%XA7_*Wr5)`exLhs#IP6q>IChLymUgU@o4yKvqE;;Aq=)k&=Iyv@UJTxzG461^veKP6 z_US%zM`LCWO@s2=bUKEfTGV6arjxKz*yCrgdHT1CrO7T5RD z7&A)7McDC1_j$hlm1#cxEL~_4D%~?sv8a_6i>c|T=+DrK4)3V~bt0*T;+!(5@8ZOy z4-0S5R)P|`ZQ5Oc8msT5JcYv7wCp=7C~#VDzg=s{%lRo^M<`e_iN=T|VRyr61C*2n zRcX;}SOL{z;Xw$XAr$3HG|go!zAoEGnA}{#8nxs;K}meKt<-HO-2lTanU1JUxatl<^zWMeq%1Jkp7Y?Ja&e>d0qi3ScI|ac~X_f35Yy zVY$e6sBvzCk+o^d4w!lL^zTlW$_6Mpr43GkVo3H!l%bM`%G_1flgJX!poDE!wzv$2 zKZ3$}6*k&PQaI8h5%`oQodx+ztb zyrP$j79~N(nl52B9Oy-DfOBy0Mw~Pu-6}GqsKhp;mpE>P)Ot*zje16EI+#9+OD0ci z3iJv@)On%5&BdV;#i1(^;ymg-#rwolfqmik)vWaFyO{Oxee*G27yID1siNDl#9A}SV5iM!O)MrQ9%;P zA+~)5 zi0jSqLBX?FEWTHP|Cac1qtC`Bt2U?ia)u!1-~BZ4UtVMYrx>0QXfNh}zg@KhoR-@A z2s1@v)!H?C@HJaj|3jB`(i?qr@La+7H@q*!JL3>btTtv0@#SuMB+>$AebwLe#=h|v{Mj7FUvQ33UeCwCUC 
zQwJW%mXik7)rSfP>uL-VHW(hmrIN2<)}fJ&>jV7j0Y>0 z#MtR1K@+kJ?Q1>2UszdOwJ$Im&e$w)#Yq{w+PV&CD~^X#!|!ncvuG+iLjovZQs&)2 zft5!*_}NIq;_tTWE)Qgy?bScuRa3_&PH{Vd6{H9J;c&A4zI$`V>n1|8(y;#gxc<4* z=V&Vx_kxDWg(u!f?N%Yldn-aS+i$4M(R;w7ZL_QXad>p@YEm0~mEE9oyU)ZvTcUTe zJ$spDTVFwbMEy8pzgQdb_5ZS7L^l;dQ$~-Afq~Ib^NgStAIF5YM&vDo*uUI|a&_>a zpiR%0#{cr^vaP;nJ-Ba-cI;;RtlJ?)`e5%a+G-;PkNeo%mE!?H`{TVle??M7QKIJq zg5ks4zAA~pu&8*unFMy*tX$LE60=*WSyCP>!UDt52&^}QjO3HOLV>G zsv!enQ+B#W*A@joF=w+!$gfmCntRG&-SAw+Ks;^CaU*Oh_nu4kBHU&Fa#f-Szm3(i z=qWqVf-o4aN{B z9OHDF>xW9!v?9uF!BMdV&&NABCPD-8*$Wyod?>~qmBi%6f16l+yn9(bmbY@Zf~tOC z{G3c%J2=E~EaUdbxb8|<>Wo=_N7J-6MatQe-=_j3!)a9%S8V~|u!~T~^0uOZd#8zl z>j+OLXm0*nUOtSlBqH#m+3u)btO@t}=|vREE3%WUy{Rzf(EH}Ks6#hF9I;u+=j>(# zK|**hYM(A+U595x`HE@d-ZsJDq!tMs@uU7(ZlXDfG6-Vq99kU)6Hh8HS;msYZjI+I z5qhs{R(>qgG`iv%u&&q# zED=_2Emudl6Aog5p%40^RZ~r`IKYh1_CGcwZ|@jb4^XQL>Iq*-R5ppV61Yaz$;z`; zM-gg}18Cl0K+rlWmI*qFyhK><*6IgpG4l3I{I0ugfeg?;bo^Lk2B7dH_hr(RYAZKz z71NEV!wr3fFOUUgyrs0RAkO8BUl=;IpV0UcvvqbEUF+aT zP4J9G($FcQbEglpp7_V1q@NmjyjmxBY7|rG9Ci46u1L_y2{Kamf^XE@s-y)1)CeSO zzM2B`Hs$lT_x0%}D&{XsAH6{2-TlGOP`}(9(3^N|S8(nX=X3zLR5d8K{aW&tMMC|S zNDH_@%{Cb2xpkqvDIw$#Gt0}_b74R39V;wb5Zwz%bp>`QkcShV@$_BMQ?~(3j;+b>(6|yXdIZcxV<2n@wpFD2fv?AYd z&LCNoO5ZGC#)Qji+GP-}bn%<^A3W5_|Lb+wU*xm$BLC%wJpk4>|@} zeVBV>w$x<8K!jSR={R=IZ@@Yq4i^~DnL*AE>LM$80Q*NgixDBxaN-Sh*H`ah<{Ft) z#?q3VDQr&6Jh!aDO1;~>R>rX>+0npkI|NB?JTWV27z5#S^ElWsSe#!zhY2kSPRfr~bJVK+`R?F+yrwOjd9W!*$Z>q|BA;IW zbr)}IV8OZ>);vd~8{fhR84~bGt6R^&XJ-baNK@g}?(6HP`!<_YPNNrJbY8WY?|E8- z>L!KN@#@t5;+)SiMC|-u*G3gc&7ZLOG@Ns8nrgN}Aam|&T3)-ptU8MT<_+zUKAxNovO(2H3Azg-Gi05RP~;9VZzPR)a< zUXS{^N9mP0nu%1vax2x>zK7D z%2WWizlBhAFaEH@0+|`+IM8SW;VO;@BI0^5)| zAl9nmTa2p&sBytj#0hUsa%_G11LkH`T|pJAiK;rmKi|MlwY<_&>$c{&a-X(4hRN;7 z;=vR9^Cko!Ert-=a@ocWhkgPlvH=@2VcS$HJ*}5!FU&n=q1^!n)vftfYS+^kUC*;v zRjPSskAK|!2FGLZe1x|cTnj_&<%OfMR0-!F5;OZl6)@?6w$f;^Ux!>YT_Z&14d#PO_T zpJ3fA2Xo)W^{;3nqBQ`=KYjF4*{QX_E6qoK!P|uP73MDW@8w2dFfN4cpXfnJDrYfF z$VZVC_juBOw5fF=6RH|{Sj<+3@G{F>QBca7sRM)`nkZ01Ky6j9fOle?6|&v`?J>_D 
z7_9nXt*j)xA>^oAXh4# zI>@z{YEPL2E^ibZ2+uEGNHqU70CTF-<&)7-W3hHfXO-kl)6m}P^j%uqj*+F3rn)OQ zfOs|j#jkC;u_|kfOYZ(+rcJmC)uV|ebRVuxu2#})&>JiP1mAt4x@0f#{x#vV4oTO5 zCv_h7oC7=PiuQ`k5S3rQ3o9U=UOGP;%98rX3OJ(V!8Vd}{e5 zQy`Diu7I|Pvq7)WaaFC&tyjlySPP;Y-^`rJ5dQ$<;Qc^GeJ?34{pVSrkke1{=%TSi zK8Ji){d?Grp?#hq$+vFddF-k`Ixuv4*FQm*j3a5D2*3qP+gS6S^&@-I5(~RTj&2!f zI$f1C3s-kkX7IDjaB;{eW&`xQILa-t3^teR->P2p4=EhrWgr8*TYcO$`~lo>X_8lO z$xB9Y*7XI}w}H=wy}X@;=&m3i$c%By=M6p)GyDY%8RXmJoz&G;Pp=f{jMscWBiT(? zfyP;p<=O<>YdL~8RDLut(_`fcA7Z%}k~LOx_)i|6jWS*pe=uZX)F zQ%Mb51dAT&8M-ljq`0X5d#*&PQhr>bRTtD))S9#)mT_!YXWg266_HXl)q`*IK?0e< zp#2nSOZ~WD!uUY~GrK;`(cS-7iI4v2k#z{N<>DrYR5|GjXszc=FDCt9LuDMV2Z!Qa zV%iOXqEHf7m(*5_QBwU91KDK9Q=XpPG?gyHdu8YBzJ}JVLpu>OSY;v;dHJUHT;FfM zB@rQV@W@HmF4Fe{cswOht<7$))i=YBESxRJbQ*IDA5MLn@M3I@9=zR z2%LwsY-YS+P0C!U@v?ldf33TB_9u87cZOT#DI0&WLd9QhmA^{%oRVlT<^Rs6)&!`Q zWI<}2%B23h7iU9}7sj7l!g1xpm_aU?ettPLe#OsQ$K@7S4b>FzB9Y&poV1m?zAboQ z&e3B{Ft28_?8 zeH@>o+|GADTR#c$2kY&SaqL{_2U@*Fij`>E4NiDCdHt{+Pouu(+-H1=Lcp>nB=ZT{ zUACQ3+y&1_d>qYgRuriMgq}pvED_up+0Z%rKrIVwBCYdbHnl$VQzyv zs12a>mqk-Juu=Eajm2jz{s#%Hj!yeM#iR;sOZ{(U1#SqolLJZ0*c4JgX_0zRu|W;W6cfLm z$?TYE|M#VRVaoO%cytf4$3t@j1}p zPll+9YSgu&9zw=guLTuwFLGM3aQR;X`+%)z<(`jtM%3yH?i0@6PY(7wTK_{H?OG`G z7h$%xwIvPpK87l&wlH}MoG+#Nzio1+FIpL@)ydv$S|J7*0I3(caZ<+ybe~}##qlpt z*Zf}_c2;*RM_bzyVNdl4?ob@mqt4F?9(Q4aiDvdQZeKR$8ihL8U7dXZFdg6~R8IEs z+`bqD4?gbodZ%Y^8RR%M=O!w(ND$2a@N{HVUVDR%+Q9id%*}ch_lkaT+ zTdLLd`kKwJM#||{Ix){2*kt|LTbA|;ME-7rszXm_%5ewywfr`H!>{hSr0iw>h z^))rFdB3(VM#U^gMK9uV1QIi@4r@mDyo|H7d<%a>A6)t*CfbcTc(43V`VbI4HFUZr zmvApQ{UT)X}qZvKRiGd5@AOP;R7#T&a>qo- ztEES%EY~fib^_kSB zanI8qYdYxsahIk}!NwfruBw#*nPCx+IK0DW)x@v;_7D*>$?1Zw0{Z9s28To$$QxA{ z@)8ge&!+jtUU08b(re0^>i)>JDVydW-EPd@J6LS~*#{T857JKE z#aKF35ylTuu|blA=`Npvk?z=!Pr z9qVYBQcm<;?mKl#zjX8FB=S@xG)4KJSXtP*iPbMFM7-#qCg0Nns)XptTQ@D(?ETNn z5+u#S!f|5+7AWXNiI$@le@zOJp#kafc^zPVEMSNl5uqv#opY7C2OW>dc@@tfOV;BL zQ01TA$`I%mFYL<%T7QWpm-pMUMi=V5c)W(2*6*7Q%$b(6w>$Gmnaq~S1<>aAySmobykmz3`cGzoMW-r-cDSkA*Vl_ 
zM=G4~W;o=b1gPNzR9jTE@MUy?8veI+z zlTw~YqE}CrEoyqs^S$qavBHz|-tE)i@ZBj(IH^Ra_ZWqSCJrku3WszczkFEUG`#P3lDPdjCu9O>ynEKja+%f!KLBUk_ z2w$#Mi$H~;yH2a}WB?wFh!FV5qT)yAMHD{%qv%OJ5mFp{!>R{8KVzo?0uF{XT!k2f zlj@F#vHz?(>Uj&-mLi6>q;1*j45OKxWOmP#$i63{hLI}hHupX_mc9jrtZ~0!d8<7Q z!P$5`6>9WR=Ob(rLRIm(nAY7F>pTGRaFB7K8q6B+dfgrQdfYzO!z#bzV+B_k`5<#I z4J(Ua!XFW!W@&~{a=OwoWlsfXcQIIP%Zwr?+5^NN(oo?G3m7V;23r=Oa4^+rHJsQi zzt#8F)p+)z&o`E|)}I8uk4m}S?_?3?=!smPF|Vj%+@1C_lef*yc4R@#K@HeS3^=Ie zfu`v|2YfId7A|@`Z+vO`c5Zii?k>?r)=^<@;jwq|o#9?mz z0!K7PiBX+n;!n@ge(h9y1ziW1grJ;z=S@?d#4gl37&g-^Yo@*>!bNs=XN_HLPqPTH}KvnZ@incp&lW@N!ZP$U;Xa`O+(1&=3IB1x0!!TPiC8d z77N6!qX!{rdqEz6{9a7P=CfsANy655tqS>kWxskA;QMsJ3ATw|Z~le8Juu0-JwTc6 zlOlSb@?suX%XUY$s+au~(1O+`B!Xu}ul)XJ!FZ{Z*GLRis;VQH01R~%I3H$#CRDJPsJMzLpwhzl^5DI`Y7)lIkPD(`)z(JDW)MLSbJd<{9v_8>Z z{1E=>L^c^4spkKFtD*wmZp8#@(lI;}B^;Y~BCV3Bp=AZdX-_v-h1*d0+gUPElP3|r z0?eAEsrQF#VfFH$lf054*CqbZ928{3bxnq0U~ zBZ;Z_Sok$F`VdU7&j}$L34P1?(Gf5|X{U7UWwB|+SGG%UhAqF_Uf*do`t(C0^Wzuf zjJ?CswdEK@stc)8JPe@k-=u31KN5^_OnpQVPo)5COQda`yV-S)PLE;aVQHM!h0;;i{8K~#0g#)w26dEZ?rDN(vq;gW>Z`v zM01x-?(-)XIU7tOJvyg^>+q@>nS~RP3BYnhz(yVdrnhQ?17n7}B@%0_A@c=WoeLO? 
ze>DmZIW{deC=%Q9rjl;3J)MC-s9}3dE65rH5uu@)gSeLdfBoG zCsN3tdwrd(yUP0Xtn|C#KKTSy&Q)~7x&FWim`dD9M z%rP;Kxk*?5zho8jIsu|YGiNo(G?;ZEM$$vmpMmyHTF`l2E@x4Jvt$+I4fRa(67)`H zxKa(v|Mo~LtszJpP0u!dWy1dq zy`}?pzwe=Q1}Q$se)8Qhs4%=hM&H(|E~=4fPJLX;t{RcwsQP8)U82TFqxQENd;rvr z3e>bXn$mY}NPh7@a_w|ag8UlCjQs!DwEp-UTI+=;D}n=Z%Da*8kHzh&8sMsIVi;ze z?t9lE56nvl%~z_HUCi>VU==Q6uH-UhpPzV4=kD~iUV@rb6b5W6bCo&jIN!T~^~aj^ zv=A)LEMQE*RncC_Y|R*ifs9`Swf-CZi;IIvJ4@p%KN^73m7lwN2&Z`V^I&3=>Nhf8 zPKCm;1s_f;UK@y;6xUZgtI@mgUtVlJCflE8{*20!56Jwk5!}2$u3q?GUf%5ZLtCHk z{F|eS&VIJ-o;e?RZnC|+xOSug+p?zk(D?aZJ@U|;+F;e-dAa-6=jPR~+1sFDyW-u- z*}mA@-V;cTL%Z<)w^q{3;nvQ^&OV>0&h07LcUlnZ#g*l{&Hv*XJCp(31&KXB;I_(T zV9}98B5<6kiHJcYDc83RyIOZbLhC>f*18`gJZzDcX&#DNl zbhN2Q35)q6$;BI+d_H|p`2U%O;HW>Z2A(-;8U3LGr2Boik7VLB!}_yf#%2@qOceHx z+;rKuf!+V4DA~tg@MWI{_0>^~%M`idy&neo7Yck9gAC3%v2e{~KxqJ)r3s6r=is6i zwLAN=t@je4M=KF|NY)1G^ml|CyB^zu^tSLzFwyQg$Dy6YbS2>mFIfUPi<;)RJw*O!{*LFc*rLA>FnwNti2V_ z+h28nz`6WIPj=P^kmS%Es)z=hpgvEqy@db=XGxdt=6$(8OmRTTY1n*5{octryazKF zi5>L-cOJc}w`7|I|N76O7%fNuIg|*Mzz`{q1vB! 
z(`1$UGx))LICx7B%`-=!LakU*ddk?+0j?Cq^><~JJ*rC`UL`|M@5`}}n5`tkKEo*| zHi?`5d!bVMs9xG;v93Ac$HBXj7eTL1X?&$GaXwpyJeyL7qhRCGg-r}GFD+@NX?H3H z$@x)coU2R~3kzysbbOfM);UA72N$qSg3Qo=LbS6S)i?OSHBcfOTT$!>eESONPY&ZK z+c7s~6HS}Jqm>$E9@?9+)gg_sxI#PMzsn*vkW~x6Gy@1n&HUVUiOWkWs2}%yW!;J; zN>|z!I#t*T{pxIz8Dl03kyFhv=Y>773|u|5+R5V6aKS<+wT@tWKx({(=b=P`*Sx8S>UqN9@C-L{)*qy~gl*DiLYmRj*w+zp@{ z{VtL__SG%Mu-oXsTigbZWC7bo>mMzldXSqBr=lU(r6X!LJ+?V>>h8G%(Qfg%usjcf z)snK?@1(8229}AHRMU@3-gbZM$XdrLEWF9fz0B(k7m{6s9(UEv70L~h{#e{_o~iyh zkn3mm*v{RbOt6s=y%)zv#6h4ovwruckjvcV>#=1%%6RhABb;*aV4yxqq@rmXwK% zK3G}D^#}hMG8W0UahrelP+G+ABg!~@)_lih?>XeraOo#fty5UCtDEQB(>6afjsBAx#cc=*H1|yWQpm2RW5s z4i>e(vi)$}-6#>lZNfsjCEf-O<7-we#zV?Jln~letRGOhj_G0BGI-1BUq=@x|(j3sbbdX zv`k%KrlOwD&uZQOY6K*nqQrjpHx>4zYGl8|3cdPiFE5r+%AV%ZV`G!khbaXtt9Lwz zs?JT5qu{Q|k|tmUOtr;8?y-8O@vg8oY5KxMD$XR0{pHQ3T3y%Rht5nX%7q4%a_tx5 zD4bT6^_%B!UNfSsf7#be`YN0sX2LB zMV(CX*6Y3f(eeSnK<&-e!qi6xetX8y07U5NpS_HbrKaAA!puQL|UDqu(U*40E;Q}iS4G-B z%ufOgVS|Z3}nGN!@gD^mern zZa8^QH|P;uBF+}jG#1ZSFiEO>rV5&vVh3rNtxRQ#&b(``4%{)$U&S?t8j@_yBwPF&$_Qu^t;0a7+Qh{oW|xV)L6nc)nG zHY~^Lo3p*Z3BDK}R*c|H>{$?p3>!@nIc{}mM>}bm_a5s*@LlvYMhteyL&1;k0rUg9 zu%Yhf_Ch=|YebmdXx>~!i6d4awnHK}3M(0#0t;BBNzQrxII&xz3Z3TVa~xAx6}HS& zR?WDl*;YhJvByo(TwQ!r57I+~OMyRg=CmI-QZzB-by4{~=A*`dmK;pBf2Ol%T9*6B z{jImLpv3&1bz#wmvHfd$+HmGO3Gqe1D(O6|E58y}WeXgoqa1mEmWy-T%f>32b_?>0 z2~iO(@vz{lcbG1$mffZ!Q&Dh70bQbXhME&!8b4Lb6;8Ki@2P80ABU*$BIo~_DG2Jm zFi*PoE{IWJ^7vewrlHWKnf=FJRjEg+15_H>SAH^Jfd10idk~_T+(GZ_XpPJD+%4KR zYl@l-b$O>spsaC*b!#{x(CERXZn{nc8Ik-_Uo;cIooW>Y^sx^&EQzKpY02a(N0I~X z*si`g>jX(NnVZh}(e$~OffInUFsjigSPJobs@-;G<+k$&E+?7Dx0#lw`>?jazq@9= z^_~rW@uH!Q1$KWexv79&MVl#ax9@e=rNy(WfMe_EB$GqQur1~hL7vFid#@Dn>uavo zI2F;h&=JvST~oo;zMIzPvsg@|Gx5I_2M+`0z-=5z$BZD-vta1s$i>ab^|)&mvu}N3 z#_a~&HkY2d^&6n2^{t;_Sncm>%d?)NMXSYrF^4~oH_sOu9lxe^Pm2U3wmtD3+REyD zEKa;fMUG-QUu0`r$wqa@N6sti!1KVVrB>7DrxU+KYVyEqe}xo*mVK`Kt>Qr03sLBD zvJ1M`8Sdy}YW6*NwB}`6{&Hmd6?AFm>+5uF0({!UF7Dg9M%79_A0e^*yz;T-E@LwtdY01M2+0Y)mEJ-zP#cw(`VXr2JX$^#!6P 
z%`Ee6)Z|7p?BD#e=W|Wro&TVwV*TMISarF|_jh4Dx6b0=U$%4=wz!4k8I0G)mb)=; zcbMT*jl0sb)rQ>(NP4*kl%-i(zs>Lw^rbQek$1HI&QgNTOsX?b2C4&+u%CU8!EkrF zAZHu#8ahbhoB`Ocwox6+E{wpc*hFYrLYg3AP2HWZ8W>*WkQeL(2;MJIum9?3ths(% z7SL_-+sahivz{H+Z00_lGgxR}U1Lt%@`~OG!ealyFKc{`?Drh$0C#eI%7A4Rk?ONe zXa9kjwjk6&@*O%?s7wD<($!BUni}%ZUv;>ezIwau0Ttd`9Mhe;!!ft*DF`ENF=^Et zgvatPbaJb@3id^cw+4+NgPa~ei_?`FZvJdl+t|)<`vf0Zu~`5?VF!ja+f@=F8UuHf z<&?W zH1EhZKD5f`ft*Z4Q8vCd9>(x^@)cEMg;(dRDIDeSVC2{6jejQm z0xshUjo^4kq@?PxJ_2T>OhKfAi!W?rgEu=+=vzqaLz^}EB6HQg`h9+NpKuq$s9da7 zC%LP*oa5jKXKgfF?-Y(*-b3}c!IkLrZh(xpfEVgWmWwtH^eazhM1TTL2NBXX%(?^z z44lHB@8(zl0Fg>=$guP>*<|BRnboIgwlAFazFb6`aw*^SuJ{~rHq1j(3P=tq6nK`* z2x-O5H!T*$&nC(_!V2TJ6?*!xJ<+hvce}qhZ-3F^jZ1d@{mm-ATwsZG%>_(R(%91! zR7ZiMKtcjig-v{MGiwm)m{EI=oF==WV8smk2a=W?4WGeeJAzttbfThv;3yZJ`43@S zr@tnxe8ExKN^&WqKoL_}OBb*-V7@IbQ z2|dkprpdppQPe5=XO%nyrTaEcq6gMu@FTinP`}e#spRNrFSBRg?LrV?n7Bp#+kJfG z^7U2%S;fU=OsZnU%edVIz$B20%yPk7o_K#Pw~vl;?nn%VWnBGrlTI;p;eq|Q-dE)N$c(zwE34P_`erHzwhKE#@tg-UFM{`hb&EnHEB}VPd$)v;zgt%gEUujzB=yL& zBV**xT$1HFd>f2dR2NC_98DJ@Ht7C9qiCspB#GUt3wp02)|wo>O1Q(RHWnS}IVCW& zL-8`2+3gp5OypHb zNYM3o>`mqoJn6#R`{1zB>5N*3{v4s$Okr2X_@ zDmY-fx>dmD?gjk(#=k-S6cL=w+AM#akbqnks#@vzxqSOTet45Sd$>0}D~h}F%8y}5 z`*rlM|Dyr?kF3AZmHuBC#iIme&iT*gC$=4Ez~W1dVN5k)#Ub4b$AGSBtwG( zD5001H5FX@2Y!k!V00MA6!A)Ah1F+YgyiYa4I%PG2%%>yd_R%qqML^73|FKMiQ9a_ z#HGlD!(IineqtpvD4F?gG(RmiC1xr|tFKR=tEsEJZN-El9#$tpujl}GFmE}fduJ0F z9NuT280@@61!7FD{<+5kNdXr%%}*=EVR(qum}MHw49<6A)!Uc(LOgP#q=N1<(fLap z=96_si?UMQYIv|x!L9RGVHjm$dGO+a(p9NCQ;}^KJK0?RD_I#ZUhhTPf;gWCS~*UG zq}9G*;FtgvUInoUdeX%oE-aMBUL+ohEdCW30B?&H3za31y7%|ww?G;1&hHt5YLsDd zl2OrZGWrl}qr?)2VnVpyNbtIDU65H+{X<}vS=GsnF8g2ftTVM- z`}zLoN6lNpUsA~o;<(3^1QrF|l6j+&Gz7a(OjGN{XvZAEt^V<@xP<6x1f8;oO@ zqlZ0$acRC|7#H_>TK(7*eJ)t=G>t2CR_A0DmkN_28ct(k`)SrNmJUD1-u~d64AqxW z9l{shs+0Q`R{UXwHd|4FH}d%p!{>ZjV?hPd#N>jeW_*++exABK{eSm@Z)JbC&ituZ zw$DIDY@9S#vUj_id?V9s7)bwZ2DS|Q-MHo)-OU8DfSKz0vqqD_>hp(z3=w*>V)YZy zSt8r5iofC}^P#rFi97|)?6Km&i8V_Sbp|AEFpDi~jEz-qI*O7s!;WdZBV4Vn#0t6vRA@K*;|5 
zIcR+qYgn*Z#Lf$FX7A7by0pt)yQBH}f+mLdjF#iyah2SqPZg1NEoUhwc-Rd2v#Q+2 z-=@HrRLt31U^I!nq8_uL=*Z@cPelT2OsWl)D|$KS4uivyQ)3YsY@!CjjfpG^PqrVN zZQ!lr!$*iX@O2UMMj(GDS$>eVhRCw1(`~UlUJ6IRNURUIvzakCp%4b~ZtDl-=j5n6Vj&BZ@gzx&TTN<1981G0GyAOam{C%$f zA^AB*YY1C=dO+_#1#VGsgF>N*E|d}0>(#rhOU|#?*J{sVs7&OSYR{WIqgz1}3><$S z0*<2}*6RVAHSX-rE!W4?I%egFiwm^k)!2*f!pq5LrI+rc;MI<#6;HeXe@cpUIinOIYkJox7z z@5NOqSDAWzh6fh)>L174#qd`gRGuM@HJ*=o5>_hy_mRy8QUwl=T@0&s$Ye>P*#E!V z}D`M5+!{$C8)eI{n-crb`AFsSSsC+?^~e=e7Y(`^$V|BQ8Ai2(tt)TPbsax4UWk#%iRv?~@)Pz2}rnoQ^nykN+uYoD$ zhbNp&(}3YYOc2&@?K7p9WJqshSs>~oQ1JRt{Tv#`sMWX(&+HE=Ovy56Kw9lmvN^B> zM%0iL!STYVzJ*M2!)5VDkx}v zCW|g0(gy*KM&R|`neW!JXXl$te!Qsc__XMnTJK!8-nG-hwm#aA0ZJnbn%F36xrNNh z_(g<2Yq$F8N~qXMI70${>$!*z=R&VwaH*sySHi=#;xjNIyx3{PY8&;*N>BIxn2-|4 zDqSGuA@a-XMa3KMll479WL`E(Lc_>>I2;uhg%>Xoo@KQaF-aapi6nuv<(r^ws&}N} zuqmJG1fsu*3!mQkr}+?6`mqMN+79qXbR?jEW$tPmrfqElFY~O~!;72&lfS?GQHBAo z1<@4nc2vedPS%2=IMW7I0c4(1s<2i5IFR1;0DI0jh=$i-5;T7uY%)@Uk@bD$sMcku zZ?I zp~;=lhvf_)V&Hy>XXLA>?JP-CdF{?47h_ChUDD~lH6rIJeYrh>{y-Ce>xr1I@nLu~ z8&U}9`XY}d=kBR+L2j*(y4&8he^4tiLx-;4SG5P%>A%~;5NF6q(CMaBgH|4(fIL>OG=}@b%$MrdSga4oF-!&R~yqyQd^{mw^v$@-cIik5+NmQHnZW#+#$x*79 z&(XrnOhyNy;0+HsJ_{l!2&9OLo?GryyH0vKcC6Q8-$U}lc5=qv=iD*R=-0%tz%55q zYc8O*#Y8ipV~glsCSYSx>fev17aiA=>;p(S(qqa0aWeeZBLz{=D$!kOyE5?m6NWNW z%fq3#hx*zqvhG^^+o-64szX`2cQ^Hhx_5}5mk zBP5v21e9!5k@j;TW$jk~hD4cZjhUWM4o`%sy5)RsxNk8UHMD2Sm?1z@kGS#S>DVb)MF;NCW_=FF94R55qmu-CA4 zYj%CwLSUDk$GFv7&qypt`Ialate{%!aV{##y_hlhGZ4@XG;UfSe8H>vk-tz?WCZsb z9@X0%p+PM3N*N1bR`g51D;%%rb%roRTKk^Q&Qv$=f>-vAVs8(4X)L1sQlRpSSsz$) zv6?ayDTmiLEV?4Ec103cpwYEa4z9P9ty3hu7@(hDTB$rv|)=Ns_t!SX0(40dINR zpunUUK_>o~H{-O_B9!>;E|=a9F&SWhd^X z@Gkxt)}wwx9lv1Xc?8isVX_vL0^U=pfI^PLa!LmVP+uhazH*;1%7}kr?gfm#b86!q zf^MeQ+i#X)W!`E+x3PMgk)DkEz%?}r{#Og>qUq&J``wb*sMW5m^L@OpD&_@ELT>x^ z)4`0o{^!`V{5^DUHp8CK1AO(57-?-K3$DA~Ggo+7l)}iVV|z!>eyg%rRT^2TC`nW# z-9l)oQ;G$_jz%RK_4bOJ%+0?iS-S*qbxSA>Qrsdjs5t7uGuGGH9+vO8>hyRti_H~^ z_rXtpIj?Tg7EtvaEK7S4li8E%^#Kejl9Rc#t8Owamjy}N3dEol)!k*!fY!VXzn1BS 
z(h_E-sHcXxEBy|#O}J24>s9EmEDG}kZEi(@N_x>3Ge!wV){OAn-tP0 zpY!|sAc-+i@6G-s-O<`{N5P;R1(8$5&2Yq>nvzDz!P2)aCFk+QZD1P8tRk0s)N()*pW`0p8Rc8DHI zL)6c5{!WYm84!1kVH#bJ9v;}az^gQtJOc0yD9>HSiV~E(J3M$6)l#-c!7gaG@Au`* z*ygZ1MUxvObWSR?;|*^8Y3Q_6>*(*m?i3OD(YjoaRnPcI{ZGZiXzXrkX61?4pH*YL@cEvIh z37)@{2{66L&ym zRw3MsYjAOAMtZxQ$k|g-IdPu+aEV-BMn_{2uWColhj}|kCOEH7u=k%71@$Eri=^%s zuiMxbHMZUtVt4DOY3SRR{@4gFDuQq4o&vP(5lP{v{}jZ$y@3do)p@Cr&us^+POdHT z(!r54OmhmJgt~z4$)BdtQ0G_Do@!FzeI+bR9K&hat>Nziz78(D({_~+h00IX$(*m| zjTv9B?w=cLhMguXU$8Q-EtmD`X+Ii4fGnLxr6cySews@jGz?)7fh35CUFyL z%SyaaAeQSyeojw3I~QsKWA0a+{jZ;{l;j*Ua!2?o#V8u2ckRWZn)(IE?)$n686S-I zV&TMF^UsI~0fjVBC!DnywG9)ntE-#R_{f^DLo%s`iUuik zaL^_M76VD)>8mivay&-6UVG~CBoPt6$jg=sqRqHdq5@w0^p|>g8x0!Qc%o&gUbLmc zZ#SSCR>7>sfr%CpPkwzOaZR9yuxc%B`oa!yLJ2s(`_xgl9N0r%A3URu`#Y2xfj91@RI2nB5 zaY$BklC&#ZP32H@#f>r&NaNMFH@~SfVlx6VSoF?zW@9HXT}79yF5n92(Y!*2jovP| z=WSvT2>F{#G?ap@uNp-&$B2cnhY~;O-Q>3%7P6u<5-rx+M*2(|8U@px5_DFwoHS|e zauXO=L6BpOfcTBaR6B{;Ks8W~>atea!*B;OVJo{srG(lwj1An64o~Ak*RO-H}n$e*I7{DF>C=0H{lRSKiY$KQI4%VP&xPIWXHP_ zeCx69F(d}VW){pyHh~MM#k}5S46d(kw-e_$&O8LbXR-!$ypps8B4q0`8@hw1US!i% z9L7t*Q3bi`SiyZ4MY?_8*ehr_sF4Un1y)Dbu08!&iJR?#iByXEU;xhOtWxk`$(>vF zd-o+yxl>ek$`c_^eS}ENQ9GhzvU*gSw-sh`hHNCLP!@(DH_)FEnbbVvC%*c2G)X*} z=(rhH9B-=+7%F7Di?B@cinw(&&jby-!G-(b1-^hHA(;zXzRIrSMSKMEV!|S7tjb=@ zuXUfDXu!+l?k+GrpC-n?-nb+9=1mGBAR?%hh{2C8E*PoZfe6{SeX$fv6pRX$bU$=; z{|5l>>*g{}q#)tDY@-5SN_*H-%>464H*Wv2=%E>=DcBO2G)6`&aF>Vhp`)771G9R^ zM*#St_;KO=&=3Ea{-<96WzpLxqJyOv- zx)5ABG>H3u2|ew4Yuw#;6p2zl7$ySCATo$xBZglOVGz!f1Gti(s5MvH@bInEB3z3cv$5|fiet6qa{zVFZJw=7k3 zns~@aa0pAVHW63GG%`%}R<1&KNRK+jDSN0n^jjv*+23g{JmqEpZix^ua7`=rmY#z# z>4^w3`~wDcz_=BRpa_?{ia6Rj&4eEkbR&vmBao7t3D}KOmpGNS}Pym^WQro+UQI z>5>#XIO{2B+0BB9c@tcTlqY0z<1uPxn|};H4$1mx@Gd9d(^KzDwdt!q+0Ekmx-^C; zjCgWEK`3(hz1t@$u!gIJE?eY^I+(J$M#fd4KG#)xpk@IphuG9c7iZS8F;_MeMm~~J zG=6#h6*MS@_62F+m?8KH(hTF~meGh4@n@ni$?~-5q zq$y02$<8DBphV4rHO{K{DM&tRTwxw7&iLb3@2&uu^K7zf;~?6RVrS_3;cxz6d$VSc;I69f+-eAdGX9t 
zdIx+{;otW!gH&~0AaYL4V}uA}Z?lG7BbbPmTQpg~4C?vHTvR!ySaR(!@%8QGL5tu{ zz2gLEo)+6!Uyp(2Fq*Bx!{*j=u0grYqHHh2_cyEQEUg3mLg_5bu;`!j%^C>nzSo;E znJlO;$RsZ1{X~MkghC}jV_!n0TIMv~{D^2Frt@g5c6i^jmT46S4iBf&J1~efKtp!A zcM30*(0I>9Mwp7Bm@GqTdPp=(E0(t>ig8ep+>urCbrKD}kdF-OU9Uyhn1;Ax1%29E z{l5jjyChzUYihORw98JyC9bmscz;35zL<$8jYGfpi1s)J%v{htGNoIo5jWsRVV;HM z7x~CPMx#1WPgnCxtxSf>AGM1}+{G(lJe3zV`SsY{=~}mlg2PyDtR3|PrJWzR%fOZb z0m&MutW}a?oDMJtbG=WNs1F+{HuFmVMRJrCxc;}U5QuJYDf#q!KPJb{cj|gBvd?`# zs8uQn+Km5h_r()x8*mi$QS6a97ssmbksfGfZkVRbDVNzSF5j=$e9G}2Qrejv)o}#9 zG$>p382Z2&q2(auM5xdZw&%L_ZVXGpk zIUFCG)zU?bsee~GQ=fgIyfnNi^at-JEwJI`s@K9t!E`g*sb?h>|GtT%qy8>@>VaKu zL9g}U*rR$CNJ`7^JXSf-a!|Tg65r^4U;ChBKiV_3NzFiM$y$+67-S&+r#9}IYnM~d z49}}qg_a_z7PP_GJ!qbIDcrIrnIxZUgjNdEvnMR#*A4}vP-6)W{# zxmCYYau1c%PXf@c^9NUS;{X{z)Qju-z}RlEE}n7}$S~{lPv>z{D%V?Uo^1y;9nM?= zWv?XXEZ+?`vq6~DKi)j{XhoPoQJYfx%9JaVnvmAZ-Ny;Z`;$EA*0;x`6`FZ-S2wyl z(S!QsHZMNS>i?X=SI3VP$vHc&9xrY0Olq{c3Qi0<-$z~qpkL|klp39Dl8LH%MB%1Y zw&){&yu!CXhbt|ht4kLn)3MzEpQ{7dkE(^ynPlBT-GOzQx9#}8Tj{|4^%#arpM8Px zx5nZ`Q5`qRw9g-9ju*RBsXXrvj~<+FwL|vQ=exSX&`56|FP8tC0QqD1kZi9G7*{?X z7Yvrk1@@asn%;Y)C3wBM7ygIf6T6!{o8gGQgxCxm&hX4@z6Alrd@5;^mJ6+8HRO5R z_v?A~0wbk0d35m7zT=7Oh08STXVzE+J6vc2R}Zy~u$XBm9cISTIVs2`Tu)^EjHS!m z_j{FuDRIJ*`mGkTMywhn9Qe4iZHDHGRFrm;0!PzHdl{u&qdNiXEr#Y?%eE}SU4Nbx z43U={pzlocRIt!;@V2DJgN9}>Lt*e0i($l5S&;&s&LKo z7q)ygsL-(YvO+TQE34Jp*J>i^P|H^?POysN_HDS5C3z&f+%TqhWJ!8VvjZ#T^b1Zu z@s+3HzI5|2e(mKO)|#@t?Cl}Jzm$~|iw&-V36&CWnJQk0X1uI3ndeoG6B{%VtaLJ} z8>cEi#E2)tD)9GH5;me!ps$TtzCB*DjE$1+Z>Hw?F)ZGWr;LrYrT=05VZ`KVs+nJP z{IT?w=ztcs0;wL?z9HHZP@$bPNi}8KfKXvqD(??6B_Sd+Gp{qp&;`}3g;4J%c%I1_#yGx3?!Qw58O z%~RTndy#DwHSyB-l}*?3b@<(_L6?{9+f@tYf=rTq+2$mKj%en)&9W6uG80D{yWVm; z*w0|bFm#A@WEOLImbM}$yNbQ1aT|V>b__swN(dWCNdNx4H00u}^4r@y4CF8#oU$J{ zad=-25G8wz6p_+3C09=*ylWd5rDPlj`jOlJLF>wi@}O$@OTD7QH(7z~Jw)w2MYMA^pKZ!N^sd>Kgxsy{E(>4$GkE@`x_s#< z^{xJ@SxMMb7ug1Zx*6T36fIBi0}(9P8#Zbp=8^*y#TNX^v%iVT-b5>I0+67GVCJAm zb*m^9JLa@DYVBN(;g_eUxuM-rFv%C0RWPMMp|J109{au~zg+drz+Wm~uw1D_fL9-+ 
zgvH%|8YL}!oJf)q1t(X49#&=t;1frEKwDP`@`8Vi93lFR^6?~~g0G8x71M6eCC4Wy zBt6iXcGDOM>JYuJMG^GcvheEHJ%os06Lg#(wm3aDJ(`z2ROuhYF-!z>-|lboJaXNh z4?O;w*A6)xVh3J(EudjJ3!wilj$KDb3l*CCJrV_4)2!5)Tz zX1VLcEu6M~Y!dJU%8adK5k{Z*)9eNv=%%;$U%WyOiCj)qiIidnW@eg`z9M9t{3N8i zPIrE#Kp<)s!nB6=6&5@X`z(5FMA54b*aY08%Km17I|VWOO^95T|C<0Ed*MUOZ``ZQ zm!xhoO~cx)fcYtg_c=wAg3ag~p>L87QgqG%`D-TOn8>2_qIX9vCp@n*2W^)}@G*qE z|1zcZJW2&Wx`mMI4hqhykhH8+YE*CDECsE)aJ{8`F4i7ESFk2*G*0tzxj5>pYJ$Qt z(g?Qz;kg$+Yye&Bx^$b0D8%Yw>UOszsQU

t&}RRDy)M0!`Ut^2uhUdLdM$Q+Ts+ zy5V1+$h`Bzn;8uL_mQ_QM{R6TqP-4zSWB!(-0&12saa+obux|VR{JeFbhzn&jwa2U zYf+-UFQ1O!IEtpLjoRK|OU6!>dPZa%P(fszcu`xZ+u;(E&~&gzPe~>>L4qQ@kRx$_ ziI()(5+MVr_7rqCQvq$p&x#iCCW94barbarn)n7|-eZM9?SHz`Dc(^;ZmUc%G$`Xa zn-moCgQ*&4e~f|C##)Z4Y)Z?L#ZQFfrNH6D%KpT5u$biv3V1<-649rmMP{rFF4Nx% zx8PA|d{?)fAvhp0s{2G0RC;vJ=v@kQSNys{bBfn|jdA2V`K2F4jo zo)Y)i>T;a05bw?e!Wn`ajKo+*OnO24{IG(~&O^25ZiyDLevcsHCeqU=b zrC6psL>kg)5FbwrEWG<^zvLwW|EVA&`dam((3N=`?`gwJx$7~jy?kOy)g@A;pl5J( z<4lX9bv;S;6l{w7UeP!ar91L|ChxWecd57jEsfe%J)+f_T*hn>r*EbOTH}UlFw;WV zdj{C>ag5CiI<0f2dN17M$^eADnB@coZl*!c1;zqcgZNiiKjFwISo06WD6Q~-HBPCo z3P+(&%tyLOYky_hRe8<6el6^&qW3sx8UC{+hr{=^IlTqVmbUznkP zj@HcR{a7gHKueC(o_o2lo0g6;ae$my6K^x73dP3&;F1fAG!BQK2ti7d?!R-h^5C+y zBu#6e*XV(}GHq3J%$|Ba%;N1G*uIKwj8FNfG$>$?sZK=eFREk8g zcwyNfw%B(r-Ov_Kk9%Nq-T^ef%HHl8nVEz94B zQ~mnnMWFChSqD5veApS&ikn!sQpjqPtNE}YncFuYBHZFnlz+E1)Pjb#F`((siR)cK z#@~&4Y)~j#-*MsGnA_XN8(==!=&bPUF0J}0_{quHT@1-Zz*&1f<19$lYhhsG5BrLaj&%3s2V;{Js?t z6BncA!>)K-={W)M7|avRT{iiS0wDlH>;W@E*$f zHkILqOU5+vgxV%|2B$?XV5we zY$T93`9}wtOz6Y45+~xMDEI`|p}??K2qAZCT<_e=*0jqky z>CaTOPKhJK#AESo{ST@(i>o`_h|bt>Cs@)2w$fQ2ol3w3eJ(sBLC(^5GhRLc-D>fv zxHdK}G%WH|Zr%G+3p`pYq|__3a@4q9_1b*5b++yxb$L}<7r z!{i5RrpgGt02V?JfBdN|Tz|z6p@o(VMbDkcUu;+{b@SUov1#M3}FJMFT%10ScSci4`p}GEg%WHc|y8q z<^b|AUghTnte@STdA0r$C%kbonMgN(Xl%8d^?sU@u@JtPU$&#kMo~$(ZE}*P*P=KF zP9xA`EmylBd&(__z--f)t$>136p0&;$@{bm3@wb>;rJG`iH7{N>Q-nTgBxu}RL&~a zH%0RV;wDJ&bGUlspwq>-97Lnzs~JVLI)1tqeLfv;9l<&lf%^ToN^a1ifLi1JWxfJ) z>CD8%-=C-&*XR`t)WkKKZ<#9^T#U*+7}aWzQZ8j&Vh-GBk9(=f)MUw#!0Egq~ zi9che9v!QTzXOk+1{TE3Vc5+{(bXCnC(!4E7dk(o{t3sJhh(6LJDYJ|@!D9^U*>B4 zR~BSGOP$e1l3IgPl|MN3a9!&;-(~I|GQPoNt*RBXh(s*($u>Qvl4sxhr`hT+1J-{C zKpePZaW?Xr7Kn&q_NE$*>lcblfYQdA_jpikYBH=P3Yyd#my;N7S~6GtF2Qk zuZ)XgBF}|)qo&bao0#qKx7I0@;9GIl91YUf-2Lff@gi6kjUsYXkmnHNGC83wTWi(? zMb>S+3^ilwG#e2(Z(ogy_^|V-J^yKaVe8xr_ZerH)6Bu#Sww1^Yrv`%+pFtd35qk! 
zK4mj&=!~01YqB0%BfH)IHUHojMD@xHwx0uVD)z$LQm{B`R=uRJzq=R!=V=m6m!#Qg zk_J7?NFI{lZEvyb*^7|0(aL5TtR=3LUGHVLGIC;#4~ag`fe`HR#H$+HL$ye++q(m2 z1r8JVUG)%~nwwGSa~iF13W$bPXhpro8R(UNq9Szw40}u7Kx!#+1Sj<(&4tE)Pnd)V zE8A!osl|Qd+^SKxhALGE(FxJ^w0s~jvj?LRHd2Qp>uREIzQR$sg8M|`FDO0iRvhf* z>m*Bq$>n=TcOB@AV{ryVZ4<>%7I7K6G%sW7P-DT5X}(-V&8}Y26t4r*!lTd1ID}q) zPu=U5*vLhVp^^nCxu^6&)fhkH)s6pxMPe)XIAb&mpEE1T4~osm<+`#DejJE!-r_5pNT`UIJ#GY^C2XYn&1JW5M^lJ#n>aBr2o1D9${fyj#M7sB?lIAAx=v?CO3h~;{w zy4@eW$AlnP`a&dD@n^1|B>$N0MD5FL{`spA=pib4HRVY*;1}45+AEZF4zF4kZ)o3F zKiipSsOJ&CV!nmv-8WN-No&Ls$GbWnAd-eJjb zJ2$W&&~X(Y!Eo3Tyb}}r=V4vyF6I*O&~0`c{7c`Ury*#uVZKE2g6)Vz!#9@KFOx>SGoD+SHFZOXf?x%fX=oY`Y_fq9%+zb^wuAWLB$lHzmFs1QOt755pKm4P+tU`b1r9V;56*pxQ2g{{ zNdyLW#=7Wz{&5$ZZ>n^@{*(;xTXZ%7HtBlQcSz4E-oGcOe9#ZqWKV&~E-a^6q)zG? zdCT8wX;d$GS`^JsS~;7)&;lg;vz;#=_qJOgBwE6cN=NXN;!Z+a7#7mu3Gbkcp7vp; z#b?wYGKppPBJX+1rq=X@EAXPD5o@kYY?6vl|DxHyA%%u`>AMtc>~8DGtKOT;vYf^% zc1{83ZxHN(22OYza)w)zyKZ{M%gETjHzVE2XI0@jHm86_P2*(PQ(`)ey|l@_j#J_z zj0&R2e~@?D!I5i+ddgdxb6IwqvhJwThw4Roh37N0`tqObZCjkeCNqfye>B4h#LoCO z&bUsNNeh2_xQw4_X1PN?V;79B)5gyNh}0%_94(DO3i*J|l%;NX8|VLa(aji{Et+ z0XL~q?;1aIGke0Mm;swfQqa?jae7E6zqT7@T#ARfEAQn&bBkOE7Q zOJ=8>qq;JWV>iG#C1tmNSGwmXUDu(F&cV!;ZC-7FeUpXy3LuX`o9P6KrWxZKLK3;K zz-*{J_Uk^v3bY%G5siRTGjKHp-R9c;68I!MCc6|aFnay^VRQDby<4)stZG(?$3NV~ zgI3}|=K2Xwqv+MQYkx8pTiaJ^2Ai!!tN8phOa6b-#awp2*RTBu?tOoBeY8@&HTak3 zzi$+t1scpX7aYBA<1@w5&#TUr#6vUVPUgpg`yZwTLS|B!f{xnH_W%cnU34@GaxLB$ z%|?OdY+4mFPLFo0vVsp6iEF2kufP0hDnTWW6$dW{v}9yG zKk#S*(?w%1YCa@OdQagMo^OOPF7#{DXs8!iX1Uv4e;{m@F!KEwo3%v{#^sf?CF?Bg zJ!>*8b$GtC5#D@Ulx$AlAiIBO#u~(v19gTu?&^F7`rp>Ko_&pKOIAHq__5Wn-&hBl zVwbt9JxSJqt9R}dLZIN%-Kg@a7bG!QA%}F!Dv0;>M$dzQ0%4vYyv6su&;R;<%E$>c z@3k$EXw5u8Lnuq#2OIzy@5qew4Q4e9${(=xBm?1A1^>#?HL=9Bo@N{iPf>Vea^ zDg-m~q5Xx0r+6M$?tXwoKpda|u5sR_kO;RTaOIIeUSCT?zE3p5bCjN2ew+iEtMnw3>AJ*I%msnz#j=lrc}K z<%5Mf_@wZ^rOEsnT_MW=^Lc)fRs7Vr+MFwr>5^$BN#Xa(N+K~)vQ_X2r5L-h%x6Tc zKgjZAj>3JTeWkHBS)_xY#`OWLm;x^jNmYWXVg>!+3El4CJHB<(*fBJ=@B(hLbWm{4 
z`EI8C7Gc_cz}-k3KbyV+GzrA(uG07M-Dgblv$@K)R%W%+al4&ERa*IsV0G}hyyQrG z$^;`6ay%<_nV4`=S-$@YGrjw{P<0D)QZWhmwgr)J)`YHS0QHAoDOSdoP+jG2uXm+U z@y^dRE1Ydf*U1OAm-s~u3ERbLRH>RpZ)Wkn6oP4 zBd)R*;MOj(h?>!r1WU)(MxYbI)Bc8!uJ#|qNl^-k40{ke+PM2}(X?3!OJjJHiw^4^ zu)*>14`xDgB%*j`vdPsvU3Vt)Q?E;}f_=P@TM|_qL)rJ6c48?t++TAFSu>a@*V0y% z32bJQ-~O>NMK5iqie_x!==yPWwB_M*`P%kDn{+)FJGy_RH41RCfxX}P7rF;@rFr@e zBrFLqHqxNz<#sOxrtsB(pyQf7<1`gL;*Q7-iIS`Z!W8HwVU(24?Y4jWn*W!}Zgu8L z;Bu;DFTAR3MxE0|Bk`(lbt>O3Y3C37atr0~zzm^^cRr%;z;Y+`iY$`Ik*>muf0 zW(peJZ*)pnjuucDTTCxBk#t<}wVmCB5F>FF?1PWWWBbqC$@HJ-l{`Ukr&IpK+wnSo zb3f6>H@4|TnRIU-{NAIoMadGC}%QP8P$s^?2 z|FKz8HGXVXSe>9!?ZTFf#U5D$xFGot>gY^0u;R^9tdqTo{CZ8BiBH~HBY;VeU`M|L zHybxcZ8kZ@5aQ{s@*T5MXaiBcB0-B#;NKd=~h1%)wEM=Yq$6y#!daUiU(4Axv22*(nuOF25nC-Yo*R=y^ml4m+e;_F1kH5 zxS#QQd2TMcKOH6f=)72v#RZz`m)*^!+-E*MOq!uo9@<)QmQ*1HMJ`+m=k*P3Bb$F) zktRp(*w2r*_Wobs=mG6&MQ95a4)@;PKD`~onyE0)###Vv8%jn=@rzB{K;;l9YYl-p8FlG#H*KJY7ZYfFPQM9SWT=5H@GuFhSx|GFY z(@$$7rv?mZ*X)U)jl%PUFv8{_6+r#;Y}6KR4EOcs)MKtI)*czJ5%JIB^wAeCrJ}@i z;_pPSby*J6>W8coPW@D@r?`m^^b#n&W9X;9OV3Vu_CwBbx?dXlM}`uAw|GhLE0s_O z(6Sj3w!?uM)=@VUc1636G*Oty=^@Ja;J&~at8aC1MYJiJ&!7bi6`#C*-AmCI7$X(_ z*R%EjACr@h`1iJN7sWEb-@tLq*4x1Ihc_yB*1#5eoTIpZM;}k$J8oA23AP?v{+NEN z1nKRh)A)F?Axi*o8fT1syFmwQE0ILJpHb%Gs_-xtx0e6*6}3ufvSjl0OP5c2*WB`N zi_{2;4=NWF(=OyVtV*cbvMu(-!jFFy^)brQYd`S(ZL>lMUE@fC?E;}`$_W6?cU1gj ztGN5s6v0^g`qsn|v63}s5=Ey9c10em9aZju#cRpV=-JM*3g)C^OYU|$L$x~}^#Nr; zTkIkdqF)kJ3x}8W%WCPI9hru1Z0-5<%eDZiv!~d&875c~13BLZo@mk#%gQNX9Y`ce zKtEG~EQiKfB8L@{4lGJ+w-`ZW6V94!3m<|^V-@w8-w|oj3h)Q(hd@CzMdSI4wIADT zw1Wbz8|s%}R7jaO)LU?uv>ev=)jOS9;OAAPJY*vW?T2JSILv+0PRv1!&8R!wJRW4s zZD{c|LMmvTd2w>UhzvV!&n9=RFd9>d7_d?w)Tk@p;b*6ily{cmYH8mkY!RfR zC8j}Y@ho!V&2LrFV*XXlNybnmb14=3k#XwO8EWV!TPG2k#hS(XVMo>#DkD8l^XFw( zTiyfQAX=M3wy-hH>1}yiay?ejtnXphv^<1qG;-Q7LZ1kI*fCp-mK*2FU(jf>XYx>C zS#siumUSFkI7S%NJN-|p>9cS72>LczPf^)ZO@;>Iwd5|b2sBFWQl=|1T*ItW(80Lj zw2>p1-ZwN*pohZ9(#GeuPqfpRd*>Ix)O`z8F&;HZB`{AS6j zZfB{oQb#Bru*8RBh;$ok9AD~Nz)Hw3M(F|C+?x6W*$@Qv#P`t~gnEDt?}JjFm}~;< 
z2@DWJrMP6Y_K8Bho|~k(8l2x$J7}6R(xWg?+(RRAN`~B$)%N8k#C1J9wJ9FsQTm`0 zMl+bDl9b(6&uLH;Bc=jV+U?b{%sJv?@sub%%Cw23=pzeC8uD(lYf*~H*z8W#=M^J&wt!baVc^FX$tmCXgg-txr`7I211r2Y3pgsyg?rvgem+3c>vEostr_Kr zf!+O4S!6kV!V+7wl51Ec+?#1shse$5pFN7B7nKx_m%Nj;sXnycux9M6Z7lBNKlCj{ zmQxC}I#PJs(?F}UY$%U#0E(R@KmzpHu_)v6{R!QAS%zVX`CVMm2-t(h)* zhzjFlHKVcbAvYnm*mmvQQW-9fu%Sl2U%<^NvteW+VW02U0_RgaC! zzC*S2#t-7H$K3?pO(hQG7y9PLA#%8#w*VV-Lf9f$pHe z^{Hc6V_VxuXk-t$I(k^vjh|PNLTLtUXW9OQbeyYne16=|yXTX5{k_U)jQZi!>|yBg zo7BxWXOeIK3(yO^mk|xd-eeMas77GkzZsdj*!;C5)xMd6xOP$IQHXK2ce~$XR{sl# zrxdKOl+3z@cmytN+SmjLHmD>-NkH`%F1yk635Tf8;?`*eZP2A_T@Uv^ee)ReM3cS! z4aJ*xL>PdL^k}&uXMv?k0F-g=DvaUIfcr@7cCq`TV|*yxC~6%Czn~UZMU4kcZMUEpC0f%34uRaCbr&=eptUxNVQcW~}h_KN2Gs8fot3Ap+-B`wrBt796 z91LL7w@ryH3$K0eAJV*4m$a~f4n?Vx1b#%tt1^E?;{#`f&`QQZQ@{heH9q#Ly*fC7 zP1YCvuO5wCka|%=IgzW?HMp5&gsX60(BjmstBI=+xs%{Ui17vk0*v~Bnp1Frl^;wy za0_oSf6YN>l!)&nf8REal?m>(6U;eK0H6Ge$EIOJtHy}+>Xlp24HfnlB-|DpjBk+i zeRL?IAgPxq!Fd&@uar=c03C=$#m$io|X)ecB`xa9;0#}Fu*P8>aZuH>EW%BakGVt(+~x=8s9`F z<0UJ5tPdY>Sq1i;iVW*PEkf?R`zz*9hVidGy^p<5Ew^>15pi~V9!mVf7O(;<<+?6w zg90rI9y{lPOj7O*5bZymP0pGHN8*7*&35Cm&vKE0g+y)~_!gS?ri}sb8eUBLCL+C5 ztehB8E)_WilbUwFd!_U*h<$8!TE?owwIt!H=I7Bn$y=D+?vifan#uOVaVZhHV;X)L z!(0=L(*vJmd}~qqoy&9#ylf+3Uw7Ue5-k-nk`s}RGptb2F&Jo`GmhVzoETWVJHBKZ zA1!Zd)CHB9uqHp8AZA2NGd*ju31J_a&Vt8mDuaF;c34h*&(oIjuV?bTn=PI09_98fDb%YLb;&QboTDZg;j}zptnik^12>_mr z7DISKAO};cI01q|6VBLbn`ftPuFhCly%_AJ;wN6p$65U!UMA|l1+Q4!74xU%dC);g zyWuaxS7q+A6_%}lulb7gyR0JxITlg`sx*Y7sHZfJvoZ(QLIumsjX3ROqzHm%j~y06#e@Lm7m+}vv{jq2NsgoS6OTcj(8s5cBA)M%uSR;d-yKpvh(fQH{%C4A~kcH7}dQ$>hN+=w&z56%Kg>~gbI z-PpT`7PZt^pl+BBu+1AQI#KDcBM$tk8y-N}pS6gCUW*yJ<=4mkG0U=+#Bo00hMNmX zq`PV4Yvh18Y1%oI=I%6blI3d2W?Lb|7_;ef0sbChF(w4F{VjdBYS~!P_l5ZJUW2iT zK=aZX%?>=|){Ul8&y|@cB$lLyH1;&sW&cHuHhu@SwYpb}%ZP{Fk7YrVQqjXcKKK#2 zEDD%Ek%durU&)2gZLT)QYDmAZukpN-$SGdIedUC@nONSy(Q3%=OFjKxPi;@HJ**`8 z?<1Je_lXTrP1?aDg_8NLvG5QA`~37Zq#ef#dcmxv#(Nj0ti~&^ak=u~!uz+@nu_Dr z5N2Wr8H#y`eYtgk*F@BNTy~>jOE$H82x5liAj*Y%!hFOoxx*r&}3 
zjbf`3v1_SF(z_m!hG=Fa4u&+70S0Ht`MnP06uEPNRA|LkEc!**@rGTp6K@DO$JZL5(*e*Wnt| zYu3a=?-e<$_{daOf1(czpP&C-D;;;4E#^yrJXnu+1eEzFsc;YNmdb@TLPfqI%MHx2MQwL*cwZNP~(e-K~~|yo6Xk!glY}j%H%6 z?HpWsqAju9QEu$C$xFdsJATFKy6uEfu_Rn`=~^G6DHwx357txJ6XkpgE~6l|)7;8c z9dud?vm}R+p*VI`w1BSj|ApnLhw;tH8&XL1g}LN^Gk>zE+Tm>sU5@0PRpU9I3^Mve zv-zWb5Q`s2m)TMnvii%us&@S}K6gyrhjPIg^bw%;c7AI06^gyIn5PP~HY1K4*z6=h z_EFV4DZg*1-lNRx{sD$#T8&kEzdUj8gn+p!0(G9;_q z7_OT}S#AAu@`w`Z;<_Q;4HplBIElo6xdBn?io5wNHGin?J3BZQ3;4Iu{l%CuXA`M? zf_Q?sQ`q}dPu5=S(VK+HCGK_50IFb8l9Wri*HV+zedwc3!|~_iovy$nG`>Q^FMEiP z@4{E8NoUCYb~}JVAhJ5fvpm;w-fnm!nA|V;<8L1bv`@mUV~5Wv{dK(En(W z-y6$pLTh@xJCkm}?ZzgXpLIdhrgYlIoHyI{n1L}J3Bs#ZB{8Qw=M=A0` zF8)W(806m@FAmECMaaTXBp$ynxSI^rVUq$$C4V)oq{j^}}{{kW8cioNLI+i1}jXp*>7 z@V4qBhWLwqvzX%l9cBx1?=gy~G_l>r-I5Ta!;`*GeBOs=Q|w~V&s2)Lh^i!v2z5}hup7HYiI zYb&|FJpAUVh`&{QA7IG7?8WRT*l}#mn4+FV(kf1{rt~x!tAeb`v>zBq7JF*h6e6g# z(L23}YKL?>j@Jwo!&1y*d;^K}u1a@B1VNuGh7w_3leNW#SVvhWi96xhEP9FTqUH_5 z-Gj-UY;(>&NTUSma!_Ku4`UB+1H5mFS~2z$2rA3LPIe+AQ?fi#7~Txzc;U!5a_ISM z4Zjl6W7N;PNi8n4RN?fLy`pNyPx5%*Hh=vh{KuPuXXmYRr=Cgowd#<(SWBh7hBan) zCljkc&1SLGd7$(xEP&|M(zc0cO%$P**iKgA*o=gwt!mHLpfBbk#4*&|Sg~kG7zHG+ zr`nNQr14pe**)#Mw;==e?0@2+7?hl;L;gwHRgr)-O{J?CP_sE$RSu4gaR1oZYouE$ zW-YT|SYeqA8vOFeC7 z#Jssz6hVS&opH-xt*l=!Qy5s;A|6u#MeIHZLJM}pCKSL@J;EB!BVCRqAECy|^#adV zf1FMKGu@^ziHOrcaXXMiJ0`Ty%WDA1pPU6Gh{_xe*O0$s-u`$WGTI=WVDaT9yNTBl zXz+F~GrIY9=241l(x*`qizYdDO3^R348-5TsXj{aMRm{A!8T%uO1YRo+&0U2Y%MCPTCVeV$@Qwy-viXGd<0=7`p} z-WRr4Aac@E@R;;U((*dM1QJC743bh`lnZY};jW6ev&`Z7&2y)%SlnJa!51@WqI6%@1t4B7)&ppsT$RNB_7VIC(q7nn&q~9@rs~%& z2G5-EHU%xoRe_v;aC`9tGq{UbPfHlDm}ww6lt*}x>1WzfvEs_j>-lylqTY4eECp`; z#?iSf=C}MoOm+46_w(SF+zm5ArWyqkDAL5bki4e&pdfqU$06lxdHErIQWCGb%hP+W zfPeY0PDpLp?#(yfKO0iMzSp;pknCuFH7{{A0+~zb+?o{7z{Z>O;G|F6coCL%hHR62G^F`46#OHx{{jO_&6vX8&^uXvgO_+j!D_F<{yE zkJvoUI=@H?Uj0u;X;W=8x=SD&($oo;+CjY_xXU$SvGu7Id+lw7PJnq2aZJ2}cT0i> za|1`~u9Y{gluc51Fga&o@V(&Yy_0a7$4%xP)~TqxM=(v3aNb>m_DJ|v0`RdNpSEgD 
zKeX39@9e|sw*rql@0Zh!N;k&;7#aTSpwy&;b!@zZi?=?D?1sOleOfMWY6W=3_>UU> zAr77n*4BzNlQJ2bd8VFuNf%0d)~TdHlo`BSX_9LspTwBCD+ zgDng!@e_9=T9d!5NZrH^h3q?kJhrVi5lis@iHM=HTh<;YPNGp?WrzIje@jNkqp7$4 zamcSgS}l6Jpr4Bsn4ivu$ey%h6lWUl9zo;R@;;wW2N8fq5}pf>G8(mC!sNtEx*pv5W;pN`==egO^dbNyE?XU~9_4llI)0`Gx3B<2gJc z4`KS&Ro-T7;u^f1;9vS#=AR{zw_bCiIlq{cqG6J9LDQimU9yHz=pjAlvl@qMg5{|Pn%Z99nkT@j?}<~zzUzo9W~L<#qFF5;5$>5y1tZ#_ReW{;60v_BL5 z>Wd78{a9#Gdk(uaf8x@R1M%jkmp!!?{r;?1xUcS;LI_}}G2R*w#Ef04tNAgbw)+jw z*p;1Jiga7P#$Qy+#JAuE9QmhJZaQQtEeCFq+Ea@e+zYrxUrppBUCiHa;6P`s_ijVF68M~no_P9#IT%-gZ&ZZh}lv;w1B z7l=?f&jeLKje1j=#**9Eb}&` z1sx3koq1}M?fA131C`;tbLeq{i&kQ2H!#(FgZz*Pq_aYYev2eH-33;>)*4Qy(9cyU zfAP4=c;vPC$sS4QTBxZ}xX4B*HrFpF?0j!Kj+c2qFK_gp+a?E*!Mk}b(D7Y+nNnMo zDJ$1$AyvKn$jB-!J0?jm;C*@K3R`7yfIc)4E8PlU9Ul z);h_mr9R4R_}+jt+F8ErCf}|eauykX{Z*TK^g~vKRpm#*9Qj^d&6V1bA+kaQrgdBr zt}*37#){{J{>e`bSOt7fR?69MG#%2j;sbvB8lOa=%@(i^MrLhP>U?lY@u$Fl(B5j@ zbrRJg(7Le4g1aqIe<4NHQETBzUwPecFC4#cpgOCVEWXS_VxtUWPCh#M#CRg_hje$@ ztO(gAbIdq>ZUdtvat5y+&|wfrUnBKA(56Ew7rVc{{G)vI z{J^__$LSZs>>%omxKWd-A$T!&&_LkhOWjS$bmP~dKpL{WfA22G`f}FQ&gKz_5D<~S zC^1M7P|Bziu_`DMd>p2n`>E4p;MC>_cU!6Gf^WO6q&DVL!dd=DMG?+;r#yLQse#zP5WiS)H!>53PeO&c?ni z;)7dWZpFk9X}XjNm~=04tO-pSd1u<`cC!~+JIY=RvSruV`jh(y%_r(&U$Z5cBt*C( z@h+)o!DBw1c2CjX>D$sHr`w9lwX;Lw4 z8HXip$ch?u&=i)+>_gV=mz|%MtJQ8j!_$rZhI0e2m&WHUO#0XuA{jr^uNd0C~^d{TDKm^kVVm7QCxMEGS2VT!H4%m|Q ze|Dc-Nmdm-z$8y$S$)w!;J4~9o=hcg3)zkoVD76+e1dE|$Y;$B_1PC2zVUhGeDVkV zetI3BuIf_6x8Hr}JbO4M4Za{fIc8mI|0#v}vFQ8j^0*bbIr;oI>8Zc3nveA3$_;fu z9E{uj2!gW_n89%NZ#GGnX!WX}qsx>d1@lyFUyVbLYDaudkw~}@EY%D)T7 z@*@huPcvp5TdsgeCCt&64D<=<4VDh!12bW6Ht}fIe2^c_0I1y(o75uLvW8&Ch@NFx zQlRmEIkA1U4n)=^Tai`LC$K+fLf=x?@1n0m-KoQ^#YEXQe14`Jp2;R}5&T*nA9F*F zIn4oD%ryBL^QIg&+8nQZ-Wxp_%bfrlblS`+*^F$dV#UteB=(bcN`V=iu9UvDI4qAp}7{)H*VE5_2Kbl|~NKZg`M9 z-TY&H%RSyzw9`AHx{5#%@Xt#~s`vh#wuD9GWjv|Xc4+@VSnAw*l?ywmecM8-QzU%# z5>~*QLWLqHdGh{4pBB0_vI;ZB0V8Ff2djO}TY>3cg%R$SnfoCWTq$Js$Yt9lnhg~* zPR+G3u2gh2^te)H(@0Im$MkZ{O6>&R?-&9?QFAt*fS3gWrO}MK69w4s)wseQV~xKb 
zZNFb8jUg#GpXVit%rIUT9wH$u`K{3)1)D0=A2kpA-8Y!5Zaz#;4jKhOBEVW6JJdSD z3BQpH5=)#e%*iV<&asnnYY)HLzKh?+CdC%UD*ElP-YVE0pK-UgwZW|Rl zTtt7VAz)Df59+U|eMI*5)^|4vc%xj>+cCE)N!R2s&y%$KO0>3|OS~}sr_khDY~G=< zRNK)RcskG~wxHTDn4cYs=TMO&JHNwT;v{C2U2d9G9jsH8xM z#$9lu<2em|Y`xl0jEHjy&N!3Q*h%N}B1ckWI6UW|bNM3ug`kaBts%MnxeJ}~--DpD z+*B=o`yn3^!OpTlWE5-#QlgeAt5W#YXU}}(`1--wpC@y7Kf zv&yqyCb!Yx$HjrBFUypCE<1L6ZvKA6G~G+#XeC}QHBIQN1Vww7xE8alORrrpW_7JZk_$^Sz|@%dWJ*Rom_N5Bs*bTPv$wGZEukl$(x* z6f<2`nj#$J#h7WY2RU8j0-A9`7)t!*tg!dV<^faXvO{wr%0PEG8ZLE(#1=(_fG%=z za6`AZS%>k$!vLYvyYOwZihw(f(8igx%2XU~%pl-+cKcQB)75La`Ra-T*_*mj`u-vR*%(=<&3&0`PE)hl}B`Jd|YYjruY(&j(A{&(x@Ko^wJ-2m9YSYEuU^JdQzA?fN` zyYF~)vheW6$=*lZ6@sUoUQ}=6dIicA+cKL5->(C-_R;8Xp^-x?-GqW+BP+1Q2|Gp> z6)Sc?AC4|RtGnx4lCwsnX-L*Zjwj)>B zNI1d>wmC~pAH|zB(}rt=f6(d0HKo)AmUzQoWu$e66M@gLUea$L__WE}Fwa402na7% zUt}b;3UuulT#4`|^5lMK>--p3JzEJ_^|n+tEY~jq))`k=-kA?FH-zEHrs%)o0Eqz_ zZ?EWR^dRJ|*GBNdop!Brfsg2<;6}-*FRZvOY&GO+%Nor~-~@!g()3M~ex0S%{it(1 zTMUj$#fEnUjVOXF~J-5hIra*FCfs3YKHesnSiB zqy|tM0_~OkAOveVhp=XC*exzfgZno;caetJ<)okAy5UHksX6=THCq)?%YWJZKtVCp zY8rywYoE$Y0J-;@GoBa=qT3xV??XP|{J@9s{B^N?k7%3t>5w!~L5Rszl&3rM4HkY1 z8$b^)P8a`gc_Q8GFe$*tg;$4^BYxT;SX8rZkYv-9fkt1;ak;g{c}b0j@G0iuJAM^N z47Gf_;@RWSg5i%UYd!gb!`d0pg6|yM#96Nv7*bWmX*M>n0#wAuqgsDUj&rdEgr-kb z6y1YmdbUe|6@F@dAf}k{FsZ>xTpqcr`5}9O+H=zlh&2_9&ItSGA#H*<5VyiB%CmwA zc?l?GTB#<#^yi@;pclh*JyO6T^`WlpY(g$0MgDU&H07c`H6gXS@Lm^|VSqcYg*K9w zIS8f|>hhQYKdglsSaYTUeG2~ex+xOQVDjhH8y25pDRE^ko?k5z(^!sU;rA2EFQViN zjK=3);Y4C#itLidvD0(mGoe=CrSJjL7R}I-Qq=-tNnP<#anx6d4x*PoucnKfs0F$w zPzjF}6n|fqxssTk)(KXlwwRxv?Kre6fXmsBmRe`HPTpD9zjY`R$<`2rjqNC+vT7+UZ#48`{R<#aeh}DQc!H%Oy(=q3aLm8FAJlM#Gh> z`a1LzLsm)IB8=MuWD@RGp71`E?IUg`&p*_wghk#fvx56rqX-m6cmD!#)Pn0;x_qob zr9n#d^7!Yg=|%QAQdaWV6Z1n&b<1D6Si7Oi@lxWyeGjXM;sq&U-%5`!Z5xm34;f|E zQ=~0W%KMI?hDuzqQm(tCyRI-5d_L_ama>jG_MFCw!~!$p6?qTbAg$iVt|K7&pjFeC zS>O1MZ8#p1u%C)BAdw#LLX6-KkB>8=VLxbAa_{90sv$L5p9EE(Pp@71MMKBehPJL| 
zqt7n9nbP~NC;Dzf)>cO@Faz70I|GvH*0P6vFTLYo>uYG8iCg!Fh4Pk)m@r8%(qGvKFftfp3Uf2DMe-Y|s@UB4+h0S>{lx5Id|@0v|8%RcNBYHUEu#+u1|5H|GI*sNQEQCC_kU>r|_? z^-D<8eOHpLt`1VYn7`fdKEQ2R+NPtXx8fVRUO$CeT;fBRpvUqwY zz=_^~E$Tk8EZ5Au2Dae_bi8X;&w78^(kx~;VPXG!$l4|_S z@}MA~AFC%WshQ_A;wZM4#w^KKBy`yyzO*8PvdIR`GFVL^J-ui9!KZ&e5F9mRNgKSz~5gS5?>3H<@uO@&JuQ6Ac zP9@M1zis=qG77{1{UJ0UVpd>TSqIrGPGi}>XMO#~Xg@PsA(m_qZIunOYuuH)&?;4? zNS)EN9e`=+VniMFv=xF929Qp=sqnZ|6b{AVn|xSDk^Q3j^lHZC!N*$iHc9pk7*fFr zq8|{?F`X(F9dT8!-Y3M~L$W3-LlJcM0Hmv#g@#`D(vKg)D=woX9QeF-b@<53aok7W zw&@tF%&S~Dx0uyQI8N+VmU8{t8 z9y#JfbBm2wJ>PZ}exa%JTs(-1RRr{uBs2`71HDzN*?jS7Ie+Zu31#Bcn8cCitJY>C zpf|@KBG-Ldq)cqS&a@;Xchj z^r%2v@oHq)G#sULIpOd87u@%6hYsw6Z_Vt=^1z`l@vg+l&t zQt6BAx7Om(ir=%YLQHKvi-K@@(g5^oKk;}rEcQ!Tbr=I~@w%j0Vh!~1Jjl&HlDr+e z3$=-sO5hFijgoLz4P*oev%gV4RaQfAIQ+&t1eWU|KoyH%0+Rl!09dQ3lB33A`gxydydQ)6s&Y zVKonvJr+KWl*;yf13inx;_1%}{qv+%#2@AJV!t2;g7L@;|7z1@M4wY)_SK#%YrROp zN-Kr-Ik-OGx5|C#jdn2cpER<(ts!S?T)M zj=Gm{3Ozlk{}<*m4h1rAJT+cIfK|qjPoykpv$=IM)vYsQ+7%7M~H?m@8J*B_-p7{kOrkUlwkC*B`}H$96Z<^M*l0{3n%TpS*De#!#e6bfiQ(%>*r zznJEX`qQ3;-s=O19e8rugcH}l9!lO`j3(89zmK+_0asf=n1bF$k>|mc=@XCel0flWze5KI&bJn^E*5~@5D`p&#(=3N&BioZ2(&K-nq_YPYHU~&$ zL?I$H>7i?>sem=}E@U?o5pn2XWYp?wK*gGOf ztsB(gYx@AXTRbxkAuc8oikr^>tX<@1#}k99*U?{KZNsjvZKW$lT-vLjB_vD*K2Tm= z>9&nJjg6J_jhBfqYb*21&8m@h$0g#AhMDj0Ub+MZ7Yu1OK zM&hyu9hrrcwd9k7kX!nHD!CKr+5r%bwkcTKT`(fGau=zar{ANCBf6Peo6by51Q`M& z3m24N)%mV^N4YG_a~nTxdWQ5>j-s)^JPYdgW<65*SDNpnbB{&|LIC!t-j?-!p8buN(p!kUjj37zJrQISuUaZGnAL~Qg4bR`bp(nwuGQZgF zX5&{=qp@u_oIL`((;1s`u5XmmLZt=5LlO5T92`Ie$aotvbd{rf@$)pQ&cBPY$!cuJwubp(&VhNN^w=MaWxfr)>Qu7T|ytH#IO z^F{ye6`0c>eNzKvFapXingfsSo!Bq{&_0V!13x5&i=pH@*x|jvOn^C?bU_#k7oP?v zhp+q!>e!y%NE;SJ6<|-a&wa)8CWcs0RuQOgV30`kg^3 zDwpkS4_=VAXWg;*8{GpR`tKX*gN$w_jE>eitff1uu~k)0t|CeKGhR%;GXwu$K4XD^ z`(;%1d8`F!fz|A{@8t(sw%4oFaHH$KMp`k+;@f~2JNY{^;kw)`1ot>~UbU}D00gL4 zN_1G-a)y^nt_o9}=T`@m*;$(r$b+w&#SRa5u+74T`2Si~@+293!5g>I!9mPIM?>s& zsISo`Yb{12!FXUk>g|#0@k@~oDSb>aI%FKTa;LETbgGXw`a`qf?DNezY?9gmx$Efr 
z=7b_VJY@A9?v<#<)A9F=@}R@mk4fG3Bzn8|nvF5LI@un_z#SJ)SQNjwy((=zdvuz5 z7RI2``DL=@pw3R~z^jD-d6a!OZJ5JRfLhN{pix5WXpm(zjt?D=iT*3~*0tXbW^Cd; zP+Js$;6gkw%7Tf7yax2-LFdxzI$Z5u3TQ#T|pbdvAS^EbC()`SbU%TwIrh zBGhZ8A&xDYa6?^eL6JL+8R(1L8*kcJcvWy-An})W_4s8&sz(#VIn zTDeq5zYqE&kLEeSojoTyp+flWhku=Xo7(Z&_oExo;>rs~Hc8?|oGzpTw{ekM?Gw&E zLv^*gS_|hpXKF;kbpIy3M$^72`c&V5Iv2I7~=_b!9gM!OA=&_MmQ_Xr@idZ)0<+ARak8TpcpNi3`Pmp`>kS zq?DYGerwWB51FhvF+6=x()KWo1#xj6^*L}FV?oazPO}pcE6-ww23tw}dbp?Hi*cHcGBAjq#@FWl5}tH!$GqT9|+_#&!1UVDWeQ0nfVmXR%x zWTj=I;+UAi^t(lG`gURigQ*im5MGF)f%F&&jnhQb&0Y3 zu3B32yhj#I?anZ5)*uQWkt*=A#8o^&{fuA$jlH9N>`P22{zAEqjP25aU8i-uligT7 zpSkFDzTtH155kQU2YqH+?D}VbD$r3M>Ueg30TXBb{Sx$CY7PA`y8G4LGuH=Y4h{Jc z*f0i8GkaUQ*8knsCRam7uN{=HH;&U*%u2Zsa65sC+-=ADx6sMmq zh3-<1{9B8gID#j`DqjBpD6)%8iVu8SRkD%)=T#8pvB}Sq@|B{^M6z39ugnyAyP(4F zeag)CPUEo|SOZal$P}UvkJCCZ0f(7ZctNxo0`Oi|# zi!2@y&nXt;*|VWivPU!4{&3)P`WTw0@XKl?wclIys&K4t@n{R(?rw>(@!H5W>#KC``9rJZj_ z1f7~JnGO0ZQb+RvV`4K&e;lm}hGY|d=bqM$ly6Ly8o_$O2ya~|d~VSaypO0!8iy7- z#Mfhv(8Gpk@+KANKzARqGr)q2QvkxSyP8{pkSuJ_3cvnLU z$EdS`bt>Ige<2rtt8m-AD2W_$F~${eMx%+7aO(6YaQLQbsysXKjPQgCL&DMkku?&-WqCjD!$;Xxm3>3J=aZ)1$tpXMx6I4vdJKR2 z0GVElPQ46!03c>a^ zY{oVpvq1}Z6Np}glmER36NO(EAmDZR-N)_fly4M@JW*j`V3fls#3f~EeNWlT>gBsI`jFU*xK7Agk`FL(w=v45DA{ZwN_$YoJWw`M@qQ)G;Fi_55vISFoQ@ zjDksN$Xe-P!k|E;-y2TrcVvvS4kQ`yFJzP)S}1Qrf9h{@+9~pln<9DL=cpv}1t4+< zV2)%gL1~YoM^U7^{njkv3-4etB)(rRx6jrUvvz6w+7O2VYl$3fNO*mm-$Wli3C}*M zuC|;(kA%Up&a7T86uBrEA!46;kgakbGA3B&&!)fyXl~ zUPPU@S6_4g0ht|xEFRVB`RYHIO^5MfoHj2yCtx+86{6@k7Aeok-}J!BPmdJw*y@t>-WHwYKK>O_HH0rSqyPE1)$5i>iJF9Mt5=D(U}q+Yv}>wH1H zitr)LQQV0+(gp)Z>VN&g<4E!DvG)C!9>*kKV9P&He$O)OV!2CSy+OOkvr>6!nV%zB zZB6J;wtwoi8!%$d_vk0KaDs7-uCkVLgxkCVbtON?MUsQP<_xj2qZjzzeQR)d4Rt8u z)QGbRk+56Fxe4;wNRO9Xemh? 
zhs+VS-&%UEsZkuDxyHr;tRt`@vE$@)bL~!dWLb9caRx6zd6ms!xB4&rZp_hAf5ai} zW%CZS%U*M7nTP65aEqO)OSNZ9IJTi3Wvl)q`TGV|QSV(yRB+Hq`ta?F5I?4V|Ce&ON%^a{C#V2%DUJ^nV(l^Y8uZx_`WZ=^p>iV+?NT zoRJqlM3sLT{Sr1)UA+z6TQJDR-BLVU{`0}p_)po(+UAwG@NN6)$q)bgyF1j44u2*m z;@(ytV=MRF;Njr1q%7gqp6M62+JUkw-9rnvpA1Nbr`uN{ciRmLmH%AJ-A?21Qt4n8 zCzH}doc$@ih<41m0|aT-;+CTCJr8gGy8g?3Sus3MuUDo?mSghX%_MDSDV*ysuP@#! zNS_~fq!E9t=-R$B;rMJ6QT(57ivZWEUSllap*Ed0bh!^sXdeX!=GnP4ukCnW}a^i{r4 zwWmt1h@AK>JZaz$u6a=K>+^7$rLEBs+w0uL7+e1;-<8;Y6H2qp(cQ>5xus_Q)G^HACJ@)~JtD>Tga`sx_g+~_%5*psnS_+vhmrpfTQ zb||hlrKFtC{3*nPY(Y^OZXXfiAmkoeMRl@txGrk{Pe*4|jxSR-jZMo@+i|5FQeQ*1 z>?fji(a(z=hm5?7&I8>Ui3FoFtT__&>-w2@6!67Ct}GHX6rOtNqY>7u#X8?@5GP8! zF;g^xa^Z`r1vIaG(w1Elg}<|W?Pu)au~C-mb3#AWGF&KNibWck;@tlD5mX@yOF7*o75b90@1}~|%z&6b z<#mN3@8^|r(CuL4!eblw`4(#$bjlVACQ6n7lgbpsFAs|`z~$G!q0ncCR6*>6-DKi^p{R1J$hb9)|K?QRQFbZh z;luVJ-U=cM3Q>scJc=r|OiAqwzC=`H;gAmZI3w~gc%QJ`DaD!6e++9})yKE|?j=4i zh+W;G`~+ZM^H+jSXRXW1(y*+o>cGeJ&TME;l!S&_F_N$11-VAvjPsXrC7oX%M>A^w zxMu!yO?Yd$J&`iYMURxl-$##80GBGO&}L#{!y8NNUR%~Cmy1E6DA^bm93F4Hn%`p? 
z6$$M95CMM|j|;?$&`tc+p9N|IIaKT@V66g^X3=om@2*vTzd#y^40=cRkYCT zjr#TyeN;Ur%T16wnfMkww{okrsTIYyDv`ge7CqS6YnAC&E!7J_-x{al;#L{#3?}!2 z6t*af0}HI*cXymj=V@>fFWfuQG>EvDO-_DUL2y422~zxB?#1pPkPWO}Y1*Olsa?iPHDyfoQL8b43G3d98uEdR-(SmTime z#&1Kj3B9uRc@Tna1S6`UirWyQY|?{wNAVk0Ab0(j#JAU;Cj67Ox&Ks&S^ryUs}^rx z4!JQ=`Fr&0t-0x%v~F|{)5v^vwkh66dmy} zgyoB=IkD_*eBPankY~Y;2&=vxhxwAi(i#HsT&_T7+W?Z-Jt;hsSWA^J?p1bC1@c?^ zrfv0O>8o9Fz(g8Az%A5(7>^W@R2SU_ECn?4EOoTF#i5*FJl=_QcNiPp-rNt#w|W3 zSWrJAwRmq7uluQ@+I(wbrP71ND@g9$ba?j)`g>Q+ot|#bY?OX6AU&~Xy7-?9`g1pseMHur zrZx=|0Mldqt=?)z!9PLuTpN)FpO-%{h3DhM$_U(1^c2BAuJjYw3e*w*+~G)Xcu0tm zka=2P0m(gG9}}AK0Xk}jN^`-is(4Nu__U#ljzi>c>?4(E*JlOH-tBT@_s&wFP}DJSg(|nlXWd6&~4!z=97j@Ze6BoN6Ic zF~q2+OmgT-soSoJ=bI)Oh*O5h5;n-tz$G1f2v8VFN$`Qen(_-fx z2@LO43Xr$qk*8}j&I|~vcR7Qr@E_Y`<8RT;U>YLdqzEDnIw|$#;SZDqr=82IYB^&D zpr3=}xDoTmhw^$M5>&Du!$f?Du^J@q{=$T}BhgAqCallt!z6eYMjjSG%VM7m`4gfW z33AAAQvv`AYT4tPxEDfF-YI!FXo=Iy zO!@^?vMq7(U9FehFr$Vyk({SFHDP9$7G}|IRrx|L-V17aIF&nEB*kbif>Zbbu zmD6KV&iykGEWm3E6G@I4re)+q>8QLr`m}cqGF4&dLO-^OyDeU;&4h@4Lf~@Rxge zl&~U*GOFkca8~3+P^|K%xNsg$IUq|YHVp>ud-y|+a07C}AqDr!#KvtZrv!7IhP86Z z`L9H3f$Ht|)g?db9aCt(sMZ=C{x)Iy@B21>xh6anD^FV87-&*PYX^-O7JvM1=seT3 zll&=%=}w>|O!wGAWA%?dmY=?~lbGxHEW69k(4&7zCQi=`JA%fW{-sHe^INWg3~hUb zPU5fWU-2)`K6n4djxJu~U2VQ{two1kk-7G9tNP^wvB~fOsY&z>BvUvKB%mh_BNEG< zDsuF_?IG!K9$RPV`sgMb>+xZ4gZ)Xn;wZh5<=#!)v}A20PX;)-->_hn!4>|u{kZ6SCu}qNd5z{FHr=>$uWjK5cjcZ_F6MWV z&CBnkuboX#74Oa|vS7%KG&3ud2yOJP51)?U4pZWFHQ(z31Lu-T&}ZFA*e$ZQ#e*6= zWJjqYkdCVy=0Sfl@!V>KbJi9%2j|DsfOxd;>sc$*XT4@IVSm^9X&=Y-Eojo&(KRp5 zk|f^66C+n$*e=+eM6r0_o#qXKS(x!YY`?|05dZ*U`0+}qSxAf|D78i*7gPtHSz4Jo zVt4J@N+%i}7F%-D;j(YIP||v1Icdz!jT*K}mujv(tVHJpsS`o!V=K0m=Fu%-mVY#{ zMhdi{#R+(BJ8(!mo&!&#Ef099)c(Zg|L}CyaZSf<_g7kwE(K|U$p``IZUuqSgV7<~ z-O?qXQc8z_z~~y?pmc3?jg+p@@!Rv<_w)OE|9tm-?e*DpozFS%V>!g=jIe=JyV~q& zj(8WPej+b^rb{Z<{3J_W%d4E68rxe0pfdX`Vf>}u`??sq2QW3`$LHbH3eW$+BxFCw zvf_68LJ4(u889>#1xWW9CU~n&NxpA`z;AdCEAYs2w2WY>gLKQfrJ%*&2_sG`c+R!Y 
zCx!5d(z4&pbCuzpKG2#~+4lw{bX8DOA@T&Z-{fU9y;lI~Xl#B`V@^}q&AkCw2y-)D zzgRa|w2cRyzh>hlV&4NNIL|kYGMvFmiui&&X*r_l};? z$a8HhyA#=)88fYh@hh!ooYH3<;3jh^eXU`cJ1e8{jruU8KTw~esnecxgVWBiwslLc zNw;e8`K2liyALQ(78F7Dd?=ngo=*2AfQEE>#{K0&PqWpc-V(t{#XLE}!O-3wW6SX~ zi9q;F-!~dDWFsd@P)xCF+>A9*8DZ4!g7i#)iP^W#SEknP#_QNzE>9z|v6LqyjFKcl zEiM_!B74o|-(dnt3YG$EUHN!p;Ef@hL&`n^fYa%2E$)e)Np$>EXO=3Z7m3LAu$*fs z56c%O1m^LlZ`bv~8&1y5Cs%j-*$|c7Dp}J+>C6{zVNe#jJ{8u)D@hB=H3lB|mR%pt zVA@n==|ldGXsJzqY$}$Jx3}M_4xKythclf6FcNlA1!=$J?6TphI09AWJLDNE*;7(c z;<3Htp$MX^r)@OsZJ$f+$}d}?1FJZum(dM>;4B_&HqnKkEfdl*X04o*5^BUD$4hqd zn?uTCBLBd(`rfQ$gJKA1DI!nW*Sb6-HKsT<;{qz{@D1vN3uIU_`Q%0wMVJwnIu)?v zggNAk(!cu){xW^}-6k@Ej5i*Hft(zp%&`Lo#9>p%(j|KV@>U-W+qPmEtWXb4DsMNt zY{@L}#`yG7MV)O9diiY2gNA?5?$YApaYk0TFcT3glTh#u`x|Dj(TsDo3!eE1rh`k+ z5dbr;dP0Ko$ekB4dSlvp3hN>Ey!~VBQ8$$L@p28=KbC3+eN2q?7v9kzISr z@-Y=bnJ0<{WNo}zOKJv7U)}SOl}TvX6dE&Kx@>(VG-lz(R`s`otNm$m52R2KkBjKU}|MljIeo_9RhzoEMvK1@i1pdG>?frle%}ksHJP1$TQ{ z;PeTHq-h-gUzo(McZv8++Q~N$w3={td6YA|Y&=BSvgij7Yz;lilia;wN;l@{k>KW8 z$LxG&*zH|-$K9TJ)u#QtoSD8K`PV4Hj%&@g4aXz zj`>vkshsMe_Guu1-DKb|Wz>k0J4WXIB1?zKv%6b=_j0LT8)3f_5wS7^bfaZ>B*9%j zF3p}gVP$;aULUb&_5A7F&{+iT*%qaPLD^MQNBb5RglmD%*g(y5I#z*r_{&hX8AZ!D zujaKfQ`L84q>E}@_L$?#4MWhF+FtI`dBDC8_ZK#L2$THV0UpcC^KL5IGU`O!JidKC zp3-|=+lA)22CK`2BMzn9c3p^FVJHC!>F~Clz{=0DIC%<7L+NiiVx)`9y4fGfY$eug z)TYy9mm}(`4>uLNg|b@sY>l|lZp3(+&q1ad)!7N0^q1xDXkkx$bV|X7w)(j%qOV`v zPnLTR+EHUTVC}t^q3amc$gG0A!lvN%osCZbVGU8fX51fT0P&inY`R*vD~q`0wq@5} zP?_@;exGcpO5MO|G~?=i4eJ(B*XUAsrq`~J5tosOrfvt`j=GcvfTZ|onk{-P{-eP# z>godFg9~^#>=kBLcR4+IDj`!q+ijG$VrQZ{U{H?~+tEnx4)E}b>#~kiX+U;YErmnKfshLeBDk;_ZgF{BRrlDL&(tr zC51PwJI@vX#v&1m$hzz8c!|DbA>k?u*{S|9SKrrbG{YrZ|7Ps)CN#^=N#`BadROOd z)aghHDBnuYLQ7k!77v_~AO;i15Xws2ia9NRZKyNxmLu&s0T0&VJ@|ggM~0SDGdiL+ zoFSQ7=GG7e*TKCubm0nEWM{mQ7njQoS~Mjk-_SE@URPG#qQBI@Xt*RQ_w!j=AdT5( zXZJRG+J!PaOKGSn0znN(+QJJc%t^o1$K1%j`VzZEE{;#+wP(?%0oeV`r-je*{JC5r zu=`aQb(JAAJs$S~Vm#UxU&P<#f9%3UsFE 
z(OBa9J|JGgok+a^O~W^aR8~c$tz^VULijU{PGot&vkb}TL~CUJ&ogi%lHGwQqhp~a`+zwy=jvK;%`9NsR_~Pc<&!MuBq8?1 zBa8mSB`H@G#^t#0oMOWF`zbdt5ht~zo~@&@oprgZiu03`fP%A)V7Ji~U?8W>(kB*G zg*#B0-d$4;*yJP`O|f-T*a@&$#1nFZA07-0~`0ag4cNF{|y8=yQ5Mf5VR|n zO`3C&JZ+{*M|ZmZLphJUEho!IRp#YRZQGJ%M?d~#`#fG6Y-=&BB>!IsMCy*@ZpY+$ zNJ_)Zx;~iEcn**A=a_R?)`BzsE!|6^O)TWK>}4RC7s)W3=DwoAj0~Fx}yyR{Zh= z3e@cmS%8YlvJ^5gj*PN+TW@w%sN_}WV01BK4C@qbgdC6tAUhbmr6L$D))9np?$|^0 z9X3B(jl+?%b~2+}d{uQn%cKBIE|BoOm&6+t7mUfTJZ^O>XB-l0qz}OAZ1hJ@<3eFy zYZZentx>U6)||}3(3=^JKuWj(C>X@YCC`A2s#0mMGnj`3e=0Pa$*;wwin1D-x#X%1 zwsb|e804XuIK2^<<4Nu1s6cOgp*7-2d%cnsiaRPmlC!^HB z6}n|DdSnL_n4g?L08g8_r##S-8i;D-dO^%3bU53#HjX`u8P5}ojg_%KPnhppb7tb1 z2hP~8$(AHL%-ron*QvEx#50A$!3SPtv^J$I3yw=E-x8L&Z4`~_R1aK6vIe<4bBvl- zU8rrFkMJ3dSl)M+V#Tpx&EvVP(HWL;FHDNwEfuE zp_~6Vdmf7}m(;0OTrGOCXvvsNUG84~Ph37TT8X8P$*`6iN@%9iwG$lF>tzt2#myT@ zX0QFx#Aq8K3zFj0V9T#JU=D&9n6UfXsT9IJmZf=X$+BF1sbh{nRKIDQSwaIjFbq4k;2;@rr zRb%j@f_)WEbP?tWKv|eGmM>inHjlBYjv#|}M{o8Dn}5KRTx>Ec1nIOGnlWWp{5}B) z{wO}g$CZqtA6HcBjT@#;X0u8rq0#&7+e3px2g)U#&hm_HATP#k?_GLT9Tv?b+v1w2 z$VqJ@E&hUnf0#cx9zYvC5hm7^Ygps@i#NBtc_4suX0U7ey6I zG(*+XO9ncZIiAz@uNJg0wy?r>#)v?5m!9;N#>K#UY0(WL8iI9JC3!r`Yv`8N5LC(y z0Lv4VB;fTrVE7y80J%<_fq@W!hOfFd=1T}k^CDWb z$ffjp6Tf_@2Gw91?oHqdFqR2kPLCEi2NY*>-Iza;<-|X8A#!O`wdh-JquNJo7XL*l z5M^bpw|*g>MOqRbdNg~XGr)QcrkkOC7_q6w>H!fMN~osS%O6c4Gh04HqK}%lu%^o^ zN#l(-}s%?qw$@l(y)%r<*i z_5v@aGLkDf7ax|78vQ#S${MD{FtQ0GAI~1U4+D3?F8Zaetq=B-7`!8i;uL2V_YfQ$HY0Zelh(&NTKvo6^r3-ZEXkl?s->8Mxtg7{b^vf`IVKAG@s`^P_mg zkYjEl_9tJBILm3NILpqH=7tgQ*m4|;vK^Le;^hQmNxLEWC&tkhLk?aZwI4va_IKuWHf*)jTC#^#z|89W0%3->?iHrlC*Tk#ksXdHM;N{?0Fi{*N$J#Ik-@$J~G zvm=`htoIQ509LhxKi+WG=)W+K39Mv!t?)8DrI-cX5d%p3@>nzo5OuM06gn6@SXnoi+5ju_%HP?T-BpN)9*Ai zV|ljh#C*l6;^yN+IjBo2lcxeXNup~{|0R*%?ws-5yckEAOIB4iIh$Fii+B0NbBVTC zTZJQ#Lzu=s9%@63UECJFD)%5mF4;rm^O09O6&&Kj<87F&C zWR~PqxMBe zr}%spiAVNO0{ik+aWEUzx_6zK=X1w9Ir0ip6_G8bf;A2JDRrMndIM7@nyC>+FVo0# zD#D%HzKCCm*|TBj-k<~TuQOl5AsqIKzn8B6eY|U{E13@m3MO;9qgX0EX=FI>R!CGe 
z@W~k>K3_wB;$3M-Hb-P6xI#r07?75mz&r@Bz8*1I+_a-AX=to~q(rW*o{n~w)kG<5 zYPI~JRT^|R6w_>}+v>Bh$75GErQiT-*in)m;Y9LRw$4+OsNT$u-c$XOWn?#7-iNnXqh7Q8tCeIv> zFkg9R4+Obk=M*0yKf0Mnhn^j_S=gib7)xod3pkgW$;)ms-zUF?UA(gSOZpf~9P6jc z>glkC79*&)f#)Z8cSBPHuLHNUr#$@I{DW~#4e)1QU0fsJMBE)eXpiv@C!)0!?g0hWL9zM-$-VBO zdrb&z#9(-_m+kjgn%FA;qu_SW;OuiiuG)0*sh7W5x3wQgoLM5-KJFb31aFzlEv?+{ zunpK_R_d125C|<_2DLn9FnDdSLyP1`H z$5hJusuxd*mLIqTo6l`BH-wGc*F}##!KUmTm*1l1 z=@KI^5Dd4#gv9osGxQUmp7&WIJv(SVdwB=1>)kJ=D70QSUQSnd`-q;#4c5~(WU6t} z1Z^62{jdK6{l@m5brIk1B>H~6-O!SH8g9Cv?vAT=&mtMJF-$T+n+{If=SL9AjlX@PK^QKU$h3FVFbBA<2TyoZ^V+ON^=kdOfTeDLA7` zo>-`0G6r6&brV#eCL|tJt?uvyYDluE!;yz8%N9?UfhlqPOfu0WNtgLGC$pudh@s?K zvz^P;uU$j%c*~J8#pm36_qON_Dqz_THf~|3tYi007+r_!*Xl{_K&>`BtGT{}<3{bu zmfW0xp8KM8FYFK1tJMlsd~Zs!oNSqzIdyPjF~WPl-NXTFWpt0YO5Ih1J+jJ@TP2~a zATZWh$tIf|l#`cz9A_g3SCLG=06I1|FhRe zgAVpS7S6EjUrXbR1?@cs;8TbCo~q~n!9E9w&dwe2De|2X->?Um(iIzd>7oraW=Z*D-rL~kq-LLuJU9$o!H zBbZc8sJCb0w|(IU^#B16A&qr&9iAmZNcs(z!8=l^GCBZV^`|%gqBKAIe&)+B_L3fw z4mdhdo+idvwP;h|e7+b0*G?7waQ>_r8H8`f6*44&6L)jV&u|^+t)r{Upkb3j;|P3r z!8sk(Z!Xv{5tf#@3^p}&4JUi9AyjQ?zQ$A1UOx%4IseHcdiev;Qwk4dEGP zk+O$N@*X<<{B?I+{{s+1{Oe*;4f>|arklC+wO<&)B97M0%78e{uxa z5>{w@mOfJJYd6^0omkF$h#W2$0-=*_vuPX zOt~xBe?2^kYxs6rQLjcwOpF@F+aEz*w;wk#HlP!a>zy>}9e-yiafue(W!!dSGKIxY z=U`7yO$HBUs4qiQ#w^BK{c{AkMcAa&Pb@G#{1x*~T#}eZQ1199j9^>&NUloa&wx>i zOA))h_WyQ7K8lz8M0mN8jo-Dd;&8h-L7Lswiw%7Tw>DIcEHDyx*DoGe13I_N8;4l~ zKC2xqiB;>?uEj3jLva> zhMVY*u^}6Cp{!2S%9NvX(p^Qk%jBR^^ZRfsf|=uZprN+T|EbpV;YLwL3^IK*SAW?$h91&q&V7s*u9l5%&!z6s*NZnr z3^$~=S1Y#+2Zy-9eJPv;{@#o88%3kRkIwg(hmvLYsg2F%nAijv2;;j#lVh*e&9VC* zOZO;~reCJMyNM63m3vLW*IEhGaimYMajz^ShO(~`jv8(|?wMLH%8%T!NB=1WkI|@0 z90cwM+$0g#-|nw>I2W|=EYJoWkL}Q!uNspj1aCLCE$d+hT)qmJYBc0HOO<+DM=uj7 zxr@1+y8*%W;IXOLkyM~x!_5De=IIQ(PG{rOUtO11w!Z&&%8bqXS#raB@~$VyUG;e5 zSc{>yrEolKlwk+E&nSfMumzNpHL?HeQkcn~5IE30870~>QmhP7-@d4m+))QwQAg`R zRPD!DS~7<837O~)Ej=t}1gL>~g>gpOXwG>-_D^DY!&J@(f*~7bGdsk||*0n(F5ps&d`lJF~ 
z0wgv}ZFe>WFr98NEHL|-q(yD4r=6O$f;-4x67KZIt;lZ|1~K5_qUQ3b_nMI|(kms^|3OD6fR{@Tt5h?hDiy@#PjC}XBDK@;N< z)mxZVciVTM9tkWO+&kC_hs&hBOM$9gt>LLovZ&nF#gt49tye!x`7xqV0?^yVU9$RXKLN*91B=^`(#t63k` z@Fu~i3V^IMUy7&z24aQNeQSEC-OzprVIV5p}^SX@ud_R znvA^3Dh?JZeC4%>{^H7uo|eG>+4X>(R*L5tM2K@dU+Yvy3zLW{6R7wC5C%Sgz z8w@B`e*g+d9D2u@1;R(bAC@h@0~%A%M2!eO5wV~4YO$k8x<_Fh{lPSNLeo4ieS*=` z$c$R)^Uv+W5(qcMljQQ=HFKh5d6WLNirqdkg0uj3;E8mF4Q|-_>xTH7X^Q*)(%K>h zU2Tz+o~2bc@p{>E^XhrciJRsMTOzv?Acxf8txox;vY!atMcE4Xi}3z5fiEd9+OupE z76WGdxzHBg;D=jGaauY-cIAt+vN7}TV;Y9dvz=R~%tP=SelNGl2`G5sTXxd|5)u&9 z9+vdWk)BhpG+$3{?0pd4m{LLY!0KwWKo}&{mOE=*yRULqDIqvlQu5=JQRZLNR;R1~ z!w<=uh@i`(e@#%|Njm}V!PmH5*rOY#7%RTnXZ{&I>o>6bEWL&hl{D(B(Ig>Zo(dt{ zgoQJn@c|WKpoJwqcv>OleLB`&VIo-1L(g3y%i{b%XQXDK|{cwincvS`Q_dABBs*%#gx+lpM*$| zWefFG%&}oDyY@xW<4$mO%KV8PQ{_=3`XXTes_`@MZDZ>7RPeH2&TSo}$8;jJdtf2! zzph0S+^?Hs*X(QzQyIj&Bb(R6#{~~pQQ}HLzg$b|4lc!3pqVxPd{gs{W^E4swM|Pq zg!_knR;YiAidos^H*y)&1Q)$_o867OVF?7|{MW6JdTkQnJ&Ol^4*RTjodvGAQNN41 zfYFPTVhBDLf8wHSp(X%)cJqzv>W^Of(F5C8SEe*9q^<=dZZ@^&8&JPl#H+-bvsm!C zwuz1J5QDhwCTh6bXM@{E{A<%qXk0ZQ36-@Ba3RsPU53oW zP)??R)8 zfU0Sm7;lDZPDK2o$1^;Tr^wPX5~>NoJtgkRCvcrp%{~&Z+WZ^W5U{32b*^5nO)}!t zI#D{-3dhG)2N%NC`c*qhZqTUmo6iI+So4z8crW4o=HUAMF4efiquDBd`$@>{^BTdzHsph; z+4CW$FZ{@~vyEW3-PmU#bn41xaPwaXq=VWU0?iBV8jcT{dZOUv{ z_Bf*HSx&?;Ofgo{AgU1J0s8SfsFX{dGgGB2g5aiZMQBRp&ETsYhG)2gvKY}qaO15f z>&9dUJ|{|DAf1JnYb1W-ziBmr3jiBwWQx2NmI)v(_gS@2)xN$JmF~fEquX``Wij1$ zip*@4&UNRZHHRQTTf&&E^~s|E94FbFzdr88D==`CH^GMsYmdQ(=%YvQFUbZpSC`MJ z`SP#Riei#2ncfG_a8yw}aRL-nlCGcyArk#C9#BtUPlDFpUlL9;P*4vG7^dX?kt0>w zf4yiq5^h^&{B`%vcm~fDON;(sBjqNlaX>JQmBr~MSzv?pjNdnIGlDA9xp()Ry zZ?cVi_xQD2z(~vD@T}~IH&A>rtX>{OL3%C)u>~jd==+OQf$SM|DY-`O;hZ(*ZET|K z>*`f^5j*i2oD_+83e>v;pzC6ItPh6^_J#Jj^s^W@Jo|L^WQIa0!Fz#J03>?IEa{t0 zzqM{o*4)JYbiV7#js1xuU`fl^5$n2F)kDa$U#s&9G4n)DaT*x&}Pzet~*q9WntVN-APxs-vULx-{zL z6*uX~loOvV<$~X6`f7S&&UnCG)y@nhS{^g@nRs)iAVK@L2PIhW z8A-Ib@XOiReNp04D!KUU=-AnwU!3#!6u)j=v+MoaE?;bhsyO%U)qw+In?@S1_5N=X 
zzPFJF>o=M%ev{SI#;x|3hp!-d7W=TSJb#l^$?yFo6^((5eb<11Cw(DGYkOr&uo^Mp zSrsyX`qM1`K+6p`t-svuJPnC0=m)vWyDLgK^KWjS9~fA}Ox}p_K5;~U@F?etzY{>c zU0@@cZDU-O`V0isuFGCihL(8#1HEerS|6v>5-7KY#zWV^#7-9a|IuNNQG_VS3e zZZBYKM(|Cg6p7g8H(oWLDexI)`);tOog{2~(oD|X zO|K@m%ddxP(PbdmYU_Wk#fi5orw^;(7e{36aUCzf|7d%KXpKVt&6OMn45Rmm^_l5C z-OK;m13WR>AK&^Wn~0C!)0z7Jr51W@x|rJNzYyq3%t%9fQ4R_d?1v((GISw=6h+$$ zs{VdWH6KX8`q&G5DN8>_9gB!h-ZN|)alS~7?EC@4F4X)R;Rb6nD-O7T+Uo}{rqzTD z5SPRkwF8@IHVltxa=bWy>l#+6L6ZVntLSB==$_FZgl;j~XPxw>|Adm7A-OT8W&3V6 z8#YBRxrBBXKB8Vfoq9vNU|wJMy<)rjgHf*wBj7>)GG-{0k%r)6=pJlh8sAA|XpT+U>r4p8 zb_SDI2WK%vWPCzWvqt}@w@uilwm>ox0{u>hF zu9^Mc?C)=WY19T39J)rT^%Bn{I~QGKmb78{m_XBzC4c%k7Jer?LdEa9#?f%6I9&q! z(hhLexz38_^_;|)xYpG9UZThM>l1p{E)l1kd#7*&uEktL=} zCtfwE>8>EbkaFpWXw7>N;DnUU*XTCp`%p3r>P%q76|Dt)YMWc+;y&$b1(x8@X!SGi z5bv+&1TLS>WsYST)H=rBvj8J0J;xk=wXaeoJVCugnf;Rdfp|J^a~wTc#a9xO z04}PN-@8}ti>3>S!bsNhSKjN`VU$ElZ*jLyP)LneG>@j`Z82JK2e{D1YMZxv zyt{Y_uceA_sV!Ne94mOSZ+nP%XQDxrvMNv?A<${@yyO*vYoEJ^FI+MaJjBM|=s{~o zWKPkuv>mtjbb^Y6-;IRQMc$(>$Y@Q!O7V^aBE}* zj=a&v+t(ckEqS4O-Zi5EN$7Jn0@`E`_pP7ZFL-s#p4LlH7-Ln9@ClkRv*TN@joB{s zcjGKeGkpntZd^dDwaFU&XxPfH0kNYYM(`k((2OwMU>pM#jqj!hVaU4tdUC55V^7HA zk^LcUu5RXeG)C}5;kV+6E)qW$z-yb+)_HXhc&W~LrcZ?wSuoFmm11OIH&A>8#yu(PtFd-!=F-De!GFr%(>;le(nu)H;mhak~#4Y**n~s=vKdcYH^~ zN~jY1TG;bFcVi4jR`z-TR{N6VY0JFQSW4#WmtLbLstmWt7$o?&03#Mwm~#)?DIE02@kt{aVUW3G;gvQ>YP!Ht2IuUrYKBC);m zE`CdrH5M*8&8|&kqq(P+s+%la{~@DNRCL^q%`Xu@7L364a7|hHf?R^#Q4x7TdvEqS zj$2z=x^Juq00Q{+={f3R#k#+R44a;=((I>vZk68F%Bcb!{gC1HP276g?u*q@gJGXU z9kgcB{5>M-OlK4Hc)h)UmG}?R`|iBXUM^7nY%Vh(iT&|AvB&*~_V)Y6rI70T-is`%UydtS!fg6?(U4;M z#+>Q~$#G|599@HQ$1mejcmYuY)nGoZz`B8Ix;}zLS2B1ReKLIz5v%tTK~P(>bzDH- zZqKdm^O%3t4Xn`emnKc4P0V36-GRne#)hR zNwyLS=5@a+{7oMnOq24RpkCFtK+{GfS-EsfKI5tEBXp7lt5v@&mJJt`ZnjBU%AJ7c zp>dpB)fEUA~ON?yyqe?DuNDTO3sECazrL zmWURuq;J4(>@(==T~w+LQN_QA4x0%NhxNl4Y#67OZ*}`Y$s?Gt?p+aV$l}Y(q0%V} zEB3+O#lez_63I^>qK|N;D-1)tK37N`8QmvdA{5yWu1$m{mP!?~vJ`xqN~?P)(6B>w 
zHm1-G92npGrTlFj3+h?f&poAE!O1B33eM7o<09!v-SLbEH!)2LYV*77_^QptYx*~am9xYe(pbDS5Nh@}@B5lrwW@6!7-`lM zKJHcTKI=WI&gKHN>Gv#|E8V|Nr9gPW!pLcIlE4TDNv8MHvTeZa6eAy;ygh5TCc@-VOL%JK)laAG1%@q_jy#o^^d1wlJl#28KPdtdHd2kfzOH=gnUz~F5J1vGe4;Ql^XCaOw>DZyaku19K5e< z3A}AyYWI}0x13Pysh)S4G1e4QOAw=f_sG#6H~&3Y@5Ja}oyFGAIPz@dgg0=pYWVqu`<5aA`7{tZs?AVif7~9{vU3Q+TC0EbM6tB8>zWe0NKKR}H4)-ypZC&cF zV?N8|Ug!Zzba}C4U`cR47Cf55jRpxXjfy>P2y4(w)GYaJNue;McAUmt;&gu_pbr~U z6(73_saNqC0)Z#%cO0wf!yQj80;Bet9!DR~g6)p(5+Agj2@als@ZvQuP5=7^>14J# z9)j#vHiVJCEiF*7V!hMi$%d*Eyc_-}h)qe{HZIqLDo8d{lnLJhP6 zLJ*2=+4Zq7R{V0_SNagx;SSTmiC(yrUI{pt_?BIJh8_7DfmsQVX!|`(da>z-7+t4g zw-E8-L{H*MKW6^vD+=4$O;N=Cco`xlPbQU< zs#8$NCF=VDv%dQy3!~57<@E_%Qgj9;sEyvUK-7^I&vLaE3R?(VuRCOiE95}8Q4UDA z?ho`Im*DU2$$ih_BN5OktRwKiKY=e~bKK(vU3-&^F;13d3a;mx)=g#x$K?W6wPI%J z1BRsuD6BbFm9t-{(0kMBd0>xAJNA~wJlTi6wHOBZNUPi=5WH>7pA=sAMw6s$$~a@i zlJfvBbXstWOzg6$fUMq>jgg1xX~(>=6Dw1* zFI2eBC(LqoIR;|yvcA9(%*De^olkFqx~OY$2D7JWrZw8r;Qhrq?7Z>1G}hiafB-tw zebH4(*==)O$3doAp4Prd}?Eh_ac669AR=#Ek8G=Hg3 zxfOO5xjF+-=|?pv^wso#uO_EM5ZJ3JH8VgsAL0c*g`Da|Feu;+y7hned}5zlk3iJl zL^2B$?3K(%L{l|7L4m#}Onal!?@D9M4{RJPQp#hvEl!@`PXws#VSDUclt)JV7+e{j zcPa8jyE{o5SvmgA6*X`v-2OGzvNtu=XfU1WRS>`3>aNtNnGm&iEjLpS*z*=q7e9Ac z2sLq<68hg01D%b+RiA%lI``{&E&*GJi4#A;-LyNpZ6VXYedF$M$)b4S!C2*kn6HRW zyQA7Y_CKoa>wDD)t}81|C*JJt-mzYcjCzdJ7Q9nv`d1Nh5QX)TbLsb*yomOP&Qy;kTw0G0@ zK4aa9X8TFn|6C&YeDlTpWm5W?@9&zM|A=Bb&wY@Teoy`3D%qFT4_^0ymJB637!O+} zk353bmBazZVGMP|0jro9WkW6j0sDI&{*yDUl+dnd`|H$t8|8Sn%5UO{0AkhQM$?|w z<6JAlEouS|iy!~7ZK6W#j~)cQZ7+mrKB2g-w-_EsZ+llB7c!Fn%ay(AA?_l6oK1Yh zI&^y?DM)Hp(Aha7A*S|haTr(dY2Lj>`jPFUqa;)ax4n|I^Ca;s?)D`FN5uc1J43B* zg7oZ3lgba6c8xx_ef_^-3qU4STzf&w=VQpV%Rdi2h7%nDVv`%+CF1G!_c{fJU7kd; z?cjgl7j;!sZ_>W3?pw#DY(@-3I87`rdBBwwGSJoKq{#knecd{ufq%OVmyJ{ue%43e z9dG|;!xE(`rKFzRh8vq}e8H6y!X5;!xh}yQp82 zIHyt*0Fj>*1D%Kz{jTxEKVgU5oB`?B`Mv+aqp7?AH!V7Ywlh@akpthJ-C_a-X$3*I zX?Vbxbq&s0flY`_Ca9C-IGb)IpyYxpHwn5#GuPXaZ^G23OpHqe7?k141`-0MryjUcRuRvHgzou*V+ 
zE1^4U#+HCOB1F|Q@Nsm=EBLKTBOk!3)PH6vJRr{(MxQxn+gMP|4r;p6@?4KhfMd+P zvKV>6TdCpEJwR}R8-gG`Uh6-oGNx4LDgS+=0o~F`JnH;J6oZ5Ju*$^QTqRx>&3672 z$hyVn9%y3!oY!M@B;`uHENH7tN4;kk3tL&?j7$N@J?H|DDojDw%5YF+t=jgJrnw8_ z<`TnLdn6d_+3q4Wxj_cyhF&37atD2$DSj30^7KQW1;}P1G}a@nsI)RJ#~kQ*%9)pk zyPauhzjYe5H_0l_44Whp7JBh|*On4&fg4b>iIV1|Jy|W(zRIO zcFmd70VJAd)5jORx(P0v;6Z?GT0Fyq$r6exG(cwc4O9}<`{qmWH9(-Zy252sN)gKx zXRT`m)U$;1p#~NeC=Wd!T|}aw=2SQyO;~K&N0u z4HZxU;ri;yrSBg+f80b+4r4TaI}WUOu$aIp%a6#mPEzjUOP?|`-isczj)=QbT>Iz_ z12=p@2!t-xMlMbKFdO?xpyG9Pcr-5Bt%yne=9cGa@Fc5skX&%XOK~%IqG)IOO9%0> zT8R_PV6uR_xkb{~*2V#li@SjwtSTy*32(x_r~lGi*rKwMM&1P5>>OtCv?pdiL`#yj z6c|_963z5hh&1l+d-x%ahz0fygcRHt6C9OU`-H4`f->w)!^R-*Z>YANvg49Q{uN2x z@Qs^T|80`_DCjL1#WOvivO06uB$`kuuchgokhsr+2Ao8#-fi4X6SwGl zcm)MfGR$;duVFB(hq1nJ~2 zQ}jo+NtydQ`*kcm{*h{&N__0z8r36Syz;6)=;>Gs+3rAmesb6AJ7>_+YW!A2q?&#C zv28o#wz7LdY}qkvOv?N3B-?+5oQEp8t)u(tv0VcGYqxsWZE&+A=xF9qC}T13Zwgh?r=|Pz^@aiDA4+PWPv5dO z+%6c)qTK3tsn~GK>a$Q$F>#Dun%if(S|XepAo*k}m7(1`bPqSHTrM8~FZx=>;cLpN z`I}G(Uw9umd}X}N31&U3F+Fx@(5`(-H+3DeTb>Mko8h0Cs3mX*_%jdoe5Z36xW@b& z@`VhNWdqd`bA<@b21Bd1J?AtzD8UNoSbv26JKGp=ZWBVSf56< zW%s=OxR2xA^nsQ&YPdH>lfoV|K%i2b<(Fy6@Z#!0@>&d&QmoR$-SksHSk0{vm!g>4 zqpx1;xMFBa)IP}h5Ml^Iy9+rj2yp0z`rTKo5*dP$g;&jGbk?hT%6?g5A0d5pHI0w#YuFX0SC^Q*Fj$ulcphE}s6R#WB){vT0q8PsM2ZqXJiUZ7ZUD{jTzt+=~GDHL~?Vx@Rnthjq{0>K@MdywEx zaDwa2ch0%@4!`m*lbO6^_I~zSwk6yCyqA6gL}DJ&qWTiGZ+-^A|5MpVv+}O+mya`* zGi)_fpQn1Q#alzPTUrK*ZPaz06od#cZ}aV38A#`JZ)Jlb#TiMP!>Q|zr7gL%{YV+Xi>YIxjkG5$R!FOy4r)P7p|^!G1o zDGtD_+_x*5Yc}F_&Wyk;X!;F38Y@N}|JLwaMh4aC*P6dR%E0%jPOJdT+Ti!BtW~RJ z5FMwZ*x&`(+46@fDeoM+@^~gq*S;S!VT9-_2B`TU$$x{(c!%On-L_m{l_{*qU#!YH z_GRr}KptEP6&=(IKZe3OT|V_u7KrB3mZvk}?xgO>sTL**#t5RFQXm5xY`JxP)+pR+ zU^P9uDQ-P4SXE&hev-NZ`RC8lPD(Y-z*-heAVYsp_%bVSw{IYrSV5>iSFpo|!I&x} zb3r*`uy;qJ_t`$2^-0H;i|Vi7Ds9#78V6LTOGA&w)daedk_F`yjSnj?xT+h>ufCb& z1v4GVZ%6MAL;;0}-1ac;E5OD8k8WaV&grl_j)4xDBfEU|FCJ!k*bT?hg4@yO<5ha> zOBzQMrxUei!wzeCcH8>+?5=LNRwt64`~m*%(7LP=QuRUZ(tBMkve>wW5juj-YOZ$k 
ze@pQ_|A^pGkLr|N2KEHo45`@+>Ahroy7}81M~z1Hij_B0t|YdI#CM?OwPDcK_q-Djbj#oP zOlTtN{IFJReWN{&Q%HT{kCFEUJs-Wz;r4D;=2M$sHtui-yZEx`eAdQlctpXT@Q5ma z;h%I)Y=)5Xh}jhHuxB~D$l;Uvh{vBf*cPVG$uOgfV>QPNRnrq_d1&-b(0{@sD@Rt^ zT^jYt-`FZCX9qqBP|OTatS&n43tCAQ9cpH7XYXo3ZfX&G-FvbO>_$;bFv-pF0`jXp z3n{$`0I(#5eaFa6TR=Yy&L;n3GHhu7f0l)184b`+H^PH9N6fSt^YMGNrNBe?_Fwwi zUo&&21&ljaE(2yKM#0Z3HIQM2*Oc`~dt^3)PNFLPH&#V?pbg0L_1n(trs|pMUe;CR z|I4T)0Ke`7B(6DenH?=2{W_X1zBo^dDiLtRMvBW`|Jo^}%s0d}pvi3xq_MTE1G&*R zv8R)BLRGZXB<>%|6xcRqxxkC~q_JD>V#eAg_oj}vTS%)7?^Aebp}FRLYHzNl zpSK+nNK-fyJ$Gc2<@jMUw2dSn{Gq4%?$6mCSnL{qH1&5`O6FK#v#(&z-yynaBFgpN zf<@Y0`ZalNo3pYP_s$IrgosJ}X={KRGkdc7t!q!CWJ}@8=JElWF!ZHjL7&Y26pG4< z58%qWzfENn-YdeZ80x zA|J{-XJ2G~WN%wku~aGil-_>YQ;o!hy00?C7>Bw-ZDhoCrGPB026 zek5iB#MxJDY_`B498y-ngYHe#JA2d-+^wfEV^n*G3HL(l$}%%#P_eyXpv>`)$ZRzRFiT3r z5P&+T-rddvp7x9LlxnnE(EsRmu4t^AxRqV^Dm)FlA}2xwW<>Vo`*wcu`JwGV4C4?~ z_z5?odyb!Fk^olP^k{y96pxbL|c2a*aVI( z!>^~^Ee*FTf}=xH`#RkBtR7ghh;BW3<%X5KZ%jgti0FyRdZ$rq3SZ*`ot&E zM}1kj=v_lq(^*V2oWGz|O4X_qNs<1LDMMaRwWOi2e&bKs=chJ3xi-riF%e1zM=I*I(&#V)02?qe@ zwe%_keuoGq7cen7^a|Lm)z14m7GIRlFSCnG8%3Z@8=YwIPbNZyP1eSHrwG*v=c(*e z$8=aup)vQPtrkZ58hLw-@>W&-?HIfZX-r4Fr#u&!ZjrS{mK?Y`l$gzL+w&d)X-q%T z>_zIlYQKlSDzw(T1I-{8zjZUxNbW|pnXj7xS$Z|;Jv$(8Z(YQ=;s+F0b`r{k*v0+9 zADE{#Zc=QqH{`;@7|_!5;HNZHzXDV`Z@E^@aftnFcQgS&*piwr4>||q?#({~Gf%dt$rnLqY<=KG`lQ=hk`NJgxSZTh{g%u61F+t-VCvT@$X2pp*)0yPS z$)|||8#tA_{-%mCTjD*)m!Bc*U}xBy=8szS_t*epeF!;ulBY%HpYBt?Kd)PkvG|}e zCrCmOIkWLVkKDT%pHXK>N-|Q@f?|557o#oYDz>Z5i(bF2Tv52&%N=!5Y(MVk#83;e z?7tc+k+^iwvmCC~Oq;U>irsfFXP0`7MM^k$9XF9n%1lh?)rLg~w2x9kIM z4#dp{hL+@v{x}e?56m|m+XNB|{Sg`Nzzmesat_FzYZGfLd(^a5Om}*QkiPXF#1)7; z)L#&FEEu~rfjpsqYzf88>XyqUc_a&Ye*OKiQ|S#uPxOOnxAI5-Z)4t~isq{CWunpEwG z3w?j9IO{lZD({h*76Yb?bJD0-+c<22o$V|J7-T1&Y4HqOZ3EA_Nz-ZtQ@zUWM^9lBV#+S(B|05 znIzDd)-zS+_alw-r?UOZzohs4v&N_P9ApV!T{9TLokd;{0QGG~&a(5@8rhmlOV0Q87uX-^<{)d$Q zwT}hC{l?TaLrj`tm=EaB5mfXF#w*{?#kO2*lQ~;ePuKJ(Agu zuw{iVjk^o~#82%s;=P0KttVg2Lj!gR}HY}%Cud2Z&P 
z1b9|g})vKO9hD%BV_rL!N6OKoRLakIVG8~XD#E8s>3Q(jkfCrJZOy*3j z)Ra;oh9_s`ss@kAb9~7%LO_a~@ET3gvAJq$^V-x)mHA07>+*m`k_B&Ds4d87S>rm- z_e+!7gs}HqnIS+tFK~bb)yEJd`j|c0<;a=iy#kFV{Bm|f#%8RP{FDDw#I#o)qGw+^ zB%D6!2o~d-9Wu~;@~Yt1(dE42+(&Ab?zP0_igso0U+J?7yO2ZR*MVf%TkD_LRZJf5 z7!ktc9X}@P(HC>w@TnRk+R(NZt9_|(ZB$YiZ?H8-!|HL&lZ7T9Z~hnt&k zXSgY?!Ru_To8PprVNzzwy7@&+(p2`7H$k}9rWLs>HnBt8I}+mg=s)27##-E=sEdCN zYic6_6So1g3Gs8{R){ur07>+ZoigSBB<@yL`=a%fftl*gfIN-w7AgJy`*pU-wt%D5 z2{)=A@Bgi0LWf#MMdf?a@Ppp6ByL6d>Kc65G^!5UQgL|eB#o}Eo8AJ}H2HBokmNLt zWM+A6_{z6IwBV3B->OR_!NI#i1sG?;WE>n5_YY%W`7X0`_ zQ{K9iE|o4lWlxcMMR^=^^8-8LQ5vbYle^>g=Hd|EXy5NCo=&DNaBpHgr?Q1V@LTIb z6Vkkg`Zo|7o5M?;xQ@MC+ce2?EYTvMrPNmaeH)7K1_zFSUk{H-H!yItT-+{d9@W_>He&T4}C1xhLR&P*~4J zdCsjvoA-%@2j?*SwY>w!eEl#md=kG&A0k40$5G1IeeF!;$Hhe#I`7e>OC4##mWf3;vOK5x4wPiveEv35h`$@4fL&k?K#cy z`sC5LLjB88GE(%559SUTHp3|fON=$$L~k`KS494O;6%{6{zbr<&s9r@+0q5)lShF1 z{uqj9uGakrsc7im9ayI}pEjFO38dkZ*Ugh71k@8qz5v-c`vBkoyAc`2{}?hp58n?DEMCNrl&V%@SP zxT6Bv^Bz^e!~hP`6OGQXEdRm?72&AJdyrM7ej^g3aXa>b(Tj|RJ%$E(*kC{J>f%zE z3O7xD8}A0bJR-s*UZJ4(%rRsp^F2<8S&n&R!fvdJ%%#{9nox=Gib5VvN8yu&I*K{}`m;%y{%ZMCydol5cbJEk9tr|CZg2H$SCI8mV`}Op@qWMA} z`4D&Y=U?zQmoV`&@P^RRERWom*t1>b^mHMvHL_-Qaaz{Zk4#^;E*3F%rwC;YqAjC& zGrwW!P=Te<`bqZlA0lS35S~g;vloZVJCEGlpcQR_zRIv7XT#~z1rfeXEcViiZYv~Z zQmd2X+jj%Td_FSzV|LY8*Otp7Zo8O0hewg!RdltBy}jk-enT(U34>XF1`AnI_uuKY zvKj;aSiH2!VUQg<8a zDT8%PMxRQUH0z;Afe{7GMl2wRc+q)4tAqvxef+qz*NT3JdUOs`iYv{co{@SW))_7J zDU-RPjY4rMVQl`cy4mFw!1q&4V820cSR~%8z(_I<7f26V!m&mKc;>hEGO(xd8EuOuP$K8LUgw1OcxYbh& zqL#UACMsPB@1pCK`n)XAJBePmjIaf~llzJ(>KgZKP0xty%<{-$=(gy0mA0g=`6`7c z2=!WrG9s!L+coMxJABEz*eRdO@Mbq)yy*EEl1ylptOKGpw#&w{*Uj9D0q8E6XuTRF z%Do=hSH_ou@w*5F6Zc7*<8-f71fBQ?7K~OKbK2R>Yu7cq8pfN%U{z-e4RN~K{kW%` zdrN;W#)6B@|K!e&rz8-7Nl;A#Mv5J>{Rfe$?pN98)n3>?-hU>3T?^WnTn+CNQ>R<- z3(8T{&$(sk{J2lVcrF+y`A4Lsq2p3H+qv3vZi$ebSTM;X@r>@4)1o=C+dwqDAmxMI z&X9kcDg{$YcRJb=)#^|vINq$;rPFT`-y=AZD+>D|2}Fv!72FN zr(P9V|Nj&A^QHp@cgVVR?Z@t+%a-q3PcnW;FEr!Be%?}pgZSc}bi`xU^jji}`YhRU 
zOMeWSYdt6htY(QNY}KE^HH&9Dqk3j+Cr+AUi2>^AKjd6Xlv2Hw#B z8%b?-seu0-0-(%n#6)AXWbT~sBZbfH35LIb++o-EV?_j5saTq zo$c$wlkYJ`#BMMIe4Ahz%00Hf%BP`ySon1ngQe^E3Q*Ci03~xSd9Zhoaq1Hz#9&&H zsf(pxwx`SW2t9O+9FSXT9Y`IR9*fC->?ETib~20~ig6?4@Q!T5%=0@wP5Gt!)?PvC zu5PhsgkHuho>|1aZ8m*gPC2ZElb2t!%1Jq>)&_=x6GL^~rU%V6(CC@$JScZp*h)(n z$p=~L3tceRve9%(x8ni8xOXhC(%<4ZpNB}9iJpI!f^y?Aab;>bkWE79`tJt=Z0akT z1XX}=9^ObN&jo`AdQSH^z8k&&NUTP7_9Mxvg6n#=3LBcT^K*)f1)9@ixk(8%iVw*zs!{uOYh7Y6q0J_p{5y;{$yD^?`6+{_^qbP6D9_RP((Sl|nS!!A6ob&N(@9$% zV7Q<(icff7)-0`!X%+EU6r02MgE4NY%sT(xW#nK^D@%zcjJ7cgLn{OPNFUwvY&od1wx%`QhE@F_oX@s3-N4P4ZXf!WqT^IE} zc4Nyx2W2Y&ES#)_7SCuOfJ`y>gAWd{C!SARa&^2nS|SVson^9cDh)+%#1u#+=1CMV0Bn&(UWuo~IMpC#98ewOq zRNUm)eWTJ$GA2FC2NxT7c0Ql1+sa+|U3Ry9=iFTab-YjiW1LyIu*(R^EOO!bSUZqb zWY#l`F2H|ST1wH((c!oKFR5k=X~NCfp(PB58pn{b7uE5UzW5IFILqnOWfOS1O@zPS zO=NEFFewYemZO0s`XKnOuz#!B2-u7iac=2-{3mnwrUu5{S!9#h;IyaOTln~wyvY@q z_#}p^CicoP!FtyuH-Yc9^)1uC^Fu~XU*Y3twye+t^bJygW{ud&(9rof5o2w@t+E#$ z*tvN%Y!_+$a_Fg}P~>=dcw%<@ci{o6I< zthUy#4+9v~S7D4J)pa|-g86-DS2+bAF3~NPs#~h}c?G^^KCi7moH^aiU}owxFuwl7 zG2kU#?pS}?VE7*o*#9KAo0xr(VY`~c?hb_@F(kB)SDWLgH!4&h(A9>u06eNy zNoH?7BSJE4I4JBotou;mr(1**Z?Jo#JJdeg(6gDNi}x0~V8~`lo$#k5b9_m=p(V(> z#oYlB*+Cf@zCe)r{NcyLr@t-AG%HrRUSh0!Rm803b&e^A>Bb9-U+l#qTzjJ)$sXwy1y(1a>)cL9Qc{Iuwe!TaoC6(_BAUinbF zSPSIM$BlDl)_!f)8#O>Fx6|{_p-)zf4*3?Q(+C;wE5DTULldj!gW0}dsEK$)%J7>l z|C_Str(`Wm9fb(~Yu!b-5S|)|7-?qH=L`0Vjm>Y9Fg`47pnWxShFIsCw)~rqtOA=>;C%SGRn7 z$UzE*3X%`Z;EycLj(qJWr?g7Xu~F7ISQ6SfY$w<;q_1XM7}1wXL&X~M)(29%a#S;z zz41Ir1%n(o6azSDVR?|ygJ2d?c&|ScPKXnOU}BN(i&AW&G1fv0Rq@)+sw}vm(;G}5 zY0^o1$A5+`DzBvZ6sE<>DO`x$A4KG4QTuRiacVzg;6>~eAHZxiN6p5#A=Ef%|JEq0 z4F1_8dlFZ9{08ZOXrIxN6T&!a+zmI4`u3p3!i~b=| z5CKJ9Zhxw&N_4en>T)b(9-V4ax~k~W06X8#^qbkZE|n%mYpZyf?yCRW-Fc1cvw3@d z?KTW0IEXFm+%MlgNu>v>IkZZd-!1h?g`Av#EMluBRjK+rtCR?k>edysL-s8@Yk@_4 zqTi}V2}%EJ{|8}hGH79)+?E7DMAWY9R8_|uNt`Gr=5o^=T{-cyXt(h90n8DhWH%7j zmmpInb@^`_9QMHjMm#077e-gKgM>sZOhW2%xj!CM0OTu<`TZXXuL{;e^}@Zij}FEb 
z3Z9jg{3|dQEE$$9TKFkd9nJkp?8!t)V-*b3G@UtcoDDybEP1Xqw+`Ysbjkz2X|`~1 zpqY>~jK$&HY&j))0SAY z>w7@ke%m=y#-Gk%A8#-T=$%C@JQ%P*-SJ&U!sP>gepd#gD%~{6Fs!a;XnP+cUzPxO(YpKo+JrZsxr7 zqsR=~kEdl9>Oh8gO5VN+63T6FdruZ_~f-JmGh{ zz54j=yPQvQtS2WB+Yo5(|HKmi`^$I}2Mc~mc98hmzxQ-G#Jvy670(DF-j)~MSzk1k z#NANXDHUqgDc2d%tI-WAZEG8y5!>>_jqr08GUl+#tVYD#!*qBHLgXg?XUT#OaYU@eK!@~d>QWYPK_)Yv zL4L|M!&KK=?JEyXYr>n7GH*Np<*k0PiN)@>o!)H|^0r520BgNHCq@_+)dN(N3&(XR z?ED^!##X2?2AVk)W%z{U9 z5t`=6YLJmQyI}q4B$F4IEOop%)cQu^iHS`f%Gw?ti7(ZL4yWzAL<~}XGWGk(7@GGX zUo_1Aq512Rt^)6e7O&pV*S2!n$=;yszOa(-nT$X&Gv*M3x`*Sj3VTd0--=$5=^%Xe z9`gkpOcWFSYq_&|PiR<$fv>9v7)yrO_g2>0Ej2~Ky7>nwRJ=M_?^$ZgOw}XQO_D^@ zL!5O{AIahw;R;iPoFCHXJ}P7iFwOADHjL zWtNtJHe1_R?d?R!kUgshfq%Uo^;9pAV*2wh@`8V~{MZ=W1SlXU4h&4B8^#wXfo%zC&MBeXU&`xx3PS63fn+u;7~*VhCZXj*p}H7czqv;e|GZHq z0CUEdu)=v5IKh%FwoGVB*p-Qi^J==tA)kB>niI~iw0L`34U> zSHBpvf_+R35T+3(ePnntl>&&|{gu}&EHpLamJ+2uiKV>q*Z|Biom;>h*x z@@m>r`^WaYxLFu$TJZQ8hWG~gK4GB#qrvpDiHlCv62ReP3}P|~pM#|tfMd=KLVOj3 zI2)c#d+tH-25kS^FjbU(5py?R7wg@3{dtAw>X|EdV}T)d$C10lK#^OYQLg|b{rYo9 zQQ>9v_dX*g8|`5@i{-PE%@Vth(yHJ`=GY%dWNUtk>*2(y z7UQMs2WLxk#V#~ckLL{`(5RD}+u*L%r<=3-9?VJOe4_@JJIs)2%D|nzoc&>mhoLG1 zm&LoDpnyJ{7m@&IVsQ7nuET8b>(l;~6A#ek{`NIvs4MVB{nbz8v0veY>~*HgR-kLU zt)0T8^gqSpe;QqL2<}r0)$>K}VxWL|_f4h;?DrU|I0t>xx0jwts)WqP;ei_$_kKCG zmiJJZ7OQ*e{kA?lJNZ%^lg)Y#eWyqW?dTGVd7}d?lAR6U<+2*bI8}QU?IdbsU9Zi~ywL}uR9Jli zSy7t{Mzm^B7T3u6tPlaV*)+9956g`^{Cwi_WjzObRD zi|;q~JR(<_RV8i4y?;=Zk=efdv=0AnBs(qXy}CtH!<86AZ@h5pWLlAV*#7-e<`8XI z71Uje8rd3s6k+%zGwkY#Q9B7DH!QROVDB7$U(VnFHZ=-1vPuf*I8GLeaLfxlEptAFv=Hms zZ$-6&8xvGD{`NNXYEES-ApF$*Nm{6wYLK5FL8~?_-6-oPrcFh+%bfOJ-^~P{H<%^| zjc>48))I?1M4v@nVnZ8D?VGVEG#22CpiJ=4n`QNk$;bcu?!zu|42el@N@HriN;J?r zFG60(SU(`L7p>MDkF)0M@2UtSeM{5L?6Qeg=vRs!(&APld$kyz{x;To7*6KhrOo{5(7-0n=eyyMYBNt~37fY@M(HsMspH;$C zhIu%UO|{O7>6W zPk$uH7S|Y6_w8feWnu}to`eWGpE43dx-z%q~D&jH+iwiqCXYX%%YBQz{i@h9oJ7;>j5a&_}G6-;NLRN0@CnmAKBqs{Vghqq) z^ra8ibdDrTXPbjv4q`k{iFv!=@yMvh4$croRwg7T=cALlr-pcIkBxhxgh*{iOZnM! 
z(3}hE`ha;4zf;nAaa>`k)8)rI?$fVM06tU{#}1C*8PfDGi(lxsOU9biI9tV*BAuSe z0;wKSH~|)xSvkI2=*|2U3rCb8#TB;i9kkzLDms{Ho=LoRnb!@hgWI+_ubu8sjroC) z;2M#GrJ>wcP%4W~ZQB3*#7>EToV~6e1%bZi-XFedoh2?Nm;CpXP02%@Gk#e~T(W(= zf3+VM5Osi~HlD&Th}O#YJE}YGVkKNOyKEQMVB*cLEpV83?WwK!VGeK3ODI+25*``nk(*C{x?sS&acd#;vjxu;m)9?4RFO$xCHa0zJZ%qPBy?z)v&`h3Uy|9K+= zon+2sByP4PdSZWse4@)<4E*#63^8ztz{u&2wt!t6qYsBf?fhGh?XUVAmi@cfeb^pt zcTbxvd(Feg0?qMELD_iklAkHrF=u#*mT5z5Or9k+@7E&2F(4;ZhG%7O2b0q7Ui_A* zj?d%aj)>Gd4Xdvy-<|oE>SLJCGsDAY#sjy1YM1vbv_K>qtkeFtQebPujf$8%0zVa9 z+y}K%BHp7@Z_EKCH`P6ms#d_#>qxazGRg56qjR6Xs7IRilHYOfVvp6gLgt>_NMfts zAqjlS?`04-dAD+%TaKkSvmUr~pOG8Q7AlBR@mWb%6b8`VA1ag|haKiaZPSEAHUf@i zOU(zug5{e1Ef^W4*XhBGtqcdAOkFMu#{?j}8wXZmBMI5TJUiUxl& zOfd`HyRe~;pt$?H0RU}%k*QAgC z#CyL1=T{rbIgt0yqi?m_X9)0ZDr}l{q%VHEfJL1L4g7iy9s87qNi~nHWpaPQLU)l) z$DzGEGA;1C+%BI<>n&8QG0$@8A5y zIjNWZq_M4o@vHOq9jU5br#AD>+G6K4jz3eB@CIV-qP#Fqjei!0Nn=-juRjpd>8Z`T zu)K9+Zv35KK+n60|GdI&!|gSI5*GM0NB3{&nK-nUZZk8QpOt>FK*9qtpEm~oU=W0| zr9MbHFt@6<5c~||#84noch>XRRT@Rvp00IhIE{u0zzeyy{Lh43CZUqY*>Qu&Yd#je>$?%0(mvs)mD+tc6?faVFJ{kUw0AFb7Z}_f+hnw; z6Z9@Z0I#3|U8!Uo#7ZZIGp@s^6x}o!iGrPGg}+B0gM95}I-@cq{rRe=HyosBdLgOp z^rI%)XYN+0+HFSYiA(R*q`au^?>obu#+&kW!%Y%h9LJ>&mq>JP{er~3cSmUZHt|ku zW<5aSpl^_mBL?R5(H{?nipc@( zo2S|usRw6~i{?M1i}c6#%6)L|tbKF70fUcH+gdL9pkC$mTH@ic*$$rJw_6cYuO zp@Zt}G)A%Ydf`8+klLQBU99iqxL&v0e~~A7NCc9=5gY4A$_c@XTiQXA?y+6N1FHc; zr=zgWf3eur~wk{%l?3E+xmpP33k<4Tb)Eca>+BD1fafWLb`fN|p4 z_RSXU1^2|s8CTH9*}XNtwQ+ZR3GCAp!M@F!MjHu{?@ag+lPCbH{5y6`bR`bbZ^taAbZ2+1T-Uty&FS_gUT#-pg%y z+#c|>YVLe|WShGEw^2;Ik|FVbOB1cT0k6W`P5wa)sODR!T{;4%zoxg(v(=)? 
zdE3@Fg>+m|3-+Nlc!&Q z2^d?3814OQfoQrxBR9@+{2RoAhVA4JI03y9S>s}Reb>9oG56i(PkZ*9&Mg^G^JcAP zyWb6kEoL8W@tdD$R()0~DG7u_AJ&Rb>^V3s^-f|Ym9DePsIFX5(47+m?9TO5gnT_zUgXne}97Nb__H z@{*IQ1~(eQroh*!RT%y8KTYy$HG-|;aMz0}xV}2tY3KPLlGRjg5%ejGzI=GZNH9=27F-P@_CR2d2^a=VfQQdq@t8K^k(#R ztaI`!UY6%H6IaZ2Br{5E`T=zph%361wZzv=2;HTWCUl zvPW2<-0I4?mYMJLDcQW|6syKks?uneGEZk@($-OOvfYpk7=cEi=qUzna!D1mM0W6y zu`}v@$cmoEaGrm!W0YEXW(dF7VbcGVSM)`^kWhM9F_0!1A9-o=r`d6k_kt@_;Pe^% zfGVLLVM`mRo6@t4c*MKK0`DU%BV2#^^U?DK8z@%68Q>H)d^Pz(sR6b?I@Zc;-724r zz93QzvGa6jA4+N)A*2xnMUjbIY(6}!A`gfdn?kqwp|>p*oc%JjAO+~raGl`ms{PXo zbp$*dG4apY?D5uRj8qWQ3n*>ZJrq?Nt8ItX*E4l$788|E8ocKJw4w|ni)}lNSX6hY zjIFUVm-FoBw#dJ!FWL9{0@zHK^pp(^Eb+RiH6&i*ZK^1&dC+LGvINiFHONm9u?@Q_ z65C*u;MFc4k117b%Q_I%eXadNLo|-yNi?q3kGjs(%5I{Jit}xZEAzMgHQdH%GHQwj+%&kS; zdoTatP{pbfjTg6I0q@W!PUwdeZrvczxK;Yo81RNX(oi0{T1_bCvy#8~9F(vbQ96&x z;T`U53FM?rZb;gLf?{cp?1XFhm1G>(G$ z+(dv-y>)c+P<_Dp!%swb6umV8uUE;gd4z^6eYEy~E zv)4|Euj`iyE={#swHe@m{S(;hgGO4TQHwcE&gY5 zIG#Wa8E(yeDLM(+7J$G73%|xE)Q*SG9=DxSR9`N_1A@&C1^T>vi~$Ph8XXPCN-xEr0V{?{!;%hO1dg)tVV9-r`z;Jrim(?x-UTcSH<{ zQDk2$f?66;S3_AzG=B2N%H1Ay0aB{^PmgOYv{z+(6EsrP5*T?Zp>i=U9^RYVOch3z zAN3F)?97vu_!F)5^ESfJrNn!!yuWWhu!ZAlFB}uvaEi3GM$2*OuC*-7u_ey?viSG& ze*=2OIhyb46-Hxccr}B2NZ%H}sk3&q(a{bPb|k^u7eBe%2g%6HyCe9`tl^LP1@S`ZR-WU>wu zi+IA&4mvOXB~5tDwb}3W!MS#E7^f<5sC9&v*C&I9dLDuTf$d8wiG85XF~$8EUaP{Se#}Z0W%2chkJq7Gn6>(R34I|PZs2ur+*9L zpiQxQU1+;lNO3qU*)JglO~zmw#|~Av(f*UA*=c*ern|vXqBhB+i5r|@ruw1xKASOg zut4jk8`>Y%!{WfPMz@EoCp|?t`fKY8bQ}3BelEnsOEo!!kF@R2&%6!m=|bVeRd&L* zIP7I7->6pfRx^O^svH>+0}~bQjWrOrY%+AP7pXmwacvDSgJzEtl-64ElfsQCjT{8&uk!7U?|d^|Erc!2W@V zGJurI&V;RzkZA4%5Jiedz?*pMsQJwZ?}%~QIT9u7{Pv0!&sqUSP+1lI zP>R0I(XSDF@7ChFOBhqifuWmSxbu-oLiDZbW2>_cYvr_#h z$n-T8&@%3zm+cAc{Hd-RTyT*$?zKBK^Ob>;B_*~~jpArga}|5DAKOS9R#|$VuuI|= z#ZnyO6=P!zBn5-HJxkjCg2v=YqcY@a?5GSL<6VHkAzwPgPYb@%mhWLH30>Sp{z|tw z+(K^>Yx@i#H!rlMbJY}ZbsNAFe)m?U&Fg5HLHBT<@#U2!FgU>DtYB~G`ohhYyi7C7 ziHoAS)Hur|V0og$iCwW8nFIQkqq-^N8I)h1Yxc$4*MurbW>8JNOmTuMyCR&!I8mgv;Rjs@L0{bMNjLPmFW? 
zz$5-?2xpVKD|Ec^7K6tDX6Rc>Rw^()&1$=Aq^gB2Yz$C z40r{fhTOqhcg+^>KJnMCXMif@3S=dyG0f&{+y&=6hRlsI=CKidciFYn+`mRVt0+g&wI2fUY_QOO{8*T^>EXKOA%J#$;?5@9&gChVS zr;${pyQho4Z(!7!$c_oblw@D>gZ&&y=HtJ=^|6xiwy*i)`yP|x=s0;kmXK5TH&oIf z6a__OYVZm6s3iuwQq=+HwDsIS1~IChNms+Zd~%dlSopw95!Rfx) z8=_>Yfmj&5@$?f~63L>@gh`_<-*YdbF}y}`Z*A7RUL^6XpfZ+9aTJ7lT?gBCcx`<0 zIaj|xl&sBACc1y~2Jd|s-%s=z&6_&J<8C>$-$d|e`6c5TtZlq?rtYjn4^gz)6atji z#)P8Q7uMNCGUlkPwxVQU)TQt`Gwo|6ctu7DQi1$>E5a-e(XgTm!&7iMpbSbi3U!IB zdSXIAlI6I5GA|%1jgP8z=q^g=iza0RX^r-HE@-_uX*R()4extb@Kwb8^5aBXgOolM zz0aztk*)`w7Nu+9UzSQS*wPp~kL!AbGy#oGS-Zmp0Y*HA661v-c2CBsW!)!TGThcp zmiQ0S^eJb3;Y^?A`C98rM>SX_`;V1| zba0Nh0eG2~at725V-gg-`F_3&q|ED=8cv2^NmYwm(hOxOP&Y4cNG*kR6Lx;zcRL#r z*3cHW8d1^tswYza^yQ-&>4RYYXE9uf_uCPEky`HtW|cf|USi+H@j1LB5=1Uk!0XSD z(FlNx$)tJ&l&5i)dz0d9BKbf0c%A6!8ezKH>=D^+&8n!iR|*f>2gvO%Y!Fr5cv2lz zgCwrFj>^njx!(Gj7UF8jHO&ZzB1Z8cjnWcOQbH6wCJCKXaEjBxu3J2ohT*%bpB2Xl zwW*NeuVt?b*y6m*F#1dFhsx?*qFAefNU9(O${Rt}FO^63oL9@Ui`J$*+V{FGOlAx1 zA11HZ3?Nqr-QbjhYztlO8VDQJvZ@#(rQOM}134;7c!S`cjq7uh*md=-QHQ3Fk1VpR z?W+7Re0}2diM(xVlKswPxC_M#H_S;=b#~X`)v^7}OtQ>vlk3@9v~hJ_BXX0Cx7%e~ zx6khk6Gk77a|Hbf?{g51$2|-l>tC*@4Piun`B6)N3N6b-wzOdtNiVC$}~dgz>u`*c7+z@!+MtkFthnM9p{Nz zT>TvU)I7dAb^R@%$IfF9ikcavIGLz2#hpMv4#oetb5Zv8e0~Oq*xVu{@P@Dh>5WEz3cUazccYm|{&v)j|+{u4=?zzuB=W|jR z&hZ29%=T=$@1<7*KRZe367lvb0M>?wWp+gu9+$d}@trk>3iF=7uh!0}J$GY|tx%>M zM8Kw3NQU5q_56=16%k!NlndMC@5zp;8|0}twb@UJ_(ISNhWLU>;o*Ds0#idi@GE}o z#6;OO=SrcJ{bcM9z)hQpw|eod-*j7Ly9B_@L|&Q1;rT;G6mLP5x*W7>mHJ!@pWv0H zuD@6GlcGs10OVkYoY5QAF=K$WooCp@b~Aw_N*DjS*01nG&RB=y z`gK=}evrkeF}%8EQQ_P5(g=s~Z@>t8Gg-~SU{gSx5{cMsSMknC)^r^}zk&{%EaA+t zrJf^HeVLJI^rchNLfDhx@>931Mw?a9Zkb9_38(K`aTGaktAfD?H*UJd%x2Bph4r9} zVMC)vn$jAbBwAa37yL-JDD(L}!HRMWdf3orA2%7P08nYq_rZED+C#>73Ov++%AN_s z`8FUf_}=)sPTOz?uIWA^O9f>xs+iZ`{_dS3(MZ(9Srb^fmg_hfsyhR>rpatF`a4<1 zu1UV1?u}xJKJw}~Y)|l#3S%ApuHfFq2^7&)JP`JMoFF#NihnsbHV7G7$J+iI;|C0X zDTGA(+VEOsx43SgJ;r4)nG}nZm<1X9`Y3&#SDA`$Z_-};Dvj#pdFOeXMIfP$S1m}! 
zem)<`_WXA@U4r1n62f0WBVi%aWD$|I1FR4a#K;XANdb$y2GNwayW^FeYArUS_(aCL?T#B!=oeR(80qPZtRwO=lq|6g@@W zkO&;O;`bXSX5T!2b7~+gleJJ7j$oj?h{#OiW+BJcN42Rk=lmezE1KO7!*R*|>@n^F zBa7zNNFmgU?z*b{&hq{(m?q^TCt*(BH9r)`%i<+l%e`gK=b`6<@tyIqCj5QQ3H`;z zjrNr0KG$b-4Fph=(7H&cto1G4ibds|tykqehFCrW-2JyAWc=;J9I(19nP$xO(n_*4jy+Tx{G=Md*luKrDH5>f)Kszjx6WaH&z2}#t3?0Yhw z9^=VDlx7lS3cu5$4%%?)kY}{-36LT*@%VF%%OiM8gksTavPbD>AJ z?&bY|4{R!4_dAB2Zu3>v*cAUUYBvW>xyB_qAmmpnwtMkZdmbPGFNh=nVUV{m*ElBN z^}#Wr1@`>FYvY028%bAx;%@Bu6+i7eOKMExe^NCDLA2t+9#43eoiA>d)fGbHDl+d` ziD_DofbV4fK^r}HlR|T3Clg-XqmsgY9m^(-hxBe%_nu$w_wRkl_B!t8D&gv&+ws+l z@vAZZ$!IR64zK6x)Wa;#gP_Nu=dq{rAZsg&d%wfk7b5o667#|F^Jvi8=emlMKK#Hb z$&(QL7aU0c>1o}D(z+d$dSxfA`qYxob8cXd#3Q`q(RZE(fgZFKb~RnXJ!F6Lxet4v z3}F3Vhau(4h4oV?dY#n$zrrfk|4BC1;D6-Me^3g#xA4E^r#MM;Qu}yx!-|B#J>=us zD!lHUv*Xh;7h}%QhDWIqm-C#2{kOcL=<-}dj4D1LRZ!N(bAz$_JrnBdv<4RAl<$|= z6sxRi<~}GR0_-DO_BuEY%_H-j`puVbfg7^WjM4c!892$SeV;ep3nt7kc2W+VA%pAZ z$(0~!qf`^Gt~GI&T>l9bvnRIF&p(t5nnNv(QaFL{fMKU@NYrb*uY`sH{lBcHHg4CM z04IY~@wCXGnM6mN4GKyse9HT_Pk62<1_rv-cinf>D$LvS&#Oy*xqe5FM|zh!pURPZ zCjBNC>}r~>4DBXJt_<9&MM|(Kircd6P}hb+?=>Xt;#~VEB0(KrD=dY4A_m9ReNGZz z&3KCDw69qei?gPEmoZOYcq^X$W8Bbiu5nLNdr`!KHuE@L_jAgC8FG?3H3GkE_%~PC z(Wk1Tl6Dc+)#2?W4xEdfxWtUlvGZAwW!QV2f7Xswj<;`yfiAYvqJp)%*MU06OgbMwBSHLYF* z8G`qPHcX9Sf07+?!j?O1DE*AllF#=m2|xF}PjXxv+e6M8HFbm3jpI)2QC9HKF3;9y zWlIw-;@O6TJZY!n&ejqPgG^n;Bdn7y5)R?YRRHEDZj}3Sqp%APV_t)Yx(4hs@fR!eu;Ae)WDy?c#V?ke^$w=U)}sJ*xWn|sjBwSqFQg{M zUHlW^v&NT<9R0>r0dtzSDleE3mpC2*a76a**wCp2I4*2`DgmadY@y@HZIq922FGZ$ zIQ%T^;3B|6Wzi2938!S(@txGSQl&qU{U$BhDCkWJZji-07l4>qjKOpS zkEQ?+0UBUH5jzlL#xHFobRCZ;VLMiiou$erDu`NngLg7tgpwEZx%HN6Fa{Cc%O*%= zI}TksYBKh2;n3mKTK1&lA_5~*;R~!1qS*$`iKu4u2Rh}^pUz7z4AKuXP^>eM6|s?|YN<6C9oKV#&toE`k_Oby3^ zyozDYbp9pCKmGB-*-gVyHQo#9oxGpS>!1(9kzVaP<%x3cBACu5ioPp1f#=KKNL;$k z)`wFI={Gsl@S!264bi#8YUEHJGjegvT%IMNX%DbcLZ6$@@9o4wGqq*pl6`R_GvY#} zB*u1o6cmeYF|R9=c^5;*uwOh9T<0OeKL}QK3-$v%B7=+Nt8`rsneR|%;)aH3>};y2 
z@h&B;RA$5uP-yXn&}WF}F=|dl;($2#y&82@x^qqb^8hB#jRh2PdjxNCO%d!mjxx$X!jT&LkjHZ1Fj062Ld+( zdMYnjiaaB?dZ+mAcNC=@)r=9%ifk>Fz?#M|-AT!Z-NaU=J%f(|`rVD|n{TRL)%XqV zA2gZWB6d{o(W=A}0~ZRf^3CqliO;8H3!p!wk+>zCz4yk0&L$UHXX+PwT%Yf`LQvj( zq<=YXvQh2mIQGHafboOOfW@)YRTl8iLMWOT%7DYlpitmCJJ<0$@>H6mKE0G-R4xJ5 z3qwzV^!D`XzY!*WDkV-{ZWJ9nX5m}T=4rYk@UYiy`uakk`L{nC2J5YxL=1^-Mi(`6 zlXdtF?2NoKB!5mAFjMK14K|*s>1@5rc70}=Aa-odZaX|cD$4gjVk9rV@aAg}4~^37 zi`9Dg$zfl8hFqUeqFbh28KoPH7{fe13HMa60cHw!kGwe8^Fz~c5ibr5zq}g!Koavb zh&fG}qw;d?B(nqwapgN>;Q_ETGWr>hw1~AxkLxhkH9D93LPTHLMVRk+wq8FyCkC0a z!}t)o?ORj5L)jt5=+vu_bmSV3qI&ebT(*F6 zDe}81+&)sn4;Y{G@GD0vcg!^XPRRO(eUu;82c~FCSa6QxG}eRIVM2oW-OuwyHbFQ@FHKO zE_^pjk%;Y3`C2+RYfw758_rysa+vqhH4HkCO|*A1=WIfM<`{pPy3!u};P7E>R*!On zQsK0o-#K!*a;~LuFH8`l5H|#+xB`uh_L-@6Op_Bb;eM=FNzN5gS#4Rt5d!NE?nX-62WgtVOJ6eWT<$|w!|gY5|Eo^si1wQwHK%F`~B z8bc%Z8dll{pxjp1$dJ2Uc5_{KsVY1n3r;vgZlJdw zHo#t?oW-vuaAaN3&~T-_lH`hQ6p7g|ZEOC1ohE6N%U(ZTKyjz=^pA>tn;Hpd=z)+H z+hmc=Nw4(_X;j-13tBbj03w)lz*WFyof4tSm<(q9(5P+<;xIgB?S5zYw;|#_bi}NT zHatVC36mcL%I37Dw2lb94vO17%P^e%uyk7@>|)?yxQEMh*7!>lOX7>f%8<}3klQr?9=qg4?B&c2F#J-aGEPqUwwhKxe6QCpT70S$@3JVcaEOAl| zo&bm9LBVD>h*EB>>ivI#*{b32ij9lIILJTh2zoSmNT>86v5)IRoCeZfq<76H!9H`vB?1tw+A9{Qy%XH6e z?A|@tEo6f9(Pc`|n} z+Vhv41OE`Ef4x`oAwLlj6yW<+>hUT6@eDn1S@L8!@y2gU95>}d?^zKxcZoAy!xPH7kIXzp^Sw2?wj<>aUw>+w|XkE zdS95oq2)P1m1*g?A-o#q8!j!>oi2HM)8JA+4f)z#Fm6YTx4(Avt6L}kYgl5?AJd#!3ry+pg1+bz@mubm8k?3|L{yxEkUvW{ zl7Z+7h2M*2cp&G^v(`;x^Ws$c`Gy`uS~rHhV~TAW9@dj-6#ZFvvZUngX(kXGp@d!+ zQt(1W9lvU7peQ9B8c&slBTyBcc}b;h3=d1vj!wjpB2&< z!}=B!vKo16{xvz9)21-9Eyl3^_lm8g}3S9-YQBXLdr<}#A+PXnR2sOgYjrzRb-n0pk z!bY;Bt!v``;b-xO+`mp%BF%=rj;K)Jx1sCU9GyG;Fmz7LG12_P;9*|!VTvZ}>SlUr znHkoaZh7e`!bgs57v^=p@{R?%R;Fl|c7xW!kT7Xb+tiJ=56=K0mP`X>yvqP%<)@10 z^ED~zw)8sPU&~=kC@*o^WUO30N%9HX8FPWgase4>r& zcf>sR5n3LN6H^FTY*3l&)Vwr#bQ?EH>u6qwgVh?JRmkL}w65vDQMJs}~Pk~^2#nBmTx9aDDVu2tx)YzOu#TXkPy zU5qaIK4~Zk1jA6lu*A7JF&J&;YgglzV)s|@m5ESLi&2fCZh+*huNLX}pskX;gQ7E5 z*OY9#(U_5ofX7cS_^yGEctbJ#NZRfpu*(q@S9pwj`pAF{sAAbKrF(ekRL5?1)qW!J 
zD+nE8zUg8y-`DUVU3~YS35h(&7u`*g^vL;) zE%hRuwQ`3d)`eFMs4=sX*yk$~GxC*SuAW~>5UEzEf+B<8Sbc~tIq z_DD)Hdz@XGQ-4ydHHO_&a8fqrO&R~9OU$IX_6u#sM$(73zr;=4878d)mV9OPx6|*w zWQlh;$2*AwxMq5iil2)S(vKLz*HL64vAmup5Bb8k&h?>EivOHF+5uXns+;gI;1o|YBszgMn^_XNeW}Rxip$hkkftQT`ozJ97 z|92e!XC`-w*Lu1h--A}wqcGQZM|LRXeO)_;6999}kt_u&fXs&*gC6grlSn5M-Oq{p zqolELCt8D&Wp{<8hTSBc3z3mx2MyMail$-WSJ2k1O0EIL~4Sk1~k@ zDEPKYxyJf@;dgz1jvy&e>UBa4`0-##3S0P2R8zH|p8>PA>o3*bsFNM!(Lqm_vThJ+j}W_97ZZ!;w(qT;80AHIpJ4Pfz^9fX_V*HhY5a^YI~`jIiq?G z>73n#*m3nqS(z%zCr)Tbo%ChVU~AQxE?%7iX!6?Zdgv2Pl!+i32aWUr)9sS ziKf$BEHV*EQ7^>hq^UEFE)0r~ROQsa-Q8eeKX2a_eiD$e!#LOCnI)f~0HZ2v*5Tp6 zQcms54aS#rdMcC!cnGHcIsvNzf&xtBHEFHv(O00_UpkWPWC<7d81BSe70Zm5+(>CK zXY=IQv9o2M(U^eVXUfH72db~1o3*`50kh{eV9c>YoiW#fp!1XyQc;KsOIcQBrk~tS zu+zKi>%yS$=so2UXu%BjQ2H4)Xysx@Ydtad?Usn$Ofc^+Zvk;04IzR@0?m3)dSZ*C zb$m3Lk7}0m^E7tiB%sV3%*GrJfg{RBnG5Pt+)CzZof3T6KFpFsR;|f{XxcQseg=S5 z$5gQx+?b6WKjVeh-bXZaY6F|mN~_a_^N*pS=wTR4#RDbeZh9%DroPN#+j@5O9Crs3 zRK_9p?i=sX_xAYBamsSmbzyVG$!Hn@0_WXPDg5giP;z^iUF?ka$k#)N{knZo()854 zOtAYVr*JnIn8eS(EMeWmgLG=e`jN&N4`FsbwB=S^_G|rlFdcyl541Hx14&X?%S?ii`>*FPghRYpp)&t zLLqh-0}@F$P&t%me_PZTfe2&jx8?KHl1{x8(S2fvShGIh?J~lAUUF*1-&!)x@TLo> z8|mUJS=-HBu@oHjgv}{!6Y%2BoXdlJo$UoLmlO8-de96xLU#HvpUMy#1l4Kh!{utH zZM~Qe)qBQ}UOh^IUpx;st#Gu)DfE6>qO^_VSpGwc(nNW;Yb8c%<=a!D8TRA$+qUKP zI=k?^LoL&Zs$VXe#^CH*si~WJZifYdQc$I(iL+L^%Cd|IQf|SW8){*Pl)DNp`K*Y* zn^`H0@hM}L_(3lHJ*Zl{SIK^SKw5PHk!&!RHz;d-c8C=QrNiN@;Ef6*y^Fc}OZVr@ zz)RPZg;ZiybAt+;T7U>m%!u5R5svQndLTtD*zgv|qqvL6%Cw;Jyg%wd2a^rT>w557 z5fZ%pBdRN+7}hN)NIN}HUc|N|S>ymXSag0aPIL*UA3M}H!Yn@fc==X%1Ir25jMvdq z83-(G!;+)zQYxur_dl!}ujfH?AA z?DZCGcKLO}MAep16fmB0YrbU|Ju+@p`QGkzWc6!47&+enSfHENCj!}L%sm-q+E{*a z%1^qWpO_FONA|-WU1T#G(dhvictLm+J0!CsI-4sbb#l?)kJ7C;6FGyL>HVc!+tpW9 zFCWrQD}0X6Un6WJEd_^8Ekj=+_n~glO9rS($FgPUBRK`<^8W4>%ym}C-_bqn9aHG}t^#bzuA>m}HcJ7B24;GgR{l_^38)Qxt&*K|7%wZM zsV2N8R8k}3ed=Ilw>U?KZ6-b@qHt)Xe@UZ^Ie;845l}esa>Y03k)x>fogvYzdr@J5`2njv(%cuWrj{mzp zemPQUHKlHk15L%olAW4`S}K^Y@2rSM-p4MEQN52K*oKnnp%3`n#rBF`xl`PU-{%HR 
zoT0ajxf=NSi#GVBH}V6I$Hsj(Y1kL$_YQ$Zu2@D?RH~^Zco_icJihdH%=AvcQJm?j zT6p^ctzdOoy$d<`=RfI9$kLITRWn8`K)u%e!qG`ua zEILY_c_o>N>}@q`Y&(qOEoTr|wvTqTdXXi&PQVi94(_DwVD~&9SrlrrL42p|adb3) zUL>#L3X5Q?oa~sVr(Y@dHY>6d^>jxnCT%Mm-Bnhlayw?Pk3vR0;Og z4-}i(`EBZ~hH8iXoy2(0ydH2p8jo;s2w6z57xmbl|AftVHnOE2TuQQc3{$O4;!d%6 z7k~`bHba;VGIDbX9~TSiVOac0-vKn_%(09nZPGRBssI;BNZF1-Jp>All+!rW#cu@$ zPaB^uuQsc1Z z_(3#f%WxNv2|WRM9+V;D?ZetHv2J!Ak*MK%U@41Xa)sp(9hwl34c!H%FvfgItfynd zN|$eJAQ1lj2&iyTrTrP8lSy#GP>Uc5yup(_31?+g{@PaP+e8<2om~jr$INv!eAHNW zU0=4Js_W}A0%-bV1rE;sw9*$whtW2=yY4X$3L6p$UYe=3=8I?(Mr_dy z`EEw@rSJvKc|PZ&PVq=^a(SozT#q8_+> z=yc+|mI@`2E&gEuI~B={OrIsJ92bB5;0yd z`~h2#sh1D-1NE&ATHoiOuU=gCev+eG$yFdZxawfY$O4tEqzyIh?1^H})-t@+ePND4 za85=AqyO&AyBh^MuwhZ1+gbAGF~U#it+0G=)F)oox3iF}uT7zJI5{{WQ(YbCE9hig zM>$+VSe3`9nHD{-|B6E&MKj_jLpByFf_h$2OPv1u>yp!5%|GT|juQaUug1zQ!`I)& z{3)ycs=*wiwZ;tlK0{b7=)3Ts^AAr^n}+$FzcD#a1j3PdQ^?K-dlI5k!iwy;uaS=F z$50D)3Afi@2W?SP1EWX5HD1RwZJvf2#|v>}p2TiH&?;SKQ-{vqM|08AZR3V?+`FJa z`~oVbo-dsq1bY_4hiHSAC~KZXD~$cZ;E*~Rj;CUs_S`0SH^(C8r(9pY-yQ%8gwxac zvl2PnxBs|wo|+s&x;1p(SM#)2gAfXs5t8>RSzlZ09sRu4!|lz0V0ZH0rg~}ZHu$*v z2yq%if6WwD!U*3@B`aTqu@KAd*IKqnRs~GU^@3qwmZblzf&VLW|GRu3{&qJUbRAUN zlF&R_E2Ijb$;`Bl`WcUR`m|C*OT_GGHLcj}}Jf*?$IT9CZITvALWz_{H%)qIZNR+S+Zd_s_nFY8$tnQl{-W*tzKYD54~iv0|q! 
zmMv?HG=&W<@CWrODdPYtkNUYtrZy!&zi51!*V?U{b#&|e2*a>LdA>}Ws&m9X~P`~DYh4ksZR?GL~2Wx5_NeN5?1u*D5yG`d*Lfa)tZU|ul zsl4dMA@-T&qS_{Bp(}Y0sJn7&Z`&9V*_D$x`l;^gXrbJCN<$@RN|y%Ys652j^S1?Vu5x-=u*q2AlTYsglVC%E*QeFFW2OE)_r~D;m z26LsLNtITjF1x3You_3%;I8ch>~nSgGz=jD%P>5|u zS1-Z3^LXihV)3~1RQ2ahl!OB5B3>8#AzKmoo#@ZI0_Fsd=Whvy{;mYYsMi(L#r6g_ z?cFsgmFz9xtfuz9Z^!5MXYV_gU&jE@y+B``<9k9HZUKCvS#gDTryMF0qMgFO*T=7e zvOKXSB#Ha10w%@XMrr*n%0n$;PWi1z#f@ft*0rRO$lm7+YOLYAkXgTsBIp1EUhaSk ze`0@n)C4@rpe7%l)E3y0_d0TL_h=%8IfpaO@5{vW#ZIeb0GKt?AnsT=I z0GMlgtrgQ{)@}W!=jrkum1B<1=XZ6_I+guBNlmjFIPiD1c#+pzyDE4R6}kW$+9GYX_ed$L`6oO`DNavxJqxDK@VR?^i=`~oK|(|I zZJA1#c2y3kW9Mve&AU12gh3w`XPDEl?qc9W(38vc^_l-S41@m>zAqAZttf2FD{%5U zf(e3BBDBDq`Bk=i^F-k6!|MXoP^iK^P9>x2G;JNyRogKG+zVO@FzWOEan--wr_+V`v(yp9PEf9Fbi$i4!iZu z|5)r{=MvsnI{%a}4_b-B%x=yLurG&DRAg-_c=M^*F&0n0&2Bh)>kuakO+QSJf1^Xo zyr5-E_r#^^D_nACu~L8=Mvs&~{3VhzkR)rgEeJEqR82>c85TDZ7y0_I&z@v#ig&WY z;%0+|B+k=PxG`nKsO)etO?2j*=WKBxbllYlpe@IZXVk1Mc3L=HybO!AOU=8i)y|yK zU%QZkkfi144A7wZkTnX+`S7`c@%{PDo5OHT6gK-qd&!Uu$?QL!qJP(;I>|(0^s82( zFL`ij1>e3`T^BkpU{}^BZ*KoPhh)ah3R~8rxCpbbQkh)qq<&3|AOyMgnr{*Ne=@)- zN0v?fEU$#CaMUYR(irdDr>6Wm-@v`ytJpeqZ%%r0Qq+X~(6OV2tWh1SC=_f*hO2lZ z25{F>xp6f=Sc|hNyXvou^Z_rhjwsG+XfHIrpA;q>x7~|_I-76S-?@m?xz||!+@1D~ zhy`<4H`w=4T#dS{qS*>Dm7^6X)(M?>4`RnVlF!HY7a*ji@CpCL06^Yr4OVR8Ct9IW zvCGzBDPi zk?ZdOo=bL?W0{|umm|s>6`$6(bnH|9{f-=n&3qU?^nIktLiXeBRzxSZ9m%0XCEuXM zmvS&g6Z$%8@#yRaYg@bkNp6#wu)F&7sVq9HDc`0F-CX>LO!!-pXad}GkFo)6V8GX8 zn6}+dI0mPCgRBa$*&Z8AIAqNvo<#Q348+fH@HHU#6WEDr&>|M_V$U$D#Ae))AeXhu zjRMWDDY6u`b!U|Z!CEY#IU++3i<&;5yKitB9OL;oiu%U0yu+{s&WDc-{{|Y#5xhE^ zyBO9FFRL@$1}BWaTUh*o_2pTCDygGf%8+!aEyR>SMl=^NS%Rc0?LHtmn~O|PigG3A z%Q@E@N>kDmZj7O}FA)55pia|ev2*K-d6wsoXty5B8VFmIJTfdXg;H0}f6wtr7JVJW zjb4EIWJZu6aXPVTS#(HKg717~g8KWIb-F;v2qTBKIKuhO)0a*vIqTcB*z%4T_VBc9 zRkg+(+xW6vc{SzV14X}AGcRA4?BQ_Z4?-;(IGHV+f0ec-jf>;QF%n+S5J&Hen3z30 zt^Q&0=1IuBTyFK5Yi_Z1q;xg0&MNH{Z6##VBVTzygS>&?(grg+)2YnGOeSuKR>+z{ zL^;c~Y7OYN=n1pt1QmD!M*Jlln@xm;$jwzbjIX^ztta4fGkzsC_}6LGa=~P&0kjE| 
z5<-A=8DU2&iB)uLbhV5YsiN!gpho+Ar?SpKW^K73HW*FAMq%P2QB%#Fp;QC@!^mWX zgYR|i$_}y?b(Q2_TC_Jr7G}OD{#$)qlD^RHO~3Ae2o{hiuPHz(TXjhCG)8Uyr<$aX z8D*|%jrH}>azmGdxO5KMAofP_f-M4i`aKXmrD{RESuJquXHFz{9=`t#a*j};*Ia)_ zy%@gZO6RWd!O>Cm;vls&OSILfwd6xO%XSz8FHy?>tAM#_T7%zX-A~N7AZ1d8viy0E z6G`RNV`d$v^+o-U(@pHm-`N{Bb!VDP)dd#1`GX@|BYo)6EEVa+b(ImvJczH!uxm@T zlhCwNmMV249)R zmT%PQM}v`uj0gnf%;wVG&wR@*#<{;B<)OfKs>o}BLf3j9qhIv^CDfF0RDfP<(x?#^ zDll+2p-GE}kyXtitrR52(WiIF%}EB(^s_}QZusSk<lY3YNI^0Hs$$p9Z^SbAB} zboG4FjBUA79E)v|xDl1@*69x;p1HTie`>$Vp-;$YP(hRUx!fSQxx5%2p0LP(wEO}c zGd+KkByD`=2Mblb1?fG*xdxswc38PbB;%ZP@H+0fcUyyH!}{+CrmPIbObgSBR-fFI zld#D5CUI}8T;K9}Kca;F#9wl8s&*8;V#vfqL$+~IeHF(eJ^F-IXe*u;s> z$i>(C9(@O&Oy4kRIHAWJozl3iY;*6mP znepk{dh9o$$<73veg#T2feDCaX@a-ZJXHfEG9%51;C{EPLpM!AhsrO%35Na|=x(6# zd!%*lmu9g?v_~sdu~l6A;_V5Nq1FxZKd7WFJSirq%l4umCe2n^pC;CIBvGHhlUo#)-0 zq-U3+(d^8YPC%`U(~$hq9+56J7E%W4YyUAr{c$q2+UNI@d~07;ZwX=dH&Hh0F~b5^ zp37ty|CCc+pgSFRInm75w6aX|x8D?4jlW99!KX;_ha2^Us){|^TAU~@8JW&bR0i4y z^^2bZBCfTswv(=GVTsPveI^R~y#xyNf1{c*;7>p9ISN!TFpr}RFD}cqn}+_QXR8Gv zo;TM%$K^bm|H_^4Pdv_2VzuUKCz=WAP}{=$&iDT3Z?DOfH)s%80jrB6^D>?npT5K3 z?_xx}T?^HSs>vH??k^GA7yl&Ews)_M*qs%uEEM`%NT14cvm{(CM>`*nS##PI7P{Zj zS_@Vp$B}c%X83h*)FoF!k#IP7D^9RF(6(WRvEPBdMjj|du4tY;iR?eX%e*eQ<4JSQ zBdlPP$$FaX5zQNFmZMNe`D8UsEi8G|YL3UL^D3kv49kwT#eVP@ZTwQJae7;vItE(~ z+JY`l7J>oj4;5Zy`armisqftad6(nG2e?sQzzZt;(y~G}4`rSPfX>9$)sMqMlV=j= z?}Ki)8@VDO$z(53P&B28yDKVws5OIunN7xw3#V;KtSg@`;Pe9 zf>BHEWtjP<=E7*^S%==SnK-2B-hrJ&K&`@qthQ5M`j1x4oQaVgk@1eJ%?n3tJ4hnd zhsU1*m1Ff>Vs%57HC&5ZXi6&RvgS!Acl8i3H2nVw;Z1Ze*C1QJF)JSr&kp5(!vpJI z^|*woA{)q7B0_{!^@M?$w}tdZ_j$&3nO^vuVwnqH-R+%BtC#FGd4D!5HvkW1&B>JH zI~MllGqrV~NFrKK$sF2u*g$5d$cXo<9IiTNgG`x)vDz~f?v zY-rV=T&apntB-z4U+Ofa+fFLzW`a3evRY$kReiX%g+fE*0mqPd`EK(#ON1^$|Mt_l z%&KM)y}P$$rV(9nXzXud99h&cl)mT)nTcJO3D*92dG=R7dM_reXW)?`oZ^vTAri69 zABEQ#*GrB0vG)M!o>d6Z=9Ha!%J|AMyHB_L8{cWl4%cEoHN@_qY+O|5EK{j)RCqtY zrbzw2(z}C7s~tr*y4gv6)=L1>CL~Qg8Wn8{_z6(0C>5hExm9%AV16m6eK6CF%xQDQ zgDK@IEs+sytdA!w(+P{r> 
zmI+6$S9{1Bo$1WnyM#O{b_mno&^sB{%t7S_Ev`sv-&rJGhCZ>rbU$zCA1*sJ+w)`~ z>eZWV^8Zqb^;@jghc#!;1z)`vkaYDS^AvV73-}8`he_$Br|ANzUG^vdaC~XiI+wlHr8zh#+A5kprYgPQY`|;+eoKpbV zj0+S-|i~boR5LUg6FPn(p``z;sTUK678Lz%ONn4M5YKeRHV5D&q;onGtTgaYKf^o88r<@))u;Br1p1^AA@T3Q=8VIPCJ1f}sk% zUx~YoPx`cUS1T<{w%eUUAlLFl-gwlgopEZ&KK-MVwBbN%V@1L>BG~X{)yL8=oJ;HU6hQuM+l^i1m?u=7t%v;x-hDJ@{Tr&h8S1dYIGr3sV>F zm(8E^S(NixlYq&>IdPTgY#lE|Olo+i5Am{g#il89XZ-B2qwWkDkGXBnx$vC68Zr~n z=S)&$AW`)Z37dtXqSa+eeZhs2Sd4xt7pyMqeso_+kW6b)d){I`XrK$|3A_ene+_^c zuZFLl`voRqtI}TFUX(~3-oAM~no2948@xFzBxxf?=JoJx#K(Dif(CaZ$)w$#HC6=o z7Ae%(7ev%ej6@nPB6uj0 z*7GXh6ex8CyrO&R4?6MNI^=$w<_>^{vzy%}o9#MyPx`F@pKqm}w^;!H&&NEJ94g~x z0>^~8*VZ9fXafWkCgOUHydIa=Cv_MQ>Vv+GF2Ae;7oJk_&>9}pL1eY%HaGJ&Y(0l$ zN`&2@)}r1LPilh&+hIv2H#Zqz=dvni9FY%D6Q)~be<4hioT~MGdaj#x;?&d4o~RVa zZS%oVrqYQ58tIIr)Do^VHT3{y;$IrJy8L&Ugs?31_+IvOn(y6oVSIu078tqLj{RBy@ic({12 z!@r@R36!*8vw0N(3aWS__&KGedt-!;{_tzBwfM=2<HcD3 z>@eVLlRN19;z+X&)9f0WX`?Ccezc#U$t}SO0h3~jwHNU133}lrBKxZr(B(%Ic>Z%u zpNi>1%xj1v1w-B6QbrHH?4rqq8-&!oivajKQTIht{35UzX(Y~gGzizFhRQBzMq67~ z^#--T(!y@k3WA55ie$No?DZi5*~Nc>UR!gh?F94Tq@R}pM;84wE*W-iAmo9{=!(Ae ztEeTKzIsiEO|4^XU4~zJF;32G>eW>_W7LOR2SX8;;$%con)D`mh_rIgw^ra z%oJsijFM1Y`K9bpFPIBBhhQP3Fg}^@@NExKn#Io@e(Bq$4om>?GmR0r|IMRWM{{%J zxZ9ky8q=Wxaio@8&c#A*B}3~11`=Ub^W><4Pp)zfQ{pmd;R^V&wcqcsiLBG6s66jH zb=*K>^V&XXfl7r{1-Cw!Zf>^BRtreu7AUNg*`y#2++?~C5@1GiW2Br=VdNT9RVN1W zSpg%Y&#-N$+pHK;RN6gJN}Z<7xfa z>5!D};7wWr|BI@(jB4wPy1k1PFHj1>p{2#GxN9jbTHL)rkl+qUikD)gSa2(DMS>PD zp}1RcDDJL-H~(jhd++;v$`~i-jO4=^d#}CLoWCi2{HFVOm;+R~rj8_zxE&redf$RtzJy#HVg>KXB0? 
zk^yV%xj+!luko~)$X}^Q^ydVzvM`T}VYNpuswUJ(kESf9R7jD8stbrHs5BvM6*3de zeEj4V(-8CTAF;NFse@aRz^$^Ep;+#TdbzkKEhIi*y`a^!SpnK2T0 zDR4>tSe5Wdq1*Q zI#L}~L-Nb}q0Q1%gCg0j>nypI_O(A+PpH#*HV-py^3mUr8+_Tx2u-xfdGKoa@HLeTq6BE<_-N_^c-rLUq#1GFyi z7wyc_Y@2fZTr0mc#I(AHS<5X~Yb^z@P`Nv&)_5w+SU040wW6eO;i^aMp*mpIx5=sA zky4Q|JkK8$7h9hGJj9Rq>`C}*yGeH1c2+*8x2<7}F9zjuOEA{GnP`|Y3=(&fOOA|e ze1GyM536Pt5BX6T>I6h^FoyRKtRgfg-ow*`EMnE&^0Mm#i}q+cwW?e;R5=98-q2x* zJMqJv-nzc*|9}df2%zG-{?JGCUXRptl%}~nK-FXwJFrsPL+zV#-%!IUmQ|jl4ID!b zr;UM65ng2{RvjDpN)?YDKCcH9H1WmAZsEblQCo2P zY$^p0fms08)i89M4ACQ%_T4`7zR1Jrk`Q@LpBQe)TMxS4fEClHxQp!(xX^uGe@6$( z&Omygj&R7(p?s^zZ`0+7Bc0xIa^*GPnLOp$zOrD~ z#i0bb=Aui@>UfWvET_`$7IJt`*vGY&hr$(Fw0MvYIWAXepm~?8@+#@!6wc%-XLgs5 z>poUDkOBYvpzy8*Wxf+#2HSknXI9}c!Y}DNa7hc1q?nz*s_=aj?=ldU(8#tZ6?w2Aub=7z&uFIlj#E+v8UBrZ@ph_>z9 zF3lbfw$|eIiK}}{5wW`z9x9?dvAy^PXK`T$FGk1zy-ki>P&obO5Fu_6k91nW;F%XVv1 zw-a@x>%83i)5rX^V6ke6UN7~*LQ@9ib9|=1G|jvKoa%Q|fmTARL=WH#PRP%%y<9I@ zCQI>@!^|BEZ;CF9Ldn~{gr}W}?wiLL5DKckBiuzOJ*x|O57Saei16~xk*12CdJ0c@ zXceQLS4^7F7Vq%?vJyoOFZYY~K`5!8z+{}Jc#J@k_3t?*->tvqNfV*T<luV8bTBu$a2l@xCJ9MXFmUB}R<05OdMRLzo`qb`yO306DSqo7F# zM=Yr2tIAi#RQaNdoMvhl#cb194_Rv(s8ZE;ZYTF{43DRO+03#5XU^}|pDfepSDHy) zE&iRd&if$vyfjZYKNsGr={3~RX|cU=E)FIJwYE?4=As?};mkMcQ(H59`=UFKO|Ftv zZG2SAZ09DTr}!?vGitq-0xv6t9Hxku1^=}$CVDGcSEnCo=yEWi&LxYyF1fwP8ZX02v3&|2uBM>hQN)O`8K`bEsM zduBHf+(=q4!k-m-9Kz^%kZ@1|su+v8tNX<8G|O+3I^h1KH@}7aYv!fQhmegC)S`@C zL<7me9pv9#dEPQ+J8Hfiw0s@241}I)JbY{q#x<`8rj#CVNSg!->^ zpKaSMYwL#xJe+4gOjMGc);F{c!Tn?7#KbMP>i0D6BV{aIz+2sU>xxFMkqP3Sd2_%Qsu$yk#MGUnIZ)UyyP28SWcWkJou}d3McsUqj z>`ZCn<0IjDejn>&RAulh*d5H1S)HjmP0wQeoP{d(K&#bOfht9`aZ0K{s^GX>v-aWh z?qEYyLL{N3tAo_!CayWb&eS;N?b}%Px1lr{r6W5^V3usSqsr>{Ap;Fic`<{k$k2HR z{VK6-i^aZL1oIxA5Y>E=({Lf*6P@=TY2mT~)jI9=0Evj?-KE-%I+s-E0ga08-y=^I z2a*%3+8mp9ag3+Exn^cB5{~%vd+UrZ(sn!ly(rold%o%^^}sYrNoD0wY5)&ly!)b- z2Gaf7@zSLQiC0b_R_orUoaf9CZlmwZ!R>pQoL;b7T5(NoUveM-((&)A$`8B)_$=xV zDqm=nzOnn!?P|7hIvn^R>TtuB^@Ra@y!JK-PJO2AAr;b>Yg^>#Ytcc1Gq`YK0VG_l 
zt7<+j7zao>r4)$=wXrr8ARV{@h$z2pv5#jI1-b-n#uVYsf#UlyNB+D-I-s);1O+4@atX3~d zIkc2iRw9%7|A82EoFaL%QxS+UDMv44UOFP(b+Fheh70h5>5qJ4_Y*lLdFaY=Fa2B@ zI@^{@DU*iP7-`Zlb}#3sFv4RQ?#R{g9HcV^@QcwM!}f57H3ew~$!-SU(ih!b%XW@D zal)nmQ5WP7jaw{SLTVClmU1r}HnI!(e@Mo*OI}w4bnG-$+*{a$uPKB+Dx{8o`|{} z8Sh*tZ~E$QT-LE9f!eNMF?%T$NficdCLZCh1r`5d9CD9I71Wzhw!#MdDx&HLkyZX( z=HQQya;9{1E!tic4FW?|zY#cvR~9cc^>S{$es3;-uUBjJA@i8`?Bqb4Ksx4k65`HA z@2%s5o(zeC{9C-o2ol-~SQQ)z+ar$yy+tPtrs&4f{=)3b21~PKK5%Ca51z4x-Wdqa zX9M4VKGQSW*w?KsNDn9|@RRIC5qbQ|30H|M>%x#kLn=yOqB#_SmJ?%!^uha)S#(rWp%F1*nH4g+Vd zKa&IQl%$3Cx~}9#~mM{i0&t_(>JYKCF=s&x6!S)x>XQOBp^HPlAs-T z>5|x`7J5JPP>q_@KqZ2E^aH~PUpyd|uW2u59+2j>RVZW?d~c?z>g?6RwFGqQXmmqSf|0G}vmDHsTKY zF!IHO20R24HY}YNium1&1a^yvGSnrwlOBGVAlHzzbb*|uAN+UU zej~Ha|5BKb;Ep@R&k@xMU8+?_TvVueXLPkXOZm5^#*TM^A+QPSM9O4rwm^qCda3ng z&OJv3=+RQ{!J*UKbTVX8$xDwjK+g!~#iDJu?M^`7{z?&KW^_?{Az@n9HH#J(Bw#+$ zkL)`eyNTD9%@bb<1-C!BcQD|wPL&7x6Slw1xL4Z4VcmgEcSZ`b*2L)mSi;>WN-lIS zYtYh=qzj!`cz|AM;^JeM4k+|(lo%&$0Z(pxsQv;;Pk7FpMgFg(r=J77sL`A()pnr% zOCTXgcT==gFJOSe_(f@QPj zbdIV?dZlqM&+3Hgdy6+LSgop>mr*Y0ZoM(o>#ELDNX1>FPhqSrncLlpjFiZi_<=sYGWjARL8KQ{; zZpRl<^&i5dg%3M{`mMmRa(6Lfg2(XNFXWftvFG9T-#jK46#ne7pnl){4GAQ zB+dC<>bk&pG*cLB(!bhgTJxCaiLu2lF9&^=M==YC*FJrqR_W zPPAR=iZIbJUbm};HPsUr7LtyiPkI*;Kd6rH&Sa~wTF z8C&9Jht}%1&Fg~^Pvvvd`dir30+p@P-N|13Jo6jNvk)NGML>+Ku^6cnsMGXST8iB6 zOVA`xj|jEBVvvvGa?Yok{pl=4 z3ZB?vUs+*}K(E}nQT_L-UKhF$b!#hHxgW?#S1b$*BWwFYv5@6>LK;rcVt!{$fizbM zi0_a!@2-A2{2M|L^+~%$iKE?wvxO9h6MNlZ$h}ZUhh$4w9mT-|`WA4^3lIO}nrUA5 zctQ>u#j5~d%fh<5S%-A4t=^pjvuHE)-> z;3c0>L~1u@PjF1A2vM$Ql}khzce)xPncgdKNVYo)GOJCiY~&$2AVE2ta*?__=TPDl zQ@N?)-;wWx~H#LM>!#5L(NxjIrsaG+l)LnL#s~}kBGlq1Tle$ zLarWsbUllFE4SLqx z)*;@Bbk%b$rS(nd%L~6R|;G@A456uf0KR0c^FOPP7(oc_#zb<0|yuA;)86SSZ(|Wys zJ$6xFWmozAGQW|5^dN2zhS2x5on>AM$eamOT29Oy&6%U7%;7U_mvBw>+U$p&Z10Nn z`P-Ne40QpsAx!tO7IHLQF$OJIzIXG>&h00(?Uvbh3tJhs*YD@J62wUjKJEGc7=H5P z@%-aEIUT48i#&Cm&2bQxqr=&~K+p)$E%=yK}SFl@t9+lan^tT9O0W?Qg$| 
z2N;yvZ7NM$Hq*rL-2c}!`YfpKvbT*y8h!qH^Ng)0q)fC75~$R)9qkOfan~i?_B+9H~+8Q&&9r=O@cQnQ#5)`K!INYQ{~N|9*1uJo*K2p;l67f z@CcYF+8H{Xx+!#=Ik<0DX4l$X2;WrUEOPstRjth9GJuZYK<2nDaeZ){FyLx_49#Raluuc$42m>;Y{RDKyIxQ?Fl*I(A3q(~3agc*631JC)qC@XLd{^wK7EJFDon~?R zdCC`s04KGXaY;Gnwgv4-R_K|w;BBsO;H?kr8VA-VP9mOcwjtl**PArDzcHYG%H%Lr zGo+Y4KAlV<_?yo|h+J)yXpV-c0+XD6nT&%Ociqq{e>%^6J(TKAE(WKu)e^7v%f~CrQw#IhEeNRSCDH zv@Xu6*473CIIk=0=cf!O9B6p8hYD=#og<9(_igJP<}bKgO8xuu*kdp3kRR5y!|SiS z%xCA}ZamgrN#K^LYP%~Er`(g^W0y+*vHL!kqb5WC{YYMMwVo^a+@Ig%u$)TWN3AU0 zTVWEd>O0jxmBVB0Bh$+baMMH$|1Ok)9kVBexZ*Dy8;5pmeA*Zrfqhkwjby^>1Zv={ z%`$QWue0{0wK=`!H`SB&G{K|b z@~sOM=QZo@D>@34%4lMn0EyDAwm4qF(zMQNS!OaDJLPJ#tJb-a!Tv+2m73i&+f^r+z22lj?73? z*i#Q;mICI#HtIfbGa?57SCz1|_|crK?|RF&TMQUGevol+*HW3Qsws%cW9-ndb*{cA zNy<2xaC&L6#!}n5`cpkW3*`mV=0HCx-mx@^!Ef9Qm1MpSJ~$%pEb&Qt`f8aW|Ng4A z%<3!SYVAqo>GQR#7ujsXmMB%aPv3CO7g|o{jD0E)J9cAUXN!%HVAu0|v{O?GH`Z>q zJ8^bHCE#+FnQy$RQO~+;5nkl7P-Bk5KK9;A7iN zyWO)K0{eR8v)oVYC{5H0-wmk85r=n9AGX`00@Nv`Hky6FW9<3GYJ+9c#7cc=)XP21+Bs}W! 
z+InWcD-0ATG3gRzm(>Ppa9*B(;b+=pt7w1uHGWvM17LiR!ly6%`kLRI=Nhm4u>y3`Z{$~QaaiY=i)AiDH-hujzKQB++(lkzdiGa3 zkZWfRnj{oxml;&){HmB6q%Kai>fBXsH{@yN4GNQ<>KWJ2l&IB{Y38j+o}Q9o3Gm+t zhJ5v~f^0y+PSZ|J}a;YkI9O`0E#2%dg;5mDk2Q)|VSyadl+OHaeDQnhSQhC@+oQ1gi}g*CU%P zrUv4Se)({pZ@GI8z&Vd!RbB$96cuUrfT--Xpa>x_62zR#6?~c6C6}AZyreKHT z2$*K%Mfgc=)uEg$2Uh=tcX?E94J-y_|J_rES7W7k_YSGLHA8(Zl|XccZ8EwO+@)hNfrP*Qa>;7HKJyn|-c7>1Rj5*@IKVb;z&m ztb%ly#Gy^M&uw|-x$ELaDd$WF7jac`AXN^MOvT$?Q->x?EHnz|V{(PIsbiz{N}vG; ztgw$2f%s?En#WW+S1i>x?b}v83?O!E3v%tuADn?>Um1TNu9LS=JIZ@ z?e<~X_uuf4496|KSM#fj*)TrQ@S_sO+8EOT3eNCI*uCdU7e(M@PK=PgxfLb}i*%df zaYV?vH#X%jtdw1)+tLDJK_oYj7@qR#XI4;T?5A-d!<-n`5cyGDC@q$hgjS&P`ipiY zSWVZ7WLt#ZsJf^HrgJ%o@Q}jRK8~>ikt_h)AJ)}x|8T?sRkV5eDJ`G=r=5g}?p=g8$oDh+@a-B!eo>nm$79q+}tj(r*lZD7k!l zR>xdD@#y)rtady;C@%;oI@L%wlw2=$WN+Fxs~EFmAy^%-yWF%p#6Ds(mY;W9l%`$C z>FBNRmp#_#gr6cGV4*vRsmCF0?j7pVoqWj^UB+tesL^&ozoelP_!ywzv)Vqsp2+s9 zE#`}N)%vtOheo4f=P~)zdNspR=(ak5r1gu>`eHytxb_{WLZaxuD!*2g;Ki!j)*4l> z;sxk4hU<4IievLnCGzkvIlWG+RN3iOn7P;39773t;2^L3evtXd}SuSPh-H zt+=keeYXz)lFW(;OL|e$q7Y}cGosi-sL~lBT3n`D#1MISf9L1=hh%Xn>;vJhTU`!^ z@Ko9W75-2peIwt6B%OwZK!z;lBk0Ly3B$h$TwflHfqZ>NIAIIGgf@SE*U0F|{;F>a zGo^%3k8iBsl61ylDuAnO`3%5o9;NK-E9>~LJpT$n2wb?oj(cppGh7Wxt$?Dc9=#I% zrJ260EPiQEIQzm6Jpn-7Oj)wqZ$ut#6a8iG`>XE1p|;~5iXD?zNN{{By-$n}2yqzi zW_wMncd8yQ+7IP3{%6jRXmz=x0Y!qF?xUb5ye3c1nGOp6A5j4Nxi8gK7Q1Q-MqMMFJ|EbkRkZsud1#_5wf520^()!^59#^L4jF(>-*qF*-5s-i>&Y$)7(J zcJ|4XOQ()T&$0F2zR?1PgwEv{bKn-8Dola$s|ENisPbciT&R$d6GxT(W)93fACDFf znq!_ltH|*bsmSLS>(P6LDdO|FUm#esT$Bp#VJU7p?6f3aGx*an3Y9K=^i?>dazlXg zF}Iy7k?Ze{Us6)#p@H{Zu+6Bmrg6q)wJ6{)5;Hcd=gZZ|J{4 zFMvg{q&g}c*jo~ik=OmiY{xIun7+U#)I-IOPil&?bsQ<0r;817VtPCEV%zWj>trbf z0A`WLVaz?BbxU%PWcdB+WpKEb@*3l@&^`BS8jqfT1Mh*&F$Lv5cxBu6Q!hf;+1;}%kHOay!U9vpIqnr^MA z?W-&>@yU(Uq(Vk01oJq$0P7N1w<_y3#nmAp5FdG7lhK-v60tN2an zE0G~v8{89`DeZhgi_#qp>|A}Y?(&lms=fbdt)JF#ux8~%bs~;ObLr$Zu-TLLJ=0Lp zW_VJj!Fj1BxxZ6GO~Y&aa5rDW;8SVo0eLzJQh3M9sklv{7F*i`*4ItVZx`R|prItn z=befL)MZ+rWus7w4c?AU>T;I*w9eJwv)TOoOl 
zd&VQ1lYg5EI2Y?8pn54FC9>%Uhn9$sLJS-NJ(G&cq#boQaxZ@%w1pDjX{W)w4?27o zDKju`J>^Qv&fjuhf*9!oSuKaLWQ1U{`7tZ8BmluT1|$nyCZA@g?3+vY0%>%5nW4{C zP1I5TT(@h247exP_te`Vn;-jkc%1H=ey1k}Y_8RX94Y0Fgr!(`8G^B?uo= z!7n(WBdpUh>WEj3U5S9R1c+)-uwv(QR=z{vc-yo8t{$}^B%ZdVq3)MSh(f>9dIzP>@Z_~#iGkl?RI0O`NZ==G=k0Zxf{!G^R60RG6EW*ynWo+sE?~u6HNio|!PH+6*V(HKa7^^}p&JYeZ zS{H>i`0k}=XCO6F^`)eS&6fQ6;QP#zRwcz%wf}lI0j|FLdryLM&zJ_ip8h_LxqX6z zimmdnC-{@fKy<~b^wvaYi{{07F&o`XQ@jG{VrJ}b5{B!+r?J2{xt+nTbWm>+gr=+c zAA8YslPQ{jo$)NG+gV)^c0}Xq7b0nh^HMY-avHzt?Ds9&G}=xk>)P3JE_ZB#7HblZ zECeFVP?KdKA0Kq>+4PvTLjPA>Y>g=bqU4&Bzqp6Ao~hjPsa|r_uYCVTQZlfOrAv4f zd=!uko^JrB-@RFF_`_*-Lhy(}nRH0w;GP0~n11z$5Fi=fS}^R>ANfW|G#9DXY4Txs zGprcnKw1AOxGMN{c)$tIOks!e20amXMiraEKL8xqH|X_S*~j;hS>}}a@;~jUqqd#- zrH%Qf`V}=vUDOqk*PH)~A9($}BP#5FyswYI7t(Nn>7&I91{9Z*b56FSfEv>F_F%p^ zz3ROCMvIEn+0`!5FoB$ht+Dxuz;$8oV|oCg4R(@YiE;4DQ{b2xW=(K8~kj6QrT zlN?T6b~;f~cQGZb>SURQ$2#d#r`hdyp?X91t5|H9Y4aUaysQk_l%~F@PbiJISsO;k zINPYYMM%42Sm)SZ5X_Cx76}nBAFSZtM&`{_>sa^llf#TTQO6qd`~Vnfy-BsxoWtU& z=nx5J_m0>gaiA&DQ)?f}!v{(ZMdN>`5CW4HX^}obeH3tUQh;58GFGvNWU3PK-D7Lw zY^rO>UY8=zG&k6L6=+imP%{}`zYxT~g@zq>y&N#Gfc&9`-109MV`KW&xaD#p} zHlM1GQv=v?#N^tECui)(oWd!;^?!<(C#WF9HnLvr7vQkV`R%l*R7zVY7299Lw>&ix zT`g-J)MozU=^3Mk3LXiS3=LL6&I*#@T0+i3fQ^zOrB;PTpBqk$RE1lLtm2$O`S*=2 z%SV2?80S~Qx6ajuFZB&2^z7nvrr#aVMkA!_FBt-o3WS`scDLNlMsIr~Xw1@-@Wg-( z$ly)92jOGp;7x;%J7NXQCYJkoO+g*_GIRoP+HlM(p|K=8A%h`7+fl*J7XbP9p(G(E ze|kl`0QYe#%s}?E&QYa3EZjZd4<@Hv$xs|WY=SNfwThZ?Fi}+U^>SxQAAh=V^vcSX zZRLrq6uVT&vF74NS6N@MTbbBT-@)!NJMs(i*t@Z5*G_y|@6`k?)OD?;Ns^HFU4hg< zoJ6wm5LcNu)4pM8HmvtmxQ*A5C?UyU7FmSH&9~{~48&p`V=VEY%`dkRPDUDA?TxY$ z^LhX2FmW2FXrfYaXmbCLHN~n?Cm4`7jcKqeLtpL+_U7}mYM!_1i$zhpisUT$H3Rd+ zGtZ17r-52Y9;E@ra98OXr2MQ+sC9j@o6*>U0*swwP1by4ZI6Z5KZguKr`HfbPY90^ za?HE5T8s7q3?=KlIX~cM2RSY+!jVoK{O?djZ=+;mh3tOrRiEa*Od1B1mtWAGS$hZ? z>UcAXC-v)1%yO%5(lFa%5jUq!XI6-`4OPlqh#TdNAZ*r^B}PKnqPlHoEt!K?z&myV z?~e2K$X6!Y=A2scPtL|?NZ+)5qB{zS`Ez!b`G-;v!dEg2oLq2w_+VW;e_*V=x053? 
z`MmO~V6(?o>?+9*O)(rFv0ZWBq`^|38Poa=L7m=~2pG!Gew7qKsqPaN?0{{Y-;w_2 zML9j?xw2~}S>xKWjTsQbuf*8oL5249E5o-@S3Q5mdOud|UNpvK12gIb)sM>}o;3bB zV(6@T9Ou2g*a)`E22Y#a^y|O(DGKtsA@tLd4oZ1S!w&J2p~?zePpJ2>-!j3#S${ct zls+rdYuNExuR!Z$#L!is>$cVZqF*=Ws0O^v%y^=}1{@eUkL^}<%>6X!65J$pyKQ~B zDbhApX*yefTMrp<)Hs%3nXWMA@%)^-d zyPMx~c2n)L+?1}DPw1B|tNf?ROeAoi94>wX#_Bgtt4vdj_aV^Hw!N0N0XWoEE3Tw| z%VB10ES}H@vV3$0Udjr%I&Au~_n!nQlZWxT16Li6a>{ghalpQnkbVF2@x6lMQ`YO{ z`=RB1Oh#<+>$uL6$1BLuRWp8d_I+QrVBjCg)4$AzzKoqjn3-2?SFO$u(e?jpJMfU( zHAn4_WFxI^!%fJXiZy!nJZ`1=iOImdI}C9;T|=KTB;C&DT9C8<_SeD|*;qmwrAhY4 zWym5(WKNBf@BZO}`00$Lvb}26)TFSn00|$wq%!nq2Irp-_R&QT1#wSYTfpYNJF*-| za}tylEGm15Lsp|zs7mo1L}{ZaRrJzvMN$i{Yt}4dXWYf^Cas)PbcHdgcK?it6gDG~ zIr@&eoOR;DoKL5x1n|r)9BjidST`-@Lq5xmtXFlUeskbuY0XHja1T!`xm+1!6ws_~ z-K+mJm(V>gta}Gas#a~XQUoXC@$n~C5@$=pZu{&LL}1cmm)LA!e6k|d)?I+Cs(jF zM|<|@e^f^;@K99z6wdMcGD^6LF=(qAU%{cM633dXjUw!^+11?>Y0=XPRBNnL9Ju}b z4=Eq!(4*JdcGoYfU{mU9h{6Qfb;qk|*_RP2(M`tPfkiIRsV;dqA+e5LpW)OF#W9>4 zjIaKUvm7pYZl+sOZF&@_)i*emUK4heRjIv2lJNzfXosx3*JW6+SfReb>7qWjId<5qj58`ILT2M`i80LSJ>QQYX1> zEd?zeUOG7jj3mp=VSo@Og$9yd$qbNV~mxYlL?74Wkt_J^%XYGr|M-d_=d^8 zt4`XXL5eDNk-y|UNHJjPaua7lF^XA=wNET@A^wS6bdxS!@Oi!dFt_`(77GbR~k}3ty$6F{}h3!JRQx384XaJ{`IXWy|CiR$XT|&4b(f>gH02S zmF5Ty3qswX#=FFe=!FL9kD@ns4A8Ga+fKEFMsAj7Lx+5HGuj{;7)49E%9{6cw_h5E z@76BFz>h0F%xxNc%Q`$RX;85e4|20-bk?G@GNZG*CFa}4Yh>q^yT6(*vne-NpJezw z_If?{;?SEzCqPV$EVvKhQ{7t9iH3`<{pNkqi#vt80EI$jPQehctS{8O{MDMc$f{Et zsk+trs6Ab`3f{}ut3&!5WUxK&{Q(-MX}8K1+p%-2oJDf>YP~EKY3om2w_6o~o{5;wubuQV z##yO%D@mV{%}fIY4*_e(0rZ_u9zNnhuk#tJp(t$`kci9DFQbCpc9P?lr{7r3EL{Tb z<`4Qfr+&tsu4zpSiOpe374NE(k;A2Onc3Ym+B|6E-EF}}_v0FV`2NC*9j z^)s&NM7i`04OEE?#m4`EE&flFgqP@Z+gU){h>*c7lM810K9%H8UPwmekp@OnAL(Ki z#=f_Hzaq%Vd*;Gv8i2bljxsi%1nBH}AD6^6`V}aPAEW_9b+%Nk_)_OIzc*CqAk%gd z9UG%P#O%srYpH#fOs0zT*qh8TvNjUEK~3(UWO?C(LWpSUGwYm;k(S7qIalC~)>ya$ z_C}%(!|;jQ>uin}89S1+Y$UNrvFZ}xMp?y;pJ$~qnQrf0y(Oz{D9kZ!7jFjn(nIG; zJzxGxM1~|d4IWtNIVf!~13#Ju4zlD2x$c!@61o6b&jh-Ih*=ek*LW46ctIxlOl-d3 
zS`&*@t3Gr@2=R-g7x(6C8GxXexhhPwfA33ZirWVvvFLuxa6Z*3)4vGzJ=Bc)%ZL>kUikW?bLcI%d3lMLM)QodedWEsN zVBbiDo>miI44E*|xk>YKgJ#J-Qkk!k43BP8KyRwH(C&mRA^2Gy{vg=Zh`{jBDRsN$+IL-TSgT zQ>BrT8QWEk(KciJT7IKh#NyH}?_O82rVJP;t#g-=SaT|vvq_h=rKWJ!e~B!(vn{e~ z(3yz|VeZZUS#w;)zt;eA53~q6rR-lV+058AX8JiXyZ_{ve( ztDOr^{=I{>+;xFx4-sQw0VW)P@j>^d!80!WqoyAluH_qaTjw>Q=K)YXcdFYU3`dND z2hXfwk_lGV%{cMSI{1WGeYC*5-Kk*i?>HK^TAOtDtSdgi$eOT| z;|!l%40y9|e`vu#>|s9Gn~OC1)C3b4h*~Nqv>(IWYM8ZGGU!_1F4nwuC(wZaR>S%| zD>z~RA`^u*2DAoB?gaXRJfr(^jfuLg)=4oPc-r>6hBJS z^+W9UsMovs3zJV=rM!YqdliiB)%v0X>TBPP+8asqdna-l3zStCQH!L;fWCh;)H}#1 zedNmvk`2fce^v&l_>neWW^05?EwHpqO>5t3y{LFCVODUkrtBDul-4qOnNX{Nj`P?Y zPBw~)vP~-kB`p}(4^dRGcAf0bFWU#`S4PPj`6OjS7O)i+XGH%!osH^fu2qPeWC?FGSWYxc zk}{&JG4Q*`gf=l8T&6kkyxtWQwr)GWg^A3$7Lxx0t(X47Un~&5)r}r?UM+k!q&O&L zJH$4Vj7^iLjqAx}<7wV-|M6VGfYmX9RT{142NGm(j~rcG++41^mF*SviQ${gNbpzP zFcgK&h`vBb%`6-jKznzzP`5E|O{>lLF7631tStiZV=i zIgfBQdRP&jg7uO8t93W8GHawjFry5onN3@XkdOH#5_Y|TYW{G~Y+}9#H&tSf7Xnfg z=uzB+-bU?4P>@lDzQQtTzk~tTLI(Awr)WVoK#C6|)Orw7@ELGUDoxYhl%cjMkbYp+ zu<^?=Y&c$kAGf3~om1W#6Z%#PZe1C;Be&ci6*<(*ZOnB-Uwu50=k!mH9qb-NtY@#H z-=GVbW+|ukT$=Sgc zpg}x1gxhwHbL^%B^|KX62jnSyq;(BUcO8eQ#HlOJFFEbfNMj`_(ULw}sd}1VFzTyp zUIMA}B2}K8RI*3DNqhOElZYlwN3=BCK{8h?aSE2RsK;f{W^_R;sOpPNiTB*%f%$C* zyT2UD>|MSUUmYYD7=+ZCDXOYppTWi5hBjPGkx+uwfllb=6Jx{amyFJ7|ErX*)2m;;v2x9B!l_4odJ3`RTTXz?aY#jpjsut37VS)EMIRpsSZ z+gx7}rZHG`Ec^~z@;36cNmiU=Psz<;#|j1lq?u!+LH2P^yj8A%NlZk!OHwgMk*BXj zt%KxNR-LbeDlaFWA&eBVza2B;lY|P)9hBwePlt9(4C^42l#P9-{mi^9AG(LIOXIdB zHl<=8-58elNy>B1V-aeaAofyj>w=dC+r?6%e|kf69Bb`SaDp$; z2ua$)PL9Nv8Dy^2EaMG$LA4HhdF3~Qw*}W(K&oQ%Txo z7<$Y-t!wOu5q&`faTTS`dQMNd@h#ovcivq6#B4!^^|dC;p#n|o}p$9ZX>b6R%_9d!!;_f614`J8`IUu zM+YHON*%d!{_S)1cZ*zFqs!zzELHN4nvR{X$8%_@Iyk$M_uE;S{5K|)oM`yts!-*T zPUhYnTX+DfV$X9FSx{|Mrt8$ASsRR)f9KJ62dGhgwj}6ieqSEuS;O>MP^U@ujL&_Q7U64$Mz34Ne+E9F`rx;49$HR zAz>Q+L6#Y9BWu3(ZO1&{A_X#Y?XS^cPTf(G!jxC_6W!FT_b*!W@+>AVe`qqR{)nZcNMx z@0Nq$tg|N=gf=Xg$q`tp@s`0vC+Cr_FuXzkKMhg 
z6yRa5TvIx8Bwe-uzx<|5q-aZv3)JO{;oaJ}9$i+&O8$yI`gk1adz12+~Ln zn2RPfMCY6pC|OHP7vdE#tvB(s!_ES?7)6pPA-M1(aH>gvVYK8T6`oNsY#eRA6;Wgc=qCV)4$t<7# zuc=|(vdc8#?FNN2OFkZ5y>PWPE6eVi1u(PgEyF5?`u$s}$sI3{H0^vj*y(I%b|>ol zSK;$XJ;HAe#!JcYaPA@%V@eeWzjV9oywWwrx5Ei5>C9OwZpzh=`T_pT>g2B^3e9Nb ztAmHXBq2Bvk2mx^OsvZ%IEJ+{{!0}RHZ=_2=$o`cf#t2kTdn5Pyg5u}5gFrID#R}& z9xGH@H(-hqTUcNfHubbNzXl!lsMWeF&tl`T%)TyfabThb=FDhO;`!Z|q~eSukekKu zDmEAGm6$vOLI){k(C=2*al*aI7;zBRWnDl>kRw@6ykJN9EO6II3a5aXiptHMF>u@b zBFSh&nyp`%xwXhOj9*b(Fk?YA@<>ihc|4J!}Qogz8_Pzu4#Hkpi%Wuc+6bEFsNzFSLUo~z#KwT%7UT33I5vu%m znz{RRujH&kDuqZ6(Q!Y#tk!$=ap-|-9lH@n!MC|-oB^GNDHoh8?(uzZlTUTexkumQ zqkaOp5|w#A$kk%9h(|*PrN@+<;U*t}VV3fEG2fey-^&_li2)-#CnMxI4y-Xv%L5yZ z&dOp(pIRpMx8`h-e#z5(y3TNBI%T!eMUKr20?Xp7;ooL+4o{3Bf~N80Eg_?Yl?Wjt zeVC&{`qLmhBMOBf=%!)4R*hcKR^bynIHAK>zw&zQc8d29KcGA~WIUoawcn7yC5FC& zc(D3+e-AJIi5FbN^f@Zj#r)tE{h9Tkxwg1B@}x;ncQHCW7>PIZER0jWC>j`&+^>A# zVa|~!bN1CvyObR5(h9^g+w?bk;jLE3P{_TuZX!>myicxaKktEW!Le8WeFF z#To8V-*|l4OYh^einw8KLmTPg>{V_*jMOT`ca6Z~4fWICOg`1GtM)a^;rIP8rK4*e zokL@nG*v@#6_fO#49t`hzU&+0)B4c3B-{x<6?j2~o8e!;q8+*I{b{_O?oJ^O#cfE$@cUw<$Hxxj_9eqP^p#mpt!QI%BSMGLE>gqGu;A!raERi*^HGKGtP( zQyC0%@UoQYkseZ9zuTVQwDiNtS9GEX!$>KB5@x!azXkdKN7h>fwb{09xW$7*aSfCf z*W&Kb7BB8r+`YI%i?+qxio1J&P?F*f#U;f_(crT3&a9dLTeEhul}WOb`?;^{ltVf8 zhMnQJ3G9n_X+lO(b*jD-m#U>Xr>>FO6M#t=tG@xe_ z)&~q$A4}L>*~dt{3wygLPG%0w1Ozhn8z_ICfbK0qa{c+jUWBz-j2uh!GX~yQxbz|7 z`^bhQC|{z)cen{8otM$}AylZD@5Qy$$@yR=AmJltB3k56?OCd^XGHE!7d+d8KB)Eh`^biO+jC zOX2HMrsku*gMmx?BUXyZLSf=Uw7brZFMemwRoU9^_u!o3;0u0j{>+XKgBic%4jw(H zH71f{tXo;EZa%i16N+-x`Htxdm6cfBd5Qtcmur_+QP*G@pA4&W(%QKe#oKnhEd-}Y zAnxjuTjG&HBIM9TS#=k)Ptol{p1F?YoMQ%C_9k4uDe9dEe;~bWiQQ^;r}VfRbUU&s zHd@t7o~3*I{xo`yLL6y>cF`(~r4op^QhRGYH~8($)n!7C|HoiT-(oJ2dub!9Dt4H+ z+{z2PMVlSy25yd#-Af(?rv29@Y6ai-RWfWdKFpHY9;HJDG#|ev)4m{)S3&SImAw1} zmqF+&tQ6nT*w9F&%sP-)5OjmQ@A*w3@<`2?JxnUoU@7ec$<>Z#Gk* z#G(S9T^;5DJS?lZT-1`Z$NVIqH}%#Cn}zasf~2ZzQSJ9K{N)}d=Tjm1(4nHN2t9nM zk3=sAer;5xuU+Ged7|s-O7iCx_G&kJyII_R92Vr88UZI0p~?!B#FTv9I;Jgq4esKz 
z)@;GY5p{CJvy#K*LXP3}zU4<_*Aqdz)S7X=wb&c`ZsskAS4p_3dLnAqNL5Y`kYnf z+IqVFH?Ferv7ViB(&#VBLaaFl|H-IeSYp|pb5}_w@I<8`y;ZUDIcz+ruh8Rr&wkNs z0r_KzHQS>2Be)*Ud8Mb`$^QAHA3Ub@z&5b&XLnhQyUY!!8Od&+i)BF3cH)?Rz8p1HoMxe+M!c&B(*grF1^!zty)8FJXbum^y;LFuEcdR znz!5W`OMz7xt5M2RT0?wj0t-Or^D`pu3r(X1U#fsYitD+LQCrR)r#6q|5@P0)QUZ> z14u|UO%<$ueAge}?H(zBt$RgnHGHG|B54Zr&{EA5R6VER86E{(_%}%_=PqC;Zoyz} zl4X8HYuR3A|vt?;30x!)o3K0^Aet6e4Kv(V~fdA zEHu?ENaWsM-^pK-#k3vxi>!C{x>4{P@6*+G`V%Bw&r?Y4ZZ!~r&lVp=AViB$>(yU7 zT2EeE74f0q9`siLrjzOu&Ljz?ewFjL#X%DCJ<#vIKa@9~uz>_9v(A7L1KdMI_D)N> zrk#NiAgbh*`eA~n1OO_cPx(4u2A%$WD(aJojvgh6TxLrh&$Pw+O1R?mi;CZ;99o7xa-|J)` zZuq|g6KPaX!Kw^Jc!_ip3eOXO>2%KdkJ0==Ya%k>O?>9I9jLS>j{iZ@(`9R#o zLEJ7(Pd}5!-2o};E6mc@ea5BIZMLPKq7TL}C}T%rIvFxE?MpXJd>Ypp%!FJhPUx4{ z0*1zqH_?6hs0}e~qvDj&UQ3&Iqz_qio~9f+uVmX3R+t3gS&-wz{taG(eW8M9> zUN)Xfg~Scr)=$xdsz&z;n~S0)&a_zm^9lFO1Q+#u>pB2RI5yUUfV=1FV?hO8)H`O{ z>9K*(K@tic;~lV~N_A;Y)TsY$!GKau~>Rv}xT=Ssg!Z*3{=0JNfi>ohfHaZx~m^Y3G zaS+96G-=`%+#*W6gI6tDpy=5<4A-?@_Q0TCkCUNYPk_ zu951H@AN5#UR#EG{(xEDa*Pz<198aTIjn?PrE<5n<{eb#MJeP&2}Y5Y^_pg^W6QB&{X`%F8T66L=2dk-pfb6z4{iVLg4bB76Y6qjV=n* zH4Et&W(mj%B+k$O-i2lLn%V?=wzbGZLhEW!vfG*tWeY$5?20oM_eG3uxcav)eSH}c zT_~}|g#s$^gI3dFcM ze*qd}bXejgMFb4_P0tjh${>K0vNUMwE-caRGhYm}+zkiX*`Odk~P zBc<_ldM`oC^d6ID%yWG{#gMkat&g3oA=fUpddAESpdkHk6*V`;Oi3b;9 zm)P0q%ho-)VGS)h91b|q7tSPiYoN#NzFWnWzDD4GH$<%K|G5ahKAn+63ht}V8hzWX zy1RdwyZ<7}#Mb!&&)*02oH27JBye+dd+kk8oh;Uwo(E!u3WYF?)Sp(~EtIpANaW<) z`}{dx=Xu6Dl6=JOrX*dxhV~+*5&W2|th^Xgdq3y=$!^S{pto)3_KwXTEn-WHYh##% z{pP3#De=|*%@J{P>Mv;J4F!ysjklGuzd1sdC{ZPo#Te!_T?K8GmGgQCz^`%#zBUIv zkZb~gUdr~C=o)Oi(IoHC;0F$sy$cR z`09}*a&9QNw(9CihvJEb{ILihnY@+_0Am+T%3bF+?Y$NoJO1Z>OwA4WQn;%M2M>tj zox^LpBx5iYYi*ZQ*LS0TByz!R3z1N`E3JW^mwfvnmn)0j8)EEiMFM0yH|`+TE)MiR}x-&uSS82 z7Z6;nQ_W5LX?@H1*W*=%v}vS@KyKAU%*a0ue28ROUzH=F^;eRel$mt04`dYYxNJ!{ zRAc@rNSFAB#gIQaW;#{MZ9eHvO@w(!6XB~AAJv_1P^@z&OdAxNp&M+Ts4x3S&WR{= z|26A&QcPTYF_3QGXEL@LiXD)$JCL?MI4^2XI-vo~{;!%i-g=^xT5 zdZ#ptGz1+?Z{8QwLzv8kr{2mP%S$YlEhJUh^J?XjKJs)iz2K-C-|8TJVy7v4<3%?> 
zh{|k_TY}+bOVuiCYVB28pIP7aYV1=_0&xXvsMWT- zR{1d23eqyZS>{n3*P48m^SHDiH7iHc%Da3v%&sd~`GN|73fo=O*H7;h-4koBidBQu zwWJKW8w>BNjDM48wx<*Sl1j_?yTz!* zt&4H_yL7w<0A&eD{$RYXu!li9@- z=b>2Bu-yI&3hH$CQnP2+d>2yWEr7=RMsx1s#=Bou$Vj+Z_no-|*^}(F60*n>OpA>DUzWDW= zDO04vtYy2xX&~qm6Qu_{Uri;PKgZ);a>`x)$SKKmN)}yp@Xa7LGG?s4$FA%X$DoKV zbJ+JC+&MIYS?s6Lu`{2#DNmiZ?Y?-n&Ru_6XeTZU%bg=Ny}_ z=Us6-2L5?+FQhN5tulryzaZg<>K7Y&?EN6)@F`c}E%Z3$9vo(T7fQ?UDK_3!oX8of1lRf`;y9gcz z)3fo9n)K)jfO)0t_)G|OMNL4Z1=>ExI}T6<2XpydG4`b|K7A{$+7IU<$LjO+c`PhF zGD*J3A=-K?IR%?V!O*j>jaIbrN%mIArNCE_*V^`9Gu`D>V$Pze2J)A+G->$R2`!g& zF37iD6YLk^XNw{E9xGTVOxcPz)kc*-14E*wSEqPUK$lApeFA4`7jY9pM_GMRRflkI z;R5OS>bhX}Qn6|G_R)$yj8sHYQi#wk*&DRDtK*sL7?mF{xdh79$$*aL*r5wdQ86yH z?;Xa8Rr?xk@eVU;rGN%gZiC{jar?&{<5sr#h@+q`?-Xyk#60!-T98>q{oVqAy^Vd| zrE6r#PFs|Nl_$iq4zxqpcE8V2ENJ#3GW|zqaxtJLlHjMyuiNGv|6+I2lY$NjsV#j# z4#HpR)k!^%Y7*?xmMK}zW4S9gbxmC46@=S#6(440!Dz6DTLQ@aD`H&0FalgZ5?7BO z{kmO@7vZxRLfgA1bcCjUxH=a2Lz!_X`r0yk?`s63LO9aMpUcgos#I8fR(U(KNEv!O z?Zi!cyq~x1?CVnK^Hm-PPzi2Q&uEW{r4=qu!OnBl&SXrG?o4*2EAIE&{EW&XlK*39hn}w8lt#Gxwz*aJ`g#ca^x@#{i#VI2Wik%zQC3JggKnha$A4t)I7~A0v>8Gt}(5tNhs1{9gC!a#mPC z=`;!9{w))5KZ7=ACRvcxkb&dNee$Qf?`Z8R$t@r@u_Z<_VW}p^#ahbyEu zN@HdAorAZoM?8mq>~wK84+eJ^CZ54n7aU=YIW5@~XgT^LZQ_*6vjOzlip>u8s7?z& z%B10Y_EP=dD~@7*Dep4V=g+0|BD>#@t-h zLA)zRfh!9ql|l3PgVg4P0FyXgSr%=T>Id4fYwzW()KLh==#Gk>+lI&P=QlZLDXZ1D zayxaaJSa~T7I9wSzSk1zqtX15NIaUXI(vZ^gk(466oYS$EOs%MxuAUJ-xpBH1HjzV zUaunW-=Z8sLY&&zkFVWjhw323sD_h&-`Jn8)riqWsG!sQd&0;EtYS-}j!)iP`YvRt z&Q$%Z6IJV95uy&N8wVsv)}em4g$2{t`CWy+4>~U#iCO0EujHZSU}viTJZ4jhh@HZd z7P24eu#F%2wYuWIN35a|&_?N(w}cznR4KG+K!3**#RE$`i#24;Jjm1qOZ;il@eaUK z$Qg1z2>E33!p&x0-ISuVDJ{l2hZQLI4G+&Fk;>WI+7^2*0@;56`>w`H!I-qqh-z46 zZ` zw1u6trnUp*-XC$bHQk7-?YTK(Fh#~pnl!C)XdWe6x~7uOrE6;KAa!VX_j!Zdkt5-v z5<>1h7_|0TKz78&Fws8<(2-`xhfKRDlkK|euTpTJg$vO?_QML^L}_r0 zqX`7}qs;2paNVfN$;4sCGZ%9qXuWafFr$>~AS;=|+IHoCwJ@>26+>R5fhUJO{m5>0(y0)?m zJ?f)QTj=@D5Bt2v^CbHkG3|paEfZim|I68Qb{O+U7hP?-(_f*8PFwVaq%yD`}6%32o86>r>wAF6*wewa-MJT 
zElL#hwflFI0~TPKEOzkys01Jsb}gnZK!c|2nb9Wk{@)7K9Lh)lRuTA9lS~DVY!_E`-?!}80vE6M}NiZP8R8Bh98-dP1=D^ zTfnA*n~mqR$GOP|V{tJl-&$rVWvE)*SE765rdi(W?W(AhD6NbJ^{{H~-sdZx--7XuNylqNG+2 zN=4-ycab}%k6Q5gh{~;b5Pku{G+MNCU2QUIJ@(z5l*{BH7~0qpTwUg9Y5Xcp1`LK0 zLGj{k#CvWX>>w}=m{A0giDsv~cN5m7`xXDXtT z$`BIFKX^5wHZUkx*Z2}(S&6t9E7?)YtT)g70+g!$l-mG-xaQ~rc0a6DUUD5QRMIGb zh!=fT-Imtq+hP`$!@Vx^f2OVLaVfXZIyF>za^HD&&6kzk@sI101| ze%SaZovh=U#`KITK(qSy`%@-XZp%rcQhL2`%^ZBW*vLoQh&OqKokET7d?42hOi2+E zJni!xH#h49rs8Auiog;jTZX^Xnw>x`NhrakwY=PO>Kpi%zRFILLVO^UOPu=BCCJ_0 z6v6WJv8=1uFx;Of_h0q0rkBlq#103(Wk*xbU7cUhB-QN=MAkk+2#~acHV>0~iJlB5 zGyY2=nG&(U9zUHlkO60C$X_OSb6Jry;`#jMaK-c6_1<)kRgD#nvnV~g>!;GR5BqI3 zl%&r2Jcf>Y#Sp~_$I>52CZ!D|8v=}Aa#tpC5}f+2MagdqAaJ|-8_a@fF^v>r=A7B+}*YDW|~r7?#_*DrNr#$(lpxo3NEzOvC4 zitz6X7%iOw!!p|LQ(|&j!W40!Eb{-Osbf{Y$&fqg7o@p>>E6@CPISg6fHJpPy6Y5T`^3W)ftU;dq z`ECN`>~}8VeRZ`;^S0TO0IHrWwGG%`amfJ_A#Tv`V<5@jTYo8X3tHcyjV&dX+U9!acW6YH@`S2f>XW3kRCpU6+J=?w#Oh zz?*O7lEDAQXw%P~NLZ?w|9G9GKUWuSc;stf$VO~QcLvr=c-sH^az6VrTtJa+TX$NN zl)OCR0M;b&iPyF;L)LyUc|+*cF=K-V^JMMHeOvdMJLVU+x{@G|8wm!n)zktI>KT)V zC|{JNLEo+vLsu-8Ej(Q?tub@_4}W~At}TBK`MGOx`Om~#f`}gy$d5)P6yok~2{%D#z7K@!d zVuNS5ry@vNOZuR&DbmN#Rrn~^jnx`IQOKQ;#96&3l1IRwK6`GTOUpq}z=$jt&swx6 z`V`quwPjD;pQ_jV*jaiy98oZ0wr`kQRX#5?_N{1Onz6)D(ITSrRf|yih_({H{A9_J zoVZ&hJo2fZ077z&&-aVdsP>rUkOth$-80i%Nrs7JPU{I}CFObG@dp}`;?GbS+n%?{ zSTpK3S+(va>}D^c)OZ@IAnBXHP34j*-+?KfO0x_HRvvXweAk{rP^gp_vp*Z_a)-yz z+sO2J3Z@mkt^ySbNxsHtW-ay9Ok>jd8u9CE6|g&ikR`E<|l9-7lZCta)Mo36sNX&uR%d8(@QZm61*nmdSx zmT6ZySV_A4BUO$=O(9iySnb@OX4TfMgHhQ@+dL-Wg6JtMdaCB+xYd+2S{k`yK2q+$ic=0=$sw!|SNaw-8`x6HE8mFv! 
zYNV<{jia>F;=O}?g>gVtoWsH*&fxxx>gsy;E_bv<2x@PI?LGLMZ54A1YjoIT8sI zyE`FRMsMu%3%}3LKBg~(B|y?}m2@fOOir-&DUiI=&<7qZ7VgvdDlB+O)*X6n&UBE2Puf?c z!MzRXZ%kpY79%c3%we`zh}1SfscfBxzzY>`L6LrU zgE#ZSr$F0gho*AFmRcgP#JYfRrK)7~PV={shD(nKNTLX{u7j5uYG+7sy7WP&KW4SScZ5KX=W*f*#70f~Rpk|aDttd7o5M-(L6hr`b$-i7NDKMnvfBg2d_j}+^Psf-#;K3F zQwFzL!}E3D&HMW};ogDJc7dGQiYP4k|L&O48S?R^HGNi9_N6+*+{hpTMLynQ9p#is z?=2`NUE{#-=hF?MV`@VM`|H*Jcl?&vlrs2(NAUr}X^y?E)$~Hmr%$>CR?=2LnKwx> z0YH1_)ZXp8dW(;AGm2mCtl(JK%hZKOoM9C+@+(%A$W+u5T0nFxA21m`nYm(K> z9Pn$3@>8hnL{=6+!FIkWr*c`Tn}gDV=m;3yCgCE^ceDV z{OtH_C_SG<9^@4pjDUoD417ck1GY&nLHH_$W2aK^V)ICn>NOc zX5hPfZuM%`;o8fI@Z|!MT8JZ!)LYx?gv564>W4a~=@*YWMgqIpo)GCLBa4o`{rhQ! z;Bqltbu({8Cl($B(wd5V7?sk4g^E#pymRpS(Z zw)qk+7_vkYN7gCs0JI7cE*i=!-t)5zw7g`& z7~b2cLB#uZH{on9r)21YzEq3ADKIJ~Du+Tl6J+J=*itR6xDJ;boof@|nValQ8lN%= zR|iKYN~zvd9mdA-?(==k&#>2 zuq=%t$YOgh6}lAU$kbtllh_$S#S1|TPF4ECPq*B&*fP`LbYV3#Gn7r;=E5;fMrx*u*UmM57eJl4a)Tv16|3o3(?!h7h(4HL1RN+n zcMqU1Qq=V)60))-@-ieFCqpktz(-O)pT-;$kK=Z4j;I{X^rMy4V1!0;g}N19PpJlzI<+fA*Nn#1@o8R(CA)54j^8b8sc9-?{__h#S{oIo zkB*pEyL|MILZ;i=k^6koKQ$MeQD^1#1(V EqI=9!#U|%*M2Q^B6(;YtngFpP_C~ zBKxivwEcOqJof5@ZVB0IZVJuAN~L4@X!qHs%AwAIUa;*@%FH6wS_lW8u8zg;8vx~C zLd6bM0z&z?IeJ$0>&v13zk!&N>mJ&hq8J7M-TNm>dES8@Cg4XW-aE%w7YV^G_`-q}a-1|Q) z2^mx#;3v@iy}KvRRGr8=qO^~8epre*NHul4J~qH8 zY7K)0UQ=lTjx{?3EdH?bP%~y0vpF?K`E&cVbQ{D-VWqMN=X7g3y2YMbH911QI!1ex z;SbWf*R5IUz~Q%P~lXa#av;$IUjBz$cSVVC28g(%g7ADv$_wzy}DAaG_muj2_ zE&8NhNF(6xZxhwGVpTm)Su0IVw|u9Lx(f&%%2h{gs&~%G@s!hncrFVS#%eZx1F=GC z%58aiD}2pAh3qWXDXg!+{Tu3O*l%BIZzseo|=Gvx+%ipbt^83y(_AUO{%v`r5UD@htNd|3>TddFn1Yb z?!o(wxY%MB0FeMmyLTip1?7gxYp52I%_qg>bD?A8d?BDVNY`HdcqxO6V>ddVy3Z~~ zg?CNMoaVmkrbkqlxdh%*F?6wA@Bj6$Vq9immp0Eg$?}9SNvVvP&rNhl|uCECU;L zeB=8)+A#L>)+bxtv5#yp8Ms&C>-`=hk>2Sz*^4*&Y}n5&Ie$^5$3TfiiFdf#W&i== z%gq!F>W&yvn?~k}XTFR!xi8V1^8Oc&SiAF8)mdESXrpb(a%xR?Z8lXa3zHd}*G_m& zs{B}UW-Ybs$Od_ZK^Mf5j7$H(;|J@dC|uhyN*Vyq{Yd-^YvYy!r*(?}CxQIsq+Kf8 zO9NrsM2__QE5-V{v`bp%)~P?(3gt2vH}5WZZJ}>@MypbIQHPzGolIMuM75Tx79W4@ 
zOVw*IJWPF(cmHi9Aop&xM?3SJ*m-ji8!6N@+$4CYF(%g5-7(jnuj=G3ClxXq)R?GX zz~3MxZn`%JXAK8M`}#&yDMXd+*YnP$w~$7NLy?Im3ij@K<_aHo!XjNO77FLiD}!IE zRMf_qyiruCTH^5mpi0kOSwpW9$K>lPQ~RZ8^)U)`NOVma7KxKXey00l#X^;5@TgK+ zHm>KiNOuv3RH(1nGkN#j;bKSIA8LC&bee$_cTx+`ypy1GBcbHkA~=wB&egJZuKR<} zw*rsJN3UMtZ40Ccyz-KlC}XeCiS{x~>c|^>M|%u(ATZpI<%~O$RK&o8YU(d9u%B!H!D7z58E|7*?qm zk&dJ;9?6%g&YxBRcZ;{k_HDWe?Cr=HdBO*?r6o~I)`FE};u|jWJLd%|uvo){7rDBP zWXhAAdk`gly4wT9t;PHnu`#tOrB~nsmj~Wf^uk=+T1PXuyB2U_`JW)q@RMa0>07o* zAi8Ma;k?E&RO$-5FO(Z8Z<{ZNwLw%Whnv_esAvg}mk1@D5b?UBX-!v8&y#LTEFotk z)XyxTJZWJV10!X-AL>jRp&J+DZF46LJkR{nRut=}w%zAR^{t0#P74onZcaZyHy-Ao zpn0=Lc<2;?)a4WF?iPcq_L26av>TATE@Xbu2zJI&69~JFuH``&x$#6US1cm~{T@Ia z-F7>;8O}2Vt-EHAbfLckgbO@KY6^G+@;{emKoK`E;8!b>{|lnuoDiEt4havRow)cy z=ohX`E!BhdIT&G95?Nh|{_0%dzuP}W@rF!<_HqtLzW6Rq+dOW)jMkV}URBC-r~eXH z;PC~hBb7Nd^l>Rk`hLatJzSmxt$-qP6^!D~U(mdEDST+#0IEc_D)GRu#1c`V4qEYO#VNYtj^BRog3{ z3=LYp=|9JS6B-Xq$)@dxm*283_n5SP5Ob2B4pC`|d81pCUR!_Z0FQ84`Ga)q_~`J! zWmUe+R@>KpnuFfV_VYxw8-BJ5z_$Fs|CA{K{!4XdPvLXsFxl3+K9yo( zWbJ5%^U^_=2Ju7C-*bEVPiq=@Pfi~X64K(9)#D{Dz4Uw{aPL^rAaRGl9YF$}P|W4Q zxy1|R*{qzPBSPhWEX?tG5$A6~L?&xw+*p2B_|5ORTAhTlyZY!`9HuJdw5@&;$`Pst z3gTcGe}MqRJ*$8N%T--AdR6|Rwq@2wz%>sNP+T-M4!XjGRJ{)T6v4f&ygl0!{ zWlQv_jl>s6ZttI5MPCae5V~YHRv%-18G2~`narBZ=hC#9oW@bTjS3NL4b0M)W+Uj? zE0zT7N4471Okat@>L~#yMNVv;UjAEQ%yYcuf-*i#b2Mx~fl;PZZLyBxMy1eAXlZpv z>at0sR54S;P|OS^%@~riW1|kMW62%xK%yeZU&0Fs2NrdpAK_&i%TPKF?c@=w-WQ?5 z9t-epU}oic**wHRb4)DqG*hs-REVi&Aa~2;0;4$$V134@JHJ=qJAI|V5d{_L>6koaIu^PW+` zBL^5RE>oYX-ePQE&PNiJZsWEIR~nsZR-P2m1QL7XHlAa7O5T;Ms z(6-Es#L65CLNC9vJGwyafDn}`XCPu#NS}h%6GPTiy}rzpsO0`j_Lq4+5<`gUnv)Xn zFe1ArSbiZtEWq0^^o8lks^n=T9yezp=gfASxCB;`2iP`yP^n7_z;xJJ<@C20A-rG| zrsS#;Jk<5VT5^*YY)EVU`Mb!S^$Sa$3|`@`!>4cojO`W?*=H;)D7V3<2Xl}&qw30o z``5a&N84a@p(B$$l!pPBTsf%ZPrI;A z#8t9OqvOhKU&!P|?iYQ~4QhIGL>i^9`F#M*zm1CxTf@hvr{B+4gz$fEPgU@qU7x!| zeFIzqGrYeXM*0Oo^SI;oEE|PQqQL_ zY!QtTe>TF)~KYYS16}-vVyBVlj|7a2=yp+9-98e&ijUiUz{6r8GxkP`@-zS zSe{{q>XQU5t+BE%vCf4WV>PHvz?{7A$}s~+6ay`JB-}? 
zSSeuH2jj~-*K<4Kyaw~J-E==CyOC(h%pVa;>k*c9b{0}F4b(((bhg~9b-g7KlJYfs zq5ghF06Ix-o9b@Ik?>;b<|-&B{-cqTK&yCOT|rN5V;oyc(|Jvoq_L|or&=F}YZ;-c zt2zR9+Vg5P9&Ias+`utz_*HIk^-)te{>7K*xu=gqn9)C+UaQhjxcE)AKnm?DKTcC; zTD=^f>Flu?c{7SFoIj$mD-^qMh@}eX^zsTm&(aLj`CVw*ILB-t2#6H`2?)589tk*9 z5T2Gck%H2PyCN^T&WkL=`=|trR->})<@;=ib}MRZ``o&#uo!6)4pR@I2^2ZvEo|hi zUE{jU;Km|*zT5=Uduu^sBV_?O`5o74Ypmio*@}h;;t=5}y3M&~q6ZzGG5a9XDXfJ0 zwh@J?sKO{UN+XUX9F{(`QS}(xJTw(23H$>UN$rVUlYZ#uzs@3H5>>x(a%z4@mfh8< zT$%2Ov%VPv$|^H-ag`xUTO1eQU80M)5wpD;ubCLE64%h9spn(&9pI-zJ*oA(1N+X@ zqQwu^%D22MHq>qa-~HrN$cc`Rbvo{b+>4~+X-7FtLk*|(%av>mG~o-uu{38R$3L7E z9We$UZi-v^7IVkH#L#UWgDj%Zn5T(WEt>FqFdfZht-nZ73vzd6wc^I-0K&_#D611+0~QU+j$#9 zR7Xn~>-rY2{FZIVCXFyLwbVXJZ?p}<_?WWb#d~)e8TkUY2yeH=cDbJ)i`;0GsA-xT zLbL3BIr{-VQW*?*&ztef#IH|i%qy*P>PGektu~;_PX8jZQBYUhR}hE=Mzj2rw7}I8 z^kqXY*zZuEQ}V!A;}Z1K(+&}rsO;5UaTZ17P-RC1xQc%h=*jwE( zr4o}9%h9gaOu{Y^VRsJ~{}ok>$NrmJcYD85!^8Or7IHC?Aw}a8b6YYQA0#aH>$W2l zMdZ94l(?n(#|;<-^GBS0J#=mruQT+8kCgI0;U<;Xi9|C|6Q+F_3T<|F(S4LWoH)<< z-W)XEt}0a9xv%{8fiLuJ9pRTFbR>&_*Pjk*M*^i38kEVUg;sy_6(?P7{I@uFCC4~d zkIR4191g1#n=E0I5pgWBg9VU7wYQLHo42`D$&+0Z9jM{1K;%J;s1L1$18JAw;(n%@ z$&+b|eOGLueO?__c6KXfG`F9!oDu^rzgM@dRjjZ`ogMtYE7?2@b7$F6LloR2o1*OI z!_o-4keCuW)k+>v*p4jn46f^e<@ufnht8khnHu#5tR+d2i>*&O&~Xa1)m~iM9dBp! 
zjhj762eq399O*CrTx;{V_?`c-aMCcdjuAL?a;w+S2)HnY_!k_}v`|SE=2#1*iwBRN)L|IpTUhtp=jtKpWb$o8T z{_lw~aB|)KP6C1T++d1qZav%ZNkxKJU zfE9Z5Jh@%~U2dnl4z|eir~h|Dv+)}$oBJN0R8dP_L9v*OxX~uQ*yTN6#4kED-*1J_ zxKnB^;sA?3fG!gBykGBFl)n9P`Q;1sU1_pWX?tyi_WnKTn+dnO@jmf9|7E-Vlk$c2 zy?pV<8+yd+>AAYVYr~7GR>#`jNkiT4Do*pdKP%6x<_!zYlPed}cYG&D`?v*uMt!G^ z1r2*4f1&W(3_43DCk}ZqXA?%uWUb}CZD|rLGiiwz|{ds++0qoV%V+jMKL+Ipg zb%RmS(*z|ndoX##>~G9OOp9XwuCjj5oeSk7RAdF~pZ2+!_9@~ojNfDd5t`_`j_Xy^#DIWbcJ2hq!SsH;46?e{B4|K!z#Fyyf1Ki9Tq}tG@m7n=e1SU=Id>-?&*@F zE6_STs2?e#iX}*?0SH|#9tT~Ks?W?rK&_v$l9AlKH&2Fe_=OI+W#^?KrcEus}hX|KPQirA34<@*2kRv-%EM zF_sLyD6YM$Su_ywXbMqo0e=<0_|0jnmrdYPkWkx}TkTeB8oUy78<&$0yS4gmTPejH z`JI1hS_eEJc})~Z6Ar29w2Oad4W#d2_$rww{YIe3{U+I$*cR%Z-8(~B#3iLw95j%f zEevK}@UMHEq~CUxe>b>nnGUdhqPHfmYut?z}r;8nUUT&~?0#XZK6YC^M`1_LCS7La#KFBT8nX; zk={c7-fcWD>@T{P6_X@My53g%sEyZaP&iL2&dXdU&D_7YqWLs4cdt%Gf6Vct7xm=F zCX|--_BM~t{&tE!)Soz`m8u(ce`(T|lrbfcibKot9X<8*aL?U!GZ5*8;(Ck;+KS$T z^Y`5%A#2Zp($>JcO)Qg!^MdErK3D1I$PZ{j- zB(OiCi~=>@p>w%i0`u>b&Nd7}qdJ{%R|3q}L8k|SLR02EPe(OkzQuj=k1khdN)M96 z-Wn%;N5Q_Qof|FZ3s*+{s3gx>z&m2`+nJMFKk10H$!Cq{Udg+0jcy{?v|Z?6+s2h| zUL^Zf;PdEn#fF6ZO7LDc{3gE7iKn9HzLjSs<9Yp=1t2=dg6t|6P?41Gd$@jTL!9(F zYCQK7c86{KKUBSCSX6Pl#w(39A|Z_^D6Mo2r65Xoht$9TL)XwLEh0##ba&4X64FQy z&CuOL4V-!RK6{^izN`=HS~DNkb^V|9#QogAnFxRBUGb|;*d0u?a^&4+)N9CE|Km@; z&Br^>gNLezvn0R8hvj4HqZKrbvC-GN6U z`>j8lMWQ#=lKnc6Uj(I=;e2@#<^MBp$q7WIuPfzYeh>IZdg^%@al0mYDSW+F*b4cPeUfK-tMWYtOmmIZYA1sg@8orSa-`!1bK6=&3dIGTc#&$bnGFDoNYZ z)MDpND7o>>Jzj6^Wuv1Z=wH*K{i;g6Il54Z`HmF~%Nz~D!ia?*m+}IG=XrgDwchK` ze(3Zh68T&j75V#fy^{m^eKK>3ZTIl>^u%0T5kK_@X~vAJu_^Y^vM+p=&fm)($UDs( z%XGM|bDvl{Q_7#ZY=R3cc!>I51=B3w^=0EK=`Dx%|9;j@qQtP|?F)q67gWKAuo$?* zU{MIrgzq$6i&Io1dWNit7o@E1rccx-q~6ZQq|d_NQ1OzjQQjaiEd71y#(SOVHByY3$1+Ex>2I6X5HMF8YevV0mf+Uv=jIyCr zxW*&~CDu@SS@=sdxUktsPSEk&XTy-4(6j3SD2LdZn}4N*O)Ur|ZV)hBtvsIPvIU-w z+8g^Bk%Pk>AG{Uu#Db?5xeYIiO*T#0iYm<8&KEad0p1fV&f@#q@=>}g9O|OsaW_o1 z_JjrOd!U?~6mDZ0tJ2vCFEXxo%Qa5aQQkfDnRo2!l)%uNQs}LoUB>wLyKjEMZ$+^& 
z@j`;`q<7!I{@nk>18h&)W?@y32B)UpLK)-9CkcoP)Il#m@gKXp7OQq!5;U2BoPyxcLKO*HDOik!s2$we?M5=x70P>Ab-;G@7a<*>lGDdk$hclx-#VB z`G+~t@VC)eb|ci`fZ(OPtG(*bY9Wy5(lM~cO~5AO%(Yy8Ebn&*(a1@Buv2JDS=8OO z%yn;6Zft!~EtKFSb)D8jB72IousgY>+R=Rk#?{U}9UV%Yt(r0%q8~F()5TeMsFz;3 z6C+g|cAHKp9K0Tpnr3XuojEg1$8sq`ZPH*|p9 zUb#b#S{54H+BV^ay!()Y-e1RX)E!=HCZ5d+1}KAZyGQ(DX!3NM4cg|(bn^xG`XPti z&gg0>#SL@x_b>L5cT-RQ_oxp+=}?b9L`^l3-*u4!2M zTK@H~f%B3w zG8(Ly(mWNZS`>aa8hzbugFhXyT}3FFH8s~Re8SY@N`9D*l9XKenz4QAy+E9Csz6^_ zG+yw4>goC2SFrynb7hQvYz@MF0)k!9T{Skf_o89U+Xp8n<8g8EOWT9guxl%nzwh+J zUcl+>L5;ts+SLKS>U^E*ijVSxuEwXecviG)qY3|IP=zpT8ddFkB6U|b1G#xEe&4-q z+>?JZ?0N--U1ZNe(1^Dh7s${Ud*@$W=%1d>KD>BiEbLCHfu6DZw0`y)re{wy&1Yfz zkoLzW)JArbZ^hP)#7oAYw^#=?% zZ~OG}SrOm}wRqndQ9ef`yQ^#zk^PtR-oPpUZ#95uRZT8Z|aO%s`r!9AzGaa5K1Q{+$6_0HEBS{>x1Do@dd zo7iSD*=#*}GP+SW)5jaS)vAVS-+F4%SWsR0VAeWAi5GwOTdfGlD&Vg7ce8%Dl#o?_ z$U-|7gP6e=)Jf0VZ{0}dk@GIZ_LUOH8arm4HooD>ey|P}k@YZrhntrr?0S>>5Xs0i zmye(j=h(6rCE7~8&AXFExtq^)uTqbAd<^lecVqJ{RCB{U^-8UipQ`J~hv+;Tu@hCY z;5Pb1;azT)f7>X@(O7QkFz;Bi2R=Zw(~S`KJ-7AE^j|4WOr0*`H@w8v(ORLO5IOm2 zLs7Qe&UR}H<@;CH70{=5aZqw-C*+>ZiPZ~%pU-=5Y!zIQ*>R2d>_uG4^mMaP2xkf! zjaKAJO#1I}e-Ge3{B&&!MRuATULKx0&(4zH^T#F#ns>Mwe7OgGX!Sw_K*DG|jlBDgiQXZ-m;_@a>XT08pG+t8{k|6cf`{c}i)gy#kI-nvuV5+}Az~ zDOWaoQVO12{ zlE!$JdO*v>CuH9uiY&)geM)kM7SITvV2~{KQAt0Vaag)r5DQ#6@iTPf;5Du7{_8hX zVtS+w?99Mj?ek5#B7_UdRcPx7D10HRV5yJd6Aep-%N;2+bY;*LV(;%k$|cbzIpg2I zKAy%%TZEw|b}x)V2r&*d+};-3=iOlhd|KFpL_-C6_v-ws@mjl`fBRfN`)jAFT`yca z9RH0&PS?mZa>!5^9$HC}A?H2o$B_qhVUe2|!FGh4nuTTjfuBIH}yREBFgI=KA zAaY1VEqY&Jad-cPcKuxvw_(!YOe(TsjGtoi8!brJ;jAKjM%aQhoqMPeo;<-{oYSDt z2_Khm1^Y!v-US^u*b&Rfd8wQ)Lia>f_G*t~PjGz$e=pmr1pWp{GgNJ(UWs76LSu(o zoE>Jf{I&`uABVZiw4!EqNe+6ckKtvWNAnn4jEGi6rgW!yPf8}=cyg146=LsT?0`?E z?-XP-q)I&D5Dlb2gqVpc6Uze}N+;PMGx>YO+qaW#)ZKJhbztew)AIe|i+lPp#FxFL z0H;+XIQ>|+if)#={B9DjMKukPc%`fnp`+M_ZL&{KciZ85z6F(hJaB*<%^UJ)Ns8_@ zS8tAH-obepv;l|P0=^FW0}YX+pX^?v+%8Z-*)+cW0&cueXQceU_nobAF3(zh_I~&! 
zPA!BEZ4KbDv=s6Cz9+)Px|4c{4=@XOKuDQC?l3)OpbB7T*{x2mF+#ZlZlq9B3y-&+ z51w~(@u-C>LfFaBktRDj{cXm-CKSCHEP3*brPt`lH7#TY+>CWxOFaB{n*QIpdV@49OB6dI-^GQ#uY9lvFW$iu zywEsX_;!6Ij)epNP=#Ma<(xIeCpYG{eJ$Em$9_uce3+Y8ahc8k`lM5yb-7}jm%d+a z;mK3zWQflv9Q^Am^imK!*5=?%wky9_Q@of5d%CmDMEruXP4Q#gKXS|{jfqBUejDtj z&|ks_;HfepbpSZK3C^9a$~yZXjW8QbKPjChm-M*kVcY@*yl*h z1QHk8Wze&pI)6{o>zA^6>pYfzo#3=K%gS!FfF8RZ(soHqR&8m(Cu$qx{q@$nl5kC+)y3)*Z_z6Yjl*f;34#^VbRS%G9cMy+lMY8qflIQJM^&GxMs#NAh{~TSFKtZ9cvzhm z%#`cb1vxRI`}8W2e(&W1b2%fd!*1mWaVm-`eEVwo{v`=d`s&lo3Tc$3&iK9YdC8U5 zMMmu~=bW(9G0qr>;Ya<*v9~>x8RxfqJ#d6XgF0(ebf|2X*v3<_7A0!iXYl!5fgCL- zpDnsVC#CPm6Rq*-L?n_Mt`<`8DdI$x9JJvZ-b12A z=^u^7djiTkf}Jqy?7mDC2Aio`5idnU25m@L++_5^^ovXT8s0otqFEIFqKfEQq;rj) zq9$28g>5*GMBG`hz)61MQNYRooOY(Ob1#I?yO5uZ&jZQ(M99A7It%^-bh0a|AZPq8 z!T2m#$fVx%nNsBvG>-aR2z~WdFC2FPGM>RfTCx`w45YUf6n37SQVI z>3XVQq6tRYmpk172W$73Ehcz2y@^eZdAdTWVLIf)(2aJQ(MT%JoqQAyDl;O8KL!7R4 z(f%(B#xB-UU82*_jeH1{yu9+fF-qJPp$2!cJMFqkMEX=3cXd`~>F{#I&Xe){4YK|K~tLI}4C?~VuGmXT{X9*ermMrjpHdmflFFx|k zrcmHeF(Lso_aED6-Sz0`yP)5rVWMIg>(Lfj)K1}I>DNErf7P9Mj_)p8_IpWe_jT^3 zzV8+$x4@H$U}bpbx>WQzvJR|5DCZT$`t!ATM>_; zAnny{>;QIP*pmXba8MPlfV9WTi=&agkyS9!4)j_ZS+Ndtzjn|8|9Ne*R{9R3s)6(g zal+teWjX*M6Q*B*-OB4Xa))u^{sXvSj|tTn=keamex#swiBY{s$CDCNl|*blWuOup zT6F_pD0x%T>BjF%@Ig~Y*tfOu%6+Q}a^hlVB|-Qn=3kdo+$WC|CM>+@51j)F z6wVD6bj#Dp8nF4{io{oiS1eUMWZm%$l-U0qo42HzZ*ZBv5|d1)KC9#ooM*U?)w+;l zly_1`%x!WzqKf+yF4oK}V|6BJ&s*+DgIKaCi4DqP2;cG{kFy`1&3P-wPS&<`UhAx6_R7L8 zqIvKKRc&-kD(%&|nt7>|V%J>1R_S}1zR;(s*+Vz|Q~XmT)PM0X%k;2H_ciF6Y4iks zEXM3<&$)8FjPG$wPkzdEJ;ulB_pA?{RTN?5N z-&OD0!kPWFu0;96?1PKB#U+THT?p)G2>Vw*eLXkWo3ZKRHx$iL-oM z;4kRT#IqUPS5|Y9d|0bT)$zpu5v&~TzEAb;T+4}l^94gNm4StTE1O-2YTTPNJAfHBbTOTQ(%mHOq72Q0U+bC3i=TV>!IO62|-! 
zbw&MP@g6vqLtH2{jHl^^|GD`%rHbw&8Gyc@^@Of_TsQmrgUmhwznysZBwg%c5JU$g zsLQx@7bBGRMPWa*lT?~4B%DZLEdhkf?V#fT_utM^uHS0W7*Qb*G^7L_9T!Pt&)3Y9 zSJvnXl59mfVUCnW&Oo?)*1$He;Xk$jG9l$}(hzkxEN`rP3*k2S1!SD7zifXK^4s8# zoy^v1WCK5MeweJeB7lA!_mZuv(5agLbg^;d)>X9C=ZcZbzArTbj4wXaL!@w2@%Xd` zyf7Y5KoaJw;^ciVw4#DVbH-MV6PHuGQIAiuv;bRn!A^T;=L2b894~nBzDM zub|6*ZAH#j4OjTCrsFh6`#v1HjUPMAk0iESF;GuRejdf`JQKW95swzVV5@@zZ1za&M0 z!q;Y-hC1u7Y1756;?bt-E0<~dXs9X28Zw$k*t8gOs0K4_y>*n~i$+UJOPdVhdm-c7 zv)QZ=(nnS+Nrfuog$_Q2N8fJxAomU$(>Ieg*T`#a*LIhcKO%|u?sk&Nzc&u(O`{X< zu-SaBd{?nn9ML_J>{wSJ77M1k9nys&DOCDxhnF4I&c4iE$m=lSXM?vxR$k(GSiW z(8uLtr-0W$@GHRQ@hDL#Hs9(47;@nd&AS7s!jb`Ls#5#zFAx_SbLbAqWBFmnxunn` z(*E6xHd3m}^c55fWbZCuzyo@iSsB@VyrFE=1`hbuaZ-Z@7eUb-udb?x+Dj9^d4>_h zAI7?`#A~8awhgGQrq=w&t`2b?GoL*?z}MLioEeKN_npXcziRs{*kcy5Ehpks>M>vH z=)tNopdZhEe*|0y?VI(VG#p*ULy8A%NvU-ly9EtBK$f?y|A>SX{GZPRn9=e98E`f> z!>%-(71Kunxk{aE3RYn$p^6)`VWA&bD#xxG%*LyQj*3!EilMp&l;3vcpigJd`p{j+ zx2kg-)Em@gfGz2z1+-9~H5lZ5Tg2?>usUsDyB#NP~AJfE#&w|gKt4FxZ9$1vR-ae}E z?2}B9KVd#8vHQUqc0?!FVE%+vsmm6r#1f-V)0)B+oEkSHTW>`BDluLiDlj|9{i(e%a_k`Unih6om2_ZM2#+l?+%%NwvmJAGk#3NCx z@L6cRbQmIGqSJ}|mBs|#DTNH#B%%bZx@MsvSxIE-YkPLv*w zFdB6%9x0;*D+^d3;Gv-WKd2@ak4ne#TE*23>66(n#{XnG9`Vaj(3UtFJCK)likRY) z(&7SrI<*v}wH>A$_;&8n2g@lQv`V`}tYwuW7Gi*5IUu3QAdjgUpum)vWjSZYu~v3f z@p0kKziE+NsWb(?fm=jLdZP>oyH1b2G@jK>sZaaExqU||im{~-3sM+$@&)2s(&Ac8 zSIP|`h)oVViq8XvFjrTK0QBwPG?F7luhMh`z-P^>eB6CO6kV*=)n?LZvRez|D7~D@ z!3oR1wvQ~uGIWHRTKLT4Q8M@zgPf|?JVZm&Ab&8C`&jLxlk_pvoTllvyGS`p^0!EQ zsL5ekLxh5Cn_JqtA$FpF@7EFeq}Hp?Gzh!#x^BBt^neddlNYjxUq@P!OlrBdvZT3~ zQc^0=w9zYnSg3L6ahr8zTcFc}8M(2a8XAD^S!$ZbEoF+sV650^$rr|4{X$bxmaX6j zwQ?@S$S={(b~l!9ThVIvrKbnQyx8WNz+AGf&2p*g+vT_sJ8Lk1NiP5Q!4CQk;l1gMPv^Dy1@gBp2IlafIKaJ8ZeY&oF|-@d)_5U+H4@hXH!BtE|N2ogrk z*Layx41%F@$0qJNu$fMXSMTW_n;t>*cejB#4^{X5j|nK!$H_-KrVd+psaXxn3Yy}|4 z^tM5!HT^KZ)Dw~acWC&ZZUmTe!0&$S@q%5n^Gt^)M#Fv@E69_IsgD7lSJVq4#PqjI zauRTRjorti)RXz^-m!16eT(Yq z@A<9eBrh1_z5LTu`*WqXn!4XIMr@(x5dLzj&+KZ;MNGcb 
zzEpt77-{v=$DGG5Qb6(MTF{J9mMT<)mK3gtHS!1)smWjNv~DRKX}WT_{zu*Jn8{x%_s>JUwGS4Jy90Trf8f zdA7|=`<|>}Derut>lgGdOibmkJwj$k!rPPoLliH%ggXqd+{ zuqJO`L$km^cH{+dr%ck1B{FlNJ*)Z$0XY0lEWMLRhDZLvLz#CUzVKaGW= ztASkw-|6^NuL*c>SZ)Z*r6b650MNjMjLe6`w*gP-f2OvwXuTbzU8s=U8@Gqwrp?@1NvU(MQ&!4aK zL_{!lG{O+O(SCj2Hco7-I4rH|QkLfltE?iuEl-Q|i$r+W#kt#P`c@sTznvNdkvbH4 zl&$fGpBTx{xp=vKU2+^Vg+3v3iy*-`p0ns#n+xNVP$2(QbNujW>zh+AqKbj(`zjGF zcac?=w zoAH=jq3^qxUSo-gg7R0G<7W5bxdLp|JIrCIGu?xQpN=Zlqn+UxK6rhsL?PBawUZ@F zLsm=rcOU$kv`CZto2BZ_E7ny`Jhds6!~Bvg(tyRc`0Vc|2S&oV2f2r4_jL9p>NAyQ zRM<34rkdD>|4z)Uh#<2k3WBw_2|3H+b9TQsxs(9=(ZJwuR6=Sf--fRi(_I;4g+It% zc7E`{tvmY?W%2c!@P&D~kf5-z)bCnvnL17JP&yIrF}tju(^$j_ER&#i;+n|Q7uEDAJhgCr0lFM7M2#fUa*+Lsu|> zK>4)z*N24O#yby5H@@OAy!H0r?sefcnoWRQf1p7lU}l zOTPC!A0E6N^qf@6V<{{m6JxoT)_f9#W8Lk=`9g*;>d|V(P0V#I>iE)beiqYQy=11k zbfTm}yDmh9RPHH<$S0hsP6d={JY_>n&3*#Ke#gQXu%n!xAyBT* z6;JHP`>Th%JbiD|VyBFtZ~I%d@56Q>z_!yZnI!XFN4I4rulx=};TkmQ*gEbw#8Xci zR+O{dTeG+Mk-aZUu_N`08ZMb~&~qwfJ;&G5>nCcJHK+h7z^nM6ATWGV77~z?w3VdJ zXwLwI|C?SjQTsM|Q?NH$U#UIPROuhz&^YpGjA4X<#N#`y{Q}n#txw7X;`ZYb2-1l||gf1Qt3PjloDbC$84>ehaM+!=yzB-fy zKsCF|yZ3(%Z+~L>zwn8AY@HqIu&LnRV{3?e8-lH zw@XCkKE0x5au%wNJSPG$zrDDR*&HMEzw$NTUpUPVi{(cd< zPpwLU7h?Mj>(S@3IjWDOEVKLM)lcF{NGdw&9A5hgcl;;n6@gd&A&pcnb{BY2=sKM5 z?oyh=OmQtX6zqXP$Xh_fxEQUuLVq$MQYZzKUZAJ=nnEJiyg~9XX`}o<5@U3%lh{Bn zp##nomrL87`9%&cUEggZHkz1UjB1_yj8GH^8sR^_bvm?WTNkMjw2&2);C=pTf?p&^ z?VwO7?J^BU33n7++8AByipjNO3ToLEnn?HHJ9?+C3lu1Ca37(< znWjGaxIOTBD3!9ogMVTQDJCm5t?(^)oM%Wci4=>JAN;a(Z|%Djuypkktm?U}R47Rp znv5D(P1ZF#KU z(wtUk4pGgvS8UVWjqHP!IWzC1YZi)XO;dHzTOS2`w`-4EPl%Awx6TmV#H8ky(H|0G z<0Q#WuXB8G#n4Q=;j z%|iY|V+H0-2pPU@`;Hexh1Ar(i{Hi{!x_fNlL_5Y(-i>MSL&NW&cq|wg<`3$N{=FA zj#iv=?MF~_tz(zNlXK1S;s)__$38IQfIIO_^&fo2+8@6Amgi?N_4A^ZKm7}KH^vwX z5$WzTm~A`hNZ9CS+Q_`Aj2i0TA{mp;fCk3nKmJoHUV?GMjS0QsKPV#1ugN?vv0wL?Yw zf&c@M4p$=WbVNN36lEtdXH;|5>ps_4BQS}sg5CQp z41W#@N$tYRH9T-%KM-&vmPk4tS%~*>EJX;ZNr|S++6jNN=E(`#o!5cCs4ewuPBHxA z^==XY7fARw$h6@mFtkN4HF|CS9&A%5lse3%`L=bpTB!tBrBbA$X_ztHb5O4P`RKq; 
zAbnn~*()UTG^hz++WM3=6u@_4Jf8b*{gFjDKD9cE)hNij^|vXm~rBsoEvM(tY*!O*7lVg6Vd1XPEol`K#Yye}K5w%p;ka5sB+-Meyr z>)81D(APB=Q`yxT&Cq{#B9B6NnY>Yh2JagERl5+ z)55_70vtx4debIOK=UvtSU&-ptN}syg zk4+PBAzrhU|m$`PH$d`1M@S$TEjg1DBOLhiK&GIIVPNXpmfVYdgN?buJ_CryGw(T1P&^rP%u` zGsB%M@=jN|0@#d>EaXqu%y!&dTtiC}Cw)dKU()xq>+9dy3DM}8xI4$iJ5}YrtBWV2 zwIX*bIefG1Dza|sOd7`y&lcT}w#Jzr*}Vz{xn2hpL$=m_tP1ssdMJK1lE5- z&_Gte!`t`>u@CnvoIe{#Dy)|po8UVCT)*3>cS|tsLY{UR=_8@S&Dc!4f`8m4e#eNA zA&63S$4>3iT*89+uLGM^fXAY_8rSS=w(*z2_|wz>?HwFiJq^{ZW$U#AsQ9KXSTkTKCRLrO?YApZGvSp z#FV!!c^qOcn+4=*)!I^R*=-oG@$nF9HdM^oSCVZV+DX)WZLvK6+Qq2jX`Ih02Hp)(?(h7p_V&@09!!-LvQ<~6b+Vy#&IblEEV!M7a-t4;QH5%8Vj70AIYCw}F3~uA6u6H7%8vfa5|J*vfkFO;)eZ9hHIduZ)LbOc? z`o<$i(>mzN^rk#|&@8a$qT*n=dT*wZ!S~WLLqF8v(ym~GMajP&FopJH!}V|Ik5JMU z2%-9EMa4PP{YFO=^KHsVrE|_w&=w{pYyHddtiYobMT^NC?(X!*<*F0qB8up6$-2Me z)|wSRufq8A!WIZVa%$P34ad1wfZi!1)W%MN8Q2lMDG-@)3=&kEBBxL;VM!>Ht9r`2 zu67DcoB#@a?}RPF5wv$N9;wf3zPky7UHOC=WrVi3BC5gzymw*JU=`~6P-!JEIs3Ev z<{u8@(;mjP&mU12^;zX8y}KuPNMI%NeGliKXML@EvNkh6yj4C5*K?XR*WrzO>5Dpi z40f5nMFkxPNmwPO&b;Kiq)V#3vkfBeZ6|Y^{`rg2IENQpJjvv0$;~%0Gt=G%$GRqh zKlA&t$d3P<{0Hy=-;B0Yn35q6y&g|XZJ-SFWVYvs>f_v;o?7|r`kVeyDn-5w7$9LZ zoM|q}4ZgeQ=tmkm3G1|l-)5JLNg!r@ebTn$;E3w>*FG;hOLqLTp27&h9~A2KqoVSi z`%v(yd$KE*VJ>b~5YGiyWH^t~+j0T4Yr~oLQX;;TI}??w(4@a}z$duri#2o8_>X1u zTSU{9*suKj|Hu^;z?2}S%??35R=Gk9*zRC9Cx_-RkHxAr864{rm+G`wR4XFHDl zK_5VYf>P>`87Bol3nAj%v#h>p9TZkW_Kd;IV6wyA1ZQKupZlD>p6O-yEZD#p5+F*i zm+d3rx_stvb#W(U_WDpUVJf?eHtyxY{P%AmYWQUApU$59Xq}FrqZ&kN8y$<0S==G!vL_C>95u3?GGq53u7QJ%y*0-t}XCYkea0J>M1t6tYXGm z)55KZU34M*4oiuc(X$@!uJw2eG^w_{NSD3-is*k&Fz5EC8!Lg>?N7Jw(|lZ+zVk@u zDj+0yRR)DmyY(pacp&A%6_o<$8oV4Dtn z3kjSgl--#S;k7h{f_YY7t=~AizDEc|ir|MmaSnc^%A$Th%o19$lNu7KV?v+0Bdqj` zS|P4gh=#=Yu3U#3FGJDwczYc!_hB{KrcnPRWx^eYwJ+LtiF)bm829(~zu%5(aP#v1 zwAq?xBy_$a53;ead`s2l;b4j!jRd~0g#v%XApa+m`@f5?{~4GX;k0ClU0-yMLL!Z9R@J5Cann#Yj zO#c-nbVyn;f%YfcTq@Xmif7$qecmEo>iBjUtvoMnD%J5FZC3_N(U?hFXq1R#-q77T 
z#MZ#{;J>Au)-<$rZ0zP1c@(h~VlAM>fFa*dF)hNmC*H`G1$yV(VzM$)=%yvFS$449ma_Kr5Xt^ZfVXJ6^qN162?)*Z zsI`a(P#=c4Er_T2OS%8}!ry^<>2diFEd<}%`Qqc}AlJOU&hBXo zD<~|y2>vpWEy=I`Xo#i=^Ro?*F`scse-GUv#AH!wqSX537o`Xda>`2x3wS;g($Om! z(nws`K=$cy-BA8;9CKg0si9AFftJY@w#}em;y#~%FOtK+hRk6-kJ!ThF=&v^WkJb( zhhO(IB2xVRL~BaQUyT4!B7UY|f$QqVa5>`B6m0;J{P>xFDSL_4aN|p3v4DetEyzXC z@Oiy%T^?Qidg@fM*tr0wL509}`PMf+l0&+B#Scb@+!Kl{vcs(>%)dW^i0jNH4#s`W z16a5WfhO1Ay8AU0#_)Z4xCN1t)8eZyeqWV)s=5u>spb^8>W<~SgBV$)*)LUZm4chJ zJ!=U@&vj=V^~=;6EBwc{AYx$v zTe`4MSjY167$S_ey&Dzej8Rg%5Z0E_HP@IZ4V#7|aSP-Tp!&y7O})(b zkud}U@>H2bDUv)JD8<0YxGW zBrvrp7ep!&2S&HYX4b^c?%IU;+@tyk*tYLIqq9VxPp?xe@I^=I57a>M$p2(i+j#Nd z)4>Ca{J^Uc5%D{zqCPawevKv!(z)}DzVkH_R0vk~UHKMSZP)}}Xo4O4J)n$vk`Cu> zvKrfvM~1M4SWvsiC!GJ9D1d_lH$xuhO&d_i_7z9pS)?Ac^n7fHx9_zzn&}|*b-Dv2 z)a=PVs`x)^mH$5c5UiMUlG%QYvC3u@&=cuI;z5XBvaX z$P#Aj!ta#-e7%%(f!q4~67hdt?El+X;YcdoJlBslxz7+RqyTM4p())qm->BMy|vo9 z35R3bM;17RqEM!|Xm8NljaZlAfHjN!W&^mh)vR6gz}Zs24mseS(JTu@gy7HiWMfn5 zcBYNj8MG9tQ)EebD)$|Wf^mdc$=Li1D;5Kr318m~&7$`-TcXp;HK>VndU^4-1OV>s z2^GEyoVlH=RF8;mA^o^d4fCrltEmtUz>bY4~kG*a?ZoUqS>)&5P zqfdD3eB1wm=Z(AwnXKz?Gqwf#c861^_-^~vEoZgjCU0tif!5e?dOkz5RZ^RTB&4GO zw8J8iI$$j99k9{t_O}U-z5ru{@SQeOA+dE1neUU7F0}nt$!X4fP&f`Pcl$&X?o&*P z83HD@O2wDHz)7=OqF73r{iAp}kyJbsLWG{1F!Y(l1ewCUP^KNjXGdHIK(@ZSM7=@; zUNQqIy>MUSBG<25r?t-&jxC?LjW$cCzgW?g@N{E&FcfBtXUOcR>3mDZH4w(9pe--k zTTRJ|jf|`eo}rfclRWc`iR*kOD`}6ou6L0tgknfJVw6Zh4!d>TauJ7x;N3@otl`C` zrvPWKBE~wkYrxw)?Scoq=mNrnMI*`!Eh*p|na3nxlr)aHfeFaF+l1V~Y=XB@@M#1l0Vu$QVDr=xP=Fu zrCn(IY&(7sb;496^z}nfGi~+XP`patxczvoop_?|k$-hHYF|po^m}fOiJ7NF9kx+_ zyfJKk)bJ=j%%aGoL`0m;=PJ_g_U$^YC08$fg;;XR-%s6sRUta0!Y)X8 zGR!)zoYv8`5^FAjUV<<2c%dxp^vr&JAp@~=7k_ZuV0>tMXm3@q-&fPD80el&VA4-7 zd>STG{skrVN0u==iy&drPBp^GV%?Y_SxmPXyIZj8nT6Lr*i>*I)tznV4z zZ_QTr#m}Gk2LG2e54UUKZ2-`!iwwmhl}_aPCpVv0X0f8O3f}6?ibz@*|L0FEwQ|7` z&9qNcP*oVd**=+=RB22M4D%ro@drm8kgLJ6DtNRhWP))SmYw5m{2jgg?Zvv5Zrs@o zp6x(KGr*ncBe7>S^sNyFe5_;jfnF2i|5tXlpMJl)R((8RN9mPOobO_!Wso5+ZY}9t zW)x+x$kvpR&lL 
zFO)+YAP+5jJ%C~L^jF8ka{Yjslcv2T`ycDk`j!z5ejt@_DsBZGdjb1AtQN{>p)4f_ z_9**cey5b&Qo}dD?B86J6|@xgE~b`=&YGm`Nr$j~4twC2Nyl=)t_O^>b#BC`%xNl2}PBCsi8Zgx?vD<7)nZ- zamzPC-rhN2pwge5scbK_1ILyb*03tYa;Oc?!`(-m7KYN9)rbXZlaUG+kF~hLl;Zm(egJS|PWj|9-VAqgGHV`+#%_}08M@(9w zUgX4(f2uYAGB{bq^fI}0GE>QjFVF+efE>R-j}-fP!AsgtFLQT7n7`lTP_7A-Ya6Eo zjgzKTZbS(ctpB6Vw$7s#r zo=?TP%CLpI7S+y-RWEV2Zzfmr$b!LIS+TZHY6kwHbsBjkj4oN$H(hv}w%K>v63AgGtr^Y!5 z?*b~_luo|5+I79?-_7~-&gSx2!u|8j374OM3E{Iz(1n*29RF1{PL;3^FcS~Vszr_- zCblb$%J*9z0nY87SsC004RI7>l{R5*K8=s7S%v$dZKcT*{Fd2-p->AYcsd-Kubhiq z&QW6V(@u6w13r|ttrwIZH7_=ZyzexQ&;Hl)oSn{XSs(&UJIaFOZ9dS5M^+^VGwja` z;Z&8JQ-#XGB_&NdK{|eJW+_H|`NE)Wsk(2xE=8GB^6Zl3JQGr+aRnFpN07M>t-8+MYzGQ5@iz@RPxt| zy-bTV`JBGU{F7+M0qFAbDne10<;QE3$(~96-SDZ1Baa4DSjfB!jy0E7gpW_iv_y49 z7qSC+jyG|uoHOE@Vf4ON_M{q z1FZ@L+=$(he-s87A_;6!cP+=0y7;NM z?cwbfVz2vCLZy?(l}|7Bu2c1u>^yiehZpWQZT_%R*(f1cgiE0y(3C#8ea4rbIg#80EY zVKCIc2vGY%yh0>ZJwwI*`A7asHUGMIsZ z1C(U(OX(zzXWp)HI3u4KCrAx<%6i0Pj1pZl=>81sBl)v|PcEqn_%U}ou!{+9C;2%k z!$N8c~4?AFmx45VWpa{rVO&rim-zEqY z5dlhTvFVR;6s27r<5%+?wSzyG8vp(cnuV`MS4iB&gIeixEr-_PDok$2_KbO3A5>AT zU|0$1Yp&PN{FaCazau`L73Y@sbe<6-z%t6_BYNLwbpHNC{2E>{S57Cnenf@%g_DYB z+MukGvQGg6!p=s*z9n^_J`Y^tgKEPlrs8-KgFt}_no!>p`zEWS6%35YWhBv~%x-~s zk#s5k{8w-!+SEg1f&G-9hPW7_Gb%opUw9&21uv`3nbzr+dFiMVKQ=t)G#pIB8dX=) zq@5CSPOp%-twk+?NN#SGiz&l&_uT=|V%<)N_^raH+Mtmxm}fl!__+|fX`mL5oB>+Y zUqs~-gB8k%DXNfXtkiU%$kQeAKE{X;gO`w}5S1jb6u;)NOZP zWq!)lX{mY^T~3g-j%1jhzX#*JN6cDm8eu|BB~+e1(%#~BzAt_brN{K^qJxe+MD)Pjp8ZkmO=Q#Zv^1wN_7Ot_en?C26|#2g z`fOJn1GM70QsxF%Y6dRtS32gr?^>5c>1UN=o$X8#`EOsMWLSwJ_oWsNE z$O3YP$;Of!-i013gehkfZlS>n8q1WBH`2%tUe0Ia`^!2c`&M_*t)UTN`Ws5KYVFxY z!BIuulL&9De2rNo_TQMdm;%DW^-8Ud1R_kH8$`FP7kAs65X(zAdE+Ey(kou)kzf1@ zCKRp=gWOWjs=d%SG_8$~|L-MqlHG`zH^cpDQ0#zl;BMo=fMU&OzTgM=Hv&6B-Nn^x z6Re)~Iw01l-D|#MxWcPPkUPybI(V&`29bOqxt(; zv5P@=@%Gz%n(vSCeh=rbjxE>m9uF~0hET&-If^yE`PKggkVqV$LHYCY8$*lV%!L+v zae0vMGHx7aS)P0e6~xK=ZY=l`ZHL-{ddAc@uNQxx%vZ0^f{bgX^dqyi!dJhXi*ryJ>m1cOsi+ENxt2-losFhhm 
z&FPOH6snncZGf|h1viqK*mK|c&FE2K!VEZ6O>l+NUfv2oeQ)ot7DAkLMZP-W6o3&C zgYz+LS@Gkf1cpALTqE2hJY`Oy+aMH!oZwaUJr)wOutlHF0Ft6HfRBeal!fyFFA;{r z>l4>)lH@EFCtG1IvbDh}=ky|q8*wu{)wSwIKp0gLy^o<5w&gho|j!|tx7)UIgKj6uqzh}wFV|h30al+0)pEXE6C~h zdMcWuu{VDaHDlXRJZI!d1lOTtL-ix@j40Ba2Fn#`M!$U)pf{ z^5Po6-NLmF%MSZFwtZVnWktBj`c@!Xs{tzt1?M{>rf9v!koz}fKseh-jrSUX)h4YT zjQ>=!^tB>p0YygyBd*Z(XTiS2uuDe-FfAXiu*GW{cbgf83JD7|BFrE(N<|mjyOOO) zC{Z+kL<8hPJHA0PycPJ92vfI9|StB92>6;R|?Q#v*^^ZYj)gwJZvae!I-ZQhq{^6#jSEfw_r1<|W)-W@oupUSSzc7nA7cdzaBaulboMpt<^o1Bps2C(BijFQl-`8F&z?^s_hay=NZ6U#deV+MQ5 z?=5%)EVXQ0@v=0p+#PWDp7s7ZZgp6#+H_U?J=*(V?Q;avVXZp)M65%n&RfiBx zD7`P*owxnx&9yIuy^i0qBlaM_L~dWiiZ+!$9Itx~S0nxpit~ReT(B)KaMPRk=LY8J zp3}vRQ{$9{O2iSv)I$b-uoLPcQ_!oVvV5<64C&6mcVZ^qN>5^xM^g9#ZsKF+;AK8v zddWU^7s+jWbg?d!okT(@RGDso)N{MXq}zejezcYP0a(jXmt&o% z8d*es_?||1eU(&#(8A)bC^@>9B`uCD!Jrpk50b{%=*hhPu4&Q))i0Ra3s5t_um~*p zJ2#oM-D|E3IW9BwtuV?#edBS9b31;VO=VfM+rMOam3>tJh#OkujCXP(u%%X8NJX$s zt6xWa5X1K;J?v^1!nJoQ62lWkM1sgtZ1*?>Nb6!Xr zwmL~Z$zCq_=U>zJ6)Fo{9=kfc!2aO=_&v-Q)0LQJII-B&D(v8IaahJfe#IVYy(yad z^AS-A?c9q7hsU&jH((8qUYnS+eB|Zkj~P(^@%Wl5jnF$3eN zv4~5(Y{C&=B3%V?%ln?34Id(#wnHWs4+?Jhs= z=_3;+f3K=vwfprHx2@_Y&zMdAOE?<4!m;rcaz8!%XOG~j+nl2#I;iocwGvUBqpsi0 z9Q7#F^foH0$~a)__2<&@XvV59XG6zUmXYhHbbt z;--inw40?8!`zjX+%A%v4zQ{i1hWrv!nTTK=e6 z8kC|98b9sE%dNDnS3-_@;@lmUo%8pCi6^ACa}oN#YafO?okbhm-%c1M;54;rH+JMO z(N7!6nacU>rCZF0SENm627A;uE@8r|2>-sYkk_@j z1Gu|GL8aKwAR)haAK~)f;(DGsa`lsf2DoCQplu8hWLkG>n*7q_ui|>7W0E^fVSR;` zy?l`{R@FG+8Z6P)9KKS6<%CU>C(5z4TGf~ydpZ0=RHQo!&kY#MSn&Z8_*@Qm-d&A*oK6=?Ek;J-W%swn55qPE_Y+rHt5WUImdw>{unI(aV) z`w_?~Lxw(Hcd2HPT);Wg!M&`{WHMlMj$tqL(= z*hlC#iKt@=)pNNB*{4pQyB4wQ{gZ>3+}5;!Gnz+jcz)=brTxDB`Ni?Yb?>Q{FlNsk z&Rfls=l0`z?=7E{XrH5Kf75FH=Ii+@3m?^8&f5jYlQ!f3s(}%{@{99k1wo{49uu>C zlgd9|i4ZV~yMz*dY+#4~!h$!p*evOF@M%gUBEd8A!Ht@RXrY@K{|KE>`6BG<;&3-V zoe2HIs{cL8Nm*j6dA^a4V4#z4;NKwa_bf#AYm7Fl zd7$9|GD!FKQhM*VI3hZvSesDtb!)TX%b_rgMF4gaf5mRY`drH5QK7(&ByEZm2jwqN zh5DkF1Og(r1!o@Kjh$+Zz>N!8rL{x)T~D=(3=3tK3DZ}SIXK6G?by{hYDUqi9U|B2 
zw3^3(X?}RD39r+SwW%C!jxp}-49^8ABYXro`txilF;l*Bp6?u*ROTkg{s3=WB%+TX zy(8HJA%?dWA7d( z-rr6&Q!_EEHbq>p(!U&jhQS@T5Pb^j#X0uWXSEz{o_R(p>iaz94q#4VAsrU(YWkAO zHl?ycch2pY3Ax+V3?URpL~GPua9u^^WSd%D)MM}lQSo}0dVm$uX>I)x1Sh=Exk%)P z3bmtzX$M9=5xhWCRa+_Sx$yO7xsaUWIBbDS)elHE_ucFp5Kk2vVwsAdW2CP_oI``@BvdX!=C8hMJV$U%BT-`83Nz-F@ zJE&PDa-dbBRqt;OD)Cv_o?I$_*2L2cEZwvD@P~_1FepDJipr^H_^pa8|$3kfU8~< zOD^P*|5Qp`2koj$7?}fSlJ{uz1;3DWXrOtj?b!#Spf_A3wK0Z`9j{MuJ`&d*13mcU z00aIO2Vc9!R(he7`l0^+EK?c(%5~US^zKvq|m3LK8UZFT<3SswhDq?Zbd19=C!vmYZ=xcOHA zb*@TI+D!{P{BO^0*BerEI^r5E7{n1z;8e<4=1attTlIJ9w6G4i%{@vQ+c+@4%#yF4 zBU24D!qw5p|7@gLUJch*TSbcV|3Vb|x56K_VpkHq8SMb1t`k>eZ{{lA#@|IgX*q8g`P zT8p2pwjXEguS^}bG8v9Ml_d5*%91F{-xu#i2{CR#T)AB*ggX_-EKMI`SF2{7vji!i zU9sbZO#(5su#^9KT8~LY2+YFam8mgyH(+9|dtDs&6-mgz=&H;i&V2FHZt#3zyAlb8 z4{^|6{7o8F7OkS^eC>9NV((RIq6&?%G|cX>ou51K6ux{-Jg5^AY?bDGrAP<@V%aNA zY7Hqn#d@+2GM3@W^I|1KW87iGT|yPPX58$oeRFBtQ|Z$62biZ_k3j544M?1EGgX*6-(+8ubzuOC4cKZ5M|hvpU~^+&b^`|(~YSfL*i-=9A^ zzR_&ZU~_XV06^LCKyQ@pI_F2TlP;o$bDAR)kmpixhFl&8F|z=)`R68^sDWOUkp}FE;4aJQ zfiU@R+cNa1_61>kX3_g$B(8$|?W65NukuEZrs38FJ$)$2)InG}B>bME{h}FSR(-Vm z;+zXALI88zxvin>`hPql{Z?i&r%YL(w8nzVy4sM#z7a^i6}wIa*6X)uoolAwHaFZTC;xJHA!_(J_4WDfEInD`I{FtfMbBg%>Zq^Dl&SD@8jC)Gw024;V+Lt$9 z`m50d(ZDlgZQUO|X%LEho9%&&)V>SGyrY$ESgL@2K8ShMK1DkFfXJ*n5EaFJhIU4? 
z^lr=IW$%Q;oWrlmruO0eyK?5Dx2h4%MNx6o5CpA9Jp9EBaz#dV+5t5kYHypsioK3P{<=Tb%8 zrBv&8-s;mdnKV;V1L1$IK|+uBJ@pP`s1q7{7h?0Fb(!a{=?6oz9#ERoA*>8b&{VIR}bg1=NxAQ7gk9m`;I#bTL%Zy3CVvP zHapxc;@?R`8fHPfe8u(VYu%`Dl=lyUBx|k#w+(HS3j5Yy{bx|NrWrVR#>0{HqGQ`dDQ%3as%8@Va%{s`SjABdI3i08lrxo!|E#A zVv$7K%;8-yTF2`b!`nw}uuGOHGJ-2AC2s)xAnJwEjzVo(k%0NA zSPg-+vr`L>XxFQ?zsk8}&-+a1M|uzigIDnF5MuU|huve$+aK?kV(yL2{|Zy?tyRBk zwJk~t!7za5T%`tOyx#S@yG``ilkQzPC+^LAs^P*%y`hSKiz zolTJbdZATzO%}ndsx?i-V8jXG_TChwO&4AJN@9$K`TmI3ZLUqgkKS)7rVf~$Bb?z+ zV~(j{NiCfp=7n}sHqFr@ISSSJT{ih!j@N_Yxz=kMl7sIZKWs=7`TOSftx?-dae7?} ze0J(bnVMl)IK`K@V4mQFgoSqCz)fVeRe`g3{(kX?D^?A?Xvh;X+I->wzOF;EsV(U;rK_O zjl!f$f0!n!Dknu)A2QJ9jPo2%ns#X!FZ%$cL>L}t!CKcAFlRA1r=xKT6hfIsdivfp zFv7F){-(S3ZdPRweUdHHHa6zNtD#GHqc3NHTjDGa4R#f4)40UqJz}g!6a7U#H*q#D ze@fE^YN*q~=KRJGmtg!Uo>Csx%%Z5w(+3#R#evg_zyVlG{$5M`V`yHLnhRC|!kv4u z8CA1&xxJ>VMS$8olj@PJAFf7T_U#uR*ge-h#c*Dj3?V*w^yL?p`o?!3L|NBK1jXOQ zcg>jrv(J}wp>>f;%P~wVuCd0f84bNa!yhj#FU2SNhPw-XVCXC` zL7QM2CxPu-s3ky@58%yw1&UDRFv~6?0XShp}H;qDOT5+ifwb@P5sO@v83Ut(BzAAk4>_#HV6+<^w)eE zcV{!Hj-21=0sC%`ciBF;YH8ux^lW0b+GQSFeTw|7YnN)F$4!?WH9%Qi%^^Nz*il+Oug}_^hU?454qQBkYokXKjnrQZ2^Rl-4=P0bh z<$B60D3E}&WaYBrNC{7&IJG4tCI?%PumN)S|5=Al;$kx%h%1_?9n*>55V_D`9Zx@| zml3+ee+jq3Z2@ofnJm}7yVcI0Zg})ciM*&>Z@OG>ll+q(_xHanlb32@BK779c#B?= z`ir?rdpdj0)qepM%NAO|pwY+Z^8bC;tisW9QvCgWpc-6PWNcB#fZHvDSP>;#{7M>jIYWC%!%L+{r8Jv4*{3YVxORMrK09EF9&jvIc?8pX#LwOYzn)G z&h|p|8R^9xBLG$V&aiA%*WU`{|Iq9AH|!It8Z<`Z@wB$LKH&N-6V34!Mgr%>% z7eBBGc%FaJz=(LiIFW@t^l{+>>t?K$UoOEKauKN!5(exmKh<0Od(0}QgL>ButJGP? 
zV1{>4yEvj^lI?7|TNonUs7C!kFgEb;%&G@izdRk(-1tkeAWRX1C{tr)fn$M{Pj~5k z=g({Dl=+#C!TcxCpZUtOR&yGA6h-z*QP*!xX2fShRQn5O`;v68Bp0edcc?m~lQzS| z1BTIi8mwu-n<#Ov_W}a1?&!r6i`?*u8(JV3`Hd-0R;TxAhM_BNcxEy_KQ6=wiTwAs4*^e`bD{w+*5kxusbU=*f)*yKdO8AZ?-)nlw_QWZ zn_;|dK?zm3WMj@x3=30rUYz-H zul=kX8E#sQrQicx{Pido6PQgqgFMo5vE8?hq}IX75*CouajjBg<_116(pze z1TV&j8g$X#5cz&_z^QZwy%L5&c5V_dYT~B0I#*h8t2fq00IHyhYRtDx+Om-d8w=Wb z-}E(nLzG(K7#MuMwGOj^^;u7czYeabSIbMZE#v$E2ilz3wU?m9@3^k?x^2rYscO)m zM&kA~&nl`rbOn^)cFxmz#e17}z(M~}d=te*>TGB{6g zo=@`fXH_GOVt78$OaGu=p;SC7V|z&sgFK)b=axk?%4wSpBwSk2WbyB5HabJ?Go zK9kb*nQYR<4jPLNq%#6S8JqnwYxfpNDchBa6#m6EZG2D|tM{1uP%?b6y|bdC)4FE0 za}1Ng+5_#1O@MoYe|u~%>OefB?F*%=huWP69t66mcb{WYW{D-p;pu>XhAs53f8-p* zYd1yu#c`VP-}waj;hgS6>M=nX) zrF_zQ0Co*Zh}v3JcRWcBSo0k2`C9O$60qDS*tY?f6Gf6LS0E#>gB{z761(ZT?>A6x z^txSZwTkTuo{x#;;@#%0sO^c-l z-B3e45B4!dAMYhlvIs+azf;f9DP?|F$K$vOHO%}w%h`mab>@KUc1$%#de@&^fd&~zjQT4q_=@66p62{3AlM@F;+ zLexBOd!mjVc4krBl$}SgB0s%L;uK$t*lG|6_n+FieneC9%Nmx;XIi*qz@YW_Tx%}R zfde0pxUEP08-;#(&R?f+hE$%#Au^Q+WIoo2H=1{@&s=ji7zAudzR%i&aAR&dqJ


siY)n)J5wzfM+;J_?pWiot% zPkMJ;^(l2URQ>!{cj9<^$j>IK-=+v|8aadCrO!97tLfsE`JVspRx-oB zKUd3T2)@kJzwrJ9WpqU1vs%e)Dr?M#sEoI!1Z}Vd)>7wrtIdybWpu=zlwjk&-uBXQ@wjXRwg%y0s*6bydo-}Tq|s+} zcsOiqXytAJMief4|3!(Sgj;jyhC}h?WHSyfA<&9nZ9V!RkePRZa4+D;6a1`s1SHB_ zw^O5vSO3~vfK02!$EuZ(X3A2>b`pWoUW5J@3e=}0-gFO%E8qB&1poi+_`D2~B#{3w#=MuN4nYM+S{q2!n~)rp+AQ-$aA zp2NFGK~v*dYAmO zY1E8dvajNZiwOoyDUu2t?7}Uh<(F_8^iIOj$1OpzMJ?fU^BNMi$Usw+xg&Hm<{>FjhUH%IBd$sI>XC+t56Ry~zoD z7(g^Adgv>WIF97;et(sZcRFa; z)q}a~lYli>v}X}Y!sZ?_jY&BiLPDpB+qfk7-+8bX>$pZ6o&HjG=ZNk+)_TNaQ$Uld z5P4_QCNEul-yM?j9#zwgw5KCc=H5uJ=D#sowUQs7mrssaIx{8`hWCy0#nfg&J^*fE zz`(z-_PpfpHg*G-zACZ-;*o^9gRP69$tb!y-J|75YemPF>6`>uxoV&l|JfO@?qX*m z#PIDmRlwo>PS*b-%ERJp#}ceY@=C+vz7_uO?G^+KJ-8cA2yUqGhOI0H zVqDzX6qfET`ofV&L(b8TB{JKuBy1DtT%Vr0vZ<6fKx$UP+$x;b_kz#Ttv}Y13d}Fj zSXAY5l)oO7iV%AcH8NMBrZ*n(x}F6*8k`vQ5)8(ElsWjp@(;i}sqU(09gk{ml$S71 zlL0?9tV!lCZxxb!8`cm*&SGi?m_R7`!Q0pnd>{sE`g#+||#{lp!@dFtsk@JEwLT){YiQm7`ReIdFB z(9(0|LXOj;$f+A*NQn53=oeZ5t(9LafC*6_TtugH6)sfL)d7Y4c#3l3Q%jvS=>PGm zU@G2O#!cF8WK$uE>q|?!)ngQE<(DrLf=n_XiRiF3fJphO3}*lPv1g{o&`y8R_p*3q zmU_`wy>|ArGrj%SZ0lD1e)_SrJ?Ch1bvx|Od`e(%rF%p@G=z2QQjnkNQK_Nl$+xJb zaF2OxpwS(F7DH)HjuM=d^hjZAe?*R8nuWmuLI;fDA`_*^8if;sTXsy>O;LY`e=S7# zwfNH9H<336(`4NvyXReQ+tKvzz{+-5#DHZ^eT`WI%YZ_G(09jP3r%WwFB@A1cvv-+ zn681S#N%-px3`pnvf_nqSw!F5)vR^fHyU0F4s#YMGjvmzyYuUb>+Tp&9^NV*3N3+}MLZ=beIqsSgGxVqa*sDc?%*9BU+kxd4M7l4V{Vwxxd<-;b zk$vsPH>){$*#NcJFc*I?cN>gVl3)g*wgWSNvV}=^ngbf7ri*8(+BgdG)(2W80t+GW z`o3xCgj!;c9`o7^( z%c*B9TG+jPHSof!C>+@q+e8gI$_kcrNR$7Os`-zWv1+Q&1tNfp-<(I$fCQAb3% zZvX^vS>VG7MDx{2v=u+rVY`MpE77elG}qRh10uN9oGdMT+3l&g zZkj2nY{zVsu$ZqPZ|0rCfy`N6IpSHYRR{!{bcM@kA4LuPGA^f@iSyJFJ+gt^uPhId zSo&?8^HAXSxxlp8Vb(;3%G{1Ow;hbkZ4Km+qHUYWpbZ?;te67IUDTIeLb9m);Y4kK z1Qv~8hG6Crd4cK*ou{Y;QNvHmYR7Rc8pv$e$VF~_xd^e$i2f(8O`j`1$$f16uCHYq zR&`M9qOfs0N8;6FTw`@&bQjaZqN6%CvceRzP$TouFpqQo=$-Lsk*iL@Lt?Q9Py+aI zejV8uQz+5+o~t_DUFxMizwh+fSwdC{0y`TKO|DdgoD;>aY*8RZJZo>uT!aP?jD*Qz zT8up&5fHe(9P9&A)n)cS>CmH^o6`$;mk)_b1hH70x2c 
zxr6%>_zrq=7L^G7}X4J2sFhNWsgi(aEzbfa?YEHwL0TL zjD2nV+f?@Cpv-g-PE?Qh@ze3@HE4S@!d-LnM_oMQE#U}1!qB$4>)hzMV|*Zf+G(cS zmj~os((>(iE@~qyaq@68)-=x@muRL0dQy%yqmt5ul zZYdQx5w!&lCrlo5|D^06Ns>bC#?IZ|tYx4U4Rjka{YxayMGAjJvUkYX<76NfldKi& zNWT>M?l^KV{k;iS55IYBm-i-mSb_dv?r=5(z5*Dv8HfZfxUyI;fZ4xs*^dhVWGCRF=u{H&Ee${j#yYJt*{|6)X}$G|BN zd)BR7z}R4dtiJIN5=NJEY@lg@GvTKXUlRXTOh=A+EL79|gN$-XvT3M@ROd54@5_uJ_bV?5`XgmAeZu3`NFWnA0i z$g{Tm{D6G=me6q&kJVe&N6hokD7$AsZRJ}9n za?fbF4>_lPj-tJhOdO`f?YS8-wY62EpC2tE9uQTi=Q(oQp6OA^;Gx5TnROPxF6kJS zb1U;A^L4WOJ(F&R;`Dbs7Q=eqh$>(`+fOc^aQ;8u)pA+{!dRC4|$WOGihwZAl79HuOxAP(kf?C|YZ~ zf%@xW6Oqk;mE^RNT-hDatf2&|uvv}2b6CNBWAUm!zs+=i5;<@DTDf~)@QUQEq*m#W z5!pJ)BmLj-%;_h-FLR^;zlf!-x7*vh1-yh0nt`t7zGa(=Svm2?UR0t|Lm`u#eS$mU zM*z8cSCDfAH#A9alu%TP0d2uK+Qc)Ei9Zeou3z&dUNN;oYp+{y zRv|wMYk9=P!{Hr-EpmkNS<2z5$?5HT9=SIygz|qbv@SU%6U-w~55s1gocGEBM``8W z%w4-|97_>m&vZ{?ED?A5o7V_;rZg{HKG&}~al}$WWHie+m!{*2`x@YLv`{rr@1gb+ z%qSMw-0E?%DEiXrjT~Wlo%}yVrT;i(|5p{fCU}j>|ItzmZT7Q(q)+T%0~4y-JH?qt zg{vr`kAIxDl#WIoCP^;&M$0oz=^wLXu zmFKKSeUnP{k*GugrBWrv6x1*I3ftlrR>4R9ed>5t-A^kGI&J|h&Hdm(m3F&%bF=o1 z`rQ#2OZ}rJg}aLNGOK0_(!}-d+NUuwohm!N%H^rF1%h}BM%x%Wqt}8D(?mJ-lBEA$ zE{DQ1c;JafEHbX)C8Edh{An@!(s?Dxzo^Y>d6RP}Yw z-N=gnm~Z2>4)@|(Ns}pjH8UCZR{V0Esw#&1?y4ZI5(`iKK8&iSmxXF=J0be7s5ux;D`)QO-3M;NHTZUxNiYMUmihjG=`ouLw z227%JD5*O1vRbC(Ky9t~nb-}xx*_Y)?4mgM4qFm`qBZX4MXSkjvpB16UDlq8!Gzv; zX~|z>WCgat$CwdRQfZw`;%nYnErloae!M5*ZK{dV=#ju$vQV~U2awH*a*?q&9QP@o zRXlD*36Dp_Spfk=mcEoxn4V;du&US_p?KO`2TfVs&sB_#++LUPc;RG?#9;c1+fhYK zU}b?wyB@n8Z}zg}J$5jTQ%e&KxI#jRrLg-?cl6d!bL>w?Hfh_1x`OtCsOEta;8mMk zS+Z$x#(vZa$sC`*K{)xMo{vXm(Mxf?`KW3xg{gF8@SUPAwA^qZtK%de)0G)A~xEUS*k!;w%5O|KB44Fn$~fk$b@bqt0~~E**@pT~$AYVtOAoOB1VytFf4czV_B( zsFMMSlQ;9wVY5BmXo6`;7qVG;b(%6^_6~comnw|&fommG{EZ7ajXx~bG|d@qs~a|l z52`N4JN^ApRJJsxdsu8dlzNVmYY460l*%SI2Ly9Oq6AsTmW>hlfKW4L;*Pc(9|T%= z&uM4XRI3WT@hYs`T9 zFQdd7bG(j9FfbTb_<%i<--5b3!1ws~)_r8OXLQs^mx0_jUj%>q{6$teADsQ~-&_LEg+aenIiQa1+x}&`6FIuVD>jIui+W1Ve z@`?Y({f2ik7$VuR&B|d&-NM-QMb2W7wEh8VVoFDSx^oQZeOVweXwH3wj-FVGUNTkd 
z0ar23{rbP27Ia^yTb?NT__!B!kKUvAu6w4#1&uX=gzY!qx@nqnpB@ZbD0?P+snewX z2c-D_55MzxnCpf`oL%$MM}<9kIW0gqWc==Hm#m6xbjuhj%88Q^mfOcdzXOiB1Rwqo z98@mHhN&3Whx^yRQBSLVX>#ZCChaPKbD6vO6 zB4E;oQF89spf9E;Y2y|IaSGNGI6ORji|EXVxkQZ59~M7OLjmzoNBw$KN~JAW$v>dR zjF3Pvq4bAmqx$&FMZGc=;jaAwLduPO3>gY##)b2{Z29AHG|FCt&R$HoZ2xq2{eI+8R8Q9?PCH^0LoVbvQcxXyhlA+| zJorsg*v|G%hU5rftDdPyKmI#~m`!Kk{tLtrmK^(HZyYRl+rnn67)Qd`xhF6uD5+X4 zrG9%zp-8>xU_{jHp|u=11Vdw`%3)zZZKufbyA7_ z>TRL%BljC3{c@I&KgZmW4zOX;E~8`#MB&CXVzctuDxjtq;-7f6^F^Vm zPxk&|?IpzwU?7de-<)ex6Y&ysYVDx9J@`|zT&D%5twExaIsE}B0+Ki$Di?;GnTFDk z#e^pK39EJ%-=VKCqgGH`vl4AfKv1p?P>((pAe0>Jc^mvY)b!+((O7uaAm=_FrIyKa zt-55L+v~U^9Ox}sdH++8kopSjZOv<=QIpcVn8s_hz&bNRjl;;>_0ZJq_!fTKbdOU) zw%X~nzHyUtuU#H4^bBSG6Y^mQaa-Ucz<0#>4;!J&!uQTQjakJfR=~YMucKT9)50R@ zegW2s8DRTC2J)|+yhC8v1J>_BUx2>j*x#htX}S$9Jruja<;$b<;_CS>HZiVS$;vDs zHi{^m7I2=M6}#U1wiK5CwCIr{I6S)V9q&BF*wg4L9K#U`mqbnAnxtg50p*;9Y$9fTmQNY=#E=MV|4Y8+XwH216e1@y=`#Wdu&s z;jqjj2L@S=2leKIfXj9Q;%S`=4-gsSL!b$DagQaxr@&i$;)V!bLJ%R(4)Xp**kh@y z?@TwIq*h<_MZjO!74mS8(z-4RG+N$(SUM^h6w&Jv78q6=U%MgHl<<~LQ?Ekr7_m+- zbSApsFH}eGNAfeCW+~s_$1DbGv7z$!SPjz9Ir*9Kv!&mG)TaI07R?(5%Qf#`qW>uq z(ZO2e|3}LN`$<#c3{oE$Ht6J1g^ltL_p-35ln;9vo!>UXj*9siOjRm^=_V!eDO@F3 zkuuY{7CuXMY70W&-|iqsC!YR_pOw!r)O$Ch&!=7A*|>XHggteTMt*;_71V3o)2#!` z!(Cibmp?D~8p~D0y`kJ|&zx$v>Rs~vHO42YG$%paN*-*coMS9?(y4A^rZY&aJ%->i z+R-^eUcq6=EqW_aGrvg)bAxqFt#6R0LMLTm&Sp|&WD6z~-l-ba&Oa*+J@%E=BAJBI z4kflklWWXxCR`rZx~IEivYa<55qE!$gr`X_1%K5`uv1WW+lm4g2GE2V$H&MWJ%q7( z0075{^!C3cNAW`0pd4W*iSY8=f)O%B3wDKV`m*XvvWVT9*v{ zT;!=%RNh6^#4KqU%K%GkE7)`8F5ug9DTf8V+eRQKP2T=OK?iins89}-gf{k{kiE(p zZJDNGaobmMF;3v3Z*6`s$s&hkgzi%|)n4>`7DtLR_B6nHB^n3RZH$OYdw5^BT@m(g z%_l=%hM{PP?|a2q6m)=D)^jSe5Y`jq}!*1%JC|e0pHg)-GFULgwF^HeOtA0@WSsE8e;1 zvp(%HHKm|x)gQ@9pSA|%X;s-V&FK%`5&^K7b)&Z3%z$8)2Ho~4bFTDu7Zh=I@vFW8 z6hJKDG$yH?pY;)ujG;K!kSw}&t?uN~x`sBX$L#(XAu5TM$aJgx5m~V5=8Aex+O~Oa zwA2E@UVjPaMvq4P--UW(U?6soAeS5)xMeET8SN>1jF{C&+Jmzqss!yF{`FqTZJMmo zBU<1HF>+o1lIf13aHIWDRq2tHY~rRYih!L+8myl`zyt!YGfZoiJeby?BH0Dv6nwRe 
z*A{Oa()nPq4+=Hi1;s!EA394sqbX9M6oa{hST3;};MN))?c*c$vqNj!+=)!_5WGW>$N7;|f4P90%2IIeSH?J5-7@f`* zVcE59XSiIv6tU#2p! z4_?Y0bhoB%g0#kwd?DUn(z85~A54F`qbavVwIZHJmmN*kGA&!Em=GE&QWGV6s!1aA zV2qPVi5hC=^L7}&C;k^JJr_29WWd9^V2DsuBDv}!qxmBq(D`|729INo()Fs-O-T-MbX#e zfl>S3mz1)NbKz5r$V2d-0*@@=ZZTIFt%C-L?Hf8I4?H!?CpWMdE7jo#cBkz`B`rCs z#iB4vfI3r-XWduk8z_Kmr;gRBkb54C2_jDsyOLO2N!UVoEv^IKSXW>K_RT&TBfm0L z1cSH^wCAh_69ms?Rhwxteo%dqqsr=kFOG069)K{{*P~e6f3*_!*__MIOr zX27^VItvhQi-r3q4{HuTI~>np5vMBuIq2MZ@J0Pq@01;E9r}yLKr=J)pxkpATCBbv zs)1T#{TUS^tt*xb1OwP3AA@T8c~5Tqu%nUQ(nL#z=-=^}`J>Js5u6BaNdxRLXe*Zs z*n<%6vd*JhoR&HR9I6Rk3~`epoHBsMP{PIp+-c_O0g+%&c%Z#Ch0dt5%sGVGHpi;T z`Jq2v10-(8HLiF9Z>$3LqeEJXMcChlu@zT7tlHdhSJ-CjE!6D+g7a^JZhrnvVoo-4 z3H^<_6`8qe5i>U_&n3QzpQE4vt$2yG4a7ymqK~F&qy1%S0Q`%#A$2XEm=3UTWzAa) z8d~}0zDOWUX`jVviz%!BXaPR>c6&5KvX&~i;5g-Apa=YGtD@B6tbaMg?DJ4g=FdvI z$)uF`L#_-G`;Du)WzAhqexgUc=6R%RcG`(}lgah0 z{ooYM$ZwcNjSKNP-0grz@XmdSLhNl|pbfd63bo4QwdgcJ%K;@xEbc5t#~ERFqKBYu zz@bySd8`;YpU8Ht$A;I9j6fj#I4Uncd}n;t_5SmxD<+X`p|N5QA3tfGd58I`HHQk@ z!WfsA_;oT@{;fG6Cafr6Ql2!P$n8pwU#}6vBatWb4vvaY&HBf4EE{m=bG>3`U%0+F z1P5b0_aDge@PTjt{@5R1j2vufH|;1A9#Ek4eQgLoK&YNAby|ZTABlSU4bW3F&=vlR ziuj+^=)Zw+K805HDuvw3El!re-X`}>j%1=7&piDCVbA-aCI~>=AAUl%FcrUG2Zq4j zL)<@pyGXdAb=an^4+OH2uMC8vGzT~i`(b~t ze-Ecwt7q0sU&CfaO5L*7Azs}aIyu;TbZVAo6$=jB*-M|s)SJEskVH%GDx}3r%(Cn_ z;2)+HztmKUDoW-POskX1T&eqAHj75@DslmHsYBiVS=)=M_jqCGqTwiBPvik&)*}df zh9^{Y_Za7yD`lDvqgPy8M*kQFBg&jhfDGC;H9LxMPcWWrK5F*je+UX)x7>~nbM@O` zSW4D<2=5C?TIS>>hJc#-$T8m`WCcL3$gKHdhu=5i&)%A{w?MBeI6KvDf7p$am8R)d z>|(HB_V#)Hr>#rEb_rAA#5E!tEKuS)%iks;h8-cuUa{S>w{Q`yJAxhfGuzVHeZjq7 z&SHm}ne~~uRn}cR;Nmf9qW;2imyh&8@v*8_B5-lWTM^cCnjeH%L6~FqpUPZN&Ph@H zbB>gb1;|b@V7T>2XKoQ$TCH~8vxjLHWEoZXSVN*E)mrQ-K2!1_ru!p+VCcNj zg`%8gVid*?dDOOI+jE_W`^{ST#2_I6CJ0OtOj_`olIT|x7EwfwAIgWBFJqcekKr+= zcQ14HWYh!+uv#~6&`Of2pv(2AUkg#zL`;IA*{y&7I2GpTg;}G3j?5jS@tW5J6a`12QC}{K0 zlj`#Y(*aK#YvjC*att%$>Duo^^M5%yRD~xLjH}+t?#wHkDjTyuxkzqe-*F4acp18! 
zrXMZ&egVln2!%}ita_~7Y89}~NMt9EIo`~Jj9K1T8#Mxdx*A`1)lIB~jI-mtwIMeu zEl(&C-j3iFIeQd>FGb%^g&Ls?p4s9)Uqz<(;SG}nuMdV{tP?ot`=)sNAYV9#g(SR} z>}l=5is;;R5JsGvsWBt$=@>wlG?>b{8<88%=@;?_3jq;JyrXzmB;Z^rD6dA8c*y#I z0gz?0muQ#?%5<3{r%;|D+Nbx5dE9#kA8OXN0c3n}Yb&B#;>xe^(MmMm;7%9vWZl8b z(kq}6-}>qSR~h%U#GmmY{nxq^ZKL3`k^5S9(&aL<{?hzi@B8oihs+I&C9{F`wTG6f z#^t(fUJ6~SN=vm@JzmJaQQW(UZ7a+-Tj(MqY_btkyTIH>3Pzau0RR60;{31c&&S({ z-1{Y!A=n%Lk2=J;=?iA>Qg@yI2tRlnWxW;ce0)qwU06B>a8gSCqz&e=d7vPtT#3ni zbm}1c8KrE7U}6DS!9MLYuY^$ZR)^vuQsMHw)Qr1xF%UYi>gzHNue5R7VD`Of?)%#e zAjniHd9m(PqCKjzmfqd3B(RZUX1oKXtuQ1uZJV31RO{ zUSXI_9@SQ(jC}=v?u+2pXA_K;1O%OtDTbLTV=9HM``U3|M@+&aqtYX?4JdC(6tBMV z5z`g$aD~wVW7NWTn5&V1?r$TPt?{+FOVd$fH>3P}7063SoKLnfXi1eE)Esr2=NqdY zz+(ET{xFcW3Z`Ej1)YJo!2(T-c|yVB4upe#i%SkY06NJiWuUb z7NGv7TaO|&8GkUa3eIp#)G3IYQGK4%q8nY5f|zS3nm!HGJtn$pIFZWx^VF^S+D9$@ zbt{awZg9&V&gxR9uc$59({aZt*XsV#Z;Cu#Rpq&|yppB?+vRTG%!b#hAEw>7_{Vd?whUDiD3bhqh;?9k|S;%o=CiNhyGHu4j zsz^BHEeQj$a>?ViyPid}4Ho0#IUO@|$>28T`2}wfo?;ORHV)sudGmTDCnc`AD2}nK zh0ZT15iEb_DV>_XT({A&Oh<=K)=m|9>>(_=*pTKjU`f%R?!eU$d$fsCM)%|qLfb#M zK2Sv}krrkhIzga(myaW&cfK++yG&)bdS_K{zjVq#H524)UQ@9#Zw&hNRp0eC%&4}d zs<8W9Ni$$Ykh#ZCX)>oy_;uCs8W-VDl(?h-l1IQ+db?hC`gb{h)LFUdLz(RQ9Kmtx zHP2=P7I$&Mko^CyOAtZjg|LSlAj1`DPzORInVIp6 z@lJ&cNf+1HF$6~nV|p=*6c?;azCb=*$%OT>4LC76`m#bG^FBZfxEcZ<4DtRwPb!)& zo5sDc=>2;}`*C)XC8(znr@O0A{q$c4Hop1q)1y4c&`0O{AdDk6=A(XsIY?WK3(GVo z@!PYP)7a-%|5KBL7HQmWhyT+&^uK;EuS3Sz-)caCjDDKEVDQgV{$R7^J8v~Nu`Otl zA!evRMM=xYDpvO0ceM$USkra>G;>;NiRIeCJ>gMSpg2@t&1}$lN5eZFnqC58v0x;I z%tbmvaeFF*h5NB$<Ku#?lhwW-L{%cflt)Y58!C zf3kuOmiim1oaQ1SiO?sh<-S#|;~Ooufd_7ioKoW3J0iQ(Ani)-g(_yY0obp!h-h84 zFQqJoDQYEAn#5O%@}AYJni^%SpcAFqWLbBl9rHnZKV`6EA9c{bOBLoH$Qp22O;rT4 zPZp6dk_s?yi7S|wH=}Ol@#}v^UBd0&oGW1>HNU|GI-#!LG>XzPgdcLLn&^SKO8vo1 z5+B2nl`&2#V6%r?dtn+OrRZm%e)v7Wq*Gpoy z$BEpVYWUXbK7_9?hRVry7 z{Y=1VxD!@Q#&-*cb-6NlL_FL{Il#j`nckrCb~$!u$EJ<<6r}}tdn4!hmt{r^!l?~~ z53!CX$89b(wXVsx&Wjpn_ns#k3BumKJCjX_PdRAdHyP9Xc)4L9v`_*2iu;Y}g7RXu;;Swrn_f>qzCpXH8?M_u@62{G~^l2>A 
z=T#rvMz$$qSTx2QYc7P8#}3`lwg)XgMW-#Hgs6d~(vxuGej0IG|KZ#vk;i89m^>o4 z+|xf6UFEqZ3#FFW-k5*50UnQn|1aA(Sxg3oL95rEN0ZHY^2`F|ww~cGi5Qum*PYB5 z7-J$SC$MO7V1ZBe#=BPZywkeylqkZ38HdNfLBenp8qOv)F#yOc3=X%E=iMalhW~C; z(L1r^2g<&IA&Y6bcGOMoYxo(DAj!((wc@A;_2bmcS;H|zax}+xiLemmrskIGimco` zoNi%z!wpsCYOQ3y6hr4~RjM^;k5>IQ_jUhOenZ^%GCTBtBrgAxa{BKvNC0~+XLVErv{#)-VhhbCI(l$pKJ42e?&U-Jf&+WVW!n(q?Der7AC%N$dW(+_7UX0pMFS=VP2?qFta!_&?JfIM8G! z56MOH-WGe46%l^Q7$-B~!tE%y@4x%8t5yTYbiQ;SAo^YW4KLAHaO%Y*pe;dfZ^mUQ zC;z0NW&uc6s9_J(xE^qt9Pg?VNz)}0W4*4ceh5eXV%qwpr@q z3kVdmo~LTfdrJqs9$d|^PYmwvk2vJodv{4uBK*Q(+S|$|=Prg(uIad3uiJ3Q0B8XB z9(7U2=m52zV~4=10Ui38m?hI(;b=if#qZy^bKeXyO&fzn)OrZ2IfH@|VT%s~@w*IG zm0U+oIhCIc`E)0#TR)skh4gNT?hRxxl)@|VH||@Al-1ZFG8iYas|?gRl8byrF)n#n zSGs-|#+?1gSzv0uexeD_ug=A;lNs5=+6SWb+vk6{3oG=hi zm*+F>Ktu`~`=Ig4;qJN4^MPQostVZ6EDvWU3f*AFYik>{zjb$W>g*3pJ$H5?4IuAq z$4pgO>)GaDK>#apQ+u``#QH?-^Odf>J_j{rQ6X&^-fruJFx%d46o6!wE%|RkZ_MOw zFJQJiB{#}a)FDQaTUL`Ft1Qp*<)+^=>q29<(`tiu~mI5`SF&Gm9l<=ZmQ5|I33&Su9Ie+?V} zKZsIDzQN`^&FSUrypo>B+U+j8Q0e*FXK}l+-2`cNRk4wv(nx@at23y&Q+s?3xk!FA z41DVYoo}M^cQ#Ce2;kF`9%W6yI8d1-D5zd(xRm-a?u5R{OCDcuB1}LKc?LN7x;}m- z_@jq5k!sRrbD}*1$;8zU5!i=?*3aCL_AG`TwLn2X$K(IIB)C3aQ3qfa% zvBTTV`Ns|i7V>WJVIW0173I9Kfo11fOrw~Uk?ax6s))l>_Zxc_(7ltEZ9|quLjmkc z*IC4e_d+wbFsVrgMEayS8S$SSVCN5rF1GwJkX93KPxAyUja<~r|IR-CuSnBY9J-t6 zbweM@=;0E7gmYHOg4$;Wo##vE7qtZ9zHc=}GJlJ?!hm-Z0Zy9HtbREIa!x^KTZPnQ zu|qc{`%Z!rQFpC8u3=R>cw$kzJ9D+|xwz|;0f`q=#`G-O@V)2qvxLjk_@KdwEnqcQ zu}Y18v1{uusmHqhxNcE!z-)zqH5b~8bUg`gzE<34C_~IfB+T8qI-B|XQEc#K#DkzQ zOBFIi*H6()(%;Urtgg|FWpe+8knSD!vW?dFGLg#N1cKz6uUVb~BPQ+TbZL|KzJ1Ro zja)e}cgtN{+w{};T{jwN%XcONJN=sJ-~IMZ8%gb7zh7fd z_Z-e`-WrIQ?--dHe~T~=$}STr!N2^L*eTQPrrZQ0d%Q}UbVRriaIyFnjv-5qi=K&S zuX1H?q%bNe72+9Z9ZY)E)(z+3+}cj;r(X*PnqpmA+77WPk3Cs6zJds)`um2m%Z)Xl z76tqnELsQUT<^%al=A?3%&p<37p^!~7zJ@R81HJ5_d+|%D;}0))`q;-rwB>+D+qcP zrdAK_Yme*w?KcQwJBK)Cb@N&M10DF9)0e5E8LaC)eINC}^;Ui{MB(=*rMXSeGq1R0 zd+#vYC)RG?NqxRCCtl%`BG|7*t;yD?Tx_VVVZlSH@NnDTwP*yi3BG*5Wjd+39BBX@ 
zRt(t&nSp_0z)WXU;ZtEO))l#hquIA`N);bviOmXrWYn!an|Ds$M+bk#%Z1?%p1#yS zfnQEGBjC|$*i6<;nND{?3EIR?yLT*nroXP15t}^tQUE)D#xd@Q_!1-L^46RUA+cwb zD_&Rbr~Q!>=q1TAzT4q9r^Otnho>(D(~fIMJiLxS!S5IgSu&~5;g#4X9#M*UaO+Ma zznkN>WP?MIjk?+fj^K(C?-nt7ZH5IVG(-oRy$Lp9@$vSFJaM~nkN9IqW-d16E;UP= zWo(jKJO40TkXhV|!8;9Iw8I9Q+=oVyGQmTqbp97BC#g6l@T2baRQFx-UrHhG7LPFC zV_%$ai&jNT)ji;>rGIq8`a2+Q4u_TnY^e9JOsy-9m$HGEHZG-cY63T1A9O$pO2jJq z{?kV(|9_YI3HVs?H26yMsDpCjAwq-f5d;NNnl%qw#3$9ytS>rW<7c?U0@kIoTq&te zcwKXbWjmCzGVe}>`F_LP%_l`~X9dpQiToFhN-baCd3nM|)Rtfn7Lzb1?yp_GxH5aH z8V(}YJ7bKoM$|`v193~y6rX*o?ZKwD_y5Mm<;~p6`d8A9_7C9!6tqk20VOP9>PU-b z`cmHz8jZR1F{0)||0(>wfA^=h=8&o-CwRGqS*>f&*~dC5r}9;r$7ZfNWkGbp%Ap{b zyjzWME^;@iJAQOIZ=(eQ`{TV^Zv}-TY7A=^LT?w3;*4DrO`?_-2?rxRnw<>%UhNpU zzZ)tPvYA96lZtg%36f4?dW+Nf9ry9gKe2Jn@8#(+=_19D5^RTCwHxZXB5U|t55OVD ztk*Jg%1QgN{btdqyvkjV8DtXl4&{Y`znRQm80s?)mcsY==5V8NHOrW5yHD+vDPm4& zsz*&ZT=ON$mt0Ek$L0TxTUu#pOZI96Ep^u?45<>H92F?2Jb&tynBs)HB=h-`ZGa1V zrbdtbaU?8hr^LMW6+H>IQO*gzJ}Qlq9WGYYl(({sefpG1`OQ065Q6-KtQ%71m+q)->7zg8um8x#X=aCfQwEbf?joWEW3Faec>)|%i-!FOe7^qZ2R`%m~%KLb+;^;uBqRT@nehqaPxrSN9_nrFM2 z>?azfH~*~6MrU`aQUDy?RSHQEJ|6euFG`%8-v{q089(PO1(mZQ)}}cPYzaq;&7)r0 zI1@TLmq)ckb%$1$Jnn&Xs||e4zE;AWlo}={Yep*p8z>HEp)GCm=#ynSXCf{rlY)XP zFiAzKkRUoy2#bhD`7a?I;kcU$stg~%D`W#G%orjS$gLbmR115X$%oWvP-wh#s=Sf| zLdFU$HIy{x%31yo6pqNG8xTiH@sN3#uR#?$z{?@Kxit{n1iXPG=3cuz{tApE)C{3K zWnQm&Es@t2d=0|zJrMGH97^2(_DDAd7R_Oky~yE{J#yHdX=+&U9H(#8`0m9nemlP~ z6+QjZ=JV=$9z_U2tjVzW5ye$35j1mY8!$-DYqxxOZIl)+W7wqzObmb$>P)DK5dmhU zlwG~0bb{h$X_en?l4ZlOP5Estv9A(I_X|IOMYA3Hp;ufj?)=5?Hv2UvP zZnNyXToD^EBRLMfrPH_t1+}p?s>w98^Zmahmf$W-XD$$6xK1zOGDO-5XBqpEYFyTC~VsFJ*-88@Ulj9BBe(G zzG)&1!b%_IrnUK!z{Dp3-}dq#pWeU}sg{!f~PRq|B9fgIsG4IFp`+n!j zU;q|IPSe)?PHbcEEPHVx(H+qwv%S%))$210>I63zh)XIsXYb6KT;N51kC3ASsa+2xOh16hW87MTnr$Bh?bJFwD4QVip*#~lYd zeJ%HItKt8t47}0m+S?mwo(;!9ywD5K@Kw07qJmJ-R(e*kiYQENGgLQPW1JMX?Qx%W z^wNiaWvMnPa>cU70L+p$GbA~v*XWBHOszJ)X#bvv2}6H}6-diL2eQD(eQ2V~RCoP) z1Pat{^733yrCUTm 
zo&c&@+Nj~JYMt75tdCrcDFsA678Tv8_TjiVi9UCu{PV<9VQok;CGEqFI&=krk7yr+ zr}}?}Np7j%+nX)SB2y`+c?fFu+!=%9pFN~5S3O6)gX?Pb0`|k!7Tc`pVg&)?yj_Ey zCSIAzO*alAH?wdDl~Lv1{0USw%%L<~w6?4e5%luD1?E;O`POW4G;@*$EvuuFfT&N8-ynuPk(SWyR~JMr1yO4SU|TT8;f8fb6i*!k9&+4nw;n z&$(;0>xbKS{Zlp{%(pA@ZS2T2lu{?+kEiesr>NL>)#A`U$xVdvClF1;Cm}?R@_K&v z3SRDiCt~%*aU1#5|2;ix@0>c_VqJ3r4$%3nzn=-$8}QyfEx`%F&>`GyS4zhPcwmM= zI5^IG1`>MZ_DhJ(6=KX z*c*=yes4Ajfy0-!4U4dG1Yg|Vup%CxY<7Lc&lw&@@-l(`j~AV|ykn`N4v_byz8xYh za~UMh^>Pk*dq-eDC^s!Ne^}Hr>i0X^2K0FT=-(e{amOuxq_$}uju$(5c? zxi()j_fvuDd*TXo3Oebe_q5S(K1P(C!xciE*v_l!F32`+gLm7nzvM#NGw%}ktsAs! zTTXV0Sszx;Y(Kf9BLP@a=2`JiXt)RWrHqKI#n0P*;F=mTwWsuZ2{m^aPK@&iNA5W_ zqlY<~eIJ$|h>y|4DRG0lYQ(XYvDm8D?w@^6X@9XT&8n@;S$^O`WT0VGq~_eJu={0; zy+i83NPYaz`uOeGQSk-WeAnSvyM0_6v)7=~RxB5x;^(a^^6Csdiw*S42!bIB#-!02 zQ1F{_a;0h!E#JkuUGm{;n$cs!mNH$fIK9#8y}qTc<>(M1*)=~d%?r>1|rdA#n1$)Wj&87X-q`58(7JrdX{iTWnyfBIN#40IOw3N)%YhsTFy+9p;f~J z5tUp6Y~72Q%&9Adz-5Zdvy}zc1oB0Oi^n?SAWGiN1%5hppCZ=vQH;vB3c9Zg9on8a zB-lm>=h3*1L96%-iYZBE#2UqC(AM18J?xE4jK}ziYiuTu`M}EMv|cc1sfjT3V#|~f z;nojiqtw#?p=wN((f`Co<(*njD>Ie>gyW*=e;e$=x_?!F-lmcfpXn5Y2O4n2^xc2%?lbqAcw$)a$PP~ceVm@G{ zgIgzrlk{=E`J@ZlHNV^bK5 zKjh^8YB+!_5`>lYLV^1N!HlEV42codt?$(+DRv;TV3r#SwzS;k=dmfA?h`rumJfHG1Vp&W(6v!}<7_4N+1dQIIQA z`D>ET=mblKvXS)Eoer_CuV|#`6Pt=P#8j`f-QFgXRT)joSjEY%gWYIz5g}ebirCO5 zwgh=)8A8?l!hY^oyBbSq6kt=zp-ZaH0f}PDPEMKu(KLHo=c3%&Sr0VAliP$Z0mvI| z{Kjq0=2>q!oJbw2e3H7I?Rr2)6*PW>*V{iOq(f$A;8%HXHNz1Pfn3+~u4;E5HFpyD zy}RHWIq1;gf}eBQ3&=M$9d%%G?&hZ3h}G((+^w%2Qnj||(~z@|Q+=^2nTfOKc|s$c zc%6NbF0p3vVUwRNbKI!49|KW{O4hqZ8Q=fqyZ-{^x*u2`sk>|Pe|z@^U#qSr{iiMb zzwJ>Q`ZwQ5HhuQjRUX+!uuD*`7g8!4osla;JrSn<7hMPyGDV;R*_;o0YJL#} zmszS3p4dOuE9@Hc(XtC*cfl6*(Tyz3+Zv7AEUnTw8NOEs8_s9+ zZ@nY9HVKMfcUUaaY&877xs1=;1ihH=T%M<-($nE<&W5$RiQ=$Tn~h9z?{ScFWz35$ z#y47ih8t|Ff4@fWrpAwl4X1$9%-+Ol)iSTL)xAd@3vKA0t#g@ed4W7O_hc+p88)bu zo1J6>=KsCCEZKeD;TmYA>!s_R89lmAh)nuSw6#2X@;)ln|10?xsigK{*h0-h zH!S?>Co}QT({IehF!nGSoYgQC3*sOwvZ1@uA8#>V-kbiTjG+F3PxX1}L$UScoUi+> 
zU(UYBqtRK_$OuEgM&!?ms%4*C5x9xdx`A$iKmC23xpb9oe7v5Z#jZXhE|rC-o~8Qe zo_+a1j30w`YrN6Bo=*<|-YILg`IuApzB|fpGV#w>iVg;b4VlbRP*!oDX%FbqioKGHJ2_(j4G4M%GBIQ;njs;{ zk;nNP2s?I%s@pOmE+lu;G5C-;7i!Tkrbda1Kk-SG%<0JAz~4!L?8MhxB^cEL#EQ8o z0xO#;mBg{Ru0F)FAx=~DwQ+Tq%PvZq{5h9740;FJ(N2pQ#3;#XhA5Orhw4%C!7O`W zbxsG(Q7KgJvN|$k?TMFVt9?Si!rZ>;(SK6Ocmxa&Y~PD90q-37lrxO2T8&qk3Jn;Y z=(Sw@@buAtP$e@)M{HS;H=dRkjy=8co7pZ4MKKU7@&M)FSFDA`2Thsbf5n~+uNqxFCZ*nEzlbc zfGLdRk}=7v&~i_p2VaNgg8^Q4v{7sA2YTK=sq{FL}dHav4-uT^5KcQcGfl3ydy z&c{WFZktQ1@f_K$g~la<(BqUFEAucXi15N=lb)qBCc}P->7QeUZJR7Uih^}B&bl98 zoByJXKhlvXrSBT8Bj!GUbvWOxSwbyWYGjxgDpg!R*4r~N?%xRCf<*_uUE}{6tpu)^ zD>96+77|4yInAwTlTXS!u9U+3mx@@!z_B;4@SslR=>f@|r;;2?#4&a+9=tCoh?tkeG*|hqqv@Mv;j1~N z77_nRRJP}<)aC=~8OdjgWJqkA(Ze>5Feo@BixjnFwnz|TmbA*p=zkP*VzWNkzC$3E|T0}+Oc zA^PuZ+U*gLE-`45MOAniWu;rZHv6hWalub{Tn1QKQG4({rFLk0bEU;9LKScOt^AY< z9%zE5T2G>iOe@4KyTD|qr93|ey_^_e9erQ9DFA-fL2^9A##xh(1eu5p4TT4YS?;%w zDNh!;K}uFlaUZLh73qJv^|pbo?!=tdTCIbuYAH4M)IwqO1Ykr14;Tqb(6~*7XJ|A4 ze|3Hj9GYk~K)(Zr%Xsu#2k_!n5B%?$R>^{`Hs0~Fu!F5fkJCSUlYRxHR9`1;gbp6)9 zaiLa2R|*W(yscJv!^6c6E#!C?ioAF4FN9JXv4^esb0S-`R`W`>k0_JXWB2RJcT24- z(-@}wl8U9>#AQ}ZSQA{6+cB4St!AB6O*0UNH6~-{x6_O++)q?@Hn{CcLYoTbnIYx+ zGTAwvbMJz%?hQOhCMzwA63&jRO3{DlueBqO{wst09#pW}aYYjiy4ZOo7tI7SJjtM&EM&Y=059C6++qgVco2k{5|umXgxmrw4BxY z#4fq>oCHO9hCQ;l2b2$|F+=TUBK8}<)CE%R4;1BFhmJ(h9FZVt`L#$xDhq1KT#1%y z2^>97;ZF@;)BqSxH7eoMXl%EIU{x786Z@IKp`69YOgB>QqG?;`o|ooGQOuP)`@;D7 zCjx3sc{9o|zPo2O-tyLeDuc+?Lbh{wTcj>kuJ|MPm+_|95))+VRDP=bVi-71RW8uB z2vB}OS^i}lkrDY3I#{+sawKm)p6TZoUCR$wlY?Zpb6xoNWldOKSYVTAj$}Gb<;DWtuwTLQ$#1QsyC-3v zi{C|XNLMgj6seeiDG$FjyD4p^d9Ebf`FAp`M*UEr<2}JSX!oVXNry=J2(|O5y^$ zj*6;IKPhFNVr7BREZ8btG^gyb`ENM6YZV~QX^fAk@r#*a9`5rcr6m*9$aw84$x0x* zZoTDa!8D?ED&{0RKWPlE_R&HKK*){z%nieI>E;YMW>HTMUBib{gTLAV&xs!(ITyp! 
z5uz`0OWi&j3@X}_WZAmp&fU+dp5L3fC==h6Vtc)42;{^c7?*ynQ5PZ5MVo49Zg_^U zX{FF%c&G6ct@y?!rt~u9^6GQZV=(%sNJLPmXjt9XrJxF9o>=ve$;+y?Yk`Y3OVY3| zOZ0!Mm5*XwzL-V>2rHY2lYN&zPO3b%a2WSb=E+qKYRNM(8d2J4aPVskaUI$coLWopQ}A76blH~K-Q7@B;uI8r| z9N{EUdx&#V_#H)|)u)^mPW`g&+n9N#r>@CNn~Vw4(968{SMO+LE500$)s-$ui`p?ac2DH&aK0Q&Z;e*i#{B zKbvoN4qz)BSxQz%;5m(tbM4euWcgkzPSm1%l(r?{g3@6_xjaUmv@Hb7n954Kl=SNtMYs*{lBXDl7JW_0#@8`lY?7NCk4C3s68|nn#+g zQ|icmic>_viVLmfRGLM4++@?yU-Rp@r|6jFYl36Zi-&^z!Qu6sN977-jhgY&76#M$ z3N>a=T}x5|ociI2?Pw}33YcC2VW7wouS2O^t@F(+FzHY6BNCkAXh2a4-DakiFB;g` zM7u$eKxB2N1j-m2oz8A;6Prq0*V@P2G*LzCac-hOu!4+2^f3ite1Vt=rxTy9zF~R(YB8d@Cx1UC!V9 z$&8J?f`VuN*o{KB1XzD#e_q|MTvt>{y)=M<8BkLHwHAG0Yx!DOX#}=o8NM9hr)fPN z%BrE~xqrWJuhjl;@=HqG?l7^nmJnWZv;MNo+s<#@6LD?iDoCk%V7pi$xuVzFU#@cz z7t{i*)L$3w9RJ!*=ts>+lW+s)+UvypHRoYxXiZLi!dYLQR`wn2*7|d@7zGv{jvMKo zw^T>Gl-0mQW>_o$6LlE`#o7edK@}j8Wur@Sbn007+r6nUcfgZk`-tPkDD16@M&F;3 z5b{ZajP$``c3sq>LWV1qmrd0Qs^?$6cd{aMU;U>HcBv9bUb^oadSGE!P7vpsjX$iS z{+HHIF@w%%Mq>7p#G|jbua}cW8m!Q+0D}K4P9opv@tOYH5fNXtSL-*X+hi+_QoF^H zy&SoYB^O8cS)1M9xVFDmb#2{^(R|l5i%G9zv5%1l>n>q6>&?R;twt2@*Mn`=h3a)m-we2 z!?j@P_HQjNh>(b^JFDl&CU;+keKl#`>yFE|kcM(nYPA>GNVzaylp^&ZlTM z9HPj7h#+e!c4t_d$4Za$R-cslFdZ2@kBwV1*tSJ0I<#`@cL;fKbC8T~BbAw^psUzS zbHzh>YORyNh|V~Z!xU&oDg-pn;IqaNQ$tj~Jm-WQ>TWwQk$9=tC9AF_6wRkpefZ}v zMS$Fi^DGv8KNFK^QayrsF+(utME=hcf5bej+hvVw9(6CY@4jK{oB5DMFQ23#Mf<_F zHD9ulX1mKwmXlOgD4N9zdA0{Tj)#ZJU>d#1gFg?c0?Or4KJ$KNCL~KppOMX24$19t zh4;(gm*QXW?^c60Qvpu>dCuLmXL+w}`w$wRPX@%CM(yl3k*5A5PxXXAuKk7Sc8)`3 zdlrNVx*BF?h1Np_8r)AzPMkvx= z{M_t@eS+9rKvyg~%yWy@ z)N<+(_k>M-?zK;{hHMK> zg3lkfr^MMsGt~@1VmH}8@_Zr|41hBVV>lhYI~=d%rOV<0tPx} zv*GxADV~Wxv-B?t3`0Od31|Q2k1l=9bUe#Vo6p#L_18gWmtXtoOw2_qS|R=HZ}~?Z zyrRAp&xi?RaCiptaNj(h`wpaXx|aU+$%d&XBuoi2jA$SPgkE`XTmXCHKk9e#Aqm{Q zD7#Z;7WTf>ZarqEpKa#V|8Z3_cpVxQvzaMz-@7@~8*-gH(i_qqT3J}Oc}$IZGiJ^e zv$P)2R?#b^^nA3^X^cUmV9QoJXRpXM!q40GPxH4tjY~4oT^}<98|22!*FEbo(?a>b z-aReX+ez1Z_5pR5p_oS$7&+9t-t+B4r0n2ZBDciQNBWV63)394h2m%dRU`2i?drwOCpJM9&KBPP-{AC{f 
z!p_w0Xnu8<1Gnu8JS%2qHKO}HSfiCW{siScG;|cHt391Vvf(seWd^COFN z>a@5wKJ(o1*{AiRIKy~WjD-4>>quKp0=BH=%)LGw#A4WcjhqO& z3=<82m1aly=lF*z;nO*Fr{SixujomRR*E}&O$j=l5qI^wjzls38KaKazQgW|&cNK# zSwwr~DPZU;nI_IAFZ*oxp;rHj8atVxdLDU#EyhDL!z&#*9X>T|wkL4I4^qx@KzY%9Hqv|`k zYy;Z6&K3dyPS!!bWJ#7Kj}Kv_v%u4R-*LhQZp?3U=N03DTlV4nT7rIO#~S78vS331-{sIkP5py98qV}Znb0G!AT1xtt1faR$_-nJ= zi5d8!6UHwvuz8vz-2qJGMSLM(0q`ern8#pP5>9;rt%?Xuc);Uqh}M*4k}@Sn^f*F) zu~Qr3Z^NA5dq)cDckFQWBn13e)u)2a_s9GKt%6DvDe9ykmAeQVTBOxDT>=}17Gj6F z0d|E89p%w=^e;XhJWbLeq=y`z0`1vw;J7DDDT*I2KlT7uQ~#wnP6yVyRs0i1 zG!~hNaJ@^=nz1OU_x}0)F0V*|%M4_i_~amOlquGgJ6A-Z$VIeO?c>Kt>oNvuAWX{?qeI}7@Z9d7J>=Jej-He72G(U`oi>vuR+!^iO zP1YS`L|J;3dZU*!Mi5YZu42}*me1WmBRgqV#Rxqz13zJkUFKVCMcJ1-Jd)kXfY4mw zTpyCQ5%0mNQyrQy)L|{Rf+;Rei=Qi*wm7T(3hv`sPsZ_MdqwGyF7x@9KO1@krQJ%+b%;-90BkAY-_Us2l85P#B}_BZ+;# zh<;uCdkJXR+&$;py*~Z9)iI*w$fM<#1;Gq}VT!Sovuw~4{^hHclBP+civxF z8^OosBBIYP-5)yCcZSIL5$7Gd@Ky1Pt@inpb_lslCBZ6zDBJI!r2Ppr~C&T{P50Mu2WWpoe?ZBaS4sh#f;Xk6z8F7i%ixV`R4zv8r za=YfEB4)ltN&MXLybXJXSmT$0m=YjR@;&WW-G&+F>1z0r?du?y^R7O|Xr#)|tzD82 z`-^GSD_=XU;Q0lW-+p5(ckAPq3aQ$xIt^c{aM`yQmE-Reo2MS2I`Xi>LD>_+(s(p+ zmXqw@xBloU#KG~e<=Yga{X5(>^n6JX4V!u%3_;R4rCvixNObn$?cf_) zXyK+U0ZjuK!M$agE)CWM>nvjO`wA)2IqVh32|}nd71nZ!!Ws+=w{-~5^9xCF=ozim z0S*!gijrwNKBX9wRF}Nj-+8m6zT7kxWH7$1Vsfx#m;sx8u(4tq(5n?PfA&LXE);oLua2?VJx%cq$B<2EB7M^fXXlx9KC`Xb*f1Wyr zAy_w*UZ&&2JQa*PipmzGUpY^Q!a9Tzc?PeSd&5q(#aOE@RHt{JBI}r^LLEE2K6*}z z&<-h@)#akktE*yLSAXPYh5+yu;tZ3ari$wg zZ+Ebb$b}rOj&#HI+{A3dJGP}#Jk30%1*r}TC^caF3UFA+8{1b{8Bc%*B1%0Z5A@CJ zh?cOAzQYHLLq_u{*dg!2OFkT>;~&R4u`(`ou$|uy*^()dt#@!-t~oogDQsk`{gm?d z-|>uXkt$5t_@xmib_sEQ$bI)$AY{Xaxq#ypu_pI21%~&r00( z4IByjs?cv|uBt>w!k4N;!G7(Eo)q`D49N9U{k?O~?Y;G3Jh+a`A%FUW)YAg>FyKA> z&GzmbxPzbN9sLw&I8l{)u#&C!)O&0;#Z7;TM~!OdN6NIw&tEOB-USeC8WXK{ueB%Y zy%N?t6OU|DAQ@bKZkDNI?a$en5V3Ki_C7}3fO-e?7VH?v%gQ2(Kx&=R%Jh2|gdI!= zMjq@uJE&MZpgk}L@#C*k06fLp!HD3%7#M-5_mXc{AHP;3hAQ+R^8y0oOr$dYEY3u_ zTD}=AB=P!Wbf7}{e12E?JbgF%e8P`mly?0ZCqc~Sr&NZLX-cJh%fB(X|A}p@wzN{R 
z__Ven$n@A)Yf1Pg*0!dK*5J;ozyh1md1LX7*zP{9g-VNpotGm?^YW%y3vxtSOLJ;_ z9@pynK=eTjRH690^1tgdMRzS#C`z$J1`|GYb%*;iJk@i75}Z|!=^2!uK1nN9JM<;N>)`^;A!`Q z0LI-v@vigrrU$?V6f5so+(2~gNF7TmE#gLCSVOiUS^e$FHvR0zvSwejDg&301BQ0^ zVY6;!5j+D^jBE9D2d8z-n9x$>jV(_ zMyIDQ&wu@VPhar;mszg}@3)zz@`{8uG&&d<0pC=dOs#bABP0s^*_vWVmT))L-t;PL zwP_cG_s9W~E8;kP!?7Ra*fOx|yzOGXr-mbXiqeS|a|w{| z0Lpf4)^J%*i}8$OFU&2JF^|SyB(7&%RPK}5x~!Z3p#eb7Uoh!`8&m`sYJ!@5#V+$n~`>ufV(3mDnjv z$Kg5AfJY$kMAP|`FZy`;kLHme>v644s|R8Q2e!pmcc(5YR22H_l${I3RR zLzE8lPD<~DvaizR=3yMESe=$ZKL{h$18oB*ldhri|(ibN&y^5}fp(TI2tMgA3o+ zSUUDM2EJBOdi+XdOzE)~R9WS%e|Mf4deYWrlA>wt>QtEgh|Nupx_s)-6p13~si`M~;(W=;oDs`W zi|!)>_M~N6SBBduX{lAK#GnA-f-#IWQco5?U^4q{p;zQ_ovOJ?*NhS3)f_gJ*kK|r z3%-3ej!4C~4`<5P`j1v@Nr94j&atU>3V%FbQMl6FWG7`fy3 zFK(vZEP2O`fpsBEx$!n>7iFL7kR3OU&h*e3{_p#*3wHR zYG`y&Fx1FnNmwkHm=B>!f94A-n{}!zhDuhv{N$AZu9Cz4{=`+}N#H-4J?B5M+*w4P zWr56%~KTeFVn}?1e-*>sBG0d=(5u zOb1r+G`}!74titWvr=C!4kqM%RKW~ zUjsx}LeK|pZo+ASa-zj+ADs-Vks-;%ZE8ndk}U{Z8iv_|)uD96qyh=TniKh81%ngV z*d1n&Al4#WM_zw!1Z~B8Xi|YtHDR)WWDFr-xup^QnVdj z4u~Fq&m|3MknvVMKRpv#>g|bh{$yV&mpxz`Fx#r`P2v0m%k##G*ZrL3^ZS9zeUhg& zS*PCv4S$mjN__s#Zm&rTq_Jd(K1jS?EyCAw14#h*x9w}zL$1N;tSc)fYN5`+&NO4c zXFRImfYXwfUzUW=gj-j5TVN70CpBIL1On1}4uiIPOOrlc`*}G8-p@>vE4pCn#$EDC zbvq;HUzw1$cP{D0SyLz$rlLWTIsv!!F1Kg35dB>8f?yA#`NXyQ{ ziZG?eiuP)cmoP)_JD7e;O_)3PNq@WE-+kNC@DOD35EqV? 
z`Sju9{|71g-!#*I2&Z;}uE*<>&@;8M%+1xY#vc8zFZfqgi}Mcc@k^2EWs?n7_Cf&7if@H~$k!L+{cvXa zOX3%uhLNnvM~++(ff|S-THUhZV!vWlT1d+DCArvq)kjV%qO#bAieP8Dwyn7+NeLkc)8!vc~lAXP`LG7{5v(A`*7II zyXf4#4K+o*)!H~^rJLw4;!_#3g#S zDl1i2xzk@m>-7$PYp0at%fW^cFPPsVqOxw${5;9J#-mJ|{H2F}2cLl-7@qV*${uAWIz#)^VS zn&%}(sYa4goLZX;yJ>G1@1DviI6P|&fETEIny0~x8U{-)s~r=E9twU(#m2q}eM=VQ zZ_T%|X}LbmRn00WBj0xtSTTn{3H~7EBT3ZS-uez{vpiC&{yc`76B2q?OBoqlKbrSR z;L-Khe}0Y}y3IM%R9}0|5x0sr-AR$vz(QPCBSx#RQ&koYY-$GK?GnV}{M8>$!uHHAOR@HL_VTu}mWz`ZRvgczW{e8kf7JLwY#0 zkZ&73UwW3Xe@z8AIxj0K9-E%(pss)}OG9OBfK@bK$@ zEoL1V@iwb6spD=JbBYVW@R34r25)B$gxb$UZ)$PlBEGRAHeMPr*v`ygW8;Dk(a<(P zYv{HY0>TOnXr=^t7n%6$%qo*AKV;N5Q5V?aUQ-9F6}tMgI(#UKO~N~JnCXvaPt4Wo z)8Gnkj*8(5qJAQcGr@U1e;6gv`%+6P`41gRAt1ObFR+MbIS5$esshGm%V(WE9n~$; zAA50`)F0vE^LnuF#QWm4LIIs2Beb*(C!)qai?nmLcuctWSF$dcJgU1vQ~3`ijP_r% z(a5cW{#zmeQAXqum_=f+J!iu&>Od5MB`6?)4A)kXbtYm2q9kWTDAUP?N1q8dOBdeu+T}>73rCt<%t*PXFxYATr>q zdKFAKKE}5IPr&xxfbu!W3H6CrdN@A4qC*z4P2??NdQDWmm+J-RJZtSgz;l-PcB%1m z2$sQ5QM=s;$<~hfHlaj{60LGmIXFeDjSpQrKVVu+((~F<6Rj(+GcQQ2MG0y?Gn*3; z2*bFwRnN83PJXs%hec!VSI>(`E7BFjdE}hMUvMB+KzXdSdh%)tSR6rMH~eatNGYik z=U`$;)>$Yrq*tRw{{$@Gz z{E- zDR%SxQ}0K{=K*kbvomNmU)QaF&+6A8y@R)2*)V6>yxDVs%fN2!VH68uy7ttkOc9&f zOz!N8M@6^EIMhjW3}5hA;)oB|aDwx>+6++EXoi)|HK`Q-r&a%M8MV#I;o{U{wU-yf% zl?fxMyMJX%m17GA3IT5Cb5RoTT6XxRj(_e9 zq#-~h`l|MBR=to24FH%NCD2{r0wq96`7ZUU&lvf2S1Wmg*jjEYHSHTQjk#N@)0)tk zh>A{*Trhwq4VJF5J!a`P3XXgC9z^|nmKbAU2%5y=w9R=K=fU)ppudhIc`FT> zdN~lq?X~>#dDv>|zD=c|&fd2e18$Ae;`8@ktR0r*2JzDAX2g@j3Dnf6e@sy6)`3eC z?gSByGGjKIL1ZNp&;4CQoM@=Sa4&ttlrnYh9jik$L0Xt)b5133p#uWowpUi&>>m!s zsCk(8ruq^Mgvb*}y@hHF%W#*djUKT=rZp&rWXCW%zjhLa?p9W`$X2anD!q@C+>6bO zR|JlGhp5du5qh5}5Y-mls6#(#&&VE-;;^_`fhyXqDPZBTcnFhhoz{^zl<85WEtC`T zr5QxFUZJJAZGwnF5xJUFOj8p?0yA7tqS8U9hSoHWW@lYF3}YKp1_3VGe80dWy_Q{+ zfHpj^U`nOOGVG+Fw6O8aY&7N&t%#(v$7JLTc^|IcQP?Gty#pPfwbk&UC~HZRdwv*hfo?+k*SzJD5D0w48}eUw38>i)FVR1!-%EkA*y~tuW`NRaBHS0;Ir`ut*-gz)9~-2-<~5 
zgc-XXo^Xyzxi=GY9tzoejtI^BSmBA?aB8jq;5bz;b{G$go-Ut(3C56vG9qUa%s{7GL*eGl&7pb)+W>2_!TPH8%s2Ze05*-iz{X6I;+i`Zr} zYI%kJ@kYzr@BQ`+C(qZ=(OpRRWJd0Gw>h7-^or{#G$CNfJGCI;)ljN%j>{zHqvMi~ z)hG3o%w+pin$SDfGVcDhHC}}=$SjT5M1$E7acETNpdP2!ov1DNj?kv7PwSuY+3MDi z0w>SZ$wIfMzYmul$ng4Vq{Vq9c24v#I`ZljOIRkPuKa=i`#MYon7(UZB}NT=H)dLA z-i)Fe_FwAmhmOqQdq=|50(Bi0Nv!&akrlMIwpBq*sq$VOUXYNMt`_*z@kw8{2L9|9cr&Y5z;*?& z)@BQV5yKNxG!L0gEA#x90Grg@y7~D^L^>w(0@_fU@!UsFBukSGxpU*fUSeF1P9fe` zU*6mC7tDOpE;<@L(J8QTJA@z)s5n%SAA`M0ah*>5Ap5Sbjqt&BJ(XdnB7b5Vq!DR; zQJ`-PXt`o#@EgocVC@rc@8?+y$H}c2RA+>W=Y%sHwd>>;(u>9C9A#XvlgDH04rY;u z#%M+MIq(Q>P51rYi}Mr`U%-M|(mn44l=u=~)lg9&pVkP|s6^hihsU2GCTI{s`nEKW z@iSt)Z!4dvzUPP1tu#l;c4roqRO!fL?L{LK1&4OCI3!1kg9P1<=eDUM5|#JD1OaN| zv@v3TMoSOJgsaDDe9fWAduvyxo6@54{28Dy%_&f0DAG(4|AZ zXq+L7Fi^EM#e6a74)%2w@DKZNpdn`VZ$qIbdj!vj>~|-!xYDE=|2j>eoTMHb8ZMxO zZ`Pv|{w+(uG*9G0ZgA#>ZN0**j*4PgRAgEjS1ErCfD`9Gb6z ztKc}RNbL_LU>(fD?51SP_Hca<4?hKH?F#>S%S-=OlwM8S>X9wm%7qu^ixse+Qwu1O z@}J9}!@C4)3Pz1Q6;}8}Jzd9OgLbz9Z_Bm`(**zvTzXuOd%B6qC-N=kxC z|Md!_@Zi~+I;+NllNMx93XP8uGMqCt4Qge-J}2TQV3-^F)Yv2{9)HOk1LS8GqsxDNze{@k#4c#>fcFL-EsS_ylyX|dCm9KNXB zo_M_4k&INChG*Vo`GG`6P_;KcA=*Au|7bG5U4OM`F+&F6%N)HMnQQ)1$yV#+b$OzogcxvG>*Q8W z{FP)QoWkL+vWs?@q~Ya_;O`}T`PurWLIib|p3JWk!=p*S1LY#JX9^1>7TQi1b4yz?!t zW~TJs9H&U~k!x z5Y_kO=2fLG^!pdtcZi}TMU&3-D!u*>2TM=xU2^`kSmh-O*S>wjO){M8B`rV#xp(lm zHm=Sf=e#YZP>x>2I47A&pk&LlNL>7+ieWk>DCRMv92Ie&-p}zZ>%-ZbbZGNKwqi15 zL>6)+^gXM5Ly?fNGdEXz=|fA3LCqiCcof~>hG72a3tzBDnbEqBnG%)rz=AJl1e(!Wl11aav1?6C5DMc)2-NqB72U8Sj;b)ByykTA?LR3BvR zR`}0N{&3dx+*xbRrTzkMK?xi1z(!X$$lr%9e+5jTHvmEEgRS(v*b+w~0(NZj=83v;c4 zrFSARR;Z|VDG__&yV>bhpI9!&$iPrX`5vx?QI#;u(Y%V^Z+WA1Pc30d1m-pozp~RD z*2r*D#%C;?4O5sh9*S8*UyMDy!8T85I@7Km5X#o!l7*zuUuFff%tm=&#v zn@?f*13ezBonDw%^I$yEIL^371(baFU;(tB%ZAiw8;f8i2s&4Sp2H+C%m66)NK#%c zvRdwX{N;$>qgHDZ;4gE7{_mXhvu%WoEj5AT-A+C+-4B7~AIaX3UK`KUq>ksEzKrKZ9Dyma(tvNhl zL3K+;IgBZt@?6SBd>vfKBdeS}IuX?@5Y4$mNvOnSlqh(DZ%T_0*=!#Pqw1pmAw3XS 
z*rG%18ogH%ds7-G1xfRqtTu`k>tBh;dyR3tq^aR%*2^X)eLpZ8yd4hyfpSipD5+&NLZzQaf_keb_SKxoG-POk=l{=O-KW#luT`>!rL&;NA= z{=YBAL86z-Cn>vep;ApaX>P3b5dV|k-9VWQJ;~U z4ix~;oT%d74~Rg^iHqM|-bmx|wM{0fz;|b6I77$ux)Z*BU8z(yOW>fbMVaI;GAE9m z-BDiQgpt$Uy|Rou{+>f*Uw&q}XCTrWb^Ti1=u*z3qsCUaFRyA>f*-l<-6?4?nW&u- z5utETZDo%eR#G^={9ouQ`Tq~PO3fo$fO$o8WZshe=WBCFk?_6P_O=X~PLA1j2o;Ql+SKAnN0N}v zg6lwkhQ*R55#$o#pw;R62srJPkLSg6(tBcrSAOo&UIZoSy%MbtF=v5Prc!T>!HZ_T zLTr39%ZVn5Ur)Gox9tL+4Lg7`r0bq)Pd!0_>)$YCN1R|b2jR`^ap&{(zubtTMwaR( z3+{l_?Dtmm49)+EJFI!AIulI`W}pQVTXquZ7#ykK2aBSJM{JGY|_Cpu_?1 z@<=NbfLT4QxtnteMF>5Z_jaiHk)fs3gy|$O(rtesE$E`XM!LwfynIrOttlK=6o?n9&2Do`|lG^ zzV{{R&oO`N@g0%h-!%W+xc!|uTJk0;OF8}$sJVD_{MMJ>4Lfm<4E!~wQ@NW2?4 zSKRi%3pwX(?E2Td`H(D@P6BQyD6d3trvQZ@eht&M13%OG9qugO<|4GA&$gKI#`KI2IjA6EcZb z!P#1^gNxaz2dJ-j7BMK`Lc`x0CKn4Njsuv_j|c3%?rD1W$_ZTaan_uio^U=k8lmm#MqBB87|!HWkW+`x zNxDfZe8Y+U^WiPh}-}sUeERbT%=?7{Z!>)(cMSCVPrWnrOtg z$T^p%&eeAwZ|?9#>^n0Wp4$Wrrh4q>Ia%*HbbRv-a!pKb618oHhN^^8*?#YT%n_IT zP_(_IyUjG^-HmwSc_56|Yki=rd(a~zyy?MnCdS~Q&7m+;;dO}i*PPaM4f-0tj=}Yf zj=sN?>gG9}8C;sQrju&4y+6_IY%gK5mKk;ROa>$&EtSf=klN&C+~V2Sc;Gzft@vLL ze`cDX@hDM?X>NggAo4L{m&5DiSYylJ?td&VUKuU?N?#+aP*?`1&d0U0zZ-(KoP8Ou z?f*llohX>iQ8Pab+^-Sp_TC<)ju~PYjOz63 zHqBt(v4uShKT5#_2PhIYu)u2Tj=t;k3U)nv3$a!$289R?aG!CZf0+R_4s|qr(M;)9gU^;)FP7GMNfE*TM(Q3f%^Ult? z<|2wkpG-SIup*8f7Ma1l%2WK6YtkDhkSn?z6?ws=O#G8;8UBHkf&@Crm_Xw{+ii@G z;QR4DKq51B4{v!~f!ZyuUr#-!!L_l%+SFk{@xy1gK@3wBf z1})4~#6ixgPcQnKpV)9A@#_U{OTKmx`Tly*#T`D*#F#Fw089OSU9piLj>{X^+0OW< z@HZrDW6w%DiV)&=UHy3zVMXj5qniB7qxN`!+N3by3*53!*Hd?}eeaJO#;@l0*4qD! zLk#T-9c{s#We8iq;?w#^%C#7_7|3%GCN;c32vBe%Eb2j=66dPoWh|wvQEg-5&A!zk zjbY?@(G;~W&8J?OBrI2G?Y`i$PE1qu1^&(_bVFANP|rIHw#y3RO0ZtV)xqoN&%I+A z{u(d6r0S<&WHX? 
z?Hyz>a^rW2J5E)_Ke0&`v#R(RzXq8XIF}VIgt^}FpX0>VLs?Dg%9~f3iG@OPe(Z=C zpXqt%tAXqJaQQWWt;ei8jtqk~XaVR=QHWc6$(&Z13}qchOVIUECrAWb^2BbkcPd1Vt<` zOM${p_ojQb{HL5Ru>t(iXvEWML0b7+*kWWABfIqj}rn`<^CQ&7Wp46i-O z;`^d?RSA9XhP`Z1pqbXK!oHz(_K&tQK`APC(PUgt)+X@`=0DflGSD)C?aUhmBCfpO zWhC30A{o+X1Q1ZPjy7e9-L>ry`ud5)zXX&JBVhu6Rj_?wb)F1ZWKijS$?6f}`?ELi zbjlBe+;ll2iVmMl^&R>)7l}MEOTN_6y3+KCSD-QifASQ9*B4QPW%@Qdd89`5-ul4Z*r4*>iAJa3(>-b9#F-AdBzt z(GeUDpKf-2_kZ+fF5*G$)L%fFz;XlaT#Yr9&*8V{Jq>5aD&Xa>1 zUwypRFUJBmz#)+#^?fNIw%PleU7Ym~&KS@{|2ls)4-G3<-KFpDHM@J6jX%C5R0@3C zoC_#r(d@&YqLuXT})-8yTFViOm>J)iRN({8r zSP=_OPH^5Dhy{$iKt*~+$h=mOzVKYS+1h2hJxm;~U7I^WcG@nkrld`zjPF7MTuf=Qf_b)7$8M`zG8550E{yb8aR5MrWTC>07kDq&ory3V= zgm)u)17bEbw=**-&%O9ar18K|=x0=14VjUA^uy^v6v_D*o5E zvu7|XOz+8FJbqY@2f4hU;7Pz2E3Wc~u2OPI-t<3Nlg)?w*usL6?{)g^X}Y>#i42Ux zHCHD*V-i0_k-<`HPN(3m6=(Ezm5+FRiYK|3Z%&J=$Qm8L zRVpJ!wt_El#{N3>*ROZBUp$JsH85`ea>L)a$R<>%9C>EL_$CI}CtJ(@iJ_f0DRu9h zZ&uM2z+!OJI_rZ0-^xb2(^{=ueb+d!1o*N^ZpvQ)EIx;he`FYPe}w!;hW-!tKLx;l z^U4kKhmQ0RHuN#md72Ve0F zV8-ZdM~In!-xa=YQ4~!_hfHmf(SMFn?~di6SxK3upq1pVh~e>Y70+Dj zn-k~UnjiWpenmXaYgRPfXhU^B$;2PAWe+f~@+1a)W$nKP!2nd$N6eTz#fP}M5j@J> z82;}mL6S)~bU^cgmbY>P)NJ6N{!Quuwps2t<9`L+X$r8ByEI&$M9sy0KliN16->yM=^BBGnhnX~Hse{WhfMRu0sr9_@pAXLV;O?*(Bi<_z6v-U|kqplijO%zFZbFAPZt-hle?P#Kb23CCy zr%l#WKs>O8y--cioZlCB`xY&Tc$PQ-w%8hq$c%C}U0IgQ%?eC?6}&Me{tbxv`P%mf z&N%^q_`wUnC>_WpxG;LGX>YvyHp_SiBXcwhH_k$To1mU#s2rAiW1GG4j3gii#(kTa zt0$dG{mP=w1mC9|B?IV_pHSI z`-OOR%HCSc+A~(Ph!Za2+gwkA8{lMOVo$E;P8O*FASFq#{TTFP@T0dsJC3=z5_X)0 zvMWi-Ri`;Yf+eQM+WZe4_uCpg=@M^LCgsG?_j2$NNq2Q(i*J`QYC@XA!TOmj6a8Q-#ma%rG zbA&>bVmORR3+uKB0{H~<1T*uCP46?rTFeN+)x@W4KKxxVH->veFkY;_{aI~LmR3G% z`&E45{bstNirClaP$u5eW+qD)azr;?V@GM1-N>8Z3lVw7cnA+0ga=0ME2c!4{p@Z? 
zyuIkx_v^|}MPs>m%}x92Lvq*{bsS16FGDWOXcFg%ml$@!`-pETcS;h_;c%WM^{U}n zvE;Mw+oI2AT`A2HXI@%=7ml4?f0L6GhC>61x}GmrzkADyH&~ER_T#m#+A}UW%H3Vy zIf%2w3@<;Fpb~fryM8ISZAUKZLZDUxH+Z1YL>*{V66umRQ>nF6PJbbqYZXgFD|M>1Jr4=yWAURIQ-J`&>(6 zAx#1bvsJpd$`^etMb;5sAcb7f+lr^r2F$1j*~Rf-bZXQC#@^0eBB=6$3u+#{tD$+C2Tj!3k?`siZ|uS(b3cT)wbsyEYc)fSA~l9XryMnsPl2yWL@!Yxq*Ho6ZPRdC~lSpuU;l#fYK zXVFMnx+V@=D=zs)dUVY$I_@k&u+SFHBclL)m1RCMm6UZzBg0UxJN9=Nb8Lz)SY>Q~K+yCk>#i#_`|;My*);Umr38W+x|#8h@h+&oXb^R5}@6l;!`~I;v`Q zRmhPvw^IkU!FaFwCFV6*dNeRj15v?lrvhBP;z!O!C>+^8-X?6~jYe@CtuoNPevk;w znE`42wUn3K60`LQfWH_DMpqrKqkER|1U#>Yn5?5=wMH&OL+GEw#Z*dkf!p9ucJKiwF~&1l}c3B7cuuRJFI^a36x;b{X= zX;4s`^XpB&W7Ta0$AzWL9W8h6Mg#R-o$>#Uto$!eZt$bre(z1oI=T|bX_&fcpji9b z@|2){+;Z-Y>=zh9V8R~%H11UaoW_r$&E32y+=w^cwP#DY)b(VPPb-vXpYW?e$}?JXi~pWNt9)V2mBdlyQ8s8IhQT>=;SJQ)AOw|9`l8%c!=x zuv@#uS|C6R#jTWL!QFxvhvM#p;BLh!ULd%;yA^jW1a~VGcPYhP^5r?_$M?SH{~jZ2 zXJoIvGVeLB=_ir>^dOQ#FAGsW-zBl?_4j7qk1o$t2lMkC58GWeA2iVPKC%6LqJR5Y zob|a^c}r6w@MCg;=f|vGmti>wR{CUARQ!I2pfaraV>LnO54n0km{3|YZW$OPbe0z) ziRl4Uy-UimUnr;w3yKIugUQ{32kRA%0%|aF@bu-wKeYxl4O&V=rckWV4C~D*M+*Kq ze$HAwzGzT?-@PNRo-Tj?)CxpGOJzlKTesrfMPXPqbAqZ%DWmscIRb@%KIp}&L3Nku zCJN7c)nkD&V1F}?6zyWw;bLo9s??e)zW8(F&tgQWzd2g|QQO~br`zZG>-tYQ(`*vL z1k8=(4nziYb1xzy8cDCqJ0d&68dZC*39mSRU3(R&q=cWPF9A>G;9E`JRC@sPbj`^mR(Hf&}quR(&@?SM+RyyLNWHBxX>(fet zVFC5yF9|m~!a$`!8@<54e{-DNY1QdJhm+lg)^*NvRd28*3+#lmQ@YYP{^ZBWR={jp&1I5b;l9o!IVAwzuG!!PnH zq)ZBbMeCCw(Ifhv`S&h|u7~&hUMJKuzt?8jRSaFW_p+V7;6iYb>02%yJK`C0#^tO<_zc^~a%4qaUucpHp zcs#!E1{J`e8&#f?9Op^KG8xF&ow5#7^g}7C9p~Mjqa~49LFKjhTVY;M9&H_cCYWE{ z%Y$I49r%%=)0W^bHW2;%f=y|lhb^tZeB};nHg+%{qj^qZGeQ0t2k=Q zv+j$JJvn*SVHo<;-S+ZK);gL0P8r)5Bmr$h1UVKiy}jj{xTnWc>(DT_OArXiLR*=I}&f)ZTY$j$0GD3JEzh) z@?b@6ZrzIp;=##WO%OA1F)@uS>mT;3<{hng!+y=BWUn7#Ja$5(7B^F0NRt`g$ z5@ruI{O;CItJwd^fhOC`8LIlPUjcrOAt&|_}ICBhnPiyQCx>svTZyoTGGgMn>7g|*MQJz?Dy%Y5cJ;hC=*M!K1MNEfba z!-UXEbD;qxjRUVbnp*C$riq~)>=dJ~4~RZzk0kRyg!e@_<%B#?z(^h`4E24(i)r72 zz|LGp8vH0R_pljkcJ9M)50%a7Z#% 
zGvvebzQ&)H@>>1^ZFvgw=7uj$O+V&1_7QO9O0#Do%+(|OP#E%6j#ndBy2>eMzzO_> zIRy!bIoBMhX)YuS=AgVf%NL5X6rMj_E zAVddfsW}3BC7Gt0Y=rOk!dxb7eh*%olTiR|h@=CJI9+A~&MkC1SaK-?*GK^xaia1` zgW3|`J{oN$gW=CeJ(SF_82edK&(15q}AZ z1nRY9vxyYu?-K6EknXWZo7W>qXNcEfXuCR$CcsnyeV=wMk-Ac41)W)h+%QKK^xH<|SGwpamrh_sJ| z2<1sT68~9^&mmB5-AN16pkA;ZHvj$d)KO6zeV_^@2yO6bZE9C6*H<%FSVv^_S+`42 zH@LgYc{g+gv*{hqI2%Lhn6dREA+&=oSl`N^@<^T!e`TtIEbU-1qMGFOH-gj!uM=w$ z&vE@wMlYTHP}CfZmt@$BgE<8>X>yBPIA)xon#@n6ff$}I1P%^%3cA{Cxw6sc60ypl-6x5x_|CE*FhJ=VcPrT^A3aH%~B!VHeEN}@N|xj!v9ic&QgdG z5j}*Dy=XW4)mNXmU7KcatY%9g_TI-z=8G{xhlKRHfPDnd$bEbMv6;MfD}^iN4D2^N zyTGpEHSHy7@~CsowZsKDHBKv0K2MtlT#7cEQ{5@|CIq^x z)X2Rhkz4u2XFBALri<26l2Q4VqgeJ680chbA5C{*n?7P+Ph~Ou<2p6&RLC?nW@+fs3+ z55zekgX*TP=>ypIK-wf8%lWrTRO zUP=#&xG{OL?*aKmnD}f$m7GA^14F3jG9sD410GCFnf@G_3U`*6o`p=#-QGak`AY8h z?`TD6|C>k<&1Ks|=Y;T7iqixcuuKMAvMAxqp4Nw-L43%4C!2(Ht-8PEVn_@c8i@!49gcI8g4_2*J(r!1|*hi@XsvP)J%+N9AeXQS3U5Ky; zmhMS$Tc$0~k?LT2@t;~<=6IrR9iQq85{;*B&13C)FZnLgFW`sC&iEWcoUrNG_hg;2@tx6uvEBSPBhmb`P9vhg#4)JH}l&J-y+{3KU&5*XJ@XzZvD}GB7*a zV`da?dQYA7-?Qo+z=V(+DFckAe#ciEB~5H7Gh@jBo^^MX0Z zMTzAx#$=TTFl{8!4n{Mg3i*}hXQZ~XjBT;bM%ZX47`w+I3OH?i@+#}d(Tkz$ew^pXX4-1=xW?YArV080qM5~-(B+Q* zuo!b3%`)DC%H8j?(EII&^N(ZDmmEAGZtTpn&Jt^d9DM?N_uH@j)FOZ7t%~d5M`?2G*)N?>#Y zoo`t&Si|v4eXA`3Ansx};!Z|-MzI%i+^AnO0$AkVe)y&DTo6yda`cB1WYzlR-c+XI zUuP*_pBtrVB~(=WI5$@_!I~%G-KD%1WY{&db__x(P3Vg?H7U%jWnEjC8rh8>cEK{U z)EOi9KrKR372_-QG|>bcPtLvl_%$7uWer{-fUpH31)KZHPJa4Y?Uls-eG6gcT5yx4 z36R{}Sq=&sWep%r)+c1omf-e9Lv1N`J%M3~0B?$0zLmP})FB9bVe|DLi+sm{3toBFOZ0_LVg)IlMl~9QqpMpkPht# zh{u<2n5GAompql2hYp&iP(v2IBsvPa>8Yu-w9S`bg|xN!>#$Cf9*AxF;QmE=<4KJn zFUZi*Ay=k|JO`4Q5bXx{SU6z@FQ4B}SUe98x2|~aCfoTpLc6bR;Bro$xq-WDomytz zB|-bE`u+*`k5)7KhB!8jed}5&i#+r;6y4N!osU5x5C3P>BqY%RXRzwJ;qAvHCAc)b z`-snk4b9)hM^As`I2K8e)I=Fz{r}0=x!=2?h<{GqH^(>J0pC+jA<;crkdFR6Rpd&@ znsV#k1iG+?`D?~+4kSJl&O3~3Le&p+L=*^Pj4d5PQ#U$DHnsw=C7jx185zRpCZi!- zB(Du-u%;kW8)TD^=8T^^RvuSg`_5;RFY8B0Ho?xs&&iDVRu(S9<#_-($RU&tH}@Y? 
z=?I^_-2PK||B|@RdP10bX<^wHB595+Iqs}ODNvA^{Kn`%A1cH|W@BS~QwGh6UhRrG zvUNp=sW7YwaIjXgwc9~gze>K^QDhUOgc9(y$rqPlsN;3u9l;T_H=Cppe6BftL*~L( z5zO(c#na$hzRVhf1UABWFM+gtWLX(-TaplNU&q!YgXyX+2ijbGDs^zYMBOgZK>0E| z?o7E|w2K~8JzJ%N>O7Sm#K|iuwEMQC4Ct&IYw=cB%v<-gkYTl)dCqN~seI%o+2|)| z|Mn9GjGf#j_L^tU7+Q;x3XamilSZ(eNH%nO^=G_b$P&7Jj=%ir0P~+LgykV(i@NhN z;6jwBHbA?tRh|$&k9z!BdiKUZ+UtEQu`hqZ-`!K~xe~x|8~GP$lK4{<3llOKSz&nR z@nxT7j`>P+Es;d}uhZ1xkXHH!iP(zYf`KM4#FHTeQVpNWH7m5`F|CaKLS_mIEJdd7KvgGs2VkLgJ)Po+oX>Esm#;;ZyZ`qGcr!<&F`G8`2_G9aOv#G=Twfr ztiE^li3k!~FM9{6YQz2ID@TKgu&2?C)uov$7#&{_jR{OZdqi0r>O+q(wBs+$?${ls z2=6QJ-Y<9y@qyw{Zbv)Y%?oZ+v*i(qdY1%B6YitPrv(8Oxju}9AZ}H2P2Z~9hfl;V zS-<~|Ub-54>0c|c`Tkrvj=3uMu0^R$n3?RAM5Noa4dVmHshCK+;!ifZ*9RnG5ULXC zrc7S8(-)Mn+u5#}W6k6L{Jd{9Fqx|bS{!8*KyFHam>wCNkZ6-2j}htKJl-*gAQ6`h z%vEJ9;x|vsa*yrW-TX;8H?=k{SR@naD*`b`z5YK7Od#y#9&-z{4R8PT9n#FdL2~3o zzO?QIv$ZlIpC0Ky=VI1+dKra3ix|&x=SKdde2AMIhTok&#VFT-`yG0`M)0VNr2IGs z?wkD{zwN*u;CKEb$-sAud5-Tjdmy6UjIX^#LK1JbS#c-}BY##HpDol7+W-2Wh@=fx zDg^5_(*IPR$Ke(0#9q-_Clg+5Gd(RvZauLOg|NUon5W|izUDrIf)Rfq^2wm>o754q zEX5Vig#Czx!f3h}H$Sw5FoaBHOVFmbxtQ@Zla!$iSjhQI>yBxlX33dZoj9G`#}+?x zlbz{vIk{_}K2ocdOoH9^C?M`HqJ6Wo%9pq=rWkLnCd?_cSTirLQur_5)ZVbbfNpfh zj3e&d>TH-7*7gQrwlv?(2vox)FqCy_ef9Hn4M)tBImzJGI7j1end#8pJ>sFPfD_`+ zAn^>+%T|@C+#CXIH)K>0*kB43f0fV)1ig0VAd&6VO%j5JlQO9zONZfv%}N^PYK)y5|JiBFe~ecJ;(}bgw6^%%rlNzvt%*+ z06iQSf~w5hBo^A=v>9Dl2Iv2T*h0>ZxA{;_F&9mtem)IZriR3PA{oW;x&A<1L&>xe z?wm`&i|gxKqI-j>9JXpKSQBa+;5awLMxlZUm?8` zo#ARZDF^i;w7qM>)rvhCuc&u=!^Iw6ufe@i@o+O(7(KolPSYTC2j$b4Kg}(&@;^a# zR_M!0FT~QK){$u7^IU^gT$_VS*sY_;CdKCBABMeTr@{kqDCxc{sw547IWgHWV7(36 z-*UOj!NFelQ$WdNXL6cfd%JaFlIEYn!FA}-+R=*;@a^9Y{_M|FWWDq3r`E5uhAkr% zR_^nmq)(}m6FvA(Z$EY*s?8!z^Bn^yasIGQ$|y_}d1muZ`SC!wTyZFc5;7LQ%`4{h zp=L+BQ&Cx#LjP#;Dl_}OWM^IP-d!oIicJfMuGZ^##6fR@`-?|0C~~!~9x~JCqAkK3 z5tK6|RXr}ENUi0k+W5FK(s)^Y^2;V9M<1_hj!XWokk+^79qh)ISVcwM>Qc0fex4ZA z{?QWwTZS)d+L{NOJ3m2Aw5fCdXW}M729#N}s^1FltDEsfg7nV0v=dm$77){SYt84>OG&@<0~0IWyEHF}T2N4EuVE{C 
z{i@Yb=pMpu|Mlx61e9JXo{O7G=+&gNZROQd8*A+%54O$(`N<=XF2=$SY-#l@;XhU5 zmO-&PRm_{i;8o)qV~WYyv=#c4-PK-Y0jI*=p~r)#2fjFPLxCF%#nolbWfp`9Dtj%B z8OVT7(3ozPTcv?Cl?0Ik;S-8UN;^X!B>|Xy4(!XJwQSlVypQFUgyIt}4QXN&I3Uov+BYNhXwYx#QS_Z{ z-Wsf*ze9X3xikje@U9aQaUw1zTAaf9{F-??l`9e2w;q*150JrrgwnyLz+`?2#unA?t9J!H%i)kSkX!&0lw4Tf`C|sv%FA>r{?d z(9QsMWtF6NIAp6jPS~IZFccq147wU-?H7W^u+;Ls(SOG|_GrnGs43Z>12CK5J(c`; zX-&$yj^B8&l&k-U}AaD$~HRItZf(2L=%NhhUV8;Vc3cS&C_}cT{$9 zsxeuh_B@^~<_*D?1-s}+wT?vtPTL#YCt8AAS|IcigQ)O$fRr;`CE}!!-;u2WItid- zjwP|=a^Q8f`=+wq|65m`BTv7NOlisiMIYln&bSIpo@WHKmUAk_kOx|fSaSYqIckvt zv`l)iP!GM&#WGlN!cw>IWSLfi-}b|Y=pY^QdH10gI<2ALUzkIjljih-Ov2 zkmKrwv01t8;f0rK+pW1)aw25mV!9rZaNZHLxStcjv*l2E$LWDA|oc%nPD;A?}`~ud2G$92vM1Uj2 zb5M-9WJ41a?)#>@*$rfI_b1W8!V=Nb_?%k5Ik?{VwVi<+Ho4(|`R}oS8ZX8+(7P@# z`IT+!+VRB`67l`y9Fg%uRvRx5{go|S4^@G-dFlqy)@{7K=)IG8f=lk@{!v=G1bcE0 zLyd<3R2GZP4Md*2KTo8ieO(}_IPMufkFDv-_~0YxqEAy$;DT85e1?AZdO7S|zu>1u zLOfm42+qHCl}wm@@9FF9|IxUSV@E=F<&@2*Hybf!zS#|=K z`n{wMof-(PqX)wecacv~TK;JAa3O0UCLi7pOenewQ;^>~cgF;3|9hfc6BOQypuv{|g0Nde>zi$f@j0>4tvcdi@0z3tVPqIa&4MVt;`h%F&OqK=qkf z{pi3y+VhhE>2^9SPyHk7s_!^~i&cI!26U52lJbD`#1dcJ^T7QJ$ml@ET0ky#P|u1( zMe^--%hYIKj6jSW^O(H)@lfGk`P3rZalA4*WDQ}qRM2z(Y<^*&52ZBq+3qW_4OqXy zESxPFV3S-c9nJRU&(j0t-3K4@T&$jU#SmmzF-D3kP^Yu}-|_)Vq$U{^zDRCSWTM(x zL15(Tz}DJ^5dM;)caUulL3aa`dW9|Gqft(V+pobUdlpUxUkhkyYiXWaWfGxgxg_{d zbdc&<>9#Ht8{PYAf5J=ITV-LGn51^Z^*0QD>r_4611@?_*n~|BmZx+s_~`>$Zq4RF zK=GS&l{pwU-qNATe4Y?zX%(5niQM5EKkb{B z5u=ouFcI;p_%&58wqB2y{D1zDDi40$H4&QjTy5nbCFzqpJK=4Qt}}(*UK9|Pxco>A z!(%RlFewH~T03ulBvg@Xl$#2+Q*^8vY>9zw*84^v%JcwsMu}8RJ z?MbUeLCP-3Uu*bo;SX)8P>>E@TFNw3@_qlo5|?2y% z-->9%@?T6Xsrz?7U6yI;i(~a1=1+y(V}o}hx(Os^&6q{ply4FgWYlOsH2bl^wH1m6 zc1~Vs^oU+R9(K*Yor>}W*?i};y8tQ5A(l+Rnydn6)TiB+^y|+6_3qdOxbnDxQe4wb zmOe7+nz($u1hJm|iFHcIX=-dZ74|Vlp437RIY}v0K5Y0E*A*2-bjt>I$F&LFFKfX& zsm6LC7|HeZocCeyFxE0tNa%X?=*D>h-34R!L(R@hr?>NCaugitKWab+VD8jkti0K8 zD0z+2rx~$(u-aPN66n@3?cM1${4coUT{hY&q2D=ScVP{>kJ`qu^FPTyyQw4{e!OL@ z1U|=pmsMe^QQ%f5@a~{?S;K42YMe-9OTF>4ox8NTb2FC77<@T 
zNcMN-%ufqx>~w$hmyFpt`zVM*@cE8gDeEi@mwZ(qA~g%m7YnYmLcRpW%#Q^d@Xwke|r z>olrNnbH74+X9htam-h}ryIloe}K-`0Mc{vK#X`63Enz5kMx0zvC3fNU|Xk~_0lYY zceM4cCy9*~7}Ugm^I{fw4j#)TY}jt!=O#0NmV|e4v=QZva!w`YM~t;l;ee`0h=_6) z1e4_CA9h7M4%OxA3&V*!gC|1N=P#Pu4ge4BjSes_gVk2VnG*+rVh>iH!S*6jW9UCv18%Rlbw<3)8LcgAmM;j5$)RI@@ z0@;WjV$bYlz37}?TuqNbrE`M)(zn<(tIO7kOvCdWd)}%nHlRj>>>Mg}WhSMjWsRy! zv(SGODPr+;xF46NX~!!Pw}x7-_9;5I4QFoh|30~% zZ!1#5lukTElFJbEYpO!#)f~`4SXCw)c?cmpX+&WNy;UvY9ogEst40P#+q#8td-K_! zahy+oWg4;%o&VZAF(pyyZzWA={sHcKUbnHA<@v@z@0-v7eqh_NWrggLo8)Z^Z%zm; zU)AOIk}88~$fCWSTk{_+%p^V6uu}Yc9z-61ZLkHQ0KgY&o=MObjMaLb&z?kIUf@Um z%t|>FL;E7PK^K%yQ8_BN-#Y8n4mX;Q>0UC6}`T|@ep^&78^ci z`aeaO-0!CvXMVjfCmE1SRrmETN9>;lH8E&n9cHY<3onMQ~G;5>F zim2%)aLnpnOts>g_WnLU{E6ZH#2oHTKMY z+2Oq(;=xM)NV zh~Ah6L!q;=!mL?7%ix_5o6)HD7c z<;`EYFDCVQ7}Was@1YLMlx%0w(CI~_XO=eK@H=kSy+i_LsFg&DPQwFwtv!m*A<#a$ zjn;5MFj8GAp(}Xy%iet3)fUe~`7l@Yi1OZ6)F6j6=`#8BLkNM8EA@h8H#aovpD(OY z8|kc=TSOS`ZFvwy(JsR#x5-p+3G4s9wn_qC7-3&vi5^96L_bhg*I~Yt&0tl zXA3%aIFO?kG%G~1nu_YDvx+bZjSE@`Ev?y`oB%YO>R+9FSd33*fUZgu;aBMsNr#DD zo+24l^A>r*3vl5(>+j&#N-`_GE+{r{^I!cPhhWf~^j+}jHOynM)G!>UGG(O6E%>(I zisX2=|IxFS6gO_vq@pQW^5L@kV1d5iVJ(D1OKY$n{)@n_vClkPl8$=YR{4BXo3z1o zQ_4Af5=mdsUheD9P%DGRJvo5l{CM6mA1_Lv1m`@kFhVyAFoDZQtSFexwf)Z7!voM& zw$du075{2%GmE@-ejJOZ;l|*vcO90imiZRnncJya zOoa_Dz->ELPRSoxx7%Z~5d>)R&V}j%Ob+`q>Xwe;N^qU6IZ#JB+YwifHZI^z3bHf+c{>6kjh^a2URPhF?yw%_jh?kG1iLRG_c4C|9ESZLL>_ZS zeo5o(ZrsQD?A~^L@;@o*{;2W9tp4BsruS3K+RsTjKZ|a0BA34G+>y)Tz!hfp{b^?l zL{bh9^*ktY|(Z8amrw_IGo$eR~!Gi zjPySl#MR%B{_La>{_$t-53-iJB7+L|w+-t%7LN2RX=&>9Jq*mMK(!XnUsw!Cyqt## z9gBmJ1iYCDfRt`Ohk#J>%RQ%#M>g4)xRvap_=)|ax(Gd#dUJ`rC50JP9TJ`5h%ow$ z(Q9X&ElsuTE3X+GpQYE&E?04Z<_MjE_+j}q7II+>TXE=V1vkdFS!O}>>-ouiW#$j1 zPruJcF5Kf~!UN7{(>^zrcLa8|1Sp|mtR_zj{YGiPTTj^kRohQL$q1Oq9wuV2!UIYH z@jbbS%Os*aq^l@=~nPNln?3drPZOT`C(2W4JE``+I!oq<>GNV}Ws7FLDG>15a zh_cTyMtnBc$@vPmenzeSK^vRQ=+C6LMS)J7=C6WwIW6J;(EwyQ(}swgbBsI46RjGz zg8aYxAj$*)PpA398$JQ7$So;H+>MptWgv06u7=!AtyOQL!S^uHZwADBLG~*|`fG32 
z2DwdgEh#St&k?8tOu~^o`v&o|L7zBYe2OA#GfPME&{*GBX^*q&`(bjmW2~|(A}J@h zju)m&6)5oxh15mly#vk3rSK2dj($%^4V1}kmuEE@i&%JVnuo) z+fZu%UT0Sny~kHX)@RhM(*NZq*qfSQuhqxaPX>NEfD^+OO^9v6BAOUrO1x(JgwR0= z)H%hW3OxvKs zB+Jj*$eBJM{wmKxs4s@%$U_HlBMf&3tb4$lSjYfN*T{mp@qFoNgc>i8` zulO6jjqRZv^!8IV@v*tKjk@y`{)Z}r6RH}zonPJW=sSb}E0S)Pe|l3r_F+)BaiG?K z@^*LFO3TpKc#HSlw=uuTr# ztABw|rauBj&2FPSC@IYzv^bQ)1E=z(nh6mp;9tQWLd1Bxn#Y9r2MwN`6ng5I6-N4TJy!_Vh1Cvu6LVeiwuO z;z!q^Ab&sl?koB3&;G{_&&>BQ-YzPKE-RiS_;WWr;>iNO1lN2yQhEvop!wb}=e(qK zTrcQdaEd&mde$QjIgY66%eRH+%agxn9)k|=1vxw(c_C*L4iSrjXr}Z#p@>ovo_)&&PU>wDH8%Eo586dsizt`}~Hls@?Wi(hFtLL)9^X%u)+HlFra~mAhuqUNkNcef!EE|k*dOId zvHxayz+h)`>iaocOMqgLRO&b+c47yGukB@dv)FPVt+4NTQWMbFS$35?>q;#+&)o4Pif~E3XBS$3O^A-sEBi zY=6l#0q9+)qtRH{#*;V-dnoxTsm^h;PZ(?zi{)xk2YrtFXIgFFN>oxXDSFbVJwb6u5wI&3Nb=EHD>xt8eUoi*o(vr?R_~E zh;)*k5FPb%Fntj!RR1{8Vh(T=5!Dh~N^lG%bkkoqV^27K(GToRyXwhPw;AQKmE*0% z=-hnFD{*o>s19T|C4%Ne)M%CM1lYR5xfwLf0tv5ha)gsv_O>)O5S|`nVOQzFo+}2& zC|!-cmQcKn3{95RSn2iVDF^g7;C(Z7k}a>*er^$1<}@%&2?gz?{myIn5=-0k#@qASD;c25__MrJVxm?#*><8pXETKqTcr>0drW|_J35Y1W- ztq8%va)hQObTv20=IJfZul`N_Him*<`^Hfvqk8?`$xcB=(8g6ub?ybU96aG_<|Gyu zegC$1g^cV#y@wX_6+VkIpSEw_aE>iU4@~Gff?1y(tj>W;7G9*6FukRjM5`muYc@9r zVgBibH4rFSc}~sO2ZE2g4kvt{gSh;xq~*hnoNZI|8YsGnzy8 za}raLS^WMSLCps$I#bOnV7K;@IzCx4OKRz<-@W7Q;Y^MIU?t`hmjBRiY$T?|-k;gc z+_U5{R+LI<{^3AF4PF4818O$X`q#IT29k?Rj_OdZ>R~a6Z6a;678Ya7S)IKx!;Prw z^1)v4XzM^MuAxPvNOoa{+YhCX#l|Fxl1gjh5)5sft*`EIzAYAG3Fn!iQR<8MCy9}) zxJ%C4!!?UTWboat%Aalx#xYdR|6w!gvroeT)t&^P~It ze3EhVcJ`%Ry*K_bto!EdoA0Ok7fxc4N8dE1HzYk>JM8A$-pEbh$EAbc|L^I9=In*e z=xHk@`E(%IXAMc^c}+Q+f^7KZntZjiUB+_63pQMs3`<&F%E#xLoIR4%QmMAIQZ&WH z;DT+%cuoQ1O@DDPCGsH0@-Vw|@gts8LzZz(LTP8?Oy)epr| zzCx2WUr$DP#gIO)JiTFUdy^;LE8!V>K8B#Oe$W|6IWz-OL(@huOfa6v8;UR)bR*eX z_`#)Y6zJPoQ;NjpLTv)yA9zl zma$2eYW*y{*O$TixzO#+vSyj>k7_eDD|7O| z9Z4kAqv^0oJey+W~lXb)Aa;qgb9L*A2(Wpik6mOrJ0cU&I2x{4|O;n@jM5 zf6?{(xX-OG)XnmYC?rBSBa-)8ssR@{;%LA=7%{lKVdvLn7h0Bueqvba6OjuCYRo2J zQ7~7WF5&f_3uA<1mC7D1s2V-VuHi@dhF3ONcvPFNVS_j*R8tw}JKm7v+a*gy$%+c= 
zPTC}EMS`3=mDcB3cz~<$6*4YQGZbI#-x?b)mQux|ib|`uQ_}B1aqLU-E_lvntF&bQ zYLN}2L~dg>SPOQKgvbqEi49v~kGFG<(%Pti3=-pR(Gf=g4cuaQt^~JnCh&=oCna5aNUA*r;nnls8BD;31=dEu9viMb47n%$GE) z4i+FjaF_I7?VQI73!=gfVk-REp=JqeVQyTAPj|P7)D+FOU&W63x-yO6HvCMWJj z?v7H2HO?hz%L|+|=}Jx~5iuv1t{86(G!2;-ejavEd}|84sIA;fGfsFz4LlN(&7Kb@u1q`}xGZ}#~&R5tlH z4L$6_`@iC`zwVAvW$&6jdgeK*zZdyGBcYC#h+H$IeWi0=6Z%zOnvbzwU+;2H2j9sV zIzJy!-04QNs0+VP^sVFF`QHC65n=E@!8D5HW6=-BcjTWteGW!(le+VEh*k8Of$w!E zo~!9*Z#WE-bx@*?DrBvY_y+jssM%+<}Zxv0yQ$YpDwW0j4wf0 zsRC8ppVQka%~t|uad8}rKwqhvZDRME^TE+v?qp*eONsC46q!K2?uDc&x2LP|KXGO1 z=VM8m?S~Jyn|HXfB2Tq;t=`Gwu-aeNs}YAHN^*YT6qbR++Us3u6~`Wt3r{_=K`^4` zEt!8z1Q*gnoL#73Tae(f_0*YPD^vLXXn`kfjZ~gOvD*2R(eIPdODkZNE)#geXyA97 zN$CPQgH=ytzWOMQCz1e~khww%zoX7Oq7?0o5{%b{Zn{<|UC>wu6Q%~9vhT$o2AzdQ zcNh;6z{14Qh5d?rQ&DtyhiXfrG{_JBDAAqg+Uc>37rv`IyR>I0cU)-^MyE2Il_Mz6 zUyCKqojf)kUx9ORg9tw1)i5E5N?!vp2o0MPoz&RS><@C3?}P8ljj@Sr;i+H^%I@j!;sFJ%RsF&QI7w`3<)cFu1FfDqbWwHMHyoC}_h4k;Ys6u`*7I=cD~ z;K9aH34)yM(Y+Z-$_Ao*TxN-!_68Cp7PJ^Nm{V{qho#W(gOnFAG~nFQD;7)&-qoaP zj-}tSo8mFYD=Px)jNw746G}e}=}g6XsY|rSs}~dTE@v;^?yS8#wg&ZA_{j$CaPIOZ z3h8s&bEr_kxcd3fRsdN=Ft-md*+nZrgGiZVc?Tw2^Wt>!NXl!NY_qFB;E7rPR1aS% zTQ%*Xj54X}|1y=RX>P*Kk9+Lu0_QvHLbbe10X+U9By4WFHZojpI^!+KB{UQpa-Nxe z@g)U1L&Jxyz?4%PW2Y?{@L(?!9)VpF1)o)2u!bj=e+ZP3M6Co8_Qh+7Gl@>|L-mIA z(D}A+adm&Xn?aCze&V&wdI(y7i5|^b&Gxi*C~~D|V%gt<_EI;$;PJWuX(enfzrkx8 zbIxIvD$BHb?MU^p$E5QJes0fihWmr+wI64n!QVCthjA6G$lnZ|cMQdB95HA; zs|$?pcswind*bfs>udz-7L`IS{%V62Ud&QK{B5J5A6cIvMos&|l`CabCeN}{9CMMJ zj5xbd-IVb9S*JGwUe4Ns&rxlufGCs2wetIFrNTMjU|FS^WaIrut}lY@ zUO=C{1hHA%u`hK*|99Ktbs_Q)SDL`G**Zy}c|s0C6{);n%nZ5xNL}TBuxiL)ST$ z-5=YA|DY{4j?J!G?in?s_K$cosm7h1VblV`?@puq8KbOpmWyse!0ipp2aUJw*mocR8WmHk1yhFEXEXD57H%ZRo) zP-D1rNicJiR5_DC(+y1aQ)ocIAc`k>R5{jG*`x$6Dl$-++QYVzZHDqVu z0np4i>j*qHe?|;jhwfYG1Z9mtBJsLlX(>adfJBIJ&WKG?1X@*%>NJvY^OTr3tS2oF zO?4epA`0J`FQ)=gN%;NXQzYr}ifc(^8GtM4MYLK2yi|e0A!~sfP3NpzCHIXh{vD+4 zCH+e+no3Tc2+0jZ939V z#+P-d){L|h4I;|L+71{C&ytjnat{%zuWRWS`t96E4&7LNMsI5|4vFr@DjN>03J*ny 
z=tEosyEOAzF$8lzvsPw;?afLxqu9jv3+kxvI6&weQd;~M0ir?oCbsH&DSGOnkhi=s z>YGE-wv0y1%=ILf4s;-UW*Ee~f-aA+V?E?0P) zZ5ty)D2vQDAlZ)=g3ukZ?Uv{pjVp;111L9V+IS(>3U?yihNMk~pk<1QkHxQAF8&ll z9cGzquxL$SQy5R?HYe02*nRL<+~Pl(%O(cqw+7j!_)aT_! zQr_9grFPqmX&gN|2cDmpViTy8MpVCP!G`Kl6W@KOT>ZJ)2QrB-Oq}tO+u`Z{gr+n> z%&dE3UiO9eRQ^O7Uj9YoW)ZUgi!!0KzCi34&an;;>eY^pp4e&BB2E@r)a3-}jDTzN(qP(Jw5^HG*&P(n3)YGt!5C;J>QsddhQuhw4ooqNkI`s zYwb4h1X#?;$V9o>_8`f%sZg6ztlFLQ$|BXF0AmnbiW6hqa1d+VM{=b57rz;>YAjPz z&JLA}V}`i>LB$x=BUu z6GW(t?nktC&dy|x{V(YJRbQq>9;926<3K)7)u2AH0hz${IUDcA-KuYE42TJGscFav zrug*&j{I3MT)z- zJH;Vr3lw*U00oK_EAH;@?i$>JyM^EuJV3s@@44T-=lt4#_Sj>MvDTbx?l~VJSte7& zbEPFfTKQ;XC6YrJ)ypvcb_6>90+*zZL)8MuI&5ghwG>A3Vq64`p2;4m?d0`K{N3fY zPfPwlGzN)Qu&(OeatyF}f@Fv=p{?PaBylBx^jZHR%o?U4!CnuqP0+#u!7@z1VY~O2 z#IIzDR??i6a)M>tX*+H*dF)y?0x9fzm%7B9sH&k)4hB=lwGl^BkhwQL9-CksiHj+1 zT$49XwHj>FtyoB|(w}x0r2eTySsM8mN8lKETq|V(<%ntby5~Anw5wY+EuX|M#BP!( z!k6^qlYj6C$K@L8#@k9IY*&dZu3RbX-NW2*pUuK-9}?S9E&jw;whF#q^X|oI>cW76 zrD}bgc2x=|E5JVv%XmmUnd1> zPEVue5hoIoi2N11u$-2Z!aO6kQ=DlaIuol@0d${d2gGz|-=L#|E^`)|#lk0?*bO(E zKATh;f79XQET7J;=%UPv-El!Wdb(*>rBU;kVn=?~GXR-!%;>C|EjT7{_%M4~c@Eew zg%J45ydIUIEjlbb>FLl@z-Uw*QMh}rpCx~Em+}%11Xg7=vA{}+i-F*GnH}I|mZcYH zlZjh9eUP3rfkRa;o-k|JxbT1AzJmjz(u$NM*5zgJ0CZ35!H{|ewSG~ z5f%|SBF}Lm-DjsdH&q+)6*rAn=V`;Ps(r1z!hEsN5S8nDiR_<{26PH7!cxSgwy)Il zG~xuotY@95iq`oEVHur#KAv2j;V1j_>9DT+ch{=ufc&T43OL@WPk*8z4h^mS2yz@_ zdtg<(MPcygfw*Ot%_bYs`(d!%Oj9te6RTUQg}vIHWtx)ikqPwZ zoHD+o)v#~?V|;6ww#wzsQlLO@QounqT6@wMH6zZsr4aQq*+^1vjmj+;_W1Iar`_Q= zuj?}$DQ{FbiXJ6)m&Hh6rP|}cKwh7x`4;v3%vf-Bs0%;XDHxcB@v5G|yHj$%Ojpc% zYLZsHMZ!$6qV&R$-jY5X!?b5n{@NDlpV@l1oQg&Luf+F&Ow5o1s9>RD9b*|QOYVdu zBw{vygmt{2ptm97`6AwBFlO0CvV-6B^|NVF?zTsiCsU3{M21}@(f6#x&oK73j1s9y zN({{saqDE}K{<*GtS=Nl#fevi4zOndduWEW3jc!Uzt;2SMBjaW$nZ~ecE)KbBxAT^ zZ~QxX)O%rysaZb-Az_XO)J?f+zpXc`FYvDU*ltmvF0)}Dh1Y_nZ$2_EYotc@epi`t zVcz%+N&C4;J4eI`*3N;$Tkzm_kPdBIJok0W!5=9lEh zaMN88^fdNmf^wZi7<(sj9xRaQ-|Dq?-o9-PSZV}Vj#Ngq+vSib|1JFSl~$ey^v>!D{PS(#sreqa=Z* 
z)5DEde|n`B{oH@3w}=<7J>=Y)zK?aIkaN1DZM^z9q*vYkKvgvoNa1O0{XL{B@5X2& zsR}W_;MkPx$5LzTSUpr2OAW=V*E+-4z_7^R$hzU$??8q6mRDQ+9}IsN(SOLw2VZQS z@b53cQ1+DkhigWonIuxoo1nE0@is!6c1P>Xxk3U6L_u&QwLws{ZFuqp8`6|S)OR@A z_z=rCkb>PaGwo5?#L&C}xmx)Wor3I+$%T-3nqSi#9{|DfmG- z?U3((M(p4Nh%`fofKBNeoOC-IlSjF(bU$Iuy$oW`6~-GN@q^HvLl~8~M1}BCn!f*1 zj6wlzB~VvrLTHx3FN5V@tR1ci;$xV0~m3TsbCv3V(P*i$+zbDgAo$@HNAy1PCa5L zPQ%4m8ht+mHoi!!NA$s@O<#uG1oZt|ZFJ)#I;yVms*9~=pU&PlL#v*+lZ(OfTD8wy zY2#mm7;QI~yRX@v=^a)>e>^4_4iIJ;!Sa{_Az5F}{JRYOME=Qdmc)KFHQH2SNd4&Z z=bs}t*(jM04;q!LH@A4!+seLjk$?RA#O4r-c-ab7q?LvlTS}?aDCv?Sz(VBJQ?Z(= z)ce4#q-mNLLI!E%Ih9DN`wg)xE0o$YVuI5{cr9Rhr114R{M$Y{w3dT>gm_T%ZRrfm zdS)P1>pP!P)W2$o5VaJ_6(muzTE$R@iwiqtOvE8x(o&auC@D&UJ_Z z+Aev169sblP*!2sOR6nj5R57JJesGfEt(rOEsxEUv{d zt7Uf@-z}c;i`=9^E$MaV>%Mj5M+7ID-$+!uDqf2`e;$X8j#_++(I1F5X0>s@;9uq+ zV5|E~b>RG{IDh$qsbm9ql+!w!JIal^9J8lLF~oBJUNNJ`{#=4)61luE#c;vu-F+T2 zjdGz6ps~MD8j*lu`i5R9Urs@+f;pR^8#lei##Hv+x>&pZ;^Mpn%&ab~!YZE*(LtoO zF?+8)ZbCp)38lv*d#%iW!yx%V$rnd4v0nRv3dwO4+13)oj?z`1JQP$>AFj<*I_MZ` z?wL%Fp)V{rFfKgt_*MZSm2UOio@(f<4AvOa+2zAohA+0M&=oCFDRc zIJ1ZVOMrKe4yoQkW?Vk!rSvH;%X-M&x(2J{m8f6+_pL#{@8U+AY<=N0hQtNf7ve)a z5*tdh84`Q^gR>KRPVZi+p>`2G7{57os$h~)HQ>=QT}>KkE^|~K@vn0HH4>)*O}TUk zFTlJv=iT26JgpEvPnRG7fUa174Zh0>!ZBHVvzxPBit+q-+VJ{WrW*`^klES=K<$>m zn=pvH)S9!Dg@|OEQmBIwvKL#9wwZnL^oZouoY4?c4|?bzMs^ zC6z?!BNU^s_)TI01&XI$78DeD;g#X2cUTJCvPVZ67F`E5np6Phk+ z)IQin9BPKhhxoOtl%$K7A}dIppmfEAU!qCSmL8;38w-G^k!3rXZUR4f+krFcf7Y$3 z!P=}ud=y#gGTUmVz(lxzbGg4SInm~0pzSIcz0_ULCa9v)mQA|!kl8R164;sp)n;Ypwjm0q2FI17x z&(O?`T>7G~Hg^1;8I#VXXp^y?9(jwKt&!dFM*NYiZd6`4bN8Dr1iXtS>uym7lD6?! 
zI8crhRRSUc!kc14?jC6Kcfs$EM|;k%`~*61o892cuV0UT^n*QJBq0_P*GXAv z6!>hCi5OT=tOjDE&TpAT@^r<145XG$S^in&wQuAEkh>QbCGYZYj7ty}V@$*_&vT z2wAv~nuZD=3<&iQm2Iv`tt6B3H-C&~Yl1wBmSX+I0{D80NKt55wg2_)!f9bp{fZs& zL{zJq@};jvV@Ec#3|{xw29;-*EW1l60`}H|RO4AR_??N!E9?r#pI1J@%DZkIG_>8I zZtR%(?y^X07p%vZPK?E(z1LgqUfE|d_)FJeA>w!aAkUiJ7f;dUF}hMkHKuIvUt3a z(6z#TnSU34#o|h?>)X@8Mi#yM*v^u7@yg6(ZpLnY08@}pkiYLtk$(J_Qsvo6JIK}f z?Ig8T-ZZ?K=c9hcjDeKm5SN1~#ZX5MEfe#PFhFs^RGqoZ<#L(3s$|gi!hx>ST9v;9 zztf5PXT*TW9eQDU;ONh4pul-F;zFd654G!Yw9Cd>3M=tUyLrrOpvq3+`X+iYR+fuo z*bcs!$ow*Jxu~P4@oL?l0^esJX7rDg;-NVaPCE%9HG1Yc_|%CIR=CsSTSw%_2aG0! zg-hvv5;pqgI`bc-#ZxI$MRq9Y|AhF4&-d4u8(|w>@ETt}kE40&z3WqIK?Gig_o(mI zFYKO9m$qL3N$E1xP46z8UVkdJm%q#EAV@HtzFu0ntA9Y5;f{=s)?)l732J9T#F?;H z;Q@pDjkW=0-W*+fdBkuJ$NiYdQjj|S? zde2Ta5`FLgcq16c&EMMcW|yHMSaN&VPS=$CldRCe0a~YqNf;6C>nFAe8NWoii028% zlO}%#CU!ot4S4o}iu(PBE58pW?SXD8wJlc!m@Q#7YB4>(;rN@ve|$IkJ))&YN9?)(uTEaf)^R-)Pu23%#%Sh`*3gxS`-Tk=#G9h{*y78Mmr}ZjM?Fg(x?cN zW>ysi|7YjAm=SJwUtjY#bL2QjRU-yhY)}N#iXHoChFCa>lxS4f_~+r9%FjFvO+9h9 zx8zq(n)!3@E;pK4*4NNyI(@WXX5GlNdk9vb;^X#h$wL46kLjHRa~^T&m8sY04_a5( zG^V^OkTNCfItzkl8Fgi)XQnk1Rx*QjSrQ_KknmrJ6Q`4;NE8-^JXEIKsJXyt*-Rm! 
z*agE;TVPG^C5IzdAxTvb!JB&d&DQ(|vDde_xRn*Z>U{mPPQ=AvjT;JPUv27*65l3G ze{iyut(|{;&xlFd{dXLV8P|j5XzyqPC?ov1%m65w6ZK_TB`w29!_znE_KtB}dW{rm z?QNpknZ8K5LYHtAlgpM$fQ#;m6Vm;gbce4SZ=tqDkzS=4w4bjJx$hr3H6z`d=)Bv= zopJX*gG4=(V-Es=He2k(RTxfZn&S6qNKU-YSQEMmg-~q^J}s?SFKs6EQe;qYnfx?I-4I$IJDzC`QJu| znt!#*ZC(ntv=3LlVcHMx_*LKhXPg`9zR2avOO}(TyG_pflV->|gQ*LC{_TNbIAaBF z{bc!xR!LV{`sfY!jzyv;S@iR<$}Tdc(9aF%R|vV)Dh?Ktqj%00zx$Q|u}H+w{88li zHGEoUE$3LV=<-d(#MIjzTge^CST)KuD|IVJO(ZEhgL*&dQ?AshE8OWYf%*m&JHj^r zh>lcoxFm6qfy4^Rr1TuP1C2Vv;`LY}GQL>S>k%ms0{5T8Nog?i*&LAOw+0cOzaQTY zj@^Bs+Fc4pS!Eax2Em$k*7lIEs|39E~Czi<8U3ouTNtiCz$C;-Us}_Nbw>Y`AArewxmEr!Q}GL z{8!_5!}krzES1<{L{^80f85uyQ;9A0mW57x1A`9xx+>N_te+o3rQUqz#>VS?`R7FC zMkO&bf)Vc1cn$-@kdIx+@MNuxf^-_w+u>dU_OeBE&TcF$EU>q>yXHctw%IQTVB$%WF3BA z^$fH)Cs~@B{%+-W-BenZ-va4)+WM#$V)lX2%$^my<9;swP{a-`(qkv$-MeKV5N)zm z)TXZ%v)MH7NfuO&QDqhMes$xuzQ}slPV`}a(8Csm)aLy>_Bl^;QCr*toK^DlYT#lL z>Mik8F+1|&n1GuIe{`Fp6|;(*@Qd?jE*FA+bfTU?kJ`WVL;}_A_41<(N9ErhI$-Y8uLDG+*+a``Z!plR`eqER z!R#Vy4;CI2xyc_Da%(#sZf}HlU)2Ts^5E`w<4dpXmqAZj_tW`3Zgqd?dn)M&BDSCX zp^nzmm-YzQnXO6BcFYgPgi2?(6-c0G0fmxOe_qy=M*F#D%GH~j!;MRntww(j z>ECg-D8enh?oV4cQ^nCA}CByoz(j0WfBAYFU$7 zPL&yliwH<%)@s(MV)~c00Mj0Si9kjHej|B#Qb|X>*atmol6{Mv@OnYbTHVxAjc>YD z(>^I=Ob#LiE`Kleqrb+nzYE|ptZo#S;ioz(7d?~vYLUs8_(6_;1XLL3$>P*za(uM; zl(AX~*6EauJF#tN(FE+-w&L^bf#dD03!<1N9Gz1DPJE^Ic51=VmK7Zd@{n4_=S(Dy zZo-kYog!4lDYJ~B46kc0wP5AWr`#Y=_Rf zp((7T?U8jRPxB_JusK0*S2#4kNBOJ7QYP{w5y!bgEKNjQYgEQYhZ*bgsiu2w2Q*&_o$G-I7e#0#qu!eUYS=0If?Nd1Cnv#LvC$tWvY33a zSv8mNpw%-RqUd&A;jvUZvuwYwbmwp;u<}PJwGYPdImThfh z*ddnA&of_LFQK4wSd^@7ToqnxJ0DoEm~eGr=pySWZxAJsG?J%%3tJikr}D+ci&0EmRa8cu^Ny#@yenMa%Y}oe zzM_M(=WvrNqvu_EY#eOZ2KM7E1l1_2=P0V@k*+(sqD(=AA(Lg(a`IX{%xa}I%k?=z zFF7T<4;{WAwvkm7HBbMvZU1P*$)Q`&d$mAK6XEfX`u_>&|3tQ4NFuJ!9o32i>hsSS zFaPqSp&m?gC|Y1DQ74ZD&l3?9{hRYPpO?M@Ocfgt)JN{a>y`U=q=d~pFRDnxIs>NW zPoIyuKnh7!NVms))E%mA^-q%Gm-mluzD58;0T7@?lJo=NK`+=SFEK%iUdw7WYj|oz zE3CEYsD6H3n(CV0)@H%<{cT8{E`JisP&azvW~zYThz>lhl%SEy$8FKx 
zDOH!Q?k7W9fgc{fLs;7j&)g()9#c174~kSG+4V`=@gau#EN$!KRFkWmn0g+WKN0U) zm(5#LsKwA~C_5j=JaF6A!-Vmh=u#990^sTDX4c^tK*gDKq{Npl)Mu;#vQLR--;AK| z!h)si;laCo+@MbJ+u??STrSDC-kB@LBy}&3P#;Osx1PM~>oPKb*^yOYuQwsf@r65& zTUMU=?1Nv=3P3^D?*R>fMkkE#-bEl74Vg_!Pbo)(f3|qr!lt4<`Dv=MI0d z?uI$nryJfZ%oCtt6b?3#a5wa7rA)>3|9=XwPea6Q^p7D=ey6==o=z`NB^E~5EN<_O zXV;l%zc##}>1VLLa)nx0$=c;Zu2wAHcAbDe8FViLE#3J@7!Wq#-8tRmz1xLmPcPgX zRwI+B&IOkf6qPSF7kqzc^<3!LPW@5w9({+-;dJJic8=(MFGh*S(O>Q84+$rePv4NB z2MQJbZYVy!-En&Gfv;z;U719TK4$6y0C_tyxU!}a>9PPo*C_g(?JVgYDkHSwVTG=h z&FU=iLoDFMhwTJzuO>carO9DR1qMH~=ram7L(1fx!x55-3#fUR$Wa`S`9}=!R)PAr zd}Bk`u1=DP!vgUV3u^USz zK22|Nr)Cpt*5r|M>AH@U|$vvE1c=_!ygUnO1@&x zHXS&1D6g&B6#(+oXq1K(gK;z&>PK!G6y*uL{Lpxgo2)rCmU5jMMB$AJ^_gQczZ%2U zDhxDae_7PU7o7ki0e9Qg;e4tr&WNWL#__q**+F(RG z`waW9c(a2kn>2o2W*O|OzTXzaO7v+VyeWx*Fn;UT0OpUTwsB4|w_z>sqEY@?G1CTq z#6LBs%P6FW|AXgU@Q2w~7OTbi7+N>FxqB%?^Xz<~C8y@n4p zlq zF#XlJzQzz@;vC9XH>SRGhhir(1aFw&?@ZX}1$|P%y`J+hx^K3-#9SMOry8%wdNUNX zYJrC0s>c@W>Z{Q}{kD z!zdR5(Ij^10l!J%%!JgOq;~O8mi^CpVD)kMQBmxQxmS#bCR;=Yf-WExa*hmEmVBnw zR_+^^I(YLQw62?h)j`g$H0&AurG>k1w)9kw_MF9|`3oZ(C{Tu>^{hTbdDl6?=toAo zn0hYzQ@%C#={<`MmiFD!kWf-cMusi>eo#?vYxK+rP$9zas->R*-rcUdEckRkkG!5bH6vA$Kz;X#)I^zf5C<*ZlwIAoq}I zz61()H42NojIFRe7$BZ&Dg{9TnbN7Uy-eQfbOsG*F0V2UVUInp+j-8st&S;nwa(GE zPV{PPv&ilWseR03x!g!;xt9X$P8Pk)>JzQ*NQ~qIB>QknA4ycSECF^N$pUM(oQiIP zhol>{W?R1%$unm)0E$f&$_0XoKsV9Y#L6Nw;D}+b?|Q-&T*^n38&HuaOR6FFQ+K3;l0Q-mPq2k+L!RTOqajSCS(9fvvgGvpyz1qir1h%QDzO61~$$d7h0 zV`JNJL=zb)rgE2AO?x{dqU`$Sh9rD@CgE*rE5Dr4<7yI}(HQvQlBDA}K}6IUv#!3j zQCj8kdf@<^b;m#=0t^j)K)-)n&X2mmNs+(yVigcj*q|fJ<2vLZkn%oz;{%~WMEYK= zNgdjZfTjGxNnk*+NMqjDbys7G%6e zRDKdWWerSC^Je*ce zP2b)*jf|r1Ya!#d@NGlWwBsqus9;{ZeT6=SvA1JdG~b#1!Y@OD)U&OpBhyTYbSV2M z4ERVtW&je!gfFDjrNKX$O)QD~<2Z^WkU$3>giozg;f6aqo|a{jlTOE0dJIhG!o~;d zZC0)aCL-`rpjW+Xqk8I!&?ObK6go^MS<}#&_4PE1jJmWYn_wG5Z%)^0*NR#* zAm5RHksVkqtp7>wyu*Zh4p4KnU@HT_kFt>o!f z2M;5&erj&#x}Av@#RwGMF&!}(GKu_R3~08jd+InDz!&Ne^`bO#Jb=+I8oopR=>87u 
z_e~MCL<6UbXBklU*tsxS6!&=l%E!|)aw0t~Tj7H`XIFKE#b=h(p|ljFJ!?ir!2HeYJ1XVg@mtvz!pc4hzFvl#n=QbVh! z+8>oZe}9w&s_&a!1_Ib#AMgF<&g*}6?@zrcxI?tpOsgIq?`N|kHd<>9u(uEK-Sx(yl*$;=h_% z_+H`{TAOiFpP)RgMxc@vzh5GsQ2nJBNH4cGN0OmHQ<7nHf+F}fW)f^wA^o;dJDeJD zY*91~^zZCz46_OvJe1b82<-;K7ypOUt?URP0dDEdX|+fNDxgE{uK#u~}!Dtk5a0`U$TYp9!LC(%GY z`RmT#wkOB`iX6SHT}f05ENtUgB3d%dI^w+he_oMxtksWmm-__xCrWCgv+bK<9>E>bh3#iZl;g*RlU#SDmX!>n|>EBa(>mndGZJ^4ngVE^fEgERA*%lk# zXRFSreV6QhX&rDdQ*Af$0?OJAdg18?^VMi!n)g507G#xLF)dIe6%wlV1U<$3?OwZp z%6voZ{04B<5HTO(O@;YXUPAGsJkHx|o-@`-Upi;57vGA^OYyqhy~yD8{kP1E7=Uo` zd%KC-MeNXYNVcRi=i8rvcCao^xIp*!)Gd5Y=bOS&tUhSQt{|v`^aV=O;gh}NT9)Gv z;UgIDmMg#0GQByblFB4_f5_N0YJ8H^#Cj8~3{>u_7_Z6e$OuMz*mWjsNA(e6uXsPt zTYBm36ej{s)S9<+b5li|+=FWG(zJhR7x^JCnXxII*0zUwoVL2ALkj*DD)n7{5@IL7c#iN zdm+KOxIt?zpCROzGI8PN`pdA866P#fm*Hfyi`w5?!?V+YQU$4hhh6iy(rtOiy0|ty zy$5f9-fRFarZ;}>93-Q<``O<-ESpb?E}5FM^y2SpH168RE+e9o$1VeO1mZGyp)2@Q z&@(oQOj!K7f<)+IfOh{1Jo(+;9k=6fu4)(I9OYy~>Uxf-(xE+X!K$_y@#|iYW+w!W zK)kBKY)cHiK0iw9L7%)8(X@T9wfzzdZ(A3pW2j^zc>dRoONbVG<~6C}_l|I2&uPiV z>wN}mt)XI`Mw_7^EVwEfBYS1JUzW_KdC^Tn`{v_%J`2QVGfr5sJWRbc=0iV?6 zy@&H%J??DchulNEsJlVOD3-- zIJ)KWOQJLQ?KUql9Xs_men~A`Vh3 z_=7E7afTeo);zT4UQ-q4Jibs?Z@{8jpq35HVN?&L2DE)kQXB-x%zK&)*<_2$I{n;jZz{ zN8785;?@Q1wjZ1J1y*U%9ICqt6oW6B-trIGR`a@K!amuK54-4rffo__)xUuaol4{- z9}`;EmFtH%wLHDTcc--McQd(w0*a% z6*iN;!4Ku@w$HVVo@c&N25$`e^-dWw^#ci8j3yk& z;-5*d)aLkkkh2Ca5StHxeC>jdMOkrJ9rfvH?XVW7Z|SPNCjC7~V<(a^V3M{prl#P& zPO%X0$SSF%pY`A0hVT~x2}JXbJzz%CG^v=f$Cf`0dLQ@cTok2LmET7$frSG1F7?+* zt$`C&mwX|#W(J1E#&qZu#XxU65*Rcf_{;sDSS}Fr6j=cyE zcyj*l32yg^<}I8FS^j;G#A=MGTtJ=tj0pWIcUJc2DaN4(U(V;hf*Adj-C>?{8ygHN z;sls_>~A6t&RNNIBM~_Du$H2Z9F|HlWp@b(LpvLE+}GYiJdTWMV{nfqDDzxz=o$#+RCMv1igsZ2lpAS3uG!i?>2geMZ5S|p(?nMDnA*dynNK#9OU7o<-W z0HB-O`7F%u_DF3`X}DLRV7~R}=1?af2X`**U89l(F$XArk4k%lmXZ`OMg{jbF8i zSeQRJoGY7gWAXXH`WjU)>*?4WQu67l#Sh`@wU=U^Z~H$(gZ~x|EU&&Ezpm}bPpta7 zEuQj&MW`Y1j2fdq98*0ij_Z^0zj4XXw?g7jA|OYsRLd^+MyAVseo9|S2QP5b-VnRr6_1yzti$Mw-r&6a2FPxP0Tyd5HHN~;!nwk&HcryO#elm1V}2=)CJ`Y 
z5yn_{epjrvZBWYzx9N+0s!L}mtRK024WTh2!nU)U=}BZaG9Cwi|KVS_xW2qDVPr>| zua6)~0NZl*3F6y@9C&w&3=*c?7S7bKPD=&p)4IZhqvuOG0ZU&;Of3R$fow6^nF(CP z?^5q@&0{H>cfI%x)5>k?EbuykoLZA+?Gja#5sc~P?TG_4~$JHGy-N1a5 zcWmA=B1lRWXgOZm&hmO)i*FKp!k#`UPfU35iTq?NC7%qcCr54BqF?YT^WTpR8W_%n z6cw7_Ud!Y+Uulq8p>pEDD9N9jGOm1Y}%; zNT|T`Nr1~cFzcWeRp`XhhH@vHh#dKlxd-1?@MPq7o{+3Lp1)~m--pazMBS>LlAq=E zcpFM`a*8Jv{<^C!Fms?jZ11^;WC=t|^UYeQ1Aa_je_Owk#p;BvVyrZs29r>LUFBT~4? zOVf}3tvjr7u8YuC8thVe zwRTD_Q6rl$-L)Mo*a@3V1yDF$rmqjx#Q~@>k(ZgNYz3L57|S!-bH)gC$5bfebTk8G zlIa@iCUEiQIz?;;R(=tf4)(X`T3_)tv9pUAyutp9H}Rf3X>#w4=&WJxBxx9!*@Giy z@RzfVnymjIAEu*cCZAR(Rb@gl#Mwx)?#ImcsVMG4 zvHm?~biv&B%aFzai)^w1{xP_~P1LI#T%SjCXSjLqnlhZUEzVn2rCd z`)DXDJ2u96m{6@x>m8tt;^dEQz@3=0ox7?pZcUfuw1th$1NdG`)4*BnKVxWy-*}SH zauIT$FQgc(-q*|tEQP1X02d`?$UX*NbLda7Zrq2*)f&Tv%NHoqe@`y_qiO}w_|qR# z%Uq|ie*vKhyAIFu2`Wmk_!6((M%}6-P+9V37wOEFo!H0c5^PfW_4E$766hNs)&IOB z5A`=Dd=#kx77;vHjpaLqm}nTWZ{?AL$67;h(4sDddF%urqqab>VF4CTU8#4dq0ppO zVG>#hfERa||2DCzU!GRZ>Yq|7%z%F#S)k`^>Xg-Pc_JUH8HfKtNjKG|b)9QEFr|@D zxg5~XfGux^FG<++wSKeJSpSc|c$JnkqD6mtn6-Ci1_jSFrjeTv{J-Cd;Xko|&LSS9?LB%?PPI;bG7|!)jwz zv^pjXSIerl`&l|imROjX)?=e<@X6#|C^x1?udib&^nPI9PVPG)sKIIMrL)_`1^;Yn zfo7D^9$BRWe8*-5^knik4YS>kNT>C-(YyGrmZI=78lfbQIA5_ip71ViYs%Wl@oHV`CW$Bf91ov~Y3Pxs+KPtvgSUfV8xDA$3|J~hG z<2X@#yE(YgHL|A&#kve~#)2)85hiL`gio4anaX~!!7B~`Sx#}vglX5sZ;^fOW9(rp zmaxqKh7mIPK-_#F++THZ_xyu|_#kOu(u|(%`MC5Pjt>q-wh)G~_ex510Xq7H*uGhS z30dK3q0ilf6RH}gdAd;^4@GSuBH-oaR4837hBu%6CYY6(YJ$`QS-4EUM-Te(gq6fG zR^+RWLAwd#+6|>y{Q5npV>gs`s`Sw_+WwCTf0aWNHt?`kL+v=-)79PC7HU<*^r%omxs6-k@nLa;R{WKWr$T1y@C2Uzs%1QdQ z+g#yp7#|bM(72*|+Q-CY;mIBBPlayr^Daa^gHPMD*hwh@yxD;fI)2R~h~wfGd2LeRqo?XnT2} z;s!XkGMxAdLqafhbT4=lT`(Gu)MuqHb;}QG9ohQ&5Xg8dE-@B+U zI9kA6S^0T*fTwMD+W^&pi;u5)qZ!q=q^H}I>+UsL=IGR2~jNEV$MoKdW^ zXhwOXiXqV{)}p^$a+z}G#Y|}hH%>uTtA&GvR%ITSK|X7bfFHJj#On)&!WPYcMja|w zmN+9Fgq;~T##=t#|Mbj&NOiu+U?C_8^pqc)9w5Vy66imHwh0`dBw8;;D4KJJioF*; z-8|eD_VQ$?0S3WkFJp{kX)OcbJvFWUg#U=nQ+{Pu!D@yXIgljn0%>>RYb*8THvxm&(!iZ5G!X{|Cse7)XHX{n-;WJ7WSldF%T 
zI?O;0;y$0Yojlp^oMWTFPfP1{HqL7u9-So?N#Rf|s|2F@%&skc=1F3ElIyINV$f+a^G&H#R0bcfeyOOk zcUqy$()eT^$`Ze$3~~vEgFFt--_cmiHXnJ~i-z*sOG>}CNOo6L4KP9C3$9KsB6Q?M6`lBtGAYtM{qYCj%wvi%<1BK-OLgB^2K0u{kv(qwVoj?3kp zn3cVKo~Cfp`KxuLtYYUC{zOdK3nMz25((g9cG(+{YbVcCFzN;NBkB%3*IANdosnXM zvWWjBXcofhqti*mX2*>;yr$CC(AY==b>!gm_IfSahJ`q5!9``fU6Z=)+81tpnpip5 zN)nU|!6C~Bv`CvzW)|ehM9nZ5-eUj&dfQE?2;32b$SHoZn|0g`dv`9`+T*RAt*6pP z2pS9q6JUPmBDAryyEHdwn|8z9-KiW6;IccO{rMkc(ErmNZ2$gmQG{QSHi|r+)V%p1 zKaeTTiQ`Su=Np8Jw_I=1lGlX|<4u5Gi6~WjnFBP-r&~uzbOF<8!`%8DSs#_Y z$Vw)kV5L}5R6sFJnhZua0qm1eps(^Sgm_!a=|tofzTSr*RDsC~c95svcSF74p>pQ~CUQi-xc_2tIbF*7>w zvONA?n#ps>eXotknZSIU?)G9Fu!hHwIXu(U$gflM)(JN zabnU5-lkW_9vWyl?bS@4`EMVRevv=H4h^FHRgE*Q-oVdv-brRgte*H|EeR!Yp&FM= zWbh)dy$iLSg{Aicn@v@XPo(X}uE|D^?V;sFm5>ZLFK-sH&#-^KvaV!s_NeEw8>9XG zZ1J;IjMO2%2f2ZCiSfwuLLlOMP>9ss4uYf9pkpXFe%Q`&;K7)ZynZV(`^sq zv;_(jhaiO(D_&dz6fIh$xVyU(+$}&`G|(W$i@UqKyA#~q`SQHydw=IS|8ZR*nZ0Mv zJ@?+T*IJVYm|ha>J;}Xky~A?g7OYyQn0zjWzpOw;z2NqN# zmE1I!*VOd)mPx;UK0MlLRZrhGTXygCMkd}pI3paOq}a{lc(JX-fNeGh%~CSs{MSl{ z>=iPET>hhz0|HHL1F?cMkxGE>h*>Gko2ldDb;*7BW_bH7(j1y_;T9GXgC%qR5;R;k ziGbnXz8uhkSg)p~r<>b&(gdOKxm4nJJVy~aT|;)+-(nyo2E?@|X9m3Y+8USI%?zXD zD0asdn9zJX)+BwGc*pefS)Lb$=Qw~OSWJQ+dMxLN-W_^r-ln= zVfo@4pqzG^eol7GyS)!EtM6W~_1TzJ8LPU9tUW%8j|pd?ei=J&4m;$7r_6O;E$*YxweYKvl+tsa5U(CNERvIwms;v?5tj>`%RB zA5A0T=CQ0%+lgA0mtlVqyr~@mB#3`$DNkGX;f+Ql0zYYS2NfA;^@=Fge<8XPOgBIF zEqSQ74ZLwRs}z}pRmfpgwfX3D{QML`&Fm~RHNzM+;1`44zo8bPH^6}=^<#*=N2=)K zC{azCS;|f3z{6y`%-H+w9)t8338)d>#%(0KAFCve37)cUb?r z!NKvz_D&LCa!_3WtdU=cl?U_Ygz}oV1LQrTRJ{K}0f*bA;gEk{cQX2Hmdv6cr>t&{ z?&|LDv5xe%0x-u!w|368^{Nh3c>iqHi_hjbm*pJ2=IlOjTNcNMw)FOpGQOJr{-ME? 
zO{gFG(KJPdNij;c$mQ%DUtTXgyStl!B-nNf8Ke-y3lKJ^!UmswJpQUK|D@4<@p%aravB0;8 zFD|Mf5E^dd83j4TN9#@j(}qq%{*f2i^_K!UehFs`S(jAdJdrZl*H=j9SX66<)}m(<=Um7Pri&sKHpw;wO8%_l7nyQk}&msh2Jx{n^Iuodr<1OTP?A$z%|0iri%AQh#6 zLttIwNsG2kDG2sSqF2?hP4C|8yq|8br13Zl8U1aa|7a(+u$-dcS#SjS-_5sxt;Us$ z-`HOqO2Qpb}_TckBy}Q1og09%ESeWDIOM*EI?>PH`e>*w!S93>fmma^Kx@sp3xVEs_ z4u6eJrpCZnGhz;1u?c+_zKOvnh&7q;b+W((JmW-OrCOjuNtsHS@kT!J%~XO-CgyjB z?~Bw-ww?h`^EDJ&>$hL1<=y|_Iq^kUHuMy_F7!Sb;oi#B%UyuZQcZkPpTxS~eLNek zzgBC=)5{Y)k)=-0`3V}25(RVg=G;CGIcdvnD(}qc&`R*tWJA5{S^>41+eaS=*LnrV z65f!*FqG|mG7u25@V|&BN@?;ec_A2bebX01GJ00Di;aKdn@8F{Zs=juJ9Qz;@x+r+Bv&E=Fw z)rr|EsqyILW~Vu`&Tw(3x#f(o3J~@((b=%QumFniHIwxRP3wk%8l`&?64(JE z!6dA$?q6+MIcBMk2n`_27&!5J9R6gNkpFEiOAxAfdsuW~E>0NKj09D|S+kCZ?AspM zp8pDBXSX}{rb)|Y9Dw&~|1;po&SrB!o+W!-LVxaE^UA?871k*t*r}NhNv|~^Bb?Ca z%{DzZo|U%CsrBmYVlgsVlZpnqh28HLsMd1EJE&|OV#-FVfGFBDsT1h!36|j3AUec# zcqWGorGJ}OP`uV7Nj^R5bFU)lJ|G-c_OcxVi9y zCdp&F2s+yD(_a`fR-QWos|?5=WMyS9`)FeOCizc1N4-CkuHF}~W+fyf2;I2eUs_t; ztlYGy>m{|`MzzLW8UAl}-R1E<6J`v35M-IWlWIL94sK>U6}px5Xvx4(*z5@qT*pON zs1qs&3m_U*w3vZJ>U@ZCpTahUejY69f<3JA8XG4e3iK5(XbwYI!|c%xAbiTQhgo)n z7Wz-6EaM$RoYREn7Ec4Hh3DPIQnG>XayXQ#6+Z1@+Nu{#b?cVP{OzyOA$B8C%-NlA zOSdak-5X2~QkDIapQD#UlOs6&Ng~xqORkpd_hA2;JosU0%{S79Y{Rzsqm=bL_ks+q z#UHiI#|1(1Eg)XP8(TK?+&QZq;7ldS{`NK91VieUvjc=xk6pf3+i7!pchoQM<-K8u>G?jN>M-u33zh`ketJSj#!p`N7TOm&{ zE!|w)LPyGKxzvTl+pK!`xX|9W$|a>x4)XM0R;8~2@xXONhS zzhJE;pTKVXGO$HlL6dMmitem#!0={O-SiF?&NQJvdH6XZ?h{X!7UV=*e{@iu6SHuw zpAfWyaJwX(_apd9$7%kr91mgebpY8OF`y0KFaF_Fu!(1C>)vkbfEcY;Fbu%1(NNVy z%k!39I)D{~g1Zt%4_xbM;R$r$V|MH1`P?W10r{@c(vD;wm%P>0r}Yo_ncMU>-WcFC zy*p9ge6bdR8~LO$LbY1$^eFti&PsN7*S;AmtWfIrbPz*`(h^|5I?CSS?obBHhzGf( zKN@;HbwdBRx7;i45`xpQN%Nd?A8 z6PUmL`*&|*et{us+n5a|xjv}>%<-gm_oSNJLs$Nzn~(p=k34snE|RmAL)!0U^@%Yd zaYV_o+>!lha4{*S$~`(ddW`kuoON>a<#Ciu_FuUxyIWvA^(pI*quS!C;OW-n9&Xyn zaPllTzE`=qrT8LRzwS#VM@+IMosR|fgkHm1l%utgSTgoEhQ6K7@%jpig6@x~Rj+^< zXOjL6hTli$FVyq%8V4~;c%KlNyi)6Qglp~Bno{`Spo9UE;TY<) z{I?ae6aDT@P;l#Z@s#U{YyoKZgKfAFCd@gAow9UiB8k#ezW 
zE*%_H@+m$GnR#(aX<%}}O>@S{6KB%(k22?=VEPrQsaEB$exm%3>6umqU37+D3HyW_ zKneVxEekKBC+e>eQx5vlnsvww;zaePppQO+`J)@yw6`ThpJ!}Y>-^n zYb=5{RjS3`0$4qghULcykCCOQ`fHSqci!6x4>`p@w23GKe`d-H#&=wQt*NHdq90#X z49YvseTr>~Jid*uofJFqz+OJPhUPBnKXp#IFQ!j%3tFM64>?}0_3<{f>=I(G(0FPF zmeQc9m#Lx5kd=R{^&!vX!}`*Y@P{zzVD?&R2g{-OrnMu1gsI^-8gOqzt(k{-2{qS) zzf6BNsRLi=voi*V#3=03=c<~T_lBfcE%XZNW@WpU8%|G4dJVS}J}=s3yP9=v_?owA z>bGgYEN7c7XCLrT*7P6A^{dSbI4~4`kec3RfHPJ6p}J0|wZ{Ffp<%(&A|LuTI(A^# z5-tVuz>#8;>6G*3fXq{?P1Oib-O|cS0Zc+Tihe2){9W#h4h|Ol+(oyyv~YuF!XuS5 z`DM9G(y@UfNJ0@W>0sv?E3I)9S0!kjmKV(TJHMJ`QO4@^^sU8zo-oiqc=z(go1Wdi zI81|$uiaoUx7PY-?&sw5(}_QrBbL7yntVYm4qy>pDY?&@aHuT0`Vv|k`Es~&#NgE@ z_*oB$p?X+inU-QSaVGSj2NS=5KtUh$60ZKq`*!eN>);Mbls7viVuTQWCJ1*VC2A?AXKd;3JD+P2tq<_r zm_sad6kyu*y6!MdO=+hNKSm96%OH9`@rhq_@5~Uxp|CrWf{uHRO+H~NQR<$(!qW6^ zh?QUTwJ3KIHnK#s5U~zBMjJmUON=8e5P>p-VttBbJYueck5maKJ-eY)pce<~E?0i{ zeG)L~p=|8&E7YzPqR@PP3%|oGe}Rnx)I3%f-k=U#xlIOGr8GEe7IK*Xek!I>1llxp=fepR5$9tagEL-C8+FgDms;L(&rRHEX!LPv7SjSin z@q*tkA^;iumVR@Ba@Cn3c8a&<6(O3 z^(`UCJ~yKi&GtS#T2@1V`OH9NSu=^6u9>(m8pQ4B4<8G(Y(oWR=pW1&8}%6 zIq^G97jjR5_c(;09oCk9^=#Z>mGSnad|DPop@0B0K6p^E_>Mtx1j8$26T{Gjl)aXrBqH%S8(9fLut+v ztYZ!^l5cD!PP%)N+lhYDZ-9|YSFCD%E*DIvr$SYz(1)kPTaD?!3_Zb5YgF{qVcY<1KR*=5yVjyf0|nwdVwN)ijm{l<()!OTmZ zV(MOT-db44*vaRCf`D}h5!=+M*kcqv*|DDJ^B?l(3M5^Qkf`DOO7t6k}Nx(^=GKhw*tc=zrAI@J+b08d6zzt9A5~ zZE~Hlc1f#c-cH^k2DGB|4w+4=4(D2g)K};V{UR0q(tFYpdw&A*N2Bz34QKUIVUY{U zq<41yIy%hd?3H~!w+17F=_rMqU8aMF@vrMrEs$-=LFCMHYfa!CR=vdTuCT*PmrzBk z8`obBaGZ8yqS``^=$!R__)Vw_cs2Z@2iCPMru~psk^~#8kK|2K^pddU{WyWIhHiuP z+DR3xaYzB>5ktPm*Mje(^)s`R5>Lp@91Tr1@k#cPrNwrAwUTGdPK!CS)otteb$W^E z0HL3BV&~5IKT~t&tij4*-~z~|v(JluTJn#8{B~afdFnQb&iO+)4v8>M9M%E5%q+f1 z&2C%wZdoBKu4i&}tx~bKr^~th{0`%OLz2vHWBr8U8`Cd~pwaRUwTQxM#@g5NcSHlY z58LqG;V)?w4e~Os4}J2^CQ3AQ(S1rXVT!6YxnuNrJEH%y6a@IZXca@d;N54rCg#$= z{ga}+zi&5?!PS=S4!45Y1aM7Gcw3h2xmKyIV%^4VMMD8YI?HL@Ny}t{eM@hfGLEms;CMdZz!!33?j>{ z5uvgku|}B>BVdJbd;Bu9{#rsOPEoq(kKtP?FR9l#qZ6Y|o-w(EJ-Zl+EgHG6qB=-l 
zbXd80c=n7bACkxaRf&W`@5#y1PKXT+UgmW1%1Y2p=zU}CL2rY+f}sIILv80B>iyqiSkPqOQ3jx z&0%iH{)B?KrNj0i%GF>S4w`y&EJhNxqa8Th`OcrpJPsQRiBG0gXOqY*!jbo*-lCp9 z_&qSZuiq)Ma~-dB^fe`8l49||-ue2KuC8{r)qBNyt$0Q}a|;2b>}%x+tx5*QV#54n z>2E|lS`7Gm8rnrngxI0aP9cdLBr;C+$u*xw9Utm}=`!PLSRc4ZzkaZcS)?#palYaw zm#5ve%8iL+Cehl!SaQh`e_GVI^^JOy!||Oj_lL}whWDpowZf^zS*c<%Q2WySVCe;p zl6KNxa@9^R<~~azzDV6AUdddA`@A+k_vAKC4-hs z(C0~Sr~`xBd_A2cf#zjyF@xq>`7B4`CIJL}Hgj`K176q~FK9jnO6TR{0et{bdIDa~ z*UMrA^F&)>K_cMr#F5*7Ce-Pdhb`I2xD(plw+1-1HGFB^ZskSg^Mz``uf5auas=pX zV7DYNTU#L!8N?H@Dow!f?6*6a@J3)3}kxFC6xE@j9c(*ODE^sm) zCSF}u_CcQm&gQG{fI4c>Q}Q@z-+rb`m+Su(a*M`^_MoL*832fxUUj6aAKNS)Ra9n6gB!Mdu_DMP8Tn z;=v$$7y9_H5+D@4`qoF`_FFy3>ifulOs;afGe{(cRgv=y$NRC_F6=Mx+n%k?B_^U& zSHtpa^B@a%I&Uu0`TSB8Cx;&(=L>l2rUC(JKnOD=Xfk<{|*{?<<Ky^APdtm?#pGq!OIFOv?JnliBKC{z z{Q(`evhVpPTE_653bg-IA6G!n&Gk6qjqUX{6dm z6}tT6a#KI-(K1f^bOoq8nP?jOEGl2909WR zq`af4SIO=_H7wBRNpV@X59AC^R&WmmoZ%XqXQLq6#OV|NQVEh{Hac~k`js_8Hlrj> zDcMunRkoi}U)%}D49zO-#K5M$vET-FSYueMt{WG&vPoxCLAx5#M1i+vx0N{>jFaPH z51O;QAvLQ{-a#0NdFDixKZT17dSe^}bH-L?O48=gak3Hb#SIsc4j=SDFUDR)-8o#k zQ+oGClz>;F?`^Av?-GcO-w_MQ#_z=NyWDiIe2^kLsYcc4Wi^WKu1xXpUTV6gMsrp@ z&Nm+5f3w%wcbh=0CiJ=1^_J9W>;7^c4wkz$EGLSR7$6!@WwbrZ4Py4_K(#%alI8#T zqkEWrY+`<#)<=p`WwCWxB3t$^#Q3Liq-2)$e?!jAJ?SR~&SkwxrXJZ09)^kiW2&|9 ziiMxXpqX=}NWD(tTXJFoD6FdVtk~DS?UjO~jJ}ra-??vO!oDASl2Lj_l(#&y-!A)< z^YoARc-y*Wg5(s#Sc$ecNK*7FVuedb^pm$ni|su5XLm3!ETM@iGMkOR0Y~f=pfleJQUB)k2!V**NsDPMj%(a&uJmyB&_2|wAtj0)$SPk1h92^v#_x`ICa zD|=khsscSa>&Fe6>o8oHme+$qgS7yUH0qPjA)KmRb5!Nd?t}y3W(lSNqS4+vJ(0V2LrOaOIWak?g)Acz0|`+!MFv1LMjOI> z2y3)#0lw+JE8}eK>skTsBCdTIT1(=npOv;%dfDG+&cuf}pr4s_$){FlM9FN0rrdXN zs@+_a8ydsTbNGu(HXzNWGR})7BW@|%`R}Fp(9{Ab(zCtgy0PbZrwD+#2oG~p4s!AW z2|hq3i&%)@d6z%YyDe)Ea(SLNuh)JU+r%Y>X_57ZJN39bn~Q9jwo=Lm?RM4HnmDG`v; zH!NqDDo{dT3a8{)Y%74{9@M0U3+L)_=0DHjKjDNDb!cemW?x;&rzm5p%(%K$O`Mjo z*sVI4WI0tm7;IiAXYc!;KTYa`(KU>gG~lu^2uGXPN)@*L&m-EtQF@?}k7qh^)$t|h`Z5wb;WPC 
zwX$gtnoSXTpM|#bUuma09y9l*)T1uU(f9r0X%a!Ug(_rQp+(=qJ$!>w%-4z{cmtZ8I$P*a^wP65CnWq3aw+Z}46eASMJj@%! z$HH7Xv9Lg!@QGmH^e)0aYBRK(%@l-i&ZiX335NTm{_c+rAn}S zd?r#f$qeewvW5(Jkq6p?V$X!bE@+{vS@j{NDOn|rMJl;?Sethq_O}$9esB5iXLeuB z`NVFn8iPF|tw1kje;1Y4S_^nf%fMJ@e@{~gx+t7naI#|CR?sv~Usi=~mgH|%Ul7QJ zyt*I@mY$v|EIM`4ioiS9`)wX*qK;dXhd(SNB40F7?uHj8c}!T(te&ARd`m)lX!*=3 zPU`s({thWb0x&3fNI{GkOOQ`og|u8vTirf2mflgzR6 zTdE}?Nv-r1Q63KeN0tQJCorV+hhyg7q+$VmZtJw>w(8<6s|-(lZVo_p5VN)uHfa#^ zgCrW?7aD@~mQH|Sx}ltQ_d9txg~1rICo`MR!FQ>Sh{Ja_QM+zHP6ZmDxf#1=WV!HF z^$QQ-o-c42o4TW?XEQS*({ZyU%yyo;Ff=cD3}MEW9W?E!P{pQlj2d(jhQ}zlSxkyQ9gEiMZ@<;r2bLDRGT8 zv69uRV8Xvy@aj5-EaKq?YtTG|I!oAiqQxzU4SMu>?q+`>)aKsy31rUd4j+fLZHG8jWOJ#-7F36pdP%^> z6Tv=4foq&e7?QAjjr&CMbCrX-$H39@`|sZZZc{9AHrf)?B4w;gD6IIkCc0SgRa3yP z5|Q(RBROf%)Fd9{U1A-(+e^LV9+$0TPW&^bg?ouUi6-lW9>P9PXg4(VU2s|}V`c_G z4xl7}^jQAEUR)@4_kVr%U`27#tC8PbidgF-?$@(b8y@bM;b^zY4b(xFzjqo+Vo~h( zd`FQnfqIUiTCBQd$#6G;`0~`kErpWDKA#xR9_@EO*r*)@`=`BL;{No3T+qXxMK3ku zBn>!xLZ_S2n3o5!6LyV+^XkOmT${|*mnY#2*(8>T6RmJ`4ZXTo*cDYDCZ;?e!=l}6 zImy!In87Qqzx3{|)k)>qKeLNbJ{`r9+MYO|N4Nz|JXWe|)mv~Vles z{9Iv~^Y*%r}n6L!42GO879Zb`8y6C68DcI)*4v^r{d={xh~kh6USUw3*LI7w%Z+h%Qy( zNjfhn{TDR3-e5YGCOYXaa|QNMdV&+BxU|x0{DOcJv zM?;Bo>`kJ$YMby_&}GF1=P#%lB|ozz4fjnlFJIfwQ}A`JD-F9x&%@QdxXUYw&pgBF z-o{hrq8%LXTl|@$G3N#F)u55@Oz%w;LJ_$Tv!l9vyh)5=_5F zg)6@Kx)W7j?93=w)VwGG+U(FY;Akq``vT}a!~IMxG*kF-Q#dE3Uel9*251}Fo@XmZ zz%Tek_zYY-q3aI6O#H^XRZ9PUVsm5l^*>TeL`sU~RWZ%U%H-`YJ8oYr*EtgsS&41! z>kh^ncQa>!CY%xl8d<9kJFVtb&F|ywvcWX%h!AVTi$}VAy&__HTO*hDfUkeGOMrTJ-v`FDw75Y z?rQ2_&WlK6LSz`pzBcFkOfk`zUO}IfY~7F@B)dDzj?`A z0|a-~!`;YB_rMGDv4eZ2h9c|-q zpq?Dv%<_Cmz!L8Ln@b{EWAS8aL+37b6ljU-y7Maye}JppO(Y!db85pR(zEU4XkGg*iTBhGzbJOQ!}XIznVwc*jMd+h#>K`mX_Q)p? zTqI20qaw4c`Xdh**SMSr4!b)?SgtHLaP1%G6>JsN=*2pkPQcnJz@S{$K;SNSHiYHR zjxR*-rNU42kZ3&aDIBs_$#z(D;#*a0i}|=D4N6b2T--}6xUQ$1dGgN%? 
z+g>jfRFW}=QM$H+Skyi{zm`dKIq1_=o+=y`ecYI|^70H{f%lWb*{ctweq9erTh2lf z{U(umcNLTwH%$k0u!xeBdA^0Mi>))d-)k=~1D7hocM7Sgsaf;$nv*J=v~+ZPw`ZG@ z7}U_w6U&Bj;#FOZP3Z5$HFJsP3el9`yIfjXaaNQWT8=4;hRO@ghPQ2ON5VC$7H=@; zgq}k3VrqD*C_RF;l*d(qBr=;o@r)2>$%}x^cmM7tX~%9g4k2JraS@#_i|z!@Uyvat z*o8YIA+pZJUf46{Pri=t(CXyDTPkT3xY{=L(`SW~4f=*<1ta{caU5u54DOxo9I~Cy z_tQWS0FV9Jtep?AEwZtHT&2Fok%i8LPW;WZkHV-uDw_EG7Z6)0UnhJ)eZZs9TRjqI z%s2?GGsqa^X)R+~^+2VwqaNgx-aoglI^zo1xsf-rUPIQXrj4=uoo%6$Pv8?lh2y7@ z#N5?St%^CdzD;k(32TOISUYtxSmtiCuP>k_(7*Y_?G$=TT*tuAB_zAKN%M<~r!C6~ zMSSy1@vbZib`0np!IsIm%6n_m5ygXV5OwFwr|_?9cQO7lwK$Y(s1J_ofOH&&ztczh zR-f3Y_GiARm(8FG+{DI&k4FU!eXqRenPGeY`S0)q#)wg_13E!-ItqQdAib+IKCZqU zLbPLt3nyPg31BS3k_dXo1C@3-b`RT*0^b7h*}WZ|vLkGu>aXr(9L<6@L4_JcUI^Ys z_nlhr38ZVL4ZX}to^(=BYP{~-r~7y9Vl+^3R6fm1)jqA)a0XdX{g`jphvc->guV6a z^*tFhoraKGgd!)~%?>{P_sSU~W|dRpZ29NWU!Sd7{wree zN4+O})?Fa}&2B@8lL`A!*szlp?|Z#8d)LR7uQpg9grb>0?Bje!kz9Y5gXsLHB3s09 zq!-Irw4)xea}<&He?f`sI+GaHW@ZY8O0j^Wm(>>nWk05Lr1e|0FVoR9yu7@2ktuH_ z?st2EqLg>L`pS&)S@ZDCj} zxy%W313WgmQRUVF-!EMw+PCMgxP$V%61Kz1J`95yaJDbLS7-hjF3+rS6S00tM&s2i zbM&vm3$i7kM~O2Ld#}s5+R;(5R2THRmt5s<#4PM{S=MZ6e0H${e8f8WCjjl#@Ruu=RjP8VeZ8Q*z|2+yPWwbebzV0&w zIZyS#KRg)W(~=fg9Hagti^)B@;vrY93h!F`=%|B;rbKsU*{y?1TNDRDA3B53t7kw?j`rAl1$QQTV%d2uuWZQY3Qc)i+C%} zE&I$&a`v1Xul-}92a-k1%q+&Xj_M+uiVg3Xkc78Qru#JOPW_PU8@7Ne8EiUZ@N}Nd zhaeik_o%(qeGu5t0F6BO4qgKL_br0fr1B=)P*^IZb6{ zDIQbW(W&ycdHC_9JWhxd!Ty6KboFCUXWTf??}n~Pk7O;pwn^s3{l_x-iMnuffo{T z-+t|akkuP&_0i(YU4q(-YeNE!Dk^w?uIrz%qV5DD88nji5?L!>J$hdE(UAi z6Lw_2T#cZW5Pt5fR9=QI>ITc8j`Q`1H|fU(+IhH;O2wLAmhr6qJvZ1YQr?iN%Cs2J zoAowqt9eYywm=wXVFw;D6OGi;VPL%__#0IP6YhOc1B&R`9Vss}jy<@btKutfW+xUh zcV15Q*AUXP4a2gv?Yg?Ute{UeT|636QfMF0ju9wRJIt)E=rd%4pGlYfL;3*C2xx=r zfNsp?sdY&#kdB95k*rt@t_PBTwWbz~thugVl|>zv_G3@wT!X$f#7VRA6ez~~$M}eL zDD0`n>wR527}&jo%BJDw9E?!Ao9jjrSuM$=5jkJ+HLRR;O4MK=CC!{bB z0=`07pJ}GDzr|8c;>{MkSW&$2t4@HDbzN>+Y zat_>+({%bw)b5xXs`D-Is?q36Yy|{ShYJP<$^w#7jO6e`+Q*r(}d1nAZT)eyU@S zL12oDz4(fzZNgYC?>Ug8T%csFU1Rx=8-oo0tNhQOD 
z9^f+>i-5mFmn_ZQ74P)k1($;s6_8PC*P+vY*wWHgIa9L9gP7@%QHreBYU^N^)1L+R z-r5D3klOhYiI`~-z^eN@H#0zw1SepHQ_AuW;cX{h=-p|Rs(RAh2BM$Y`eQp@mq!EY z3zX! z@25cIw!4@-#MMU{`#*|)_oew-#iWWj@B-oneJOy@GxFoIOh3Ty_u;$bRE!`F?{t>G zP2Y2)R$HV;K`;^$hxtzny_8w14{_se5n3T{R1{CiN+^gm%00#v9~pJ;?)AqiDPL}? z9GI-@ivg2&=d%61L@MZM>$hgD z+{cX%=^oSG7S}1G<9mwFgv-}reo@<_z9A}B&-hyZLei~%l5TluK6#=l(SuD)H?q{I z4;G$rHMQxuV{CCpRFkzc=bV?+adoUXyt=S#)gc~116oT3=;N&=+?p}Tx;D*ts<>>f>^O~A3gE2cI2z+#fJ9X_gFIN7J|O&99uPaZ zf12!z7>gG&xHJDv##K<8>m%A+H0B+(_!w!2XeCT-)JEbmvX+ zBK{%pJkXJYG`8ToPGtZPZw>xgD=~SfBc=S+~vPgmKq!341L=%5$(?pz7 zrS;dq>HMofIGxqEKF*^{wAL-O+BtsMG{3T07wuEoiel=^TcdIZOas+IE=I$$klN~M zd0%0}<-`h?OvP;m^JMH-4*Z}Q%D(YbC21xUIw>Wt-uaM^nu^GZ4*jb{U9xF2?=b4p zs23J~9UuJT)4*$sRvmbZU(!tKK)-cS3*e+@RwO$mEGTh|{Qt(4nQra$eQ$mJN6l7A zaOUgtTe((*d%HC;Lz`Tw_*7W<1-YUE*4xX0JGhD%Lbqea93*O zls`2sEo*PjaubW10)G8MAi0I*Xp+f(Y2u&O<9RzO+jk6wgoH7$ZG_2y>e$xccizM( z74##%K`eGYvgerD-j}>rCg+ zMi@UjeS)p=$ZJcy|E=nJZa97jhaeJSoV;1mGZB;QPFm?PF{T zN$t>$ozEi7LP_Pu6=5^PCjIxUq{M#Wa0!W=m@;R49Z^%?*w!}4Y%bIlXW%1dO;u(< z*CH5+59uv5ScC^y9IYPhJ%s!5-a0Xxp~+0AkuP#1oEm+N6z5MvujSKN4DI`|dOa<@ z90JUeso}K&AZE#J8dlvnU*!0Xbsu}==ML(E4#9UqN|sjoCVj^ypF-V_X)cd6flWOcdC;331~KpV}{b_ zCphN;KK4XObw4e7;2|qST;Tn68D{RQh6IHVuIx zmf*=gZi;vJE}NOz%1W-AlLH>z`Jk*&zHBG)h-Vw7UK0)mT{w}b6z#sz^0WT0(Q;I# zANo~MTadkC zvf(9f_b!~}Puy+p!l3kz-w@FnP*#Q-d|z$bT*@sdy5MkN=jQLr{RSS3VO$?mLRKdG zxADkbCA9IHZ3{}o#64AT?EYJ8!jH;d?JDJ`wd8Z(eZqHTWM`rPy9Vx8+mc+WIMD6B zf_rs?&Cf{i{eNOL&Np&yzP)Xxbd9&;O&dg(x!-hr(ZdMO_7O@DMGL*tX0Vqn-XSd$ z2ZHi_M$p8Y!Sv(M<<{mcdPykub-^B=uOg+02q$#BogtjpzRUDUhiheCr3eHgh`38L5f>QlxY0X5r4vfsn=d7_T>FNX*!?t%q2ivu2Pg`#%+=%A4zE3>`)n3JW4 zLxN=vO>9&8@8`bO3|akWwxh;Pg2(GpmHIM;ASudIG>z-k2dz|}YYfiLCkoK1)GkKr z9Zz#R0aYuf#GVei&yMj67&5#2xZ|sz%P4R;2C>|T5Kcdq| z6J(`5!`&v|AM|je1o>;7;rq8eus}xvAn;-JQqcLD1Flmifu{RsG!!6VOb^X_6YbXv zV3uP_vjZ#jP9@_UL?Bs&i%v&lLr}MHC|#01ZzO9GtcvSub}rz!rqId~+0PKiPTCI{ zXsWttjj%zx+Gq&_XgD(7qY+34_wB%y8>s~#FnaD?NvU_miYfg+JXb%y7{<$mcSzu#Hp-(?O*%{<^dE3&G^*5&4T#PThwf{rSfIy>|& 
z$=I{t_YYkefunW^hdn3lT{6)$i^hxuhc(#6F|!D%G3AhA10G*Qm#IU{omb(Qi`?nW z4k5AjwQ}F_f+t5{zgXa(V$;^ld(tFSBTvMa{y&dd%%|A7XuH02`7cEnqwh_fb)E-L zEp@zuY7?1yvC91<+`1tBOci#U;S9*77H}{KhEY+j6(CKPXo3SQ@G$aV!A-_s#CSw3 z_Iwd)aiBi06z)996@O$jDZh!x!-Z-*PS6`*BVrj@D>~jFJMp*fN!75oYp<#BXb6mK zC=G#d+Z}KJ1j2nCQ>y%&M)U6=MhX{sHJXU*p0=}-QXD2h=6FjTf=G~=iNE#og^n`) zbq+3Q{J$}bh+PjA)Ql`3JTnagyET>CYRBMe+APf_kHyoyfrsb&XP^yX+&qcv3W8B+ zN#W6`3hLi^-ZSfHwGDp*1Eck(uO~54kDmT}>w?n8Zrp=PHe)eX_y6@Gg>gSQvO%6k z4B6#y7m?p}1Lx}eE|j4uHM6KFYG&pR@Uvzi50n4Q?IGglr8@NGhv0i;x#1x%Ao&Ll zvX>J3zZZN}52(Y&pqGpa--Md&7F+m1`7}^^xwbi_mPaYK7gG<2{y~|AOTi42_xg02%>PI6n#)XNK^2^+Mg3k?N2 zV{~>%L8<4y|Az&MNNx}x<%sKn@zQyqV*nw=y*;L}S~?#i)M=2H;8yy*e2KUH6T_28 z6o|mglvg7ufcvMTO1>_@W4{YG#aM6s(wg@s7Hhm84;{As5e@er;a0e~ipf48q(-y~ zu2h=OA%=nnBf;7M^L?plu$RKPgs*maQ@NI9+Y{%G6S$O99@4nkNrBF6qFR?(3dn ztmuwT4y=nEZSCX~oTFRC?5*wi1dC*{D0xa9z=t#AuQ9~o)foIX>5j`Dwdp^~U*8AD zAf@?s@}VJU$Km0!Kx@_}Y#A#e&JS~; zsOiNw^@v0}IS}>L7f!kfjmI}SKeBs(>y(Nrpk@uPpz}5Vcpn&X9*tJJ1{VW%VmF1I zers(gxmziNAdTnpKvmw9as-a<)vPi|$@$O+#mW2UNo~opO`wF-bPvzc{eBDu86qM( zL=6_52NN9k-}9Ks3%(dJCw*DJC)@^~b@Xm`?G#jj+AX}L8mxs~(?K*>D-SGIh(73xwU?14qTGQi>9`q`${ed6a9tS>TG_9emwua4*(i*qPhsmXSI3vw%EIenecx) z$m@eRU?WpYQ9e16@?9x=^?Sb%#4KM0xx5HGN6gh)fA=gzYYV)|iTN0B{J)*2TRo{S ztkGv=&%N5L^+&PyQ?b%E{VwsAT@-oNUus{QwyF{%8$|4x)-h?ml;Gih_{tHQ7Te19bYK->9nc#LE&pw_SJ3F zU!=U!Y#hxaY(;Z}WLIk!)m-|4TF*oHn04UEkd4zyKB_A|^y(*5aI7nfoq%Jqq!gM7 z|L;k2_Bi*HT=S=>eltG36Hnw@3JDi5j?JjfKxrjw;5NL%U(#B)j^}eKhXCmEwoMpw z&wRw*+43PSBE8z#?U#3|QcF^_ZOC-Fv#DAfn9Ztt=+1Jl^JsJ|B?09aZz>aA1DpdNHc_hbR#ehjR+_yA<~U>hs4m* z%m~sgASo%`9n#(1-JN%P&i$S9o_qhD&z`+k?X}nQJm08I*;RhKg+be6<|ORn@ZH0+ z&cx8YFE`57YTd~yutWq{qepm-NL8r#gCo&6G&u%s2#c)`?RnUvu}lj%z*M%B zpxO-YT3vmPgNLKcnmF!9xL9g0(&VRKGG!j))v`WLG_m`!EFX`j0o;&ra6J6uoS0tv zm37M9>yMKM2<;cNi3i`AK6o6tGvNrImybJb&E=m>TXh8aMOTg7{j?RkW_C%vB_NM% z9b?t))@KUf8zQzE^8Q+IV^xAxF6!y0rS+T-x~n`=meYHUEGDn2Jm%mS{hP{+t0o9_33&X3sXVv&tIFf+nknz?-@ zJ4ryIC?T*kb;kpE12MQXIgPE=hHRCrE)|ud-e)*AuxbZI?Etg|lnlq4(tOD~l|Ds? 
z0QHywJgjj&PkqBVgBoP!%;^u+LYi%eQli%~dAxyLGzilMhBymIia6R{s90ISF+bCu zZNz@m>}imjL|%PFf>`3{D8Vsd2;;_U_zvLB^lg`fGnYezuouCMp6161g&#-qfQsE0 z^oz6F*gn(s`L&YwiTKOI4?<6eY~g7yQSTuRWl ze69I+ag8s|3D;vr&r9?`h~_9is6n}Lc>s5-U)T_q{cz{9On#XMl-vL*HETI5)HXHQ8sHy`OZAq@olQOUKDSglHoO*R{E<8S_y#_Q~n6G)UcU76-V{Cet zD!1)o_a#tH5``g&4y)1*gkCm6-ic3iHdFdg+jl0jrWv=+TNtKw-7Evi<5cu9{Vu9> z8)7WCK!yCC;jhucY-O1nkU%^~r_RdyCMr!|{eU2H7rOKlRx7vK9?krv>4emzC@wbN z%w!k^tN&AkjzK0?u)gqP5%f|=j-QdSK z>|r%I_e4=_8^V0Pg|15*@ATrKG*V6kWag08Sq@j-gn=6Xc|0K|8BBA_lXTk!D8@4W zQ-K;uOY|m?%i~kR;zmR5n@eZE3FxnP0&#mJA0~GLC9Dgl&80pdGnNc0H?@%BrOS;0X`+;1w{&;uNuVwi@(y3hrf@2eE^={c|Pwi?ByHSr>$K+4sfPYQ#JP5E;p-v zI|oLv49(SEcfUx#ttc9|K7L4>8kgqH*#=%D%ZbSxfDgovX64k7qaM;#Fs}2Xs`S7b z>*5CLW@PN)>Nh0wC}fOeUD#%muO4O1K2;B~D@4m1`oF)R%qF;n+Y14g`%Vu}G7C^l zPf!1BQ%{uDW_Z+Aj%8`GcM^v>ymu(BFHC^*NT4w(LL`J-#w+In;klUizWNI*`oin*J_(o93}? zXvFV?=k{Xh8t=3=u<|13m`@Hi)MyE-#`Id=dg3%4BfpJ*I-LS4$!_;2-8+C(OXG-t zLV9{UVjI~KO-qcfE|Z?8WEykrM-8zn9#$@WzS$y)s>b+3pYD?3c%Q8;*guQMWc7Z{ zJtCa&fsbVmDm0wAFaxY+X<Qe+~pCIvsjy`3_9zQ(!q)_ zw{Z+V%2gr_(WT`uY9M~rlzR`bxH<*ifV6|JJ8=2dkeAy4>Zv~m-M9Q6T1AQf?NS6D zsC3M;os|ru3*3aaPm?jJBRx1^Hl+7GdA^SgB(#aK0PN53G%DYj*|O$Q>eSX{Vij%0 ze9EREN=Ycba1_BN;f$k4RI4#7T`*u2L!Haol*%I_G<5Va^|yKMS=X=H=g(*(B~$!U z9ly>n<3Pd|q@~wG1Ur~qe~{B4kXCW0V*;jyv{rI38nSFJ`he34Gc(U*`7yP8^^G$- zLyOoPGZJ%0+1%&tYM}z%zpT5LWjT|hT1ESq7KBoe%0Nt0ws4@vFJJ&Xi*>+poNhHi zFonW!9|1TX{;SnEX7uTkajLArCA2b!_}y+D#r19KWuSjg$$%3;J8q@S8Uhg{pGvJM zJ+8MqA1!IHl-Cqo{ZyambCE&D5QlPmQy1N4W$YRSVMqshsl@Br zMHU$XWinBV?%%1>h3R1VUks`uD?51d^o&cmOQvw~7sKDma46vUOk^Pdsi5dfF9Em% z@Nh6mV%1DHER9#mkD*!Z2-{~|^`8g=oD&d0^viV?@Vrz-k!HWfXI}v^Ih%-c)KY5* zs$wCaMr%b<+B!oe6>_w?@r#!%USV%F6{+GCv?_5a4VzRqy}#q-c^T^Rs*tc$u82As)){i=K>4F1Yc+bE2h`8W7!9j z8ppTU&${M>(sNU00S}azb!=3H(`9l)|Bi`pdLZl-?i6aL!%fP+dzX|3Cdri9!boYf z*POK4**W}q2^Y}pf!hE0lxD7}y%fnZ&D|0vVJuu57Ei{p=tR7W^oLUo;986hJmYb_o=O0O1d*27%s?>03`pnuSe+5v3#z~cK(Wb?36s+ zJ{!~K1GMptzAEcAiF4$$XA5yJtbL)e>?GC_I}^*-(h!K#4%ag6(=Pwt4K-MiAZX8U 
z3rVXOQiKx{L;HglcgPNDo$b3$GWRwwli;c~jgI!nt{;UjcE}=~Dtb|IQD56RQXk;n z!%1V%KL41KGxGbHZO_Pfke#CkoNLmBqHJ(bfP#wp{BC(ROdW3=!9?7dj~jRB2qj?b zOlB*@o+raTNz0u+Jsxzet!-*Ku(94{<>{$t{1Z0T1NaS2AhJ0gXu{-{zuq1w|L}8g z^JXH8C8deV%I`2S*7f-zf;rcN61rqT)T_+5=6F}0*>aLD`bwf*(rbL~SGa(cPsC@4 zH078$9N})c%V71^L$8SeZs%icgEG%o&4XlNMr1$hodiI%$_;hG%}ttV7AZ_H@*>*L zl6`^VhnKyL6wzN;q@OHyX7CX*vdx_}-?K6wmTP@NBz7bH+M} zH<53{QFHg`Ada))@PkI@D_(5a?!VhD4Nnsjl=&L|EO2Z*S{d009M(6ppMa!}Uf5G< z#?R?TxLYmbhX%knAC}FAbTa1@lXCAe9r7>3y?^A zd_Bv3Gp6b|6gbTl&;_Vt=jNkEl%@|rP%9#uCKnjLq@vO7i<7%1@mz0kubFk)Ot=FQ zIhao7%K!EwJ z%|B64@z-e%x)80`UbQLIg+R?yU=8{8YQ z-H6drV0|Z#_16|_3_Vsh_V)}xV{%bRry6(g!_^Tj!5b0g8z888c7#KqiCa&r1E|!f z{2_w%(XCPh=V{+8)+j?T*6PDhyyIJY%MkWCI$6uybiXQNYK~>4tc&e8Ce7oHa4PH8 z7d|rRPdK-nJv`qP4FpS1^k_2O9beMm!_5G5S}f8Fi-L^SCh>ng6}&zjfww#jx~?Xe zw!k6zq1Wwy!S8YLOJCue$R+OMU6rD~`|7Xr{Y1b5?>8$?W21=k+#ka8<8b{BpR>B{ z=7(yr93`(^Ro)`Tw6s|ONiG**l>zwmjte?|8%d*46EI^DJ8>`AB>ms>;gXg4%{Zw` zl;<6p92RGfLzGQ%)W)r_^9XxV$u4SjY;9|m&QCeXAdh1l^{ARiJKZEvn})Z^id7qf z0m@|Cwx7MDxSGzKPp4ZMp_z#k52!bV6IIx!8P3o=4V$2iEBwxdOKJfr8kO&I_rU_sXrN~~iQSDhWmr2nVwhnw3?aNy~7kOIm<0#$E?A=LB45cgsr`4h|ELwz+!DimI~TCVl=3^99l$lIUOK z{a+F$21_hZ~3F;TkM0G#2t^TU)d~K$mk+3q)bOsao**) zY3EM?jcN7^nwM@wRjEV47}!-r{h7@PXwOtnG(>kKlFYR$t@P<6Bz!^$>ph6NERBCw z-Q_%Fb%4i7Nj~Li%zw|NLA&)Z!F}kVJa{aZx>J6}-g0Z(~ElQ1xRqisot&#S%QADWbV;z(B~PXuxz03jaQ}T3(U;Qyk&f zku6xPKQwY#dD>MpoyO`mIEOYfT;JK|mn$%GkN8Zk7t9f{KItMt66jq)8x{x_ZOPr} z_JK`OM4C7Ft1@LKSLqz39PEX|vt5iI5295UsK%!PSkjU<%zrG{6Za~!3k<&dfX!qr zAZ_RY=7CEYQFsC@1V_HI(dnTP*<-zw8rGwOvJqOCL`{Xv&#_?E-*R!PnVE2wms}(y z=}P~Y)&%OeR}f_$^ZquSpoFRVQlLw-Uvmc6~b6MJJZFOHYU z>pzE7S!G;QDkORrb);*Cv{HavTp6M{VidF2g~#$c>X0D2-UvZ>m>>lKB)5OhZ#UBC z`XQ|hI|8YyJ4<84xc*0NQMSY!3MSl*dgUY7*cRh<6=4Die~+2Vy=-t(XT_ywIS(Qb zuaVnQzNJU?sQ%let~aM1tGilCrD~~&B-pM$x>Pw978b_m`h42S*x2rG%7gsszGCMA zfcG!Hrc52k>^zr9*cBBpG5MKC<1R<14>JiFtNUQP*%sNl#sJ)gQ~rwU{xRn}rKVun zUu*zec%&=#nc!8FScZ-FH(jn-neOdlya#TtJIQ8K_WFQ@Tr8iHCD=x-@qSdivcDKI 
zquA4@E=S|xwsf9=FH16KN?r=*lYSC_pwH;+*CsutrAtER9pk6y-egdw80k5sh$6cF zh1Trq)OLURyDD_@qM373rYc!v+P+QEgHSqmU9tZ;lk+1BK0~pAoGTO_8wa^c9yygW1PLI^9xrMJG<(k?c?_i8x_)h=)d|b<|h{y z;X7@0&QbS*BfGbZ1B1pkKX3{rEgx?c5S%;?H~`uGvU=z>kF7Qm-TMXwlqfydFnc&N z0Js~Ce}^b)g}%8Nm#@Y-){1OwE&20(&uP%j%OHk!mfUY0R?T#>U+IN_&rmUySnip$ zNKVi|1u${a)T$PCuc<_>DcZlkL>IeN|2ual_xIlmJ4a`evQNqk)5netk5Q2QG?Xx{ z$_+lCD%Dv}Rd4LXjjAVN-hX)>(4DMfyaGKoQQ3JxQx}Liv(D@hO=(D=5wFhJWu6D@ z8}pO4L`83>22NR`{CM6Mc9ar%P<0-jo(ygLA$JeMR$|c#(O$Ko3vsSN*c+8K0>afG z!bXkiN>Had_B6i5bIg-Ay0ZN_izrWo0LE$bao2VP*=&`Sp?DW}t?XW(cJ4_3&ZC2> zWB~M{w=UYH2P7B%&|L@q($BVZct5zJW|e*-3SPD##x7Xr@=~;9|LjxFZQre{mXN#w zwQP8@IWJneQAl!Mb=YlwP?R-wib+oui1I{7qwx5a`s2pVdqKk$yNa;8w7uig)lrHpUAMPx?VIW z2{RzUtOmHeYH2AcQ4&;8Hc$ZD!sjUGX1uw@4eZl=6N2?}o;Xzov93UvGL7?8iTeI4 zeCr)Y%b!c}$z03T=h@7PORI7uoHQ>va&tcCtK|=uwXYTOpCT4WogjMaivx}!%G$>X zLHeqsO`qp}nZDAGln^fb<#+b0Jw<=T%_f7x#{B`HSluD%e5+D!G8>W~zP`qAsW8NO zk*?~AQ-$dyaEj>qq&w}Urnt0sR?9=uWa2d&&{lo@9nS}^a`Jg8fUoyms=q+<{D@MH zz*wvki**ioOmu+y`(hEBv$-_W`^eW-|GZh70-plsVNURG@^Z3o_||weSD6%P4u1G|u#ZIE9cbru{`&U~{n6U16M4GRe&=tnhW?A6QBX z6T5RU%P;2Ll+0948T_gyQ(i4E9|bO@2R1()qn$aLw%F~+P3DR)`$JhZQEsdwHk9+D z@Je}h0~YB5Ha?k=Y>zw06PS!Nd!^d{t}$Vh$nVJ8(l;ll#^|=JBe@JiWz8{*R?ivMyd>yAvR1NpI5EkH$wo z!f76ycF91wBcx()a-X)ZIz&%1gIVS#B;&;%cqp2lI{7ZY$M@dZtVTbDs}H8;7>nHs z%xY=<^b;^c&Jc@~xcpZD& zP8Ixp1M$=rSXH*JB?JfWLR+&~yGzo<=#@$he3TkCx=nv%_CQi@j%iftDmhwQE>4D= z`i)1I=2i6x8>7FC5Ns;_9ILoKup3?95>>#vpXXAp$_=iHi(j9n^f{Yjm|ATU7I{d2 z()T#tdP3Z~u*)8bwtIRuvvepaV_|&Cn|d?$#M*s3oxI3erK2wv3!dD$f4b)W7!DNy zt)hFt)0TzxiNsp}LsvWzfv+#Q9eIl7k#^L)6Nn$U|Mv4seqruDbZ0(XMG)z)ZG}kQ6W>!C%-I$B4%Rkx~PcI%x_azTlzzBpk7+ z9%cDFj&x}aMF)_T2^_yOh)G+}qO>`9R=d1HnR&E--@m_7uQi&3N`nSfIfpl;;1&pZ zS`BZC#Id0&^F9h}tyqt_muDKRd)Ul(8XWoh>?Fo|FNuX`ZYD4(JEaGy|=Dt%rstufJU=434XAQ&p;4OGtJa&LRPNBLIQo^ig8^v z<%BZ4mMFN|SsBm!fuRd>JHN$s5`Z&X9M^6$|4kj}hIpyJ`OgZLm zdEd0Ookq2^5$b5@q)rXTnm{)n>Qi*U5W5&vV-e9u2dA$;le=!I#fv0Ft##ah_!DHY zKTIilW+iaHpF)p@688!0oW$g9cXd{4sC{tRIKmmgbij+F%NiSrdSX~Iv8z_BW_paO 
zRBhmk7vMRe^--_<*pr@B#vbMRt@^im@^367ZDnE4_-|D-6l3{NOkc~4ys%_&U67lr zw+l2~J*|*uO6oJ6E-dqvg+U6Y&IuEfuJ!B8*tUZh4 zj>JP#=1$nOb@eqZ2}uWZ_~<{B5anrU#N=uppFc~pZ&gq6N|HTW%HWvJX_5~7Qxf83 zb*QbTB`g_R`Q9DSH1)2U?eU;!mA!PrLg45Dm%g0|j+{iWFl~bjTwK%%PVh?hT?7!JaM;V8| zzQWHM4p(=pz&rY$GB6CfsmMO>za5`&?-16SehJY%QYtaJTWC*NESDv21Z+4zI@pv7Rld>bb{_>0iqo{+uLl*TcJp`lz=w{vRH__^^aqhy0(hsI%>b$lL-)1RZ}Es$PN$eh7)_*} z;j0uJC?M;?S%C!(B$R`M4@CjJIeqdUHJ|HGMEww z=K!E7zrH*b@aqr}dm$1|z5a^YWSQWsQiyRy-f$@ zdb#(=34P`eAb;0}8U(~6Ha^K9R4q;2c!_OpGhe&~!4$x#yXBqGdfq?)G=b3?Y8|4u zdcVpM`)+jnyrB>)faL-J^M}OHSkN>0dSlZ8x`G0=DUn5gJ8t~(%EImlXPOfrZ2@I@ zU2VYA^W78c@6N4&O-JCgBD$rxg>x<~YQysH#)7Pu&uSsNB)i_Hmha`4VXIXa|>OA5k%i$jwm-?N7ZRQc9Irs9ChY)ZQL0$xXUs3!c}>*_|;=kdX|n-3AODgR4DxXQtSQMXykJ@}rbqwP*jBjxJ+vlqf|u87rDf8mVLSuy6l zv}u;X(y9olp|a?&Od7*OO~KUJ@}J_Tb&EUg#5nd^uoqoQ&ZGB0Kj38#yygL}(@K=# zjXx@?-UJ3h$ltMYa`i4NR%%2t8F71&r)8F5oc$N6?q*0!OWH5(S6FE^{%eC|=6Y4r zs3tPp4*Ba3UYqr{1%IybiG0hKG!zty*qF<6Lrid(CzN8yf5QaXIu76wg{exd9N!v< zf4L@NuYrWY9>6RzEIOiGX{hklf&7yPdgr;&c4vcTDr><0cu@ZYP5Y;xfC_F2UgtVr z&L=hiNf2{E=wYgC*-pNCW>8mYo@AUbKn)m z$Z_iltI3q5(CwiSgUfg(Cg01C`FH6N_7DJN7uEPN$!JQ{D(UtQf0LA0JEP(8B+^D? 
zgJoWl&Zh~uRWGBTB{c}jdeck6!w^>{X{uu2O~h=dh#V2)w_5- znPJ#-2YC?yk`FU9>^AQ8<>(J)DJqyq!WK`G?p*bhZi^KZ07l5?()_k zu9t>=1Jsq_`a8d+7}>GU8u$rzesbY+_muT#$Xk{Qcj|P;Y$tK)$;OgqhAI3h3uiL0 zY+nEF(^HB%R~mR^3h{$OdO{|)5{Kcj<#rmn$t=%{ewzQVs@4KgZ;mvJLmfo=pUB=E zbFE8lXdMhTciavYCow3#LC^+;VCIf#SnFtre&k7=V2_nG74%;B)vzYi_5^5=WwjV5 zh|lTDC>m4&-@#Qd?FR)L+Fed3KK~tdL75Ay5b(6)X(>BN1n51PHN^7}bD;lqxD@X& zcAzj~g=Qp+N9lGYlui5gEv@q3M47v%B7_dja*R^>`11`yv92P4Botvks(XCJdL3(% z*UZV>U%$-n|W1%Mpa+cuDO20uf6^JtrR zYRSDnIQEu&h+Z=xM*-Z9==O8a91?+I}e+raD?L6N-)Ob}`ymuS*^5Tjz2LyBN-U_7Y6sLJHEK zIIA{HEbAWb!6(yE54^{t3k$SklM{cv05?x}1qb>$Mzi}qZG=B#v%2mo1#Fa5;~jix zC7gi1t4L~Yn0sUEGi(!iOoa+M09wxvs|>~BbG~{wRl|rpShe~}<;x^q%84~xYu7y9 zGTV7pal|0@0-0u;)T;7r#*8Rp{Q+_dAzje}GVcYs=uhsVxX#&ZtWms3TdS+I9)}UH z?GRyIe|~fB9%UWPOzbVVi!OpErDZmUW?v_gs=KLR^3f-<>E_c-Mv;rBN4DF`qemId z091@KFm(7IdCN5Ef9EY>)|_FLaXxn+JW2bwneA6N3)bS@5SI9OgIb>8(m?}zm;zY_ zMUk@EvfXf_I?$3WU@Ioczzxl8x24J8z0cFL-o%S^ht0p- z;xD?!;39HhK-q}y8ezrx9)sX@igGt!OX%zh;j3K?#TC`oD`cUKo>C}KAsBfqqwO_$ zrPn+;#V+HwCKJ+P-&po z?Dkkopfe}Aip~<698D02N{wb#Qv+gM{y<0l5rxJ8l3g20oIqKIhL7gzQD1+3WRg_< z^2{muK=5P1U>(O^a?BQmRjt9(G1x_^WK98Thru*QN~&&hTaJ7uGrj`g%*GIyEQ6K$ z^XOj(Dxogn?Z?m?=Ta68T?X@ONJ~p!`$WUF>ptG_caxN~K3}_g25{JN9@{ABeCyHV zDnI-H66~7VC{`&3_$$}ROA_>W4GE>@53?Oo!1COSBO={0UC!7#=FxJyeLtJhBrBu$ zM9+7-+^)7T+KT>{5m#ptbv@M_8g%IOVUn$N4CD%XE=)|g$W++M>wJXFch(UvwWL6! 
z0iaz(>%KQhAQe znXEh|mO!H>OXPdn&SjRajd_bSn9kS}jVHd`zr4DzoNW+fL6%=DapT81D@*d+{Y7~f zQjHcq_cSbW)hCX3`P8%L{dO{*-2Iq#SHwF^!0o9tp|KC|P-yAFTXUHjnN#v=o@p$^ z@ShO%B~PGc9N$y{3pY%R2a!(Jh=1-eWg^W?4mYhO;rGrjTm=|8|};3IWOvU(~wk*%dgNdEedQ)XvE)I{Hm-jQ3PRC z=l6`lIT*=AryIRrzFVluP@A+h$;ILt6nHkSGamt{+SIF0fw$LrsPY`wIGAjcwP-Af zM!)fd4T#UyyAs}|IGWJ5m{FmbGr2M|lB65T$yqs`fy}M?{!ob6*c9ohT66`D=ZHY_ zY{clag8f3eHV;66<5I*mE;Sjk?ycc;&gfSxu#DB!^~-njdXoQ`qwE>?fH$k-2YkGl z6%(^_pK@V=b803Q-6VwAH2ZpD&wtv^ePGR6V~LuWz#%ULhHWahKt9Fhd0wq~9K9vm z+sz(daw#=}ru(e$NxROM7^JCwD42Nu*uhhDjaI&YbuyA5~~7R{7j+ zblVo@xQ@<|=bDfi*A8K{K-`MsETgSzRBfanQ9zqxm@SCsEnjQSbH2WjH_n`G5X@nc zR-LCcpe&7DIxci);ovI$046V2{bUZ!OP9@lF7kN2c78k6yy*59Dx0fyK*A|XP2Z)^ z)k5_}4u%+eL;?jLjt!up$*a_=*WF%knc{B(Z?fVqAjG@}r}Gi~CwS-43o*Yrk8gT8 zb&Z4pHMHjXS+t2IJ2NUp#X?-mi7Nckdb~;x&%bzvZbC?hyPb0k+{G^xYd<|~ zlDJ3NsAvdlS)UMziWn1w?v}!sWB5cS4HLFoK1*sbeS_rnPQrO}{%fIbfL%3~ypF+> zS_S8~R|b*?%+K5C5MBZQ*=LjypXK{ALb%LJYfF<8`r_0@ZQ$F=K+RVtcPVsj*^RKm zT;SC4K7j1&Z$IB1l-=@Nu0KfIZf&y5&r9b^eQc?s=$9+~`o6k(y&$P9z=6mnz$SXb zuV!fnE6$+O4yFj19U#bQLJ~!3=BHpsl#VSFI+U?T!ymgbjW?F7Ksb^GG+7VY!BZ(} zfj;vU<#xF({M7*p<6xI|J!$ytN{q(d8PfICSMl^TT_B0Idr>FMKQT8ZC9*&@TODdK zErz^P{2s2};>(wV)-E197rY?#f>&nZo@_Bf4x$+@oTHBT;uYYw;i8!?u&%F)tbV>5 zLWvkZB$Cqt^R_EI*=o68#RN2>$nimB+cwi?p)vB8R0LuJOG_SfbrF%(IJa(S>xlux$EY(Tz5xKpF-gze7-G-*=g%^-%V4B%*CZ!a3L9D{7`_urRDgJOMOZs$=hu3Y>4$9)r|R1)+WRbFSG-~7Z~ z&wFlQ4ro&EsCv9H{@YX3YlI1uNv?y40l$QgL<#Z@ryfgBLusjzJMndO{o=8I85lUw zkVwTC!sb=0?^$2z3V~HPCV&~Q`r;rHKr5-Cy-dMcV5-z)XD=RR^=VoOdYzUAh=u6cD?ZD=SrfK%y7{mAQ*f3I!n3V6 z-~48EG{O&-HY9hN;DFgSsKpXD{UfCYFn8>rLe3?z$Cf3)-unP9s-?L;e^`sR0w&IF zz9CXZRK<;315iLqpIXM~mTZ*5fK6BRA96ont!7 zery_E%ApS~eV5VrdWpjyTK8n@IhVd<$KBlsB;L&UXqK<2){!rJfunkp%V$85V-nl} z>`YShl7cmW?Q;$R8&WKlW9^+6-3o_{@lGRy#6K63uu0<)}6f;LlPx)0d7$d(n!Diux#s zchth_uAQtkM9*B&&m9l%P2K4}8~hkrL1zCpG?lQXN6^oMVRZqqI|K+pKHkj|g9~Q> ztgNR_Hc>uEKR|TX+>RgMKqE9CB_|uxr&|hb)g*HDl0FY3vu61ONIm^gGYX9(k| zVx07QCZcd+Y;kc=q~<_ZvGZcx?RNhw_U>9Zen@i@<(HwU5MU4h%^qVl6+XvVCJpYE 
z-*pY|au1tOk=d2Rq%L5@1!}DSDteI$a8=FL1#K$hCw7r`u&%bvc)$_Q0VMb7xkjql%#>@g(k20_29?02aD z>~?05WlJnXkw1o3F<_?F)ZG!)Ti$|~Hv$BsBjuGT98?0?Ndff%Sz0ojqQ8|N9Q)L&Tla)DC79!tdWnBb>mw3KeS)V2rCC3 zOQP@pNCz$si{fZ_qyjDHdk&3s6@<-hbLxoN(ad9-8pAQ!uA{V08Dsxm(O#*J|KfAG zCh&e{w)#;6`85@}OsSlLXw-hT*WKq`8Ha>g$BbS-1nTlKC@|~=AUVEM9hlxfg)G4b$q&C?42!kz zrny)Dw=)fok^)K>H);YYiZvzX3zK000K~$+G-0P)l2>xmCRuSKTa#8m#(O14Oz0Hy`N zk6~(#BcNTQ4?FL4NhU=*L5p2~dUdF(0lQ=S+=V<;`P%E2!4A-l`Sl`ZZTy_l2@r6c zmVf`}WLIKmecO43&HHBz-H)Ij>WJq(Uc<7e3$ET&6U*@pp++WHhflB4ufMVjE`9%_ zoUNAGx}q%L^aOU$PD;=6$Y(B5YUuI$$FA{rEfPmEY1k`A^cf|x$rpo_H9E(2D(w-P zKJFlkwsbknv=K@r^ z#QF7J{-V(Sv1tj0u*wU+bSQhtI49VT{_aN@1ymQ3`MtCF6If$0r}$8jfB=(rHcv+*;p>Z1hxC5RuA_1`e^@m1O!Iw2zxF zwV=P~l3DkZOq!-Qfo7JQd^E-y*Es;#Jg=}EMN^lJpNY!TR(#FPJ(WV~0WWl4;T%O~ zg-x9;egJxArwGR+#Cxy99{Q&WE@=1igA`8FXZV?0dP&h(sBwb8yScn@RA9GVSIDRm zr^4p;B^a*B*@CU8C1|{3R5L_1Ubn0U>|}^293;fwa?2O|J#o0$BKQ!ejd8Q)g4DVtJ%)_1w<|(~~HiJ1&mQLt=^j42Lz1`p7IFK+> ze|K*Eo?yNAc?$>I7e)UQeAh@~JcQC+HIL&1Ix@D_`ZMMGRQ6cbQ?<91T3e+y?y`7$ z*~>d|fI|Dlg#jy%_+88)l9yKNRyjn}vy0uDOH}+0!M4M*eR7(u0s}BUSW9XGCpeY( zfDQ=R$46v9R==E2i#=Z&v0u6DzSkwWsWqzfh%B}hG2qL5wRDRXt4?BKzz$$2{t1o; zEdMYopto}0lI4tx`=#o2K;m)T zVQ3|9ycTPgXR}I{*Ul{-#MO+ajm)Yrd4{@2hyL28v@8J=92FNgX+DNq%K0u`J#WJYOqol3-9Y&ptBgH^lKQt0tSv}mpk7Q#_Vj|wshZ;+2 zhm%8m)I1KD!@Y)gQBKSX%ltAoe=S4vydVFYs#H2SQ(IBHmHqf&Gy+v^1VRzMR+=K$ zw^vbGA)l+5;DSQu*a16+LG|Phii!Zo3&|oI$g{|In|uqrg(f`>mcf)95s;L%tKPfj z$feBCeDb=7b%M#^&=dNO6++CC#Mj8se|P2@m*k)|m%V#`wKn=D^=gO^lJ)kLQjR8F?>F>>#X#K%c5os!ms_ger!Yx}Hij z1G2Nr;{~Rs8WH2>W}jlZgqjo@v+%;G1G6uFjz8!Qpi8KKZjz2p{xVvAQ%!+|rqT4i zGrQK!m!a#5LGQHr=@n~HQCP$AUkrw+`Y)DXPJq?YF ziAtQ=V!J;7Rlw0=nq2}HfA5%oJx>KNMWheP@3PP8PL{U*temryA2UFtWFGz*rxdps zh*)c2wl@e-ZzT*jx{PK$)dJ3=R&ulB=}_v3xnEFran929CRRixSrfbr2+#@Q}8iAXTgDq;+OSq6Vq; zJTcMq1BsW)@(X~m>Fa;=m{TX|J3qO7`y|S<+*{b7%{>XpF$4f^7L4=B(1(%%dDI1j zy=B4Ggs{17Ew;DMQ3GQE!GI+sZeaq5EYX=^Z@XWUUSe^vO3}#asi4p2Nedg}=-oLN zORLUkj&014YaKe~S43D9bauh4(lBg@wuM 
zBPBUHi3gE|@r6K(CqATRm3ObkVGxQ|zpi{L?MS70^>{6U<#Jx9us^%=U3P|u>3}&{ zmh2&q6biz#bcKTh8{P%RoagO)AuK|F7k;)%qF6U~tOh`j!TRkhnp4obIXWttUHhk))Vh9vR|>7dCrMK-q|df~m6oDp4tkrR{eSV1{9CpK!K5 z<-+~I4m%PPM^|!I{-Q77c-N!9Vg4B+SK{H$IQq@T>Q_36x@Co0qwZ~IRRg!uhVaw3 zibg6W0+O{m%xfL$hAnT8Q?NJebJ{g;k&+G{wbamxu^L;v#o6x9|Ia%>dow0xjHY4# zN|H1628n{S;L<~<-{U>(dF8~0U=rBegNhSV=$uaHE$8w7)7E!~v;BU5YZWb8R28w> zs;WIBW+-jc2T?^)yRmod5u-JWptMGX7%hrgvG)umwf83Ws6Asm>F@e|$Mamz`RASM zdM8(2=j6_Pz0bMNdA%0@kxu;(d(NKySx&V!_^uC9-G?!IWG5v=)#Wmb30`iLcA|HRaspE#RTW?Jr( zTJt-eFT3Q3C$mf(sQXDO&)!|-7VniFGu2D{@;EKI!CPX_R?}OG8cWsccO%jyWw5e) z%m7ND0oYDgIjLyZojBQ?=`Au6{WYI>I!q5#^pI+AY2?e_5GA8eqb(L0;f@=keb@w( zY(-%C<{_S^-%Pd)O$~p$4z-MFx-Pjyq63K-*d}V5|)r zsd&mYwVFT!SfpZnvV_7}h`qyz@DijbsZDg1Sq}H5B*gnv4cbKo3Jbc#p?a-c<8iSK zk4NS|&%DZix6|TtgtaTSr8d--j$0? zl#Dq9JRK^K{-->j6X&Tkd-W)9de6*Oc=M1*an^=us8A7ejVvpzfHD-^9-4WwTO%~^ z{vJE}ru8oZ)&;WeO9NDSqQOHhW_5EdJs-Pi-TiO7qg6efp|IFRp#RAA1yF;gSGllYc5*Av}BAq{ zsL5%bsKwt$hAhf@8fzcrfs+pvYg;L59_XspwQgcDJ-u`LC_C5cfojNNucp+gPu^o= zy0Nsg5!PdomzA>wPXJ;#p_ZrqVzjKLZRHzv@zleMcLotey(HEgS#+_>U$xvCO5ey(Pc9_dy&f@p{Y>cmdqPEDpIPM8Q#GsCZq9}jx_$LF znVJb26F=0qXg|~RkXVIlD3*PBnv|{R-b&#tP!c zyFCWql!PzHT9Ut&w&iz4@^; z@CB$zL6bu*xsIM2&h9V&uHz$2Mq+mRIMwxPC5@0I8PD8|J8fS{+eQ?j8#f4*t{f|A z69DzT(BSaju6HNA8>+CSew0}&2hC63BD?wQN=qwxbl*Cse#R@Ox|R^{0bCOPXfaYV zOYw6{5C;1D1-Uzvms(D>T;82Cp@-UK@I^CKe+j0R8Jt}VwKyL;vT+b%waCu%&!ud2 zbz!&_m>NlcgFV=X-*sPAV)DbRTbfu01tx+y=LesidDK5oJ`YyUXAT0>@ zAW_8@XuOzwuMM7@Ba{rPl{lhTp;2ViC=e7>kGf`&KyH!&p~`qtKv%>fE$4gvbf`l}(0d6MZFZ2bQYf=}b8aq#DId(T`PcJyz^yG=lXKSs zX-8RZ!J3LPiS99xDFuNMru}5(BR?1zLWUCIfPk4?aEzl&`~7@#i&fc>ms8T&Q9Ab(x( zof5v!`M4mrUon>_zyE-CkMJ2j(T;E{d3t9(n&pleL`#@A7w+wV6cZQDvfVr>KCLzF zT2=3}M8J{39%({_C4Z_)oTq~ko%q9b#{U7@ePq+AU!!3pyqF|ZP|{c#Cf4&rBYJoE zYtsDSj{uonZ7E}(&}4b4$Zi+dS=v;sybx%6DzRavkd<2twsA5RhPuYl!PF1=^tA@? 
zsh|*h-9($yO$wZ(gOgOBo^$Q9w&}kJ)Q!mu!TPNw)W<|HIX-u z2MHWV^(4_e0u@SW4_l`W=KFcOBQXL;9>?GE^R=+b%0qb`1W{1iGv^v2#VK`4B9FAe zFK`Gu%FCU}vD8o!#6u7Bz$^`v8`L#LyVKOwD%&7e%XO|;8KQgrl3Cm)RtxcYfbJt6 z3nF#(FI>G8Ge@ic!F{s5eRRmsB`awg@c!uo>up=y-PPg(2sI_b`i@S=Z43LPm)|4M zoX?|Hvs0=dj!(sDU9gBM({LaKBy{Jw44=n+vh# zpc}F#hMap@l|rsTcRmabkFJ`J)v5K5|8vB1kVZc;0U-U%p1)fD#ZLm2e3l2GB7zi| z4XS>TEa@L5I-T^AOFdY3ICY>=z3Nv)$TnQ_DB_a6*5xo^s#=DRB_?gGa_O}wI}7FQ zVAL1F5L_{lD^T8?kZFKBmIa2Qh6zJX8I^R?Zt?iw=1LOPNV`%Z)%4Gw0t2(74$YKR zOOmOAs)6URTltq&&s=l!`lWNMuDb@N8JsCNN!L81XkN>+6^C-SE?Y$OyeZYlI7StJ zpbtUSj#H+N$qkyN z@j-z#v@~}I_|(SxqTA#XRpS|#&6ZnH_)zbBOF#g6uXT-Oz;n4E>SCz3^Uq+;5ZB{> z8G@-P?AFCcL5(Co(z38rr4{7>5}y@ShOJjkDvJ{b)F#_v@D@QwZ?8nPkB5sc8;!^W|Bd9b7E9QUnJj2iv8Q`9+rr$d&)Ahb#I@V!MV*E#DiGtk zc`gcFO>NX=%Y|@TA*S@uei{>^;8AZV#<$rWit~AaU71G60_HYZrRA?Ko{`VuruBz` zyV(ZVHN>r1nZ*Wy&WXMd)IFm+J7((XKL)uMiNeI+iz-tzKA6-<3ydCm;uhirme(&} zpNBior}Q{R$DH5DQSZQEgzS3zcOx=wh;XaVG&(FVh_PfSCcizBB_;3P)nXIIt?an@ zHn57g@%weIof2i+0%KUh{`&rCnQv%ylU1fMcHhWPqKx6hkO+FG-{WH0h+Qc4%y-`# z%u+S)Ge5=Sy>$MK73}uXa^?PF++|^vJE1|=aI)A-T!F9$P%kX_B>MT;b}XU7P*!?U zPosZH7|f9_0kq7E04OzaOCJgh@4TPJ ze5b|EUe&4b1siw_)Oc}hh$q`x09~~lkOX`hQ%|eBJZC|;k0}Ricz*w~k@{aNey5)` zl?((3WasXd=<8Y`DYsU zBWI?}JnTQ<2MbED&%Ii$%931`KlzKQQutEY>ON{xo^YgNnc4wtdA4q+qN_|G17WRO zjNBS1$tSfkwTQdtyp|QX`C*yc+JCg>yd{lfvklrh`3uY zh5*m@%>7wH>Fga1sAuBrW%gn%#_|u1x0>CQb-!3#t3V~UZ6uPcXtq-Xt30sF>e{#% zYrDSVO3L9_!OFJ2qBxK=?y`lA!U+{h7LmNXJKS0KP(>a2MZTj3zzG(IoXRpFyyE;m z7m>MAK4Q{_w{R`|_0K~-O0%GUo&;g7Hoq9#d(2*rMy|}e?MCfoyk}du7$;k@iNlYf z`<|`&u_$tJ81K6MiTa)1qE1#I_aVgxmecN&C-&o#h z3d7!m&Ww5SrxKHCZ(FtF71}Fy_linBU3!z|(9wzCLIuthGxY>2*%{~k!)i%33hBwJ znc-EPhkHPBcA?jtbDQHyV~NXi3})VwwWKbmb@qR*!xdY?Zj&+^$8-8!e5UAa;R^JOn2ugDjH0y6 zBxbZgCv#w#db3U|dA~b=dcQ`bslvVS0E~bJ;MnTQT3^h50joys4<; z<$Mc=F@tVrN(ki4Gx(SWA0dU9si@#Yd}Rl{b;iw?%U~pX4t((oOGa!5d@A#E)SJAi z_e`TU^{rJ5jd+=CXi_)bBfKtVcQg%IdFDyi_ufpP-lux;7Vi3aNUuYeYMTSg z;5sy{X~ow77{Su%R@k*-Q;e3An?uJHQR4;ZV{yaoCMz03H>KUtau-4(IbR` 
zd?FBsrjDZ6Qk0`v6AF8vh3x|)9!ZTIZ@(IN1--NK47018Tkcw3ruQy8O}f&UoSeny z>$>JH;u=eWNs^i_cU{C(pAzr=Pv@59vyenpnt8;#Pl;dxO=Neyh&I~ykD(xB7DuR7)4kT zaw$2B{-jxbNfW@=TTy`;zF*w|;LAdvmqq!li=m7@jPvNV`7a)7bRM+=XK0&$)4g-j zm>?&&m=Bx9)7oL&Vp3xVoz6c{U$u2C{o2c{;F7wGmltrIB@}LMt)F&vibx&vHA1Eb z55o{T$>{00*nip1GsvLl3?jZ4FjEiep!edZofsVC>VP=YNndJ+K+Dy2k7k$hN4H-R zI|*i`96eAL`@d&`Rf@-WYE=lEw~aT<5X*^`i-GiZ7c_O_t=l3|w$7n7@7@fu1@{t2 zg|Z;B%@L8KRY@NreYv@vUAY#;P0|Y>Q7qIVCQ4dmH*$aWXXRm;-r$$<0zd7($Qk4N z2d5dPH+N7XXsc^#zI}cXIqysSNu0F3wTGDYPc?(Y%6!Eo?J`gvrQ@d86`|e1m!`mk z1-V?hDldz#n3r9B;r7DUZuf|S)9GxyN&uk4~hQM_zoY`{KuI;Q?m^&;-a9WKys^Lu)}k}of-`(Q|ebb zu-wTpug9)b-KDhzwX?Gt3%45>-Mvp_L+l`MY4h=(-T%hLJ3_xc+xpfkgDholq!)>W zb$XFVP@Mk~aA7!IqoP7GpA58@Cg+O$Ev!EHDkH6}GFN%%$h z1)I%yBQJ-1tzdV)fs5S=RnG&Li!O_5->uSwzQ;}tr`dJ8SeBl+4kG=?)?jdO@M{#| zyHMxeQYD6&|JHwOxO3*aun4%N-M@p#aj|w(Vc6gkUv$<&Um)8(jFkWAhyt^iV)N5? z+9Vg?cYA1G@(Pn*e|!{Z(R}?!@KNWsg?$SZSp$VBQR&$DrnPfRWhccAeeG0yRto(3 zd+vFo)%&{)wzxT$WX_;u4%m7Gu&_ve0mo2VJz%L(Cgu=?X<{7KtJe6@E3UsySCw0m zjSY`6D5CW3LISESBLk33qxRz^K!YlU%eW^Z&_&$tvo5&e_Kht(0PpAHyL_z`?%G~G zmYoT4bVWcnv@HnYi`4T*sFWuql%EFo8SHTT4Q}p`G1r&QQSaHtlQRo zlgI2}lRV*$>snJe#O)@@5C0^OPORa{ljwa(nE=PvM@L7Cd*ud(zAG~q-agIMd5{N- zq;*u&ywNEISZcvUDbb`;k?ca1gz4MLbw~VB2ZBB7@#2#C$2{r&{<(ho$TaRn((8=C zks+pC_=b*|#`uv9a}MzfiAjpr(#eA*$qdgAM`p;XPZ8x`&h2rrGzv^gB|P~hpL{<9(Ck;&b$|UxXHo+Huy#UzMd;wuF1rKt6)Q)2O+M%V6V2?{s&wGN-Z}Tqz18W-9Mx-Pt27$v zlOaC|>x{Kjy8^=btZ(sCmKcfT@0cv5P;wvdYsohP@omD3q@vIwIHF8A@U8m1TaV@X z9ze*bfvc$Do!EO>?yZP*Kz)xkZ42Pb#~M}+1&*d0Z)2%;hlG_{6<+8gy}<{|TT)WR zvVoI1Br@*SRrl=596Bk2QO2o7bRZiMs{PHi_~=gQZ->ByU9%lgFiWhB=HS%%Ba@#Y z;~ZNK4ybFQeX}rZtk6rc`Kzv3Z1pLFzP|6%p8^7+qOwi;KjFwUnoh}j>~4Pxi7hZ> z&;l_uu;Oj|%}CQIYc!csQ7=nWGZ9a^N}8lVi`!rvqH*BWvxH^<(Nv9JkqdsEA-Ul|-Y}Bu zx2ej%o+JNWz2y6*zFBqs`5FfFbfGS(%+Z3PS@AF4&o}$eI5&S70A+7?txqVO8P$6j zocYvD({BMbG;l`H#alX(szN&@p1kDNG1;Q-QEg;54wwk4*C%49+O@vBd=gr4yS(S# z`fwl}PQ4OKBQR$k_%T)l_r<6}!$=Jpl1&=LAvdwOfk|`3FL%)~v#(cDH9AMx2J0EX 
za5*VzTWaONgiS4KIji?E4alVHXck{+6Baw#4}T7g=7`r7QtP^5S+V)fvb)T%A4WyB zD7NqbDD4e}amf2WTFJJbhn~Unp+Mj}y}7KScIQ=}yuLbPhA$n+mA1Z9f?rn;d?1iq zz&B^_NpAh7OBl)>jdWfm)(WKiT@}dCYm>)SeTS=^rqM;OI0Jjy8SS2L1Ijj2Bd$$k z{ZM1$H0fChATs6(qU66f4nA3WWk-cRUycfr!R%S%OYp(_zE`J>Ey)TsLO+xO;S*Ig zkZ^aPC6sb`Kt0`J7!~()4y0|ZbKz?SwYsXNSFrXd-Z*%svrzs@2G{5 z;LG6dNLo9)gZ~dszn%iX>ITnVb;-&Ndqd5vqv=#O!&&=x2<_|xFAlJDCxu}&tMudE zqa#UIN{lJT>TKhr=O5PuDogAm9b%DMZ!T!OT|s?20#I+F2fK-z*%ec>4p31jbUvbF zANcR}_;*G2M`5(mC^l}1J(iQ#Iic6y+KP-!KuZC!?u^9!_TX0ABz1u@=fU{aTVKQ*KZ-353enVPDiZ-Vn9~4g%?z;+)nO~L5(Y@Rq1*DonuxQTFG z3qht=WM}08uDS2=ECA}${A`d+N#@}GUXf)g$w`@s#l;hN?YJoT(PV9{2PUM5&J|?9 zlw-z>-q=$8O(-ocKXmg3v5p=UdBlUm zkyZ})t@~VL^3LQ0;(z=-1F&!~?gh_%i|dV4u&4^4QyJmiJ0EWlnzS0^>QgL1ZA z9GDE?BCD2dsh&mFrotCCOzDFBFmAI-A8Lp3Pn*d$SU2Bx1F=^ zU2?oIv0$Y7ff0Ez* z!-PDxXX!<20b`1L{J+we|GDm52HF+xn5Q=B7fHx3s^qP0e$(>>F0d`=xw$*1E7!=# zSop(3G*%XDR19SmgWG*hP?A3@>D_~tATvo~i$S_DJnL|bhMBZnZIqVsy;K_UhgP>Z z-Ubc+sM7SEvpanURsgboEv$g#kj^g03+TUnyA(oexX1?BT%pfv^VQ}&Pv)D}7cOgwveK=1lnIp53=~Ya(li$7eZOcHd;Y-tjwum{WCbPTk!P_2yTJFZd}x z;8)Y4PYHJF)SrvA)An^hDi|TB@}|Qe!22hX_RfI0f}h{8dwvnfiTS&`B~(&85+{_v zvkZKSiQ$ssOw~(NKB>#xhJ$z*qR-hHT#mlop>M!RyVF0rMlGGWib(ek>m@h#wgegU zyi8>GJWuslN|>s1*L|ZqDrVBHORR3s_qn+PXX+c9c5lgL_cPiaWC&G5%C0of0laIB zqc?>dqG=eT;Yu|{jAA#`O!Q&QCVHIhNilIi`GVX}ypGy;5cLk}MFvo`Y)^_n)x)y- z`fmLU-NNZO!tqK3q7c#oZ%9NV$4@Q%M`$=4mwwBu-gS{?00CB%%LA|0pqpMl(^mL*+JO)R`D%Fv z`-5DO=dP-lpRJh!h0D_<)d=IV`%5Yyz5hH^5?{^8ho1{Qy~a5n%tIGXLNt{Z(C3AU HrXT(fIVU|4 literal 0 HcmV?d00001 diff --git a/website/docs/assets/houdini_vdb_setup.png b/website/docs/assets/houdini_vdb_setup.png new file mode 100644 index 0000000000000000000000000000000000000000..e27e0b6c368370f071a9058b14e6e2bff8cdf719 GIT binary patch literal 28411 zcmZs@WmH_j(l&|&3GVI?AcF*V50V6e5AG1$-QArK+#$gRcZb1Ug1ZHG8C>pg-t)cd z-f!I>vu1jC@7`UzTB_=KY9bWnWzbPbP+(wS(7%3>REB|p!-s)^l|x2^Uh$5RUx9wX zIw{LYz*LNr9Y723%*4No!@yL>p*|TRK+Es#zi2waz+m*eeP9RdicMf(Y~#O5imSTo 
zovb2xlS1xA&*PXiFV~o2jm?%9y%?}=3ayb9)katmzPZWDJ|5kkhtXbQjEaF3lGS3rx)!hLpDxyUHMu zlqWHQmV*xaB~Cn&mO`ODvEzMF-0ZLF6^bTrfT2|}qglbrn>dM)XX#pqd;Vi(Uu={} z;9a+W1szWKB&~P|E4MG6Bd3D6{2^LW#6IQ+?$+u?6MT$cmD77~66F~WBH@pIlOy#I z;Yg(4cY2@%eQBWZ)JGtC6SJ78MjU|?*J(g6;yT#jAv0iL!)SREv%2Wc$tU(rthP`S zMRxv2>rryIKO!!d)$M(ytDnAPph9~VxA}sAU*b?u&2}a*n`m5Rc$NTpT#pA}ztxST zCL~?9`b%2$^A9U8Zn3%F zUlZdK6N5%a<$f0y(g+EW#l*xs{OuYXAD8y_7CJmSg77cJjhIV1I&uI2fEi~=$#>Fm z6fp_3GDZtkhL-7%Q;Q!PbFd}C6|t1lP#Ow|2EE+TnBfU_R&zzqnx7wUjko(_78>nx z08aza#08%YL>(L)%+1XeC(_DO*$m;dw6v;zj1X9_cj%6hiu$%`u|nU-U5K0_Hf0hv z#|fj5VHelOM>(I;y;N+2yzwp?YHHYogapfuJ|m@Byjs&SWzCq>R3h%2{#bHQ4GJ!U zyz0n64C$Vm3*ve~MTNYuf`WpKl@(Jg89!X>YV6I8D;^$Rv5!GK?SG%cZZ-Pnp>~qT zct+}>cFoGlN(C%sZOx2^7&J`4R#0UzP5m=DH0=Ah-GM4t7~cQbq>kCo3xp zqz^*yb8@Ohq01~SEmchpAP%jm;ppT*U14q9?!Ddcavgw?A#p^ZmNZ9>$Tb>Yn5?+j zw_O^%f|#|N@nlR*P34t}So36vG@=&quakH5=TLRk9|qkCbeOqi?y)Ys&2sNRk>c=( zFBBY_C=9ah&N*K81mvzSPCi((+o}1p#?f zo(14Ta@sC_0TI+i6HuKh#1NZg6%AfQ)5J@&3^D8JeAo4ltd_E=O^&GbXc-en()xcDI}XP9m4) z5*XEJ_hoQBTa5dANprl^K=9~VCN=dEQ(U=#X*GRe%+b{I?1Uk<^S<_-^gu+;)KqHp zArN)8YMCt1t7>E2WWGZ8;TLr0%%furLWxb>I0kRT%vwT7oUu4zkEb>&`y1r(X6ucv z8oZC!8!10LHsrV2W2S49POa!*TFG*fSdP@^=A+omZ7<_H?@f5|a&v>tZ=ZH6eD^zW*s_p>QPk^%M&zF8j3{W4pWrH!{s$}-A8Pe z86f(6pm4WM`C3d;^|D3SsmS~JDrB+REZd|1Xs}@Ye%#G|&13HXy4hiq1a{np@gC0& zZsxUL@D>+05?6!%hBpivhI2I-N#0u#dF)RN7C+SHhjMOB8~SD(ter>J-0wESs*U0^3FqMRPmP-!jgq9S;sN~{P3MwRws$fa{2><;pAsilBP zk^_V5685c2Bz32F7M9D+Ycjep(h!5Ht>7u{rn5hz7o8M-m#Q-QEH9#Cr^PdQ_tH^S zj{3@jMz7O44?U32NB?`?n4cfMl+}()Xr%6?hHJtQAa{_kJCd*Mb*uE}wv@m>9u+8R z10((w%hUBq(XddHG*oG*&d=qo;$g~~2$iA#us4U5@(P2U8%Ng2FTf_u_>C_=^^hDp z@VxDg-C;Z4SCPg(c7;86AaaDA1`Pgr8DxoutU%#?E)Eo{;YAxrZ@S;jn0332vGa!4 zzaLhg0rJaqnkf9Vs%Vy8CCK8%G%gIYQO|bYdXGm)xP7y1H-N9MlXt2yT+N)xi)swp zX1&O<;=Ucjx0mUpx~+6$=|Ipg`m~8T%b2ON_i$7hc9Wr!hJq370$ntvx#Q~FY`Mo}_8{c2<433QSeCJ*L-4Wz<)~^1e z^3IoxDJUm`_&v|lPOe^3Qqtko)1i8&a*$7!77Pk{=uSyx!ig`G=7lPd=?xS=E{>J_tb!{2!I_Gelf;AA6lg|~kq{PpEwyi6 
zAtBo*pZYy(|M*x8cwjh_LYQBekLD7?ZEocx`n$e!mUXnj-Kuzz6pe(>ksj5H;1{93 zcQv5y{^@c3RYW?P$U$dYL2uE?piG9sL$ZEi!_Dicyzx*rFPoG0Ix*+z-P_^;QhSfE zH15l`>iFL8xI^phZ&%&67tM0|_Hz7UU5@5T4tUqxKkJVO5dQV>Ij-Tag~NZ zqzle==-WDMW`41P;%wiFN&G{q=?SwDZ;X>CEp{kTs~m z01OA=-R!t{bV475p1rBT){0;~+>VySpOxLh{7e49;jU=1-^HAN6HgJ){-hyem@#x#EL>ztYGScGgu;GkkbnJqHNF7Vf%Bi zOY3d%X8ghE99!!4dg$W^=saRH2|~%vTwTSV#z$q)w;^%NCt+u1$0-kGtgCAPNl>F< zaN(Uvx{pxf6Y(9l8732G1Q$`;Tw6=_|HyA?0Ja9&cE5CU_{a`yzd&50BTu`%>sHVI zJObI59o$g|GkH;rxg{JOYdqVJ+9gu?H*R}>$fNPV#e5DGTsE~{ZukxqwC7S#qh!PF z@drnMgRMRhpK9M$)R3k9(j@RZhm;-OkLbTRUW2^~ojWZ^{`~nVX!8mwM`SNHs*itrBlZ4bXBe-M#cNfp4bKbjbD)Wt)cZQrT6(7ILE=;PWq|9ac` z3OrHFjB7n@k^N!ZvhAa&1JCTHQQqT9jV909{10b#?K3iU;P*j9`~b3oZX+IQmQvg~ zHKwJda>h?fVTX}8GitoK)j(7VxsPlzAE;QlQI!%*0Z(+)JRs9-e@_hP*Fb|B|GN_;VxiJ;S$tYcXs*r z=2!xZATXqB97NFNREvW~m&vyx0FR>PQxy}$i-x>Ao&`d&vyDUlc;A94awn$WBUmvY zon$=?p1*4BZ@}$^3Cq-EL*(Pyt)N=rfokQQ zV>F79K?1h1ScWj#ZmOyVj-#UHe8l+z_F#4!UjM$T>ii6~u%J2+srSR5y!zDuqNvA1 zd{b)gcHY~{N@LY`D5%Y06rNwW>6ifkNCc7^?iSbOB^KC92a+NBrRk~Cp$_wu7yySL*+ zPjC6~yq{GRg#UOtokK()rsCo=GC;`}jRRswyBT(B)7G+J9_HivZXYuR>;@zUquD{| zo>rcGA!wR`(jon(KE<^^ztFdQuQJx%Z7{(SRzp8OJpA&qHW`2mU&VxaIaV@RK&CE| z4Ewt-ShAH%?EW~?n89A=KcQeH{Tw8tB2_<%+126Y4RQ*=~E9>yP)7*HD* zo{5eVzpeWgY5vF}ebER*M&dWYLT^8n*9_x7`J6CaZMD2VyI9HDEqE9zcx&1n6pt9> z;K7+Sbswzh{h&V&1)c2XhUZ(S;0xx#-m+4NMJ2r1`;PH2P^0T_{UD}Uuy4>?0ymC86x}+4d%_kZ)iPRZ6*UsO5ci)<9-9zSn7pK$XUtUd9E$6knV{9v@SvFSNIL2$nb z8BmSi`hjNEKdX!8906HLq;z-L(WSc$`E)LesxrMR+U))!XHTlpt}MqPgK zhW`L!#ngLK<_-YnL4om<;(RchF_F=>k)oPn9I$eyp!swGYvIkjF3fF%G=uiTC4UdZ z%@!dhf2bgL)m}esozk9-30ZKx`|LC9hXycG`8l;R>7RayqLGn4Rf0fJ7>>-Ps@{qY zNX^smT$SuKO})5D1f8F%SlPR)6Mw^0r*+dK;BdroaF_W&=2{{Rb=C8j&6idAPFS__ zL`nH}Q(aBuK;pZjmj9Sb#g3~gmaQ; zeW!0cVba(EqxJb^Ye(6DP$qTOBSZES`eW;nE?(B>N`L6cKs7%}aQ#7C#j0Md%l2>4 zgT87cdXJepEk`xeDkZ_G*f(2Y<*UQlYq6?&UgM*@Tgb=P5aO7@EFN@C{sK1FZb`W; zLBi?Xv7Wj`CpH={I{Jy!yt#u|8^%Mcj4Gd5SPfEZt#j*XqYQURVTn+=QRmg$S^-<`x!#>?_3-4Gn5D7FR}z 
zxU&Lla@rgO`p;{N=C*vvSqbB`P=zqk1)}h%kquh+4VhTkfn&R04^)Jw4I2v@DiASP zP&R_9VJOEBT9!!a<=u{VuycrlN@S(C6@7{I?t2*SZaJ+CVz~^5x2C>)iBvjz8{3iu zy;=J~SLe0aGids>d1Oay!SP)8S#yHF&OcA3lciD6x_8K>#kg_m5` zs<`mw?!S$|j6Kl*zSwrrOs(aNN!vz4_e(k-`PVA$P*4{Q$AJdnaR1*gs;BakG_fz} zeBUfxjZd;uu1oYL;Q|b4YgQUEs7`&PEIy*GmU1qD3d(ZTAeX6L@~o!W>=>x5sljPQ z(K094phIO#D_4CMx?n7$edGrRK#QPTe`95k>6RTGKA;>P)sld@rB6`lTwTY14(})%8Y6#7^kq+5A3;fM1>PvB`0> z5>oQwMAZjV>xz)WEymmXWOT-~0Ibu=IV4BKunxC4XY0ZTNgtDwGrS{%>LOTFqVRhN zZsoreyH}9NtRD%j|BSRxe$EMt=Iow-AQ5xb*gH<_Qwkk=*tam^8cK6+-~z76YRAiw z>5>s*G)5N(%oy^WrrL?G_*+1X8nH4S4G_M{+DQ1Ko`X`B&IpgAbsgcWI<3J)r3tlX zuxPNL;kR7KKDHA{ zy`=GTGsNf8>1|($T4QX_`c6iD48_=8hg`l;aHAuBCL$R|nLMZh!MPpwt@kIe43viF zC)U~z>ndz-Eb-1;&h_dw|E7;YKoI!_NQ3)S`>S6O;Oa`80jfvk%x%YV8P zf^ux5Hj{G9?@LLbS3==%V62$kspZ$luJko+=@NvM!%sopK)?m0m`gDq!Qo`T@AhIp zSZQTBF~S7;S?99&Rf6^&u+{*FG&jgUUujaQnb2~O#KPu)&yEUR0RkR@V`SyrWw z8?8Y(Q&@Sm)|go?`_*uc`J?CJluvzQqh*e9rPgWh?We_RI2#stHx&Wu;C;-`G%A~_ zntm{6WsjYD>S0w&LAqa!wIRf?-G>+$)_oNP&xU{RZ>dJjy9?)iF=XXv{yGuv%X-Fw z!3>!CF$ypC=&l{FX3_edv|Akghq3{`*lWG@T#EKsdlSYeX@u?aMD~afk?CcBiOkm9 z%2?Hk%978J2rZ++AkpJN*oM`2Z0#TEUewBWY-9|+-Vx*3J%909 zon;FqC#}9d4|R)Mp6^CKJCJQTKr7ia3dZ_86ORJO)F8h+ebCQqv&|715Jh9ef!Bac znV0t-KKT_N#=cmKCI~qSV3d22LjF9eZmPFjar%7KcP?c*4Xt?&|1L-%iDvtM}wA5_^>cV(8SXoVhT z7W=Il&G{#^r(W-;KRpJZ(60^&Z(CVreQ*;BIo2fHcM1qDi%50b zK5K38fqbr#Z1ro$l4v6NDUJZ-A?x+yc_E;*_SAsXog=mW?61Fhj{FIJH;`@MYNT+n z_ptIZy`&gYW?^=u5&*WuYoHD%d=mSkcE)!>zo3)1j)y*+H{%HlA zeFw4=+H}jvM*dzIcMyqNltJ$yal7al2WRkbytFSU3IOtqi0IpltlFVel#J=T2T^}b2TnAA~;F44cQ z(AotUx>Nk^AL|+JT#I!1ZKO_ zUGkot4opK;6RoldVyIX08Bt|(zjq#(e%6}Z9FKh!3LO1tW^f95#jLg{xNg=pnwHtT ze$?lG*qANh9fzd&UY#9ArZ3x;0{*60fJ%o3}9#N`>Lm(IR)!Vcqw*fJe`LH3IVHY$-R&rt-jk0?;%fZ<@ipYaKMqT_^ zG!q@W`f<1n&A`PG-#nDX<21s|g$jwN&%>^jYG6dvYe$yi{fb8suVIs4*7~va6RT0- z3nruUfgjAeCT(q;waE?UcAITM4;!W~2%Q(RIhK#j$i@oI^7@W#tFJzA+G`KoHHBFv zfkbAr2>7;76I0JRkByIz=C%5It}Z<6f^5)X?ggsB-@x|`>eY?QGcTN+IEm>#Tn3x8 zI$r3cJJP*>EY|&+J%#y?$<1xbq)ohAM2b*BK}3AjRW+#R9Fp1O=c6Wl1%@0wEiVP}{TD 
zGQMzFBIV4HSmfn7sj9Kwas&_yQZp(H3rMoc-BfimNkK_>^}EJ1)p?<{S+%b}84Naj zAc2}rqVECT25enaJqekiG5SKKHR#9U-2gO1jHi_8%-5;8&W$MH%F;D5pJpkK+~&f6 zN0Dxz)1S}f%@5%KES3JzxjkpdzhaR(%^ah!F!`j}{Wa2%RgT4eaJ^ND=mLT9RH`$G%|{FzyPS$OeQeBs?A^mb~BwY<;0u zmf3~mrb@&DY}!4&{6Iw0kC{O2KdovsE~8|#8i~tfwg0qcvy%OA=Wk7ERT|beu3W8x z9+=yLSU{P|1LM-A1nv$Lw=v=oSQ2+M;vtkYiM91iT?BR97};WAeQW2&b1OeJgm(k+ zgV`>dQ#|O;bGW1_X zlZg2t*@SGjq3fkSRHpnI!qW}DJ9Y!fG@Zb$Y*HNMG7exmm((P2c@ixn(KaUA}0#gpV_?+)3m?>n2(}< z2TkA#2~FuWBeWdfhL*_n#{*OuQW`{@TFSE0%^+c|n{xL}^Z8k^jSWNV6tE?VsUA+~ zUa9Tx93gMuJPH(03m@DGqcjTL$-C+5(pkMlij{0OyRS8`0FZFNA|&lpN^FL@0dJ>X zkQu|(Z^GtZwSS05)^&@2$Tlp=JoXKp69LFhf8O%b+_`aR=Y(GSX8 zklHdYd4T?=e)<4ye?e>ZZ8i*A++=tU5Hgqzt8Y<^`Tg;&@j6}cxj&E+EVPjXEwN8R zi(;NWP~Qbh7|LoOZ3`em*hcS#whT{{g6?V>5Ngdg=0(04U9mmwf*lnUQ;^6+!%=na zAq`J6R#u(Bd>iGRLSUq=5y$U^G!3}^&(J+T2ci%MG@pxRA4|cod+Ch$y_>m z`mLX`tYDA5tfJWOesL#>)GRFMVVIQt;2@-_$w`=#%;=^jzL}YseXDC-Z|}B;#*PlX zXRjI)Q`7v?QWV!#-5gqp7{iC_gRF-u>JV7)zoA4(3F}j}=>5M$J@{aD=47>{BZ1;n6HG zo}Qk5%^!pWjRCT4N+G6=naV-ICMe<(JZlM5zoC_hm6Vf<7T{%vaqx|;HXg`tZq`Il zf-)KajE44qq1b-1Ab%}hNJz-xN*`(|6m&2$Ga&o|Q|=z6Sp6fwzR0uptna!Y~2Xc+Q# z?zzsDHwL9YErjjv(obO#F8ohAgsCOZ#xnU;{9Msc8^G}|w@9dv0{{B*@k-Lj+ zVAcP!AQFduSEAE>_^{a(0KZW4BT6eLAn+5Ek${Yf+Os>B0qlZCZ=;|b3n&Vc0~@TO zs`?9szR3rXxj;Hb!ri?||FPd^v*To`Uba0OjPR+0E*Dw%Zy}8skq;`he23zf@>+jh zl9ADc^|AN>n&#iN#jm^b?c93os|mB z15EEb=I}9hF-4ygS#TxZ^+VA$UkUM{7T*R2WJKm)nmdyV3;R6&_UfsE)qvUQ8Zn0s z8HL-JmRDWLed3yS(bRG6PT%&@Mw7f3D9CKk6N>(zJfX2Z)FR#?0;RT$q& zu-PMT)E+K)l2fF{9+$G=)^@_C<4$Grwga}uZk(4Y{TL4m`~ z3YFP1qM0CqQQld37{UbvE%#3AW_&*zdw=aQNOyU$>F0t74FkiT`C0wHe0b2v*9^ikdwVhO_ z{RV|Sq@M$nIvG}Y^$^^bMYEaR12<=rvB3M+(+cVc*@D)B8w!X4A=5*}^d@=3)`h%n z%4Vc2^EdF`<5v{TZTLbXyhC z@j5;!APd>I8%}~Ju5(_Wklp}85%qBJ2}u9B6y?+d&C{}K5ACU)oAuvWj{2J16ed?} zZI?e-yGP2Du*nZCKJ$)*V!q7w?m)qB$qqK*9llZ3`?Sz>vb;+u*rBYBE+l?&7`O^@ zPnx*SE<#dxCnY8Ipw?zpUJ~$KaA;vQlZxC`@OUYrpuL&cX7!L$cr(JEfo@L8)ARiI zh{&3pprf;xuRL`ixC9C&aux4g?4N@11m_(ZNAC>+R;ClomX}V7tvHSJ4Y5lBvNJ-! 
z50m9uU4$5LeR!R%&yFG*f4ZDvj)e5yoeT9|jR4hk|JZ#GpCsfEN01pbaScSO!!2qD zief2fbZ{VydEMk6loT004=GGuu2SZnD)@D}ntJiA|pBz-U;=!T@agb@qtFFX7Q z!;Zn=RI6N%SoOLosB1%RZ7A>SAh%Mg^P@dvy6IW^r|(iEI=~3;r?5w?_ir>uR6hqr zC^mReUlV0(FNHZ|KO!C(z}orb$SsD7s;52*cZUMdQ1lYC;IaY-Jr6Npt0_khLRk|t zka-#b7W8UC*Sx}}cT(C7T^DgFQ}>;5hKzvF>JsAqB`u8x+kofFxV%1DgUyNeQ4FeP zT1jd6zT-~g{4(EB*)^)GDg8h%=Pqb>PMLh%4KDe`75EvAJI1da&y0(W_&Xt9lPgQ% zySttm+UZk@+7$@+K=WEx^r(~foQ!LwKQi634}4D@w(bY&wIst8330^x0ufMGFiiUr zwe%|k$(Te!^4j6mSaSC@YTl)Ck~W}@huB<7Iw$|4x~xq?Hfx-eN0Liq06R~t+OK#~ z$}NfH)!^K4gIan1g^DtaoGudh7OKeeOXc9K>Uv-FHNCz(LOyPQktl^WMX-$Q4ZPs} z5c95hi_LAU1{$&+FJ6$Clz`e{5x6x830V>8ml88-$zW5qyuKxvjQX!$T%~^P7q>po zLk;d%UGSJ2b<^_GMUs9-t(p2yTZE+p-~1jnG5v~37*+f__6(j*-uWnky=>VS?cM%# z38*&6p7+m36l~j5qkSD>iz5y)KN&EbT4T~{MTz?IO=2%wIDEMpHv*Bi+ict)@;A!h z?@LS?5BUw}L?|^2JUKDH@H>q_(dT)FVb21|NcM=l8Tt399Bo{Fb{my}E9~ z_Tma<(b=ypH3_<1tz5Wl?bp?F>iUl-6X5?oH=M1gIuv>Dwn1I3+-$p+wwZW%wauosj+XPoi!yX(Ih0(~XNGVD4upOIU0ocT`Q zAz5Fzm`O;-kd>LDdK)&H(VxfIQJh~P?EY{#A2-(DB(J@{KUh{-FHClV$J7vwdwk=?5vm5 z28r)k=ktjjkZwG0IAWc)!0e`YlgAx(Pw=b_H$~T#nBIIHmo%9l6?e@vyZp5iQf_Cs z*5+~OI@)H#eS5OvGyxcpnfQ#ycjYS+31XMuTeq)3_J)I1kmTM?KUZhh$ZZSq> zpW6*=FHC3gf-(fEIz>?Gfk|It4<(g#=>Fp58=^)@x!8~|eZQ!Z?C)7pL<}QPtaCFT zus3_hJT(;_^|6mQ1` zy7vx~zS1ID_NO-nAw+O5JcA5AXn?h|lOfw(B8Fyo+kb#Bjxcs+WIH-_=m-26sa=~= z{lM16h#t$+2`;Dexbz0pT5Uhnn*bTjZ^sKywi#lk984F1kSYuBJ$(^lX+~iyU#^Qp z4IziKr2;ALYmd)oewEU6Z>5mv#S_>G zUPe%_#w6l_tU}ZOjml-&Tw65pAw?(uCgasnu+{BMty6vL$Rn@2^W&>@#n~HsM7bWm zZ6gqMW<7;4ylo8rbzJDxA>(P(dSj%M={C%-^>k0#SEizqd~5yGO8=3hqV-e&_{zEY*{!`C+T(78>+c=azbk~A{B6uW+0gp5in-%W8 zr>pVu%$h&v3!8XTz-7(=<@bD~zYQf{bqIXC2w5YqEld9 z`Glc8{>W3N-6;ROzwW+`&cdo4q*-f9j{b1hxH}Zu!**pdBp^k>wf=I`crl}*(EKdl zTxr*FPv3DrW><>4tn0Rl&!yzYcr|C$9WfBm*kvN)LPwIW#RkZOZ%&&U9AN`(i}285 zk{?7(PU(asM`p2Qr2hE7XlF|z?i8%)XD6pCTf^(tvI^(!#_=QF*?@2e+wohVv` z2>4&ZaN}s#PM;HGD3k`^C(s+$JV70Jhy0)bzstm&V&l}1Czi&Q{7)y{PD9#u?SEoh z&7i9)czcqP`A*Z|f@U+#EL#bI9+d?Vo*70L8ojk$ZGo7_gRjshq@KKm)Smf2HE$nf 
z&a94U!Q5U9P=cI#L|^YG3JL-qx?%i7o+o^7I9+VF?LziA{GRq@M3vW>IvEg0D?1)a z(>MyYqQ_(j1zfa92dG8EwB*`@5F!6ZHnHLqwuR3 zfu}AGLgAUb{O%qSC|9?<R787$JvXz{{Vxyv(cLEggj0h z4i1lckKs8*bW#|we0}va!xKleE8jEdx7YqGK>reMK2SCDPEsZv+lHOyV>fKjXH>_>kEcb4%X*^DpzWWr8o6ln{v3jKNgT{Jve#>z$ouWu znd~7b?X#=s&Wj@@G1qyZzsU3T^mhM(q2lH!Jtrg^?AHdSyN&Q8O~yupdA4~sdg;F|TuUL; z&}4&XOosOte`04v-!=!Vce}2vk6@z+rbJixV%@YKD;m-u$`GS=B36dVnF~Z^?UvU^ zIr=?hPR5TiwgmJ`x}iX-7yTBZ)+H535yDfxu~uMHDZqljvIO)jbj4~O;ZlSeXm^Xm zT=$$r1eKn5#!e{$Ci9)A&nszL+i{Dywj<5@%o}OD#;_B(@Cs_9G+2#e&P=p+dqKPa zM{?q+`6O5c#+B@=VdL>kewoIJI{w>s1_p_W5j=B!t)r-}jrF-dN8x5;5!6@ohPb=)fk zt5ZfK)6+@M@e-%_6Yn3^B@#G1h-Dy9MEYyJ=A2vmn>iU zTE4#_&NZt_%WuWkjk{&YNjz2h%)WGglJ?!)!dl{BlZMN3O8=X*0FG0Q!I61Sp78zQ zh6@9hy7-fLRrVMmBCZ^$S&+)3=0LA#%tCtPWh)7`QV8ng;zl?HE8Awcg_&gixdNAl zoi@h0^}%;*&TjVN!eQ#X2&WkucqS6rJbK~SZ-JrawjTx&-P7LH-IYUTue4@7iQ#I{QWT5Kx>~OG?&$n# zSm0ySn}uREa?gqWRZmE)MC`#*7_Y@TxD6O8IEXf3_Fhp<-?pXiR*#m!bB1MKL)!iw zWX_OJRF~Nr0&yG1ZnS!xtZpqG=20aZbT^xWXTfEW2Fd;U%>Ndp17Q(rr{voVwA$RO zOHp2+$54ex5R<+iuCJL;WAm0<|E)dx!h`lCBEB5Ahv5Ew(8&=HE3N*PO-hP}-HvRy zexKK7*n}=_930NS6!BG%j}%avkoqg%)2GfH`>gl=${;eLf5D?xQ4?6G`=MFX9?k*3 zml<8fuA^nF2IdDh94Ww2mjccDOD}S2h)6BW=+imU$bK5qHWH#IZ$83}g;G{kgVxlXt(3=IoZ2Zt&f?K;Z1MI+={c|n6Q9OW;T0Y{^)8*|9s91Z zNwcrWy5TbR@XgfxXF<%l9vw!+VCk|7V)$_~)U}iOH(K*zk0P?8MGZf&o?3 z45T8@r*54p>J}>vSAYmedYtf0>ce+-#T_S}V4UYDKVyZH5S@GNCeEtCaQ56W3PdM4 z0P}nP;<6)2_hV!e1w9mFXF#ddVSIKLD7LfdH&uaQx=Iyn^U+oq)pA)1Ih0I;w}%cz^1ey8uS}=7Y8Ba5jZC)!@7UCw{NkPeOz? 
zA|qo7yZ@uEb=V1b(K|4v>MS#S_FlG^Jj*tJ(1{pdJVhtxX`@wW60e(y7rw6=qD2K5 zm)(4$!|hX$`-`!EFID^=$sQDzo6vpgSn%2rT)&SU_H9y=y?@9!r%8)hBOa|TLgDN= zZtuR~ga62Qraa8bAB`tmU$haI;-^Z$vu3^4ruhu(jlZJJV<&Fu2>^gV@xRi8yWzF4 zut{UikvRb)fCM7K%4A9*Z{$RJx!zD4Qg)Y1yyMkcLKOx%VMEdT&xYGlFX8Q*apCG~ zYw}u8)Y#geprMBI;C}3P!U#7)Uu$!>XN!1+5WUFBHv_-n_ z9F}HHjR;&%mS9&#(3AuSP>UMpMwi?p)h`XKjO!1I20qUJq7x%bx>|Xfwa-l zjSUgI5UNaMEY%8LtUL?eQI%B!s{j@@egMwNyIGBSWbIaT#FI{W8HT^ge`9&l9e2(i ze^x?LIgCrcpCZu&Mu-CAg!$z&nRo5!%rj!*V#IHXl?Jm!Fk2hSBxJ{^nT1_Gk`*$x zHF^u*kP8GRlw^ORhtjF_$nLbcv5txAn)EbVMKS|Y8V{LSw{4a@s3B1D?}3w7*s^^i zrT2>Kvdx3mmdX1TxsXaGs>3)?ZsWQq=`JH@HyXWMHW#)R%1r?fm1WyAv_ZA;Vs2?JD=OJycx3npCpH zfC2XD#SXL(IfP}Ws^M;OwdR^{>8qOe?Gcms&f*A1lf)2DD+MqjTU($3ytsn1aak5#vt+2}h$+(ss!C0Nhav{vy+i8u{RH`hpcv;_g$(R`|UQ47c{*Ym8uV+^&# z$tVzm+R2SWDUXyF$&VU6Y|oP~OfV5C2A;^WtJbnH8xF9I7?E2JF$tE|T4)j~^>AmV zX8`bNO?OQNV7MY;xh6!s-sIsmIFv1~N3AeeSp{W~_qrIS5sjo>bzW|x9`?(1`gu`g zFKN^VGR+7>jm(s23tC(@ZU)#yX*vUI>CAcmz$Xj2u_z?{Jz`g{y)G*WG<F#|4-wpu`waaoi(& zZcjkSp&e>oKL{c_I9|97p}}j{*Ty6s*`m!#-@7z384MT&(e7ScxPA>zRo$>_8-qNY1&_ z4`D##b}zXt2zFUV)rX>=hAaa5)6nKc(LnN``+$9f8yL64TA@JiNvC=i#JOC#o9Pl>iLLMy_K zsFD)hpZCPkb{VE{er7h;MP||ctFIs-9Okq4EYIL$m(n z-Z=4Z+5V9tL>>>iP_(#2;XSnObH@M4`~UxKTD>n#7o(s5-=?a5cOQl5$qkb=h`PHC z*)*{TC_;X%AXF`v6${rP#NH5`c)$ys*zB8^&b-tgR6cXpy2lPf}IBYR}()ObUh#-jI2)?H5FR_5y_?plN=mGnaa< z!}{xQt+z~jMN%kMM5@b?yZ^msBw$jf332W(%Uh-`GPX|WC?E8uWE$UYbvP15$q&#x z2~L@u!`A)dkR=hlvJ>Mn5hw#b7@6WTO|gMEc><484#~>G6`kQFkWH#+-|0;w8(4p5 zDM!Ec#tO`FnkS^&irKP)(W{{4dJ4HmgyE~+Z z6a~qFp@tSjS~|}j{_pv6&Uw#uo$GvuHEXTC_gc?-p8NhSfzROy(_1GAshI zZiWymV9Lm*rPzU>bwmQEVM*H;qe|(Wa5ov;9Mljl!<;EsYBpHu!ky}W;0K0)-WyaweMO)K{=R#a<(&Vg z2P)@gW&*7&4Baj7NWSL6%MMFx-qpo!eR6-72#<@z?x zEVEl)A6gbpKZp+Hh8fB3Rk>efW}6L$pdgU!tI zX7Bttoc*S!mn!Vvag`z&Cai^v3pZ2?boV{aFRlLoqd+SFVQN%MsXD)S<;e#CKDbh! 
zxT`_0eRMw$Zfo4xGS7v6&F45_J|L4Y47mF9eJU8UWbiAA!^qfdNnmOzlVx#XX@rKP zE%V*kggndmp^n!sXTNHPX3p``q3Tk^Mdib~lW8bPsab0zxpwjwkOSA6y;$J7uvqPn zx+o`6BWv7x34?jLUVd~K503gVC^wQRNO*K~WY-BqF9o@|k8JfHef-to-RgB@PHc0i z2-yjFs(IF%#neE~=ID5(N$Nprbr7FzBi2DNQ(yJ)0lFMN0kS;3EhQHG(|cZft!B-l z-~QeF<8pMHz2!$3HDPu8tb!K({*7J?zUc}g=dmumTw8B>oKVQ~4_8oYjaO+_C&AL=_Qa)Pp_FQ7c*L1{>p<&%&##zm^}2Fh6cn@4S{FHZ2SIPQ2wiNlgna>r=`Z&+thjP|@+8Yh$y1e*eDK^2}iog7Cho2-333 zx~To$Esh&f)D=68R5Cr0QbQw7Oyj{9HSZGJMD)v`ezm6fVtbJ3{W~r7OJ-%cl>con_Vzg9A7Fp%15DNay#JuS zVI5aH6<$DLz1Dw8Xz`l^IeW$%RkrI{sER}7@$Ht`EW9nt`9BLC(l{1YV@%tk%=uh6 z{1PK~!fqgm780sDTN$K`{VF{@BQZN0&&|Vpu<)(Ghke5lT4C)f#S!|O%`DGaiy^!h zFJ5fB`fja2L&$iF?ZpGm+m}~^&}r1hjI^}0%$B<#y0xY#+SP_p3r_90T#1QZC;RQN z0s7u0*|SGI!a1B@5+w}G_9<473Ti%m~rI>q8q`?A^b$sez`*c7R30)PzZ88yUV`bTCa72m<#^?|&sEU4>p zTv{PIrjOB}xpcO5O4m3(envYqEP_Gl^@Y)#u+Vh9M+Na7KV>f)37LU>(LLf9OY;`!2H81F#A>x|>y$wGHN*KE z=>B3zT+@uK8iayC##COwK67|;e;4sk)Am&wC?18MuA|p&?o`zM$yco1>_V?~{#Qp= zuXwiC*VQc&9{00)Xr5aGUf=BDEE?_nOMa3EE>CYyr)U7>P;pDpD=dZPsTFc?B znCSk8sEyQ2J&{(k=DX)zAu{o`-_EeV07V3&*E;!!v)z)HiW~**V`6SIus(Hll)qa! 
zX7}e^aV)e!vWkj7UG%U@V%Xa-faD9=ug*iM*t7u5d{RVAeJzgP!4V^PIKe+OrRls# zQJ%b3pf4A~F>Ke??dIX*Gz-1X`I4NR1y14vmJ(^jajgKbfW7Mt1_Xq0`=C&*PEOAH^>uerF+a z6kRWjs;K--!hvT8Z#eSMb#Cu!WtZ;{6u8iJ7|<~ zqC;~qW8gf{A4QnZ9&Q4_o;KMo~xC20;kg&6Z7-Y zfH{`Z1bCF*VSb^PY)fyYRJI>7sCRE0T=tvJs)I5U7d@~g7XTyn#{3>O<#gb4x1GozBYEy8Q}~p zGy?DS%dzBiIwooP9a6>I=}kcGOLief?N|FY4pJq3dF3b+6&31-zYR)((y!S#Vp+f$ zZ>_o=t*^0%fm?{ex+*t*0>Sg~#HMl9x2r7Ge(b4g>L`<~dw2Yw9aU2Wsv@6npIl#z z@23iy)}&w(zACk~mVl#%Ymtm79SZvZ+==m-nc}wA{kaDDs;%fHBW!GJSQ}M$2o$dY z36Tla7zYuT#EtcyY*df*(<1y0+{vqK9S0nN2W`~uQFgiHjZFc^>sdJ7?l6S^_@REX z>ndjetdxcmp?#5YN<&LM|L^xPR%7y)=N`vId}Gww z#20Pz9~8s2Beuee2VHZZ5wEpNX*lM}%69JPmw@E0b})>D_rotLK4n{5TP6;U^Lguh z;p&C)@fAMQ2iGHJdG6Ym;UpI*yt=~UESd>ceIlp;0}|kYHIO}fMz^KP>T2ycj83y+ zdM+;Y93k~V_t_p1KLLH6D#<029GOu<)x6El^Km`(pbI^oMDSEx?MQ0)p!;9-i<@Qj zyNMzA>3+Mw&CNd+`tMywXb?rlCwIuV71FHRRttQ+(ovp90VJ_XpFu~^uR#`(ksRE_=ct*gL5<71u0 zD|)7F_I`hU=$sC5?(K+O&19%|TXLO+Wq>6Xj7K+^YqJ8x)|@)<1?O+f=EXucH&+i6 z8Q`av@4h(CMW;t%kkuH4TZ#&=#Mhi#kE`4LGZvL{{2&%FcKY1xOGG&h8h|)j8T|$K zYO1SKA6;&9ufSpvu#T9p(TsbOHoWarPcrTz!Pz|23het@;qnxgzzG&i%qt0+x=Pd! 
z8g!xHcUTAq)@T`%HF9hfl3>II3p|5@LhS=ai0!@uj)4cR5;kd1H3*-2{_cz2PyT>9 zk}&kA^xl&)GMPSD$4wO%J10b@lP{Dy4yDLW1{3!${0@p=O<#T26d-SeSY8wSw#par zv*w3y2vGeprtV)^nUJT|D)7Dob^vY3hz7fUV#<%lGY3>oA}?Qp5=4$8?Mx)8sA85( zN>sASeIgG9?~y@s)qWUKkx8J43e9A- z{FNRQ*)KD#rAbH6O>ue}$HdMIh3oO4#E>%-tM>oOGC+dGod0;;o!uOkT!ql*ud9_V zcfXi)(7s2e9Z;D|=QY*BJV5(<#uU}MxxVdOsp68dhVSWnsd{R6)vs`ecWoSP21{K8ad|DXTJo!_WC-CAvfWLGa+zq2`A|6PsL^qz{zYTv5 zEDimSu+npTeQEK|;iq*qX;|s?mRv%WY>||SQT+0htomwG_|}1R%p~f45l|}9I6P@a zhw=ObFoRuH)%JLB0)GK!ll$qFcKlE?4#Fo3yzJ-B=pO9=2Nj$l^xnR;adJ#9M=I(W zWyh^pyl9`QuB*42=j~!+dj!aZWr|pFd+g6g;p^!!F|)&r39IBvEMFD2Rb%a)c}#20 z_DonuRE-vcB+!V*aTIBb>&f;nuxwhLt^Bnirq@%cT(+Nm+FDw5Z;|jtoW@*C7JgEq z(ts3p-9UgCqb_i19{L9>4*q#tcEvfzQnwc{wi0~#4|a0a z7Xj-=&YuK!I)Zlhz0bAxmYJ2mShmj=HQ!9|8-r#Cb)D;yERS@tEg4$>i@Y7k>ff2| z{t;C4DRXB61V@3r=CB;tf$i)DY%Yw!|LpW@m?>u;7k|g8&}uFGUNANGUfY`%t9{ei z6@1lbxL64JRUVFZ11OJYQ-C)SAYPPm6E`U<>wZp&sAG`x+KYZmRS=94_-5B=RaHWg zDlKW9hcg$fGBh+qO{(^+fuU>O_9BUH)krc%U$o_0ZtXd{{1<5PW2~#8ARK{J{(ggRZzs#&XRnEgo#j1)xR0F)RBFz^7gL+#VoISw{K8IE z630tKE1;j`7JHz3ROU<5lkU!2559v@SVi5|cl^`ElTl z>smYZvmwuA`gobcvf)gE+aGgtyY_}9LN2SDd_Lwvuo_33G} zIccA5W{X}d$kUc>b#YQcSLpcrV(&#-NZPvuLPykxzf&azPgSQ!4c7Df9rxn7!Ktnu zP@eZsjMOA`Y$f~uOcj@HHOJ$1G&oUv&pRTqCDbN62h*l&W58Ov|>SYBZ265KEz@Wmp3~xR*#Ut zOHsl`$*`%#DJLmwL*DlcfiL#VxUOhkRZJn!d@HUc%Y5ir=^vx*nblNGZJns-dcEi! 
zK@9y|DPghQ@27EWdCKdS55oga{LM>G$`j z;&4v!0b#oRE%zx4{d>me){R?E1=&EpQKk=(ZL}#|C;Fs4?>vv^34<56gN;tp*X+Q$vMse@GEgcVgvi`VKxoT6_>vi2$4XBAI=J=fDtGXfq5ks zT9w?sYI82?W9|97Uptuy|H)^;1)NB756WLULve_o^Ygj!#g{ zKNo*%xkCb*zFrI97NQZ6E{d42tqTFuEu%`#!_7@InS)*x|NEUx;aQ~wqlP8H$lHPm zW0TTynH#=F{IJpf2KuNwQ)D?u;9j*b5(>6%JGpw2E_ix;`ZngXO% z&}orQ*c*qjo+GwYsA)n{=?3eIxa_#kZ1>WsBnqKvtemn%y}7rP3Y~jm$V@#>A(Az* zM@m20vCQ95!#YtHsXQzJTfM9JY~#!%dAr~8kk&Il#G3OpdP9Aq$%nwP9#K<}0KOFf z4(p&MNi{~NyFgcq8>Z^l*jrai2R#!J`^DLzq{WD1z3bTfsB|;(zJ(i^P(X_T0HtarOU31csVLjKwSQdEpK8OV zoxIgS63F+DlY(0^jIcu5uH_CD$N*nuCRKEOYzu?*fcf2W7%*dC%4zhUQ-VfQEpg|T!W<2o)8;z zhCrW(oF8B$jD~X2<6-d7DP@f1wHk$BS<_K@>`lys#usBXzDb563}Ge0B@Zpj00j?) zU!ov&<^r|GE6@W3n9B|K8-g~7h=c~k*8uRRA)gUYIzjGb|2JN;EN!U#V8N1p&Hqw3 z`R{u92~XLwY?|Fgkq2a}vvVO{RMaD<%ImEk{%W z(5jFoLSo#kQYW|EzPvGHB}VF#X27;NJq}q0O}Mv(Qfj7^jDE*?V6TZw;+4lxI_KL3 zr)ZK=HmT+ zmz->BCjN=Ox4oW}0I+O_V!V{Dm!ZriE9NGwOy7=Cv3P+)-AQ}%gWPb#ibAVT6**F!oP;-#Xw08ksZ6E zAb3euA*?&_3@6yu1ex&Hp`+D2{2>mnzDv_GOCYlS@X&gwTit*u093vmFpJ*NkXCkv zGguvC4C^9$M*|?yI8!k+B_qMZpJjR4j>=T!YPv4jj`e~}jkDG?a01fmsA8}X zxKpe`Rqkdgudb5|odu`vcddLdLsbF#WnPQ@Z8F+I{A6>ds|uBc>_Q3~3r|zP2cp4= zMu8*MY>zS+&*0&nREIFvnKHv9Kz5gGQL4I-Vk)A-N86}Ud^|50#e|-oo>L3kcyj9V zsbDs6m^Fo(7PZKggp~!pMgBvj)DC-*gY-m(-BW9Z)Z|jfvx5pGBw=u1mJhC>*L;LjA zpdM7B(AAD>*Iito&HNUre77CjCV52zMh}ux_T18{Dp(G64M2DfnBmzC-f@P-XlQ?a zsAZzjTFA)_N?-lUwd6XIGZd{LfNSiit-m{wv$6uPb^j7GGh_SvWpt~wVG6DT(Zp92 zKyiqkK@8_^CKueP!iM+fFWCc3{5c46kdTcxut!DC^Q^wU9=!1#A5!kD3z*N+ zs8~YjxBqR6+A2=uB}0R-n3&X1KrXy09^|K_&a3R}+Q{K41q*4oz>-reR(oY5qvC*| z#+dOhp~~ag>M~KYhB>ub4qA?`8&yqB&2411F*1qKhKKmgA{g~+W<3!eM_mu#YpwIx z(}(va+m-O1zj3Fn9rPKm-?=kf0N02be*{hD6Us4oTdeg`l8eWoyDT z&ALfM{7#k=>ddClDoxsHNR8d-xa-5soT0`Ji}jeaH|GIfJHsF9&SKojc=$;RnJrBhLo`C#xyZ^D^My|}f{1P+U>D9#;MueuDcU+#Jwbn@I_vZRi3W{h4< zM^WLS&(HhJi6fsRMlQf{zJW@K^7Cuwd>qq(W@o0xs`v_5cO7%HW{RT!DYM0RlkQ?V zCP@W%f8^`A(T9OrtdbEMl{8pTTrSm=%*4tn1(3hHh|iI~;CMt#cG@zfB8&^v5oS-z zdcZBp@z*Z*x997CB7O)^xGO59^S(8q2Np~+BqQDdt>RtB5nwMHPpFrH+SyCGX})-+ 
zG}5=I=lbk7r|D2U^}K~J%j?&iE^9KtodCEN@(Ge|O1y)cak63j%akU2g5Pr-6a!+S zq9$@c@$HV>-%S~2ulL)ZA{_?%ZGYL3MH5A2!rMbqF#i?yRl_f#pMRA*QbA12%tv#P ziGKm>ulESqq3C>5Z+Ct9vSwy=c0Bk_c=GPbO<0E9+&rJslFKMQd`Q?Sx2n)MR1w-V zJ%`0%V@4di$;rvX?+gj10P|PALfxtR)+3hkjS5KFZxrB0=b>G#E$NZ30C6yomd}=O zajDZk2V6*Mk}yQ4ua-ORgP1X=2Jo3Ha~QR0I{pQ6#jfq60C3$9atgCEA=^H3*Ca@h863A_SSiP`;@_UIf;nJlB2v{l}Hw*;di2Aw|Ze#FQE(Zd(V0v|IIcq>6E$F&c z`q~)})(Loo8+luv)3Kvmwja9yc3b38eG4+2;}FAvG)~M9s`?)j-T=lM*>~@-oW5<6 z0Pc2E@d_cyM983Yd?>jL5HIxb&M*zHIfkJht8zXoGAQV3Qrf?0biEdY8W+#mHLM3yKsKhEd$C;~C?%CmZ8X^wD1H3Mx;2=nOe_pC1wmoZxw z$0x(7LJ*2#<&5ac5!S|q`8w|@YZ&Jj+uJ`p#bzc@<7DQeY1Bdv3r)8zMgCJoM$C}Hfe`cZCY7V6`i_$FfC-M6w*`%u*|D7g}f{I~lGt5hBS(rkr4ymJZURC11e_Kl}*LZS4e6S2NzFs{$CMN>XZxL|KQ|t5rL5LDLbx@0KBuFjQKkyWCya~}{JN76lc3QlB^O5( zR=KKYm$6jy%V=-8n-=iqA77EfzbENEb%qh)eDpiLGgX70;q9XI9?`)wg7;Wbl}!h8 zySiwAR8}2UY^ts40UpW8?yV%wGvHB&BT?W@Y49d{QufxB$EVb;88~FXE_<|7QA1O; zziJO(UkB*WT0tWbwEmGD9U`#nzHf`}8GzkeV_|bU7suyfN#uOd^)o=Sh+Rp(f z(pQt&7PUNrKc9&EKxwq}XUoKpSQRc^z)@`G`DlU!HGkyJWOGQjR}HouSU_sUeGuaU z5YDNn=QHA%8I@84LqnhNX}UWDhYHw-L!Ag}xY63(ehKoi3=_1r1tx8)6_MzjNPGdM z^7eXqDtn47F@{RYI>##EN}D&{qkyQ({>*Iw*?l@RDTOD*4hG{%&BR0&T&3ZOj2bHM z$imNacfwPPn5vVJ7v}V@5E4j&sH=Cp-5*iPsr=+B=f}NU%hlUI6r&b}q7~@96}`WT zd7+4WZ9ZXL@e`|b1b_IJJ>FOTq+vMli!N&A-iX&WPbX00(e`}3S=FDlt_*}7>Ps)D zb9#bNl=e9p-mm-6s7*Hk0tDHE;ONY6CeY~^nx2cIH_t_#@g3q|Hf78cDL!vjcNfg! 
z-$_^uNJ3N&F6U=rU(?FWRgWcd9TD{x$H{0|&Gs0lC8_p}RM7TrgHj5n%;X`%G<+t= zG2nBjVVO5Q!bW7T36Lg*Q)*qK1srOxcVEG3YW*%hirihSBW;eN)|`e#LlI^OUBb`i zJ|q_c#9z+YbICdnSz=SY#hSZr4Os8jxrr+MKpN~*p~}4oD+}OGJQ$4#obf%NpjL+0Ja^!_T8!WDF+Yb2;O1c&aiMIzY{w^ZKNL|il27* zr#Wb`bsZpQF_1^&{BuULU}pWVwc#aUD7i8izR={DvAS*3mlAMajpc9t-nDkUQ@qT5 zD93{k$4qYbazl@g4SB-BC{qH}O<4QWwi(n5h3--RPT(_h(@$M4aQ7Ip!?S)>9Ls6` z@oSwb3J5h5@q~ydLrnknFNK=gKo^IWsVUC6|9z6y9M8E)mn1t|RD8L@a ze5TG>o06}VA3$($UiMzY{G3G`V_E|!jqmXBsv**B6=JQjw?B6hwhLEnCLfcOgdd~Y zWdkcG^yNk{mbvPuY!9BjU!@S9K7wA&84I6ybr1y(fk3v*d3aW4Jg;Y5^e_t%8h+*P z75g=M04MNF{O6sqwy#?2ON!>NLXaIY1J^1$Li6ZtBbc1ygi6)QGEPJ%YQ#)uRrzby zy5XxyL{y||SX176QO{2Cce<^hvv__8R&7g5IZo_DFJ1t=)9Yvn_fE^_z@0{tk5O^Z zaAF2CI6t_}c4uY_P8OoonUhX*S-v!MVcPGqG%6jP$51?pvar$3C!Co3BhlX6QKPGD z=4>A($gK^i0xd{PLYPSr2^~D$@lyu@ptph=bsO=FFm9)XCbn>9k;l~frpS*Dj5(JZ zoLaR##zj5y>h`U;X~zIebyh+ zH;D&Wdybc6T47w)!`yt!WmQGS?gy6Z3L$xa8TSk+V2c@nc7XS$%*TtYo3@4;ro4AHXTTxLqA3pFrg0GPn$2U8 zmuRm)BPH2=D>zjP@EZT4how`yU5zG>sfySOsTK-_QBywkcp@v|GQAS>6S=clO0$%L z&58S(rGzOe8Q;9^N;h8I#7nW%s5Y{s3+!>8#}rPwc-dK(v+eAJ`1`lt8b_i2u4%T8tX3l0F zW0tJ}TN3^cfaDb<4VITD(@aQ&D}-Qirw)Uu%QK*leVxIEjM|RVEpGX9Ge}-xC>hKD zyl)L>81W!@zyOe9zzF^i7pnb#-^lj=+Q6fpdvpWFt^q@bvwPsex<_wiRb;BAOoRUi D>Z Date: Thu, 19 Aug 2021 13:22:33 +0200 Subject: [PATCH 171/308] =?UTF-8?q?fix=20hound=20=F0=9F=90=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/plugin.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 989bae12e3..efdaa60084 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -20,4 +20,7 @@ class Creator(PypeCreatorMixin, houdini.Creator): instance = super(Creator, self).process() self._process(instance) except hou.Error as er: - six.reraise(OpenPypeCreatorError, OpenPypeCreatorError("Creator error"), sys.exc_info()[2]) + six.reraise( + 
OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) From c6dad89b3478f2c59b60fdcc6ffadbba61d2c1c2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 19 Aug 2021 14:22:42 +0200 Subject: [PATCH 172/308] Webpublisher - added configurable tags + defaults --- .../publish/collect_published_files.py | 19 ++++++++++--------- .../project_settings/webpublisher.json | 16 ++++++++++++++++ .../schema_project_webpublisher.json | 4 ++++ 3 files changed, 30 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index cd231a0efc..0c89bde8a5 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -75,7 +75,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): is_sequence = len(task_data["files"]) > 1 _, extension = os.path.splitext(task_data["files"][0]) - family, families, subset_template = self._get_family( + family, families, subset_template, tags = self._get_family( self.task_type_to_family, task_type, is_sequence, @@ -100,7 +100,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): if is_sequence: instance.data["representations"] = self._process_sequence( - task_data["files"], task_dir + task_data["files"], task_dir, tags ) instance.data["frameStart"] = \ instance.data["representations"][0]["frameStart"] @@ -108,7 +108,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["representations"][0]["frameEnd"] else: instance.data["representations"] = self._get_single_repre( - task_dir, task_data["files"] + task_dir, task_data["files"], tags ) self.log.info("instance.data:: {}".format(instance.data)) @@ -122,19 +122,19 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): subset = subset_template.format(**prepare_template_data(fill_pairs)) return subset - def 
_get_single_repre(self, task_dir, files): + def _get_single_repre(self, task_dir, files, tags): _, ext = os.path.splitext(files[0]) repre_data = { "name": ext[1:], "ext": ext[1:], "files": files[0], "stagingDir": task_dir, - "tags": ["review"] + "tags": tags } self.log.info("single file repre_data.data:: {}".format(repre_data)) return [repre_data] - def _process_sequence(self, files, task_dir): + def _process_sequence(self, files, task_dir, tags): """Prepare reprentations for sequence of files.""" collections, remainder = clique.assemble(files) assert len(collections) == 1, \ @@ -150,7 +150,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): "ext": ext[1:], "files": files, "stagingDir": task_dir, - "tags": ["review"] + "tags": tags } self.log.info("sequences repre_data.data:: {}".format(repre_data)) return [repre_data] @@ -165,7 +165,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): extension (str): without '.' Returns: - (family, [families], subset_template_name) tuple + (family, [families], subset_template_name, tags) tuple AssertionError if not matching family found """ task_obj = settings.get(task_type) @@ -187,7 +187,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): return found_family, \ content["families"], \ - content["subset_template_name"] + content["subset_template_name"], \ + content["tags"] def _get_version(self, asset_name, subset_name): """Returns version number or 0 for 'asset' and 'subset'""" diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index 8364b6a39d..a6916de144 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -9,6 +9,7 @@ "tvp" ], "families": [], + "tags": [], "subset_template_name": "" }, "render": { @@ -22,6 +23,9 @@ "families": [ "review" ], + "tags": [ + "ftrackreview" + ], "subset_template_name": "" } }, @@ -32,6 +36,7 @@ 
"aep" ], "families": [], + "tags": [], "subset_template_name": "" }, "render": { @@ -45,6 +50,9 @@ "families": [ "review" ], + "tags": [ + "ftrackreview" + ], "subset_template_name": "" } }, @@ -55,6 +63,7 @@ "psd" ], "families": [], + "tags": [], "subset_template_name": "" }, "image": { @@ -69,6 +78,9 @@ "families": [ "review" ], + "tags": [ + "ftrackreview" + ], "subset_template_name": "" } }, @@ -79,6 +91,7 @@ "tvp" ], "families": [], + "tags": [], "subset_template_name": "{family}{Variant}" }, "render": { @@ -92,6 +105,9 @@ "families": [ "review" ], + "tags": [ + "ftrackreview" + ], "subset_template_name": "{family}{Variant}" } }, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json index bf59cd030e..91337da2b2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_webpublisher.json @@ -48,6 +48,10 @@ "label": "Families", "object_type": "text" }, + { + "type": "schema", + "name": "schema_representation_tags" + }, { "type": "text", "key": "subset_template_name", From c20c992f7ea4debfa32e8a399e114e47bb338308 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Aug 2021 16:50:54 +0200 Subject: [PATCH 173/308] define hosts in CollectSceneVersion collector --- .../plugins/publish/collect_scene_version.py | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/collect_scene_version.py b/openpype/plugins/publish/collect_scene_version.py index 669e6752f3..ca12f2900c 100644 --- a/openpype/plugins/publish/collect_scene_version.py +++ b/openpype/plugins/publish/collect_scene_version.py @@ -11,14 +11,22 @@ class CollectSceneVersion(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder label = 'Collect Version' + hosts = [ + "aftereffects", + "blender", + "celaction", 
+ "fusion", + "harmony", + "hiero", + "houdini", + "maya", + "nuke", + "photoshop", + "resolve", + "tvpaint" + ] def process(self, context): - if "standalonepublisher" in context.data.get("host", []): - return - - if "unreal" in pyblish.api.registered_hosts(): - return - assert context.data.get('currentFile'), "Cannot get current file" filename = os.path.basename(context.data.get('currentFile')) From b2c06b937bc8d148ec8e81485232d9ecc93ac030 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Aug 2021 16:53:25 +0200 Subject: [PATCH 174/308] added collector for avalon host name --- openpype/plugins/publish/collect_host_name.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 openpype/plugins/publish/collect_host_name.py diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py new file mode 100644 index 0000000000..897c50e4d8 --- /dev/null +++ b/openpype/plugins/publish/collect_host_name.py @@ -0,0 +1,21 @@ +""" +Requires: + None +Provides: + context -> host (str) +""" +import os +import pyblish.api + + +class CollectHostName(pyblish.api.ContextPlugin): + """Collect avalon host name to context.""" + + label = "Collect Host Name" + order = pyblish.api.CollectorOrder + + def process(self, context): + # Don't override value if is already set + host_name = context.data.get("host") + if not host_name: + context.data["host"] = os.environ.get("AVALON_APP") From 64a186b437844711e0501a4a848c45ff13fa7d06 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Aug 2021 17:00:18 +0200 Subject: [PATCH 175/308] moved host collection from collect anatomy context data --- .../publish/collect_anatomy_context_data.py | 17 ++-------------- openpype/plugins/publish/collect_host_name.py | 20 +++++++++++++++++-- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 
f121760e27..33db00636a 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -62,23 +62,10 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): "asset": asset_entity["name"], "hierarchy": hierarchy.replace("\\", "/"), "task": task_name, - "username": context.data["user"] + "username": context.data["user"], + "app": context.data["host"] } - # Use AVALON_APP as first if available it is the same as host name - # - only if is not defined use AVALON_APP_NAME (e.g. on Farm) and - # set it back to AVALON_APP env variable - host_name = os.environ.get("AVALON_APP") - if not host_name: - app_manager = ApplicationManager() - app_name = os.environ.get("AVALON_APP_NAME") - if app_name: - app = app_manager.applications.get(app_name) - if app: - host_name = app.host_name - os.environ["AVALON_APP"] = host_name - context_data["app"] = host_name - datetime_data = context.data.get("datetimeData") or {} context_data.update(datetime_data) diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py index 897c50e4d8..17af9253c3 100644 --- a/openpype/plugins/publish/collect_host_name.py +++ b/openpype/plugins/publish/collect_host_name.py @@ -7,6 +7,8 @@ Provides: import os import pyblish.api +from openpype.lib import ApplicationManager + class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" @@ -15,7 +17,21 @@ class CollectHostName(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder def process(self, context): - # Don't override value if is already set host_name = context.data.get("host") + # Don't override value if is already set + if host_name: + return + + # Use AVALON_APP as first if available it is the same as host name + # - only if is not defined use AVALON_APP_NAME (e.g. 
on Farm) and + # set it back to AVALON_APP env variable + host_name = os.environ.get("AVALON_APP") if not host_name: - context.data["host"] = os.environ.get("AVALON_APP") + app_name = os.environ.get("AVALON_APP_NAME") + if app_name: + app_manager = ApplicationManager() + app = app_manager.applications.get(app_name) + if app: + host_name = app.host_name + + context.data["host"] = host_name From 46fd7ee628afaee99c71c8157180a60f3d861ea6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 19 Aug 2021 17:00:34 +0200 Subject: [PATCH 176/308] use context[host] in extract review and burnin --- openpype/plugins/publish/extract_burnin.py | 2 +- openpype/plugins/publish/extract_review.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 91e0a0f3ec..b0c6136694 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -96,7 +96,7 @@ class ExtractBurnin(openpype.api.Extractor): def main_process(self, instance): # TODO get these data from context - host_name = os.environ["AVALON_APP"] + host_name = instance.context["host"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index bdcd3b8e60..3b373bc1d6 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -89,7 +89,7 @@ class ExtractReview(pyblish.api.InstancePlugin): instance.data["representations"].remove(repre) def main_process(self, instance): - host_name = os.environ["AVALON_APP"] + host_name = instance.context["host"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) From 4a09a18275ec6b218f62f53a4f5b8e6cfd408d62 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 19 Aug 2021 17:37:33 +0200 Subject: [PATCH 177/308] Webpublisher - fix - 
status wasn't changed for reprocessed batches --- openpype/modules/webserver/webserver_cli.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 2eee20f855..8e4dfd229d 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -139,3 +139,13 @@ def reprocess_failed(upload_dir): log.info("response{}".format(r)) except: log.info("exception", exc_info=True) + + dbcon.update_one( + {"_id": batch["_id"]}, + {"$set": + { + "finish_date": datetime.now(), + "status": "sent_for_reprocessing", + "progress": 1 + }} + ) From 6e43fb1e44ebfe72c94d704785e264b4038eca69 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 20 Aug 2021 12:07:38 +0000 Subject: [PATCH 178/308] [Automated] Release --- CHANGELOG.md | 6 +++--- openpype/version.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef4ddeeb59..fc066fc7e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ # Changelog -## [3.3.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...3.3.1) **🐛 Bug fixes** @@ -37,6 +37,7 @@ - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) - Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) +- Anatomy schema validation [\#1864](https://github.com/pypeclub/OpenPype/pull/1864) - Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) - Settings list can use template or schema as object type 
[\#1815](https://github.com/pypeclub/OpenPype/pull/1815) @@ -50,7 +51,6 @@ - standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) - Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) - Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) -- Fix - validators for textures workfiles trigger only for textures workfiles [\#1913](https://github.com/pypeclub/OpenPype/pull/1913) - Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) - Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) - Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) diff --git a/openpype/version.py b/openpype/version.py index e804077e54..9868010a4b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.3.1-nightly.1" +__version__ = "3.3.1" From 080069ca9c18643276285904124f7508eca44d8a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 20 Aug 2021 16:00:34 +0200 Subject: [PATCH 179/308] Webpublisher - added review to enum, changed defaults This defaults result in creating working review --- .../settings/defaults/project_settings/webpublisher.json | 8 ++++---- .../schemas/schema_representation_tags.json | 5 ++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/project_settings/webpublisher.json b/openpype/settings/defaults/project_settings/webpublisher.json index a6916de144..f57b79a609 100644 --- a/openpype/settings/defaults/project_settings/webpublisher.json +++ b/openpype/settings/defaults/project_settings/webpublisher.json @@ -24,7 +24,7 @@ "review" ], "tags": [ - "ftrackreview" + "review" ], "subset_template_name": "" } @@ -51,7 +51,7 @@ 
"review" ], "tags": [ - "ftrackreview" + "review" ], "subset_template_name": "" } @@ -79,7 +79,7 @@ "review" ], "tags": [ - "ftrackreview" + "review" ], "subset_template_name": "" } @@ -106,7 +106,7 @@ "review" ], "tags": [ - "ftrackreview" + "review" ], "subset_template_name": "{family}{Variant}" } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index b65de747e5..7607e1a8c1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -8,7 +8,10 @@ "burnin": "Add burnins" }, { - "ftrackreview": "Add to Ftrack" + "review": "Create review" + }, + { + "ftrackreview": "Add review to Ftrack" }, { "delete": "Delete output" From 92ef09444695e16427d24b381ba0f513c90b3903 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 20 Aug 2021 17:18:44 +0200 Subject: [PATCH 180/308] Webpublisher - added path field to log documents --- .../publish/collect_published_files.py | 4 ++ .../publish/integrate_context_to_log.py | 39 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 0c89bde8a5..8861190003 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -96,6 +96,10 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["stagingDir"] = task_dir instance.data["source"] = "webpublisher" + instance.data["ctx_path"] = ctx["path"] # to store for logging + instance.data["batch_id"] = task_data["batch"] + + 
instance.data["user_email"] = task_data["user"] if is_sequence: diff --git a/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py new file mode 100644 index 0000000000..1dd57ffff9 --- /dev/null +++ b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py @@ -0,0 +1,39 @@ +import os + +from avalon import io +import pyblish.api +from openpype.lib import OpenPypeMongoConnection + + +class IntegrateContextToLog(pyblish.api.ContextPlugin): + """ Adds context information to log document for displaying in front end""" + + label = "Integrate Context to Log" + order = pyblish.api.IntegratorOrder - 0.1 + hosts = ["webpublisher"] + + def process(self, context): + self.log.info("Integrate Context to Log") + + mongo_client = OpenPypeMongoConnection.get_mongo_client() + database_name = os.environ["OPENPYPE_DATABASE_NAME"] + dbcon = mongo_client[database_name]["webpublishes"] + + for instance in context: + self.log.info("ctx_path: {}".format(instance.data.get("ctx_path"))) + self.log.info("batch_id: {}".format(instance.data.get("batch_id"))) + if instance.data.get("ctx_path") and instance.data.get("batch_id"): + self.log.info("Updating log record") + dbcon.update_one( + { + "batch_id": instance.data.get("batch_id"), + "status": "in_progress" + }, + {"$set": + { + "path": instance.data.get("ctx_path") + + }} + ) + + return \ No newline at end of file From df310da1411ef10331daad7a0271efe1cb7cf429 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 20 Aug 2021 17:57:28 +0200 Subject: [PATCH 181/308] add update dialog --- igniter/__init__.py | 23 +++++ igniter/bootstrap_repos.py | 6 ++ igniter/update_thread.py | 61 +++++++++++++ igniter/update_window.py | 173 +++++++++++++++++++++++++++++++++++++ openpype/cli.py | 2 +- start.py | 12 ++- 6 files changed, 274 insertions(+), 3 deletions(-) create mode 100644 igniter/update_thread.py create mode 100644 
igniter/update_window.py diff --git a/igniter/__init__.py b/igniter/__init__.py index 73e315d88a..defd45e233 100644 --- a/igniter/__init__.py +++ b/igniter/__init__.py @@ -31,8 +31,31 @@ def open_dialog(): return d.result() +def open_update_window(openpype_version): + """Open update window.""" + if os.getenv("OPENPYPE_HEADLESS_MODE"): + print("!!! Can't open dialog in headless mode. Exiting.") + sys.exit(1) + from Qt import QtWidgets, QtCore + from .update_window import UpdateWindow + + scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None) + if scale_attr is not None: + QtWidgets.QApplication.setAttribute(scale_attr) + + app = QtWidgets.QApplication(sys.argv) + + d = UpdateWindow(version=openpype_version) + d.open() + + app.exec_() + version_path = d.get_version_path() + return version_path + + __all__ = [ "BootstrapRepos", "open_dialog", + "open_update_window", "version" ] diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 535bb723bc..c527de0066 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -966,6 +966,7 @@ class BootstrapRepos: # test if destination directory already exist, if so lets delete it. if destination.exists() and force: + self._print("removing existing directory") try: shutil.rmtree(destination) except OSError as e: @@ -975,6 +976,7 @@ class BootstrapRepos: raise OpenPypeVersionIOError( f"cannot remove existing {destination}") from e elif destination.exists() and not force: + self._print("destination directory already exists") raise OpenPypeVersionExists(f"{destination} already exist.") else: # create destination parent directories even if they don't exist. @@ -984,6 +986,7 @@ class BootstrapRepos: if openpype_version.path.is_dir(): # create zip inside temporary directory. 
self._print("Creating zip from directory ...") + self._progress_callback(0) with tempfile.TemporaryDirectory() as temp_dir: temp_zip = \ Path(temp_dir) / f"openpype-v{openpype_version}.zip" @@ -1009,13 +1012,16 @@ class BootstrapRepos: raise OpenPypeVersionInvalid("Invalid file format") if not self.is_inside_user_data(openpype_version.path): + self._progress_callback(35) openpype_version.path = self._copy_zip( openpype_version.path, destination) # extract zip there self._print("extracting zip to destination ...") with ZipFile(openpype_version.path, "r") as zip_ref: + self._progress_callback(75) zip_ref.extractall(destination) + self._progress_callback(100) return destination diff --git a/igniter/update_thread.py b/igniter/update_thread.py new file mode 100644 index 0000000000..f4fc729faf --- /dev/null +++ b/igniter/update_thread.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +"""Working thread for update.""" +from Qt.QtCore import QThread, Signal, QObject # noqa + +from .bootstrap_repos import ( + BootstrapRepos, + OpenPypeVersion +) + + +class UpdateThread(QThread): + """Install Worker thread. + + This class takes care of finding OpenPype version on user entered path + (or loading this path from database). If nothing is entered by user, + OpenPype will create its zip files from repositories that comes with it. + + If path contains plain repositories, they are zipped and installed to + user data dir. + + """ + progress = Signal(int) + message = Signal((str, bool)) + + def __init__(self, parent=None): + self._result = None + self._openpype_version = None + QThread.__init__(self, parent) + + def set_version(self, openpype_version: OpenPypeVersion): + self._openpype_version = openpype_version + + def result(self): + """Result of finished installation.""" + return self._result + + def _set_result(self, value): + if self._result is not None: + raise AssertionError("BUG: Result was set more than once!") + self._result = value + + def run(self): + """Thread entry point. 
+ + Using :class:`BootstrapRepos` to either install OpenPype as zip files + or copy them from location specified by user or retrieved from + database. + """ + bs = BootstrapRepos( + progress_callback=self.set_progress, message=self.message) + version_path = bs.install_version(self._openpype_version) + self._set_result(version_path) + + def set_progress(self, progress: int) -> None: + """Helper to set progress bar. + + Args: + progress (int): Progress in percents. + + """ + self.progress.emit(progress) diff --git a/igniter/update_window.py b/igniter/update_window.py new file mode 100644 index 0000000000..2edb3f2c6b --- /dev/null +++ b/igniter/update_window.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +"""Progress window to show when OpenPype is updating/installing locally.""" +import os +import sys +from pathlib import Path +from .update_thread import UpdateThread +from Qt import QtCore, QtGui, QtWidgets # noqa +from .bootstrap_repos import OpenPypeVersion + + +def load_stylesheet(path: str = None) -> str: + """Load css style sheet. + + Args: + path (str, optional): Path to stylesheet. If none, `stylesheet.css` from + current package's path is used. 
+ Returns: + str: content of the stylesheet + + """ + if path: + stylesheet_path = Path(path) + else: + stylesheet_path = Path(os.path.dirname(__file__)) / "stylesheet.css" + + return stylesheet_path.read_text() + + +class NiceProgressBar(QtWidgets.QProgressBar): + def __init__(self, parent=None): + super(NiceProgressBar, self).__init__(parent) + self._real_value = 0 + + def setValue(self, value): + self._real_value = value + if value != 0 and value < 11: + value = 11 + + super(NiceProgressBar, self).setValue(value) + + def value(self): + return self._real_value + + def text(self): + return "{} %".format(self._real_value) + + +class UpdateWindow(QtWidgets.QDialog): + """OpenPype update window.""" + + _width = 500 + _height = 100 + + def __init__(self, version: OpenPypeVersion, parent=None): + super(UpdateWindow, self).__init__(parent) + self._openpype_version = version + self._result_version_path = None + + self.setWindowTitle( + f"OpenPype is updating ..." + ) + self.setModal(True) + self.setWindowFlags( + QtCore.Qt.WindowMinimizeButtonHint + ) + + current_dir = os.path.dirname(os.path.abspath(__file__)) + roboto_font_path = os.path.join(current_dir, "RobotoMono-Regular.ttf") + poppins_font_path = os.path.join(current_dir, "Poppins") + icon_path = os.path.join(current_dir, "openpype_icon.png") + + # Install roboto font + QtGui.QFontDatabase.addApplicationFont(roboto_font_path) + for filename in os.listdir(poppins_font_path): + if os.path.splitext(filename)[1] == ".ttf": + QtGui.QFontDatabase.addApplicationFont(filename) + + # Load logo + pixmap_openpype_logo = QtGui.QPixmap(icon_path) + # Set logo as icon of window + self.setWindowIcon(QtGui.QIcon(pixmap_openpype_logo)) + + self._pixmap_openpype_logo = pixmap_openpype_logo + + self._update_thread = None + + self.resize(QtCore.QSize(self._width, self._height)) + self._init_ui() + + # Set stylesheet + self.setStyleSheet(load_stylesheet()) + self._run_update() + + def _init_ui(self): + + # Main info + # 
-------------------------------------------------------------------- + main_label = QtWidgets.QLabel( + f"OpenPype is updating to {self._openpype_version}", self) + main_label.setWordWrap(True) + main_label.setObjectName("MainLabel") + + # Progress bar + # -------------------------------------------------------------------- + progress_bar = NiceProgressBar(self) + progress_bar.setAlignment(QtCore.Qt.AlignCenter) + progress_bar.setTextVisible(False) + + # add all to main + main = QtWidgets.QVBoxLayout(self) + main.addSpacing(15) + main.addWidget(main_label, 0) + main.addSpacing(15) + main.addWidget(progress_bar, 0) + main.addSpacing(15) + + self._progress_bar = progress_bar + + def _run_update(self): + """Start install process. + + This will once again validate entered path and mongo if ok, start + working thread that will do actual job. + """ + # Check if install thread is not already running + if self._update_thread and self._update_thread.isRunning(): + return + self._progress_bar.setRange(0, 0) + update_thread = UpdateThread(self) + update_thread.set_version(self._openpype_version) + update_thread.message.connect(self.update_console) + update_thread.progress.connect(self._update_progress) + update_thread.finished.connect(self._installation_finished) + + self._update_thread = update_thread + + update_thread.start() + + def get_version_path(self): + return self._result_version_path + + def _installation_finished(self): + status = self._update_thread.result() + self._result_version_path = status + self._progress_bar.setRange(0, 1) + self._update_progress(100) + QtWidgets.QApplication.processEvents() + self.done(0) + + def _update_progress(self, progress: int): + # not updating progress as we are not able to determine it + # correctly now. Progress bar is set to un-deterministic mode + # until we are able to get progress in better way. 
+ """ + self._progress_bar.setRange(0, 0) + self._progress_bar.setValue(progress) + text_visible = self._progress_bar.isTextVisible() + if progress == 0: + if text_visible: + self._progress_bar.setTextVisible(False) + elif not text_visible: + self._progress_bar.setTextVisible(True) + """ + return + + def update_console(self, msg: str, error: bool = False) -> None: + """Display message in console. + + Args: + msg (str): message. + error (bool): if True, print it red. + """ + print(msg) diff --git a/openpype/cli.py b/openpype/cli.py index be14a8aa7d..632c3d3386 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -18,7 +18,7 @@ from .pype_commands import PypeCommands @click.option("--list-versions", is_flag=True, expose_value=False, help=("list all detected versions. Use With `--use-staging " "to list staging versions.")) -@click.option("--validate-version", +@click.option("--validate-version", expose_value=False, help="validate given version integrity") def main(ctx): """Pype is main command serving as entry point to pipeline system. 
diff --git a/start.py b/start.py index a5f662d39b..ca48fdf3b7 100644 --- a/start.py +++ b/start.py @@ -610,8 +610,16 @@ def _find_frozen_openpype(use_version: str = None, if not is_inside: # install latest version to user data dir - version_path = bootstrap.install_version( - openpype_version, force=True) + if not os.getenv("OPENPYPE_HEADLESS"): + import igniter + version_path = igniter.open_update_window(openpype_version) + else: + version_path = bootstrap.install_version( + openpype_version, force=True) + + openpype_version.path = version_path + _initialize_environment(openpype_version) + return openpype_version.path if openpype_version.path.is_file(): _print(">>> Extracting zip file ...") From 2b11e589c5610d5d9ab0232ed8a17bf0ca295949 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 20 Aug 2021 18:06:57 +0200 Subject: [PATCH 182/308] handle igniter dialog --- igniter/update_window.py | 5 ++--- start.py | 11 +++++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/igniter/update_window.py b/igniter/update_window.py index 2edb3f2c6b..a49a84cfee 100644 --- a/igniter/update_window.py +++ b/igniter/update_window.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Progress window to show when OpenPype is updating/installing locally.""" import os -import sys from pathlib import Path from .update_thread import UpdateThread from Qt import QtCore, QtGui, QtWidgets # noqa @@ -12,8 +11,8 @@ def load_stylesheet(path: str = None) -> str: """Load css style sheet. Args: - path (str, optional): Path to stylesheet. If none, `stylesheet.css` from - current package's path is used. + path (str, optional): Path to stylesheet. If none, `stylesheet.css` + from current package's path is used. 
Returns: str: content of the stylesheet diff --git a/start.py b/start.py index ca48fdf3b7..27dc105394 100644 --- a/start.py +++ b/start.py @@ -397,6 +397,9 @@ def _process_arguments() -> tuple: # handle igniter # this is helper to run igniter before anything else if "igniter" in sys.argv: + if os.getenv("OPENPYPE_HEADLESS"): + _print("!!! Cannot open Igniter dialog in headless mode.") + sys.exit(1) import igniter return_code = igniter.open_dialog() @@ -444,6 +447,11 @@ def _determine_mongodb() -> str: if not openpype_mongo: _print("*** No DB connection string specified.") + if os.getenv("OPENPYPE_HEADLESS"): + _print("!!! Cannot open Igniter dialog in headless mode.") + _print( + "!!! Please use `OPENPYPE_MONGO` to specify server address.") + sys.exit(1) _print("--- launching setup UI ...") result = igniter.open_dialog() @@ -547,6 +555,9 @@ def _find_frozen_openpype(use_version: str = None, except IndexError: # no OpenPype version found, run Igniter and ask for them. _print('*** No OpenPype versions found.') + if os.getenv("OPENPYPE_HEADLESS"): + _print("!!! Cannot open Igniter dialog in headless mode.") + sys.exit(1) _print("--- launching setup UI ...") import igniter return_code = igniter.open_dialog() From fcb2640c9492722171270b6328469c71dbbcf7c2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 20 Aug 2021 18:12:38 +0200 Subject: [PATCH 183/308] fix env var name --- start.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/start.py b/start.py index 27dc105394..9e60d79f04 100644 --- a/start.py +++ b/start.py @@ -397,7 +397,7 @@ def _process_arguments() -> tuple: # handle igniter # this is helper to run igniter before anything else if "igniter" in sys.argv: - if os.getenv("OPENPYPE_HEADLESS"): + if os.getenv("OPENPYPE_HEADLESS_MODE"): _print("!!! 
Cannot open Igniter dialog in headless mode.") sys.exit(1) import igniter @@ -447,7 +447,7 @@ def _determine_mongodb() -> str: if not openpype_mongo: _print("*** No DB connection string specified.") - if os.getenv("OPENPYPE_HEADLESS"): + if os.getenv("OPENPYPE_HEADLESS_MODE"): _print("!!! Cannot open Igniter dialog in headless mode.") _print( "!!! Please use `OPENPYPE_MONGO` to specify server address.") @@ -555,7 +555,7 @@ def _find_frozen_openpype(use_version: str = None, except IndexError: # no OpenPype version found, run Igniter and ask for them. _print('*** No OpenPype versions found.') - if os.getenv("OPENPYPE_HEADLESS"): + if os.getenv("OPENPYPE_HEADLESS_MODE"): _print("!!! Cannot open Igniter dialog in headless mode.") sys.exit(1) _print("--- launching setup UI ...") @@ -621,7 +621,7 @@ def _find_frozen_openpype(use_version: str = None, if not is_inside: # install latest version to user data dir - if not os.getenv("OPENPYPE_HEADLESS"): + if not os.getenv("OPENPYPE_HEADLESS_MODE"): import igniter version_path = igniter.open_update_window(openpype_version) else: From 0c3075c0c62743680a262e6c5ed7442e10763414 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 21 Aug 2021 03:39:57 +0000 Subject: [PATCH 184/308] [Automated] Bump version --- CHANGELOG.md | 17 ++++++++++++++--- openpype/version.py | 2 +- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fc066fc7e0..a00e914f40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,21 @@ # Changelog +## [3.4.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD) + +**🚀 Enhancements** + +- Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) + +**Merged pull requests:** + +- Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) +- Settings UI: Breadcrumbs in settings 
[\#1932](https://github.com/pypeclub/OpenPype/pull/1932) + ## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.0...3.3.1) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1) **🐛 Bug fixes** @@ -37,7 +50,6 @@ - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) - Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) -- Anatomy schema validation [\#1864](https://github.com/pypeclub/OpenPype/pull/1864) - Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) - Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) @@ -66,7 +78,6 @@ **Merged pull requests:** - Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) -- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) - Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) - Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) - Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) diff --git a/openpype/version.py b/openpype/version.py index 9868010a4b..8dbb6b38cf 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.3.1" +__version__ = "3.4.0-nightly.1" From 10fa591e683eb785dea76c9ea300fe6567bdd033 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 23 Aug 2021 10:45:33 +0200 Subject: [PATCH 185/308] Webpublisher - added documentation --- 
openpype/cli.py | 7 ++ .../docs/admin_webserver_for_webpublisher.md | 82 +++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 website/docs/admin_webserver_for_webpublisher.md diff --git a/openpype/cli.py b/openpype/cli.py index 8dc32b307a..28195008cc 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -99,6 +99,13 @@ def eventserver(debug, @click.option("-e", "--executable", help="Executable") @click.option("-u", "--upload_dir", help="Upload dir") def webpublisherwebserver(debug, executable, upload_dir): + """Starts webserver for communication with Webpublish FR via command line + + OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND + FTRACK_BOT_API_KEY provided with api key from Ftrack. + + Expect "pype.club" user created on Ftrack. + """ if debug: os.environ['OPENPYPE_DEBUG'] = "3" diff --git a/website/docs/admin_webserver_for_webpublisher.md b/website/docs/admin_webserver_for_webpublisher.md new file mode 100644 index 0000000000..748b269ad7 --- /dev/null +++ b/website/docs/admin_webserver_for_webpublisher.md @@ -0,0 +1,82 @@ +--- +id: admin_webserver_for_webpublisher +title: Webserver for webpublisher +sidebar_label: Webserver for webpublisher +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +Running Openpype webserver is needed as a backend part for Web publishing. +Any OS supported by Openpype could be used as a host server. + +Webpublishing consists of two sides, Front end (FE) and Openpype backend. This documenation is only targeted on OP side. + +It is expected that FE and OP will live on two separate servers, FE publicly available, OP safely in customer network. + +# Requirements for servers +- OP server allows access to its `8079` port for FE. (It is recommended to whitelist only FE IP.) +- have shared folder for published resources (images, workfiles etc) on both servers + +# Prepare Ftrack +Current webpublish process expects authentication via Slack. 
It is expected that customer has users created on a Ftrack +with same email addresses as on Slack. As some customer might have usernames different from emails, conversion from email to username is needed. + +For this "pype.club" user needs to be present on Ftrack, creation of this user should be standard part of Ftrack preparation for Openpype. +Next create API key on Ftrack, store this information temporarily as you won't have access to this key after creation. + + +# Prepare Openpype + +Deploy OP build distribution (Openpype Igniter) on an OS of your choice. + +##Run webserver as a Linux service: + +(This expects that OP Igniter is deployed to `opt/openpype` and log should be stored in `/tmp/openpype.log`) + +- create file `sudo vi /opt/openpype/webpublisher_webserver.sh` + +- paste content +```sh +#!/usr/bin/env bash +export OPENPYPE_DEBUG=3 +export FTRACK_BOT_API_KEY=YOUR_API_KEY +export PYTHONDONTWRITEBYTECODE=1 +export OPENPYPE_MONGO=YOUR_MONGODB_CONNECTION + +pushd /opt/openpype +./openpype_console webpublisherwebserver --upload_dir YOUR_SHARED_FOLDER_ON_HOST --executable /opt/openpype/openpype_console > /tmp/openpype.log 2>&1 +``` + +1. create service file `sudo vi /etc/systemd/system/openpye-webserver.service` + +2. paste content +```sh +[Unit] +Description=Run OpenPype Ftrack Webserver Service +After=network.target + +[Service] +Type=idle +ExecStart=/opt/openpype/webpublisher_webserver.sh +Restart=on-failure +RestartSec=10s +StandardOutput=append:/tmp/openpype.log +StandardError=append:/tmp/openpype.log + +[Install] +WantedBy=multi-user.target +``` + +5. change file permission: + `sudo chmod 0755 /etc/systemd/system/openpype-webserver.service` + +6. enable service: + `sudo systemctl enable openpype-webserver` + +7. start service: + `sudo systemctl start openpype-webserver` + +8. Check `/tmp/openpype.log` if OP got started + +(Note: service could be restarted by `service openpype-webserver restart` - this will result in purge of current log file!) 
\ No newline at end of file From eabfc473acc0297ec8c2b7af355acea607b98f10 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 23 Aug 2021 10:48:54 +0200 Subject: [PATCH 186/308] Hound --- .../webpublisher/plugins/publish/collect_published_files.py | 5 +++-- .../webpublisher/plugins/publish/integrate_context_to_log.py | 3 +-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 8861190003..59c315861e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -96,10 +96,11 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["stagingDir"] = task_dir instance.data["source"] = "webpublisher" - instance.data["ctx_path"] = ctx["path"] # to store for logging + # to store logging info into DB openpype.webpublishes + instance.data["ctx_path"] = ctx["path"] instance.data["batch_id"] = task_data["batch"] - + # to convert from email provided into Ftrack username instance.data["user_email"] = task_data["user"] if is_sequence: diff --git a/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py index 1dd57ffff9..419c065e16 100644 --- a/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py +++ b/openpype/hosts/webpublisher/plugins/publish/integrate_context_to_log.py @@ -1,6 +1,5 @@ import os -from avalon import io import pyblish.api from openpype.lib import OpenPypeMongoConnection @@ -36,4 +35,4 @@ class IntegrateContextToLog(pyblish.api.ContextPlugin): }} ) - return \ No newline at end of file + return From 5d182faae26797764b2cdb98ee369c6600f5679c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 23 Aug 2021 12:25:44 +0200 Subject: [PATCH 187/308] changed context key from "host" to 
"hostName" --- openpype/plugins/publish/collect_anatomy_context_data.py | 2 +- openpype/plugins/publish/collect_host_name.py | 4 ++-- openpype/plugins/publish/extract_burnin.py | 2 +- openpype/plugins/publish/extract_review.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 33db00636a..ec88d5669d 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -63,7 +63,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): "hierarchy": hierarchy.replace("\\", "/"), "task": task_name, "username": context.data["user"], - "app": context.data["host"] + "app": context.data["hostName"] } datetime_data = context.data.get("datetimeData") or {} diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py index 17af9253c3..e1b7eb17c3 100644 --- a/openpype/plugins/publish/collect_host_name.py +++ b/openpype/plugins/publish/collect_host_name.py @@ -17,7 +17,7 @@ class CollectHostName(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder def process(self, context): - host_name = context.data.get("host") + host_name = context.data.get("hostName") # Don't override value if is already set if host_name: return @@ -34,4 +34,4 @@ class CollectHostName(pyblish.api.ContextPlugin): if app: host_name = app.host_name - context.data["host"] = host_name + context.data["hostName"] = host_name diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index b0c6136694..8fef5eaacb 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -96,7 +96,7 @@ class ExtractBurnin(openpype.api.Extractor): def main_process(self, instance): # TODO get these data from context - host_name = instance.context["host"] + host_name = 
instance.context["hostName"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 3b373bc1d6..cdd40af027 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -89,7 +89,7 @@ class ExtractReview(pyblish.api.InstancePlugin): instance.data["representations"].remove(repre) def main_process(self, instance): - host_name = instance.context["host"] + host_name = instance.context["hostName"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) From 72c3bab5ec5ec06167afb1705a0d4fa7c47f31a4 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 23 Aug 2021 12:31:55 +0200 Subject: [PATCH 188/308] enhancement branches don't bump minor version --- .github/workflows/prerelease.yml | 2 +- tools/ci_tools.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/prerelease.yml b/.github/workflows/prerelease.yml index d0853e74d6..82f9a6ae9d 100644 --- a/.github/workflows/prerelease.yml +++ b/.github/workflows/prerelease.yml @@ -47,7 +47,7 @@ jobs: enhancementLabel: '**🚀 Enhancements**' bugsLabel: '**🐛 Bug fixes**' deprecatedLabel: '**⚠️ Deprecations**' - addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}' + addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"### 🆕 New features","labels":["feature"]},}' issues: false issuesWoLabels: false sinceTag: "3.0.0" diff --git a/tools/ci_tools.py b/tools/ci_tools.py index 436551c243..3c1aaae991 100644 --- a/tools/ci_tools.py +++ b/tools/ci_tools.py @@ -36,7 +36,7 @@ def get_log_since_tag(version): def release_type(log): regex_minor = ["feature/", "(feat)"] - regex_patch = 
["bugfix/", "fix/", "(fix)"] + regex_patch = ["bugfix/", "fix/", "(fix)", "enhancement/"] for reg in regex_minor: if re.search(reg, log): return "minor" From c52f0535f6688885715a43208ccb8403e2521183 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 23 Aug 2021 19:04:01 +0200 Subject: [PATCH 189/308] fix tools --- openpype/lib/applications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 1644b9c977..d7baf1d27b 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1105,7 +1105,7 @@ def prepare_host_environments(data, implementation_envs=True): asset_doc = data.get("asset_doc") # Add tools environments groups_by_name = {} - tool_by_group_name = collections.defaultdict(list) + tool_by_group_name = collections.defaultdict(dict) if asset_doc: # Make sure each tool group can be added only once for key in asset_doc["data"].get("tools_env") or []: From 14d8789bff9acfc7d91abaa7eef322be01c55f0a Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 23 Aug 2021 19:06:24 +0200 Subject: [PATCH 190/308] second fix of tools --- openpype/lib/applications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index d7baf1d27b..71ab2eac61 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1120,7 +1120,7 @@ def prepare_host_environments(data, implementation_envs=True): environments.append(group.environment) added_env_keys.add(group_name) for tool_name in sorted(tool_by_group_name[group_name].keys()): - tool = tool_by_group_name[tool_name] + tool = tool_by_group_name[group_name][tool_name] environments.append(tool.environment) added_env_keys.add(tool.name) From 9db6dcd2eea263f9106eb6f15c0531a7c248ffbf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Aug 2021 17:07:53 +0000 Subject: [PATCH 191/308] Bump 
url-parse from 1.5.1 to 1.5.3 in /website Bumps [url-parse](https://github.com/unshiftio/url-parse) from 1.5.1 to 1.5.3. - [Release notes](https://github.com/unshiftio/url-parse/releases) - [Commits](https://github.com/unshiftio/url-parse/compare/1.5.1...1.5.3) --- updated-dependencies: - dependency-name: url-parse dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index a63bf37731..c4055f15d1 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -8341,9 +8341,9 @@ url-parse-lax@^3.0.0: prepend-http "^2.0.0" url-parse@^1.4.3, url-parse@^1.4.7: - version "1.5.1" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.1.tgz#d5fa9890af8a5e1f274a2c98376510f6425f6e3b" - integrity sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q== + version "1.5.3" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" + integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" From a3f106736456755f7e7a9d4399eba68cc54d321a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 24 Aug 2021 11:08:44 +0200 Subject: [PATCH 192/308] Webpublisher - webserver ip is configurable In some cases webserver needs to listen on specific ip (because of Docker) --- openpype/modules/webserver/server.py | 5 ++++- openpype/modules/webserver/webserver_cli.py | 5 +++-- openpype/modules/webserver/webserver_module.py | 5 ++++- website/docs/admin_webserver_for_webpublisher.md | 1 + 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/openpype/modules/webserver/server.py b/openpype/modules/webserver/server.py index 65c5795995..9d99e1c7a3 100644 --- a/openpype/modules/webserver/server.py +++ b/openpype/modules/webserver/server.py @@ -1,5 +1,6 
@@ import threading import asyncio +import os from aiohttp import web @@ -110,7 +111,9 @@ class WebServerThread(threading.Thread): """ Starts runner and TCPsite """ self.runner = web.AppRunner(self.manager.app) await self.runner.setup() - self.site = web.TCPSite(self.runner, 'localhost', self.port) + host_ip = os.environ.get("WEBSERVER_HOST_IP") or 'localhost' + log.info("host_ip:: {}".format(os.environ.get("WEBSERVER_HOST_IP"))) + self.site = web.TCPSite(self.runner, host_ip, self.port) await self.site.start() def stop(self): diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/modules/webserver/webserver_cli.py index 8e4dfd229d..24bd28ba7d 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/modules/webserver/webserver_cli.py @@ -19,7 +19,7 @@ from .webpublish_routes import ( from openpype.api import get_system_settings -SERVER_URL = "http://172.17.0.1:8079" # machine is not listening on localhost +# SERVER_URL = "http://172.17.0.1:8079" # machine is not listening on localhost log = PypeLogger().get_logger("webserver_gui") @@ -129,7 +129,8 @@ def reprocess_failed(upload_dir): }} ) continue - server_url = "{}/api/webpublish/batch".format(SERVER_URL) + server_url = "{}/api/webpublish/batch".format( + os.environ["OPENPYPE_WEBSERVER_URL"]) with open(batch_url) as f: data = json.loads(f.read()) diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py index 4832038575..10508265da 100644 --- a/openpype/modules/webserver/webserver_module.py +++ b/openpype/modules/webserver/webserver_module.py @@ -79,7 +79,10 @@ class WebServerModule(PypeModule, ITrayService): self.server_manager.on_stop_callbacks.append( self.set_service_failed_icon ) - webserver_url = "http://localhost:{}".format(self.port) + # in a case that webserver should listen on specific ip (webpublisher) + self.log.info("module host_ip:: {}".format(os.environ.get("WEBSERVER_HOST_IP"))) + host_ip = 
os.environ.get("WEBSERVER_HOST_IP") or 'localhost' + webserver_url = "http://{}:{}".format(host_ip, self.port) os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url @staticmethod diff --git a/website/docs/admin_webserver_for_webpublisher.md b/website/docs/admin_webserver_for_webpublisher.md index 748b269ad7..2b23033595 100644 --- a/website/docs/admin_webserver_for_webpublisher.md +++ b/website/docs/admin_webserver_for_webpublisher.md @@ -40,6 +40,7 @@ Deploy OP build distribution (Openpype Igniter) on an OS of your choice. ```sh #!/usr/bin/env bash export OPENPYPE_DEBUG=3 +export WEBSERVER_HOST_IP=localhost export FTRACK_BOT_API_KEY=YOUR_API_KEY export PYTHONDONTWRITEBYTECODE=1 export OPENPYPE_MONGO=YOUR_MONGODB_CONNECTION From 6b6877e76ed934b22bfc3b4206de7ed16984a52e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 15:52:49 +0200 Subject: [PATCH 193/308] fixed get_general_environments function --- openpype/settings/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py index b5810deef4..9797458fd5 100644 --- a/openpype/settings/__init__.py +++ b/openpype/settings/__init__.py @@ -2,6 +2,7 @@ from .exceptions import ( SaveWarningExc ) from .lib import ( + get_general_environments, get_system_settings, get_project_settings, get_current_project_settings, @@ -18,6 +19,7 @@ from .entities import ( __all__ = ( "SaveWarningExc", + "get_general_environments", "get_system_settings", "get_project_settings", "get_current_project_settings", From cf811c7e0f56353af0a3a18e01593db310d1edce Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 15:53:03 +0200 Subject: [PATCH 194/308] added addons_path key to settings --- .../settings/defaults/system_settings/modules.json | 5 +++++ .../schemas/system_schema/schema_modules.json | 12 ++++++++++++ 2 files changed, 17 insertions(+) diff --git a/openpype/settings/defaults/system_settings/modules.json 
b/openpype/settings/defaults/system_settings/modules.json index 3a70b90590..12cca7ccf1 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -1,4 +1,9 @@ { + "addon_paths": { + "windows": [], + "darwin": [], + "linux": [] + }, "avalon": { "AVALON_TIMEOUT": 1000, "AVALON_THUMBNAIL_ROOT": { diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 75c08b2cd9..0e52cea69e 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -5,6 +5,18 @@ "collapsible": true, "is_file": true, "children": [ + { + "type": "path", + "key": "addon_paths", + "label": "OpenPype AddOn Paths", + "use_label_wrap": true, + "multiplatform": true, + "multipath": true, + "require_restart": true + }, + { + "type": "separator" + }, { "type": "dict", "key": "avalon", From b92621a270704bb8b61842d977d883ea230b304f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 15:53:19 +0200 Subject: [PATCH 195/308] don't crash if path does not exists --- openpype/modules/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 3d3d7ae6cb..e407a34606 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -165,6 +165,9 @@ def _load_interfaces(): os.path.join(get_default_modules_dir(), "interfaces.py") ) for dirpath in dirpaths: + if not os.path.exists(dirpath): + continue + for filename in os.listdir(dirpath): if filename in ("__pycache__", ): continue From bf1a5c85ccf4db4d3be2c99a7ff3f1c9d5cdf519 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 15:54:08 +0200 Subject: [PATCH 196/308] added get_dynamic_modules_dirs to be able get paths to openpype addons --- openpype/modules/base.py | 19 ++++++++++++++++--- 1 file changed, 16 
insertions(+), 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index e407a34606..a3269e99e9 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -115,11 +115,24 @@ def get_default_modules_dir(): return os.path.join(current_dir, "default_modules") +def get_dynamic_modules_dirs(): + output = [] + return output + + def get_module_dirs(): """List of paths where OpenPype modules can be found.""" - dirpaths = [ - get_default_modules_dir() - ] + _dirpaths = [] + _dirpaths.append(get_default_modules_dir()) + _dirpaths.extend(get_dynamic_modules_dirs()) + + dirpaths = [] + for path in _dirpaths: + if not path: + continue + normalized = os.path.normpath(path) + if normalized not in dirpaths: + dirpaths.append(normalized) return dirpaths From e3754a85a662dbcbaf2d06cc8285638b7ea9d7d2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 15:54:23 +0200 Subject: [PATCH 197/308] implemented logic of dynamic addons paths --- openpype/modules/base.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index a3269e99e9..d3b83e85b1 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -5,6 +5,7 @@ import sys import time import inspect import logging +import platform import threading import collections from uuid import uuid4 @@ -13,6 +14,7 @@ import six import openpype from openpype.settings import get_system_settings +from openpype.settings.lib import get_studio_system_settings_overrides from openpype.lib import PypeLogger @@ -117,6 +119,21 @@ def get_default_modules_dir(): def get_dynamic_modules_dirs(): output = [] + value = get_studio_system_settings_overrides() + for key in ("modules", "addon_paths", platform.system().lower()): + if key not in value: + return output + value = value[key] + + for path in value: + if not path: + continue + + try: + path = path.format(**os.environ) + except Exception: + pass + output.append(path) return 
output From 2495cffd509a51838fc1c8c5c77d05a007794322 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 16:44:58 +0200 Subject: [PATCH 198/308] don't crash whole openpype on broken addon/module --- openpype/modules/base.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index d3b83e85b1..9df9b3a97b 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -305,12 +305,19 @@ def _load_modules(): # TODO add more logic how to define if folder is module or not # - check manifest and content of manifest - if os.path.isdir(fullpath): - import_module_from_dirpath(dirpath, filename, modules_key) + try: + if os.path.isdir(fullpath): + import_module_from_dirpath(dirpath, filename, modules_key) - elif ext in (".py", ): - module = import_filepath(fullpath) - setattr(openpype_modules, basename, module) + elif ext in (".py", ): + module = import_filepath(fullpath) + setattr(openpype_modules, basename, module) + + except Exception: + log.error( + "Failed to import '{}'.".format(fullpath), + exc_info=True + ) class _OpenPypeInterfaceMeta(ABCMeta): From bd791c971985bd2229c256d8946f808733307597 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 17:08:44 +0200 Subject: [PATCH 199/308] moved few settings constants to constants.py --- openpype/settings/__init__.py | 25 +++++++++++++++++++++++++ openpype/settings/constants.py | 9 ++++++++- openpype/settings/entities/lib.py | 7 ++++--- 3 files changed, 37 insertions(+), 4 deletions(-) diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py index 9797458fd5..0adb5db0bd 100644 --- a/openpype/settings/__init__.py +++ b/openpype/settings/__init__.py @@ -1,3 +1,16 @@ +from .constants import ( + GLOBAL_SETTINGS_KEY, + SYSTEM_SETTINGS_KEY, + PROJECT_SETTINGS_KEY, + PROJECT_ANATOMY_KEY, + LOCAL_SETTING_KEY, + + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS, + + KEY_ALLOWED_SYMBOLS, + 
KEY_REGEX +) from .exceptions import ( SaveWarningExc ) @@ -17,6 +30,18 @@ from .entities import ( __all__ = ( + "GLOBAL_SETTINGS_KEY", + "SYSTEM_SETTINGS_KEY", + "PROJECT_SETTINGS_KEY", + "PROJECT_ANATOMY_KEY", + "LOCAL_SETTING_KEY", + + "SCHEMA_KEY_SYSTEM_SETTINGS", + "SCHEMA_KEY_PROJECT_SETTINGS", + + "KEY_ALLOWED_SYMBOLS", + "KEY_REGEX", + "SaveWarningExc", "get_general_environments", diff --git a/openpype/settings/constants.py b/openpype/settings/constants.py index a53e88a91e..2ea19ead4b 100644 --- a/openpype/settings/constants.py +++ b/openpype/settings/constants.py @@ -14,13 +14,17 @@ METADATA_KEYS = ( M_DYNAMIC_KEY_LABEL ) -# File where studio's system overrides are stored +# Keys where studio's system overrides are stored GLOBAL_SETTINGS_KEY = "global_settings" SYSTEM_SETTINGS_KEY = "system_settings" PROJECT_SETTINGS_KEY = "project_settings" PROJECT_ANATOMY_KEY = "project_anatomy" LOCAL_SETTING_KEY = "local_settings" +# Schema hub names +SCHEMA_KEY_SYSTEM_SETTINGS = "system_schema" +SCHEMA_KEY_PROJECT_SETTINGS = "projects_schema" + DEFAULT_PROJECT_KEY = "__default_project__" KEY_ALLOWED_SYMBOLS = "a-zA-Z0-9-_ " @@ -39,6 +43,9 @@ __all__ = ( "PROJECT_ANATOMY_KEY", "LOCAL_SETTING_KEY", + "SCHEMA_KEY_SYSTEM_SETTINGS", + "SCHEMA_KEY_PROJECT_SETTINGS", + "DEFAULT_PROJECT_KEY", "KEY_ALLOWED_SYMBOLS", diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index f7036726d2..d4b0e10864 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -11,6 +11,10 @@ from .exceptions import ( SchemaDuplicatedEnvGroupKeys ) +from openpype.settings.constants import ( + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS +) try: STRING_TYPE = basestring except Exception: @@ -25,9 +29,6 @@ TEMPLATE_METADATA_KEYS = ( DEFAULT_VALUES_KEY, ) -SCHEMA_KEY_SYSTEM_SETTINGS = "system_schema" -SCHEMA_KEY_PROJECT_SETTINGS = "projects_schema" - SCHEMA_EXTEND_TYPES = ( "schema", "template", "schema_template", "dynamic_schema" 
) From 2706c7759f6ed01aecb7a205f4cfc806aa22b7d3 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 17:15:06 +0200 Subject: [PATCH 200/308] a littlebit safer return value check --- openpype/settings/lib.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index d7684082f3..60ed54bd4a 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -337,7 +337,9 @@ def _get_default_settings(): module_settings_defs = get_module_settings_defs() for module_settings_def_cls in module_settings_defs: module_settings_def = module_settings_def_cls() - system_defaults = module_settings_def.get_system_defaults() + system_defaults = module_settings_def.get_defaults( + SYSTEM_SETTINGS_KEY + ) or {} for path, value in system_defaults.items(): if not path: continue @@ -349,7 +351,9 @@ def _get_default_settings(): subdict = subdict[key] subdict[last_key] = value - project_defaults = module_settings_def.get_project_defaults() + project_defaults = module_settings_def.get_defaults( + PROJECT_SETTINGS_KEY + ) or {} for path, value in project_defaults.items(): if not path: continue From 735f4b847b7e990c8e6c0e22ed45a5d6d4c6c829 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 17:15:29 +0200 Subject: [PATCH 201/308] added mapping of schema hub key to top key value --- openpype/settings/entities/lib.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py index d4b0e10864..f207322dee 100644 --- a/openpype/settings/entities/lib.py +++ b/openpype/settings/entities/lib.py @@ -12,6 +12,8 @@ from .exceptions import ( ) from openpype.settings.constants import ( + SYSTEM_SETTINGS_KEY, + PROJECT_SETTINGS_KEY, SCHEMA_KEY_SYSTEM_SETTINGS, SCHEMA_KEY_PROJECT_SETTINGS ) @@ -734,6 +736,12 @@ class SchemasHub: class DynamicSchemaValueCollector: + # Map schema hub type to store keys + 
schema_hub_type_map = { + SCHEMA_KEY_SYSTEM_SETTINGS: SYSTEM_SETTINGS_KEY, + SCHEMA_KEY_PROJECT_SETTINGS: PROJECT_SETTINGS_KEY + } + def __init__(self, schema_hub): self._schema_hub = schema_hub self._dynamic_entities = [] @@ -756,7 +764,7 @@ class DynamicSchemaValueCollector: schema_def = self._schema_hub.get_dynamic_modules_settings_defs( schema_def_id ) - if self._schema_hub.schema_type == SCHEMA_KEY_SYSTEM_SETTINGS: - schema_def.save_system_defaults(schema_def_value) - elif self._schema_hub.schema_type == SCHEMA_KEY_PROJECT_SETTINGS: - schema_def.save_project_defaults(schema_def_value) + top_key = self.schema_hub_type_map.get( + self._schema_hub.schema_type + ) + schema_def.save_defaults(top_key, schema_def_value) From f30253697127285c21650dc400f81de577f24074 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 17:15:46 +0200 Subject: [PATCH 202/308] eliminated methods in ModuleSettingsDef --- openpype/modules/base.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 9df9b3a97b..ce555c6bbf 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -1025,17 +1025,9 @@ class ModuleSettingsDef: pass @abstractmethod - def save_system_defaults(self, data): + def get_defaults(self, top_key): pass @abstractmethod - def save_project_defaults(self, data): - pass - - @abstractmethod - def get_system_defaults(self): - pass - - @abstractmethod - def get_project_defaults(self): + def save_defaults(self, top_key, data): pass From c16cee6f810ecd1cd0a45b99cf77079e83c6d51f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 17:47:37 +0200 Subject: [PATCH 203/308] added few docstrings --- openpype/modules/base.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index ce555c6bbf..9972126136 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ 
-1008,26 +1008,63 @@ def get_module_settings_defs(): @six.add_metaclass(ABCMeta) class ModuleSettingsDef: + """Definition of settings for OpenPype module or AddOn.""" _id = None @property def id(self): + """ID created on initialization. + + ID should be per created object. Helps to store objects. + """ if self._id is None: self._id = uuid4() return self._id @abstractmethod def get_settings_schemas(self, schema_type): + """Setting schemas for passed schema type. + + These are main schemas by dynamic schema keys. If they're using + sub schemas or templates they should be loaded with + `get_dynamic_schemas`. + + Returns: + dict: Schema by `dynamic_schema` keys. + """ pass @abstractmethod def get_dynamic_schemas(self, schema_type): + """Settings schemas and templates that can be used anywhere. + + It is recommended to add prefix specific for addon/module to keys + (e.g. "my_addon/real_schema_name"). + + Returns: + dict: Schemas and templates by their keys. + """ pass @abstractmethod def get_defaults(self, top_key): + """Default values for passed top key. + + Top keys are (currently) "system_settings" or "project_settings". + + Should return exactly what was passed with `save_defaults`. + + Returns: + dict: Default values by path to first key in OpenPype defaults. + """ pass @abstractmethod def save_defaults(self, top_key, data): + """Save default values for passed top key. + + Top keys are (currently) "system_settings" or "project_settings". + + Passed data are by path to first key defined in main schemas. 
+ """ pass From 60ff21534219da5eeada4c69873ae0baff4b71a0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 17:54:04 +0200 Subject: [PATCH 204/308] added few infor to readme --- openpype/settings/entities/schemas/README.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index 2034d4e463..a34732fbad 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -112,6 +112,22 @@ ``` - It is possible to define default values for unfilled fields to do so one of items in list must be dictionary with key `"__default_values__"` and value as dictionary with default key: values (as in example above). +### dynamic_schema +- dynamic templates that can be defined by class of `ModuleSettingsDef` +- example: +``` +{ + "type": "dynamic_schema", + "name": "project_settings/global" +} +``` +- all valid `ModuleSettingsDef` classes where calling of `get_settings_schemas` + will return dictionary where is key "project_settings/global" with schemas + will extend and replace this item +- works almost the same way as templates + - one item can be replaced by multiple items (or by 0 items) +- goal is to dynamically loaded settings of OpenPype addons without having + their schemas or default values in main repository ## Basic Dictionary inputs - these inputs wraps another inputs into {key: value} relation From b869f2ab82657d24af093554654c48538a177be6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 24 Aug 2021 18:00:18 +0200 Subject: [PATCH 205/308] added few more docstrings --- openpype/modules/base.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 9972126136..c8cc911ca6 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -118,6 +118,18 @@ def get_default_modules_dir(): def get_dynamic_modules_dirs(): + 
"""Possible paths to OpenPype Addons of Modules. + + Paths are loaded from studio settings under: + `modules -> addon_paths -> {platform name}` + + Path may contain environment variable as a formatting string. + + They are not validated or checked their existence. + + Returns: + list: Paths loaded from studio overrides. + """ output = [] value = get_studio_system_settings_overrides() for key in ("modules", "addon_paths", platform.system().lower()): @@ -963,6 +975,17 @@ class TrayModulesManager(ModulesManager): def get_module_settings_defs(): + """Check loaded addons/modules for existence of thei settings definition. + + Check if OpenPype addon/module as python module has class that inherit + from `ModuleSettingsDef` in python module variables (imported + in `__init__py`). + + Returns: + list: All valid and not abstract settings definitions from imported + openpype addons and modules. + """ + # Make sure modules are loaded load_modules() import openpype_modules From 014960f5cc1061299d116d089e8184fa61efd77d Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 25 Aug 2021 03:38:30 +0000 Subject: [PATCH 206/308] [Automated] Bump version --- CHANGELOG.md | 82 +++++++++++++++++++-------------------------- openpype/version.py | 2 +- 2 files changed, 35 insertions(+), 49 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a00e914f40..5c55be842a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,23 +1,21 @@ # Changelog -## [3.4.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.4.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD) -**🚀 Enhancements** - -- Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) - **Merged pull requests:** - Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) -- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Add face 
sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) ## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1) -**🐛 Bug fixes** +**Merged pull requests:** - TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946) - Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945) @@ -28,78 +26,66 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.0-nightly.11...3.3.0) -**🚀 Enhancements** +**Merged pull requests:** - Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940) +- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) +- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) +- Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932) +- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) +- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) - Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927) +- Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926) - Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925) - Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923) +- Fix - validate takes repre\["files"\] as list all the time 
[\#1922](https://github.com/pypeclub/OpenPype/pull/1922) - Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920) - Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919) +- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) +- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) +- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) - submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) +- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) +- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) +- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) +- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) - Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901) - Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900) - Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899) - Support nested studio plugins paths. 
[\#1898](https://github.com/pypeclub/OpenPype/pull/1898) +- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) - Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892) - Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891) +- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) +- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) - Expose stop timer through rest api. [\#1886](https://github.com/pypeclub/OpenPype/pull/1886) - TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) - Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882) +- Normalize path returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880) - Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) - Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865) -- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) -- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) - -**🐛 Bug fixes** - -- Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935) -- Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930) -- Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929) -- Nuke: submit to farm failed due `ftrack` family remove 
[\#1926](https://github.com/pypeclub/OpenPype/pull/1926) -- Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922) -- standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917) -- Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) -- Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) -- Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) -- Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) -- Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903) -- Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902) -- Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893) -- global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890) -- publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889) -- Normalize path returned from Workfiles. 
[\#1880](https://github.com/pypeclub/OpenPype/pull/1880) -- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) -- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) -- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) - -**Merged pull requests:** - -- Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937) -- Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) - Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) +- Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) +- Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) +- Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) - Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) +- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0) -**🚀 Enhancements** - -- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) -- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) -- Ftrack Multiple notes as server action [\#1795](https://github.com/pypeclub/OpenPype/pull/1795) - -**🐛 Bug fixes** - -- nuke: fixing wrong name of family folder when `used existing frames` 
[\#1803](https://github.com/pypeclub/OpenPype/pull/1803) -- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) - **Merged pull requests:** - Build: don't add Poetry to `PATH` [\#1808](https://github.com/pypeclub/OpenPype/pull/1808) +- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) +- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) +- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) +- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) ## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) diff --git a/openpype/version.py b/openpype/version.py index 8dbb6b38cf..5fd6520953 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.4.0-nightly.1" +__version__ = "3.4.0-nightly.2" From 66a98a5dcb39eff57f8cb364acecb9fddf36b831 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Wed, 25 Aug 2021 12:12:30 +0700 Subject: [PATCH 207/308] Resolve path when adding to zip --- igniter/bootstrap_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 8c081b8614..b49a2f6e7f 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -508,7 +508,7 @@ class BootstrapRepos: processed_path = file self._print(f"- processing {processed_path}") - zip_file.write(file, file.relative_to(openpype_root)) + zip_file.write(file, file.resolve().relative_to(openpype_root)) # test if zip is ok zip_file.testzip() From c67b647dc78614779075774e5180261d2f6dc7f6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 10:16:10 +0200 Subject: [PATCH 208/308] Fix context.data --- openpype/plugins/publish/extract_burnin.py | 2 +- 
openpype/plugins/publish/extract_review.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 8fef5eaacb..607d2cbff7 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -96,7 +96,7 @@ class ExtractBurnin(openpype.api.Extractor): def main_process(self, instance): # TODO get these data from context - host_name = instance.context["hostName"] + host_name = instance.context.data["hostName"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index cdd40af027..a9235c3ffa 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -89,7 +89,7 @@ class ExtractReview(pyblish.api.InstancePlugin): instance.data["representations"].remove(repre) def main_process(self, instance): - host_name = instance.context["hostName"] + host_name = instance.context.data["hostName"] task_name = os.environ["AVALON_TASK"] family = self.main_family_from_instance(instance) From 47f529bdccf00aebccf83971403b4ca91dc9bdb8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 10:50:35 +0200 Subject: [PATCH 209/308] Webpublisher - rename to last version --- .../webpublisher/plugins/publish/collect_published_files.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 59c315861e..6584120d97 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -92,7 +92,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): instance.data["subset"] = subset instance.data["family"] = 
family instance.data["families"] = families - instance.data["version"] = self._get_version(asset, subset) + 1 + instance.data["version"] = \ + self._get_last_version(asset, subset) + 1 instance.data["stagingDir"] = task_dir instance.data["source"] = "webpublisher" @@ -195,7 +196,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): content["subset_template_name"], \ content["tags"] - def _get_version(self, asset_name, subset_name): + def _get_last_version(self, asset_name, subset_name): """Returns version number or 0 for 'asset' and 'subset'""" query = [ { From 5164481b367bcfb1a7fc768d5b83a3f7b896ac0b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 10:57:22 +0200 Subject: [PATCH 210/308] Webpublisher - introduced FTRACK_BOT_API_USER --- openpype/hosts/webpublisher/plugins/publish/collect_username.py | 2 +- website/docs/admin_webserver_for_webpublisher.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_username.py b/openpype/hosts/webpublisher/plugins/publish/collect_username.py index 0c2c6310f4..7a303a1608 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_username.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_username.py @@ -31,7 +31,7 @@ class CollectUsername(pyblish.api.ContextPlugin): _context = None def process(self, context): - os.environ["FTRACK_API_USER"] = "pype.club" + os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"] os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"] self.log.info("CollectUsername") for instance in context: diff --git a/website/docs/admin_webserver_for_webpublisher.md b/website/docs/admin_webserver_for_webpublisher.md index 2b23033595..dced825bdc 100644 --- a/website/docs/admin_webserver_for_webpublisher.md +++ b/website/docs/admin_webserver_for_webpublisher.md @@ -41,6 +41,7 @@ Deploy OP build distribution (Openpype Igniter) on an OS of your choice. 
#!/usr/bin/env bash export OPENPYPE_DEBUG=3 export WEBSERVER_HOST_IP=localhost +export FTRACK_BOT_API_USER=YOUR_API_USER export FTRACK_BOT_API_KEY=YOUR_API_KEY export PYTHONDONTWRITEBYTECODE=1 export OPENPYPE_MONGO=YOUR_MONGODB_CONNECTION From bcea6fbf0d2b19ae92fc946ff1501704ce024308 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 11:09:57 +0200 Subject: [PATCH 211/308] Webpublisher - removed is_webpublish_enabled as unneeded run_server gets triggered only for webpublisher, doesn't make sense to double check Moved webpublish dependent classes under webpublish host Cleaned up setting --- .../webserver_service}/webpublish_routes.py | 0 .../webserver_service}/webserver_cli.py | 105 ++++++++---------- openpype/pype_commands.py | 7 +- .../defaults/system_settings/modules.json | 3 - .../schemas/system_schema/schema_modules.json | 14 --- 5 files changed, 50 insertions(+), 79 deletions(-) rename openpype/{modules/webserver => hosts/webpublisher/webserver_service}/webpublish_routes.py (100%) rename openpype/{modules/webserver => hosts/webpublisher/webserver_service}/webserver_cli.py (52%) diff --git a/openpype/modules/webserver/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py similarity index 100% rename from openpype/modules/webserver/webpublish_routes.py rename to openpype/hosts/webpublisher/webserver_service/webpublish_routes.py diff --git a/openpype/modules/webserver/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py similarity index 52% rename from openpype/modules/webserver/webserver_cli.py rename to openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 24bd28ba7d..b1c14260e9 100644 --- a/openpype/modules/webserver/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -19,7 +19,6 @@ from .webpublish_routes import ( from openpype.api import get_system_settings -# SERVER_URL = "http://172.17.0.1:8079" # machine is not listening on 
localhost log = PypeLogger().get_logger("webserver_gui") @@ -32,72 +31,62 @@ def run_webserver(*args, **kwargs): webserver_module = manager.modules_by_name["webserver"] webserver_module.create_server_manager() - is_webpublish_enabled = False - webpublish_tool = get_system_settings()["modules"].\ - get("webpublish_tool") + resource = RestApiResource(webserver_module.server_manager, + upload_dir=kwargs["upload_dir"], + executable=kwargs["executable"]) + projects_endpoint = WebpublisherProjectsEndpoint(resource) + webserver_module.server_manager.add_route( + "GET", + "/api/projects", + projects_endpoint.dispatch + ) - if webpublish_tool and webpublish_tool["enabled"]: - is_webpublish_enabled = True + hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) + webserver_module.server_manager.add_route( + "GET", + "/api/hierarchy/{project_name}", + hiearchy_endpoint.dispatch + ) - log.debug("is_webpublish_enabled {}".format(is_webpublish_enabled)) - if is_webpublish_enabled: - resource = RestApiResource(webserver_module.server_manager, - upload_dir=kwargs["upload_dir"], - executable=kwargs["executable"]) - projects_endpoint = WebpublisherProjectsEndpoint(resource) - webserver_module.server_manager.add_route( - "GET", - "/api/projects", - projects_endpoint.dispatch - ) + # triggers publish + webpublisher_task_publish_endpoint = \ + WebpublisherBatchPublishEndpoint(resource) + webserver_module.server_manager.add_route( + "POST", + "/api/webpublish/batch", + webpublisher_task_publish_endpoint.dispatch + ) - hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) - webserver_module.server_manager.add_route( - "GET", - "/api/hierarchy/{project_name}", - hiearchy_endpoint.dispatch - ) + webpublisher_batch_publish_endpoint = \ + WebpublisherTaskPublishEndpoint(resource) + webserver_module.server_manager.add_route( + "POST", + "/api/webpublish/task", + webpublisher_batch_publish_endpoint.dispatch + ) - # triggers publish - webpublisher_task_publish_endpoint = \ - 
WebpublisherBatchPublishEndpoint(resource) - webserver_module.server_manager.add_route( - "POST", - "/api/webpublish/batch", - webpublisher_task_publish_endpoint.dispatch - ) + # reporting + openpype_resource = OpenPypeRestApiResource() + batch_status_endpoint = BatchStatusEndpoint(openpype_resource) + webserver_module.server_manager.add_route( + "GET", + "/api/batch_status/{batch_id}", + batch_status_endpoint.dispatch + ) - webpublisher_batch_publish_endpoint = \ - WebpublisherTaskPublishEndpoint(resource) - webserver_module.server_manager.add_route( - "POST", - "/api/webpublish/task", - webpublisher_batch_publish_endpoint.dispatch - ) - - # reporting - openpype_resource = OpenPypeRestApiResource() - batch_status_endpoint = BatchStatusEndpoint(openpype_resource) - webserver_module.server_manager.add_route( - "GET", - "/api/batch_status/{batch_id}", - batch_status_endpoint.dispatch - ) - - user_status_endpoint = PublishesStatusEndpoint(openpype_resource) - webserver_module.server_manager.add_route( - "GET", - "/api/publishes/{user}", - user_status_endpoint.dispatch - ) + user_status_endpoint = PublishesStatusEndpoint(openpype_resource) + webserver_module.server_manager.add_route( + "GET", + "/api/publishes/{user}", + user_status_endpoint.dispatch + ) webserver_module.start_server() last_reprocessed = time.time() while True: - if is_webpublish_enabled: - if time.time() - last_reprocessed > 20: - reprocess_failed(kwargs["upload_dir"]) - last_reprocessed = time.time() + if time.time() - last_reprocessed > 20: + reprocess_failed(kwargs["upload_dir"]) + last_reprocessed = time.time() time.sleep(1.0) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index d288e9f2a3..e0cab962f6 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -42,9 +42,8 @@ class PypeCommands: @staticmethod def launch_webpublisher_webservercli(*args, **kwargs): - from openpype.modules.webserver.webserver_cli import ( - run_webserver - ) + from 
openpype.hosts.webpublisher.webserver_service.webserver_cli \ + import (run_webserver) return run_webserver(*args, **kwargs) @staticmethod @@ -53,7 +52,7 @@ class PypeCommands: standalonepublish.main() @staticmethod - def publish(paths, targets=None, host=None): + def publish(paths, targets=None): """Start headless publishing. Publish use json from passed paths argument. diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 1005f8d16b..3a70b90590 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -167,9 +167,6 @@ "standalonepublish_tool": { "enabled": true }, - "webpublish_tool": { - "enabled": false - }, "project_manager": { "enabled": true }, diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 8cd729d2a1..75c08b2cd9 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -197,20 +197,6 @@ } ] }, - { - "type": "dict", - "key": "webpublish_tool", - "label": "Web Publish", - "collapsible": true, - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - } - ] - }, { "type": "dict", "key": "project_manager", From 9a9acc119ae5a95b117cca0ad6ffad65554faa71 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 11:23:26 +0200 Subject: [PATCH 212/308] Webpublisher - introduced command line arguments for host and port --- openpype/cli.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 28195008cc..0b6d41b060 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -96,9 +96,11 @@ def eventserver(debug, @main.command() @click.option("-d", "--debug", is_flag=True, help="Print debug 
messages") +@click.option("-h", "--host", help="Host", default=None) +@click.option("-p", "--port", help="Port", default=None) @click.option("-e", "--executable", help="Executable") @click.option("-u", "--upload_dir", help="Upload dir") -def webpublisherwebserver(debug, executable, upload_dir): +def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): """Starts webserver for communication with Webpublish FR via command line OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND @@ -111,7 +113,9 @@ def webpublisherwebserver(debug, executable, upload_dir): PypeCommands().launch_webpublisher_webservercli( upload_dir=upload_dir, - executable=executable + executable=executable, + host=host, + port=port ) From 91f9362288b1a6a4ca5d894812e6e32621f5874c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 11:47:52 +0200 Subject: [PATCH 213/308] Webpublisher - proper merge --- .../default_modules/webserver/server.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/modules/default_modules/webserver/server.py b/openpype/modules/default_modules/webserver/server.py index 9d99e1c7a3..83a29e074e 100644 --- a/openpype/modules/default_modules/webserver/server.py +++ b/openpype/modules/default_modules/webserver/server.py @@ -1,6 +1,5 @@ import threading import asyncio -import os from aiohttp import web @@ -11,8 +10,9 @@ log = PypeLogger.get_logger("WebServer") class WebServerManager: """Manger that care about web server thread.""" - def __init__(self, module): - self.module = module + def __init__(self, port=None, host=None): + self.port = port or 8079 + self.host = host or "localhost" self.client = None self.handlers = {} @@ -25,8 +25,8 @@ class WebServerManager: self.webserver_thread = WebServerThread(self) @property - def port(self): - return self.module.port + def url(self): + return "http://{}:{}".format(self.host, self.port) def add_route(self, *args, **kwargs): 
self.app.router.add_route(*args, **kwargs) @@ -79,6 +79,10 @@ class WebServerThread(threading.Thread): def port(self): return self.manager.port + @property + def host(self): + return self.manager.host + def run(self): self.is_running = True @@ -111,9 +115,7 @@ class WebServerThread(threading.Thread): """ Starts runner and TCPsite """ self.runner = web.AppRunner(self.manager.app) await self.runner.setup() - host_ip = os.environ.get("WEBSERVER_HOST_IP") or 'localhost' - log.info("host_ip:: {}".format(os.environ.get("WEBSERVER_HOST_IP"))) - self.site = web.TCPSite(self.runner, host_ip, self.port) + self.site = web.TCPSite(self.runner, self.host, self.port) await self.site.start() def stop(self): From f7cb778470fbfbab3b0ad7209912670f0992cca2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 11:48:43 +0200 Subject: [PATCH 214/308] Webpublisher - proper merge --- .../webserver/webserver_module.py | 52 +++++++++++++------ 1 file changed, 37 insertions(+), 15 deletions(-) diff --git a/openpype/modules/default_modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py index c000d5ce10..bdb0010118 100644 --- a/openpype/modules/default_modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -1,24 +1,34 @@ import os import socket +from abc import ABCMeta, abstractmethod + +import six from openpype import resources -from openpype.modules import OpenPypeModule -from openpype_interfaces import ( - ITrayService, - IWebServerRoutes -) +from .. 
import PypeModule, ITrayService -class WebServerModule(OpenPypeModule, ITrayService): +@six.add_metaclass(ABCMeta) +class IWebServerRoutes: + """Other modules interface to register their routes.""" + @abstractmethod + def webserver_initialization(self, server_manager): + pass + + +class WebServerModule(PypeModule, ITrayService): name = "webserver" label = "WebServer" + webserver_url_env = "OPENPYPE_WEBSERVER_URL" + def initialize(self, _module_settings): self.enabled = True self.server_manager = None self._host_listener = None self.port = self.find_free_port() + self.webserver_url = None def connect_with_modules(self, enabled_modules): if not self.server_manager: @@ -44,7 +54,7 @@ class WebServerModule(OpenPypeModule, ITrayService): self.server_manager.add_static(static_prefix, resources.RESOURCES_DIR) os.environ["OPENPYPE_STATICS_SERVER"] = "{}{}".format( - os.environ["OPENPYPE_WEBSERVER_URL"], static_prefix + self.webserver_url, static_prefix ) def _add_listeners(self): @@ -62,21 +72,33 @@ class WebServerModule(OpenPypeModule, ITrayService): if self.server_manager: self.server_manager.stop_server() + @staticmethod + def create_new_server_manager(port=None, host=None): + """Create webserver manager for passed port and host. + + Args: + port(int): Port on which wil webserver listen. + host(str): Host name or IP address. Default is 'localhost'. + + Returns: + WebServerManager: Prepared manager. 
+ """ + from .server import WebServerManager + + return WebServerManager(port, host) + def create_server_manager(self): if self.server_manager: return - from .server import WebServerManager - - self.server_manager = WebServerManager(self) + self.server_manager = self.create_new_server_manager(self.port) self.server_manager.on_stop_callbacks.append( self.set_service_failed_icon ) - # in a case that webserver should listen on specific ip (webpublisher) - self.log.info("module host_ip:: {}".format(os.environ.get("WEBSERVER_HOST_IP"))) - host_ip = os.environ.get("WEBSERVER_HOST_IP") or 'localhost' - webserver_url = "http://{}:{}".format(host_ip, self.port) - os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url + + webserver_url = self.server_manager.url + os.environ[self.webserver_url_env] = str(webserver_url) + self.webserver_url = webserver_url @staticmethod def find_free_port( From b235068a3eee5858be38cd8c70ed9bb4d824d2ae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 11:55:32 +0200 Subject: [PATCH 215/308] Webpublisher - proper merge --- .../webserver_service/webpublish_routes.py | 2 +- .../default_modules/webserver/webserver_module.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 32feb276ed..0014d1b344 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -10,7 +10,7 @@ import subprocess from avalon.api import AvalonMongoDB from openpype.lib import OpenPypeMongoConnection -from openpype.modules.avalon_apps.rest_api import _RestApiEndpoint +from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint from openpype.lib import PypeLogger diff --git a/openpype/modules/default_modules/webserver/webserver_module.py 
b/openpype/modules/default_modules/webserver/webserver_module.py index bdb0010118..d8e54632b5 100644 --- a/openpype/modules/default_modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -5,7 +5,11 @@ from abc import ABCMeta, abstractmethod import six from openpype import resources -from .. import PypeModule, ITrayService +from openpype.modules import OpenPypeModule +from openpype_interfaces import ( + ITrayService, + IWebServerRoutes +) @six.add_metaclass(ABCMeta) @@ -16,7 +20,7 @@ class IWebServerRoutes: pass -class WebServerModule(PypeModule, ITrayService): +class WebServerModule(OpenPypeModule, ITrayService): name = "webserver" label = "WebServer" @@ -53,6 +57,8 @@ class WebServerModule(PypeModule, ITrayService): static_prefix = "/res" self.server_manager.add_static(static_prefix, resources.RESOURCES_DIR) + webserver_url = "http://localhost:{}".format(self.port) + os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url os.environ["OPENPYPE_STATICS_SERVER"] = "{}{}".format( self.webserver_url, static_prefix ) From e666fad275a018aad670418605e041614eb85e1c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 11:57:58 +0200 Subject: [PATCH 216/308] Webpublisher - updated help label --- openpype/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index 0b6d41b060..c446d5e443 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -162,7 +162,7 @@ def publish(debug, paths, targets): @click.option("-h", "--host", help="Host") @click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") -@click.option("-t", "--targets", help="Targets module", default=None, +@click.option("-t", "--targets", help="Targets", default=None, multiple=True) def remotepublish(debug, project, path, host, targets=None, user=None): """Start CLI publishing. 
From 498adfeeb9a2c4482cef6d05f76eee51b28d57bd Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 25 Aug 2021 12:22:15 +0200 Subject: [PATCH 217/308] mark USd as experimental --- openpype/hosts/houdini/plugins/create/create_usd.py | 2 +- openpype/hosts/houdini/plugins/create/create_usdrender.py | 2 +- repos/avalon-core | 2 +- website/docs/artist_hosts_houdini.md | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 076197bace..5bcb7840c0 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -4,7 +4,7 @@ from openpype.hosts.houdini.api import plugin class CreateUSD(plugin.Creator): """Universal Scene Description""" - label = "USD" + label = "USD (experimental)" family = "usd" icon = "gears" enabled = False diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index 9b98f59ac1..cb3fe3f02b 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -5,7 +5,7 @@ from openpype.hosts.houdini.api import plugin class CreateUSDRender(plugin.Creator): """USD Render ROP in /stage""" - label = "USD Render" + label = "USD Render (experimental)" family = "usdrender" icon = "magic" diff --git a/repos/avalon-core b/repos/avalon-core index e5c8a15fde..52e24a9993 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit e5c8a15fde77708c924eab3018bda255f17b5390 +Subproject commit 52e24a9993e5223b0a719786e77a4b87e936e556 diff --git a/website/docs/artist_hosts_houdini.md b/website/docs/artist_hosts_houdini.md index f70eac625a..d2aadf05cb 100644 --- a/website/docs/artist_hosts_houdini.md +++ b/website/docs/artist_hosts_houdini.md @@ -52,7 +52,7 @@ Alembic ROP `/out/pointcacheStrange` This part of documentation 
is still work in progress. ::: -## USD +## USD (experimental support) ### Publishing USD You can publish your Solaris Stage as USD file. ![Solaris USD](assets/houdini_usd_stage.png) From 4d50db97806c97c5a96e071a962530eacca7f0f7 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 25 Aug 2021 12:22:29 +0200 Subject: [PATCH 218/308] validate extensions on ly on the main family --- .../hosts/houdini/plugins/publish/validate_file_extension.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index c299a47e74..b26d28a1e7 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -40,7 +40,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): node = instance[0] # Create lookup for current family in instance - families = instance.data.get("families", list()) + families = [] family = instance.data.get("family", None) if family: families.append(family) From 1387b75d5f4d1242c2ab7f35f8e406564013b848 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 12:36:03 +0200 Subject: [PATCH 219/308] Webpublisher - revert mixed up commit --- openpype/lib/applications.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 19208ff173..71ab2eac61 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1009,7 +1009,7 @@ class EnvironmentPrepData(dict): def get_app_environments_for_context( - project_name, asset_name, task_name, app_name=None, env=None + project_name, asset_name, task_name, app_name, env=None ): """Prepare environment variables by context. 
Args: @@ -1038,14 +1038,20 @@ def get_app_environments_for_context( "name": asset_name }) + # Prepare app object which can be obtained only from ApplciationManager + app_manager = ApplicationManager() + app = app_manager.applications[app_name] + # Project's anatomy anatomy = Anatomy(project_name) - prep_dict = { + data = EnvironmentPrepData({ "project_name": project_name, "asset_name": asset_name, "task_name": task_name, + "app": app, + "dbcon": dbcon, "project_doc": project_doc, "asset_doc": asset_doc, @@ -1053,15 +1059,7 @@ def get_app_environments_for_context( "anatomy": anatomy, "env": env - } - - if app_name: - # Prepare app object which can be obtained only from ApplicationManager - app_manager = ApplicationManager() - app = app_manager.applications[app_name] - prep_dict["app"] = app - - data = EnvironmentPrepData(prep_dict) + }) prepare_host_environments(data) prepare_context_environments(data) From 647f9779acb8cbbfa82848097dc47bfba0cb2bde Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 12:53:11 +0200 Subject: [PATCH 220/308] moved host name collector earlier --- openpype/plugins/publish/collect_host_name.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py index e1b7eb17c3..41d9cc3a5a 100644 --- a/openpype/plugins/publish/collect_host_name.py +++ b/openpype/plugins/publish/collect_host_name.py @@ -14,7 +14,7 @@ class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" label = "Collect Host Name" - order = pyblish.api.CollectorOrder + order = pyblish.api.CollectorOrder - 1 def process(self, context): host_name = context.data.get("hostName") From 3dfe3513c3e44e445042c086ea94740c542a8e3b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 13:10:45 +0200 Subject: [PATCH 221/308] Webpublisher - fixed host install --- openpype/hosts/webpublisher/__init__.py | 3 +++ 
openpype/hosts/webpublisher/api/__init__.py | 1 - openpype/pype_commands.py | 9 +++++---- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py index e69de29bb2..d47bab580b 100644 --- a/openpype/hosts/webpublisher/__init__.py +++ b/openpype/hosts/webpublisher/__init__.py @@ -0,0 +1,3 @@ +# to have required methods for interface +def ls(): + pass \ No newline at end of file diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 1bf1ef1a6f..76709bb2d7 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -29,7 +29,6 @@ def install(): log.info(PUBLISH_PATH) io.install() - avalon.Session["AVALON_APP"] = "webpublisher" # because of Ftrack collect avalon.on("application.launched", application_launch) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 7774a010a6..656f864229 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -157,11 +157,12 @@ class PypeCommands: os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path os.environ["AVALON_PROJECT"] = project - os.environ["AVALON_APP"] = host # to trigger proper plugings + os.environ["AVALON_APP"] = host - # this should be more generic - from openpype.hosts.webpublisher.api import install as w_install - w_install() + import avalon.api + from openpype.hosts import webpublisher + + avalon.api.install(webpublisher) log.info("Running publish ...") From c7c45ecf879ffe54ee7f942a7490350baec0c862 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 13:24:42 +0200 Subject: [PATCH 222/308] Webpublisher - removed unwanted folder --- openpype/modules/sync_server/__init__.py | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 openpype/modules/sync_server/__init__.py diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py deleted file mode 100644 
index a814f0db62..0000000000 --- a/openpype/modules/sync_server/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from openpype.modules.sync_server.sync_server_module import SyncServerModule - - -def tray_init(tray_widget, main_widget): - return SyncServerModule() From a65f0e15d71632ff94dc90d3a8b13222149f494d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 14:00:09 +0200 Subject: [PATCH 223/308] fixed webserver module --- .../default_modules/webserver/webserver_module.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/default_modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py index d8e54632b5..cfbb0c1ee0 100644 --- a/openpype/modules/default_modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -12,14 +12,6 @@ from openpype_interfaces import ( ) -@six.add_metaclass(ABCMeta) -class IWebServerRoutes: - """Other modules interface to register their routes.""" - @abstractmethod - def webserver_initialization(self, server_manager): - pass - - class WebServerModule(OpenPypeModule, ITrayService): name = "webserver" label = "WebServer" @@ -57,8 +49,6 @@ class WebServerModule(OpenPypeModule, ITrayService): static_prefix = "/res" self.server_manager.add_static(static_prefix, resources.RESOURCES_DIR) - webserver_url = "http://localhost:{}".format(self.port) - os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url os.environ["OPENPYPE_STATICS_SERVER"] = "{}{}".format( self.webserver_url, static_prefix ) From 5dbc7ab36d1eb3c601bbb6cffd92a6a0624d4852 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 14:02:55 +0200 Subject: [PATCH 224/308] recommit changes --- .../webserver_service/webserver_cli.py | 28 ++++++++++--------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 
b1c14260e9..b733cc260f 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -29,20 +29,23 @@ def run_webserver(*args, **kwargs): manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] - webserver_module.create_server_manager() + host = os.environ.get("WEBSERVER_HOST_IP") + port = 8079 + server_manager = webserver_module.create_new_server_manager(port, host) + webserver_url = server_manager.url - resource = RestApiResource(webserver_module.server_manager, + resource = RestApiResource(server_manager, upload_dir=kwargs["upload_dir"], executable=kwargs["executable"]) projects_endpoint = WebpublisherProjectsEndpoint(resource) - webserver_module.server_manager.add_route( + server_manager.add_route( "GET", "/api/projects", projects_endpoint.dispatch ) hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) - webserver_module.server_manager.add_route( + server_manager.add_route( "GET", "/api/hierarchy/{project_name}", hiearchy_endpoint.dispatch @@ -51,7 +54,7 @@ def run_webserver(*args, **kwargs): # triggers publish webpublisher_task_publish_endpoint = \ WebpublisherBatchPublishEndpoint(resource) - webserver_module.server_manager.add_route( + server_manager.add_route( "POST", "/api/webpublish/batch", webpublisher_task_publish_endpoint.dispatch @@ -59,7 +62,7 @@ def run_webserver(*args, **kwargs): webpublisher_batch_publish_endpoint = \ WebpublisherTaskPublishEndpoint(resource) - webserver_module.server_manager.add_route( + server_manager.add_route( "POST", "/api/webpublish/task", webpublisher_batch_publish_endpoint.dispatch @@ -68,29 +71,29 @@ def run_webserver(*args, **kwargs): # reporting openpype_resource = OpenPypeRestApiResource() batch_status_endpoint = BatchStatusEndpoint(openpype_resource) - webserver_module.server_manager.add_route( + server_manager.add_route( "GET", "/api/batch_status/{batch_id}", batch_status_endpoint.dispatch ) 
user_status_endpoint = PublishesStatusEndpoint(openpype_resource) - webserver_module.server_manager.add_route( + server_manager.add_route( "GET", "/api/publishes/{user}", user_status_endpoint.dispatch ) - webserver_module.start_server() + server_manager.start_server() last_reprocessed = time.time() while True: if time.time() - last_reprocessed > 20: - reprocess_failed(kwargs["upload_dir"]) + reprocess_failed(kwargs["upload_dir"], webserver_url) last_reprocessed = time.time() time.sleep(1.0) -def reprocess_failed(upload_dir): +def reprocess_failed(upload_dir, webserver_url): # log.info("check_reprocesable_records") from openpype.lib import OpenPypeMongoConnection @@ -118,8 +121,7 @@ def reprocess_failed(upload_dir): }} ) continue - server_url = "{}/api/webpublish/batch".format( - os.environ["OPENPYPE_WEBSERVER_URL"]) + server_url = "{}/api/webpublish/batch".format(webserver_url) with open(batch_url) as f: data = json.loads(f.read()) From cd5659248bc6db9c4a4602b24ddb467415f188c2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 14:10:34 +0200 Subject: [PATCH 225/308] use host port from kwargs --- .../hosts/webpublisher/webserver_service/webserver_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index b733cc260f..06d78e2fca 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -29,8 +29,8 @@ def run_webserver(*args, **kwargs): manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] - host = os.environ.get("WEBSERVER_HOST_IP") - port = 8079 + host = kwargs.get("host") or "localhost" + port = kwargs.get("port") or 8079 server_manager = webserver_module.create_new_server_manager(port, host) webserver_url = server_manager.url From 66b710116447774fee71f088d34c8eb5e34ab798 Mon Sep 17 
00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 14:11:08 +0200 Subject: [PATCH 226/308] Webpublisher - fix propagation of host --- .../hosts/webpublisher/webserver_service/webserver_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index b733cc260f..723762003d 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -29,8 +29,8 @@ def run_webserver(*args, **kwargs): manager = ModulesManager() webserver_module = manager.modules_by_name["webserver"] - host = os.environ.get("WEBSERVER_HOST_IP") - port = 8079 + host = kwargs["host"] + port = kwargs["port"] server_manager = webserver_module.create_new_server_manager(port, host) webserver_url = server_manager.url From 2d55233c6b2ab4c8ed8129f052b08eda70a3bffe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 14:33:16 +0200 Subject: [PATCH 227/308] Hound --- openpype/hosts/webpublisher/__init__.py | 2 +- .../hosts/webpublisher/webserver_service/webserver_cli.py | 4 +--- .../modules/default_modules/webserver/webserver_module.py | 3 --- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py index d47bab580b..3de2e3434b 100644 --- a/openpype/hosts/webpublisher/__init__.py +++ b/openpype/hosts/webpublisher/__init__.py @@ -1,3 +1,3 @@ # to have required methods for interface def ls(): - pass \ No newline at end of file + pass diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 06d78e2fca..d00d269059 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -17,8 +17,6 @@ from .webpublish_routes 
import ( PublishesStatusEndpoint ) -from openpype.api import get_system_settings - log = PypeLogger().get_logger("webserver_gui") @@ -129,7 +127,7 @@ def reprocess_failed(upload_dir, webserver_url): try: r = requests.post(server_url, json=data) log.info("response{}".format(r)) - except: + except Exception: log.info("exception", exc_info=True) dbcon.update_one( diff --git a/openpype/modules/default_modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py index cfbb0c1ee0..5bfb2d6390 100644 --- a/openpype/modules/default_modules/webserver/webserver_module.py +++ b/openpype/modules/default_modules/webserver/webserver_module.py @@ -1,8 +1,5 @@ import os import socket -from abc import ABCMeta, abstractmethod - -import six from openpype import resources from openpype.modules import OpenPypeModule From 430801da30a7fa259bcd1b815e643c5433a41651 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 14:36:37 +0200 Subject: [PATCH 228/308] Webpublisher - move plugin to Ftrack --- .../default_modules/ftrack}/plugins/publish/collect_username.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/{hosts/webpublisher => modules/default_modules/ftrack}/plugins/publish/collect_username.py (100%) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_username.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py similarity index 100% rename from openpype/hosts/webpublisher/plugins/publish/collect_username.py rename to openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py From 8c9f20bbc483d0facc9e8a40e6789dbf7fa2e9e9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 15:06:27 +0200 Subject: [PATCH 229/308] Webpublisher - moved dummy ls to api --- openpype/hosts/webpublisher/__init__.py | 3 --- openpype/hosts/webpublisher/api/__init__.py | 5 +++++ openpype/pype_commands.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git 
a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py index 3de2e3434b..e69de29bb2 100644 --- a/openpype/hosts/webpublisher/__init__.py +++ b/openpype/hosts/webpublisher/__init__.py @@ -1,3 +0,0 @@ -# to have required methods for interface -def ls(): - pass diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 76709bb2d7..e40d46d662 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -36,3 +36,8 @@ def uninstall(): pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) + + +# to have required methods for interface +def ls(): + pass diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 656f864229..c18fe36667 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -160,7 +160,7 @@ class PypeCommands: os.environ["AVALON_APP"] = host import avalon.api - from openpype.hosts import webpublisher + from openpype.hosts.webpublisher import api as webpublisher avalon.api.install(webpublisher) From cb229e9185308fc08603c88341c8792fd63f652b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 25 Aug 2021 15:11:08 +0200 Subject: [PATCH 230/308] Merge back to develop --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index 82d5b8137e..52e24a9993 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 82d5b8137eea3b49d4781a4af51d7f375bb9f628 +Subproject commit 52e24a9993e5223b0a719786e77a4b87e936e556 From 3d9d15829adf156da1d884675d10f33c4b6e96e0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 17:18:18 +0200 Subject: [PATCH 231/308] wrap paths in ffmpeg args to quotes --- openpype/plugins/publish/extract_jpeg.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/openpype/plugins/publish/extract_jpeg.py b/openpype/plugins/publish/extract_jpeg.py index b1289217e6..1057b5c696 100644 --- a/openpype/plugins/publish/extract_jpeg.py +++ b/openpype/plugins/publish/extract_jpeg.py @@ -95,7 +95,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): # use same input args like with mov jpeg_items.extend(ffmpeg_args.get("input") or []) # input file - jpeg_items.append("-i {}".format(full_input_path)) + jpeg_items.append("-i \"{}\"".format(full_input_path)) # output arguments from presets jpeg_items.extend(ffmpeg_args.get("output") or []) @@ -104,7 +104,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): jpeg_items.append("-vframes 1") # output file - jpeg_items.append(full_output_path) + jpeg_items.append("\"{}\"".format(full_output_path)) subprocess_jpeg = " ".join(jpeg_items) From 053e5d9750aee23feb12670b44cd8b28f34b1d5b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 18:48:49 +0200 Subject: [PATCH 232/308] launch blender like a python application on windows --- .../blender/hooks/pre_windows_console.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 openpype/hosts/blender/hooks/pre_windows_console.py diff --git a/openpype/hosts/blender/hooks/pre_windows_console.py b/openpype/hosts/blender/hooks/pre_windows_console.py new file mode 100644 index 0000000000..d6be45b225 --- /dev/null +++ b/openpype/hosts/blender/hooks/pre_windows_console.py @@ -0,0 +1,28 @@ +import subprocess +from openpype.lib import PreLaunchHook + + +class BlenderConsoleWindows(PreLaunchHook): + """Foundry applications have specific way how to launch them. + + Blender is executed "like" python process so it is required to pass + `CREATE_NEW_CONSOLE` flag on windows to trigger creation of new console. + At the same time the newly created console won't create it's own stdout + and stderr handlers so they should not be redirected to DEVNULL. 
+ """ + + # Should be as last hook because must change launch arguments to string + order = 1000 + app_groups = ["blender"] + platforms = ["windows"] + + def execute(self): + # Change `creationflags` to CREATE_NEW_CONSOLE + # - on Windows will blender create new window using it's console + # Set `stdout` and `stderr` to None so new created console does not + # have redirected output to DEVNULL in build + self.launch_context.kwargs.update({ + "creationflags": subprocess.CREATE_NEW_CONSOLE, + "stdout": None, + "stderr": None + }) From 5aabef9f9bcb314aaf68a40d2de8dc0a85ad0b12 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 18:53:21 +0200 Subject: [PATCH 233/308] push new defaults --- openpype/settings/defaults/project_settings/global.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index aab8c2196c..0c87c915f9 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -151,6 +151,7 @@ "template_name_profiles": [ { "families": [], + "hosts": [], "tasks": [], "template_name": "publish" }, @@ -160,6 +161,7 @@ "render", "prerender" ], + "hosts": [], "tasks": [], "template_name": "render" } From 1323ad175329535063ce2ab42fef47ded33b85c8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 18:55:55 +0200 Subject: [PATCH 234/308] added margins to footer layout --- openpype/tools/settings/settings/categories.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index d1babd7fdb..c420a8cdc5 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -203,6 +203,7 @@ class SettingsCategoryWidget(QtWidgets.QWidget): refresh_btn.setIcon(refresh_icon) footer_layout = QtWidgets.QHBoxLayout() + 
footer_layout.setContentsMargins(5, 5, 5, 5) if self.user_role == "developer": self._add_developer_ui(footer_layout) From 946e7da7f8aaa54d6e67b971b9ed57eecd156f49 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 18:58:57 +0200 Subject: [PATCH 235/308] don't go to root with mouse click --- openpype/tools/settings/settings/breadcrumbs_widget.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/breadcrumbs_widget.py b/openpype/tools/settings/settings/breadcrumbs_widget.py index b625a7bb07..d25cbdc8cb 100644 --- a/openpype/tools/settings/settings/breadcrumbs_widget.py +++ b/openpype/tools/settings/settings/breadcrumbs_widget.py @@ -325,7 +325,9 @@ class BreadcrumbsButton(QtWidgets.QToolButton): self.setSizePolicy(size_policy) menu.triggered.connect(self._on_menu_click) - self.clicked.connect(self._on_click) + # Don't allow to go to root with mouse click + if path: + self.clicked.connect(self._on_click) self._path = path self._path_prefix = path_prefix From 75f3dca1ced1dcd4ac4c676ecc5a684178727b0c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 19:03:38 +0200 Subject: [PATCH 236/308] mouse click on checkbox changes path --- openpype/tools/settings/settings/item_widgets.py | 4 ++++ openpype/tools/settings/settings/widgets.py | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index d29fa6f42b..a808caa465 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -312,8 +312,12 @@ class BoolWidget(InputWidget): self.setFocusProxy(self.input_field) + self.input_field.focused_in.connect(self._on_input_focus) self.input_field.stateChanged.connect(self._on_value_change) + def _on_input_focus(self): + self.focused_in() + def _on_entity_change(self): if self.entity.value != self.input_field.isChecked(): self.set_entity_value() diff 
--git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 34b222dd8e..d49057e1e8 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -459,6 +459,7 @@ class NiceCheckbox(QtWidgets.QFrame): stateChanged = QtCore.Signal(int) checked_bg_color = QtGui.QColor(69, 128, 86) unchecked_bg_color = QtGui.QColor(170, 80, 80) + focused_in = QtCore.Signal() def set_bg_color(self, color): self._bg_color = color @@ -583,6 +584,10 @@ class NiceCheckbox(QtWidgets.QFrame): self._on_checkstate_change() + def mousePressEvent(self, event): + self.focused_in.emit() + super(NiceCheckbox, self).mousePressEvent(event) + def mouseReleaseEvent(self, event): if event.button() == QtCore.Qt.LeftButton: self.setCheckState() From 7b0b6a50e82f465d5e8fa4bc86c6d74ca5935fbe Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 19:57:17 +0200 Subject: [PATCH 237/308] removed unused variable --- openpype/tools/launcher/models.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 846a07e081..09bdc3f961 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -122,7 +122,6 @@ class ActionModel(QtGui.QStandardItemModel): self.application_manager = ApplicationManager() - self._groups = {} self.default_icon = qtawesome.icon("fa.cube", color="white") # Cache of available actions self._registered_actions = list() @@ -186,8 +185,6 @@ class ActionModel(QtGui.QStandardItemModel): self.clear() self.items_by_id.clear() - self._groups.clear() - actions = self.filter_compatible_actions(self._registered_actions) self.beginResetModel() From 7458c049ccd2edfc47dc7055f5bc531fd01d3899 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 19:57:41 +0200 Subject: [PATCH 238/308] don't clear items by id on discover --- openpype/tools/launcher/models.py | 1 - 1 file changed, 1 deletion(-) diff 
--git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 09bdc3f961..398d8aad3d 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -137,7 +137,6 @@ class ActionModel(QtGui.QStandardItemModel): actions.extend(app_actions) self._registered_actions = actions - self.items_by_id.clear() def get_application_actions(self): actions = [] From 6db6969e1b8896f7c54444b2af60727ddbfab2d9 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 19:57:52 +0200 Subject: [PATCH 239/308] added projection to project document --- openpype/tools/launcher/models.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 398d8aad3d..82d118c5b7 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -143,7 +143,10 @@ class ActionModel(QtGui.QStandardItemModel): if not self.dbcon.Session.get("AVALON_PROJECT"): return actions - project_doc = self.dbcon.find_one({"type": "project"}) + project_doc = self.dbcon.find_one( + {"type": "project"}, + {"config.apps": True} + ) if not project_doc: return actions From 3a2deeb7c48acce8bd94a59a04ea40673b39f3cd Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 20:00:21 +0200 Subject: [PATCH 240/308] reorganized filter actions --- openpype/tools/launcher/models.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 82d118c5b7..3ceccf439f 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -183,14 +183,12 @@ class ActionModel(QtGui.QStandardItemModel): return icon def filter_actions(self): + self.items_by_id.clear() # Validate actions based on compatibility self.clear() - self.items_by_id.clear() actions = self.filter_compatible_actions(self._registered_actions) - self.beginResetModel() - single_actions = [] 
varianted_actions = collections.defaultdict(list) grouped_actions = collections.defaultdict(list) @@ -273,12 +271,17 @@ class ActionModel(QtGui.QStandardItemModel): items_by_order[order].append(item) + self.beginResetModel() + + items = [] for order in sorted(items_by_order.keys()): for item in items_by_order[order]: item_id = str(uuid.uuid4()) item.setData(item_id, ACTION_ID_ROLE) self.items_by_id[item_id] = item - self.appendRow(item) + items.append(item) + + self.invisibleRootItem().appendRows(items) self.endResetModel() From bac16b26ac5e29925d136a9e7e59192bc514df97 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 25 Aug 2021 20:00:39 +0200 Subject: [PATCH 241/308] stop animation before filtering --- openpype/tools/launcher/widgets.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 048210115c..0cdd129070 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -158,6 +158,8 @@ class ActionBar(QtWidgets.QWidget): self.model.discover() def filter_actions(self): + if self._animation_timer.isActive(): + self._animation_timer.stop() self.model.filter_actions() def set_row_height(self, rows): From 64d0fba55f4febe1daa0c96c749389758709b2e8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 11:55:16 +0200 Subject: [PATCH 242/308] enhanced project handler --- openpype/tools/launcher/lib.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index 65d40cd0df..d6374f49d2 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -44,9 +44,12 @@ class ProjectHandler(QtCore.QObject): # Signal emmited when project has changed project_changed = QtCore.Signal(str) + projects_refreshed = QtCore.Signal() + timer_timeout = QtCore.Signal() def __init__(self, dbcon, model): super(ProjectHandler, self).__init__() + self._active = False # 
Store project model for usage self.model = model # Store dbcon @@ -54,6 +57,28 @@ class ProjectHandler(QtCore.QObject): self.current_project = dbcon.Session.get("AVALON_PROJECT") + refresh_timer = QtCore.QTimer() + refresh_timer.setInterval(self.refresh_interval) + refresh_timer.timeout.connect(self._on_timeout) + + self.refresh_timer = refresh_timer + + def _on_timeout(self): + if self._active: + self.timer_timeout.emit() + self.refresh_model() + + def set_active(self, active): + self._active = active + + def start_timer(self, trigger=False): + self.refresh_timer.start() + if trigger: + self._on_timeout() + + def stop_timer(self): + self.refresh_timer.stop() + def set_project(self, project_name): # Change current project of this handler self.current_project = project_name @@ -66,6 +91,7 @@ class ProjectHandler(QtCore.QObject): def refresh_model(self): self.model.refresh() + self.projects_refreshed.emit() def get_action_icon(action): From e0df3e92d26fcb088d58871c638fd7e59afbab56 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 11:56:02 +0200 Subject: [PATCH 243/308] use single refresh timer across all widgets --- openpype/tools/launcher/constants.py | 4 +- openpype/tools/launcher/widgets.py | 33 +++++++---------- openpype/tools/launcher/window.py | 55 +++++++--------------------- 3 files changed, 29 insertions(+), 63 deletions(-) diff --git a/openpype/tools/launcher/constants.py b/openpype/tools/launcher/constants.py index e6dbbb6e19..7f394cb5ac 100644 --- a/openpype/tools/launcher/constants.py +++ b/openpype/tools/launcher/constants.py @@ -8,5 +8,5 @@ ACTION_ID_ROLE = QtCore.Qt.UserRole + 3 ANIMATION_START_ROLE = QtCore.Qt.UserRole + 4 ANIMATION_STATE_ROLE = QtCore.Qt.UserRole + 5 - -ANIMATION_LEN = 10 +# Animation length in seconds +ANIMATION_LEN = 7 diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 0cdd129070..35c7d98be1 100644 --- a/openpype/tools/launcher/widgets.py +++ 
b/openpype/tools/launcher/widgets.py @@ -40,16 +40,11 @@ class ProjectBar(QtWidgets.QWidget): QtWidgets.QSizePolicy.Maximum ) - refresh_timer = QtCore.QTimer() - refresh_timer.setInterval(project_handler.refresh_interval) - self.project_handler = project_handler self.project_delegate = project_delegate self.project_combobox = project_combobox - self.refresh_timer = refresh_timer # Signals - refresh_timer.timeout.connect(self._on_refresh_timeout) self.project_combobox.currentIndexChanged.connect(self.on_index_change) project_handler.project_changed.connect(self._on_project_change) @@ -58,20 +53,6 @@ class ProjectBar(QtWidgets.QWidget): if project_name: self.set_project(project_name) - def showEvent(self, event): - if not self.refresh_timer.isActive(): - self.refresh_timer.start() - super(ProjectBar, self).showEvent(event) - - def _on_refresh_timeout(self): - if not self.isVisible(): - # Stop timer if widget is not visible - self.refresh_timer.stop() - - elif self.isActiveWindow(): - # Refresh projects if window is active - self.project_handler.refresh_model() - def _on_project_change(self, project_name): if self.get_current_project() == project_name: return @@ -103,9 +84,10 @@ class ActionBar(QtWidgets.QWidget): action_clicked = QtCore.Signal(object) - def __init__(self, dbcon, parent=None): + def __init__(self, project_handler, dbcon, parent=None): super(ActionBar, self).__init__(parent) + self.project_handler = project_handler self.dbcon = dbcon layout = QtWidgets.QHBoxLayout(self) @@ -152,9 +134,12 @@ class ActionBar(QtWidgets.QWidget): self.set_row_height(1) + project_handler.projects_refreshed.connect(self._on_projects_refresh) view.clicked.connect(self.on_clicked) def discover_actions(self): + if self._animation_timer.isActive(): + self._animation_timer.stop() self.model.discover() def filter_actions(self): @@ -165,6 +150,9 @@ class ActionBar(QtWidgets.QWidget): def set_row_height(self, rows): self.setMinimumHeight(rows * 75) + def _on_projects_refresh(self): 
+ self.discover_actions() + def _on_animation(self): time_now = time.time() for action_id in tuple(self._animated_items): @@ -184,6 +172,8 @@ class ActionBar(QtWidgets.QWidget): self.update() def _start_animation(self, index): + # Offset refresh timout + self.project_handler.start_timer() action_id = index.data(ACTION_ID_ROLE) item = self.model.items_by_id.get(action_id) if item: @@ -204,6 +194,9 @@ class ActionBar(QtWidgets.QWidget): self.action_clicked.emit(action) return + # Offset refresh timout + self.project_handler.start_timer() + actions = index.data(ACTION_ROLE) menu = QtWidgets.QMenu(self) diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index 979aab42cf..bd37a9b89c 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -103,14 +103,9 @@ class ProjectsPanel(QtWidgets.QWidget): layout.addWidget(view) - refresh_timer = QtCore.QTimer() - refresh_timer.setInterval(project_handler.refresh_interval) - - refresh_timer.timeout.connect(self._on_refresh_timeout) view.clicked.connect(self.on_clicked) self.view = view - self.refresh_timer = refresh_timer self.project_handler = project_handler def on_clicked(self, index): @@ -118,21 +113,6 @@ class ProjectsPanel(QtWidgets.QWidget): project_name = index.data(QtCore.Qt.DisplayRole) self.project_handler.set_project(project_name) - def showEvent(self, event): - self.project_handler.refresh_model() - if not self.refresh_timer.isActive(): - self.refresh_timer.start() - super(ProjectsPanel, self).showEvent(event) - - def _on_refresh_timeout(self): - if not self.isVisible(): - # Stop timer if widget is not visible - self.refresh_timer.stop() - - elif self.isActiveWindow(): - # Refresh projects if window is active - self.project_handler.refresh_model() - class AssetsPanel(QtWidgets.QWidget): """Assets page""" @@ -268,8 +248,6 @@ class AssetsPanel(QtWidgets.QWidget): class LauncherWindow(QtWidgets.QDialog): """Launcher interface""" - # Refresh actions each 
10000msecs - actions_refresh_timeout = 10000 def __init__(self, parent=None): super(LauncherWindow, self).__init__(parent) @@ -304,7 +282,7 @@ class LauncherWindow(QtWidgets.QDialog): page_slider.addWidget(asset_panel) # actions - actions_bar = ActionBar(self.dbcon, self) + actions_bar = ActionBar(project_handler, self.dbcon, self) # statusbar statusbar = QtWidgets.QWidget() @@ -342,10 +320,6 @@ class LauncherWindow(QtWidgets.QDialog): layout.setSpacing(0) layout.setContentsMargins(0, 0, 0, 0) - actions_refresh_timer = QtCore.QTimer() - actions_refresh_timer.setInterval(self.actions_refresh_timeout) - - self.actions_refresh_timer = actions_refresh_timer self.project_handler = project_handler self.message_label = message_label @@ -357,22 +331,31 @@ class LauncherWindow(QtWidgets.QDialog): self._page = 0 # signals - actions_refresh_timer.timeout.connect(self._on_action_timer) actions_bar.action_clicked.connect(self.on_action_clicked) action_history.trigger_history.connect(self.on_history_action) project_handler.project_changed.connect(self.on_project_change) + project_handler.timer_timeout.connect(self._on_refresh_timeout) asset_panel.back_clicked.connect(self.on_back_clicked) asset_panel.session_changed.connect(self.on_session_changed) self.resize(520, 740) def showEvent(self, event): - if not self.actions_refresh_timer.isActive(): - self.actions_refresh_timer.start() - self.discover_actions() + self.project_handler.set_active(True) + self.project_handler.start_timer(True) super(LauncherWindow, self).showEvent(event) + def _on_refresh_timeout(self): + # Stop timer if widget is not visible + if not self.isVisible(): + self.project_handler.stop_timer() + + def changeEvent(self, event): + if event.type() == QtCore.QEvent.ActivationChange: + self.project_handler.set_active(self.isActiveWindow()) + super(LauncherWindow, self).changeEvent(event) + def set_page(self, page): current = self.page_slider.currentIndex() if current == page and self._page == page: @@ -392,20 
+375,10 @@ class LauncherWindow(QtWidgets.QDialog): def discover_actions(self): self.actions_bar.discover_actions() - self.filter_actions() def filter_actions(self): self.actions_bar.filter_actions() - def _on_action_timer(self): - if not self.isVisible(): - # Stop timer if widget is not visible - self.actions_refresh_timer.stop() - - elif self.isActiveWindow(): - # Refresh projects if window is active - self.discover_actions() - def on_project_change(self, project_name): # Update the Action plug-ins available for the current project self.set_page(1) From dfa9132ac3275dc4e9ab8abd58ee4246c485cc4f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 11:58:04 +0200 Subject: [PATCH 244/308] trigger filtering after discover --- openpype/tools/launcher/models.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 3ceccf439f..4988829c11 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -138,6 +138,8 @@ class ActionModel(QtGui.QStandardItemModel): self._registered_actions = actions + self.filter_actions() + def get_application_actions(self): actions = [] if not self.dbcon.Session.get("AVALON_PROJECT"): From 5578784360a8b15a12be2ccd5d9cae222975c41e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 12:15:24 +0200 Subject: [PATCH 245/308] fixed set focus for dictionary widget --- .../tools/settings/settings/item_widgets.py | 19 +++++++++++++++++++ openpype/tools/settings/settings/widgets.py | 2 ++ 2 files changed, 21 insertions(+) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index a808caa465..e3372ac2c4 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -89,6 +89,25 @@ class DictImmutableKeysWidget(BaseWidget): self._prepare_entity_layouts(child["children"], wrapper) + def set_focus(self, 
scroll_to=False): + """Set focus of a widget. + + Args: + scroll_to(bool): Also scroll to widget in category widget. + """ + if self.body_widget: + if scroll_to: + self.scroll_to(self.body_widget.top_part) + self.body_widget.top_part.setFocus() + + else: + if scroll_to: + if not self.input_fields: + self.scroll_to(self) + else: + self.scroll_to(self.input_fields[0]) + self.setFocus() + def _ui_item_base(self): self.setObjectName("DictInvisible") diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index d49057e1e8..b821c3bb2c 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -221,6 +221,8 @@ class ExpandingWidget(QtWidgets.QWidget): self.main_layout.setSpacing(0) self.main_layout.addWidget(top_part) + self.top_part = top_part + def hide_toolbox(self, hide_content=False): self.button_toggle.setArrowType(QtCore.Qt.NoArrow) self.toolbox_hidden = True From 472ae6bd4623ab8493f1db123b16eaeda453fd9c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 14:52:46 +0200 Subject: [PATCH 246/308] stretch second column in dictionary widget --- openpype/tools/settings/settings/item_widgets.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index e3372ac2c4..b2b129da86 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -48,6 +48,10 @@ class DictImmutableKeysWidget(BaseWidget): self._ui_item_base() label = self.entity.label + # Set stretch of second column to 1 + if isinstance(self.content_layout, QtWidgets.QGridLayout): + self.content_layout.setColumnStretch(1, 1) + self._direct_children_widgets = [] self._parent_widget_by_entity_id = {} self._added_wrapper_ids = set() From 79314142bbb2e481d716b71608bdffa41e99d657 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 14:53:19 
+0200 Subject: [PATCH 247/308] fixed single selection of deadline url --- openpype/settings/entities/enum_entity.py | 34 +++++++++++------------ 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 5db31959a5..ed5da5bd9a 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -458,27 +458,19 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): self.valid_value_types = (list,) self.value_on_not_set = [] else: - for key in self.valid_keys: - if self.value_on_not_set is NOT_SET: - self.value_on_not_set = key - break - self.valid_value_types = (STRING_TYPE,) + self.value_on_not_set = "" # GUI attribute self.placeholder = self.schema_data.get("placeholder") def _get_enum_values(self): - system_settings_entity = self.get_entity_from_path("system_settings") + deadline_urls_entity = self.get_entity_from_path( + "system_settings/modules/deadline/deadline_urls" + ) valid_keys = set() enum_items_list = [] - deadline_urls_entity = ( - system_settings_entity - ["modules"] - ["deadline"] - ["deadline_urls"] - ) for server_name, url_entity in deadline_urls_entity.items(): enum_items_list.append( {server_name: "{}: {}".format(server_name, url_entity.value)}) @@ -489,8 +481,16 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): super(DeadlineUrlEnumEntity, self).set_override_state(*args, **kwargs) self.enum_items, self.valid_keys = self._get_enum_values() - new_value = [] - for key in self._current_value: - if key in self.valid_keys: - new_value.append(key) - self._current_value = new_value + if self.multiselection: + new_value = [] + for key in self._current_value: + if key in self.valid_keys: + new_value.append(key) + self._current_value = new_value + + else: + if not self.valid_keys: + self._current_value = "" + + elif self._current_value not in self.valid_keys: + self._current_value = tuple(self.valid_keys)[0] From 
ada23f369b7e6946d675a7c3bb267394cb4dbb18 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 15:02:17 +0200 Subject: [PATCH 248/308] added base of anatomy template enum entity --- openpype/settings/entities/__init__.py | 4 ++- openpype/settings/entities/enum_entity.py | 42 +++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py index 9cda702e9a..8c30d5044c 100644 --- a/openpype/settings/entities/__init__.py +++ b/openpype/settings/entities/__init__.py @@ -106,7 +106,8 @@ from .enum_entity import ( ToolsEnumEntity, TaskTypeEnumEntity, ProvidersEnum, - DeadlineUrlEnumEntity + DeadlineUrlEnumEntity, + AnatomyTemplatesEnumEntity ) from .list_entity import ListEntity @@ -162,6 +163,7 @@ __all__ = ( "TaskTypeEnumEntity", "ProvidersEnum", "DeadlineUrlEnumEntity", + "AnatomyTemplatesEnumEntity", "ListEntity", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 5db31959a5..17915d9948 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -494,3 +494,45 @@ class DeadlineUrlEnumEntity(BaseEnumEntity): if key in self.valid_keys: new_value.append(key) self._current_value = new_value + + +class AnatomyTemplatesEnumEntity(BaseEnumEntity): + schema_types = ["anatomy-templates-enum"] + + def _item_initalization(self): + self.multiselection = False + + self.enum_items = [] + self.valid_keys = set() + + enum_default = self.schema_data.get("default") or "work" + + self.value_on_not_set = enum_default + self.valid_value_types = (STRING_TYPE,) + + # GUI attribute + self.placeholder = self.schema_data.get("placeholder") + + def _get_enum_values(self): + templates_entity = self.get_entity_from_path( + "project_anatomy/templates" + ) + + valid_keys = set() + enum_items_list = [] + + for key, value in templates_entity.items(): + print(key, value) + 
enum_items_list.append( + {key: key}) + valid_keys.add(key) + return enum_items_list, valid_keys + + def set_override_state(self, *args, **kwargs): + super(AnatomyTemplatesEnumEntity, self).set_override_state( + *args, **kwargs + ) + + self.enum_items, self.valid_keys = self._get_enum_values() + if self._current_value not in self.valid_keys: + self._current_value = self.value_on_not_set From a06ab7e0ef08c5cb0a90bbdceddd4681cf9b5288 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 15:03:54 +0200 Subject: [PATCH 249/308] added `workfile_template_profiles` to settings --- .../defaults/project_settings/global.json | 7 +++++ openpype/settings/entities/enum_entity.py | 1 - .../schemas/schema_global_tools.json | 31 +++++++++++++++++++ 3 files changed, 38 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index aab8c2196c..63c4bc5091 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -249,6 +249,13 @@ ] }, "Workfiles": { + "workfile_template_profiles": [ + { + "task_types": [], + "hosts": [], + "workfile_template": "work" + } + ], "last_workfile_on_startup": [ { "hosts": [], diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 17915d9948..d174b6a3df 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -522,7 +522,6 @@ class AnatomyTemplatesEnumEntity(BaseEnumEntity): enum_items_list = [] for key, value in templates_entity.items(): - print(key, value) enum_items_list.append( {key: key}) valid_keys.add(key) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 9e39eeb39e..245560f115 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -65,6 +65,37 @@ "key": "Workfiles", "label": "Workfiles", "children": [ + { + "type": "list", + "key": "workfile_template_profiles", + "label": "Workfile template profiles", + "use_label_wrap": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "type": "hosts-enum", + "key": "hosts", + "label": "Hosts", + "multiselection": true + }, + { + "type": "splitter" + }, + { + "key": "workfile_template", + "label": "Workfile template", + "type": "anatomy-templates-enum", + "multiselection": false + } + ] + } + }, { "type": "list", "key": "last_workfile_on_startup", From 59cff86ce4ea05f7ff1161cf274c1b5bbc336546 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 15:17:04 +0200 Subject: [PATCH 250/308] fixed templates enum entity --- openpype/settings/entities/enum_entity.py | 32 ++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index d174b6a3df..c35a800883 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -521,10 +521,36 @@ class AnatomyTemplatesEnumEntity(BaseEnumEntity): valid_keys = set() enum_items_list = [] - for key, value in templates_entity.items(): - enum_items_list.append( - {key: key}) + others_entity = None + for key, entity in templates_entity.items(): + # Skip defaults key + if key == "defaults": + continue + + if key == "others": + others_entity = entity + continue + + label = key + if hasattr(entity, "label"): + label = entity.label or label + + enum_items_list.append({key: label}) valid_keys.add(key) + + if others_entity is not None: + print(others_entity) + get_child_label_func = getattr( + 
others_entity, "get_child_label", None + ) + for key, child_entity in others_entity.items(): + label = key + if callable(get_child_label_func): + label = get_child_label_func(child_entity) or label + + enum_items_list.append({key: label}) + valid_keys.add(key) + return enum_items_list, valid_keys def set_override_state(self, *args, **kwargs): From 9d4451a3d40bb507a257ff2fd8e38826966a3f82 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 15:18:28 +0200 Subject: [PATCH 251/308] removed debug print --- openpype/settings/entities/enum_entity.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index c35a800883..c5330a2f04 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -539,7 +539,6 @@ class AnatomyTemplatesEnumEntity(BaseEnumEntity): valid_keys.add(key) if others_entity is not None: - print(others_entity) get_child_label_func = getattr( others_entity, "get_child_label", None ) From c0d36a27984ef232c6d66bd37f55db7ef3156e0c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 18:44:39 +0200 Subject: [PATCH 252/308] hide not used widgets --- openpype/tools/workfiles/app.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 42f0e422ae..66e204e89c 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -126,10 +126,14 @@ class NameWindow(QtWidgets.QDialog): # for "{version". 
if "{version" in self.template: inputs_layout.addRow("Version:", version_widget) + else: + version_widget.setVisible(False) # Add subversion only if template containt `{comment}` if "{comment}" in self.template: inputs_layout.addRow("Subversion:", subversion_input) + else: + subversion_input.setVisible(False) inputs_layout.addRow("Extension:", ext_combo) inputs_layout.addRow("Preview:", preview_label) From 43d718d0ee3401b446cc91ce2192e0ed5d4959b2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 18:44:52 +0200 Subject: [PATCH 253/308] reimplemented task model --- openpype/tools/workfiles/model.py | 145 +++++++++++++++++++++++++++++- 1 file changed, 144 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/model.py b/openpype/tools/workfiles/model.py index 368988fd4e..92fbf76b95 100644 --- a/openpype/tools/workfiles/model.py +++ b/openpype/tools/workfiles/model.py @@ -1,7 +1,7 @@ import os import logging -from Qt import QtCore +from Qt import QtCore, QtGui from avalon import style from avalon.vendor import qtawesome @@ -9,6 +9,10 @@ from avalon.tools.models import TreeModel, Item log = logging.getLogger(__name__) +TASK_NAME_ROLE = QtCore.Qt.UserRole + 1 +TASK_TYPE_ROLE = QtCore.Qt.UserRole + 2 +TASK_ORDER_ROLE = QtCore.Qt.UserRole + 3 + class FilesModel(TreeModel): """Model listing files with specified extensions in a root folder""" @@ -151,3 +155,142 @@ class FilesModel(TreeModel): return "Date modified" return super(FilesModel, self).headerData(section, orientation, role) + + +class TasksProxyModel(QtCore.QSortFilterProxyModel): + def lessThan(self, x_index, y_index): + x_order = x_index.data(TASK_ORDER_ROLE) + y_order = y_index.data(TASK_ORDER_ROLE) + if x_order is not None and y_order is not None: + if x_order < y_order: + return True + if x_order > y_order: + return False + + elif x_order is None and y_order is not None: + return True + + elif y_order is None and x_order is not None: + return False + + x_name = 
x_index.data(QtCore.Qt.DisplayRole) + y_name = y_index.data(QtCore.Qt.DisplayRole) + if x_name == y_name: + return True + + if x_name == tuple(sorted((x_name, y_name)))[0]: + return False + return True + + +class TasksModel(QtGui.QStandardItemModel): + """A model listing the tasks combined for a list of assets""" + def __init__(self, dbcon, parent=None): + super(TasksModel, self).__init__(parent=parent) + self.dbcon = dbcon + self._default_icon = qtawesome.icon( + "fa.male", + color=style.colors.default + ) + self._no_tasks_icon = qtawesome.icon( + "fa.exclamation-circle", + color=style.colors.mid + ) + self._cached_icons = {} + self._project_task_types = {} + + self._refresh_task_types() + + def _refresh_task_types(self): + # Get the project configured icons from database + project = self.dbcon.find_one( + {"type": "project"}, + {"config.tasks"} + ) + tasks = project["config"].get("tasks") or {} + self._project_task_types = tasks + + def _try_get_awesome_icon(self, icon_name): + icon = None + if icon_name: + try: + icon = qtawesome.icon( + "fa.{}".format(icon_name), + color=style.colors.default + ) + + except Exception: + pass + return icon + + def headerData(self, section, orientation, role): + # Show nice labels in the header + if ( + role == QtCore.Qt.DisplayRole + and orientation == QtCore.Qt.Horizontal + ): + if section == 0: + return "Tasks" + + return super(TasksModel, self).headerData(section, orientation, role) + + def _get_icon(self, task_icon, task_type_icon): + if task_icon in self._cached_icons: + return self._cached_icons[task_icon] + + icon = self._try_get_awesome_icon(task_icon) + if icon is not None: + self._cached_icons[task_icon] = icon + return icon + + if task_type_icon in self._cached_icons: + icon = self._cached_icons[task_type_icon] + self._cached_icons[task_icon] = icon + return icon + + icon = self._try_get_awesome_icon(task_type_icon) + if icon is None: + icon = self._default_icon + + self._cached_icons[task_icon] = icon + 
self._cached_icons[task_type_icon] = icon + + return icon + + def set_asset(self, asset_doc): + """Set assets to track by their database id + + Arguments: + asset_doc (dict): Asset document from MongoDB. + """ + self.clear() + + if not asset_doc: + return + + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + items = [] + for task_name, task_info in asset_tasks.items(): + task_icon = task_info.get("icon") + task_type = task_info.get("type") + task_order = task_info.get("order") + task_type_info = self._project_task_types.get(task_type) or {} + task_type_icon = task_type_info.get("icon") + icon = self._get_icon(task_icon, task_type_icon) + + label = "{} ({})".format(task_name, task_type or "type N/A") + item = QtGui.QStandardItem(label) + item.setData(task_name, TASK_NAME_ROLE) + item.setData(task_type, TASK_TYPE_ROLE) + item.setData(task_order, TASK_ORDER_ROLE) + item.setData(icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable) + items.append(item) + + if not items: + item = QtGui.QStandardItem("No task") + item.setData(self._no_tasks_icon, QtCore.Qt.DecorationRole) + item.setFlags(QtCore.Qt.NoItemFlags) + items.append(item) + + self.invisibleRootItem().appendRows(items) From a4f8521e49c3cd3d24ee403ee90e4a9bfad07dd2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 18:45:26 +0200 Subject: [PATCH 254/308] implemented functions to retrieve template key for workfile --- openpype/lib/avalon_context.py | 64 ++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index c4217cc6d5..b363027ec2 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -344,6 +344,70 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): return version_doc +def get_workfile_template_key_from_context( + project_name, asset_name, task_name, host_name, + dbcon=None, 
project_settings=None +): + if not dbcon: + from avalon.api import AvalonMongoDB + + dbcon = AvalonMongoDB() + + dbcon.Session["AVALON_PROJECT"] = project_name + asset_doc = dbcon.find_one( + { + "type": "asset", + "name": asset_name + }, + { + "data.tasks": 1 + } + ) + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + + return get_workfile_template_key( + project_name, task_type, host_name, project_settings + ) + + +def get_workfile_template_key( + project_name, task_type, host_name, project_settings=None +): + default = "work" + if not task_type or not host_name: + return default + + if not project_settings: + project_settings = get_project_settings(project_name) + + try: + profiles = ( + project_settings + ["global"] + ["tools"] + ["Workfiles"] + ["workfile_template_profiles"] + ) + except Exception: + profiles = [] + + if not profiles: + return default + + from .profiles_filtering import filter_profiles + + profile_filter = { + "task_types": task_type, + "hosts": host_name + } + profile = filter_profiles(profiles, profile_filter) + if profile: + return profile["workfile_template"] or default + return default + + def get_workdir_data(project_doc, asset_doc, task_name, host_name): """Prepare data for workdir template filling from entered information. 
From 461c33321861628737e6da76b9915e0e6738851f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:08:06 +0200 Subject: [PATCH 255/308] changed arguments and filled docstrings to new functions --- openpype/lib/avalon_context.py | 65 +++++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 4 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b363027ec2..449dde51c4 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -345,15 +345,47 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): def get_workfile_template_key_from_context( - project_name, asset_name, task_name, host_name, + asset_name, task_name, host_name, project_name=None, dbcon=None, project_settings=None ): + """Helper function to get template key for workfile template. + + Do the same as `get_workfile_template_key` but returns value for "session + context". + + It is required to pass one of 'dbcon' with already set project name or + 'project_name' arguments. + + Args: + asset_name(str): Name of asset document. + task_name(str): Task name for which is template key retrieved. + Must be available on asset document under `data.tasks`. + host_name(str): Name of host implementation for which is workfile + used. + project_name(str): Project name where asset and task is. Not required + when 'dbcon' is passed. + dbcon(AvalonMongoDB): Connection to mongo with already set project + under `AVALON_PROJECT`. Not required when 'project_name' is passed. + project_settings(dict): Project settings for passed 'project_name'. + Not required at all but makes function faster. + Raises: + ValueError: When both 'dbcon' and 'project_name' were not + passed. + """ if not dbcon: + if not project_name: + raise ValueError(( + "`get_workfile_template_key_from_context` requires to pass" + " one of 'dbcon' or 'project_name' arguments." 
+ )) from avalon.api import AvalonMongoDB dbcon = AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = project_name + + elif not project_name: + project_name = dbcon.Session["AVALON_PROJECT"] - dbcon.Session["AVALON_PROJECT"] = project_name asset_doc = dbcon.find_one( { "type": "asset", @@ -368,18 +400,43 @@ def get_workfile_template_key_from_context( task_type = task_info.get("type") return get_workfile_template_key( - project_name, task_type, host_name, project_settings + task_type, host_name, project_name, project_settings ) def get_workfile_template_key( - project_name, task_type, host_name, project_settings=None + task_type, host_name, project_name=None, project_settings=None ): + """Workfile template key which should be used to get workfile template. + + Function is using profiles from project settings to return right template + for passet task type and host name. + + One of 'project_name' or 'project_settings' must be passed it is preffered + to pass settings if are already available. + + Args: + task_type(str): Name of task type. + host_name(str): Name of host implementation (e.g. "maya", "nuke", ...) + project_name(str): Name of project in which context should look for + settings. Not required if `project_settings` are passed. + project_settings(dict): Prepare project settings for project name. + Not needed if `project_name` is passed. + + Raises: + ValueError: When both 'project_name' and 'project_settings' were not + passed. + """ default = "work" if not task_type or not host_name: return default if not project_settings: + if not project_name: + raise ValueError(( + "`get_workfile_template_key` requires to pass" + " one of 'project_name' or 'project_settings' arguments." 
+ )) project_settings = get_project_settings(project_name) try: From 96b1309ec927909f360b6e22f653ac85b5537bec Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:10:24 +0200 Subject: [PATCH 256/308] use new functions in already existing workdir functions --- openpype/lib/__init__.py | 4 ++++ openpype/lib/avalon_context.py | 25 +++++++++++++++++++------ 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 9bcd0f7587..3d392dc745 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -71,6 +71,8 @@ from .avalon_context import ( get_linked_assets, get_latest_version, + get_workfile_template_key, + get_workfile_template_key_from_context, get_workdir_data, get_workdir, get_workdir_with_workdir_data, @@ -189,6 +191,8 @@ __all__ = [ "get_linked_assets", "get_latest_version", + "get_workfile_template_key", + "get_workfile_template_key_from_context", "get_workdir_data", "get_workdir", "get_workdir_with_workdir_data", diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 449dde51c4..497348af33 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -494,7 +494,8 @@ def get_workdir_data(project_doc, asset_doc, task_name, host_name): def get_workdir_with_workdir_data( - workdir_data, anatomy=None, project_name=None, template_key=None + workdir_data, anatomy=None, project_name=None, + template_key=None, dbcon=None ): """Fill workdir path from entered data and project's anatomy. @@ -508,8 +509,10 @@ def get_workdir_with_workdir_data( `project_name` is entered. project_name (str): Project's name. Optional if `anatomy` is entered otherwise Anatomy object is created with using the project name. - template_key (str): Key of work templates in anatomy templates. By - default is seto to `"work"`. + template_key (str): Key of work templates in anatomy templates. 
If not + passed `get_workfile_template_key_from_context` is used to get it. + dbcon(AvalonMongoDB): Mongo connection. Required only if 'template_key' + and 'project_name' are not passed. Returns: TemplateResult: Workdir path. @@ -527,7 +530,13 @@ def get_workdir_with_workdir_data( anatomy = Anatomy(project_name) if not template_key: - template_key = "work" + template_key = get_workfile_template_key_from_context( + workdir_data["asset"], + workdir_data["task"], + workdir_data["app"], + project_name=workdir_data["project"]["name"], + dbcon=dbcon + ) anatomy_filled = anatomy.format(workdir_data) # Output is TemplateResult object which contain usefull data @@ -568,7 +577,9 @@ def get_workdir( project_doc, asset_doc, task_name, host_name ) # Output is TemplateResult object which contain usefull data - return get_workdir_with_workdir_data(workdir_data, anatomy, template_key) + return get_workdir_with_workdir_data( + workdir_data, anatomy, template_key=template_key + ) @with_avalon @@ -637,7 +648,9 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): # Prepare anatomy anatomy = Anatomy(project_doc["name"]) # Get workdir path (result is anatomy.TemplateResult) - template_workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + template_workdir = get_workdir_with_workdir_data( + workdir_data, anatomy, dbcon=dbcon + ) template_workdir_path = str(template_workdir).replace("\\", "/") # Replace slashses in workdir path where workfile is located From 960c5a2279e155cdbfdd8c600d7d86a6fde48d3d Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:10:55 +0200 Subject: [PATCH 257/308] template key is used when launching application --- openpype/lib/applications.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 71ab2eac61..fbf991a32e 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -28,7 +28,8 @@ from . 
import ( from .local_settings import get_openpype_username from .avalon_context import ( get_workdir_data, - get_workdir_with_workdir_data + get_workdir_with_workdir_data, + get_workfile_template_key_from_context ) from .python_module_tools import ( @@ -1236,8 +1237,18 @@ def prepare_context_environments(data): anatomy = data["anatomy"] + template_key = get_workfile_template_key_from_context( + asset_doc["name"], + task_name, + app.host_name, + project_name=project_name, + dbcon=data["dbcon"] + ) + try: - workdir = get_workdir_with_workdir_data(workdir_data, anatomy) + workdir = get_workdir_with_workdir_data( + workdir_data, anatomy, template_key=template_key + ) except Exception as exc: raise ApplicationLaunchFailed( From eb0ec073b1243b1c1b392cf43a1c2ce830954549 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:11:19 +0200 Subject: [PATCH 258/308] reduced project query time by projection of required (and used) keys --- openpype/tools/workfiles/app.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 66e204e89c..ca202ae2ca 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -55,9 +55,13 @@ class NameWindow(QtWidgets.QDialog): # Set work file data for template formatting asset_name = session["AVALON_ASSET"] - project_doc = io.find_one({ - "type": "project" - }) + project_doc = io.find_one( + {"type": "project"}, + { + "name": True, + "data.code": True + } + ) self.data = { "project": { "name": project_doc["name"], From 7e1ac056b6044a60ba38924e8d6254aa210da4aa Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:13:13 +0200 Subject: [PATCH 259/308] reorganized attributes and used new task models --- openpype/tools/workfiles/app.py | 74 ++++++++++++++++----------------- 1 file changed, 36 insertions(+), 38 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 
ca202ae2ca..2fa2d19b35 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -12,10 +12,15 @@ from avalon import style, io, api, pipeline from avalon.tools import lib as tools_lib from avalon.tools.widgets import AssetWidget -from avalon.tools.models import TasksModel from avalon.tools.delegates import PrettyTimeDelegate -from .model import FilesModel +from .model import ( + TASK_NAME_ROLE, + TASK_TYPE_ROLE, + FilesModel, + TasksModel, + TasksProxyModel +) from .view import FilesView from openpype.lib import ( @@ -313,32 +318,30 @@ class TasksWidget(QtWidgets.QWidget): task_changed = QtCore.Signal() - def __init__(self, parent=None): + def __init__(self, dbcon=None, parent=None): super(TasksWidget, self).__init__(parent) - self.setContentsMargins(0, 0, 0, 0) - view = QtWidgets.QTreeView() - view.setIndentation(0) - model = TasksModel(io) - view.setModel(model) + tasks_view = QtWidgets.QTreeView(self) + tasks_view.setIndentation(0) + tasks_view.setSortingEnabled(True) + if dbcon is None: + dbcon = io + + tasks_model = TasksModel(dbcon) + tasks_proxy = TasksProxyModel() + tasks_proxy.setSourceModel(tasks_model) + tasks_view.setModel(tasks_proxy) layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(view) + layout.addWidget(tasks_view) - # Hide the default tasks "count" as we don't need that data here. 
- view.setColumnHidden(1, True) + selection_model = tasks_view.selectionModel() + selection_model.currentChanged.connect(self.task_changed) - selection = view.selectionModel() - selection.currentChanged.connect(self.task_changed) - - self.models = { - "tasks": model - } - - self.widgets = { - "view": view, - } + self._tasks_model = tasks_model + self._tasks_proxy = tasks_proxy + self._tasks_view = tasks_view self._last_selected_task = None @@ -354,7 +357,7 @@ class TasksWidget(QtWidgets.QWidget): if current: self._last_selected_task = current - self.models["tasks"].set_assets(asset_docs=[asset]) + self._tasks_model.set_asset(asset_doc) if self._last_selected_task: self.select_task(self._last_selected_task) @@ -374,21 +377,20 @@ class TasksWidget(QtWidgets.QWidget): """ # Clear selection - view = self.widgets["view"] - model = view.model() - selection_model = view.selectionModel() + selection_model = self._tasks_view.selectionModel() selection_model.clearSelection() # Select the task mode = selection_model.Select | selection_model.Rows - for row in range(model.rowCount(QtCore.QModelIndex())): - index = model.index(row, 0, QtCore.QModelIndex()) + for row in range(self._tasks_model.rowCount()): + index = self._tasks_model.index(row, 0) name = index.data(QtCore.Qt.DisplayRole) - if name == task: + if name == task_name: selection_model.select(index, mode) # Set the currently active index - view.setCurrentIndex(index) + self._tasks_view.setCurrentIndex(index) + break def get_current_task(self): """Return name of task at current index (selected) @@ -397,16 +399,12 @@ class TasksWidget(QtWidgets.QWidget): str: Name of the current task. 
""" - view = self.widgets["view"] - index = view.currentIndex() - index = index.sibling(index.row(), 0) # ensure column zero for name + index = self._tasks_view.currentIndex() + selection_model = self._tasks_view.selectionModel() + if index.isValid() and selection_model.isSelected(index): + return index.data(TASK_NAME_ROLE) + return None - selection = view.selectionModel() - if selection.isSelected(index): - # Ignore when the current task is not selected as the "No task" - # placeholder might be the current index even though it's - # disallowed to be selected. So we only return if it is selected. - return index.data(QtCore.Qt.DisplayRole) class FilesWidget(QtWidgets.QWidget): From 9449df703f5f8d53d63b46c17f7838d8e148040f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:18:58 +0200 Subject: [PATCH 260/308] added task type as part of task widget context --- openpype/tools/workfiles/app.py | 56 +++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 24 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 2fa2d19b35..11d6257b06 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -345,15 +345,15 @@ class TasksWidget(QtWidgets.QWidget): self._last_selected_task = None - def set_asset(self, asset): - if asset is None: - # Asset deselected + def set_asset(self, asset_doc): + # Asset deselected + if asset_doc is None: return # Try and preserve the last selected task and reselect it # after switching assets. If there's no currently selected # asset keep whatever the "last selected" was prior to it. - current = self.get_current_task() + current = self.get_current_task_name() if current: self._last_selected_task = current @@ -365,7 +365,7 @@ class TasksWidget(QtWidgets.QWidget): # Force a task changed emit. self.task_changed.emit() - def select_task(self, task): + def select_task(self, task_name): """Select a task by name. 
If the task does not exist in the current model then selection is only @@ -384,7 +384,7 @@ class TasksWidget(QtWidgets.QWidget): mode = selection_model.Select | selection_model.Rows for row in range(self._tasks_model.rowCount()): index = self._tasks_model.index(row, 0) - name = index.data(QtCore.Qt.DisplayRole) + name = index.data(TASK_NAME_ROLE) if name == task_name: selection_model.select(index, mode) @@ -392,7 +392,7 @@ class TasksWidget(QtWidgets.QWidget): self._tasks_view.setCurrentIndex(index) break - def get_current_task(self): + def get_current_task_name(self): """Return name of task at current index (selected) Returns: @@ -405,6 +405,12 @@ class TasksWidget(QtWidgets.QWidget): return index.data(TASK_NAME_ROLE) return None + def get_current_task_type(self): + index = self._tasks_view.currentIndex() + selection_model = self._tasks_view.selectionModel() + if index.isValid() and selection_model.isSelected(index): + return index.data(TASK_TYPE_ROLE) + return None class FilesWidget(QtWidgets.QWidget): @@ -417,7 +423,8 @@ class FilesWidget(QtWidgets.QWidget): # Setup self._asset = None - self._task = None + self._task_name = None + self._task_type = None # Pype's anatomy object for current project self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) @@ -512,14 +519,15 @@ class FilesWidget(QtWidgets.QWidget): self.btn_browse = btn_browse self.btn_save = btn_save - def set_asset_task(self, asset, task): + def set_asset_task(self, asset, task_name, task_type): self._asset = asset - self._task = task + self._task_name = task_name + self._task_type = task_type # Define a custom session so we can query the work root # for a "Work area" that is not our current Session. # This way we can browse it even before we enter it. 
- if self._asset and self._task: + if self._asset and self._task_name and self._task_type: session = self._get_session() self.root = self.host.work_root(session) self.files_model.set_root(self.root) @@ -542,7 +550,7 @@ class FilesWidget(QtWidgets.QWidget): changes = pipeline.compute_session_changes( session, asset=self._asset, - task=self._task + task=self._task_name, ) session.update(changes) @@ -555,14 +563,14 @@ class FilesWidget(QtWidgets.QWidget): changes = pipeline.compute_session_changes( session, asset=self._asset, - task=self._task + task=self._task_name, ) if not changes: # Return early if we're already in the right Session context # to avoid any unwanted Task Changed callbacks to be triggered. return - api.update_current_task(asset=self._asset, task=self._task) + api.update_current_task(asset=self._asset, task=self._task_name) def open_file(self, filepath): host = self.host @@ -706,7 +714,7 @@ class FilesWidget(QtWidgets.QWidget): self._enter_session() # Make sure we are in the right session self.host.save_file(file_path) - self.set_asset_task(self._asset, self._task) + self.set_asset_task(self._asset, self._task_name, self._task_type) pipeline.emit("after.workfile.save", [file_path]) @@ -733,7 +741,7 @@ class FilesWidget(QtWidgets.QWidget): changes = pipeline.compute_session_changes( session, asset=self._asset, - task=self._task + task=self._task_name, ) session.update(changes) @@ -756,7 +764,7 @@ class FilesWidget(QtWidgets.QWidget): # Force a full to the asset as opposed to just self.refresh() so # that it will actually check again whether the Work directory exists - self.set_asset_task(self._asset, self._task) + self.set_asset_task(self._asset, self._task_name, self._task_type) def refresh(self): """Refresh listed files for current selection in the interface""" @@ -1005,7 +1013,7 @@ class Window(QtWidgets.QMainWindow): if asset_docs: asset_doc = asset_docs[0] - task_name = self.tasks_widget.get_current_task() + task_name = 
self.tasks_widget.get_current_task_name() workfile_doc = None if asset_doc and task_name and filepath: @@ -1032,7 +1040,7 @@ class Window(QtWidgets.QMainWindow): def _get_current_workfile_doc(self, filepath=None): if filepath is None: filepath = self.files_widget._get_selected_filepath() - task_name = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() asset_docs = self.assets_widget.get_selected_assets() if not task_name or not asset_docs or not filepath: return @@ -1052,7 +1060,7 @@ class Window(QtWidgets.QMainWindow): workdir, filename = os.path.split(filepath) asset_docs = self.assets_widget.get_selected_assets() asset_doc = asset_docs[0] - task_name = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() create_workfile_doc(asset_doc, task_name, filename, workdir, io) def set_context(self, context): @@ -1071,7 +1079,6 @@ class Window(QtWidgets.QMainWindow): # Select the asset self.assets_widget.select_assets([asset], expand=True) - # Force a refresh on Tasks? 
self.tasks_widget.set_asset(asset_document) if "task" in context: @@ -1101,12 +1108,13 @@ class Window(QtWidgets.QMainWindow): asset = self.assets_widget.get_selected_assets() or None if asset is not None: asset = asset[0] - task = self.tasks_widget.get_current_task() + task_name = self.tasks_widget.get_current_task_name() + task_type = self.tasks_widget.get_current_task_type() self.tasks_widget.setEnabled(bool(asset)) - self.files_widget.setEnabled(all([bool(task), bool(asset)])) - self.files_widget.set_asset_task(asset, task) + self.files_widget.setEnabled(all([bool(task_name), bool(asset)])) + self.files_widget.set_asset_task(asset, task_name, task_type) self.files_widget.refresh() From 2bb5059b85d574432f017d9eb65028984ffcbd76 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:19:30 +0200 Subject: [PATCH 261/308] use template key functions from openpype lib --- openpype/tools/workfiles/app.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 11d6257b06..3a5272c4b9 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -28,7 +28,8 @@ from openpype.lib import ( get_workdir, get_workfile_doc, create_workfile_doc, - save_workfile_data_to_doc + save_workfile_data_to_doc, + get_workfile_template_key ) log = logging.getLogger(__name__) @@ -547,10 +548,16 @@ class FilesWidget(QtWidgets.QWidget): """Return a modified session for the current asset and task""" session = api.Session.copy() + self.template_key = get_workfile_template_key( + self._task_type, + session["AVALON_APP"], + project_name=session["AVALON_PROJECT"] + ) changes = pipeline.compute_session_changes( session, asset=self._asset, task=self._task_name, + template_key=self.template_key ) session.update(changes) @@ -564,6 +571,7 @@ class FilesWidget(QtWidgets.QWidget): session, asset=self._asset, task=self._task_name, + template_key=self.template_key ) if not 
changes: # Return early if we're already in the right Session context @@ -742,6 +750,7 @@ class FilesWidget(QtWidgets.QWidget): session, asset=self._asset, task=self._task_name, + template_key=self.template_key ) session.update(changes) From cb6b27f62e94bdfdd3e6b59debb2649452c01724 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:19:39 +0200 Subject: [PATCH 262/308] minor fixes --- openpype/tools/workfiles/app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 3a5272c4b9..e559eb61f3 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -628,7 +628,7 @@ class FilesWidget(QtWidgets.QWidget): result = messagebox.exec_() if result == messagebox.Yes: return True - elif result == messagebox.No: + if result == messagebox.No: return False return None @@ -950,7 +950,7 @@ class Window(QtWidgets.QMainWindow): assets_widget = AssetWidget(io, parent=home_body_widget) assets_widget.set_current_asset_btn_visibility(True) - tasks_widget = TasksWidget(home_body_widget) + tasks_widget = TasksWidget(io, home_body_widget) files_widget = FilesWidget(home_body_widget) side_panel = SidePanelWidget(home_body_widget) From fd30b9290487e866c567875c935be8539f191ec5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:31:20 +0200 Subject: [PATCH 263/308] added new anatomy-templates-enum to readme --- openpype/settings/entities/schemas/README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index 2034d4e463..05605f8ce1 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -380,6 +380,20 @@ How output of the schema could look like on save: } ``` +### anatomy-templates-enum +- enumeration of all available anatomy template keys +- have only single selection mode +- it is 
possible to define default value `default` + - `"work"` is used if default value is not specified +``` +{ + "key": "host", + "label": "Host name", + "type": "anatomy-templates-enum", + "default": "publish" +} +``` + ### hosts-enum - enumeration of available hosts - multiselection can be allowed with setting key `"multiselection"` to `True` (Default: `False`) From e5456fe55b1b22c5b8f97dd33201eaf5e28f705e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 26 Aug 2021 19:40:05 +0200 Subject: [PATCH 264/308] initial commit of "NiceSlide" widget --- openpype/widgets/sliders.py | 138 ++++++++++++++++++++++++++++++++++++ 1 file changed, 138 insertions(+) create mode 100644 openpype/widgets/sliders.py diff --git a/openpype/widgets/sliders.py b/openpype/widgets/sliders.py new file mode 100644 index 0000000000..2f26c3eb97 --- /dev/null +++ b/openpype/widgets/sliders.py @@ -0,0 +1,138 @@ +from Qt import QtWidgets, QtCore, QtGui + + +class NiceSlider(QtWidgets.QSlider): + def __init__(self, *args, **kwargs): + super(NiceSlider, self).__init__(*args, **kwargs) + self._mouse_clicked = False + self._handle_size = 0 + + self._bg_brush = QtGui.QBrush(QtGui.QColor("#21252B")) + self._fill_brush = QtGui.QBrush(QtGui.QColor("#5cadd6")) + + def mousePressEvent(self, event): + self._mouse_clicked = True + if event.button() == QtCore.Qt.LeftButton: + self._set_value_to_pos(event.pos()) + return event.accept() + return super(NiceSlider, self).mousePressEvent(event) + + def mouseMoveEvent(self, event): + if self._mouse_clicked: + self._set_value_to_pos(event.pos()) + + super(NiceSlider, self).mouseMoveEvent(event) + + def mouseReleaseEvent(self, event): + self._mouse_clicked = True + super(NiceSlider, self).mouseReleaseEvent(event) + + def _set_value_to_pos(self, pos): + if self.orientation() == QtCore.Qt.Horizontal: + self._set_value_to_pos_x(pos.x()) + else: + self._set_value_to_pos_y(pos.y()) + + def _set_value_to_pos_x(self, pos_x): + _range = self.maximum() - self.minimum() + 
handle_size = self._handle_size + half_handle = handle_size / 2 + pos_x -= half_handle + width = self.width() - handle_size + value = ((_range * pos_x) / width) + self.minimum() + self.setValue(value) + + def _set_value_to_pos_y(self, pos_y): + _range = self.maximum() - self.minimum() + handle_size = self._handle_size + half_handle = handle_size / 2 + pos_y = self.height() - pos_y - half_handle + height = self.height() - handle_size + value = (_range * pos_y / height) + self.minimum() + self.setValue(value) + + def paintEvent(self, event): + painter = QtGui.QPainter(self) + opt = QtWidgets.QStyleOptionSlider() + self.initStyleOption(opt) + + painter.fillRect(event.rect(), QtCore.Qt.transparent) + + painter.setRenderHint(QtGui.QPainter.HighQualityAntialiasing) + + horizontal = self.orientation() == QtCore.Qt.Horizontal + + rect = self.style().subControlRect( + QtWidgets.QStyle.CC_Slider, + opt, + QtWidgets.QStyle.SC_SliderGroove, + self + ) + + _range = self.maximum() - self.minimum() + if horizontal: + _handle_half = rect.height() / 2 + _handle_size = _handle_half * 2 + width = rect.width() - _handle_size + pos_x = ((width / _range) * self.value()) + pos_y = rect.center().y() - _handle_half + 1 + else: + _handle_half = rect.width() / 2 + _handle_size = _handle_half * 2 + height = rect.height() - _handle_size + pos_x = rect.center().x() - _handle_half + 1 + pos_y = height - ((height / _range) * self.value()) + + handle_rect = QtCore.QRect( + pos_x, pos_y, _handle_size, _handle_size + ) + + self._handle_size = _handle_size + _offset = 2 + _size = _handle_size - _offset + if horizontal: + if rect.height() > _size: + new_rect = QtCore.QRect(0, 0, rect.width(), _size) + center_point = QtCore.QPoint( + rect.center().x(), handle_rect.center().y() + ) + new_rect.moveCenter(center_point) + rect = new_rect + + ratio = rect.height() / 2 + fill_rect = QtCore.QRect( + rect.x(), + rect.y(), + handle_rect.right() - rect.x(), + rect.height() + ) + + else: + if rect.width() > 
_size: + new_rect = QtCore.QRect(0, 0, _size, rect.height()) + center_point = QtCore.QPoint( + handle_rect.center().x(), rect.center().y() + ) + new_rect.moveCenter(center_point) + rect = new_rect + + ratio = rect.width() / 2 + fill_rect = QtCore.QRect( + rect.x(), + handle_rect.y(), + rect.width(), + rect.height() - handle_rect.y(), + ) + + painter.save() + painter.setPen(QtCore.Qt.NoPen) + painter.setBrush(self._bg_brush) + painter.drawRoundedRect(rect, ratio, ratio) + + painter.setBrush(self._fill_brush) + painter.drawRoundedRect(fill_rect, ratio, ratio) + + painter.setPen(QtCore.Qt.NoPen) + painter.setBrush(self._fill_brush) + painter.drawEllipse(handle_rect) + painter.restore() From a8fd971b06a6f417599d7874d596c1a31a3eb1ae Mon Sep 17 00:00:00 2001 From: Petr Dvorak Date: Fri, 27 Aug 2021 10:58:36 +0200 Subject: [PATCH 265/308] Removed deprecated submodules --- openpype/modules/ftrack/python2_vendor/arrow | 1 - openpype/modules/ftrack/python2_vendor/ftrack-python-api | 1 - 2 files changed, 2 deletions(-) delete mode 160000 openpype/modules/ftrack/python2_vendor/arrow delete mode 160000 openpype/modules/ftrack/python2_vendor/ftrack-python-api diff --git a/openpype/modules/ftrack/python2_vendor/arrow b/openpype/modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/openpype/modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 412dccc6182d456f660aca01b9f7444627f31686 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 27 Aug 2021 11:08:38 +0100 Subject: [PATCH 266/308] Fix PyQt5 on Windows build. 
--- tools/build_dependencies.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tools/build_dependencies.py b/tools/build_dependencies.py index 3898450471..e5a430e220 100644 --- a/tools/build_dependencies.py +++ b/tools/build_dependencies.py @@ -135,6 +135,16 @@ progress_bar.close() # iterate over frozen libs and create list to delete libs_dir = build_dir / "lib" +# On Windows "python3.dll" is needed for PyQt5 from the build. +if platform.system().lower() == "windows": + src = Path(libs_dir / "PyQt5" / "python3.dll") + dst = Path(deps_dir / "PyQt5" / "python3.dll") + if src.exists(): + shutil.copyfile(src, dst) + else: + _print("Could not find {}".format(src), 1) + sys.exit(1) + to_delete = [] # _print("Finding duplicates ...") deps_items = list(deps_dir.iterdir()) From d8b62cd2dd129fdf7307e782c95879ce74aec6b8 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 14:17:05 +0200 Subject: [PATCH 267/308] fixed hierarchy position of houdini submit to deadline plugins --- .../deadline/plugins/publish/submit_houdini_remote_publish.py | 0 .../deadline/plugins/publish/submit_houdini_render_deadline.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_houdini_remote_publish.py (100%) rename openpype/modules/{ => default_modules}/deadline/plugins/publish/submit_houdini_render_deadline.py (100%) diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_remote_publish.py similarity index 100% rename from openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_remote_publish.py diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_render_deadline.py similarity 
index 100% rename from openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py rename to openpype/modules/default_modules/deadline/plugins/publish/submit_houdini_render_deadline.py From 1c571b62febbc70340b39b5c4ba7c3f23ed44d7c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 14:41:24 +0200 Subject: [PATCH 268/308] pass template key --- openpype/tools/workfiles/app.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index e559eb61f3..b542e6e718 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -578,7 +578,11 @@ class FilesWidget(QtWidgets.QWidget): # to avoid any unwanted Task Changed callbacks to be triggered. return - api.update_current_task(asset=self._asset, task=self._task_name) + api.update_current_task( + asset=self._asset, + task=self._task_name, + template_key=self.template_key + ) def open_file(self, filepath): host = self.host From 4c55040a58c9b68ebf1a804b7b2df84fb45b9c16 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 16:55:10 +0200 Subject: [PATCH 269/308] enhanced `ModuleSettingsDef` to split each method into 2 separated abstract methods --- openpype/modules/__init__.py | 2 + openpype/modules/base.py | 124 ++++++++++++++++++++++++++++++++++- 2 files changed, 123 insertions(+), 3 deletions(-) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 6169f99f77..6b3c0dc3a6 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -9,6 +9,7 @@ from .base import ( ModulesManager, TrayModulesManager, + BaseModuleSettingsDef, ModuleSettingsDef, get_module_settings_defs ) @@ -24,6 +25,7 @@ __all__ = ( "ModulesManager", "TrayModulesManager", + "BaseModuleSettingsDef", "ModuleSettingsDef", "get_module_settings_defs" ) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index c8cc911ca6..66f962526f 100644 --- a/openpype/modules/base.py 
+++ b/openpype/modules/base.py @@ -13,8 +13,17 @@ from abc import ABCMeta, abstractmethod import six import openpype -from openpype.settings import get_system_settings -from openpype.settings.lib import get_studio_system_settings_overrides +from openpype.settings import ( + get_system_settings, + SYSTEM_SETTINGS_KEY, + PROJECT_SETTINGS_KEY, + SCHEMA_KEY_SYSTEM_SETTINGS, + SCHEMA_KEY_PROJECT_SETTINGS +) + +from openpype.settings.lib import ( + get_studio_system_settings_overrides, +) from openpype.lib import PypeLogger @@ -1030,7 +1039,7 @@ def get_module_settings_defs(): @six.add_metaclass(ABCMeta) -class ModuleSettingsDef: +class BaseModuleSettingsDef: """Definition of settings for OpenPype module or AddOn.""" _id = None @@ -1091,3 +1100,112 @@ class ModuleSettingsDef: Passed data are by path to first key defined in main schemas. """ pass + + +class ModuleSettingsDef(BaseModuleSettingsDef): + def get_defaults(self, top_key): + """Split method into 2 methods by top key.""" + if top_key == SYSTEM_SETTINGS_KEY: + return self.get_default_system_settings() or {} + elif top_key == PROJECT_SETTINGS_KEY: + return self.get_default_project_settings() or {} + return {} + + def save_defaults(self, top_key, data): + """Split method into 2 methods by top key.""" + if top_key == SYSTEM_SETTINGS_KEY: + self.save_system_defaults(data) + elif top_key == PROJECT_SETTINGS_KEY: + self.save_project_defaults(data) + + def get_settings_schemas(self, schema_type): + """Split method into 2 methods by schema type.""" + if schema_type == SCHEMA_KEY_SYSTEM_SETTINGS: + return self.get_system_settings_schemas() or {} + elif schema_type == SCHEMA_KEY_PROJECT_SETTINGS: + return self.get_project_settings_schemas() or {} + return {} + + def get_dynamic_schemas(self, schema_type): + """Split method into 2 methods by schema type.""" + if schema_type == SCHEMA_KEY_SYSTEM_SETTINGS: + return self.get_system_dynamic_schemas() or {} + elif schema_type == SCHEMA_KEY_PROJECT_SETTINGS: + return 
self.get_project_dynamic_schemas() or {} + return {} + + @abstractmethod + def get_system_settings_schemas(self): + """Schemas and templates usable in system settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. + """ + pass + + @abstractmethod + def get_project_settings_schemas(self): + """Schemas and templates usable in project settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. + """ + pass + + @abstractmethod + def get_system_dynamic_schemas(self): + """System schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. + """ + pass + + @abstractmethod + def get_project_dynamic_schemas(self): + """Project schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. + """ + pass + + @abstractmethod + def get_default_system_settings(self): + """Default system settings values. + + Returns: + dict: Default values by path to first key. + """ + pass + + @abstractmethod + def get_default_project_settings(self): + """Default project settings values. + + Returns: + dict: Default values by path to first key. + """ + pass + + @abstractmethod + def save_system_defaults(self, data): + """Save default system settings values. + + Passed data are by path to first key defined in main schemas. + """ + pass + + @abstractmethod + def save_project_defaults(self, data): + """Save default project settings values. + + Passed data are by path to first key defined in main schemas. 
+ """ + pass From ab7ea51bab5eacfb8ccd6d4c913acb89b115855e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 17:12:35 +0200 Subject: [PATCH 270/308] preimplemented json files settings definition which needs only one method to implement --- openpype/modules/__init__.py | 4 + openpype/modules/base.py | 195 +++++++++++++++++++++++++++++++++++ 2 files changed, 199 insertions(+) diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 6b3c0dc3a6..68b5f6c247 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -11,6 +11,8 @@ from .base import ( BaseModuleSettingsDef, ModuleSettingsDef, + JsonFilesSettingsDef, + get_module_settings_defs ) @@ -27,5 +29,7 @@ __all__ = ( "BaseModuleSettingsDef", "ModuleSettingsDef", + "JsonFilesSettingsDef", + "get_module_settings_defs" ) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 66f962526f..a11867ea15 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -2,6 +2,7 @@ """Base class for Pype Modules.""" import os import sys +import json import time import inspect import logging @@ -23,6 +24,7 @@ from openpype.settings import ( from openpype.settings.lib import ( get_studio_system_settings_overrides, + load_json_file ) from openpype.lib import PypeLogger @@ -1103,6 +1105,11 @@ class BaseModuleSettingsDef: class ModuleSettingsDef(BaseModuleSettingsDef): + """Settings definiton with separated system and procect settings parts. + + Reduce conditions that must be checked and adds predefined methods for + each case. + """ def get_defaults(self, top_key): """Split method into 2 methods by top key.""" if top_key == SYSTEM_SETTINGS_KEY: @@ -1209,3 +1216,191 @@ class ModuleSettingsDef(BaseModuleSettingsDef): Passed data are by path to first key defined in main schemas. """ pass + + +class JsonFilesSettingsDef(ModuleSettingsDef): + """Preimplemented settings definition using json files and file structure. 
+ + Expected file structure: + ┕ root + │ + │ # Default values + ┝ defaults + │ ┝ system_settings.json + │ ┕ project_settings.json + │ + │ # Schemas for `dynamic_template` type + ┝ dynamic_schemas + │ ┝ system_dynamic_schemas.json + │ ┕ project_dynamic_schemas.json + │ + │ # Schemas that can be used anywhere (enhancement for `dynamic_schemas`) + ┕ schemas + ┝ system_schemas + │ ┝ # Any schema or template files + │ ┕ ... + ┕ project_schemas + ┝ # Any schema or template files + ┕ ... + + Schemas can be loaded with prefix to avoid duplicated schema/template names + across all OpenPype addons/modules. Prefix can be defined with class + attribute `schema_prefix`. + + Only think which must be implemented in `get_settings_root_dir` which + should return directory path to `root` (in structure graph above). + """ + # Possible way how to define `schemas` prefix + schema_prefix = "" + + @abstractmethod + def get_settings_root_dir(self): + """Directory path where settings and it's schemas are located.""" + pass + + def __init__(self): + settings_root_dir = self.get_settings_root_dir() + defaults_dir = os.path.join( + settings_root_dir, "defaults" + ) + dynamic_schemas_dir = os.path.join( + settings_root_dir, "dynamic_schemas" + ) + schemas_dir = os.path.join( + settings_root_dir, "schemas" + ) + + self.system_defaults_filepath = os.path.join( + defaults_dir, "system_settings.json" + ) + self.project_defaults_filepath = os.path.join( + defaults_dir, "project_settings.json" + ) + + self.system_dynamic_schemas_filepath = os.path.join( + dynamic_schemas_dir, "system_dynamic_schemas.json" + ) + self.project_dynamic_schemas_filepath = os.path.join( + dynamic_schemas_dir, "project_dynamic_schemas.json" + ) + + self.system_schemas_dir = os.path.join( + schemas_dir, "system_schemas" + ) + self.project_schemas_dir = os.path.join( + schemas_dir, "project_schemas" + ) + + def _load_json_file_data(self, path): + if os.path.exists(path): + return load_json_file(path) + return {} + + def 
get_default_system_settings(self): + """Default system settings values. + + Returns: + dict: Default values by path to first key. + """ + return self._load_json_file_data(self.system_defaults_filepath) + + def get_default_project_settings(self): + """Default project settings values. + + Returns: + dict: Default values by path to first key. + """ + return self._load_json_file_data(self.project_defaults_filepath) + + def _save_data_to_filepath(self, path, data): + dirpath = os.path.dirname(path) + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + with open(path, "w") as file_stream: + json.dump(data, file_stream, indent=4) + + def save_system_defaults(self, data): + """Save default system settings values. + + Passed data are by path to first key defined in main schemas. + """ + self._save_data_to_filepath(self.system_defaults_filepath, data) + + def save_project_defaults(self, data): + """Save default project settings values. + + Passed data are by path to first key defined in main schemas. + """ + self._save_data_to_filepath(self.project_defaults_filepath, data) + + def get_system_dynamic_schemas(self): + """System schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. + """ + return self._load_json_file_data(self.system_dynamic_schemas_filepath) + + def get_project_dynamic_schemas(self): + """Project schemas by dynamic schema name. + + If dynamic schema name is not available in then schema will not used. + + Returns: + dict: Schemas or list of schemas by dynamic schema name. 
+ """ + return self._load_json_file_data(self.project_dynamic_schemas_filepath) + + def _load_files_from_path(self, path): + output = {} + if not path or not os.path.exists(path): + return output + + if os.path.isfile(path): + filename = os.path.basename(path) + basename, ext = os.path.splitext(filename) + if ext == ".json": + if self.schema_prefix: + key = "{}/{}".format(self.schema_prefix, basename) + else: + key = basename + output[key] = self._load_json_file_data(path) + return output + + path = os.path.normpath(path) + for root, _, files in os.walk(path, topdown=False): + for filename in files: + basename, ext = os.path.splitext(filename) + if ext != ".json": + continue + + json_path = os.path.join(root, filename) + store_key = os.path.join( + root.replace(path, ""), basename + ).replace("\\", "/") + if self.schema_prefix: + store_key = "{}/{}".format(self.schema_prefix, store_key) + output[store_key] = self._load_json_file_data(json_path) + + return output + + def get_system_settings_schemas(self): + """Schemas and templates usable in system settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. + """ + return self._load_files_from_path(self.system_schemas_dir) + + def get_project_settings_schemas(self): + """Schemas and templates usable in project settings schemas. + + Returns: + dict: Schemas and templates by it's names. Names must be unique + across whole OpenPype. 
+ """ + return self._load_files_from_path(self.project_schemas_dir) From 82f361d48aa041e6b9da4b2c919dc567e2f8aae1 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 17:14:20 +0200 Subject: [PATCH 271/308] enable addon by default --- openpype/modules/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index a11867ea15..1fc1d900a0 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -431,7 +431,8 @@ class OpenPypeModule: class OpenPypeAddOn(OpenPypeModule): - pass + # Enable Addon by default + enabled = True class ModulesManager: From 2188e62b694dc53a7a07959ee51ebbf36ef1f3d4 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 17:14:33 +0200 Subject: [PATCH 272/308] implement abstract methods required for modules --- openpype/modules/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 1fc1d900a0..23ec3b8c6f 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -434,6 +434,14 @@ class OpenPypeAddOn(OpenPypeModule): # Enable Addon by default enabled = True + def initialize(self, module_settings): + """Initialization is not be required for most of addons.""" + pass + + def connect_with_modules(self, enabled_modules): + """Do not require to implement connection with modules for addon.""" + pass + class ModulesManager: """Manager of Pype modules helps to load and prepare them to work. 
From 5dd6d010609f91647b416d81e38ac282f7de3269 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 27 Aug 2021 17:23:34 +0200 Subject: [PATCH 273/308] changed name of `get_settings_root_dir` to `get_settings_root_path` --- openpype/modules/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/base.py b/openpype/modules/base.py index 23ec3b8c6f..01c3cebe60 100644 --- a/openpype/modules/base.py +++ b/openpype/modules/base.py @@ -1256,19 +1256,19 @@ class JsonFilesSettingsDef(ModuleSettingsDef): across all OpenPype addons/modules. Prefix can be defined with class attribute `schema_prefix`. - Only think which must be implemented in `get_settings_root_dir` which + Only think which must be implemented in `get_settings_root_path` which should return directory path to `root` (in structure graph above). """ # Possible way how to define `schemas` prefix schema_prefix = "" @abstractmethod - def get_settings_root_dir(self): + def get_settings_root_path(self): """Directory path where settings and it's schemas are located.""" pass def __init__(self): - settings_root_dir = self.get_settings_root_dir() + settings_root_dir = self.get_settings_root_path() defaults_dir = os.path.join( settings_root_dir, "defaults" ) From ca1dfbd98c24f9b8bc22961a1cb91eac5436cdbf Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 28 Aug 2021 03:38:52 +0000 Subject: [PATCH 274/308] [Automated] Bump version --- CHANGELOG.md | 27 +++++++++++++++------------ openpype/version.py | 2 +- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c55be842a..4259a0f725 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,15 +1,26 @@ # Changelog -## [3.4.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.4.0-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD) **Merged pull requests:** +- Removed deprecated submodules 
[\#1967](https://github.com/pypeclub/OpenPype/pull/1967) +- Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) +- Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) +- Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962) +- Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960) +- Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958) +- Global: Avalon Host name collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949) +- Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948) - Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947) - Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942) -- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) +- Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) +- \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Disregard publishing time. 
[\#1888](https://github.com/pypeclub/OpenPype/pull/1888) - Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) +- Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) ## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20) @@ -45,6 +56,7 @@ - Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) - Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) - submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) - Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) - Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) - Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) @@ -63,6 +75,7 @@ - TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) - Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882) - Normalize path returned from Workfiles. 
[\#1880](https://github.com/pypeclub/OpenPype/pull/1880) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) - Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) @@ -70,23 +83,13 @@ - Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) - Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) - Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) -- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) - Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) - Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) -- Settings error dialog on show [\#1798](https://github.com/pypeclub/OpenPype/pull/1798) ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0) -**Merged pull requests:** - -- Build: don't add Poetry to `PATH` [\#1808](https://github.com/pypeclub/OpenPype/pull/1808) -- Nuke: ftrack family plugin settings preset [\#1805](https://github.com/pypeclub/OpenPype/pull/1805) -- nuke: fixing wrong name of family folder when `used existing frames` [\#1803](https://github.com/pypeclub/OpenPype/pull/1803) -- Collect ftrack family bugs [\#1801](https://github.com/pypeclub/OpenPype/pull/1801) -- Standalone publisher last project [\#1799](https://github.com/pypeclub/OpenPype/pull/1799) - ## [2.18.4](https://github.com/pypeclub/OpenPype/tree/2.18.4) (2021-06-24) [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/2.18.3...2.18.4) diff --git a/openpype/version.py b/openpype/version.py index 5fd6520953..2e769a1b62 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.4.0-nightly.2" +__version__ = "3.4.0-nightly.3" From 6533934577e22ec53c2f4a93811d696a07054b9e Mon Sep 17 00:00:00 2001 From: unknown Date: Sat, 28 Aug 2021 15:12:56 +0200 Subject: [PATCH 275/308] Added Moonrock Animation Studio to OpenPype users --- website/src/pages/index.js | 7 ++++++- website/static/img/moonrock_logo.png | Bin 0 -> 22947 bytes 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 website/static/img/moonrock_logo.png diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 6a233ddb66..00cf002aec 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -120,7 +120,12 @@ const studios = [ title: "Bad Clay", image: "/img/badClay_logo.png", infoLink: "https://www.bad-clay.com/", - } + }, + { + title: "Moonrock Animation Studio", + image: "/img/moonrock_logo.png", + infoLink: "https://www.moonrock.eu/", + } ]; function Service({imageUrl, title, description}) { diff --git a/website/static/img/moonrock_logo.png b/website/static/img/moonrock_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..249db7c2470b37bc24cd4649feb418238a690c34 GIT binary patch literal 22947 zcmdqJgNJyh7NFyO7B8a4fboWS?Gztg?3P`7P4?Q9hA}P%PLnA{o zFu)Ms=J~zf`$xQ97neG-&p!L?z1F?%b+2`ZefCuK8aWF&1OmCHuBNODf#98jpA}MK z@Co}TITG*}nY)^i7X-rNdi8@>qel81e0alK#nAh?o2|FcD^DAUkB^VAgR7I5)hl-! 
zVK+~^v~3v{2;?S2UHOroZ~Eq}Pdek+`Q_e(_~&YdB%YfigpZt=H(4I!+}+TQ7h(y> zVRm{V#zL?4J8V{ib_NF4GQNtfeY^>E&?WOW{wq!*RdKS`Wqjiqd9C@{PQBT|&I<)16yBKA zg}G3rWq*5x;lVuQZ5R=vBfgdharU5$=Z^$Gbm2}F)lDe67{RzB6xzn>V6s5&OD9BJ zfaeuD<#f?n97HZy^Oi@qMQ>fj`6Gb|!3>^s>B$;R7E`a#a%3t&AZcJ_L2LVJyMa*@ zU9S|Qf71F3yk>~>BKheN!Xhi*rYOQ5Qm`0zfrE=cDhLh)m13=9$y?Qqn+T){0`LRy zeuiEzh#j$`+NImwPYr3KxSz^Hr_KlF>G+89U&lcLP;t1=1+mo*w7dMfRP;nRZ2rC5T%tL5zzM=@-pL%#lFl5&Sx$di;;D8W= zcU*=b9KpBHWbYM37=j(~5_z!d_|LvCGz8MURNfX769aMe{Ms5iTm-kvFC)CTmQGo# zqO2S@(DW-v{3udgs7e3lbk006Ep2aYFv2{vqEbY`ajzjCK69`678Lq*82=%$FAMfb zJ*-Mc^D&MYPTCM08WWQ@q=3>uIc>g;GxnU}NA19Q1u)9VF=`#32;>o3<(8%jqazaq z5GMRB8rme~Vtippft9)7=*ax%MPkPzMP=nDFaqIjtxt}oY@^i^8y>fhe88$K>j>Nh zw*4H?mRb62Tihs7Z%%V%F5PAbeWm=r5=OTG&WCi2fwW1_uKkFAH@@WIsP3?{#4)4y zgNc?8VPPRS6nY+Os^wy}^#I8o1fJ*-CsOv(#zu2opNA^Z((=)N7 zeha_@PQX?UUGrteaePWO)}0POh$=&KI9faN2_N1#W~!|5xSquIk35~&yQ4qX+=*3H znak%q)UI+zvbdPLHHeB5e88;{W}fqDWpvcjAs(@%zzDrH1f`_=_gmB%$TI_uSp;l^mG1x3E~1+kA##CkrZK`Re8_zk4l=MqSRd zDJ77NS|%ZFSX0?q55F72?m@5Xpx5U75lkceJ4}9YeC&GC^0A0Jo9Lia3|As~E8*A; zU(!kNpBU%S6u9O|g$YTL^{}iznWZ#JNN-ssxs#f@;eS*JO^Ul=fVwHnX;n^CI(6aJls>`3k*OsuAVD{`&I)lym1}f;@^XwF! zS@#1wCkLY>=kLUA(P=u`aWNbf^r2=5ilUyk)1FX=F>07; zh&Ch|Jj?O=UelsDsljrKIzDdP{n2|bG!xQbOhTK~;X*Cgd2U&fUv&8I zZ+G){6T0QsTc&{iud)I4KE2#2zFRX55`(3mzmG%CXlT{z@DOa9+IJi!Ym<}dcK(PB zVm=F*j5~+T552n~)*_2V{*`Gr{%$PmP6KdYh2ShzQVXV%T&9tF z{mxZ?QNPUCV>wCnaeYv4tx~Ad)t7qfHCApGf~n&V>!@TzPn(5sn>E_Yxc7ff`P|!J zq`lwBgTyfe97OzO;c6FA)SMhAa`p4UxE}1Sudms&`V{$c=gd#*h|Idnghcr2Rz92D z^IHAWgRSvg(U`7x(Nt1WTE->+F~-{@eo{{b{ZXWv>B^0mE$S(p1NOq&7i?VTsIoKH zYBiN&tnN0{(bU@`)l0;cabLkbaH;oKNw^; z08K-e0QbkB{12hG9#T$By^naFtzB3aN;2eC_LbHy+ikW9<_vC*BXF;Tkve3M2c6b- z##ioGvnzM#=<0T4J=(igV?R}Gb2UhSXq)8Jm?TLH%#mpl`mIb!&DS5h&5jN_u<85! 
zt6qeUJPnVUpo#b6GjMy)->Ez|=+~4;89%A}t>z-}PWR z-PqVL#PX^vu^}O{>dcI%-gmlZNu2D!?F%_tt(D2#d58L?VoJ3m<=BOZUN=zP8jd-* zaoW@*rC(}DJ9Ck@?oE6Vl~JM1z<9dM8kO;)zjS0ow|>etZSttF?}#ouJk+a9smGWf z)2koZY;FFET{&mC!EN4;_cfBJLdR){keyEJqe@#-M{2;c9`Xpq9NYy zBY2pAE|(yAtMKgG?4Yo}rAiF9!Gqi(*j6OakBjri=th*|H5o_W5eKW-wZf2U4C^GC z#SPgaGQm(*M*o1Vp*%X3!knBpWeo`-BT9$s+;~q3+HBZ!z7!43G*b$p4ZVpenHAn% z7qXnM@W&)W@)7p;MMc1T^{04i;8~B2Ue=QsR$IsR2CIlylDNNv#aL!5x;ZgyPxi*Rkyg=Szr(Wnj*=gmF5TX#8Qj%nBAjeTZhq~0EU{=ao7lzE7~(TLDnUz(shjt5iK^o!3t zGc)7B1&`&XL`acoowM^Xo*sWA!qigTdR_V$S)wXJ_Ja6qKuY;@@lbckwOuDPjb-2W zp{vhdv$?iX$A*#zwG2EukYSPq#L(@IO6zG)(NK3G`EetdzfcnkWjzeC_yUU&)Xh#D zj8tF(*O$B~(Z>LQww~V@*6jEMX|JP7QsKw!*(VBMbeD;*u_zlEeeG=nOJaCPIoU6z zJPu}D!bk4E(_JnVo(Ts#%Ca#Q{UlP({}W;g*vJh*idOI;9SOnf_B%K76ggX;532Dv6OFGb}~PY|XBioUN56j@huO z304gXk`>S|6`g8ORDk!Dl>OKkJYR<$6oJ7`wcwc=IBkL+o*+9E%CQs!{ZN z>)~7&R1!^aCXc3ZC{*N(=gfA?uEslDSK0TBf`!&T&!#r#1NM+~=`c~wE*o%kf2JL* z+>!*}#3))#jPZ0gh>87Ub(l-T>}L?aYR`P9P<&}p!<=6Dwyes`W=ksXThq3 zn{=P8$_M~k3wr2{>g6#iHzuhJJn?jNa?%ouiCBM#hB)LZn!`0vqTs5Rp*<1^8@9`i&3_sA*QD!>CVZAHUZDT0{%Tj zBDv{+;l47??K>KwME4zK0nF4jx3B|8k*#R{g&>Uo>JDICz(OChyRhFB0`nipq9Mu7 z|K~%8q_j}*nb&(5M7{n;IEjfWn7Z0AAS)UcbN^ZP;C#J;$kiMmPUM0Wjo;N<=PuEB zW+y_}5WeSRlyy|MdiDPciQ%PIkV@-bIy7l=G%(rM(+<3C*w&q@ z8Q13Fwx9sp!HoV0GJ0kSS2jTO#%Vtk&`I3cd4Rr|92|g`k!tK!W>8^frYm!{`*Srj z+ISaCy2$R)NjtaP`$6JFENG_{+maz0kZPx8fg`HcY4XlwHLYhdS8sHt2LI-najv;m z(Xvl|dA25>CL0BqMnHz|BAIcmJ*`!f-q493z23T4E`fR=ckz&usF_r{_lSDS$!gjr zAQNF`?LVoc-MensDW~(pFv_DmMMG=0cEBD2)%|LzMsu~B_V+zqo43Tf64*5XdwLpz zRh2*0Q7+akp|8w`#c%<0clF9@hLNhvfg^~CizDaoMTNBltV(agtt@TLpS<5HBj8FR z4|10_BzeCqAS9&qCkB|STrt0ZfFEX9Rq}K__h9a^x;hyF2^Hnt7d*~(>^EaEzNYS{ z1$Wv2od5vlQfhUU=#S}ruCL#fn<)4DRy3RLZhf+MDR%_<`Qsv0&UC7qd!O>I>^+Fi z)Mpl!&EOXbh+Aa!H%gso=i7Sg;eYYC+LHBw6>5LZJX_{X2YfNxW$>>QF)IscZ7o&O zeSXZ3k9fU?FJIDWMVg7*9Pci?3^_l{cJ=CdcO%?GLMRs#I@L+J~oM&BmW)V-2FNYrKbe|1(Qbuwqu=VZe%OAQ!)C7w3ZR5?Yp!d zj_c{`8+RKf^7K_zfnD_4@8wmkKkxm(E9Nkg*HfQK3kGrl`9?lSjKN&JS)Q67a~+uW 
z51|I6qPn`eH4ct0Z-&mij#_VN{lD`T4^bcnLoN=~GTVxZix=hscH*6JH%4Zp6q1XE zaA*er9U`%jX4v(mx-or|y|no=o{=DAFm6lGu=3_ekhQW18ls+~xKmLBP*9H1zi}ao z6>7@aR$iU}?j~HlW%rJw!X%o!wY?uaM)eCly-JX}nRFi$n>Hm5>Zr0a_TFzmvq--# z)i0qazfCi%5c^Rl8(ZiWHBuTzK%%RY)WRz|mYb+LQ}1%)fumj?&feZ0^}VD#&YSbb zbE?_~sw4(}%moNQw%mKwvi9Ui_}K-vz0hvZK6+o~XwoY1^j~4p_U-UdbrYq?Hots? zEeO(6$%gB!EG&ji-o>zYe*pMwJ;WltZqE<)B;CF}uS#6}p+1T{=s*fpfVe_ftssi+ zi*ppT|^i9j%`eFMj&?QPh6uW)v0s z@1bm^8#2D4CeBqv-=uvCKokI+uFC(=@w$lFpJZ;${6pRV7dRa(jN|&l$Bizk_Wdd@Byi6 z+9lhfE3iPf^~{y7^pJ~_)tv{wwh~FRG_k?PMkJNG0DIowQkrxuDFacMB0oQ0(6~0{ z(Teb9kpM~BA%FmyUH~4N=b^C&iczEzf?CU)njb&Wz zRW7*qlk-A!|6I?&zzgh@YFj3y6R^sW1;WktPhHbZK0Dj9fd~C&Fc_>xHlB#$x-UKN zQw%_&CH5f9@SJU$i*3ei&iOy>khpxi6ygUD9?TipeY3d`i>-d4ukX(pa^@kJx%R@q z)aD}i`CPdwSLokLc|$8F0I?Xb*0c%r;Nx;Oju&Q&j*0RYzCDFl zOf!fNVgcqSBbp-dD`KT(WrzMpn+{iVxvFr5(oS-gR@JR#hRD^@kA38O-ZFo2)L@n= z7xZV|;Nw=!pd7`PSYNWoQkNgtrB(S%Q4>JbTT)X~4dvQ#+s%V#=eAk1@oJE@S0R;M zBjWRcUKLeUcQN$(R#vhj+9!gSBLi5wM*1MCf3r`e{uiJ>8yQ@hWM6ED5uPX_P& z5r5SYF)7!atrVTwK2KTe5kt63u}M8{IE@G+xGnG3pCLQbyqv7rlE5wv28d1%Nbj(n z*nNI|`|&%`OUcOL1j*GelX5k&^t|BfceScCWGK3;>y4XVZ&^ww37NMcoAh>;Ccw8ffL{laDX7|<7Fm-xN9bKcAc83AW&|%;arVt)pjaI7!6)N!fwx6 zO-+E^WR?#O6vXl4|C3!;EKr}*Ij0QxBa zU5!9EDzjR)Ss%(y_ZpC_+HxB$(z!1#-c{fJt^|)Zx6Y#*{h9LfPGjFco!|zZ1>oV` z7t6?$LPI=KCB1l-y5g5UK7V?5Dg=;IW-(hz-f!M?b{T#=fU{(%C)~UeA%Me_TID?k zo(ezRD3Js#2M-7c+~LI6a!eXr^^*cga;?84m_CJv=n)NiiZrq2@n%UQ$J^qYh~#vW)Mzt6;`3}8+dtv)9!DZGxxV@~CbU8h!j z#w7+7N862S92Gu0b4h2;)DKaihV$CM6C|vAXb!BT!ng30@sDo+JX2>CwFqqpJlQwY z_6_EFn!XO(nVUVU3G=m6{}69d=lIFz7o$Oh)RHB@QVE;i9}VZbKWNK21k2df&W5W& zRK$q3yQcpYW^$m(M6Q4MMwsJ2D#7cqba*?aVxWD+}zwRVPh}O4l6?P^YXZk z&Z#+MCav469Y7F}r002J(SCl^7r-hp>rF3YQcrpJQ+v{F)kQzEQk)>PZX6HD$8CO^ zr38j1RhC5EY+jTA2gh~V77W@?8Lhe#;)DFhuC%Zwd9ngPB4sX)mp0)`36htyONsIs z3qPY(-#h+Y?N93trbCnrB)Li ziB?eBOP87YmA!6GJ0LSqocT;QxcwJlpVWC+b-jBv_WecGPD6}fCDnTn!}|qJHL+Si zu@wdc+6hbsfR^R(AhKy ztsRmI&HMRzlKPJNlalY>_vHXzHY5>Pu{=B8GgDDfSDML~a$Nx!$`B5`OcV6pB~-g-^!A)!iL{&YAI 
zzMk_II@2aRQ>eNI0*mbR0XdxWYaqrl(@Sd0>=9-LpE_*fgcJ_|QSyKC^CHb*(U>l? z!h8bw==ENl9J;iobUt-C3#hT^;oNSC-~`}@Z(+l!FvioCc4O2yE#7D@wPgk@luKjr z;1Jz(w%^yd|64$5zT6-4Z{+(6f#AKj=7CF>i5#-2RP0in8Eab&^Q!>S-UYR*6wwyd zQ{DQv$9y#ZgnlZD@<#GH?l5D`Phvi>#=zK^WjY|^p-0Z4NitiRVO0kj*?4dn90f2U z%jok>0>Uvx2me}E_Kjg2Zdyt~S1Uv2D}^u|A@#UsKng9m-HJ8i=jT_P$^Z-b+XQnO zkj1`)fUKtR;uNjr83_Db2AmZSPFRs2bH$D&CIJ7O_2_0t0zVnQn+oe7{x3!`=-K1AJw87nU5fff+XH|pHx=gxfUVy4S_E{jxm+?rsa zdH?wfyAPXy0Z)MAzaZ>vN@0dSf8jr!i3h&BsQ2{h=>zhYXOm224xVe@e5$S0hef{8 z@*b{FWGntR!Tn=YI5k~JJBI}zFKb}0^8&Ejl9#4JwjW$=Y;1UNF81~of4gY#4F6!jKo?yZ*qw$hhK!l-vXoVJL3P1zdlB_s`;Pf z0378SplCcGTvR74I<5ZsO35iFvnyyrab3{jpkE^K;V0{U0L24N6|J`Hxsw>ewraqh zH-<^XW~CI2OAV{uSS8AeC!r_Yia?s(a=iF9Y+Pg>IGWxvx1C5UCOIzf%rQ(7$PdaO zs~RjN$m<1pcjImcMVS>Lh~-RRe|tCr?zdk=3eRZD0qSauCjDj4(=^+}!FXzsEIr|* zctuNPF3%3e@8bU($|lg3J!BwZb4T!)=PHUco&s+Hh3OIYL*|DvXkZ~<4 zJDOv$_U}_%S0<2Fgs(_9;9nAG=3m28F^T_wFMvj5Fafys34y*zv>NN+!Jk#eL6C~x z0C_L3Jb;=B+-0hus;37)nJX;{T)ja6lAhdd^cx&x=pCP=8by8 zom00QZqtgL+YYczrZVHYoPvTVdTp6usf4`FbL{-!WR+E!6+_8j7>1H64rrJ^ObK5) zutVePCoL)3ju$C_LzU$&+f3IwNDk)k{&E8RrUj75@w;(j@?ksR;aFyKskdE#hW5a2 zfDMQbcxYz7=SeA989_%zjD998F%7g}BUNUXme&-r}6aMEmUm3`bOxn zCg-Zl{&KGj;05D%m7!OJJ|l2n9qfr~GQb_(?7?{9c_vg7y1WKxd0#;c8ksEmYH&>m zQeYgd1_iQF3&^sQzC%jQ7?AjTR$Y`$ueTi8Rk#=cM<@XF z;LkVd2`P;#ocQjWE9E6oPU3-mrRpR73bX#QhB-%Y-}@l!y+~>>AZY<|dGToO;H(lMj8vdsS4%bZ>VMG0|A}%3kLB7F(A&<6hN4k`T`vPm02*C# z%~Xxu0l*gHZkn~rQUWeeNejN9D6OYTA_B1$D5=RccdfdfnV2}SE2S4+4dAHgn^(3l z2LdOs?p^@I|HjMGU<^qLj*gD&VBq zDp4RBU;mRSpNZRPpO=b?sp#emd8s02Vpd)zqrtH)=BJodC){$9b^MXmnX_%mMchMVT~Rj6Wta;E$BT z=PLzkoL0H*6=r>yf4_I1lz%_0Sk)Xpj*!XvEv0td9-m!36_o0#Kn=3`O2E8RJ)VIO z0E<HS4 zdyuNZ82CI74-YZuiNvpBcE+knb>$?&K=kF=7>-`zv#5f_lo|qM62y&67$q%7@bN3g zljUK199yv}XU0qzr%7~A+@o;fy@hB_UELfdz0FPFb}b+o8^3E^CK7s~kPCZ%Unfyc zCiUk7Wsv9I1vC+Cgo^XVXfb$1FA4sANl;=K3<(=7Q?iaNU}e-yl*tk3B>PK>v;wdK ziULb`Az&A|!J!IXr718 zS1PJ>#}&X#z9MINMd;<#6x1@i5RkQ0f%~t59J3ajl?kB?s8p2wE+w!Y;zt>xX^a4x 
zD*!!UD&+FKELSbbu*LW8RkXd5f_1FRq=!4L{hlYv1zr-b8b|je1Lpe`Ff_yN_fQXD z9@YP0bG4jly-27(X2A(EjgMnl>v{J{z+j}tPCM!vt1Y|Ay>cK-eRwJro3EL2wc~Sy z7Z#veGMby%RTf%~i(Cg=m|LZjIH*y};lF;%wm;Q|U1{7W{=aHWVi((eEvurdq!K7* zoM`4`kKD)c)!h3*5GKl(pBU105)dy9Wa{2LT+bbY z6j89`fSRvs(?1??vs+hoaCyCuS?)QhX6fM^5P$y8O^tx6#2`Z24I?{Zd-+bnJJ0bzs=sCS#7({rV zS;Q;_F9mo(GN9mZ!M9UVQF(th#!7D7x2G`=+>3-U7&$>%`WVO#*Oy@IsSv2%#+as@ zD|~_Y%8^=6Vt^dp{x(eh%8`IzAa{A@Rgd)41kXPhpL7g*p;i+D_+;CJs3lwApjFs`?*$W4uHe-6mArQ*1d zwGMxofe;7)qCNrY6i*Ng3RZphUnRu*gyn_`k3Xum0LhSZ)vxPUK0L0&wJ`8-iSjBw zpqZ=3`B7>Xv9R}o&lBecULo2`!2*DS^hFBBZ)0=1&CiMj;8(qt5wPf&0UjBxfrN9% zz8{EtEk|^O51I~o-)w9+zD9beg1V|Ty9!-VRaKHzf@D-h+bLLE=>yJ(R6{`@V;n>? z%FOXoyB~1*k3JP&0fA{DUSbXa2D+Kev^p_Tcy^|l{viN!{T9$$(qtflHmY~N0cf}N zK)U3We`<$rg-M}r{!)XCQ?y`|MLb!)Pk@FF1SO6H0p|O zVB4^=U^CkehLdNj0E$T363F5S{nF0@g2r&2lL6qR66iaY)`uHoY@niK93^fvNBkI7 z(DI4782d%Aov;m*K8S$7=VWL9kcwiy%B+A&83)|Eg!UoHkVrl#+3i^P?Ft-(3sz9lDx9`O9?rrw_vb&%25o%qS`}%`Xkv(`7$=Utr)MynI{q{z1f4 z={UxJb>t}sw=yLLP~x?gq8{r1h2IKtxvOm-zsvPFQD&cXsXGyn04G00Vrc&Oqqwia zv%G)FEreZP@bV${SNg6hMP*iMlpwM{1S*nIwQ|jOo}PaDkFAnmkxqiHmSlX0+lewA zz%EO{xL|DaJYWlySGQh6sEprImOaf&BTc8&Ga^wJ7(BQmBC?hhP6f`1_H_3rwd6VG zuY>a_iw>DiVj8pRd^NoS{VLb|BLpF)=C=t@>c2j4x(;;d1~7hCCJ``}Odx^I0b_#^ z?MkC*7D1m*U?zcS*sJkJBj4-mKLajN1DL9bY7*($mRrbQ%g4R@!rK~o8DF(Y59$79 zUvt$j4g+I>Kvus9RMXs+7GMSu_QN?Ulq~mSD%vlm0qH1l|LYe58vn*eELyw`!_t&h zHQiSnfE0YT#CZt}`BpPXXb zs$rBK+Xz`P5wZe~pr+YS)+}C0`R1aMtWAZ%X)tPb$NUFuq>DZhiLS$zwob68ro27Xa_`6`gD zY7%axi3Xi|2LqYMUM8~D$35|295i1c|Lrwx@{wFzTYt0%(a_Wc?n{1-!vZU1^;pl2 zz88GvdzHxWY~O9F0KJaE-v8tRHo~mJumFD^0sLt|dcK8GPQ?JZzPM+`1i}^(7x=w+ z$nNhwlmb$belhp^cgK1dvC%w`N>8Q+gRhe8F-SB)rXfjE$NET6{UMPVVUr1o`gd^p z*M*GlSC}@xHZ7~^F$QWo3Yu%J8ccQdy(i5s^U#Qt8r@Zw6WUxImG07;^61kd+(bziI|ZPToQuTzs~<8YCwxOG&MB% zOKV607pUB-pScGP5q;;^dIN$Jl#y$_|F!vQA%xtT!TOLFpoXt)kffc^C*1r*Wv)x5 zsE3K@aNULzi9Fsa4e!hbdn@=7?ylnJ=f{c`#NTPZl#||R5d*sdQt()kGrA$EeicPo ze?PoQ>Mw5WTvY9+V->3)PAP$-mOts$)>~d0gS=BvX)6NF6k7Fo8N~j^B(WMQDnx)C 
zn2iFVG-2F`(6#kY|4QQmmSfH64&5z+(Zr_sV@8(gE!-^PVDF30j=SZ}ilCX4K!!nq zDp6g6&XenSxLm0DM^LrA<`R{rXUB=Y`HG*9?-i(WdrA9j12*;+0c6ubpdi|_=NkU$ zlC?Q#gV9uFbF@0!-*t_hAt?=y_y>^K92_AfU?A-;mX{7_D_QyXRkURNr#|YU_)%-) zk2-dNh&?dv59o32fX?Rkl9C(utVzv-4nM&@TJD-7pL}@>I&g)+wmoMFGl#6<6?0r~ z1sghA+{615Ncj~|&cvC}1<(SlrhtQ(Tgkt$Guv88F~8{Wk2Bb7*<0#P=mePfFvn5Pkwt253|6 z&H0c?cqmyZ?HYVG3g7r%V*9;krWpuIFP3u;`6@~H_p50^|4q3@SfBgbS6rvZ|3Ezv zq!C7@#>U3p6j-j_JoOLEQr>qm){Inpzw?H2;%fqUxmR3xyV0?e@pJ(pvn1Hx1PWZ> zU}yGa5}#|!zEF{4xrW#Y`>B(ZV$0Q}$`d<5?@Q{_E2;zfdi!%!-hKv>LVewU?w_j+ z;0i+wyL)o{;QyR6K~Hus=&fW})I`l7%sp-g(=UMnl2IPitpwBvS?^j#z}^Y^Q%%w` z`q9QtCBb#t#}k?SfXsJQJXvFv*V4ya)pH-Ct!`PBQNfvEL&}}cBvA6qnKVEE?pzsH zDW4o0A0JQo^vQ^+OR&xeu2QJ%S>P5$SI~&ahTk~UH;IHV#tGaP?VUWFFxOU63WvQb zJ(jMd-^~HdcY-l_8HRNA*6AejvP!&^&!+W%$$Cq6(>;9;wjF* zU$70*Lu1C17THJ>O+^(m#rzLBoi5U-rxu1^ShuEXc*Si06p0842~m4B)1|NZ#RF&- z`6lj?3j0|5^(`yHza6xM^MILMIU&t>g%FC4>2;uti)*XQje~S7^A|{G9Xe;jIp4&! zD67f(SB=t2xLq_d?B>^8L4jdqRzXfq4$Qun%dUB=o)vJ0CfK`LUy@_8-6$|K=*jst z0!87!q`vGAl-NJWybP?0YbjIzz$5f5i8_zvn6Hw8e;=xA{X5Jn^kiAo$`I6!8l0!9 z9noJ=akD^a{Ql21&Fv7uu8_hO%&V+?ub3RpR&)zN=G_@XD^U9OWmOtz;adbfwrIuF z<)izRO0Mn1Q0UYIJ)BWuYT&VU7O3&erAvC%`o>q$-hNyI0bpc?*7McK2Ode?khGCLTw3wf1Mm{v6d zbNT%0`I8TmZN$Qa#>+t;-^P#z;L-a`*`z#1Lk%{8T(0l4G1}V%=gICT5XQ$WM{^}} zRD7uQBb#&stv%g%mz`QtP~z?O%Zn9?>k;B>IxPL2{diT3y9P*zZc^v4ESLI&!2{x@ z)uBtgtQ7|Cj~|RL)<}H48N$?Q9dhH6)6TSTl+fd>b<=RA}B0e-IvR9 z5cJufK?V0egnkGs3mY!#(bTx=UIWD|Mt7F&@H9z3~UFTN-!M0JKIKzl{R-6|)BFBL7 zYz%-m1L5c3|IU2WFsDqP-8>3k}=d3kwwzQn#=2QB@fR_>aS+x9GiD&@dMK8cg{)a<7kA1g;FXZ567q=Mv zwA-_@v&%uLm+@dVKZhO}!>WfR6s0;I6;FtX;R1!6AlboY#6e4;?4Ur?ivxgd*^QIf zKWR<^p=9u|&!)GfD3x{9W&M-b{4yFOcTG{xP>`GG!H)qT?h9X4>C3!IONu8h{-hq! 
zr4~Q6&IRvykuUq1nIpgZ2jKq z0Z*EjCitoLqjmY;-@-xk-$LzCNpx7EQF~EHM8LSIGI^e&h-K$N3kTu|K?Po6I}Zz1 zu?tgxIf7@EqUW6JOIsv02EY=$8bOIkv*^;0KRPvSw35gnzQ*`ogq5 zy3e7x?>rpwFhr*WvmTi$0$JSKV#VEKZrj;$X^`$Um79y2mNYL7~kQ$Sy}@N|Pfp_FN2^Qp6N?LgYH5VK>f!#o@Jj z9yfc1t$`&zx6bXU!>8&Dx;{@_fBX1^*5cN#__o+r$P+4Q@V?nl{#W_T7~HqG-;4+{ zJaZiH%b;$PzXP#%KGwA_&CRcMNxsH+gM%i@Wp6ElY zJa}W1kLP|%yli?}P@%Rumd7KXdooyiDC!&P+rm#&e%S|Cw|3b#!w+6%& z)*PK1F`fo_%nRpA^*&&1&`}ulozwjx1!?~ogiF}>@?$-gd&V+2V?5XtIrnq+wYnp1 z6U`-5x9ASVc~cp!?{%z-hy#DWlt2O@sxF*$GALXI9zA~IA$P>t$wor6vKtH; z0QZnp$_dWJ11~@j>s#W!juNyU1X6=nd#v33!doh$r#inx#h>f$FnJEa&m-O1Djt#t zi69lO^%3~aieHz2|C;_f8N6rI(2suLV4}46$$4v8SB}L zsQ#@fGq@^#*?h=z>GDu4ok8|Ilf-M`9-QBGtn$R;Pi~8sSbSH)U}jwu+`Cg&g9(c& zv6|RycyFGy0v9nHb27fM-KS5244MsWkY(^C!}k0h#ou4~&c5r0w4YY|)6swUEFSl6 zdO_|3$NkF~O|Rc!!*IL9G;&YmDT?Fg6cg@*R&cI8Qq#R5kt4yKlN)Y@ZozL5`?v%z$OKv8s)rB8HW>3U5Q%WTa^=|7C5b~9io=R z)t4zsQ3XfPU0LxU*n=2$_azU{Er^U#?I5y&Xs-Y6hv?=q82L$}h+|kY%lX{n`qt!* z%;LoAA!-Ed_nW|{obP|4EZgH{6Z^e-8krHNi6bthk*4cdo`$5{x9%wi9WCcswq1Uhdaczz?6}|r++fMyM z`}Ol!H;u{gz4qz9qpLJ#6;hdsJ9wN6#G1eU31&WV;KQKgq}u~(Im&6Gbex;%0t0*Q zA@`W0FW@CUk37fdw{MRH^=|3^Hib88=UgBE5>ZKUz6$T#eI!>>(c?u|^zx~KdYZ0> z{2T0K2R|m5_ZKY0?{P%!VAk(2X|Eop@stLA!puW7`=W^TPxsre!0V&ouVV0qI__HnS}@{P3fY@-?yq9l2fTk2 z=M&tXZr2wdJs|qCp3+CQ9u??A7nT6;PJUxcojc_7W7dhfx?N0h(p*l%oxtmjbzr#r z_Tc$l)P-D!Yj6}hX|UwM5Y3T+S~k}G?@?4EjjGoGrZ>4>(Ojc)&N(}`B7ERlC8f`q z(RwJZl_L3|;@0NQ3?6m`uOXsRp5hhb={O;$K{a!OsPUYq)b;1x?GH7S4XPyk%#r^l12fGF*RT1J2O zzon;JJ4yKU<_XP2D-DUns7m}J`lvR-=A0K4l!ET&kGj=llw?xSxipWn-NH;BOZ3>) zeWW2yT=n1osu?I7uXS>_aZR{(2zzTXIr)-1HO#efL$Na8(_g+#rtKpL&YlYy<{Al4 zx9HZ)HV^H%0}OF5ub<-2V)qV~FFx|Z1n&szi8mw9jX07;VTm?xKlw>kf9Llk{?T1Q z#8H}~+Gw};M{*&|3gJ%CIEpynx?tvq!KGK5548z;T(VTbFCQ41r?cP`m{`_~%fj-9 zd|W$f;F@GR@>xBn>X>+AL112>Gs%=yN!}XGIx$ogDj0mWnx$ryEz+$8Ln1GD)*83& zA(!3<@7UezHNJ)sJ`*czNUDb_Fm8MwcgM z?MK&#gAxjHo6_--9A~OO2mg3A&mT;cM@zRI-MJaSAbxw2`)t_HxjD|FhT)R z(#qMl6u~6Jr1z=@+bJg7%(F8iarif^sbUJQzZkr@pLt$TH-yu=o$8htoIdzk7TNhg 
z9V2j7BXScfK-*0+S+2U@cD8(bUXc@{vr7y$wBwoY__y%brTamaWeB;+weRm0XV1}5wGWkL<2@I-dzU3WxHL7zuqr; zpa1Fu*M#Cu10|+OD0xcpr*G7$WzOZ1KkJ3l_rVzZ`GAFXROefGfVuji%(h9O*J-K4 zqb*j{19fxqdp(OhIMv!67%!R88M^W{OMOD$S{$9}>eXwQ&SH0J<`KlC6iNeWM z_`UDgE)?hO@!z<7XFvbS;pSwo-F)bbQ~0Iq`3YP^LH&qyTi3{|Bb~+Dly!i(o^S$v z&3F@w`1Y_wu_yVLBIojmmt5k6%|hU=+;(x;Du(;`cYZTijTbyNJnqqXY6wSo^F2E2 z)%JE&d95;3E&G!Fg7>E!;!DR;;7#9i0g4vOeS5B`-Va100{Dn{a(OrS$ikIYoeGNVN>7o zm*R^3s{yyO53bl3vJOR-3Vj>krU0+|gxqhpb;tipRa%bwAm@XiPgMoE<(HPvB@Cs& zy##x5;aT9P(Z&gWkN#5N?EtU4nedil{UiS0%CE&;5{gO*{s-}=xcg5@_Ze=qaC3sL zDAzB*Yw~?IKW^^_cul?&h%~3U%Xv$2m+^xZ`)}h00Oeckvr_0=;1-z)QlNs|r`}MO zj{6{mH|TNSJHTtwaFru0ClY*c_FjaubZ)bP7W!J?1l){t&FP4n^{3E5e%}Fp?(>@W zh<<{aHu!%lOY$JRbv5Fik8MuS30ZS6y#E9nnsfPWo?D~g5@{( zGE1LJkep$y)4i^sztl~|egeT}6&nsb1{^`~TS`eO^vB|!$8Ju0T*2*&8z{R2xt$;v z;RYDzBgk=|mVy@MaqsuJtRndXbFCeMt80IWV&l0MdT?{x?+R?y%xfxfBMyg=+x_b^ z@t@!K|A=oQmLS>lxm0S!?-#Nb{~`~;uB4>pa|v?tj$#Z(F2$+oGX4Z$&j7DK2sekp zkp%l$C8f|`9>YG)Bmc|mL5_n$21Ujx&f~rkcf2R%I7_N4aN}~G4|~J;+)J;<4L0!m zhBgE@dsG=W_g@5AemY*`<0bq-;1LQks z?j_vic|Yglo+tDZayd5F zVt)v3QXs!#Ukru*5#0Hx5J7%Fit)Iw3GkY53(SZ-;9D;b2D%fYl}R%F%QQR?270`xG(9UDD-*v>A+6{yzcwJ z=^Od~o$!B@t{(S+S&icMEA#-nM4qB>LFS{L5^!5VK9eB(v(_5_p3M^cM}p_0&_`*S zF5?F+_DACGPjBp!U=|Dg1Gv@j3b`l0p<4VG&*~0r8R9;p66BoaHy?hJAccPWfZNJ~ zCxLU=`0v#x1gn`PxZ0S@LSvg0#_Dws-~#y>%{OP3x*hk6(VRaMe1@*M-OLkn(V zg{c9y^$KfGG9v=K{&j*6N*Lp_t$f#`?;jxNDv~|eWDx%Ep_<&r8r&OTn~^*d8lngB ze-C~$G;WIDdh|gq-vw2LTeK+Xn%57o{?GbwpV!B=9{ptCEuZ!RZXv56@AHfha&IWe zc`NCQT%C*mdw48|?X|{T?eN=X8g6k0;kO=rkdAL_+;qu7j^iL}H~8|~NsvMh?wR=@ zKkUnK1K3vhyly$c=U#{C3Oxinv7A@H_M>(!{`qrVJO1@F zP=Xr}=Jy`X$#J`{2iRt$E40PPuh5STxSc_`Iv3>k^@Ku0`|TiTp$9j!kKgv{aDzPj zUYF~)@hA&@kU}|&kS?)=tTX;!syyJR9JjkU!M@ly2=^X+llKDb_kjrZ8wS5Gbsp~bI_RP-O@0$#yR-QJ?VVeU zomCabf1Q~!?VQq#v?(DPJEm%M0gMS=5@W;%qG*gEq5%^L zYD5w625Plps2Avifmp49ptMO_TOqWXX$b?Q%+So7rjrcHTKU&*szFxHbQqlGzcU6`xrcr9`sImjtg5T z9Wf^k|nT|hY&re zPW=hzz`M!6krx_@B8eV+=P55?*Xb`47vuA~zmZe_2OPRvXt7Zzc+TcuBW~N*BKjg* 
z8#hp%z4tfwtnvRk5Iu>WJg34Z@x2e%=!3DJM|Tq8NNBCK?<_3GtqpOUl2aZv%W6ls* zL?07~d7q&>lfwk|Rj?H1C3?^q@vOtf>%GD~$Wc-$CIKdC#>gP+WDZF}~>Bt8$eg{p{N zn9#Nd%kkB_Xdy2##rT%v-}!u+>Ua6_z#{seM(A$fcaV1yurRZf#SlICyvxr=+JN$i zehQUE^-hqX`KWoZ4Am&`PTr4`raD`OZwAqN{ToryC(FhRY2UqV4x5>rpu8>XRluVGW4kq>?W+Uq9zb(ySxAxIXA%9+BK2>< zmlE9~;F#D_6if8W=%0r5C(?9(D~sqsZ3jap%7^f^h0Kd}C?&=_p8pVk2HlBq6v*Qp z!!)YDUL>~RJCo1kiwSL8MBhwk%Zmx%kH)yV$siCtXgAAqJ+TvUC%)~p+d{Ddw^3}z z%S%yBb?=xSD_}g|4X{6r0=SFB7$U`833ORR|4;%Oo`Pg8v@>|6N1>%Cmgqr5)NPAv z2GvAQfij8SW3N+=q8!qcXM`gt7%r==?44+?NH~UeY(Oc7A@}8e(l-HFk0$ewaqJ@P zv$2T2mC#lQCJ3)}F(^bog1;1S-W0NKSb_g&D3d1$d}IS8H+b2 zCX;(9W-I?)EOtWP8L7`A`iCR+XSm<6lgTmE$G$HtB`Js=RJ5Wq=kd&wW*l*a%WH-jHjHMtI(Zk|WvZ+f%mQD1amHckQ zZZA0jdDKB2&{e*cH?PGib_!O!!Z=I)&qMG72qaU=VcuJ@2 zBR=z>8$3i`WigeIZTvwZdhnHFZ6QKQi4DlR$}=Av0KN>o1K$E<3N6}qpoMa%y;n%T zqtFdkk#8UqUz&L6-yB0;{zm!NXi0D7j=l_RU+9o#z8Ys&0+DJVwpdb4TXI87(i33N}^q8~v_y53&2D4CAX(F?btZPb1kqx_(} zc1$3T^1a6An#7;sz7GY5^}}Z5!Ng)YnzO9D`FTWkPbaXMVIw*QikWK`P~`dx=)Io- z&Oz_}B)YEiBJ@wAql;AYx`k_ymm5oJPUWFQ5MVYUBh>p`t+EV7rG=tW_Qy zrT>n@4$`(Nz7e%_x6nd;E4uDjSgVAT9zAF~=kmllp&E}q!x(xHc~$iU0(BFt z`uf}7>AdNLmPfxMr`~>e(6E`wi}Y{`tuLk~`l3)J(P!9>zl>oSvY)io!17=n@H?u% z7nwAH+bK5c83u>wL7vth=szjOqHLlExYJ>?MgN_^yckC*r+JQ(ijrRuSY#zJ-VFS? z%ebnNALP7_Du}*;uiSVGH7;7xq|Ex#km$p(h42X6)Utq5eyXZM*NIU?e4ZewBIO9_ z^5iXei2j_C#^OOgvL95UZ=h#x*?pPfslqG%TqqMXnBf!i*1YO z^K@VJ;BENN>z9<1x4tkW`f!Y*A696@X$sj(d=zg{*hp9rk5n>;0-eh2n&lz-JgtF? 
z#7f}zEFI^d5v{jVy#hn9wi6Skl%E^wwWxx;7)?&*0au8J?tJ0J4 z5PeSD%(q6pZp3@?H)j@BI+v^mqS%!wNKEUPeehk5w|#A%+-Y lhyleo#ReA*F~krB Date: Mon, 30 Aug 2021 11:33:00 +0200 Subject: [PATCH 276/308] recalculate relativelly offset by value --- openpype/widgets/sliders.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/widgets/sliders.py b/openpype/widgets/sliders.py index 2f26c3eb97..32ade58af5 100644 --- a/openpype/widgets/sliders.py +++ b/openpype/widgets/sliders.py @@ -70,18 +70,19 @@ class NiceSlider(QtWidgets.QSlider): ) _range = self.maximum() - self.minimum() + _offset = self.value() - self.minimum() if horizontal: _handle_half = rect.height() / 2 _handle_size = _handle_half * 2 width = rect.width() - _handle_size - pos_x = ((width / _range) * self.value()) + pos_x = ((width / _range) * _offset) pos_y = rect.center().y() - _handle_half + 1 else: _handle_half = rect.width() / 2 _handle_size = _handle_half * 2 height = rect.height() - _handle_size pos_x = rect.center().x() - _handle_half + 1 - pos_y = height - ((height / _range) * self.value()) + pos_y = height - ((height / _range) * _offset) handle_rect = QtCore.QRect( pos_x, pos_y, _handle_size, _handle_size From d6fc47d1c1ae6914b138f65878052dc4af874705 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 30 Aug 2021 11:34:51 +0200 Subject: [PATCH 277/308] added option to have sliders in number widgets --- openpype/settings/entities/input_entities.py | 3 ++ .../tools/settings/settings/item_widgets.py | 41 ++++++++++++++++++- 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index 336d1f5c1e..f7e85294a2 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -369,6 +369,9 @@ class NumberEntity(InputEntity): self.valid_value_types = valid_value_types self.value_on_not_set = value_on_not_set + # UI specific attributes + self.show_slider = 
self.schema_data.get("show_slider", False) + def _convert_to_valid_type(self, value): if isinstance(value, str): new_value = None diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index d29fa6f42b..6f304a1f88 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -21,6 +21,7 @@ from .base import ( BaseWidget, InputWidget ) +from openpype.widgets.sliders import NiceSlider from openpype.tools.settings import CHILD_OFFSET @@ -377,6 +378,8 @@ class TextWidget(InputWidget): class NumberWidget(InputWidget): + _slider_widget = None + def _add_inputs_to_layout(self): kwargs = { "minimum": self.entity.minimum, @@ -384,14 +387,33 @@ class NumberWidget(InputWidget): "decimal": self.entity.decimal } self.input_field = NumberSpinBox(self.content_widget, **kwargs) + input_field_stretch = 1 + + if self.entity.show_slider: + slider_widget = NiceSlider(QtCore.Qt.Horizontal, self) + slider_widget.setRange( + self.entity.minimum, + self.entity.maximum + ) + + self.content_layout.addWidget(slider_widget, 1) + + slider_widget.valueChanged.connect(self._on_slider_change) + + self._slider_widget = slider_widget + + input_field_stretch = 0 self.setFocusProxy(self.input_field) - self.content_layout.addWidget(self.input_field, 1) + self.content_layout.addWidget(self.input_field, input_field_stretch) self.input_field.valueChanged.connect(self._on_value_change) self.input_field.focused_in.connect(self._on_input_focus) + self._ignore_slider_change = False + self._ignore_input_change = False + def _on_input_focus(self): self.focused_in() @@ -402,10 +424,25 @@ class NumberWidget(InputWidget): def set_entity_value(self): self.input_field.setValue(self.entity.value) + def _on_slider_change(self, new_value): + if self._ignore_slider_change: + return + + self._ignore_input_change = True + self.input_field.setValue(new_value) + self._ignore_input_change = False + def 
_on_value_change(self): if self.ignore_input_changes: return - self.entity.set(self.input_field.value()) + + value = self.input_field.value() + if self._slider_widget is not None and not self._ignore_input_change: + self._ignore_slider_change = True + self._slider_widget.setValue(value) + self._ignore_slider_change = False + + self.entity.set(value) class RawJsonInput(SettingsPlainTextEdit): From 8c04008b4f04d779f542bf7cb5ea73af9a15ba69 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 10:25:11 +0200 Subject: [PATCH 278/308] fix attribute name from `host` to `hosts` --- .../default_modules/ftrack/plugins/publish/collect_username.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py index 7a303a1608..39b7433e11 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py @@ -26,7 +26,7 @@ class CollectUsername(pyblish.api.ContextPlugin): """ order = pyblish.api.CollectorOrder - 0.488 label = "Collect ftrack username" - host = ["webpublisher"] + hosts = ["webpublisher"] _context = None From da0ea31ec9446509cf37e7036d81689adbb71a07 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 31 Aug 2021 10:31:36 +0200 Subject: [PATCH 279/308] Webpublisher - fixed documentation, host and ip are provided by command line --- website/docs/admin_webserver_for_webpublisher.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/website/docs/admin_webserver_for_webpublisher.md b/website/docs/admin_webserver_for_webpublisher.md index dced825bdc..6e72ccaf32 100644 --- a/website/docs/admin_webserver_for_webpublisher.md +++ b/website/docs/admin_webserver_for_webpublisher.md @@ -40,14 +40,13 @@ Deploy OP build distribution (Openpype Igniter) on an OS of your choice. 
```sh #!/usr/bin/env bash export OPENPYPE_DEBUG=3 -export WEBSERVER_HOST_IP=localhost export FTRACK_BOT_API_USER=YOUR_API_USER export FTRACK_BOT_API_KEY=YOUR_API_KEY export PYTHONDONTWRITEBYTECODE=1 export OPENPYPE_MONGO=YOUR_MONGODB_CONNECTION pushd /opt/openpype -./openpype_console webpublisherwebserver --upload_dir YOUR_SHARED_FOLDER_ON_HOST --executable /opt/openpype/openpype_console > /tmp/openpype.log 2>&1 +./openpype_console webpublisherwebserver --upload_dir YOUR_SHARED_FOLDER_ON_HOST --executable /opt/openpype/openpype_console --host YOUR_HOST_IP --port YOUR_HOST_PORT > /tmp/openpype.log 2>&1 ``` 1. create service file `sudo vi /etc/systemd/system/openpye-webserver.service` From 25478317d619909422a0f16a5b5e792b52ec24ab Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 10:42:47 +0200 Subject: [PATCH 280/308] modified sizes of slider in style --- .../tools/settings/settings/style/style.css | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/openpype/tools/settings/settings/style/style.css b/openpype/tools/settings/settings/style/style.css index 250c15063f..d9d85a481e 100644 --- a/openpype/tools/settings/settings/style/style.css +++ b/openpype/tools/settings/settings/style/style.css @@ -114,6 +114,30 @@ QPushButton[btn-type="expand-toggle"] { background: #21252B; } +/* SLider */ +QSlider::groove { + border: 1px solid #464b54; + border-radius: 0.3em; +} +QSlider::groove:horizontal { + height: 8px; +} +QSlider::groove:vertical { + width: 8px; +} +QSlider::handle { + width: 10px; + height: 10px; + + border-radius: 5px; +} +QSlider::handle:horizontal { + margin: -2px 0; +} +QSlider::handle:vertical { + margin: 0 -2px; +} + #GroupWidget { border-bottom: 1px solid #21252B; } From ff4ed83519c89473925999b7d749b31405187365 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 10:58:50 +0200 Subject: [PATCH 281/308] added slider multiplier as slider can't handle decimal places --- 
openpype/tools/settings/settings/item_widgets.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index 6f304a1f88..1f74308211 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -389,11 +389,13 @@ class NumberWidget(InputWidget): self.input_field = NumberSpinBox(self.content_widget, **kwargs) input_field_stretch = 1 + self._slider_multiplier = 10 ** self.entity.decimal if self.entity.show_slider: + slider_widget = NiceSlider(QtCore.Qt.Horizontal, self) slider_widget.setRange( - self.entity.minimum, - self.entity.maximum + int(self.entity.minimum * self._slider_multiplier), + int(self.entity.maximum * self._slider_multiplier) ) self.content_layout.addWidget(slider_widget, 1) @@ -429,7 +431,7 @@ class NumberWidget(InputWidget): return self._ignore_input_change = True - self.input_field.setValue(new_value) + self.input_field.setValue(new_value / self._slider_multiplier) self._ignore_input_change = False def _on_value_change(self): From 00d1ae5d43500116f735522e5db79b8a92ca777b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 10:58:59 +0200 Subject: [PATCH 282/308] added show_slider to examples --- .../entities/schemas/system_schema/example_schema.json | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/settings/entities/schemas/system_schema/example_schema.json b/openpype/settings/entities/schemas/system_schema/example_schema.json index f633d5cb1a..af6a2d49f4 100644 --- a/openpype/settings/entities/schemas/system_schema/example_schema.json +++ b/openpype/settings/entities/schemas/system_schema/example_schema.json @@ -183,6 +183,15 @@ "minimum": -10, "maximum": -5 }, + { + "type": "number", + "key": "number_with_slider", + "label": "Number with slider", + "decimal": 2, + "minimum": 0.0, + "maximum": 1.0, + "show_slider": true + }, { "type": "text", 
"key": "singleline_text", From 8f5254ff234e7bd88298bcb230b1cd44252f1d7c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 11:00:44 +0200 Subject: [PATCH 283/308] added show slider to readme --- openpype/settings/entities/schemas/README.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md index 2034d4e463..2709f5bed9 100644 --- a/openpype/settings/entities/schemas/README.md +++ b/openpype/settings/entities/schemas/README.md @@ -300,6 +300,7 @@ How output of the schema could look like on save: - key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`) - key `"minimum"` as minimum allowed number to enter (Default: `-99999`) - key `"maxium"` as maximum allowed number to enter (Default: `99999`) +- for UI it is possible to show slider to enable this option set `show_slider` to `true` ``` { "type": "number", @@ -311,6 +312,18 @@ How output of the schema could look like on save: } ``` +``` +{ + "type": "number", + "key": "ratio", + "label": "Ratio" + "decimal": 3, + "minimum": 0, + "maximum": 1, + "show_slider": true +} +``` + ### text - simple text input - key `"multiline"` allows to enter multiple lines of text (Default: `False`) From b8608bccaeaba9810473d6516124ea5d14b04cc6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 11:10:49 +0200 Subject: [PATCH 284/308] added explaining comment to slider multiplier --- openpype/tools/settings/settings/item_widgets.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index 1f74308211..a7b1208269 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -389,13 +389,15 @@ class NumberWidget(InputWidget): self.input_field = NumberSpinBox(self.content_widget, **kwargs) 
input_field_stretch = 1 - self._slider_multiplier = 10 ** self.entity.decimal + slider_multiplier = 1 if self.entity.show_slider: - + # Slider can't handle float numbers so all decimals are converted + # to integer range. + slider_multiplier = 10 ** self.entity.decimal slider_widget = NiceSlider(QtCore.Qt.Horizontal, self) slider_widget.setRange( - int(self.entity.minimum * self._slider_multiplier), - int(self.entity.maximum * self._slider_multiplier) + int(self.entity.minimum * slider_multiplier), + int(self.entity.maximum * slider_multiplier) ) self.content_layout.addWidget(slider_widget, 1) @@ -406,6 +408,8 @@ class NumberWidget(InputWidget): input_field_stretch = 0 + self._slider_multiplier = slider_multiplier + self.setFocusProxy(self.input_field) self.content_layout.addWidget(self.input_field, input_field_stretch) From 3a8b1f403ae2155ffe705886dda8b222983ff80c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Tue, 31 Aug 2021 09:35:27 +0000 Subject: [PATCH 285/308] [Automated] Bump version --- CHANGELOG.md | 9 +++++---- openpype/version.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4259a0f725..e1737458b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,12 @@ # Changelog -## [3.4.0-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.4.0-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD) **Merged pull requests:** +- Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972) - Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967) - Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964) - Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963) @@ -18,7 +19,9 @@ - Add face sets to exported alembics 
[\#1942](https://github.com/pypeclub/OpenPype/pull/1942) - Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933) - \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915) +- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) - Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888) +- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) - Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872) - Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821) @@ -56,7 +59,6 @@ - Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916) - Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914) - submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911) -- Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910) - Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906) - Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905) - Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904) @@ -75,7 +77,6 @@ - TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885) - Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882) - Normalize path returned from Workfiles. 
[\#1880](https://github.com/pypeclub/OpenPype/pull/1880) -- Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876) - Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869) - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868) - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867) @@ -83,8 +84,8 @@ - Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863) - Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862) - Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859) +- Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855) - Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815) -- Maya: expected files -\> render products ⚙️ overhaul [\#1812](https://github.com/pypeclub/OpenPype/pull/1812) ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13) diff --git a/openpype/version.py b/openpype/version.py index 2e769a1b62..17bd0ff892 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.4.0-nightly.3" +__version__ = "3.4.0-nightly.4" From 901e5f52666f36f7accbc95ace2d9abbc4f6c993 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 31 Aug 2021 18:24:24 +0200 Subject: [PATCH 286/308] refactor common code, change handling of env var --- igniter/install_dialog.py | 33 +++--------------------------- igniter/nice_progress_bar.py | 20 ++++++++++++++++++ igniter/tools.py | 12 +++++++++++ igniter/update_window.py | 39 ++---------------------------------- start.py | 12 +++++++---- 5 files changed, 45 insertions(+), 71 deletions(-) create mode 
100644 igniter/nice_progress_bar.py diff --git a/igniter/install_dialog.py b/igniter/install_dialog.py index 1ec8cc6768..1fe67e3397 100644 --- a/igniter/install_dialog.py +++ b/igniter/install_dialog.py @@ -14,21 +14,13 @@ from .tools import ( validate_mongo_connection, get_openpype_path_from_db ) + +from .nice_progress_bar import NiceProgressBar from .user_settings import OpenPypeSecureRegistry +from .tools import load_stylesheet from .version import __version__ -def load_stylesheet(): - stylesheet_path = os.path.join( - os.path.dirname(__file__), - "stylesheet.css" - ) - with open(stylesheet_path, "r") as file_stream: - stylesheet = file_stream.read() - - return stylesheet - - class ButtonWithOptions(QtWidgets.QFrame): option_clicked = QtCore.Signal(str) @@ -91,25 +83,6 @@ class ButtonWithOptions(QtWidgets.QFrame): self.option_clicked.emit(self._default_value) -class NiceProgressBar(QtWidgets.QProgressBar): - def __init__(self, parent=None): - super(NiceProgressBar, self).__init__(parent) - self._real_value = 0 - - def setValue(self, value): - self._real_value = value - if value != 0 and value < 11: - value = 11 - - super(NiceProgressBar, self).setValue(value) - - def value(self): - return self._real_value - - def text(self): - return "{} %".format(self._real_value) - - class ConsoleWidget(QtWidgets.QWidget): def __init__(self, parent=None): super(ConsoleWidget, self).__init__(parent) diff --git a/igniter/nice_progress_bar.py b/igniter/nice_progress_bar.py new file mode 100644 index 0000000000..47d695a101 --- /dev/null +++ b/igniter/nice_progress_bar.py @@ -0,0 +1,20 @@ +from Qt import QtCore, QtGui, QtWidgets # noqa + + +class NiceProgressBar(QtWidgets.QProgressBar): + def __init__(self, parent=None): + super(NiceProgressBar, self).__init__(parent) + self._real_value = 0 + + def setValue(self, value): + self._real_value = value + if value != 0 and value < 11: + value = 11 + + super(NiceProgressBar, self).setValue(value) + + def value(self): + return 
self._real_value + + def text(self): + return "{} %".format(self._real_value) diff --git a/igniter/tools.py b/igniter/tools.py index 529d535c25..c0fa97d03e 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -248,3 +248,15 @@ def get_openpype_path_from_db(url: str) -> Union[str, None]: if os.path.exists(path): return path return None + + +def load_stylesheet() -> str: + """Load css style sheet. + + Returns: + str: content of the stylesheet + + """ + stylesheet_path = Path(__file__).parent.resolve() / "stylesheet.css" + + return stylesheet_path.read_text() \ No newline at end of file diff --git a/igniter/update_window.py b/igniter/update_window.py index a49a84cfee..e443201e09 100644 --- a/igniter/update_window.py +++ b/igniter/update_window.py @@ -5,43 +5,8 @@ from pathlib import Path from .update_thread import UpdateThread from Qt import QtCore, QtGui, QtWidgets # noqa from .bootstrap_repos import OpenPypeVersion - - -def load_stylesheet(path: str = None) -> str: - """Load css style sheet. - - Args: - path (str, optional): Path to stylesheet. If none, `stylesheet.css` - from current package's path is used. 
- Returns: - str: content of the stylesheet - - """ - if path: - stylesheet_path = Path(path) - else: - stylesheet_path = Path(os.path.dirname(__file__)) / "stylesheet.css" - - return stylesheet_path.read_text() - - -class NiceProgressBar(QtWidgets.QProgressBar): - def __init__(self, parent=None): - super(NiceProgressBar, self).__init__(parent) - self._real_value = 0 - - def setValue(self, value): - self._real_value = value - if value != 0 and value < 11: - value = 11 - - super(NiceProgressBar, self).setValue(value) - - def value(self): - return self._real_value - - def text(self): - return "{} %".format(self._real_value) +from .nice_progress_bar import NiceProgressBar +from .tools import load_stylesheet class UpdateWindow(QtWidgets.QDialog): diff --git a/start.py b/start.py index 9e60d79f04..2e45dc4df3 100644 --- a/start.py +++ b/start.py @@ -181,6 +181,10 @@ else: if "--headless" in sys.argv: os.environ["OPENPYPE_HEADLESS_MODE"] = "1" + sys.argv.remove("--headless") +else: + if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": + os.environ.pop("OPENPYPE_HEADLESS_MODE") import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 @@ -397,7 +401,7 @@ def _process_arguments() -> tuple: # handle igniter # this is helper to run igniter before anything else if "igniter" in sys.argv: - if os.getenv("OPENPYPE_HEADLESS_MODE"): + if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": _print("!!! Cannot open Igniter dialog in headless mode.") sys.exit(1) import igniter @@ -447,7 +451,7 @@ def _determine_mongodb() -> str: if not openpype_mongo: _print("*** No DB connection string specified.") - if os.getenv("OPENPYPE_HEADLESS_MODE"): + if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": _print("!!! Cannot open Igniter dialog in headless mode.") _print( "!!! Please use `OPENPYPE_MONGO` to specify server address.") @@ -555,7 +559,7 @@ def _find_frozen_openpype(use_version: str = None, except IndexError: # no OpenPype version found, run Igniter and ask for them. 
_print('*** No OpenPype versions found.') - if os.getenv("OPENPYPE_HEADLESS_MODE"): + if os.getenv("OPENPYPE_HEADLESS_MODE") == "1": _print("!!! Cannot open Igniter dialog in headless mode.") sys.exit(1) _print("--- launching setup UI ...") @@ -621,7 +625,7 @@ def _find_frozen_openpype(use_version: str = None, if not is_inside: # install latest version to user data dir - if not os.getenv("OPENPYPE_HEADLESS_MODE"): + if os.getenv("OPENPYPE_HEADLESS_MODE", "0") != "1": import igniter version_path = igniter.open_update_window(openpype_version) else: From b59bb52b6b7abc55e38132cf3175b86489b995cd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 31 Aug 2021 18:27:26 +0200 Subject: [PATCH 287/308] =?UTF-8?q?hound=20fixes=20=F0=9F=90=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- igniter/tools.py | 2 +- igniter/update_window.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/igniter/tools.py b/igniter/tools.py index c0fa97d03e..c934289064 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -259,4 +259,4 @@ def load_stylesheet() -> str: """ stylesheet_path = Path(__file__).parent.resolve() / "stylesheet.css" - return stylesheet_path.read_text() \ No newline at end of file + return stylesheet_path.read_text() diff --git a/igniter/update_window.py b/igniter/update_window.py index e443201e09..d7908c240b 100644 --- a/igniter/update_window.py +++ b/igniter/update_window.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Progress window to show when OpenPype is updating/installing locally.""" import os -from pathlib import Path from .update_thread import UpdateThread from Qt import QtCore, QtGui, QtWidgets # noqa from .bootstrap_repos import OpenPypeVersion From 2b2698639b60dd6495545dbe1ff95216f1c04069 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 19:10:25 +0200 Subject: [PATCH 288/308] change source url of arrow submodule from git to https --- .gitmodules | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 82fd194d26..28f164726d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -6,7 +6,7 @@ url = https://github.com/pypeclub/avalon-unreal-integration.git [submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"] path = openpype/modules/default_modules/ftrack/python2_vendor/arrow - url = git@github.com:arrow-py/arrow.git + url = https://github.com/arrow-py/arrow.git [submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"] path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api url = https://bitbucket.org/ftrack/ftrack-python-api.git From f36f504faacb7a86690b90bccc45f82ce9395b14 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 31 Aug 2021 19:18:45 +0200 Subject: [PATCH 289/308] updated avalon-core --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index 52e24a9993..f48fce09c0 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 52e24a9993e5223b0a719786e77a4b87e936e556 +Subproject commit f48fce09c0986c1fd7f6731de33907be46b436c5 From e3f0e89e2129eaa99eba436ed5d2fe43402ce77c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Wed, 1 Sep 2021 13:05:43 +0200 Subject: [PATCH 290/308] default value for pop Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- start.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/start.py b/start.py index 2e45dc4df3..00f9a50cbb 100644 --- a/start.py +++ b/start.py @@ -184,7 +184,7 @@ if "--headless" in sys.argv: sys.argv.remove("--headless") else: if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": - os.environ.pop("OPENPYPE_HEADLESS_MODE") + os.environ.pop("OPENPYPE_HEADLESS_MODE", None) import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 From 
84ee712ef4b9efe22c80baa86842fa1f739f07d5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 1 Sep 2021 16:32:41 +0200 Subject: [PATCH 291/308] change collect host name order to lower possible --- openpype/plugins/publish/collect_host_name.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_host_name.py b/openpype/plugins/publish/collect_host_name.py index 41d9cc3a5a..b731e3ed26 100644 --- a/openpype/plugins/publish/collect_host_name.py +++ b/openpype/plugins/publish/collect_host_name.py @@ -14,7 +14,7 @@ class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" label = "Collect Host Name" - order = pyblish.api.CollectorOrder - 1 + order = pyblish.api.CollectorOrder - 0.5 def process(self, context): host_name = context.data.get("hostName") From 69b8659b6d99cf2a547418467577c9bf9e2e1172 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 1 Sep 2021 16:33:58 +0200 Subject: [PATCH 292/308] change tvpaint collectors order --- openpype/hosts/tvpaint/plugins/publish/collect_instances.py | 2 +- openpype/hosts/tvpaint/plugins/publish/collect_workfile.py | 2 +- openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index e496b144cd..dfa8f17ee9 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -9,7 +9,7 @@ from openpype.lib import get_subset_name class CollectInstances(pyblish.api.ContextPlugin): label = "Collect Instances" - order = pyblish.api.CollectorOrder - 1 + order = pyblish.api.CollectorOrder - 0.4 hosts = ["tvpaint"] def process(self, context): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 
b61fec895f..65e38ea258 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -8,7 +8,7 @@ from openpype.lib import get_subset_name class CollectWorkfile(pyblish.api.ContextPlugin): label = "Collect Workfile" - order = pyblish.api.CollectorOrder - 1 + order = pyblish.api.CollectorOrder - 0.4 hosts = ["tvpaint"] def process(self, context): diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index 79cc01740a..e87c08fda8 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -39,7 +39,7 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): class CollectWorkfileData(pyblish.api.ContextPlugin): label = "Collect Workfile Data" - order = pyblish.api.CollectorOrder - 1.01 + order = pyblish.api.CollectorOrder - 0.5 hosts = ["tvpaint"] actions = [ResetTVPaintWorkfileMetadata] From 1f6a3fdf4286aa3c4291225a02d9eb7af9912088 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 1 Sep 2021 16:43:27 +0200 Subject: [PATCH 293/308] moved CollectWorkfileData in tvpaint --- openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index e87c08fda8..f4259f1b5f 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -39,7 +39,7 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action): class CollectWorkfileData(pyblish.api.ContextPlugin): label = "Collect Workfile Data" - order = pyblish.api.CollectorOrder - 0.5 + order = pyblish.api.CollectorOrder - 0.45 hosts = ["tvpaint"] actions = [ResetTVPaintWorkfileMetadata] From 
4ec7e18aad0044164fd56688dc70183205401866 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 2 Sep 2021 12:25:19 +0200 Subject: [PATCH 294/308] pass workfile template to `_prepare_last_workfile` --- openpype/lib/applications.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index fbf991a32e..45b8e6468d 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -29,7 +29,7 @@ from .local_settings import get_openpype_username from .avalon_context import ( get_workdir_data, get_workdir_with_workdir_data, - get_workfile_template_key_from_context + get_workfile_template_key ) from .python_module_tools import ( @@ -1226,8 +1226,12 @@ def prepare_context_environments(data): # Load project specific environments project_name = project_doc["name"] + project_settings = get_project_settings(project_name) + data["project_settings"] = project_settings # Apply project specific environments on current env value - apply_project_environments_value(project_name, data["env"]) + apply_project_environments_value( + project_name, data["env"], project_settings + ) app = data["app"] workdir_data = get_workdir_data( @@ -1237,17 +1241,19 @@ def prepare_context_environments(data): anatomy = data["anatomy"] - template_key = get_workfile_template_key_from_context( - asset_doc["name"], - task_name, + asset_tasks = asset_doc.get("data", {}).get("tasks") or {} + task_info = asset_tasks.get(task_name) or {} + task_type = task_info.get("type") + workfile_template_key = get_workfile_template_key( + task_type, app.host_name, project_name=project_name, - dbcon=data["dbcon"] + project_settings=project_settings ) try: workdir = get_workdir_with_workdir_data( - workdir_data, anatomy, template_key=template_key + workdir_data, anatomy, template_key=workfile_template_key ) except Exception as exc: @@ -1281,10 +1287,10 @@ def prepare_context_environments(data): ) 
data["env"].update(context_env) - _prepare_last_workfile(data, workdir) + _prepare_last_workfile(data, workdir, workfile_template_key) -def _prepare_last_workfile(data, workdir): +def _prepare_last_workfile(data, workdir, workfile_template_key): """last workfile workflow preparation. Function check if should care about last workfile workflow and tries @@ -1345,7 +1351,7 @@ def _prepare_last_workfile(data, workdir): if extensions: anatomy = data["anatomy"] # Find last workfile - file_template = anatomy.templates["work"]["file"] + file_template = anatomy.templates[workfile_template_key]["file"] workdir_data.update({ "version": 1, "user": get_openpype_username(), From 23f17609db0549f1b8ed56f10e530b1330adb109 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 2 Sep 2021 12:25:33 +0200 Subject: [PATCH 295/308] removed todo which is already done --- openpype/tools/workfiles/app.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index b542e6e718..3d2633f8dc 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -430,7 +430,6 @@ class FilesWidget(QtWidgets.QWidget): # Pype's anatomy object for current project self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) # Template key used to get work template from anatomy templates - # TODO change template key based on task self.template_key = "work" # This is not root but workfile directory From e43f7bc007a74f033f39ddc4dee628e183518218 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 2 Sep 2021 12:34:18 +0200 Subject: [PATCH 296/308] removed duplicated line --- .../standalonepublisher/plugins/publish/extract_harmony_zip.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py index f7f96c7d03..e3e5e94d30 100644 --- 
a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py @@ -244,7 +244,6 @@ class ExtractHarmonyZip(openpype.api.Extractor): os.path.dirname(work_path), file_template, data, [".zip"] )[1] - work_path = anatomy_filled["work"]["path"] base_name = os.path.splitext(os.path.basename(work_path))[0] staging_work_path = os.path.join(os.path.dirname(staging_scene), From 2f9f1ad00c72c52a17bbdd01d46672c03e334a64 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 2 Sep 2021 12:34:42 +0200 Subject: [PATCH 297/308] use `HOST_WORKFILE_EXTENSIONS` to get workfile extensions --- .../plugins/publish/extract_harmony_zip.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py index e3e5e94d30..e422837441 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py @@ -233,6 +233,7 @@ class ExtractHarmonyZip(openpype.api.Extractor): "version": 1, "ext": "zip", } + host_name = "harmony" # Get a valid work filename first with version 1 file_template = anatomy.templates["work"]["file"] @@ -241,7 +242,10 @@ class ExtractHarmonyZip(openpype.api.Extractor): # Get the final work filename with the proper version data["version"] = api.last_workfile_with_version( - os.path.dirname(work_path), file_template, data, [".zip"] + os.path.dirname(work_path), + file_template, + data, + api.HOST_WORKFILE_EXTENSIONS[host_name] )[1] base_name = os.path.splitext(os.path.basename(work_path))[0] From 79819a21a01f17d907d116789932976b7d9f9321 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Thu, 2 Sep 2021 12:34:54 +0200 Subject: [PATCH 298/308] use get_workfile_template_key_from_context to get right work template name --- 
.../plugins/publish/extract_harmony_zip.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py index e422837441..85da01c890 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py @@ -11,6 +11,7 @@ import zipfile import pyblish.api from avalon import api, io import openpype.api +from openpype.lib import get_workfile_template_key_from_context class ExtractHarmonyZip(openpype.api.Extractor): @@ -234,11 +235,18 @@ class ExtractHarmonyZip(openpype.api.Extractor): "ext": "zip", } host_name = "harmony" + template_name = get_workfile_template_key_from_context( + instance.data["asset"], + instance.data.get("task"), + host_name, + project_name=project_entity["name"], + dbcon=io + ) # Get a valid work filename first with version 1 - file_template = anatomy.templates["work"]["file"] + file_template = anatomy.templates[template_name]["file"] anatomy_filled = anatomy.format(data) - work_path = anatomy_filled["work"]["path"] + work_path = anatomy_filled[template_name]["path"] # Get the final work filename with the proper version data["version"] = api.last_workfile_with_version( From e9c2275d046c0b02e82493bddde5df4fb74e1200 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 2 Sep 2021 14:28:03 +0200 Subject: [PATCH 299/308] remove ftrack submodules --- .gitmodules | 2 +- openpype/modules/ftrack/python2_vendor/arrow | 1 - openpype/modules/ftrack/python2_vendor/ftrack-python-api | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) delete mode 160000 openpype/modules/ftrack/python2_vendor/arrow delete mode 160000 openpype/modules/ftrack/python2_vendor/ftrack-python-api diff --git a/.gitmodules b/.gitmodules index 28f164726d..e1b0917e9d 100644 --- a/.gitmodules +++ b/.gitmodules @@ -9,4 +9,4 
@@ url = https://github.com/arrow-py/arrow.git [submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"] path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api - url = https://bitbucket.org/ftrack/ftrack-python-api.git + url = https://bitbucket.org/ftrack/ftrack-python-api.git \ No newline at end of file diff --git a/openpype/modules/ftrack/python2_vendor/arrow b/openpype/modules/ftrack/python2_vendor/arrow deleted file mode 160000 index b746fedf72..0000000000 --- a/openpype/modules/ftrack/python2_vendor/arrow +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0 diff --git a/openpype/modules/ftrack/python2_vendor/ftrack-python-api b/openpype/modules/ftrack/python2_vendor/ftrack-python-api deleted file mode 160000 index d277f474ab..0000000000 --- a/openpype/modules/ftrack/python2_vendor/ftrack-python-api +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e From 6d270bfeb5d951fe09ef580e5b7d625c3f2ec753 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 3 Sep 2021 19:44:10 +0200 Subject: [PATCH 300/308] script fixes --- Dockerfile | 9 ++++++--- pyproject.toml | 2 +- tools/build.sh | 15 +++++++++------ tools/create_env.sh | 22 ++++++++++++++-------- tools/docker_build.sh | 4 +++- 5 files changed, 33 insertions(+), 19 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2d8ed27b15..78611860ea 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,6 +33,7 @@ RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.n readline-devel \ sqlite sqlite-devel \ openssl-devel \ + openssl-libs \ tk-devel libffi-devel \ qt5-qtbase-devel \ patchelf \ @@ -73,10 +74,12 @@ RUN source $HOME/.bashrc \ && ./tools/fetch_thirdparty_libs.sh RUN source $HOME/.bashrc \ - && bash ./tools/build.sh \ - && cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ + && bash ./tools/build.sh + +RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ 
&& cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib + && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib \ + && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.7/lib RUN cd /opt/openpype \ rm -rf ./vendor/bin diff --git a/pyproject.toml b/pyproject.toml index e376986606..a57ae19224 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ slack-sdk = "^3.6.0" flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "^6.6" +cx_freeze = "*" GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" diff --git a/tools/build.sh b/tools/build.sh index c44e7157af..bc79f03db7 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -58,7 +58,7 @@ BICyan='\033[1;96m' # Cyan BIWhite='\033[1;97m' # White args=$@ -disable_submodule_update = 0 +disable_submodule_update=0 while :; do case $1 in --no-submodule-update) @@ -90,6 +90,7 @@ done ############################################################################### detect_python () { echo -e "${BIGreen}>>>${RST} Using python \c" + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; } local version_command version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" local python_version @@ -122,7 +123,7 @@ clean_pyc () { local path path=$openpype_root echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c" - find "$path" -path ./build -prune -o -regex '^.*\(__pycache__\|\.py[co]\)$' -delete + find "$path" -path ./build -o -regex '^.*\(__pycache__\|\.py[co]\)$' -delete echo -e "${BIGreen}DONE${RST}" } @@ -173,7 +174,7 @@ main () { else echo -e "${BIYellow}NOT FOUND${RST}" echo -e "${BIYellow}***${RST} We need to install Poetry and virtual env ..." - . 
"$openpype_root/tools/create_env.sh" || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return; } + . "$openpype_root/tools/create_env.sh" || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; } fi if [ "$disable_submodule_update" == 1 ]; then @@ -184,9 +185,9 @@ if [ "$disable_submodule_update" == 1 ]; then fi echo -e "${BIGreen}>>>${RST} Building ..." if [[ "$OSTYPE" == "linux-gnu"* ]]; then - "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } + "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" build &> "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; } elif [[ "$OSTYPE" == "darwin"* ]]; then - "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; } + "$POETRY_HOME/bin/poetry" run python "$openpype_root/setup.py" bdist_mac &> "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return 1; } fi "$POETRY_HOME/bin/poetry" run python "$openpype_root/tools/build_dependencies.py" @@ -210,4 +211,6 @@ if [ "$disable_submodule_update" == 1 ]; then echo -e "${BIWhite}$openpype_root/build${RST} directory." } -main +return_code=0 +main || return_code=$? 
+exit $return_code diff --git a/tools/create_env.sh b/tools/create_env.sh index cc9eddc317..4ed6412c43 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -88,6 +88,7 @@ done ############################################################################### detect_python () { echo -e "${BIGreen}>>>${RST} Using python \c" + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.7 installed to continue.${RST}"; return 1; } local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" local python_version="$(python <<< ${version_command})" oIFS="$IFS" @@ -125,7 +126,7 @@ clean_pyc () { local path path=$openpype_root echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c" - find "$path" -path ./build -prune -o -regex '^.*\(__pycache__\|\.py[co]\)$' -delete + find "$path" -path ./build -o -regex '^.*\(__pycache__\|\.py[co]\)$' -delete echo -e "${BIGreen}DONE${RST}" } @@ -166,7 +167,7 @@ main () { echo -e "${BIGreen}OK${RST}" else echo -e "${BIYellow}NOT FOUND${RST}" - install_poetry || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return; } + install_poetry || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; } fi if [ -f "$openpype_root/poetry.lock" ]; then @@ -175,7 +176,11 @@ main () { echo -e "${BIGreen}>>>${RST} Installing dependencies ..." fi - "$POETRY_HOME/bin/poetry" install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return; } + "$POETRY_HOME/bin/poetry" install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return 1; } + if [ $? -ne 0 ] ; then + echo -e "${BIRed}!!!${RST} Virtual environment creation failed." + return 1 + fi echo -e "${BIGreen}>>>${RST} Cleaning cache files ..." 
clean_pyc @@ -184,10 +189,11 @@ main () { # cx_freeze will crash on missing __pychache__ on these but # reinstalling them solves the problem. echo -e "${BIGreen}>>>${RST} Fixing pycache bug ..." - "$POETRY_HOME/bin/poetry" run python -m pip install --force-reinstall pip - "$POETRY_HOME/bin/poetry" run pip install --force-reinstall setuptools - "$POETRY_HOME/bin/poetry" run pip install --force-reinstall wheel - "$POETRY_HOME/bin/poetry" run python -m pip install --force-reinstall pip + "$POETRY_HOME/bin/poetry" run pip install --disable-pip-version-check --force-reinstall setuptools + "$POETRY_HOME/bin/poetry" run pip install --disable-pip-version-check --force-reinstall wheel + "$POETRY_HOME/bin/poetry" run python -m pip install --disable-pip-version-check --force-reinstall pip } -main -3 +return_code=0 +main || return_code=$? +exit $return_code diff --git a/tools/docker_build.sh b/tools/docker_build.sh index 7600fe044b..dca217d534 100755 --- a/tools/docker_build.sh +++ b/tools/docker_build.sh @@ -32,7 +32,8 @@ main () { openpype_version="$(python3 <<< ${version_command})" echo -e "${BIGreen}>>>${RST} Running docker build ..." - docker build --pull --no-cache -t pypeclub/openpype:$openpype_version . + # docker build --pull --no-cache -t pypeclub/openpype:$openpype_version . + docker build --pull -t pypeclub/openpype:$openpype_version . if [ $? -ne 0 ] ; then echo -e "${BIRed}!!!${RST} Docker build failed." return 1 @@ -47,6 +48,7 @@ main () { fi echo -e "${BIYellow}---${RST} Copying ..." docker cp "$id:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build" + docker cp "$id:/opt/openpype/build/build.log" "$openpype_root/build" if [ $? -ne 0 ] ; then echo -e "${BIRed}!!!${RST} Copying failed." 
return 1 From d26095883921ac187dfccde229f38ceee5eea745 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 6 Sep 2021 10:43:50 +0200 Subject: [PATCH 301/308] try to get error log from failed build --- tools/docker_build.sh | 44 +++++++++++++++++++++++++++++++------------ 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/tools/docker_build.sh b/tools/docker_build.sh index dca217d534..c27041a1af 100755 --- a/tools/docker_build.sh +++ b/tools/docker_build.sh @@ -20,6 +20,28 @@ realpath () { echo $(cd $(dirname "$1"); pwd)/$(basename "$1") } +create_container () { + if [ ! -f "$openpype_root/build/docker-image.id" ]; then + echo -e "${BIRed}!!!${RST} Docker command failed, cannot find image id." + exit 1 + fi + local id=$(<"$openpype_root/build/docker-image.id") + echo -e "${BIYellow}---${RST} Creating container from $id ..." + local cid="$(docker create $id bash)" + if [ $? -ne 0 ] ; then + echo -e "${BIRed}!!!${RST} Cannot create container." + exit 1 + fi + return $cid +} + +retrieve_build_log () { + create_container + local cid=$? + echo -e "${BIYellow}***${RST} Copying build log to ${BIWhite}$openpype_root/build/build.log${RST}" + docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" +} + # Main main () { openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) @@ -28,34 +50,32 @@ main () { echo -e "${BIYellow}---${RST} Cleaning build directory ..." rm -rf "$openpype_root/build" && mkdir "$openpype_root/build" > /dev/null - version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);" - openpype_version="$(python3 <<< ${version_command})" + local version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);" + local openpype_version="$(python3 <<< ${version_command})" echo -e "${BIGreen}>>>${RST} Running docker build ..." # docker build --pull --no-cache -t pypeclub/openpype:$openpype_version . 
- docker build --pull -t pypeclub/openpype:$openpype_version . + docker build --pull --iidfile $openpype_root/build/docker-image.id -t pypeclub/openpype:$openpype_version . if [ $? -ne 0 ] ; then + echo $? echo -e "${BIRed}!!!${RST} Docker build failed." + retrieve_build_log return 1 fi echo -e "${BIGreen}>>>${RST} Copying build from container ..." - echo -e "${BIYellow}---${RST} Creating container from pypeclub/openpype:$openpype_version ..." - id="$(docker create -ti pypeclub/openpype:$openpype_version bash)" - if [ $? -ne 0 ] ; then - echo -e "${BIRed}!!!${RST} Cannot create just built container." - return 1 - fi + create_container + local cid=$? echo -e "${BIYellow}---${RST} Copying ..." - docker cp "$id:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build" - docker cp "$id:/opt/openpype/build/build.log" "$openpype_root/build" + docker cp "$cid:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build" + docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" if [ $? -ne 0 ] ; then echo -e "${BIRed}!!!${RST} Copying failed." return 1 fi echo -e "${BIGreen}>>>${RST} Fixing user ownership ..." 
- username="$(logname)" + local username="$(logname)" chown -R $username ./build echo -e "${BIGreen}>>>${RST} All done, you can delete container:" From d4a6db63f467c740d13af5fc644b6ee6ab669d0c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 6 Sep 2021 11:41:26 +0200 Subject: [PATCH 302/308] Fix added underscore to internal methods --- .../plugins/publish/extract_harmony_zip.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py index 85da01c890..adbac6ef09 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_harmony_zip.py @@ -66,10 +66,10 @@ class ExtractHarmonyZip(openpype.api.Extractor): # Get Task types and Statuses for creation if needed self.task_types = self._get_all_task_types(project_entity) - self.task_statuses = self.get_all_task_statuses(project_entity) + self.task_statuses = self._get_all_task_statuses(project_entity) # Get Statuses of AssetVersions - self.assetversion_statuses = self.get_all_assetversion_statuses( + self.assetversion_statuses = self._get_all_assetversion_statuses( project_entity ) From 53c6c9818454cb850751a58b8a6eec3907e075c8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 7 Sep 2021 12:08:49 +0200 Subject: [PATCH 303/308] #1938 - changed warning to info method Modified logging a bit --- openpype/lib/profiles_filtering.py | 7 ++++--- .../ftrack/plugins/publish/collect_ftrack_family.py | 3 --- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/lib/profiles_filtering.py b/openpype/lib/profiles_filtering.py index c4410204dd..992d757059 100644 --- a/openpype/lib/profiles_filtering.py +++ b/openpype/lib/profiles_filtering.py @@ -165,7 +165,8 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): if match == -1: 
profile_value = profile.get(key) or [] logger.debug( - "\"{}\" not found in {}".format(key, profile_value) + "\"{}\" not found in \"{}\": {}".format(value, key, + profile_value) ) profile_points = -1 break @@ -192,13 +193,13 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): ]) if not matching_profiles: - logger.warning( + logger.info( "None of profiles match your setup. {}".format(log_parts) ) return None if len(matching_profiles) > 1: - logger.warning( + logger.info( "More than one profile match your setup. {}".format(log_parts) ) diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py index cc2a5b7d37..70030acad9 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -68,9 +68,6 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin): instance.data["families"].append("ftrack") else: instance.data["families"] = ["ftrack"] - else: - self.log.debug("Instance '{}' doesn't match any profile".format( - instance.data.get("family"))) def _get_add_ftrack_f_from_addit_filters(self, additional_filters, From 3628ab8904b397fb71484e294c0d706bb22c8eda Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 7 Sep 2021 13:51:53 +0200 Subject: [PATCH 304/308] fix changing of slider value from input field --- openpype/tools/settings/settings/item_widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index a7b1208269..3b1fc061ec 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -445,7 +445,7 @@ class NumberWidget(InputWidget): value = self.input_field.value() if self._slider_widget is not None and not 
self._ignore_input_change: self._ignore_slider_change = True - self._slider_widget.setValue(value) + self._slider_widget.setValue(value * self._slider_multiplier) self._ignore_slider_change = False self.entity.set(value) From 46697d8d816c3a603a4ec8f0ed8cd8f1c2ef17e8 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 8 Sep 2021 18:53:37 +0200 Subject: [PATCH 305/308] fix docker build, switch to cx_freeze 6.7 --- poetry.lock | 684 +++++++++++++++++++++++++----------------- pyproject.toml | 2 +- tools/docker_build.sh | 7 +- 3 files changed, 416 insertions(+), 277 deletions(-) diff --git a/poetry.lock b/poetry.lock index e011b781c9..6dae442c9d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -80,7 +80,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.5.6" +version = "2.7.3" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -89,6 +89,7 @@ python-versions = "~=3.6" [package.dependencies] lazy-object-proxy = ">=1.4.0" typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} wrapt = ">=1.11,<1.13" [[package]] @@ -146,11 +147,11 @@ pytz = ">=2015.7" [[package]] name = "blessed" -version = "1.18.0" +version = "1.18.1" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7" [package.dependencies] jinxed = {version = ">=0.5.4", markers = "platform_system == \"Windows\""} @@ -175,7 +176,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.14.6" description = "Foreign Function Interface for Python calling C code." 
category = "main" optional = false @@ -192,6 +193,17 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "charset-normalizer" +version = "2.0.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" version = "7.1.2" @@ -253,7 +265,7 @@ toml = ["toml"] [[package]] name = "cryptography" -version = "3.4.7" +version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -272,15 +284,20 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "cx-freeze" -version = "6.6" +version = "6.7" description = "Create standalone executables from Python scripts" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -cx-Logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} -importlib-metadata = ">=3.1.1" +cx-logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} +importlib-metadata = ">=4.3.1" + +[package.source] +type = "legacy" +url = "https://distribute.openpype.io/wheels" +reference = "openpype" [[package]] name = "cx-logging" @@ -386,19 +403,19 @@ smmap = ">=3.0.1,<5" [[package]] name = "gitpython" -version = "3.1.17" +version = "3.1.20" description = "Python Git Library" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] name = "google-api-core" -version = "1.30.0" +version = "1.31.2" description = "Google API client 
core library" category = "main" optional = false @@ -436,7 +453,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "1.31.0" +version = "1.35.0" description = "Google Authentication Library" category = "main" optional = false @@ -493,11 +510,11 @@ pyparsing = ">=2.4.2,<3" [[package]] name = "idna" -version = "2.10" +version = "3.2" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "imagesize" @@ -509,7 +526,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.5.0" +version = "4.8.1" description = "Read metadata from Python packages" category = "main" optional = false @@ -521,7 +538,8 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +perf = ["ipython"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -533,16 +551,17 @@ python-versions = "*" [[package]] name = "isort" -version = "5.8.0" +version = "5.9.3" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.6.1,<4.0" [package.extras] pipfile_deprecated_finder = ["pipreqs", "requirementslib"] requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] [[package]] name = "jedi" @@ -560,14 +579,15 @@ testing = ["colorama", "docopt", "pytest (>=3.1.0)"] [[package]] name = "jeepney" -version = "0.6.0" +version = "0.7.1" description = "Low-level, pure Python DBus protocol wrapper." category = "main" optional = false python-versions = ">=3.6" [package.extras] -test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio"] +test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"] +trio = ["trio", "async-generator"] [[package]] name = "jinja2" @@ -695,11 +715,11 @@ reference = "openpype" [[package]] name = "packaging" -version = "20.9" +version = "21.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2" @@ -718,7 +738,7 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathlib2" -version = "2.3.5" +version = "2.3.6" description = "Object-oriented filesystem paths" category = "main" optional = false @@ -729,25 +749,38 @@ six = "*" [[package]] name = "pillow" -version = "8.2.0" +version = "8.3.2" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "platformdirs" +version = "2.3.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "prefixed" @@ -849,7 +882,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.9.0" +version = "2.10.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false @@ -857,22 +890,23 @@ python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.8.3" +version = "2.10.2" description = "python code static checker" category = "dev" optional = false python-versions = "~=3.6" [package.dependencies] -astroid = "2.5.6" +astroid = ">=2.7.2,<2.8" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" +platformdirs = ">=2.2.0" toml = ">=0.7.1" [[package]] name = "pymongo" -version = "3.11.4" +version = "3.12.0" description = "Python driver for MongoDB " category = "main" optional = false @@ -880,9 +914,9 @@ python-versions = "*" [package.extras] aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongocrypt (<2.0.0)"] +encryption = ["pymongocrypt (>=1.1.0,<2.0.0)"] gssapi = ["pykerberos"] -ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +ocsp = ["pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)", "certifi"] 
snappy = ["python-snappy"] srv = ["dnspython (>=1.16.0,<1.17.0)"] tls = ["ipaddress"] @@ -971,15 +1005,15 @@ python-versions = ">=3.5" [[package]] name = "pyrsistent" -version = "0.17.3" +version = "0.18.0" description = "Persistent/Functional/Immutable data structures" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "pytest" -version = "6.2.4" +version = "6.2.5" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -992,7 +1026,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0.0a1" +pluggy = ">=0.12,<2.0" py = ">=1.8.2" toml = "*" @@ -1017,21 +1051,21 @@ testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtuale [[package]] name = "pytest-print" -version = "0.2.1" +version = "0.3.0" description = "pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout)" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -pytest = ">=3.0.0" +pytest = ">=6" [package.extras] -test = ["coverage (>=5)", "pytest (>=4)"] +test = ["coverage (>=5)"] [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -1042,7 +1076,7 @@ six = ">=1.5" [[package]] name = "python-xlib" -version = "0.30" +version = "0.31" description = "Python X Library" category = "main" optional = false @@ -1085,7 +1119,7 @@ python-versions = "*" [[package]] name = "qt.py" -version = "1.3.3" +version = "1.3.6" description = "Python 2 & 3 compatibility wrapper around all Qt bindings - PySide, PySide2, PyQt4 and PyQt5." 
category = "main" optional = false @@ -1106,21 +1140,21 @@ sphinx = ">=1.3.1" [[package]] name = "requests" -version = "2.25.1" +version = "2.26.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "rsa" @@ -1163,15 +1197,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "slack-sdk" -version = "3.6.0" +version = "3.10.1" description = "The Slack API Platform SDK for Python" category = "main" optional = false python-versions = ">=3.6.0" [package.extras] -optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=9.1,<10)", "websocket-client (>=0.57,<1)"] -testing = ["pytest (>=5.4,<6)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=3,<4)", "black (==21.5b1)", "psutil (>=5,<6)", "databases (>=0.3)"] +optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1,<2)", "websockets (>=9.1,<10)", "websocket-client (>=1,<2)"] +testing = ["pytest (>=5.4,<6)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=3,<4)", "black (==21.7b0)", "psutil (>=5,<6)", "databases (>=0.3)", "boto3 (<=2)", "moto (<2)"] [[package]] name = "smmap" @@ -1199,7 +1233,7 @@ python-versions = "*" [[package]] name = 
"sphinx" -version = "4.0.2" +version = "4.1.2" description = "Python documentation generator" category = "dev" optional = false @@ -1218,14 +1252,14 @@ requests = ">=2.5.0" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "docutils-stubs", "types-typed-ast", "types-pkg-resources", "types-requests"] test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] @@ -1367,7 +1401,7 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "3.10.0.0" +version = "3.10.0.2" description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" optional = false @@ -1383,7 +1417,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.5" +version = "1.26.6" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1453,7 +1487,7 @@ typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" -version = "3.4.1" +version = "3.5.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -1461,12 +1495,12 @@ python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "8875d530ae66f9763b5b0cb84d9d35edc184ef5c141b63d38bf1ff5a1226e556" +content-hash = "ca2a0258a784674ff489a07d0dc8dd2a22373ee39add02cb4676898b8a6993a1" [metadata.files] acre = [] @@ -1530,8 +1564,8 @@ arrow = [ {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] astroid = [ - {file = "astroid-2.5.6-py3-none-any.whl", hash = "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e"}, - {file = "astroid-2.5.6.tar.gz", hash = "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975"}, + {file = "astroid-2.7.3-py3-none-any.whl", hash = "sha256:dc1e8b28427d6bbef6b8842b18765ab58f558c42bb80540bd7648c98412af25e"}, + {file = "astroid-2.7.3.tar.gz", hash = "sha256:3b680ce0419b8a771aba6190139a3998d14b413852506d99aff8dc2bf65ee67c"}, ] async-timeout = [ {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, @@ -1554,8 +1588,8 @@ babel = [ {file = "Babel-2.9.1.tar.gz", hash = 
"sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, ] blessed = [ - {file = "blessed-1.18.0-py2.py3-none-any.whl", hash = "sha256:5b5e2f0563d5a668c282f3f5946f7b1abb70c85829461900e607e74d7725106e"}, - {file = "blessed-1.18.0.tar.gz", hash = "sha256:1312879f971330a1b7f2c6341f2ae7e2cbac244bfc9d0ecfbbecd4b0293bc755"}, + {file = "blessed-1.18.1-py2.py3-none-any.whl", hash = "sha256:dd7c0d33db9a2e7f597b446996484d0ed46e1586239db064fb5025008937dcae"}, + {file = "blessed-1.18.1.tar.gz", hash = "sha256:8b09936def6bc06583db99b65636b980075733e13550cb6af262ce724a55da23"}, ] cachetools = [ {file = "cachetools-4.2.2-py3-none-any.whl", hash = "sha256:2cc0b89715337ab6dbba85b5b50effe2b0c74e035d83ee8ed637cf52f12ae001"}, @@ -1566,48 +1600,60 @@ certifi = [ {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = 
"sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = 
"cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, + {file = "cffi-1.14.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99"}, + {file = "cffi-1.14.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819"}, + {file = "cffi-1.14.6-cp27-cp27m-win32.whl", hash = "sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20"}, + {file = "cffi-1.14.6-cp27-cp27m-win_amd64.whl", hash = "sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7"}, + {file = "cffi-1.14.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33"}, + {file = "cffi-1.14.6-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534"}, + {file = "cffi-1.14.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a"}, + {file = "cffi-1.14.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5"}, + {file = "cffi-1.14.6-cp35-cp35m-win32.whl", hash = "sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca"}, + {file = "cffi-1.14.6-cp35-cp35m-win_amd64.whl", hash = "sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218"}, + {file = "cffi-1.14.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b"}, + {file = "cffi-1.14.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb"}, + {file = "cffi-1.14.6-cp36-cp36m-win32.whl", hash = "sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a"}, + {file = "cffi-1.14.6-cp36-cp36m-win_amd64.whl", hash = "sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e"}, + {file = "cffi-1.14.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c"}, + {file = "cffi-1.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762"}, + {file = "cffi-1.14.6-cp37-cp37m-win32.whl", hash = 
"sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771"}, + {file = "cffi-1.14.6-cp37-cp37m-win_amd64.whl", hash = "sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a"}, + {file = "cffi-1.14.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd"}, + {file = "cffi-1.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc"}, + {file = "cffi-1.14.6-cp38-cp38-win32.whl", hash = "sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548"}, + {file = "cffi-1.14.6-cp38-cp38-win_amd64.whl", hash = "sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156"}, + {file = "cffi-1.14.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202"}, + {file = 
"cffi-1.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f"}, + {file = "cffi-1.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87"}, + {file = "cffi-1.14.6-cp39-cp39-win32.whl", hash = "sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728"}, + {file = "cffi-1.14.6-cp39-cp39-win_amd64.whl", hash = "sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2"}, + {file = "cffi-1.14.6.tar.gz", hash = "sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd"}, ] chardet = [ {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] +charset-normalizer = [ + {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"}, + {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"}, +] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, @@ -1683,30 +1729,25 @@ coverage = [ {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] cryptography = [ - {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, - {file = 
"cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, - {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, - {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, - {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, -] -cx-freeze = [ - {file = "cx_Freeze-6.6-cp36-cp36m-win32.whl", hash = "sha256:b3d3a6bcd1a07c50b4e1c907f14842642156110e63a99cd5c73b8a24751e9b97"}, - {file = "cx_Freeze-6.6-cp36-cp36m-win_amd64.whl", hash = "sha256:1935266ec644ea4f7e584985f44cefc0622a449a09980d990833a1a2afcadac8"}, - {file = "cx_Freeze-6.6-cp37-cp37m-win32.whl", hash = "sha256:1eac2b0f254319cc641ce25bd83337effd7936092562fde701f3ffb40e0274ec"}, - {file = "cx_Freeze-6.6-cp37-cp37m-win_amd64.whl", hash = 
"sha256:2bc46ef6d510811b6002f34a3ae4cbfdea44e18644febd2a404d3ee8e48a9fc4"}, - {file = "cx_Freeze-6.6-cp38-cp38-win32.whl", hash = "sha256:46eb50ebc46f7ae236d16c6a52671ab0f7bb479bea668da19f4b6de3cc413e9e"}, - {file = "cx_Freeze-6.6-cp38-cp38-win_amd64.whl", hash = "sha256:8c3b00476ce385bb58595bffce55aed031e5a6e16ab6e14d8bee9d1d569e46c3"}, - {file = "cx_Freeze-6.6-cp39-cp39-win32.whl", hash = "sha256:6e9340cbcf52d4836980ecc83ddba4f7704ff6654dd41168c146b74f512977ce"}, - {file = "cx_Freeze-6.6-cp39-cp39-win_amd64.whl", hash = "sha256:2fcf1c8b77ae5c06f45be3a9aff79e1dd808c0d624e97561f840dec5ea9b214a"}, - {file = "cx_Freeze-6.6.tar.gz", hash = "sha256:c4af8ad3f7e7d71e291c1dec5d0fb26bbe92df834b098ed35434c901fbd6762f"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = 
"cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, ] +cx-freeze = [] cx-logging = [ {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, {file = "cx_Logging-3.0-cp36-cp36m-win32.whl", hash = "sha256:0df4be47c5022cc54316949e283403214568ef599817ced0c0972183d6d4fabb"}, @@ -1753,20 +1794,20 @@ gitdb = [ {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ - {file = "GitPython-3.1.17-py3-none-any.whl", hash = 
"sha256:29fe82050709760081f588dd50ce83504feddbebdc4da6956d02351552b1c135"}, - {file = "GitPython-3.1.17.tar.gz", hash = "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e"}, + {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, + {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, ] google-api-core = [ - {file = "google-api-core-1.30.0.tar.gz", hash = "sha256:0724d354d394b3d763bc10dfee05807813c5210f0bd9b8e2ddf6b6925603411c"}, - {file = "google_api_core-1.30.0-py2.py3-none-any.whl", hash = "sha256:92cd9e9f366e84bfcf2524e34d2dc244906c645e731962617ba620da1620a1e0"}, + {file = "google-api-core-1.31.2.tar.gz", hash = "sha256:8500aded318fdb235130bf183c726a05a9cb7c4b09c266bd5119b86cdb8a4d10"}, + {file = "google_api_core-1.31.2-py2.py3-none-any.whl", hash = "sha256:384459a0dc98c1c8cd90b28dc5800b8705e0275a673a7144a513ae80fc77950b"}, ] google-api-python-client = [ {file = "google-api-python-client-1.12.8.tar.gz", hash = "sha256:f3b9684442eec2cfe9f9bb48e796ef919456b82142c7528c5fd527e5224f08bb"}, {file = "google_api_python_client-1.12.8-py2.py3-none-any.whl", hash = "sha256:3c4c4ca46b5c21196bec7ee93453443e477d82cbfa79234d1ce0645f81170eaf"}, ] google-auth = [ - {file = "google-auth-1.31.0.tar.gz", hash = "sha256:154f7889c5d679a6f626f36adb12afbd4dbb0a9a04ec575d989d6ba79c4fd65e"}, - {file = "google_auth-1.31.0-py2.py3-none-any.whl", hash = "sha256:6d47c79b5d09fbc7e8355fd9594cc4cf65fdde5d401c63951eaac4baa1ba2ae1"}, + {file = "google-auth-1.35.0.tar.gz", hash = "sha256:b7033be9028c188ee30200b204ea00ed82ea1162e8ac1df4aa6ded19a191d88e"}, + {file = "google_auth-1.35.0-py2.py3-none-any.whl", hash = "sha256:997516b42ecb5b63e8d80f5632c1a61dddf41d2a4c2748057837e06e00014258"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, 
@@ -1781,32 +1822,32 @@ httplib2 = [ {file = "httplib2-0.19.1.tar.gz", hash = "sha256:0b12617eeca7433d4c396a100eaecfa4b08ee99aa881e6df6e257a7aad5d533d"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"}, + {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"}, ] imagesize = [ {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, - {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, + {file = "importlib_metadata-4.8.1-py3-none-any.whl", hash = "sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15"}, + {file = "importlib_metadata-4.8.1.tar.gz", hash = "sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] isort = [ - {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, - {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, + {file = 
"isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, + {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"}, ] jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, {file = "jedi-0.13.3.tar.gz", hash = "sha256:2bb0603e3506f708e792c7f4ad8fc2a7a9d9c2d292a358fbbd58da531695595b"}, ] jeepney = [ - {file = "jeepney-0.6.0-py3-none-any.whl", hash = "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae"}, - {file = "jeepney-0.6.0.tar.gz", hash = "sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657"}, + {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"}, + {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"}, ] jinja2 = [ {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, @@ -1852,12 +1893,22 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = 
"sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1866,14 +1917,21 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -1883,6 +1941,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -1932,56 +1993,79 @@ multidict = [ ] opentimelineio = [] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, + {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, + {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, ] parso = [ {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, {file = "parso-0.8.2.tar.gz", hash = 
"sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, ] pathlib2 = [ - {file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"}, - {file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"}, + {file = "pathlib2-2.3.6-py2.py3-none-any.whl", hash = "sha256:3a130b266b3a36134dcc79c17b3c7ac9634f083825ca6ea9d8f557ee6195c9c8"}, + {file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"}, ] pillow = [ - {file = "Pillow-8.2.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9"}, - {file = "Pillow-8.2.0-cp36-cp36m-win32.whl", hash = "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727"}, - {file = "Pillow-8.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f"}, - {file = "Pillow-8.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388"}, - {file = "Pillow-8.2.0-cp37-cp37m-win32.whl", hash = "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5"}, - {file = "Pillow-8.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2"}, - {file = "Pillow-8.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb"}, - {file = "Pillow-8.2.0-cp38-cp38-win32.whl", hash = "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232"}, - {file = "Pillow-8.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797"}, - {file = "Pillow-8.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5"}, - {file = "Pillow-8.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef"}, - {file = "Pillow-8.2.0-cp39-cp39-win32.whl", hash = 
"sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713"}, - {file = "Pillow-8.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8b56553c0345ad6dcb2e9b433ae47d67f95fc23fe28a0bde15a120f25257e291"}, - {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"}, + {file = "Pillow-8.3.2-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:c691b26283c3a31594683217d746f1dad59a7ae1d4cfc24626d7a064a11197d4"}, + {file = "Pillow-8.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f514c2717012859ccb349c97862568fdc0479aad85b0270d6b5a6509dbc142e2"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be25cb93442c6d2f8702c599b51184bd3ccd83adebd08886b682173e09ef0c3f"}, + {file = 
"Pillow-8.3.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d675a876b295afa114ca8bf42d7f86b5fb1298e1b6bb9a24405a3f6c8338811c"}, + {file = "Pillow-8.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59697568a0455764a094585b2551fd76bfd6b959c9f92d4bdec9d0e14616303a"}, + {file = "Pillow-8.3.2-cp310-cp310-win32.whl", hash = "sha256:2d5e9dc0bf1b5d9048a94c48d0813b6c96fccfa4ccf276d9c36308840f40c228"}, + {file = "Pillow-8.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:11c27e74bab423eb3c9232d97553111cc0be81b74b47165f07ebfdd29d825875"}, + {file = "Pillow-8.3.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:11eb7f98165d56042545c9e6db3ce394ed8b45089a67124298f0473b29cb60b2"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f23b2d3079522fdf3c09de6517f625f7a964f916c956527bed805ac043799b8"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19ec4cfe4b961edc249b0e04b5618666c23a83bc35842dea2bfd5dfa0157f81b"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5a31c07cea5edbaeb4bdba6f2b87db7d3dc0f446f379d907e51cc70ea375629"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15ccb81a6ffc57ea0137f9f3ac2737ffa1d11f786244d719639df17476d399a7"}, + {file = "Pillow-8.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8f284dc1695caf71a74f24993b7c7473d77bc760be45f776a2c2f4e04c170550"}, + {file = "Pillow-8.3.2-cp36-cp36m-win32.whl", hash = "sha256:4abc247b31a98f29e5224f2d31ef15f86a71f79c7f4d2ac345a5d551d6393073"}, + {file = "Pillow-8.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a048dad5ed6ad1fad338c02c609b862dfaa921fcd065d747194a6805f91f2196"}, + {file = "Pillow-8.3.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:06d1adaa284696785375fa80a6a8eb309be722cf4ef8949518beb34487a3df71"}, + {file = 
"Pillow-8.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd24054aaf21e70a51e2a2a5ed1183560d3a69e6f9594a4bfe360a46f94eba83"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a330bf7014ee034046db43ccbb05c766aa9e70b8d6c5260bfc38d73103b0ba"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13654b521fb98abdecec105ea3fb5ba863d1548c9b58831dd5105bb3873569f1"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a1bd983c565f92779be456ece2479840ec39d386007cd4ae83382646293d681b"}, + {file = "Pillow-8.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4326ea1e2722f3dc00ed77c36d3b5354b8fb7399fb59230249ea6d59cbed90da"}, + {file = "Pillow-8.3.2-cp37-cp37m-win32.whl", hash = "sha256:085a90a99404b859a4b6c3daa42afde17cb3ad3115e44a75f0d7b4a32f06a6c9"}, + {file = "Pillow-8.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:18a07a683805d32826c09acfce44a90bf474e6a66ce482b1c7fcd3757d588df3"}, + {file = "Pillow-8.3.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4e59e99fd680e2b8b11bbd463f3c9450ab799305d5f2bafb74fefba6ac058616"}, + {file = "Pillow-8.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d89a2e9219a526401015153c0e9dd48319ea6ab9fe3b066a20aa9aee23d9fd3"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fd98c8294f57636084f4b076b75f86c57b2a63a8410c0cd172bc93695ee979"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b11c9d310a3522b0fd3c35667914271f570576a0e387701f370eb39d45f08a4"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0412516dcc9de9b0a1e0ae25a280015809de8270f134cc2c1e32c4eeb397cf30"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:bcb04ff12e79b28be6c9988f275e7ab69f01cc2ba319fb3114f87817bb7c74b6"}, + {file = "Pillow-8.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b9911ec70731711c3b6ebcde26caea620cbdd9dcb73c67b0730c8817f24711b"}, + {file = "Pillow-8.3.2-cp38-cp38-win32.whl", hash = "sha256:ce2e5e04bb86da6187f96d7bab3f93a7877830981b37f0287dd6479e27a10341"}, + {file = "Pillow-8.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35d27687f027ad25a8d0ef45dd5208ef044c588003cdcedf05afb00dbc5c2deb"}, + {file = "Pillow-8.3.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:04835e68ef12904bc3e1fd002b33eea0779320d4346082bd5b24bec12ad9c3e9"}, + {file = "Pillow-8.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10e00f7336780ca7d3653cf3ac26f068fa11b5a96894ea29a64d3dc4b810d630"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cde7a4d3687f21cffdf5bb171172070bb95e02af448c4c8b2f223d783214056"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c3ff00110835bdda2b1e2b07f4a2548a39744bb7de5946dc8e95517c4fb2ca6"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d409030bf3bd05fa66fb5fdedc39c521b397f61ad04309c90444e893d05f7d"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bff50ba9891be0a004ef48828e012babaaf7da204d81ab9be37480b9020a82b"}, + {file = "Pillow-8.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7dbfbc0020aa1d9bc1b0b8bcf255a7d73f4ad0336f8fd2533fcc54a4ccfb9441"}, + {file = "Pillow-8.3.2-cp39-cp39-win32.whl", hash = "sha256:963ebdc5365d748185fdb06daf2ac758116deecb2277ec5ae98139f93844bc09"}, + {file = "Pillow-8.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:cc9d0dec711c914ed500f1d0d3822868760954dce98dfb0b7382a854aee55d19"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = 
"sha256:2c661542c6f71dfd9dc82d9d29a8386287e82813b0375b3a02983feac69ef864"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:548794f99ff52a73a156771a0402f5e1c35285bd981046a502d7e4793e8facaa"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b68f565a4175e12e68ca900af8910e8fe48aaa48fd3ca853494f384e11c8bcd"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:838eb85de6d9307c19c655c726f8d13b8b646f144ca6b3771fa62b711ebf7624"}, + {file = "Pillow-8.3.2-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:feb5db446e96bfecfec078b943cc07744cc759893cef045aa8b8b6d6aaa8274e"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:fc0db32f7223b094964e71729c0361f93db43664dd1ec86d3df217853cedda87"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd4fd83aa912d7b89b4b4a1580d30e2a4242f3936882a3f433586e5ab97ed0d5"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0c8ebbfd439c37624db98f3877d9ed12c137cadd99dde2d2eae0dab0bbfc355"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cb3dd7f23b044b0737317f892d399f9e2f0b3a02b22b2c692851fb8120d82c6"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66566f8a22561fc1a88dc87606c69b84fa9ce724f99522cf922c801ec68f5c1"}, + {file = "Pillow-8.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ce651ca46d0202c302a535d3047c55a0131a720cf554a578fc1b8a2aff0e7d96"}, + {file = "Pillow-8.3.2.tar.gz", hash = "sha256:dde3f3ed8d00c72631bc19cbfff8ad3b6215062a5eed402381ad365f82f0c18c"}, +] +platformdirs = [ + {file = "platformdirs-2.3.0-py3-none-any.whl", hash = 
"sha256:8003ac87717ae2c7ee1ea5a84a1a61e87f3fbd16eb5aadba194ea30a9019f648"}, + {file = "platformdirs-2.3.0.tar.gz", hash = "sha256:15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f"}, ] pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] prefixed = [ {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, @@ -2006,9 +2090,13 @@ protobuf = [ {file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"}, {file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"}, {file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"}, + {file = "protobuf-3.17.3-cp38-cp38-win32.whl", hash = "sha256:59e5cf6b737c3a376932fbfb869043415f7c16a0cf176ab30a5bbc419cd709c1"}, + {file = "protobuf-3.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ebcb546f10069b56dc2e3da35e003a02076aaa377caf8530fe9789570984a8d2"}, {file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"}, {file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"}, {file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"}, + {file = "protobuf-3.17.3-cp39-cp39-win32.whl", hash = "sha256:a38bac25f51c93e4be4092c88b2568b9f407c27217d3dd23c7a57fa522a17554"}, + {file = "protobuf-3.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:85d6303e4adade2827e43c2b54114d9a6ea547b671cb63fafd5011dc47d0e13d"}, {file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"}, {file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"}, ] @@ -2071,78 +2159,112 @@ pyflakes = [ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ - {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, - {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, + {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, + {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, ] pylint = [ - {file = "pylint-2.8.3-py3-none-any.whl", hash = "sha256:792b38ff30903884e4a9eab814ee3523731abd3c463f3ba48d7b627e87013484"}, - {file = "pylint-2.8.3.tar.gz", hash = "sha256:0a049c5d47b629d9070c3932d13bff482b12119b6a241a93bc460b0be16953c8"}, + {file = "pylint-2.10.2-py3-none-any.whl", hash = "sha256:e178e96b6ba171f8ef51fbce9ca30931e6acbea4a155074d80cc081596c9e852"}, + {file = "pylint-2.10.2.tar.gz", hash = "sha256:6758cce3ddbab60c52b57dcc07f0c5d779e5daf0cf50f6faacbef1d3ea62d2a1"}, ] pymongo = [ - {file = "pymongo-3.11.4-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:b7efc7e7049ef366777cfd35437c18a4166bb50a5606a1c840ee3b9624b54fc9"}, - {file = "pymongo-3.11.4-cp27-cp27m-manylinux1_i686.whl", hash = 
"sha256:517ba47ca04a55b1f50ee8df9fd97f6c37df5537d118fb2718952b8623860466"}, - {file = "pymongo-3.11.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:225c61e08fe517aede7912937939e09adf086c8e6f7e40d4c85ad678c2c2aea3"}, - {file = "pymongo-3.11.4-cp27-cp27m-win32.whl", hash = "sha256:e4e9db78b71db2b1684ee4ecc3e32c4600f18cdf76e6b9ae03e338e52ee4b168"}, - {file = "pymongo-3.11.4-cp27-cp27m-win_amd64.whl", hash = "sha256:8e0004b0393d72d76de94b4792a006cb960c1c65c7659930fbf9a81ce4341982"}, - {file = "pymongo-3.11.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:fedf0dee7a412ca6d1d6d92c158fe9cbaa8ea0cae90d268f9ccc0744de7a97d0"}, - {file = "pymongo-3.11.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f947b359cc4769af8b49be7e37af01f05fcf15b401da2528021148e4a54426d1"}, - {file = "pymongo-3.11.4-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:3a3498a8326111221560e930f198b495ea6926937e249f475052ffc6893a6680"}, - {file = "pymongo-3.11.4-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:9a4f6e0b01df820ba9ed0b4e618ca83a1c089e48d4f268d0e00dcd49893d4549"}, - {file = "pymongo-3.11.4-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d65bac5f6724d9ea6f0b5a0f0e4952fbbf209adcf6b5583b54c54bd2fcd74dc0"}, - {file = "pymongo-3.11.4-cp34-cp34m-win32.whl", hash = "sha256:15b083d1b789b230e5ac284442d9ecb113c93f3785a6824f748befaab803b812"}, - {file = "pymongo-3.11.4-cp34-cp34m-win_amd64.whl", hash = "sha256:f08665d3cc5abc2f770f472a9b5f720a9b3ab0b8b3bb97c7c1487515e5653d39"}, - {file = "pymongo-3.11.4-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:977b1d4f868986b4ba5d03c317fde4d3b66e687d74473130cd598e3103db34fa"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:510cd3bfabb63a07405b7b79fae63127e34c118b7531a2cbbafc7a24fd878594"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:071552b065e809d24c5653fcc14968cfd6fde4e279408640d5ac58e3353a3c5f"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_aarch64.whl", hash = 
"sha256:f4ba58157e8ae33ee86fadf9062c506e535afd904f07f9be32731f4410a23b7f"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:b413117210fa6d92664c3d860571e8e8727c3e8f2ff197276c5d0cb365abd3ad"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:08b8723248730599c9803ae4c97b8f3f76c55219104303c88cb962a31e3bb5ee"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_s390x.whl", hash = "sha256:8a41fdc751dc4707a4fafb111c442411816a7c225ebb5cadb57599534b5d5372"}, - {file = "pymongo-3.11.4-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:f664ed7613b8b18f0ce5696b146776266a038c19c5cd6efffa08ecc189b01b73"}, - {file = "pymongo-3.11.4-cp35-cp35m-win32.whl", hash = "sha256:5c36428cc4f7fae56354db7f46677fd21222fc3cb1e8829549b851172033e043"}, - {file = "pymongo-3.11.4-cp35-cp35m-win_amd64.whl", hash = "sha256:d0a70151d7de8a3194cdc906bcc1a42e14594787c64b0c1c9c975e5a2af3e251"}, - {file = "pymongo-3.11.4-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:9b9298964389c180a063a9e8bac8a80ed42de11d04166b20249bfa0a489e0e0f"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b2f41261b648cf5dee425f37ff14f4ad151c2f24b827052b402637158fd056ef"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e02beaab433fd1104b2804f909e694cfbdb6578020740a9051597adc1cd4e19f"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:8898f6699f740ca93a0879ed07d8e6db02d68af889d0ebb3d13ab017e6b1af1e"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:62c29bc36a6d9be68fe7b5aaf1e120b4aa66a958d1e146601fcd583eb12cae7b"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:424799c71ff435094e5fb823c40eebb4500f0e048133311e9c026467e8ccebac"}, - {file = "pymongo-3.11.4-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:3551912f5c34d8dd7c32c6bb00ae04192af47f7b9f653608f107d19c1a21a194"}, - {file = 
"pymongo-3.11.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:5db59223ed1e634d842a053325f85f908359c6dac9c8ddce8ef145061fae7df8"}, - {file = "pymongo-3.11.4-cp36-cp36m-win32.whl", hash = "sha256:fea5cb1c63efe1399f0812532c7cf65458d38fd011be350bc5021dfcac39fba8"}, - {file = "pymongo-3.11.4-cp36-cp36m-win_amd64.whl", hash = "sha256:d4e62417e89b717a7bcd8576ac3108cd063225942cc91c5b37ff5465fdccd386"}, - {file = "pymongo-3.11.4-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:4c7e8c8e1e1918dcf6a652ac4b9d87164587c26fd2ce5dd81e73a5ab3b3d492f"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38a7b5140a48fc91681cdb5cb95b7cd64640b43d19259fdd707fa9d5a715f2b2"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:aff3656af2add93f290731a6b8930b23b35c0c09569150130a58192b3ec6fc61"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:03be7ad107d252bb7325d4af6309fdd2c025d08854d35f0e7abc8bf048f4245e"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:6060794aac9f7b0644b299f46a9c6cbc0bc470bd01572f4134df140afd41ded6"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:73326b211e7410c8bd6a74500b1e3f392f39cf10862e243d00937e924f112c01"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:20d75ea11527331a2980ab04762a9d960bcfea9475c54bbeab777af880de61cd"}, - {file = "pymongo-3.11.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:3135dd574ef1286189f3f04a36c8b7a256376914f8cbbce66b94f13125ded858"}, - {file = "pymongo-3.11.4-cp37-cp37m-win32.whl", hash = "sha256:7c97554ea521f898753d9773891d0347ebfaddcc1dee2ad94850b163171bf1f1"}, - {file = "pymongo-3.11.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a08c8b322b671857c81f4c30cd3c8df2895fd3c0e9358714f39e0ef8fb327702"}, - {file = "pymongo-3.11.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3d851af3852f16ad4adc7ee054fd9c90a7a5063de94d815b7f6a88477b9f4c6"}, - {file = 
"pymongo-3.11.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3bfc7689a1bacb9bcd2f2d5185d99507aa29f667a58dd8adaa43b5a348139e46"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b8f94acd52e530a38f25e4d5bf7ddfdd4bea9193e718f58419def0d4406b58d3"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e4b631688dfbdd61b5610e20b64b99d25771c6d52d9da73349342d2a0f11c46a"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:474e21d0e07cd09679e357d1dac76e570dab86665e79a9d3354b10a279ac6fb3"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:421d13523d11c57f57f257152bc4a6bb463aadf7a3918e9c96fefdd6be8dbfb8"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:0cabfc297f4cf921f15bc789a8fbfd7115eb9f813d3f47a74b609894bc66ab0d"}, - {file = "pymongo-3.11.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:fe4189846448df013cd9df11bba38ddf78043f8c290a9f06430732a7a8601cce"}, - {file = "pymongo-3.11.4-cp38-cp38-win32.whl", hash = "sha256:eb4d176394c37a76e8b0afe54b12d58614a67a60a7f8c0dd3a5afbb013c01092"}, - {file = "pymongo-3.11.4-cp38-cp38-win_amd64.whl", hash = "sha256:fffff7bfb6799a763d3742c59c6ee7ffadda21abed557637bc44ed1080876484"}, - {file = "pymongo-3.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13acf6164ead81c9fc2afa0e1ea6d6134352973ce2bb35496834fee057063c04"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d360e5d5dd3d55bf5d1776964625018d85b937d1032bae1926dd52253decd0db"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0aaf4d44f1f819360f9432df538d54bbf850f18152f34e20337c01b828479171"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:08bda7b2c522ff9f1e554570da16298271ebb0c56ab9699446aacba249008988"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:1a994a42f49dab5b6287e499be7d3d2751776486229980d8857ad53b8333d469"}, - {file = 
"pymongo-3.11.4-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:161fcd3281c42f644aa8dec7753cca2af03ce654e17d76da4f0dab34a12480ca"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:78f07961f4f214ea8e80be63cffd5cc158eb06cd922ffbf6c7155b11728f28f9"}, - {file = "pymongo-3.11.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ad31f184dcd3271de26ab1f9c51574afb99e1b0e484ab1da3641256b723e4994"}, - {file = "pymongo-3.11.4-cp39-cp39-win32.whl", hash = "sha256:5e606846c049ed40940524057bfdf1105af6066688c0e6a1a3ce2038589bae70"}, - {file = "pymongo-3.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:3491c7de09e44eded16824cb58cf9b5cc1dc6f066a0bb7aa69929d02aa53b828"}, - {file = "pymongo-3.11.4-py2.7-macosx-10.14-intel.egg", hash = "sha256:506a6dab4c7ffdcacdf0b8e70bd20eb2e77fa994519547c9d88d676400fcad58"}, - {file = "pymongo-3.11.4.tar.gz", hash = "sha256:539d4cb1b16b57026999c53e5aab857fe706e70ae5310cc8c232479923f932e6"}, + {file = "pymongo-3.12.0-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:072ba7cb65c8aa4d5c5659bf6722ee85781c9d7816dc00679b8b6f3dff1ddafc"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:d6e11ffd43184d529d6752d6dcb62b994f903038a17ea2168ef1910c96324d26"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7412a36798966624dc4c57d64aa43c2d1100b348abd98daaac8e99e57d87e1d7"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8a82e35d52ad6f867e88096a1a2b9bdc7ec4d5e65c7b4976a248bf2d1a32a93"}, + {file = "pymongo-3.12.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dcd3d0009fbb6e454d729f8b22d0063bd9171c31a55e0f0271119bd4f2700023"}, + {file = "pymongo-3.12.0-cp27-cp27m-win32.whl", hash = "sha256:1bc6fe7279ff40c6818db002bf5284aa03ec181ea1b1ceaeee33c289d412afa7"}, + {file = "pymongo-3.12.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e2b7670c0c8c6b501464150dd49dd0d6be6cb7f049e064124911cec5514fa19e"}, + {file = 
"pymongo-3.12.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:316c1b8723afa9870567cd6dff35d440b2afeda53aa13da6c5ab85f98ed6f5ca"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:255a35bf29185f44b412e31a927d9dcedda7c2c380127ecc4fbf2f61b72fa978"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ffbae429ba9e42d0582d3ac63fdb410338892468a2107d8ff68228ec9a39a0ed"}, + {file = "pymongo-3.12.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c188db6cf9e14dbbb42f5254292be96f05374a35e7dfa087cc2140f0ff4f10f6"}, + {file = "pymongo-3.12.0-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:6fb3f85870ae26896bb44e67db94045f2ebf00c5d41e6b66cdcbb5afd644fc18"}, + {file = "pymongo-3.12.0-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:aaa038eafb7186a4abbb311fcf20724be9363645882bbce540bef4797e812a7a"}, + {file = "pymongo-3.12.0-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:7d98ce3c42921bb91566121b658e0d9d59a9082a9bd6f473190607ff25ab637f"}, + {file = "pymongo-3.12.0-cp34-cp34m-win32.whl", hash = "sha256:b0a0cf39f589e52d801fdef418305562bc030cdf8929217463c8433c65fd5c2f"}, + {file = "pymongo-3.12.0-cp34-cp34m-win_amd64.whl", hash = "sha256:ceae3ab9e11a27aaab42878f1d203600dfd24f0e43678b47298219a0f10c0d30"}, + {file = "pymongo-3.12.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:5e574664f1468872cd40f74e4811e22b1aa4de9399d6bcfdf1ee6ea94c017fcf"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73b400fdc22de84bae0dbf1a22613928a41612ec0a3d6ed47caf7ad4d3d0f2ff"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:cbf8672edeb7b7128c4a939274801f0e32bbf5159987815e3d1eace625264a46"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:a634a4730ce0b0934ed75e45beba730968e12b4dafbb22f69b3b2f616d9e644e"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_i686.whl", hash = 
"sha256:c55782a55f4a013a78ac5b6ee4b8731a192dea7ab09f1b6b3044c96d5128edd4"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:11f9e0cfc84ade088a38df2708d0b958bb76360181df1b2e1e1a41beaa57952b"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_s390x.whl", hash = "sha256:186104a94d39b8412f8e3de385acd990a628346a4402d4f3a288a82b8660bd22"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:70761fd3c576b027eec882b43ee0a8e5b22ff9c20cdf4d0400e104bc29e53e34"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:333bfad77aa9cd11711febfb75eed0bb537a1d022e1c252714dad38993590240"}, + {file = "pymongo-3.12.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fa8957e9a1b202cb45e6b839c241cd986c897be1e722b81d2f32e9c6aeee80b0"}, + {file = "pymongo-3.12.0-cp35-cp35m-win32.whl", hash = "sha256:4ba0def4abef058c0e5101e05e3d5266e6fffb9795bbf8be0fe912a7361a0209"}, + {file = "pymongo-3.12.0-cp35-cp35m-win_amd64.whl", hash = "sha256:a0e5dff6701fa615f165306e642709e1c1550d5b237c5a7a6ea299886828bd50"}, + {file = "pymongo-3.12.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:b542d56ed1b8d5cf3bb36326f814bd2fbe8812dfd2582b80a15689ea433c0e35"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a325600c83e61e3c9cebc0c2b1c8c4140fa887f789085075e8f44c8ff2547eb9"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:48d5bc80ab0af6b60c4163c5617f5cd23f2f880d7600940870ea5055816af024"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5cab230e7cabdae9ff23c12271231283efefb944c1b79bed79a91beb65ba547"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d73e10772152605f6648ba4410318594f1043bbfe36d2fadee7c4b8912eff7c5"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:b1c4874331ab960429caca81acb9d2932170d66d6d6f87e65dc4507a85aca152"}, + {file = 
"pymongo-3.12.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:a3566acfbcde46911c52810374ecc0354fdb841284a3efef6ff7105bc007e9a8"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:b3b5b3cbc3fdf4fcfa292529df2a85b5d9c7053913a739d3069af1e12e12219f"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd3854148005c808c485c754a184c71116372263709958b42aefbef2e5dd373a"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f55c1ddcc1f6050b07d468ce594f55dbf6107b459e16f735d26818d7be1e9538"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced944dcdd561476deef7cb7bfd4987c69fffbfeff6d02ca4d5d4fd592d559b7"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ecb8d42f50d393af912bfb1fb1dcc9aabe9967973efb49ee577e8f1cea494c"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1970cfe2aec1bf74b40cf30c130ad10cd968941694630386db33e1d044c22a2e"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8bf42d3b32f586f4c9e37541769993783a534ad35531ce8a4379f6fa664fba9"}, + {file = "pymongo-3.12.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc9ac81e73573516070d24ce15da91281922811f385645df32bd3c8a45ab4684"}, + {file = "pymongo-3.12.0-cp36-cp36m-win32.whl", hash = "sha256:d04ca462cb99077e6c059e97c072957caf2918e6e4191e3161c01c439e0193de"}, + {file = "pymongo-3.12.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f2acf9bbcd514e901f82c4ca6926bbd2ae61716728f110b4343eb0a69612d018"}, + {file = "pymongo-3.12.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:b754240daafecd9d5fce426b0fbaaed03f4ebb130745c8a4ae9231fffb8d75e5"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:af586e85144023686fb0af09c8cdf672484ea182f352e7ceead3d832de381e1b"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fe5872ce6f9627deac8314bdffd3862624227c3de4c17ef0cc78bbf0402999eb"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f6977a520bd96e097c8a37a8cbb9faa1ea99d21bf84190195056e25f688af73d"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:2dbfbbded947a83a3dffc2bd1ec4750c17e40904692186e2c55a3ad314ca0222"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:a752ecd1a26000a6d67be7c9a2e93801994a8b3f866ac95b672fbc00225ca91a"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:1bab889ae7640eba739f67fcbf8eff252dddc60d4495e6ddd3a87cd9a95fdb52"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:f94c7d22fb36b184734dded7345a04ec5f95130421c775b8b0c65044ef073f34"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec5ca7c0007ce268048bbe0ffc6846ed1616cf3d8628b136e81d5e64ff3f52a2"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c72d08acdf573455b2b9d2b75b8237654841d63a48bc2327dc102c6ee89b75a"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6ea08758b6673610b3c5bdf47189286cf9c58b1077558706a2f6f8744922527"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d5ec90276f71af3a29917b30f2aec2315a2759b5f8d45b3b63a07ca8a070a3"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:625befa3bc9b40746a749115cc6a15bf20b9bd7597ca55d646205b479a2c99c7"}, + {file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d1131562ddc2ea8a446f66c2648d7dabec2b3816fc818528eb978a75a6d23b2e"}, + 
{file = "pymongo-3.12.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eee42a1cc06565f6b21caa1f504ec15e07de7ebfd520ab57f8cb3308bc118e22"}, + {file = "pymongo-3.12.0-cp37-cp37m-win32.whl", hash = "sha256:94d38eba4d1b5eb3e6bfece0651b855a35c44f32fd91f512ab4ba41b8c0d3e66"}, + {file = "pymongo-3.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e018a4921657c2d3f89c720b7b90b9182e277178a04a7e9542cc79d7d787ca51"}, + {file = "pymongo-3.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c6a9948916a7bbcc6d3a9f6fb75db1acb5546078023bfb3db6efabcd5a67527"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e9faf8d4712d5ea301d74abfcf6dafe4b7f4af7936e91f283b0ad7bf69ed3e3a"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cc2894fe91f31a513860238ede69fe47fada21f9e7ddfe73f7f9fef93a971e41"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:053b4ebf91c7395d1fcd2ce6a9edff0024575b7b2de6781554a4114448a8adc9"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:39dafa2eaf577d1969f289dc9a44501859a1897eb45bd589e93ce843fc610800"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:246ec420e4c8744fceb4e259f906211b9c198e1f345e6158dcd7cbad3737e11e"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:208debdcf76ed39ebf24f38509f50dc1c100e31e8653817fedb8e1f867850a13"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:18290649759f9db660972442aa606f845c368db9b08c4c73770f6da14113569b"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657ad80de8ec9ed656f28844efc801a0802961e8c6a85038d97ff6f555ef4919"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b772bab31cbd9cb911e41e1a611ebc9497f9a32a7348e2747c38210f75c00f41"}, + {file = 
"pymongo-3.12.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2399a85b54f68008e483b2871f4a458b4c980469c7fe921595ede073e4844f1e"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e66780f14c2efaf989cd3ac613b03ee6a8e3a0ba7b96c0bb14adca71a427e55"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02dc0b0f48ed3cd06c13b7e31b066bf91e00dac5f8147b0a0a45f9009bfab857"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:070a4ef689c9438a999ec3830e69b208ff0d12251846e064d947f97d819d1d05"}, + {file = "pymongo-3.12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:db93608a246da44d728842b8fa9e45aa9782db76955f634a707739a8d53ff544"}, + {file = "pymongo-3.12.0-cp38-cp38-win32.whl", hash = "sha256:5af390fa9faf56c93252dab09ea57cd020c9123aa921b63a0ed51832fdb492e7"}, + {file = "pymongo-3.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:a2239556ff7241584ce57be1facf25081669bb457a9e5cbe68cce4aae6567aa1"}, + {file = "pymongo-3.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cda9e628b1315beec8341e8c04aac9a0b910650b05e0751e42e399d5694aeacb"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:845a8b83798b2fb11b09928413cb32692866bfbc28830a433d9fa4c8c3720dd0"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:da8288bc4a7807c6715416deed1c57d94d5e03e93537889e002bf985be503f1a"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a9ba2a63777027b06b116e1ea8248e66fd1bedc2c644f93124b81a91ddbf6d88"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:9a13661681d17e43009bb3e85e837aa1ec5feeea1e3654682a01b8821940f8b3"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:6b89dc51206e4971c5568c797991eaaef5dc2a6118d67165858ad11752dba055"}, + {file = 
"pymongo-3.12.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:701e08457183da70ed96b35a6b43e6ba1df0b47c837b063cde39a1fbe1aeda81"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:e7a33322e08021c37e89cae8ff06327503e8a1719e97c69f32c31cbf6c30d72c"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd1f49f949a658c4e8f81ed73f9aad25fcc7d4f62f767f591e749e30038c4e1d"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6d055f01b83b1a4df8bb0c61983d3bdffa913764488910af3620e5c2450bf83"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd6ff2192f34bd622883c745a56f492b1c9ccd44e14953e8051c33024a2947d5"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19d4bd0fc29aa405bb1781456c9cfff9fceabb68543741eb17234952dbc2bbb0"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24f8aeec4d6b894a6128844e50ff423dd02462ee83addf503c598ee3a80ddf3d"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b6055e0ef451ff73c93d0348d122a0750dddf323b9361de5835dac2f6cf7fc1"}, + {file = "pymongo-3.12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6261bee7c5abadeac7497f8f1c43e521da78dd13b0a2439f526a7b0fc3788824"}, + {file = "pymongo-3.12.0-cp39-cp39-win32.whl", hash = "sha256:2e92aa32300a0b5e4175caec7769f482b292769807024a86d674b3f19b8e3755"}, + {file = "pymongo-3.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ce83f17f641a62a4dfb0ba1b8a3c1ced7c842f511b5450d90c030c7828e3693"}, + {file = "pymongo-3.12.0-py2.7-macosx-10.14-intel.egg", hash = "sha256:d1740776b70367277323fafb76bcf09753a5cc9824f5d705bac22a34ff3668ea"}, + {file = "pymongo-3.12.0.tar.gz", hash = 
"sha256:b88d1742159bc93a078733f9789f563cef26f5e370eba810476a71aa98e5fbc2"}, ] pynput = [ {file = "pynput-1.7.3-py2.py3-none-any.whl", hash = "sha256:fea5777454f896bd79d35393088cd29a089f3b2da166f0848a922b1d5a807d4f"}, @@ -2210,27 +2332,47 @@ pyqt5-sip = [ {file = "PyQt5_sip-12.9.0.tar.gz", hash = "sha256:d3e4489d7c2b0ece9d203ae66e573939f7f60d4d29e089c9f11daa17cfeaae32"}, ] pyrsistent = [ - {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = "sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, + {file = 
"pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, + {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, ] pytest = [ - {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, - {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = 
"sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] pytest-cov = [ {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-print = [ - {file = "pytest_print-0.2.1-py2.py3-none-any.whl", hash = "sha256:2cfcdeee8b398457d3e3488f1fde5f8303b404c30187be5fcb4c7818df5f4529"}, - {file = "pytest_print-0.2.1.tar.gz", hash = "sha256:8f61e5bb2d031ee88d19a5a7695a0c863caee7b1478f1a82d080c2128b76ad83"}, + {file = "pytest_print-0.3.0-py2.py3-none-any.whl", hash = "sha256:53fb0f71d371f137ac2e7171d92f204eb45055580e8c7920df619d9b2ee45359"}, + {file = "pytest_print-0.3.0.tar.gz", hash = "sha256:769f1b1b0943b2941dbeeaac6985766e76b341130ed538f88c23ebcd7087b90d"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] python-xlib = [ - {file = "python-xlib-0.30.tar.gz", hash = "sha256:74131418faf9e7b83178c71d9d80297fbbd678abe99ae9258f5a20cd027acb5f"}, - {file = "python_xlib-0.30-py2.py3-none-any.whl", hash = "sha256:c4c92cd47e07588b2cbc7d52de18407b2902c3812d7cdec39cd2177b060828e2"}, + {file = "python-xlib-0.31.tar.gz", hash = "sha256:74d83a081f532bc07f6d7afcd6416ec38403d68f68b9b9dc9e1f28fbf2d799e9"}, + {file = "python_xlib-0.31-py2.py3-none-any.whl", hash = "sha256:1ec6ce0de73d9e6592ead666779a5732b384e5b8fb1f1886bd0a81cafa477759"}, ] python3-xlib = [ {file = 
"python3-xlib-0.15.tar.gz", hash = "sha256:dc4245f3ae4aa5949c1d112ee4723901ade37a96721ba9645f2bfa56e5b383f8"}, @@ -2256,16 +2398,16 @@ pywin32-ctypes = [ {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, ] "qt.py" = [ - {file = "Qt.py-1.3.3-py2.py3-none-any.whl", hash = "sha256:9e3f5417187c98d246918a9b27a9e1f8055e089bdb2b063a2739986bc19a3d2e"}, - {file = "Qt.py-1.3.3.tar.gz", hash = "sha256:601606127f70be9adc82c248d209d696cccbd1df242c24d3fb1a9e399f3ecaf1"}, + {file = "Qt.py-1.3.6-py2.py3-none-any.whl", hash = "sha256:7edf6048d07a6924707506b5ba34a6e05d66dde9a3f4e3a62f9996ccab0b91c7"}, + {file = "Qt.py-1.3.6.tar.gz", hash = "sha256:0d78656a2f814602eee304521c7bf5da0cec414818b3833712c77524294c404a"}, ] recommonmark = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] rsa = [ {file = "rsa-4.7.2-py3-none-any.whl", hash = "sha256:78f9a9bf4e7be0c5ded4583326e7461e3a3c5aae24073648b4bdfa797d78c9d2"}, @@ -2284,8 +2426,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] slack-sdk = [ - {file = "slack_sdk-3.6.0-py2.py3-none-any.whl", hash = 
"sha256:e1b257923a1ef88b8620dd3abff94dc5b3eee16ef37975d101ba9e60123ac3af"}, - {file = "slack_sdk-3.6.0.tar.gz", hash = "sha256:195f044e02a2844579a7a26818ce323e85dde8de224730c859644918d793399e"}, + {file = "slack_sdk-3.10.1-py2.py3-none-any.whl", hash = "sha256:f17b71a578e94204d9033bffded634475f4ca0a6274c6c7a4fd8a9cb0ac7cd8b"}, + {file = "slack_sdk-3.10.1.tar.gz", hash = "sha256:2b4dde7728eb4ff5a581025d204578ccff25a5d8f0fe11ae175e3ce6e074434f"}, ] smmap = [ {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, @@ -2300,8 +2442,8 @@ speedcopy = [ {file = "speedcopy-2.1.0.tar.gz", hash = "sha256:8bb1a6c735900b83901a7be84ba2175ed3887c13c6786f97dea48f2ea7d504c2"}, ] sphinx = [ - {file = "Sphinx-4.0.2-py3-none-any.whl", hash = "sha256:d1cb10bee9c4231f1700ec2e24a91be3f3a3aba066ea4ca9f3bbe47e59d5a1d4"}, - {file = "Sphinx-4.0.2.tar.gz", hash = "sha256:b5c2ae4120bf00c799ba9b3699bc895816d272d120080fbc967292f29b52b48c"}, + {file = "Sphinx-4.1.2-py3-none-any.whl", hash = "sha256:46d52c6cee13fec44744b8c01ed692c18a640f6910a725cbb938bc36e8d64544"}, + {file = "Sphinx-4.1.2.tar.gz", hash = "sha256:3092d929cd807926d846018f2ace47ba2f3b671b309c7a89cd3306e80c826b13"}, ] sphinx-qt-documentation = [ {file = "sphinx_qt_documentation-0.3-py3-none-any.whl", hash = "sha256:bee247cb9e4fc03fc496d07adfdb943100e1103320c3e5e820e0cfa7c790d9b6"}, @@ -2379,17 +2521,17 @@ typed-ast = [ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, + 
{file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, ] uritemplate = [ {file = "uritemplate-3.0.1-py2.py3-none-any.whl", hash = "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f"}, {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] urllib3 = [ - {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"}, - {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"}, + {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"}, + {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2446,6 +2588,6 @@ yarl = [ {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, + {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, ] diff --git a/pyproject.toml 
b/pyproject.toml index a57ae19224..24e51a17bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ slack-sdk = "^3.6.0" flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "*" +cx_freeze = { version = "6.7", source = "openpype" } GitPython = "^3.1.17" jedi = "^0.13" Jinja2 = "^2.11" diff --git a/tools/docker_build.sh b/tools/docker_build.sh index c27041a1af..d2dbef2e48 100755 --- a/tools/docker_build.sh +++ b/tools/docker_build.sh @@ -27,17 +27,15 @@ create_container () { fi local id=$(<"$openpype_root/build/docker-image.id") echo -e "${BIYellow}---${RST} Creating container from $id ..." - local cid="$(docker create $id bash)" + cid="$(docker create $id bash)" if [ $? -ne 0 ] ; then echo -e "${BIRed}!!!${RST} Cannot create container." exit 1 fi - return $cid } retrieve_build_log () { create_container - local cid=$? echo -e "${BIYellow}***${RST} Copying build log to ${BIWhite}$openpype_root/build/build.log${RST}" docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" } @@ -65,7 +63,6 @@ main () { echo -e "${BIGreen}>>>${RST} Copying build from container ..." create_container - local cid=$? echo -e "${BIYellow}---${RST} Copying ..." 
docker cp "$cid:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build" docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" @@ -79,7 +76,7 @@ main () { chown -R $username ./build echo -e "${BIGreen}>>>${RST} All done, you can delete container:" - echo -e "${BIYellow}$id${RST}" + echo -e "${BIYellow}$cid${RST}" } return_code=0 From 6cced73ac9e07b36112311b2aa03653089571354 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 9 Sep 2021 16:36:50 +0200 Subject: [PATCH 306/308] change to debian, add platform selection --- Dockerfile | 84 +++++++++++++++++------------------------ Dockerfile.centos7 | 87 +++++++++++++++++++++++++++++++++++++++++++ README.md | 11 ++++++ tools/docker_build.sh | 17 ++++++++- 4 files changed, 148 insertions(+), 51 deletions(-) create mode 100644 Dockerfile.centos7 diff --git a/Dockerfile b/Dockerfile index 78611860ea..cef83b5811 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,9 @@ # Build Pype docker image -FROM centos:7 AS builder -ARG OPENPYPE_PYTHON_VERSION=3.7.10 +FROM debian:bookworm-slim AS builder +ARG OPENPYPE_PYTHON_VERSION=3.7.12 +LABEL maintainer="info@openpype.io" +LABEL description="Docker Image to build and run OpenPype" LABEL org.opencontainers.image.name="pypeclub/openpype" LABEL org.opencontainers.image.title="OpenPype Docker Image" LABEL org.opencontainers.image.url="https://openpype.io/" @@ -9,57 +11,49 @@ LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype" USER root -# update base -RUN yum -y install deltarpm \ - && yum -y update \ - && yum clean all +ARG DEBIAN_FRONTEND=noninteractive -# add tools we need -RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm \ - && yum -y install centos-release-scl \ - && yum -y install \ +# update base +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ bash \ - which \ git \ - devtoolset-7-gcc* \ - make \ cmake \ + make \ curl \ wget \ - gcc \ - zlib-devel 
\ - bzip2 \ - bzip2-devel \ - readline-devel \ - sqlite sqlite-devel \ - openssl-devel \ - openssl-libs \ - tk-devel libffi-devel \ - qt5-qtbase-devel \ - patchelf \ - && yum clean all + build-essential \ + checkinstall \ + libssl-dev \ + zlib1g-dev \ + libbz2-dev \ + libreadline-dev \ + libsqlite3-dev \ + llvm \ + libncursesw5-dev \ + xz-utils \ + tk-dev \ + libxml2-dev \ + libxmlsec1-dev \ + libffi-dev \ + liblzma-dev \ + patchelf + +SHELL ["/bin/bash", "-c"] RUN mkdir /opt/openpype -# RUN useradd -m pype -# RUN chown pype /opt/openpype -# USER pype -RUN curl https://pyenv.run | bash -ENV PYTHON_CONFIGURE_OPTS --enable-shared - -RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \ +RUN curl https://pyenv.run | bash \ + && echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \ && echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \ && echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \ - && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc -RUN source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION} + && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc \ + && source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION} COPY . /opt/openpype/ -RUN rm -rf /openpype/.poetry || echo "No Poetry installed yet." 
-# USER root -# RUN chown -R pype /opt/openpype -RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh -# USER pype +RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh WORKDIR /opt/openpype @@ -68,18 +62,8 @@ RUN cd /opt/openpype \ && pyenv local ${OPENPYPE_PYTHON_VERSION} RUN source $HOME/.bashrc \ - && ./tools/create_env.sh - -RUN source $HOME/.bashrc \ + && ./tools/create_env.sh \ && ./tools/fetch_thirdparty_libs.sh RUN source $HOME/.bashrc \ && bash ./tools/build.sh - -RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \ - && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib \ - && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.7/lib - -RUN cd /opt/openpype \ - rm -rf ./vendor/bin diff --git a/Dockerfile.centos7 b/Dockerfile.centos7 new file mode 100644 index 0000000000..0e2fdd4ba0 --- /dev/null +++ b/Dockerfile.centos7 @@ -0,0 +1,87 @@ +# Build Pype docker image +FROM centos:7 AS builder +ARG OPENPYPE_PYTHON_VERSION=3.7.10 + +LABEL org.opencontainers.image.name="pypeclub/openpype" +LABEL org.opencontainers.image.title="OpenPype Docker Image" +LABEL org.opencontainers.image.url="https://openpype.io/" +LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype" + +USER root + +# update base +RUN yum -y install deltarpm \ + && yum -y update \ + && yum clean all + +# add tools we need +RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm \ + && yum -y install centos-release-scl \ + && yum -y install \ + bash \ + which \ + git \ + devtoolset-7-gcc* \ + make \ + cmake \ + curl \ + wget \ + gcc \ + zlib-devel \ + bzip2 \ + bzip2-devel \ + readline-devel \ + sqlite sqlite-devel \ + openssl-devel \ + openssl-libs \ + tk-devel libffi-devel \ + qt5-qtbase-devel \ + patchelf \ + ncurses \ + ncurses-devel \ + && yum clean all + 
+RUN mkdir /opt/openpype +# RUN useradd -m pype +# RUN chown pype /opt/openpype +# USER pype + +RUN curl https://pyenv.run | bash +# ENV PYTHON_CONFIGURE_OPTS --enable-shared + +RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \ + && echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \ + && echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \ + && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc +RUN source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION} + +COPY . /opt/openpype/ +RUN rm -rf /openpype/.poetry || echo "No Poetry installed yet." +# USER root +# RUN chown -R pype /opt/openpype +RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh + +# USER pype + +WORKDIR /opt/openpype + +RUN cd /opt/openpype \ + && source $HOME/.bashrc \ + && pyenv local ${OPENPYPE_PYTHON_VERSION} + +RUN source $HOME/.bashrc \ + && ./tools/create_env.sh + +RUN source $HOME/.bashrc \ + && ./tools/fetch_thirdparty_libs.sh + +RUN source $HOME/.bashrc \ + && bash ./tools/build.sh + +RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \ + && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \ + && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib \ + && cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.7/lib + +RUN cd /opt/openpype \ + rm -rf ./vendor/bin diff --git a/README.md b/README.md index 209af24c75..0e450fc48d 100644 --- a/README.md +++ b/README.md @@ -133,6 +133,12 @@ Easiest way to build OpenPype on Linux is using [Docker](https://www.docker.com/ sudo ./tools/docker_build.sh ``` +This will by default use Debian as base image. If you need to make Centos 7 compatible build, please run: + +```sh +sudo ./tools/docker_build.sh centos7 +``` + If all is successful, you'll find built OpenPype in `./build/` folder. 
#### Manual build @@ -158,6 +164,11 @@ you'll need also additional libraries for Qt5: ```sh sudo apt install qt5-default ``` +or if you are on Ubuntu > 20.04, there is no `qt5-default` packages so you need to install its content individually: + +```sh +sudo apt-get install qtbase5-dev qtchooser qt5-qmake qtbase5-dev-tools +```
diff --git a/tools/docker_build.sh b/tools/docker_build.sh index d2dbef2e48..04c26424eb 100755 --- a/tools/docker_build.sh +++ b/tools/docker_build.sh @@ -40,6 +40,21 @@ retrieve_build_log () { docker cp "$cid:/opt/openpype/build/build.log" "$openpype_root/build" } +openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) + + +if [ -z $1 ]; then + dockerfile="Dockerfile" +else + dockerfile="Dockerfile.$1" + if [ ! -f "$openpype_root/$dockerfile" ]; then + echo -e "${BIRed}!!!${RST} Dockerfile for specifed platform ${BIWhite}$1${RST} doesn't exist." + exit 1 + else + echo -e "${BIGreen}>>>${RST} Using Dockerfile for ${BIWhite}$1${RST} ..." + fi +fi + # Main main () { openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) @@ -53,7 +68,7 @@ main () { echo -e "${BIGreen}>>>${RST} Running docker build ..." # docker build --pull --no-cache -t pypeclub/openpype:$openpype_version . - docker build --pull --iidfile $openpype_root/build/docker-image.id -t pypeclub/openpype:$openpype_version . + docker build --pull --iidfile $openpype_root/build/docker-image.id -t pypeclub/openpype:$openpype_version -f $dockerfile . if [ $? -ne 0 ] ; then echo $? echo -e "${BIRed}!!!${RST} Docker build failed." From d90a866b5bbcc55878068f20777c9db2ad6e68b1 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 9 Sep 2021 17:11:40 +0200 Subject: [PATCH 307/308] =?UTF-8?q?add=20changes=20to=20docs=20?= =?UTF-8?q?=F0=9F=93=9C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- website/docs/dev_build.md | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index b3e0c24fc2..f71118eba6 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -84,6 +84,13 @@ You can use Docker to build OpenPype. Just run: ```shell $ sudo ./tools/docker_build.sh ``` + +This will by default use Debian as base image. 
If you need to make Centos 7 compatible build, please run: + +```sh +sudo ./tools/docker_build.sh centos7 +``` + and you should have built OpenPype in `build` directory. It is using **Centos 7** as a base image. @@ -323,14 +330,18 @@ Same as: poetry run python ./tools/create_zip.py ``` -### docker_build.sh +### docker_build.sh *[variant]* Script to build OpenPype on [Docker](https://www.docker.com/) enabled systems - usually Linux and Windows with [Docker Desktop](https://docs.docker.com/docker-for-windows/install/) and [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/about) (WSL) installed. It must be run with administrative privileges - `sudo ./docker_build.sh`. -It will use **Centos 7** base image to build OpenPype. You'll see your build in `./build` folder. +It will use latest **Debian** base image to build OpenPype. If you need to build OpenPype for +older systems like Centos 7, use `centos7` as argument. This will use another Dockerfile to build +OpenPype with **Centos 7** as base image. + +You'll see your build in `./build` folder. ### fetch_thirdparty_libs This script will download necessary tools for OpenPype defined in `pyproject.toml` like FFMpeg, From aa9a945b9ca537b91aef4722a18389e8ad6f3f7f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 15 Sep 2021 14:02:02 +0200 Subject: [PATCH 308/308] remove devtoolset-7 from centos build --- Dockerfile.centos7 | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile.centos7 b/Dockerfile.centos7 index 0e2fdd4ba0..e39fc2dc8c 100644 --- a/Dockerfile.centos7 +++ b/Dockerfile.centos7 @@ -21,7 +21,6 @@ RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.n bash \ which \ git \ - devtoolset-7-gcc* \ make \ cmake \ curl \