From 7f83c8a2d028ddeeb772d1bcc7e4a0348568ee25 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 15:21:42 +0100 Subject: [PATCH 001/244] OP-2765 - added methods for New Publisher Removed uuid, replaced with instance_id or first members item --- openpype/hosts/aftereffects/api/__init__.py | 8 ++++- openpype/hosts/aftereffects/api/pipeline.py | 39 +++++++++++++++------ openpype/hosts/aftereffects/api/ws_stub.py | 20 +++++------ 3 files changed, 45 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/aftereffects/api/__init__.py b/openpype/hosts/aftereffects/api/__init__.py index cea1bdc023..2ad1255d27 100644 --- a/openpype/hosts/aftereffects/api/__init__.py +++ b/openpype/hosts/aftereffects/api/__init__.py @@ -16,7 +16,10 @@ from .pipeline import ( uninstall, list_instances, remove_instance, - containerise + containerise, + get_context_data, + update_context_data, + get_context_title ) from .workio import ( @@ -51,6 +54,9 @@ __all__ = [ "list_instances", "remove_instance", "containerise", + "get_context_data", + "update_context_data", + "get_context_title", "file_extensions", "has_unsaved_changes", diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 94f1e3d105..ea03542765 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -10,6 +10,7 @@ from avalon import io, pipeline from openpype import lib from openpype.api import Logger import openpype.hosts.aftereffects +from openpype.pipeline import BaseCreator from .launch_logic import get_stub @@ -67,6 +68,7 @@ def install(): avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) log.info(PUBLISH_PATH) pyblish.api.register_callback( @@ -238,12 +240,6 @@ def list_instances(): if instance.get("schema") and \ "container" in instance.get("schema"): continue - - uuid_val = instance.get("uuid") - if uuid_val: - instance['uuid'] = uuid_val - else: - instance['uuid'] = instance.get("members")[0] # legacy instances.append(instance) return instances @@ -265,8 +261,29 @@ def remove_instance(instance): if not stub: return - stub.remove_instance(instance.get("uuid")) - item = stub.get_item(instance.get("uuid")) - if item: - stub.rename_item(item.id, - item.name.replace(stub.PUBLISH_ICON, '')) + inst_id = instance.get("instance_id") + if not inst_id: + log.warning("No instance identifier for {}".format(instance)) + return + + stub.remove_instance(inst_id) + + if instance.members: + item = stub.get_item(instance.members[0]) + if item: + stub.rename_item(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) + + +def get_context_data(): + print("get_context_data") + return {} + + +def update_context_data(data, changes): + print("update_context_data") + + +def get_context_title(): + """Returns title for Creator window""" + return "AfterEffects" diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 5a0600e92e..d098419e81 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -28,6 +28,7 @@ class AEItem(object): workAreaDuration = attr.ib(default=None) frameRate = attr.ib(default=None) file_name = attr.ib(default=None) + instance_id = attr.ib(default=None) # New Publisher class AfterEffectsServerStub(): @@ -132,8 +133,9 @@ class AfterEffectsServerStub(): is_new = True for item_meta in items_meta: - if 
item_meta.get('members') \ - and str(item.id) == str(item_meta.get('members')[0]): + if ((item_meta.get('members') and + str(item.id) == str(item_meta.get('members')[0])) or + item_meta.get("instance_id") == item.id): is_new = False if data: item_meta.update(data) @@ -314,15 +316,12 @@ class AfterEffectsServerStub(): Keep matching item in file though. Args: - instance_id(string): instance uuid + instance_id(string): instance id """ cleaned_data = [] for instance in self.get_metadata(): - uuid_val = instance.get("uuid") - if not uuid_val: - uuid_val = instance.get("members")[0] # legacy - if uuid_val != instance_id: + if instance.get("instance_id") != instance_id: cleaned_data.append(instance) payload = json.dumps(cleaned_data, indent=4) @@ -357,7 +356,7 @@ class AfterEffectsServerStub(): item_id (int): Returns: - (namedtuple) + (AEItem) """ res = self.websocketserver.call(self.client.call @@ -418,7 +417,7 @@ class AfterEffectsServerStub(): """ Get render queue info for render purposes Returns: - (namedtuple): with 'file_name' field + (AEItem): with 'file_name' field """ res = self.websocketserver.call(self.client.call ('AfterEffects.get_render_info')) @@ -606,7 +605,8 @@ class AfterEffectsServerStub(): d.get('workAreaStart'), d.get('workAreaDuration'), d.get('frameRate'), - d.get('file_name')) + d.get('file_name'), + d.get("instance_id")) ret.append(item) return ret From 2af112571dd0435b639c78c4ccac9f185e1338e6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 15:26:56 +0100 Subject: [PATCH 002/244] OP-2765 - refactor - order of methods changed --- openpype/hosts/aftereffects/api/pipeline.py | 187 ++++++++++---------- 1 file changed, 96 insertions(+), 91 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index ea03542765..1ec76fd9dd 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -27,39 +27,6 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -def check_inventory(): - if not lib.any_outdated(): - return - - host = pyblish.api.registered_host() - outdated_containers = [] - for container in host.ls(): - representation = container['representation'] - representation_doc = io.find_one( - { - "_id": io.ObjectId(representation), - "type": "representation" - }, - projection={"parent": True} - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - - # Warn about outdated containers. - print("Starting new QApplication..") - app = QtWidgets.QApplication(sys.argv) - - message_box = QtWidgets.QMessageBox() - message_box.setIcon(QtWidgets.QMessageBox.Warning) - msg = "There are outdated containers in the scene." - message_box.setText(msg) - message_box.exec_() - - -def application_launch(): - check_inventory() - - def install(): print("Installing Pype config...") @@ -84,6 +51,11 @@ def uninstall(): avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) +def application_launch(): + """Triggered after start of app""" + check_inventory() + + def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle layer visibility on instance toggles.""" instance[0].Visible = new_value @@ -118,6 +90,77 @@ def get_asset_settings(): } +# loaded containers section +def ls(): + """Yields containers from active AfterEffects document. 
+ + This is the host-equivalent of api.ls(), but instead of listing + assets on disk, it lists assets already loaded in AE; once loaded + they are called 'containers'. Used in Manage tool. + + Containers could be on multiple levels, single images/videos/was as a + FootageItem, or multiple items - backgrounds (folder with automatically + created composition and all imported layers). + + Yields: + dict: container + + """ + try: + stub = get_stub() # only after AfterEffects is up + except lib.ConnectionNotEstablishedYet: + print("Not connected yet, ignoring") + return + + layers_meta = stub.get_metadata() + for item in stub.get_items(comps=True, + folders=True, + footages=True): + data = stub.read(item, layers_meta) + # Skip non-tagged layers. + if not data: + continue + + # Filter to only containers. + if "container" not in data["id"]: + continue + + # Append transient data + data["objectName"] = item.name.replace(stub.LOADED_ICON, '') + data["layer"] = item + yield data + + +def check_inventory(): + """Checks loaded containers if they are of highest version""" + if not lib.any_outdated(): + return + + host = pyblish.api.registered_host() + outdated_containers = [] + for container in host.ls(): + representation = container['representation'] + representation_doc = io.find_one( + { + "_id": io.ObjectId(representation), + "type": "representation" + }, + projection={"parent": True} + ) + if representation_doc and not lib.is_latest(representation_doc): + outdated_containers.append(container) + + # Warn about outdated containers. + print("Starting new QApplication..") + app = QtWidgets.QApplication(sys.argv) + + message_box = QtWidgets.QMessageBox() + message_box.setIcon(QtWidgets.QMessageBox.Warning) + msg = "There are outdated containers in the scene." + message_box.setText(msg) + message_box.exec_() + + def containerise(name, namespace, comp, @@ -159,64 +202,7 @@ def containerise(name, return comp -def _get_stub(): - """ - Handle pulling stub from PS to run operations on host - Returns: - (AEServerStub) or None - """ - try: - stub = get_stub() # only after Photoshop is up - except lib.ConnectionNotEstablishedYet: - print("Not connected yet, ignoring") - return - - if not stub.get_active_document_name(): - return - - return stub - - -def ls(): - """Yields containers from active AfterEffects document. - - This is the host-equivalent of api.ls(), but instead of listing - assets on disk, it lists assets already loaded in AE; once loaded - they are called 'containers'. Used in Manage tool. - - Containers could be on multiple levels, single images/videos/was as a - FootageItem, or multiple items - backgrounds (folder with automatically - created composition and all imported layers). - - Yields: - dict: container - - """ - try: - stub = get_stub() # only after AfterEffects is up - except lib.ConnectionNotEstablishedYet: - print("Not connected yet, ignoring") - return - - layers_meta = stub.get_metadata() - for item in stub.get_items(comps=True, - folders=True, - footages=True): - data = stub.read(item, layers_meta) - # Skip non-tagged layers. - if not data: - continue - - # Filter to only containers. 
- if "container" not in data["id"]: - continue - - # Append transient data - data["objectName"] = item.name.replace(stub.LOADED_ICON, '') - data["layer"] = item - yield data - - +# created instances section def list_instances(): """ List all created instances from current workfile which @@ -275,6 +261,7 @@ def remove_instance(instance): item.name.replace(stub.PUBLISH_ICON, '')) +# new publisher section def get_context_data(): print("get_context_data") return {} @@ -287,3 +274,21 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" return "AfterEffects" + + +def _get_stub(): + """ + Handle pulling stub from PS to run operations on host + Returns: + (AEServerStub) or None + """ + try: + stub = get_stub() # only after Photoshop is up + except lib.ConnectionNotEstablishedYet: + print("Not connected yet, ignoring") + return + + if not stub.get_active_document_name(): + return + + return stub From a27119bee40d29725eea5493e1b2004d1813669d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:26:04 +0100 Subject: [PATCH 003/244] OP-2765 - renamed old creators --- ...ender.py => create_legacy_local_render.py} | 6 +- .../plugins/create/create_legacy_render.py | 62 +++++++++++++++++++ 2 files changed, 65 insertions(+), 3 deletions(-) rename openpype/hosts/aftereffects/plugins/create/{create_local_render.py => create_legacy_local_render.py} (57%) create mode 100644 openpype/hosts/aftereffects/plugins/create/create_legacy_render.py diff --git a/openpype/hosts/aftereffects/plugins/create/create_local_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py similarity index 57% rename from openpype/hosts/aftereffects/plugins/create/create_local_render.py rename to openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py index 9d2cdcd7be..4fb07f31f8 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_local_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py @@ -1,7 +1,7 @@ -from openpype.hosts.aftereffects.plugins.create import create_render +from openpype.hosts.aftereffects.plugins.create import create_legacy_render -class CreateLocalRender(create_render.CreateRender): +class CreateLocalRender(create_legacy_render.CreateRender): """ Creator to render locally. Created only after default render on farm. So family 'render.local' is @@ -10,4 +10,4 @@ class CreateLocalRender(create_render.CreateRender): name = "renderDefault" label = "Render Locally" - family = "renderLocal" + family = "renderLocal" \ No newline at end of file diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py new file mode 100644 index 0000000000..7da489a731 --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py @@ -0,0 +1,62 @@ +from avalon.api import CreatorError + +import openpype.api +from openpype.hosts.aftereffects.api import ( + get_stub, + list_instances +) + + +class CreateRender(openpype.api.Creator): + """Render folder for publish. + + Creates subsets in format 'familyTaskSubsetname', + eg 'renderCompositingMain'. + + Create only single instance from composition at a time. 
+ """ + + name = "renderDefault" + label = "Render on Farm" + family = "render" + defaults = ["Main"] + + def process(self): + stub = get_stub() # only after After Effects is up + if (self.options or {}).get("useSelection"): + items = stub.get_selected_items( + comps=True, folders=False, footages=False + ) + if len(items) > 1: + raise CreatorError( + "Please select only single composition at time." + ) + + if not items: + raise CreatorError(( + "Nothing to create. Select composition " + "if 'useSelection' or create at least " + "one composition." + )) + + existing_subsets = [ + instance['subset'].lower() + for instance in list_instances() + ] + + item = items.pop() + if self.name.lower() in existing_subsets: + txt = "Instance with name \"{}\" already exists.".format(self.name) + raise CreatorError(txt) + + self.data["members"] = [item.id] + self.data["uuid"] = item.id # for SubsetManager + self.data["subset"] = ( + self.data["subset"] + .replace(stub.PUBLISH_ICON, '') + .replace(stub.LOADED_ICON, '') + ) + + stub.imprint(item, self.data) + stub.set_label_color(item.id, 14) # Cyan options 0 - 16 + stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) \ No newline at end of file From ebc05e82c8001878667aa31d1cba014d9c06f231 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:31:18 +0100 Subject: [PATCH 004/244] OP-2765 - refactored imprint method Uses id instead of full AEItem --- openpype/hosts/aftereffects/api/pipeline.py | 8 ++++---- openpype/hosts/aftereffects/api/ws_stub.py | 8 ++++---- .../hosts/aftereffects/plugins/load/load_background.py | 5 ++--- openpype/hosts/aftereffects/plugins/load/load_file.py | 8 ++++---- 4 files changed, 14 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 1ec76fd9dd..550ff25886 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -178,7 +178,7 @@ def containerise(name, Arguments: name (str): Name of resulting assembly namespace (str): Namespace under which to host container - comp (Comp): Composition to containerise + comp (AEItem): Composition to containerise context (dict): Asset information loader (str, optional): Name of loader used to produce this container. suffix (str, optional): Suffix of container, defaults to `_CON`. 
@@ -197,7 +197,7 @@ def containerise(name, } stub = get_stub() - stub.imprint(comp, data) + stub.imprint(comp.id, data) return comp @@ -254,8 +254,8 @@ def remove_instance(instance): stub.remove_instance(inst_id) - if instance.members: - item = stub.get_item(instance.members[0]) + if instance.get("members"): + item = stub.get_item(instance["members"][0]) if item: stub.rename_item(item.id, item.name.replace(stub.PUBLISH_ICON, '')) diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index d098419e81..18852d3d6c 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -111,11 +111,11 @@ class AfterEffectsServerStub(): self.log.debug("Couldn't find layer metadata") - def imprint(self, item, data, all_items=None, items_meta=None): + def imprint(self, item_id, data, all_items=None, items_meta=None): """ Save item metadata to Label field of metadata of active document Args: - item (AEItem): + item_id (int|str): id of FootageItem or instance_id for workfiles data(string): json representation for single layer all_items (list of item): for performance, could be injected for usage in loop, if not, single call will be @@ -134,8 +134,8 @@ class AfterEffectsServerStub(): for item_meta in items_meta: if ((item_meta.get('members') and - str(item.id) == str(item_meta.get('members')[0])) or - item_meta.get("instance_id") == item.id): + str(item_id) == str(item_meta.get('members')[0])) or + item_meta.get("instance_id") == item_id): is_new = False if data: item_meta.update(data) diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py index 1a2d6fc432..9b39556040 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_background.py +++ b/openpype/hosts/aftereffects/plugins/load/load_background.py @@ -91,7 +91,7 @@ class BackgroundLoader(AfterEffectsLoader): container["namespace"] = comp_name container["members"] = comp.members - stub.imprint(comp, container) + stub.imprint(comp.id, container) def remove(self, container): """ @@ -100,10 +100,9 @@ class BackgroundLoader(AfterEffectsLoader): Args: container (dict): container to be removed - used to get layer_id """ - print("!!!! 
container:: {}".format(container)) stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_item(layer.id) def switch(self, container, representation): diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py index 9dbbf7aae1..ba5bb5f69a 100644 --- a/openpype/hosts/aftereffects/plugins/load/load_file.py +++ b/openpype/hosts/aftereffects/plugins/load/load_file.py @@ -96,9 +96,9 @@ class FileLoader(AfterEffectsLoader): # with aftereffects.maintained_selection(): # TODO stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name) stub.imprint( - layer, {"representation": str(representation["_id"]), - "name": context["subset"], - "namespace": layer_name} + layer.id, {"representation": str(representation["_id"]), + "name": context["subset"], + "namespace": layer_name} ) def remove(self, container): @@ -109,7 +109,7 @@ class FileLoader(AfterEffectsLoader): """ stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_item(layer.id) def switch(self, container, representation): From 3c11f46b110d3e74f96b7990845bec375ee46d05 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:32:04 +0100 Subject: [PATCH 005/244] OP-2765 - working version of new creator --- .../plugins/create/create_render.py | 126 ++++++++++++------ 1 file changed, 87 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 8dfc85cdc8..c290bd46c3 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,37 +1,65 @@ -from avalon.api import CreatorError - -import openpype.api -from openpype.hosts.aftereffects.api import ( - get_stub, - list_instances +import json +from openpype import resources +import openpype.hosts.aftereffects.api as api +from openpype.pipeline import ( + Creator, + CreatedInstance, + lib, + CreatorError ) -class CreateRender(openpype.api.Creator): - """Render folder for publish. - - Creates subsets in format 'familyTaskSubsetname', - eg 'renderCompositingMain'. - - Create only single instance from composition at a time. 
- """ - - name = "renderDefault" - label = "Render on Farm" +class RenderCreator(Creator): + identifier = "render" + label = "Render" family = "render" - defaults = ["Main"] + description = "Render creator" - def process(self): - stub = get_stub() # only after After Effects is up - if (self.options or {}).get("useSelection"): + create_allow_context_change = False + + def get_icon(self): + return resources.get_openpype_splash_filepath() + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + instance_data = self._handle_legacy(instance_data) + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + created_inst, changes = update_list[0] + print("RenderCreator update_list:: {}-{}".format(created_inst, changes)) + api.get_stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) + + def remove_instances(self, instances): + for instance in instances: + print("instance:: {}".format(instance)) + api.remove_instance(instance) + self._remove_instance_from_context(instance) + + def create(self, subset_name, data, pre_create_data): + print("Data that can be used in create:\n{}".format( + json.dumps(pre_create_data, indent=4) + )) + stub = api.get_stub() # only after After Effects is up + print("pre_create_data:: {}".format(pre_create_data)) + if pre_create_data.get("use_selection"): items = stub.get_selected_items( comps=True, folders=False, footages=False ) + else: + items = stub.get_items(comps=True, folders=False, footages=False) + if len(items) > 1: raise CreatorError( "Please select only single composition at time." ) - + print("items:: {}".format(items)) if not items: raise CreatorError(( "Nothing to create. Select composition " @@ -39,24 +67,44 @@ class CreateRender(openpype.api.Creator): "one composition." 
)) - existing_subsets = [ - instance['subset'].lower() - for instance in list_instances() + data["members"] = [items[0].id] + new_instance = CreatedInstance(self.family, subset_name, data, self) + new_instance.creator_attributes["farm"] = pre_create_data["farm"] + + api.get_stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + self.log.info(new_instance.data) + self._add_instance_to_context(new_instance) + + def get_default_variants(self): + return [ + "myVariant", + "variantTwo", + "different_variant" ] - item = items.pop() - if self.name.lower() in existing_subsets: - txt = "Instance with name \"{}\" already exists.".format(self.name) - raise CreatorError(txt) + def get_instance_attr_defs(self): + return [lib.BoolDef("farm", label="Render on farm")] - self.data["members"] = [item.id] - self.data["uuid"] = item.id # for SubsetManager - self.data["subset"] = ( - self.data["subset"] - .replace(stub.PUBLISH_ICON, '') - .replace(stub.LOADED_ICON, '') - ) + def get_pre_create_attr_defs(self): + output = [ + lib.BoolDef("use_selection", default=True, label="Use selection"), + lib.UISeparatorDef(), + lib.BoolDef("farm", label="Render on farm") + ] + return output + + def get_detail_description(self): + return """Creator for Render instances""" + + def _handle_legacy(self, instance_data): + """Converts old instances to new format.""" + if instance_data.get("uuid"): + instance_data["item_id"] = instance_data.get("uuid") + instance_data.pop("uuid") + + if not instance_data.get("members"): + instance_data["members"] = [instance_data["item_id"]] + + return instance_data - stub.imprint(item, self.data) - stub.set_label_color(item.id, 14) # Cyan options 0 - 16 - stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) From 082b2306ee08a4f286804d1afe0f8139006e5fe8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:32:50 +0100 Subject: [PATCH 006/244] OP-2765 - changed collector to work with new creator --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index c1c2be4855..61c4897cae 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -10,6 +10,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): + for instance in context: + if instance.data["family"] == "workfile": + self.log.debug("Workfile instance found, skipping") + return + task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) From 64b63369d6b1a8bbf702a3fe34a3ea05e4021d79 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:33:21 +0100 Subject: [PATCH 007/244] OP-2765 - added 'newPublishing' flag to differentiate --- openpype/plugins/publish/collect_from_create_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 16e3f669c3..09584ab37c 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -25,7 +25,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): # Update global data to context 
context.data.update(create_context.context_data_to_store()) - + context.data["newPublishing"] = True # Update context data for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"): value = create_context.dbcon.Session.get(key) From be05fe990580aff0bc98ffee8243bc4e7536083e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:34:00 +0100 Subject: [PATCH 008/244] OP-2765 - updated collecting of render family Added pre collect for backward compatibility --- .../plugins/publish/collect_render.py | 197 ++++++++++-------- .../plugins/publish/pre_collect_render.py | 47 +++++ 2 files changed, 154 insertions(+), 90 deletions(-) create mode 100644 openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 2a4b773681..1ad3d3dd18 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -2,6 +2,7 @@ import os import re import tempfile import attr +from copy import deepcopy import pyblish.api @@ -29,20 +30,22 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): label = "Collect After Effects Render Layers" hosts = ["aftereffects"] - # internal - family_remapping = { - "render": ("render.farm", "farm"), # (family, label) - "renderLocal": ("render", "local") - } padding_width = 6 rendered_extension = 'png' - stub = get_stub() + _stub = None + + @classmethod + def get_stub(cls): + if not cls._stub: + cls._stub = get_stub() + return cls._stub def get_instances(self, context): instances = [] + instances_to_remove = [] - app_version = self.stub.get_app_version() + app_version = CollectAERender.get_stub().get_app_version() app_version = app_version[0:4] current_file = context.data["currentFile"] @@ -50,105 +53,91 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - compositions = self.stub.get_items(True) + compositions = CollectAERender.get_stub().get_items(True) compositions_by_id = {item.id: item for item in compositions} - for inst in self.stub.get_metadata(): - schema = inst.get('schema') - # loaded asset container skip it - if schema and 'container' in schema: + for inst in context: + family = inst.data["family"] + if family != "render": continue + self._debug_log(inst) - if not inst["members"]: - raise ValueError("Couldn't find id, unable to publish. 
" + - "Please recreate instance.") - item_id = inst["members"][0] + item_id = inst.data["members"][0] - work_area_info = self.stub.get_work_area(int(item_id)) + work_area_info = CollectAERender.get_stub().get_work_area( + int(item_id)) if not work_area_info: self.log.warning("Orphaned instance, deleting metadata") - self.stub.remove_instance(int(item_id)) + inst_id = inst.get("instance_id") or item_id + CollectAERender.get_stub().remove_instance(inst_id) continue - frameStart = work_area_info.workAreaStart - - frameEnd = round(work_area_info.workAreaStart + - float(work_area_info.workAreaDuration) * - float(work_area_info.frameRate)) - 1 + frame_start = work_area_info.workAreaStart + frame_end = round(work_area_info.workAreaStart + + float(work_area_info.workAreaDuration) * + float(work_area_info.frameRate)) - 1 fps = work_area_info.frameRate # TODO add resolution when supported by extension - if inst["family"] in self.family_remapping.keys() \ - and inst["active"]: - remapped_family = self.family_remapping[inst["family"]] - instance = AERenderInstance( - family=remapped_family[0], - families=[remapped_family[0]], - version=version, - time="", - source=current_file, - label="{} - {}".format(inst["subset"], remapped_family[1]), - subset=inst["subset"], - asset=context.data["assetEntity"]["name"], - attachTo=False, - setMembers='', - publish=True, - renderer='aerender', - name=inst["subset"], - resolutionWidth=asset_entity["data"].get( - "resolutionWidth", - project_entity["data"]["resolutionWidth"]), - resolutionHeight=asset_entity["data"].get( - "resolutionHeight", - project_entity["data"]["resolutionHeight"]), - pixelAspect=1, - tileRendering=False, - tilesX=0, - tilesY=0, - frameStart=frameStart, - frameEnd=frameEnd, - frameStep=1, - toBeRenderedOn='deadline', - fps=fps, - app_version=app_version - ) + if not inst.data["active"]: + continue - comp = compositions_by_id.get(int(item_id)) - if not comp: - raise ValueError("There is no composition for item {}". - format(item_id)) - instance.comp_name = comp.name - instance.comp_id = item_id - instance._anatomy = context.data["anatomy"] - instance.anatomyData = context.data["anatomyData"] + subset_name = inst.data["subset"] + instance = AERenderInstance( + family=family, + families=[family], + version=version, + time="", + source=current_file, + label="{} - {}".format(subset_name, family), + subset=subset_name, + asset=context.data["assetEntity"]["name"], + attachTo=False, + setMembers='', + publish=True, + renderer='aerender', + name=subset_name, + resolutionWidth=asset_entity["data"].get( + "resolutionWidth", + project_entity["data"]["resolutionWidth"]), + resolutionHeight=asset_entity["data"].get( + "resolutionHeight", + project_entity["data"]["resolutionHeight"]), + pixelAspect=1, + tileRendering=False, + tilesX=0, + tilesY=0, + frameStart=frame_start, + frameEnd=frame_end, + frameStep=1, + toBeRenderedOn='deadline', + fps=fps, + app_version=app_version, + anatomyData=deepcopy(context.data["anatomyData"]), + context=context + ) - instance.outputDir = self._get_output_dir(instance) - instance.context = context + comp = compositions_by_id.get(int(item_id)) + if not comp: + raise ValueError("There is no composition for item {}". 
+ format(item_id)) + instance.outputDir = self._get_output_dir(instance) + instance.comp_name = comp.name + instance.comp_id = item_id - settings = get_project_settings(os.getenv("AVALON_PROJECT")) - reviewable_subset_filter = \ - (settings["deadline"] - ["publish"] - ["ProcessSubmittedJobOnFarm"] - ["aov_filter"]) + is_local = "renderLocal" in inst.data["families"] + if inst.data.get("creator_attributes"): + is_local = inst.data["creator_attributes"].get("farm") + if is_local: + # for local renders + instance = self._update_for_local(instance, project_entity) - if inst["family"] == "renderLocal": - # for local renders - instance.anatomyData["version"] = instance.version - instance.anatomyData["subset"] = instance.subset - instance.stagingDir = tempfile.mkdtemp() - instance.projectEntity = project_entity + self.log.info("New instance:: {}".format(instance)) + instances.append(instance) + instances_to_remove.append(inst) - if self.hosts[0] in reviewable_subset_filter.keys(): - for aov_pattern in \ - reviewable_subset_filter[self.hosts[0]]: - if re.match(aov_pattern, instance.subset): - instance.families.append("review") - instance.review = True - break - - self.log.info("New instance:: {}".format(instance)) - instances.append(instance) + for instance in instances_to_remove: + context.remove(instance) return instances @@ -169,7 +158,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): end = render_instance.frameEnd # pull file name from Render Queue Output module - render_q = self.stub.get_render_info() + render_q = CollectAERender.get_stub().get_render_info() if not render_q: raise ValueError("No file extension set in Render Queue") _, ext = os.path.splitext(os.path.basename(render_q.file_name)) @@ -216,3 +205,31 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): # for submit_publish_job return base_dir + + def _update_for_local(self, instance, project_entity): + instance.anatomyData["version"] = instance.version + instance.anatomyData["subset"] = instance.subset + instance.stagingDir = tempfile.mkdtemp() + instance.projectEntity = project_entity + + settings = get_project_settings(os.getenv("AVALON_PROJECT")) + reviewable_subset_filter = (settings["deadline"] + ["publish"] + ["ProcessSubmittedJobOnFarm"] + ["aov_filter"].get(self.hosts[0])) + for aov_pattern in reviewable_subset_filter: + if re.match(aov_pattern, instance.subset): + instance.families.append("review") + instance.review = True + break + + return instance + + def _debug_log(self, instance): + def _default_json(value): + return str(value) + + import json + self.log.info( + json.dumps(instance.data, indent=4, default=_default_json) + ) diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py new file mode 100644 index 0000000000..56dc884634 --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -0,0 +1,47 @@ +import json +import pyblish.api +from openpype.hosts.aftereffects.api import get_stub, list_instances + + +class PreCollectRender(pyblish.api.ContextPlugin): + """ + Checks if render instance is of new type, adds to families to both + existing collectors work same way. 
+ """ + + label = "PreCollect Render" + order = pyblish.api.CollectorOrder + 0.400 + hosts = ["aftereffects"] + + family_remapping = { + "render": ("render.farm", "farm"), # (family, label) + "renderLocal": ("render", "local") + } + + def process(self, context): + if context.data.get("newPublishing"): + self.log.debug("Not applicable for New Publisher, skip") + return + + stub = get_stub() + for inst in list_instances(): + if inst["family"] not in self.family_remapping.keys(): + continue + + if not inst["members"]: + raise ValueError("Couldn't find id, unable to publish. " + + "Please recreate instance.") + + instance = context.create_instance(inst["subset"]) + inst["families"] = [self.family_remapping[inst["family"]]] + instance.data.update(inst) + + self._debug_log(instance) + + def _debug_log(self, instance): + def _default_json(value): + return str(value) + + self.log.info( + json.dumps(instance.data, indent=4, default=_default_json) + ) From c189725f3fdd7babae5709b70fd61708ae67bd91 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:34:27 +0100 Subject: [PATCH 009/244] OP-2765 - missed update for imprint --- .../aftereffects/plugins/publish/validate_instance_asset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py index 71c1750457..3019719947 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -27,7 +27,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): data = stub.read(instance[0]) data["asset"] = api.Session["AVALON_ASSET"] - stub.imprint(instance[0], data) + stub.imprint(instance[0].instance_id, data) class ValidateInstanceAsset(pyblish.api.InstancePlugin): From 7967496b5c64c3e1a5c126de7c0a3f90dd3e81f5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Mar 2022 19:34:52 +0100 Subject: [PATCH 010/244] OP-2765 - added CreatorError to pipeline api --- openpype/pipeline/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index e968df4011..2b7a39d444 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -4,7 +4,8 @@ from .create import ( BaseCreator, Creator, AutoCreator, - CreatedInstance + CreatedInstance, + CreatorError ) from .publish import ( @@ -21,6 +22,7 @@ __all__ = ( "Creator", "AutoCreator", "CreatedInstance", + "CreatorError", "PublishValidationError", "KnownPublishError", From 4434a4b1888f65a55aa86a365d186aabb6ec69cf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 15:44:19 +0100 Subject: [PATCH 011/244] OP-2765 - added default to Setting for subset name of workfile in AE --- openpype/settings/defaults/project_settings/global.json | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index f08bee8b2d..71c837659e 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -268,6 +268,7 @@ "workfile" ], "hosts": [ + "aftereffects", "tvpaint" ], "task_types": [], From e24ef3a9eba62a9dbcae252dcf70d9608145724b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 16:32:16 +0100 Subject: [PATCH 012/244] OP-2765 - added workfile creator and modified collector 
Workfile collector shouldn't create new isntance for NP, but should update version --- .../plugins/create/workfile_creator.py | 75 +++++++++++++++++++ .../plugins/publish/collect_workfile.py | 33 ++++---- 2 files changed, 94 insertions(+), 14 deletions(-) create mode 100644 openpype/hosts/aftereffects/plugins/create/workfile_creator.py diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py new file mode 100644 index 0000000000..2d9d42ee8c --- /dev/null +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -0,0 +1,75 @@ +from avalon import io + +import openpype.hosts.aftereffects.api as api +from openpype.pipeline import ( + AutoCreator, + CreatedInstance +) + + +class AEWorkfileCreator(AutoCreator): + identifier = "workfile" + family = "workfile" + + def get_instance_attr_defs(self): + return [] + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + subset_name = instance_data["subset"] + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + # nothing to change on workfiles + pass + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.family == self.family: + existing_instance = instance + break + + variant = '' + project_name = io.Session["AVALON_PROJECT"] + asset_name = io.Session["AVALON_ASSET"] + task_name = io.Session["AVALON_TASK"] + host_name = io.Session["AVALON_APP"] + + if existing_instance is None: + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update(self.get_dynamic_data( + variant, task_name, asset_doc, project_name, host_name + )) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + + api.get_stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + + elif ( + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + existing_instance["asset"] = asset_name + existing_instance["task"] = task_name diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 61c4897cae..29ec3a64e6 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -10,10 +10,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): + create_instance = True for instance in context: if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, skipping") - return + self.log.debug("Workfile instance found, do not create new") + create_instance = False task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] @@ -44,20 +45,24 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" subset = family + 
task.capitalize() - # Create instance - instance = context.create_instance(subset) - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() - }) + if create_instance: # old publish + # Create instance + instance = context.create_instance(subset) - # adding basic script data - instance.data.update(shared_instance_data) + # creating instance data + instance.data.update({ + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() + }) + + # adding basic script data + instance.data.update(shared_instance_data) + else: + instance.data.update({"version": version}) # creating representation representation = { From 97b9b035db68132f22e4d48874a02ad5bf76c9af Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 17:54:21 +0100 Subject: [PATCH 013/244] OP-2765 - added helper logging function --- .../aftereffects/plugins/publish/collect_render.py | 13 +------------ .../plugins/publish/collect_workfile.py | 9 +++------ openpype/lib/__init__.py | 3 ++- openpype/lib/log.py | 12 ++++++++++++ 4 files changed, 18 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 1ad3d3dd18..b41fb5d5f5 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -59,7 +59,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): family = inst.data["family"] if family != "render": continue - self._debug_log(inst) item_id = inst.data["members"][0] @@ -127,12 +126,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): is_local = "renderLocal" in inst.data["families"] if inst.data.get("creator_attributes"): - is_local = inst.data["creator_attributes"].get("farm") + is_local = not inst.data["creator_attributes"].get("farm") if is_local: # for local renders instance = self._update_for_local(instance, project_entity) - self.log.info("New instance:: {}".format(instance)) instances.append(instance) instances_to_remove.append(inst) @@ -224,12 +222,3 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): break return instance - - def _debug_log(self, instance): - def _default_json(value): - return str(value) - - import json - self.log.info( - json.dumps(instance.data, indent=4, default=_default_json) - ) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 29ec3a64e6..d8a324f828 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,6 +2,8 @@ import os from avalon import api import pyblish.api +from openpype.lib import debug_log_instance + class CollectWorkfile(pyblish.api.ContextPlugin): """ Adds the AE render instances """ @@ -61,8 +63,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # adding basic script data instance.data.update(shared_instance_data) - else: - instance.data.update({"version": version}) # creating representation representation = { @@ -74,7 +74,4 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data["representations"].append(representation) - self.log.info('Publishing After Effects workfile') - - for i in context: - self.log.debug(f"{i.data['families']}") + 
debug_log_instance(self.log, "Workfile instance", instance) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 6a24f30455..fb7afe7cb3 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -35,7 +35,7 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger, timeit +from .log import PypeLogger, timeit, debug_log_instance from .path_templates import ( merge_dict, @@ -313,6 +313,7 @@ __all__ = [ "OpenPypeMongoConnection", "timeit", + "debug_log_instance", "is_overlapping_otio_ranges", "otio_range_with_handles", diff --git a/openpype/lib/log.py b/openpype/lib/log.py index a42faef008..7824e96159 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -23,6 +23,7 @@ import time import traceback import threading import copy +import json from . import Terminal from .mongo import ( @@ -493,3 +494,14 @@ def timeit(method): print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) return result return timed + + +def debug_log_instance(logger, msg, instance): + """Helper function to write instance.data as json""" + def _default_json(value): + return str(value) + + logger.debug(msg) + logger.debug( + json.dumps(instance.data, indent=4, default=_default_json) + ) From 9065530eefdc98daf604d282f9f49e16614bcd0d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 18:20:36 +0100 Subject: [PATCH 014/244] OP-2765 - fixed wrong assignment of representations to instances --- .../aftereffects/plugins/publish/collect_workfile.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index d8a324f828..1bb476d80b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -12,11 +12,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): - create_instance = True + existing_instance = None for instance in context: if instance.data["family"] == "workfile": - self.log.debug("Workfile instance found, do not create new") - create_instance = False + self.log.debug("Workfile instance found, won't create new") + existing_instance = instance + break task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] @@ -47,8 +48,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" subset = family + task.capitalize() - - if create_instance: # old publish + if existing_instance is None: # old publish # Create instance instance = context.create_instance(subset) @@ -63,6 +63,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # adding basic script data instance.data.update(shared_instance_data) + else: + instance = existing_instance # creating representation representation = { From 7b9ec117e7a32dd34d634d3a6d9ecaca54bb983f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Mar 2022 19:02:05 +0100 Subject: [PATCH 015/244] OP-2765 - add fallback to uuid for backward compatibility --- openpype/hosts/aftereffects/api/pipeline.py | 2 +- openpype/hosts/aftereffects/api/ws_stub.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 550ff25886..4ae88e649a 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ 
-247,7 +247,7 @@ def remove_instance(instance): if not stub: return - inst_id = instance.get("instance_id") + inst_id = instance.get("instance_id") or instance.get("uuid") # legacy if not inst_id: log.warning("No instance identifier for {}".format(instance)) return diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 18852d3d6c..1d3b69e038 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -321,7 +321,8 @@ class AfterEffectsServerStub(): cleaned_data = [] for instance in self.get_metadata(): - if instance.get("instance_id") != instance_id: + inst_id = instance.get("instance_id") or instance.get("uuid") + if inst_id != instance_id: cleaned_data.append(instance) payload = json.dumps(cleaned_data, indent=4) From 0e050d37e91d7730985cfae6d1eed62e97dd915b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:30:24 +0100 Subject: [PATCH 016/244] OP-2765 - fix legacy handling when creating --- .../plugins/create/create_render.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index c290bd46c3..0a907a02d8 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,4 +1,5 @@ -import json +import avalon.api + from openpype import resources import openpype.hosts.aftereffects.api as api from openpype.pipeline import ( @@ -22,7 +23,9 @@ class RenderCreator(Creator): def collect_instances(self): for instance_data in api.list_instances(): - creator_id = instance_data.get("creator_identifier") + # legacy instances have family=='render' or 'renderLocal', use them + creator_id = (instance_data.get("creator_identifier") or + instance_data.get("family").replace("Local", '')) if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) instance = CreatedInstance.from_existing( @@ -32,22 +35,16 @@ class RenderCreator(Creator): def update_instances(self, update_list): created_inst, changes = update_list[0] - print("RenderCreator update_list:: {}-{}".format(created_inst, changes)) api.get_stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) def remove_instances(self, instances): for instance in instances: - print("instance:: {}".format(instance)) api.remove_instance(instance) self._remove_instance_from_context(instance) def create(self, subset_name, data, pre_create_data): - print("Data that can be used in create:\n{}".format( - json.dumps(pre_create_data, indent=4) - )) stub = api.get_stub() # only after After Effects is up - print("pre_create_data:: {}".format(pre_create_data)) if pre_create_data.get("use_selection"): items = stub.get_selected_items( comps=True, folders=False, footages=False @@ -59,7 +56,6 @@ class RenderCreator(Creator): raise CreatorError( "Please select only single composition at time." ) - print("items:: {}".format(items)) if not items: raise CreatorError(( "Nothing to create. 
Select composition " @@ -73,7 +69,6 @@ class RenderCreator(Creator): api.get_stub().imprint(new_instance.get("instance_id"), new_instance.data_to_store()) - self.log.info(new_instance.data) self._add_instance_to_context(new_instance) def get_default_variants(self): @@ -99,12 +94,20 @@ class RenderCreator(Creator): def _handle_legacy(self, instance_data): """Converts old instances to new format.""" + if not instance_data.get("members"): + instance_data["members"] = [instance_data.get("uuid")] + if instance_data.get("uuid"): - instance_data["item_id"] = instance_data.get("uuid") + # uuid not needed, replaced with unique instance_id + api.get_stub().remove_instance(instance_data.get("uuid")) instance_data.pop("uuid") - if not instance_data.get("members"): - instance_data["members"] = [instance_data["item_id"]] + if not instance_data.get("task"): + instance_data["task"] = avalon.api.Session.get("AVALON_TASK") + + if not instance_data.get("creator_attributes"): + is_old_farm = instance_data["family"] != "renderLocal" + instance_data["creator_attributes"] = {"farm": is_old_farm} + instance_data["family"] = self.family return instance_data - From ca0a38f8de82e488e9353d1f1117a4e60620e41f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:32:26 +0100 Subject: [PATCH 017/244] OP-2765 - fixed exclude filter to user family or families properly Added render.farm to excluded, as in NP family is always 'render' --- openpype/plugins/publish/integrate_new.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 6e0940d459..581902205f 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -103,7 +103,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "hda", "usd" ] - exclude_families = ["clip"] + exclude_families = ["clip", "render.farm"] db_representation_context_keys = [ "project", "asset", "task", "subset", "version", "representation", "family", "hierarchy", "task", "username" @@ -121,11 +121,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_grouping_profiles = None def process(self, instance): - self.integrated_file_sizes = {} - if [ef for ef in self.exclude_families - if instance.data["family"] in ef]: - return + for ef in self.exclude_families: + if ( + instance.data["family"] == ef or + ef in instance.data["families"]): + self.log.debug("Excluded family '{}' in '{}' or {}".format( + ef, instance.data["family"], instance.data["families"])) + return + self.integrated_file_sizes = {} try: self.register(instance) self.log.info("Integrated Asset in to the database ...") @@ -214,7 +218,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Ensure at least one file is set up for transfer in staging dir. 
repres = instance.data.get("representations") - assert repres, "Instance has no files to transfer" + repres = instance.data.get("representations") + msg = "Instance {} has no files to transfer".format( + instance.data["family"]) + assert repres, msg assert isinstance(repres, (list, tuple)), ( "Instance 'files' must be a list, got: {0} {1}".format( str(type(repres)), str(repres) From 296a2d162704b9ca0c1974d4b8093fe698760d6b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:34:12 +0100 Subject: [PATCH 018/244] OP-2765 - added publish flag to new instance of workfile --- openpype/hosts/aftereffects/plugins/publish/collect_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 1bb476d80b..67f037e6e6 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -65,6 +65,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data.update(shared_instance_data) else: instance = existing_instance + instance.data["publish"] = True # for DL # creating representation representation = { From 2d9bac166a466f8489e38997ec440c6f23476f26 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:35:02 +0100 Subject: [PATCH 019/244] OP-2765 - modified proper families renderLocal is legacy, should be removed in the future --- .../hosts/aftereffects/plugins/publish/extract_local_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py index b738068a7b..7323a0b125 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py @@ -12,7 +12,7 @@ class ExtractLocalRender(openpype.api.Extractor): order = openpype.api.Extractor.order - 0.47 label = "Extract Local Render" hosts = ["aftereffects"] - families = ["render"] + families = ["renderLocal", "render.local"] def process(self, instance): stub = get_stub() From bf51f8452b8e2410d049f63389e3179bec31b600 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:40:41 +0100 Subject: [PATCH 020/244] OP-2765 - modified collect render plugin Should handle both legacy and new style of publishing --- .../hosts/aftereffects/plugins/publish/collect_render.py | 8 +++++--- .../aftereffects/plugins/publish/pre_collect_render.py | 9 +++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index b41fb5d5f5..d31571b6b5 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -84,7 +84,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): subset_name = inst.data["subset"] instance = AERenderInstance( family=family, - families=[family], version=version, time="", source=current_file, @@ -124,19 +123,20 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): instance.comp_name = comp.name instance.comp_id = item_id - is_local = "renderLocal" in inst.data["families"] + is_local = "renderLocal" in inst.data["families"] # legacy if inst.data.get("creator_attributes"): is_local = not 
inst.data["creator_attributes"].get("farm") if is_local: # for local renders instance = self._update_for_local(instance, project_entity) + else: + instance.families = ["render.farm"] instances.append(instance) instances_to_remove.append(inst) for instance in instances_to_remove: context.remove(instance) - return instances def get_expected_files(self, render_instance): @@ -205,10 +205,12 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): return base_dir def _update_for_local(self, instance, project_entity): + """Update old saved instances to current publishing format""" instance.anatomyData["version"] = instance.version instance.anatomyData["subset"] = instance.subset instance.stagingDir = tempfile.mkdtemp() instance.projectEntity = project_entity + instance.families = ["render.local"] settings = get_project_settings(os.getenv("AVALON_PROJECT")) reviewable_subset_filter = (settings["deadline"] diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 56dc884634..614a04b4b7 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -1,12 +1,14 @@ import json import pyblish.api -from openpype.hosts.aftereffects.api import get_stub, list_instances +from openpype.hosts.aftereffects.api import list_instances class PreCollectRender(pyblish.api.ContextPlugin): """ - Checks if render instance is of new type, adds to families to both + Checks if render instance is of old type, adds to families to both existing collectors work same way. + + Could be removed in the future when no one uses old publish. """ label = "PreCollect Render" @@ -15,7 +17,7 @@ class PreCollectRender(pyblish.api.ContextPlugin): family_remapping = { "render": ("render.farm", "farm"), # (family, label) - "renderLocal": ("render", "local") + "renderLocal": ("render.local", "local") } def process(self, context): @@ -23,7 +25,6 @@ class PreCollectRender(pyblish.api.ContextPlugin): self.log.debug("Not applicable for New Publisher, skip") return - stub = get_stub() for inst in list_instances(): if inst["family"] not in self.family_remapping.keys(): continue From 9e3ea9139a06ad3cc495f8d0c43eb64a7eff8260 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 13:58:20 +0100 Subject: [PATCH 021/244] OP-2765 - Hound --- openpype/hosts/aftereffects/api/pipeline.py | 2 +- .../aftereffects/plugins/create/create_legacy_local_render.py | 2 +- .../hosts/aftereffects/plugins/create/create_legacy_render.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 4ae88e649a..4ade90e4dd 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -152,7 +152,7 @@ def check_inventory(): # Warn about outdated containers. 
print("Starting new QApplication..") - app = QtWidgets.QApplication(sys.argv) + _app = QtWidgets.QApplication(sys.argv) message_box = QtWidgets.QMessageBox() message_box.setIcon(QtWidgets.QMessageBox.Warning) diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py index 4fb07f31f8..04413acbcf 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_local_render.py @@ -10,4 +10,4 @@ class CreateLocalRender(create_legacy_render.CreateRender): name = "renderDefault" label = "Render Locally" - family = "renderLocal" \ No newline at end of file + family = "renderLocal" diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py index 7da489a731..8dfc85cdc8 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py @@ -59,4 +59,4 @@ class CreateRender(openpype.api.Creator): stub.imprint(item, self.data) stub.set_label_color(item.id, 14) # Cyan options 0 - 16 - stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) \ No newline at end of file + stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"]) From 3b72117a946d15954112b77107d04f325d30c0a3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Mar 2022 19:11:55 +0100 Subject: [PATCH 022/244] OP-2765 - refactored validator --- .../publish/validate_scene_settings.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py index 273ccd295e..0753e3c09a 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py @@ -62,12 +62,13 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): expected_settings = get_asset_settings() self.log.info("config from DB::{}".format(expected_settings)) - if any(re.search(pattern, os.getenv('AVALON_TASK')) + task_name = instance.data["anatomyData"]["task"]["name"] + if any(re.search(pattern, task_name) for pattern in self.skip_resolution_check): expected_settings.pop("resolutionWidth") expected_settings.pop("resolutionHeight") - if any(re.search(pattern, os.getenv('AVALON_TASK')) + if any(re.search(pattern, task_name) for pattern in self.skip_timelines_check): expected_settings.pop('fps', None) expected_settings.pop('frameStart', None) @@ -87,10 +88,14 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): duration = instance.data.get("frameEndHandle") - \ instance.data.get("frameStartHandle") + 1 - self.log.debug("filtered config::{}".format(expected_settings)) + self.log.debug("validated items::{}".format(expected_settings)) current_settings = { "fps": fps, + "frameStart": instance.data.get("frameStart"), + "frameEnd": instance.data.get("frameEnd"), + "handleStart": instance.data.get("handleStart"), + "handleEnd": instance.data.get("handleEnd"), "frameStartHandle": instance.data.get("frameStartHandle"), "frameEndHandle": instance.data.get("frameEndHandle"), "resolutionWidth": instance.data.get("resolutionWidth"), @@ -103,24 +108,22 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): invalid_keys = set() for key, value in 
expected_settings.items(): if value != current_settings[key]: - invalid_settings.append( - "{} expected: {} found: {}".format(key, value, - current_settings[key]) - ) + msg = "'{}' expected: '{}' found: '{}'".format( + key, value, current_settings[key]) + + if key == "duration" and expected_settings.get("handleStart"): + msg += "Handles included in calculation. Remove " \ + "handles in DB or extend frame range in " \ + "Composition Setting." + + invalid_settings.append(msg) invalid_keys.add(key) - if ((expected_settings.get("handleStart") - or expected_settings.get("handleEnd")) - and invalid_settings): - msg = "Handles included in calculation. Remove handles in DB " +\ - "or extend frame range in Composition Setting." - invalid_settings[-1]["reason"] = msg - - msg = "Found invalid settings:\n{}".format( - "\n".join(invalid_settings) - ) - if invalid_settings: + msg = "Found invalid settings:\n{}".format( + "\n".join(invalid_settings) + ) + invalid_keys_str = ",".join(invalid_keys) break_str = "
" invalid_setting_str = "Found invalid settings:
{}".\ From 84b6a6cc6949ea849376f410417c9198a92a9241 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 10:20:58 +0100 Subject: [PATCH 023/244] OP-2868 - added configuration for default variant value to Settings --- .../plugins/create/create_render.py | 16 +++++++++---- .../project_settings/aftereffects.json | 7 ++++++ .../schema_project_aftereffects.json | 23 +++++++++++++++++++ 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 0a907a02d8..e690af63d0 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -18,6 +18,16 @@ class RenderCreator(Creator): create_allow_context_change = False + def __init__( + self, create_context, system_settings, project_settings, headless=False + ): + super(RenderCreator, self).__init__(create_context, system_settings, + project_settings, headless) + self._default_variants = (project_settings["aftereffects"] + ["create"] + ["RenderCreator"] + ["defaults"]) + def get_icon(self): return resources.get_openpype_splash_filepath() @@ -72,11 +82,7 @@ class RenderCreator(Creator): self._add_instance_to_context(new_instance) def get_default_variants(self): - return [ - "myVariant", - "variantTwo", - "different_variant" - ] + return self._default_variants def get_instance_attr_defs(self): return [lib.BoolDef("farm", label="Render on farm")] diff --git a/openpype/settings/defaults/project_settings/aftereffects.json b/openpype/settings/defaults/project_settings/aftereffects.json index 6a9a399069..8083aa0972 100644 --- a/openpype/settings/defaults/project_settings/aftereffects.json +++ b/openpype/settings/defaults/project_settings/aftereffects.json @@ -1,4 +1,11 @@ { + "create": { + "RenderCreator": { + "defaults": [ + "Main" + ] + } + }, "publish": { "ValidateSceneSettings": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json index 4c4cd225ab..1a3eaef540 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json @@ -5,6 +5,29 @@ "label": "AfterEffects", "is_file": true, "children": [ + { + "type": "dict", + "collapsible": true, + "key": "create", + "label": "Creator plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "RenderCreator", + "label": "Create render", + "children": [ + { + "type": "list", + "key": "defaults", + "label": "Default Variants", + "object_type": "text", + "docstring": "Fill default variant(s) (like 'Main' or 'Default') used in subset name creation." 
+ } + ] + } + ] + }, { "type": "dict", "collapsible": true, From 87d114a272cac020f1a482b6209ad01a9907ba01 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 10:49:43 +0100 Subject: [PATCH 024/244] OP-2765 - added error message when creating same subset --- openpype/hosts/aftereffects/plugins/create/create_render.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 0a907a02d8..e75353c7a5 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -63,6 +63,11 @@ class RenderCreator(Creator): "one composition." )) + for inst in self.create_context.instances: + if subset_name == inst.subset_name: + raise CreatorError("{} already exists".format( + inst.subset_name)) + data["members"] = [items[0].id] new_instance = CreatedInstance(self.family, subset_name, data, self) new_instance.creator_attributes["farm"] = pre_create_data["farm"] From 32f015098b95d7953d94d878f32afbd4022a18df Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 11:08:51 +0100 Subject: [PATCH 025/244] OP-2765 - reimplemented get_context_title --- openpype/hosts/aftereffects/api/pipeline.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 4ade90e4dd..38ab2225bf 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -273,7 +273,12 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" - return "AfterEffects" + import avalon.api + + project_name = avalon.api.Session["AVALON_PROJECT"] + asset_name = avalon.api.Session["AVALON_ASSET"] + task_name = avalon.api.Session["AVALON_TASK"] + return "{}/{}/{}".format(project_name, asset_name, task_name) def _get_stub(): From 56e2121e308f6bdf7e1551336ae3c28104920775 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 15:23:46 +0100 Subject: [PATCH 026/244] OP-2765 - fix local rendering in old publish --- openpype/hosts/aftereffects/plugins/publish/collect_render.py | 4 ++-- .../hosts/aftereffects/plugins/publish/pre_collect_render.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index d31571b6b5..43efd34635 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -57,7 +57,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): compositions_by_id = {item.id: item for item in compositions} for inst in context: family = inst.data["family"] - if family != "render": + if family not in ["render", "renderLocal"]: # legacy continue item_id = inst.data["members"][0] @@ -123,7 +123,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): instance.comp_name = comp.name instance.comp_id = item_id - is_local = "renderLocal" in inst.data["families"] # legacy + is_local = "renderLocal" in inst.data["family"] # legacy if inst.data.get("creator_attributes"): is_local = not inst.data["creator_attributes"].get("farm") if is_local: diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py 
b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 614a04b4b7..3e84753555 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -34,7 +34,7 @@ class PreCollectRender(pyblish.api.ContextPlugin): "Please recreate instance.") instance = context.create_instance(inst["subset"]) - inst["families"] = [self.family_remapping[inst["family"]]] + inst["families"] = [self.family_remapping[inst["family"]][0]] instance.data.update(inst) self._debug_log(instance) From ec9b4802f40d6fe1d3dd02ab1195bace33ef0c82 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 16:07:18 +0100 Subject: [PATCH 027/244] OP-2765 - trigger failure when new instance tried to be published by Pyblish This could happen if artist try to switch between old Pyblish and New Publish --- .../hosts/aftereffects/plugins/publish/pre_collect_render.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 3e84753555..46bb9865b9 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -26,6 +26,10 @@ class PreCollectRender(pyblish.api.ContextPlugin): return for inst in list_instances(): + if inst.get("creator_attributes"): + raise ValueError("Instance created in New publisher, " + "cannot be published in Pyblish") + if inst["family"] not in self.family_remapping.keys(): continue From a5c38a8b2f19d24c55c2be564ab701f68f886c36 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 16:24:59 +0100 Subject: [PATCH 028/244] OP-2765 - added new label for families In the future they will be both merged to render.farm (when Harmony is updated to New Publisher). --- openpype/lib/abstract_collect_render.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 3839aad45d..e160f5a040 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -138,7 +138,9 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): try: if "workfile" in instance.data["families"]: instance.data["publish"] = True - if "renderFarm" in instance.data["families"]: + # TODO merge renderFarm and render.farm + if ("renderFarm" in instance.data["families"] or + "render.farm" in instance.data["families"]): instance.data["remove"] = True except KeyError: # be tolerant if 'families' is missing. 
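For context on the two collector changes above: an instance now counts as a farm render when either the legacy "renderFarm" label or the new "render.farm" label appears in its families. A minimal sketch of the combined check, with an illustrative helper name and data layout that are not part of this patch:

    # Sketch only: mirrors the family/families checks used by the collectors.
    # "renderFarm" is the legacy label, "render.farm" the New Publisher one.
    def is_farm_render(instance_data):
        families = instance_data.get("families") or []
        return "renderFarm" in families or "render.farm" in families

    # The abstract collector flags such instances so they are dropped from
    # local processing, roughly:
    #   if is_farm_render(instance.data):
    #       instance.data["remove"] = True
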
From 3b9e319de27548a935b2aaba2064193a674fdd88 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 16:26:37 +0100 Subject: [PATCH 029/244] OP-2765 - fixed resolution between local and farm --- .../hosts/aftereffects/plugins/publish/collect_render.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 43efd34635..aa5bc58ac2 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -84,6 +84,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): subset_name = inst.data["subset"] instance = AERenderInstance( family=family, + families=inst.data.get("families", []), version=version, time="", source=current_file, @@ -130,7 +131,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): # for local renders instance = self._update_for_local(instance, project_entity) else: - instance.families = ["render.farm"] + fam = "render.farm" + if fam not in instance.families: + instance.families.append(fam) instances.append(instance) instances_to_remove.append(inst) @@ -210,7 +213,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): instance.anatomyData["subset"] = instance.subset instance.stagingDir = tempfile.mkdtemp() instance.projectEntity = project_entity - instance.families = ["render.local"] + fam = "render.local" + if fam not in instance.families: + instance.families.append(fam) settings = get_project_settings(os.getenv("AVALON_PROJECT")) reviewable_subset_filter = (settings["deadline"] From d4f50e2abdf55fed0c12f439062c75b5c780a7e3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:10:18 +0100 Subject: [PATCH 030/244] OP-2765 - fix imports for legacy farm creator --- .../aftereffects/plugins/create/create_legacy_render.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py index 8dfc85cdc8..e4fbb47a33 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_legacy_render.py @@ -1,13 +1,12 @@ -from avalon.api import CreatorError - -import openpype.api +from openpype.pipeline import create +from openpype.pipeline import CreatorError from openpype.hosts.aftereffects.api import ( get_stub, list_instances ) -class CreateRender(openpype.api.Creator): +class CreateRender(create.LegacyCreator): """Render folder for publish. 
Creates subsets in format 'familyTaskSubsetname', @@ -23,6 +22,7 @@ class CreateRender(openpype.api.Creator): def process(self): stub = get_stub() # only after After Effects is up + items = [] if (self.options or {}).get("useSelection"): items = stub.get_selected_items( comps=True, folders=False, footages=False From a15552f878a0aab7ecfa37053ea2b646161cd37b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:10:42 +0100 Subject: [PATCH 031/244] OP-2765 - fix imports for new creator --- .../hosts/aftereffects/plugins/create/create_render.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index e75353c7a5..1a5a826137 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,7 +1,7 @@ -import avalon.api +from avalon import api as avalon_api from openpype import resources -import openpype.hosts.aftereffects.api as api +from openpype.hosts.aftereffects import api from openpype.pipeline import ( Creator, CreatedInstance, @@ -25,7 +25,7 @@ class RenderCreator(Creator): for instance_data in api.list_instances(): # legacy instances have family=='render' or 'renderLocal', use them creator_id = (instance_data.get("creator_identifier") or - instance_data.get("family").replace("Local", '')) + instance_data.get("family", '').replace("Local", '')) if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) instance = CreatedInstance.from_existing( @@ -108,7 +108,7 @@ class RenderCreator(Creator): instance_data.pop("uuid") if not instance_data.get("task"): - instance_data["task"] = avalon.api.Session.get("AVALON_TASK") + instance_data["task"] = avalon_api.Session.get("AVALON_TASK") if not instance_data.get("creator_attributes"): is_old_farm = instance_data["family"] != "renderLocal" From 60edd3abe6bf52271d7f1d84635f0be482d31c65 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:13:35 +0100 Subject: [PATCH 032/244] OP-2765 - added functionality to store/retrive context data These data is used for context publish information, for example storing enabling/disabling of validators. Currently not present in AE. 
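A rough usage sketch of the round trip these helpers are meant to provide; the payload key is illustrative, and the import path assumes the helpers are re-exported from the host api package, while the function names, their signatures and the "publish_context" id come from this patch:

    # Sketch only: round-tripping context-level publish settings through
    # the AfterEffects document metadata.
    from openpype.hosts.aftereffects.api import (
        get_context_data,
        update_context_data,
    )

    data = get_context_data()              # {} when nothing was stored yet
    data["validators_enabled"] = False     # illustrative payload key
    update_context_data(data, changes={})

    # A later session reads the same values back; internally they are kept
    # in the metadata stream under an item with id "publish_context".
    assert get_context_data().get("validators_enabled") is False
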
--- openpype/hosts/aftereffects/api/pipeline.py | 22 +++++++++++++-------- openpype/hosts/aftereffects/api/ws_stub.py | 10 ++++++---- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 38ab2225bf..978d035020 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -9,6 +9,7 @@ from avalon import io, pipeline from openpype import lib from openpype.api import Logger +from openpype.pipeline import LegacyCreator import openpype.hosts.aftereffects from openpype.pipeline import BaseCreator @@ -34,7 +35,7 @@ def install(): pyblish.api.register_plugin_path(PUBLISH_PATH) avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) log.info(PUBLISH_PATH) @@ -48,7 +49,7 @@ def install(): def uninstall(): pyblish.api.deregister_plugin_path(PUBLISH_PATH) avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH) def application_launch(): @@ -223,10 +224,8 @@ def list_instances(): layers_meta = stub.get_metadata() for instance in layers_meta: - if instance.get("schema") and \ - "container" in instance.get("schema"): - continue - instances.append(instance) + if instance.get("id") == "pyblish.avalon.instance": + instances.append(instance) return instances @@ -263,12 +262,19 @@ def remove_instance(instance): # new publisher section def get_context_data(): - print("get_context_data") + meta = _get_stub().get_metadata() + for item in meta: + if item.get("id") == "publish_context": + item.pop("id") + return item + return {} def update_context_data(data, changes): - print("update_context_data") + item = data + item["id"] = "publish_context" + _get_stub().imprint(item["id"], item) def get_context_title(): diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 1d3b69e038..d2dc40ec89 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -155,10 +155,12 @@ class AfterEffectsServerStub(): item_ids = [int(item.id) for item in all_items] cleaned_data = [] for meta in result_meta: - # for creation of instance OR loaded container - if 'instance' in meta.get('id') or \ - int(meta.get('members')[0]) in item_ids: - cleaned_data.append(meta) + # do not added instance with nonexistend item id + if meta.get("members"): + if int(meta["members"][0]) not in item_ids: + continue + + cleaned_data.append(meta) payload = json.dumps(cleaned_data, indent=4) From 3b4f96efa601351bb894f64a6e3d2d2e2c55d88b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 10 Mar 2022 15:19:42 +0100 Subject: [PATCH 033/244] OP-2765 - more explicit error message --- .../hosts/aftereffects/plugins/publish/pre_collect_render.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py index 46bb9865b9..03ec184524 100644 --- a/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/pre_collect_render.py @@ -28,7 +28,9 @@ class PreCollectRender(pyblish.api.ContextPlugin): 
for inst in list_instances(): if inst.get("creator_attributes"): raise ValueError("Instance created in New publisher, " - "cannot be published in Pyblish") + "cannot be published in Pyblish.\n" + "Please publish in New Publisher " + "or recreate instances with legacy Creators") if inst["family"] not in self.family_remapping.keys(): continue From 65b00455614cadd5f279fcfdd37c41f976697c99 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 17:31:57 +0100 Subject: [PATCH 034/244] OP-2766 - fixed not working self.log in New Publisher --- openpype/pipeline/create/creator_plugins.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 1ac2c420a2..f05b132fc6 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -69,7 +69,9 @@ class BaseCreator: @property def log(self): if self._log is None: - self._log = logging.getLogger(self.__class__.__name__) + from openpype.api import Logger + + self._log = Logger.get_logger(self.__class__.__name__) return self._log def _add_instance_to_context(self, instance): From a71dad4608e0be4a91c75769e5edf6722f52f9ff Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 17:35:17 +0100 Subject: [PATCH 035/244] OP-2766 - implemented auto creator for PS Creates workfile instance, updated imprint function. --- openpype/hosts/photoshop/api/pipeline.py | 52 +++++++++---- openpype/hosts/photoshop/api/ws_stub.py | 33 +++++---- .../plugins/create/workfile_creator.py | 73 +++++++++++++++++++ 3 files changed, 131 insertions(+), 27 deletions(-) create mode 100644 openpype/hosts/photoshop/plugins/create/workfile_creator.py diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 1be8129aa1..0e3f1215aa 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -8,7 +8,7 @@ from avalon import pipeline, io from openpype.api import Logger from openpype.lib import register_event_callback -from openpype.pipeline import LegacyCreator +from openpype.pipeline import LegacyCreator, BaseCreator import openpype.hosts.photoshop from . import lib @@ -71,6 +71,7 @@ def install(): pyblish.api.register_plugin_path(PUBLISH_PATH) avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) + avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) log.info(PUBLISH_PATH) pyblish.api.register_callback( @@ -144,12 +145,9 @@ def list_instances(): layers_meta = stub.get_layers_metadata() if layers_meta: for key, instance in layers_meta.items(): - schema = instance.get("schema") - if schema and "container" in schema: - continue - - instance['uuid'] = key - instances.append(instance) + if instance.get("id") == "pyblish.avalon.instance": # TODO only this way? 
+ instance['uuid'] = key + instances.append(instance) return instances @@ -170,11 +168,18 @@ def remove_instance(instance): if not stub: return - stub.remove_instance(instance.get("uuid")) - layer = stub.get_layer(instance.get("uuid")) - if layer: - stub.rename_layer(instance.get("uuid"), - layer.name.replace(stub.PUBLISH_ICON, '')) + inst_id = instance.get("instance_id") or instance.get("uuid") # legacy + if not inst_id: + log.warning("No instance identifier for {}".format(instance)) + return + + stub.remove_instance(inst_id) + + if instance.get("members"): + item = stub.get_item(instance["members"][0]) + if item: + stub.rename_item(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) def _get_stub(): @@ -226,6 +231,27 @@ def containerise( "members": [str(layer.id)] } stub = lib.stub() - stub.imprint(layer, data) + stub.imprint(layer.id, data) return layer + + +def get_context_data(): + pass + + +def update_context_data(data, changes): + # item = data + # item["id"] = "publish_context" + # _get_stub().imprint(item["id"], item) + pass + + +def get_context_title(): + """Returns title for Creator window""" + import avalon.api + + project_name = avalon.api.Session["AVALON_PROJECT"] + asset_name = avalon.api.Session["AVALON_ASSET"] + task_name = avalon.api.Session["AVALON_TASK"] + return "{}/{}/{}".format(project_name, asset_name, task_name) \ No newline at end of file diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index 64d89f5420..a99f184080 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -27,6 +27,7 @@ class PSItem(object): members = attr.ib(factory=list) long_name = attr.ib(default=None) color_code = attr.ib(default=None) # color code of layer + instance_id = attr.ib(default=None) class PhotoshopServerStub: @@ -82,7 +83,7 @@ class PhotoshopServerStub: return layers_meta.get(str(layer.id)) - def imprint(self, layer, data, all_layers=None, layers_meta=None): + def imprint(self, item_id, data, all_layers=None, items_meta=None): """Save layer metadata to Headline field of active document Stores metadata in format: @@ -108,28 +109,29 @@ class PhotoshopServerStub: }] - for loaded instances Args: - layer (PSItem): + item_id (str): data(string): json representation for single layer all_layers (list of PSItem): for performance, could be injected for usage in loop, if not, single call will be triggered - layers_meta(string): json representation from Headline + items_meta(string): json representation from Headline (for performance - provide only if imprint is in loop - value should be same) Returns: None """ - if not layers_meta: - layers_meta = self.get_layers_metadata() + if not items_meta: + items_meta = self.get_layers_metadata() # json.dumps writes integer values in a dictionary to string, so # anticipating it here. - if str(layer.id) in layers_meta and layers_meta[str(layer.id)]: + item_id = str(item_id) + if item_id in items_meta.keys(): if data: - layers_meta[str(layer.id)].update(data) + items_meta[item_id].update(data) else: - layers_meta.pop(str(layer.id)) + items_meta.pop(item_id) else: - layers_meta[str(layer.id)] = data + items_meta[item_id] = data # Ensure only valid ids are stored. 
if not all_layers: @@ -137,12 +139,14 @@ class PhotoshopServerStub: layer_ids = [layer.id for layer in all_layers] cleaned_data = [] - for layer_id in layers_meta: - if int(layer_id) in layer_ids: - cleaned_data.append(layers_meta[layer_id]) + for item in items_meta.values(): + if item.get("members"): + if int(item["members"][0]) not in layer_ids: + continue + + cleaned_data.append(item) payload = json.dumps(cleaned_data, indent=4) - self.websocketserver.call( self.client.call('Photoshop.imprint', payload=payload) ) @@ -528,6 +532,7 @@ class PhotoshopServerStub: d.get('type'), d.get('members'), d.get('long_name'), - d.get("color_code") + d.get("color_code"), + d.get("instance_id") )) return ret diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py new file mode 100644 index 0000000000..d66a05cad7 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -0,0 +1,73 @@ +from avalon import io + +import openpype.hosts.photoshop.api as api +from openpype.pipeline import ( + AutoCreator, + CreatedInstance +) + + +class PSWorkfileCreator(AutoCreator): + identifier = "workfile" + family = "workfile" + + def get_instance_attr_defs(self): + return [] + + def collect_instances(self): + for instance_data in api.list_instances(): + creator_id = instance_data.get("creator_identifier") + if creator_id == self.identifier: + subset_name = instance_data["subset"] + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + self._add_instance_to_context(instance) + + def update_instances(self, update_list): + # nothing to change on workfiles + pass + + def create(self, options=None): + existing_instance = None + for instance in self.create_context.instances: + if instance.family == self.family: + existing_instance = instance + break + + variant = '' + project_name = io.Session["AVALON_PROJECT"] + asset_name = io.Session["AVALON_ASSET"] + task_name = io.Session["AVALON_TASK"] + host_name = io.Session["AVALON_APP"] + if existing_instance is None: + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update(self.get_dynamic_data( + variant, task_name, asset_doc, project_name, host_name + )) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + api.stub().imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + + elif ( + existing_instance["asset"] != asset_name + or existing_instance["task"] != task_name + ): + asset_doc = io.find_one({"type": "asset", "name": asset_name}) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + existing_instance["asset"] = asset_name + existing_instance["task"] = task_name From cdb2047ef7e205054f2c31fb6f336e259fa93d47 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Mar 2022 17:35:40 +0100 Subject: [PATCH 036/244] OP-2766 - renamed legacy creator --- .../plugins/create/create_legacy_image.py | 99 +++++++++++++++++++ 1 file changed, 99 insertions(+) create mode 100644 openpype/hosts/photoshop/plugins/create/create_legacy_image.py diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py new file mode 100644 index 0000000000..a001b5f171 
--- /dev/null +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -0,0 +1,99 @@ +from Qt import QtWidgets +from openpype.pipeline import create +from openpype.hosts.photoshop import api as photoshop + + +class CreateImage(create.LegacyCreator): + """Image folder for publish.""" + + name = "imageDefault" + label = "Image" + family = "image" + defaults = ["Main"] + + def process(self): + groups = [] + layers = [] + create_group = False + + stub = photoshop.stub() + if (self.options or {}).get("useSelection"): + multiple_instances = False + selection = stub.get_selected_layers() + self.log.info("selection {}".format(selection)) + if len(selection) > 1: + # Ask user whether to create one image or image per selected + # item. + msg_box = QtWidgets.QMessageBox() + msg_box.setIcon(QtWidgets.QMessageBox.Warning) + msg_box.setText( + "Multiple layers selected." + "\nDo you want to make one image per layer?" + ) + msg_box.setStandardButtons( + QtWidgets.QMessageBox.Yes | + QtWidgets.QMessageBox.No | + QtWidgets.QMessageBox.Cancel + ) + ret = msg_box.exec_() + if ret == QtWidgets.QMessageBox.Yes: + multiple_instances = True + elif ret == QtWidgets.QMessageBox.Cancel: + return + + if multiple_instances: + for item in selection: + if item.group: + groups.append(item) + else: + layers.append(item) + else: + group = stub.group_selected_layers(self.name) + groups.append(group) + + elif len(selection) == 1: + # One selected item. Use group if its a LayerSet (group), else + # create a new group. + if selection[0].group: + groups.append(selection[0]) + else: + layers.append(selection[0]) + elif len(selection) == 0: + # No selection creates an empty group. + create_group = True + else: + group = stub.create_group(self.name) + groups.append(group) + + if create_group: + group = stub.create_group(self.name) + groups.append(group) + + for layer in layers: + stub.select_layers([layer]) + group = stub.group_selected_layers(layer.name) + groups.append(group) + + creator_subset_name = self.data["subset"] + for group in groups: + long_names = [] + group.name = group.name.replace(stub.PUBLISH_ICON, ''). 
\ + replace(stub.LOADED_ICON, '') + + subset_name = creator_subset_name + if len(groups) > 1: + subset_name += group.name.title().replace(" ", "") + + if group.long_name: + for directory in group.long_name[::-1]: + name = directory.replace(stub.PUBLISH_ICON, '').\ + replace(stub.LOADED_ICON, '') + long_names.append(name) + + self.data.update({"subset": subset_name}) + self.data.update({"uuid": str(group.id)}) + self.data.update({"long_name": "_".join(long_names)}) + stub.imprint(group, self.data) + # reusing existing group, need to rename afterwards + if not create_group: + stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) From bfce93027ccd5ebbb227b7af80ba8d73c77f3453 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 15:00:17 +0100 Subject: [PATCH 037/244] Update openpype/hosts/aftereffects/plugins/create/create_render.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/aftereffects/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 1a5a826137..550fb6b0ef 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -72,7 +72,7 @@ class RenderCreator(Creator): new_instance = CreatedInstance(self.family, subset_name, data, self) new_instance.creator_attributes["farm"] = pre_create_data["farm"] - api.get_stub().imprint(new_instance.get("instance_id"), + api.get_stub().imprint(new_instance.id, new_instance.data_to_store()) self._add_instance_to_context(new_instance) From d3441215749e303311370a41a9c82aa934b6cfb0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 15:00:33 +0100 Subject: [PATCH 038/244] Update openpype/hosts/aftereffects/plugins/create/create_render.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/hosts/aftereffects/plugins/create/create_render.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 550fb6b0ef..88462667ed 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -70,7 +70,9 @@ class RenderCreator(Creator): data["members"] = [items[0].id] new_instance = CreatedInstance(self.family, subset_name, data, self) - new_instance.creator_attributes["farm"] = pre_create_data["farm"] + if "farm" in pre_create_data: + use_farm = pre_create_data["farm"] + new_instance.creator_attributes["farm"] = use_farm api.get_stub().imprint(new_instance.id, new_instance.data_to_store()) From bff1b77c0635493c3236f663c7a444eaf2d350e4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 16:17:46 +0100 Subject: [PATCH 039/244] OP-2766 - changed format of layer metadata Removing uuid, replaced with members[0] and instance_id. Layers metadata now returned as a list, not dictionary to follow AE implementation. 
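To make the migration concrete, the stored layer metadata changes shape roughly as follows; the field values are illustrative, the keys are the ones the code below reads and writes:

    # Legacy shape: a dict keyed by the layer (group) id, identified by "uuid".
    legacy_meta = {
        "53": {
            "id": "pyblish.avalon.instance",
            "family": "image",
            "subset": "imageMain",
            "uuid": "53",
        }
    }

    # New shape: a plain list; the layer id moves into "members" and each
    # instance carries its own "instance_id" generated by the New Publisher.
    new_meta = [
        {
            "id": "pyblish.avalon.instance",
            "family": "image",
            "subset": "imageMain",
            "members": ["53"],
            "instance_id": "<generated uuid4>",
        }
    ]
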
--- openpype/hosts/photoshop/api/pipeline.py | 3 +- openpype/hosts/photoshop/api/ws_stub.py | 60 ++++++++++++------------ 2 files changed, 32 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 0e3f1215aa..8d64942c9e 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -144,9 +144,8 @@ def list_instances(): instances = [] layers_meta = stub.get_layers_metadata() if layers_meta: - for key, instance in layers_meta.items(): + for instance in layers_meta: if instance.get("id") == "pyblish.avalon.instance": # TODO only this way? - instance['uuid'] = key instances.append(instance) return instances diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index a99f184080..dd29ef4e84 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -81,7 +81,11 @@ class PhotoshopServerStub: if layers_meta is None: layers_meta = self.get_layers_metadata() - return layers_meta.get(str(layer.id)) + for layer_meta in layers_meta: + if layer_meta.get("members"): + if layer.id == layer_meta["members"][0]: + return layer + print("Unable to find layer metadata for {}".format(layer.id)) def imprint(self, item_id, data, all_layers=None, items_meta=None): """Save layer metadata to Headline field of active document @@ -125,13 +129,21 @@ class PhotoshopServerStub: # json.dumps writes integer values in a dictionary to string, so # anticipating it here. item_id = str(item_id) - if item_id in items_meta.keys(): - if data: - items_meta[item_id].update(data) + is_new = True + result_meta = [] + for item_meta in items_meta: + if ((item_meta.get('members') and + item_id == str(item_meta.get('members')[0])) or + item_meta.get("instance_id") == item_id): + is_new = False + if data: + item_meta.update(data) + result_meta.append(item_meta) else: - items_meta.pop(item_id) - else: - items_meta[item_id] = data + result_meta.append(item_meta) + + if is_new: + result_meta.append(data) # Ensure only valid ids are stored. if not all_layers: @@ -139,7 +151,7 @@ class PhotoshopServerStub: layer_ids = [layer.id for layer in all_layers] cleaned_data = [] - for item in items_meta.values(): + for item in result_meta: if item.get("members"): if int(item["members"][0]) not in layer_ids: continue @@ -374,38 +386,27 @@ class PhotoshopServerStub: (Headline accessible by File > File Info) Returns: - (string): - json documents + (list) example: {"8":{"active":true,"subset":"imageBG", "family":"image","id":"pyblish.avalon.instance", "asset":"Town"}} 8 is layer(group) id - used for deletion, update etc. 
""" - layers_data = {} res = self.websocketserver.call(self.client.call('Photoshop.read')) + layers_data = [] try: - layers_data = json.loads(res) + if res: + layers_data = json.loads(res) except json.decoder.JSONDecodeError: pass # format of metadata changed from {} to [] because of standardization # keep current implementation logic as its working - if not isinstance(layers_data, dict): - temp_layers_meta = {} - for layer_meta in layers_data: - layer_id = layer_meta.get("uuid") - if not layer_id: - layer_id = layer_meta.get("members")[0] - - temp_layers_meta[layer_id] = layer_meta - layers_data = temp_layers_meta - else: - # legacy version of metadata + if isinstance(layers_data, dict): for layer_id, layer_meta in layers_data.items(): if layer_meta.get("schema") != "openpype:container-2.0": - layer_meta["uuid"] = str(layer_id) - else: layer_meta["members"] = [str(layer_id)] - + layers_data = list(layers_data.values()) return layers_data def import_smart_object(self, path, layer_name, as_reference=False): @@ -476,11 +477,12 @@ class PhotoshopServerStub: ) def remove_instance(self, instance_id): - cleaned_data = {} + cleaned_data = [] - for key, instance in self.get_layers_metadata().items(): - if key != instance_id: - cleaned_data[key] = instance + for item in self.get_layers_metadata(): + inst_id = item.get("instance_id") or item.get("uuid") + if inst_id != instance_id: + cleaned_data.append(inst_id) payload = json.dumps(cleaned_data, indent=4) From c46b41804d108cc976aae64410ce520ac3117dda Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 16:18:07 +0100 Subject: [PATCH 040/244] OP-2766 - implemented new image Creator Working implementation of New Publisher (not full backward compatibility yet). --- openpype/hosts/photoshop/api/__init__.py | 8 +- .../photoshop/plugins/create/create_image.py | 156 ++++++++++++------ .../plugins/create/create_legacy_image.py | 2 +- .../plugins/create/workfile_creator.py | 2 + .../plugins/publish/collect_instances.py | 4 + .../plugins/publish/collect_workfile.py | 30 ++-- .../plugins/publish/extract_image.py | 9 +- 7 files changed, 148 insertions(+), 63 deletions(-) diff --git a/openpype/hosts/photoshop/api/__init__.py b/openpype/hosts/photoshop/api/__init__.py index 17ea957066..94152b5706 100644 --- a/openpype/hosts/photoshop/api/__init__.py +++ b/openpype/hosts/photoshop/api/__init__.py @@ -12,7 +12,10 @@ from .pipeline import ( remove_instance, install, uninstall, - containerise + containerise, + get_context_data, + update_context_data, + get_context_title ) from .plugin import ( PhotoshopLoader, @@ -43,6 +46,9 @@ __all__ = [ "install", "uninstall", "containerise", + "get_context_data", + "update_context_data", + "get_context_title", # Plugin "PhotoshopLoader", diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index a001b5f171..a73b79e0fd 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,46 +1,50 @@ -from Qt import QtWidgets -from openpype.pipeline import create -from openpype.hosts.photoshop import api as photoshop +from avalon import api as avalon_api +from openpype.hosts.photoshop import api +from openpype.pipeline import ( + Creator, + CreatedInstance, + lib, + CreatorError +) -class CreateImage(create.LegacyCreator): - """Image folder for publish.""" - - name = "imageDefault" +class ImageCreator(Creator): + """Creates image instance for publishing.""" + identifier = "image" 
label = "Image" family = "image" - defaults = ["Main"] + description = "Image creator" - def process(self): + def collect_instances(self): + import json + self.log.info("ImageCreator: api.list_instances():: {}".format( + json.dumps(api.list_instances(), indent=4))) + for instance_data in api.list_instances(): + # legacy instances have family=='image' + creator_id = (instance_data.get("creator_identifier") or + instance_data.get("family")) + + self.log.info("ImageCreator: instance_data:: {}".format(json.dumps(instance_data, indent=4))) + if creator_id == self.identifier: + instance_data = self._handle_legacy(instance_data) + + layer = api.stub().get_layer(instance_data["members"][0]) + instance_data["layer"] = layer + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + + def create(self, subset_name, data, pre_create_data): groups = [] layers = [] create_group = False - stub = photoshop.stub() - if (self.options or {}).get("useSelection"): - multiple_instances = False - selection = stub.get_selected_layers() - self.log.info("selection {}".format(selection)) + stub = api.stub() # only after PS is up + multiple_instances = pre_create_data.get("create_multiple") + selection = stub.get_selected_layers() + if pre_create_data.get("use_selection"): if len(selection) > 1: - # Ask user whether to create one image or image per selected - # item. - msg_box = QtWidgets.QMessageBox() - msg_box.setIcon(QtWidgets.QMessageBox.Warning) - msg_box.setText( - "Multiple layers selected." - "\nDo you want to make one image per layer?" - ) - msg_box.setStandardButtons( - QtWidgets.QMessageBox.Yes | - QtWidgets.QMessageBox.No | - QtWidgets.QMessageBox.Cancel - ) - ret = msg_box.exec_() - if ret == QtWidgets.QMessageBox.Yes: - multiple_instances = True - elif ret == QtWidgets.QMessageBox.Cancel: - return - if multiple_instances: for item in selection: if item.group: @@ -48,25 +52,25 @@ class CreateImage(create.LegacyCreator): else: layers.append(item) else: - group = stub.group_selected_layers(self.name) + group = stub.group_selected_layers(subset_name) groups.append(group) - elif len(selection) == 1: # One selected item. Use group if its a LayerSet (group), else # create a new group. - if selection[0].group: - groups.append(selection[0]) + selected_item = selection[0] + if selected_item.group: + groups.append(selected_item) else: - layers.append(selection[0]) + layers.append(selected_item) elif len(selection) == 0: # No selection creates an empty group. create_group = True else: - group = stub.create_group(self.name) + group = stub.create_group(subset_name) groups.append(group) if create_group: - group = stub.create_group(self.name) + group = stub.create_group(subset_name) groups.append(group) for layer in layers: @@ -74,26 +78,78 @@ class CreateImage(create.LegacyCreator): group = stub.group_selected_layers(layer.name) groups.append(group) - creator_subset_name = self.data["subset"] for group in groups: long_names = [] - group.name = group.name.replace(stub.PUBLISH_ICON, ''). 
\ - replace(stub.LOADED_ICON, '') + group.name = self._clean_highlights(stub, group.name) - subset_name = creator_subset_name if len(groups) > 1: subset_name += group.name.title().replace(" ", "") if group.long_name: for directory in group.long_name[::-1]: - name = directory.replace(stub.PUBLISH_ICON, '').\ - replace(stub.LOADED_ICON, '') + name = self._clean_highlights(stub, directory) long_names.append(name) - self.data.update({"subset": subset_name}) - self.data.update({"uuid": str(group.id)}) - self.data.update({"long_name": "_".join(long_names)}) - stub.imprint(group, self.data) + data.update({"subset": subset_name}) + data.update({"layer": group}) + data.update({"members": [str(group.id)]}) + data.update({"long_name": "_".join(long_names)}) + + new_instance = CreatedInstance(self.family, subset_name, data, + self) + + stub.imprint(new_instance.get("instance_id"), + new_instance.data_to_store()) + self._add_instance_to_context(new_instance) # reusing existing group, need to rename afterwards if not create_group: stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) + + def update_instances(self, update_list): + self.log.info("update_list:: {}".format(update_list)) + created_inst, changes = update_list[0] + api.stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) + + def remove_instances(self, instances): + for instance in instances: + api.remove_instance(instance) + self._remove_instance_from_context(instance) + + def get_default_variants(self): + return [ + "Main" + ] + + def get_pre_create_attr_defs(self): + output = [ + lib.BoolDef("use_selection", default=True, label="Use selection"), + lib.BoolDef("create_multiple", + default=True, + label="Create separate instance for each selected") + ] + return output + + def get_detail_description(self): + return """Creator for Image instances""" + + def _handle_legacy(self, instance_data): + """Converts old instances to new format.""" + if not instance_data.get("members"): + instance_data["members"] = [instance_data.get("uuid")] + + if instance_data.get("uuid"): + # uuid not needed, replaced with unique instance_id + api.stub().remove_instance(instance_data.get("uuid")) + instance_data.pop("uuid") + + if not instance_data.get("task"): + instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + + return instance_data + + def _clean_highlights(self, stub, item): + return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, + '') + + diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index a001b5f171..6fa455fa03 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -91,7 +91,7 @@ class CreateImage(create.LegacyCreator): long_names.append(name) self.data.update({"subset": subset_name}) - self.data.update({"uuid": str(group.id)}) + self.data.update({"members": [str(group.id)]}) self.data.update({"long_name": "_".join(long_names)}) stub.imprint(group, self.data) # reusing existing group, need to rename afterwards diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index d66a05cad7..2a2fda3cc4 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -15,6 +15,7 @@ class PSWorkfileCreator(AutoCreator): return [] def collect_instances(self): + 
print("coll::{}".format(api.list_instances())) for instance_data in api.list_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: @@ -29,6 +30,7 @@ class PSWorkfileCreator(AutoCreator): pass def create(self, options=None): + print("create") existing_instance = None for instance in self.create_context.instances: if instance.family == self.family: diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index c3e27e9646..ee402dcabf 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -21,6 +21,10 @@ class CollectInstances(pyblish.api.ContextPlugin): } def process(self, context): + if context.data.get("newPublishing"): + self.log.debug("Not applicable for New Publisher, skip") + return + stub = photoshop.stub() layers = stub.get_layers() layers_meta = stub.get_layers_metadata() diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index db1ede14d5..bdbd379a33 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -10,6 +10,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin): hosts = ["photoshop"] def process(self, context): + existing_instance = None + for instance in context: + if instance.data["family"] == "workfile": + self.log.debug("Workfile instance found, won't create new") + existing_instance = instance + break + family = "workfile" task = os.getenv("AVALON_TASK", None) subset = family + task.capitalize() @@ -19,16 +26,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin): base_name = os.path.basename(file_path) # Create instance - instance = context.create_instance(subset) - instance.data.update({ - "subset": subset, - "label": base_name, - "name": base_name, - "family": family, - "families": [], - "representations": [], - "asset": os.environ["AVALON_ASSET"] - }) + if existing_instance is None: + instance = context.create_instance(subset) + instance.data.update({ + "subset": subset, + "label": base_name, + "name": base_name, + "family": family, + "families": [], + "representations": [], + "asset": os.environ["AVALON_ASSET"] + }) + else: + instance = existing_instance # creating representation _, ext = os.path.splitext(file_path) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index 04ce77ee34..d27c5bc028 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -16,7 +16,8 @@ class ExtractImage(openpype.api.Extractor): formats = ["png", "jpg"] def process(self, instance): - + print("PPPPPP") + self.log.info("fdfdsfdfs") staging_dir = self.staging_dir(instance) self.log.info("Outputting image to {}".format(staging_dir)) @@ -26,7 +27,13 @@ class ExtractImage(openpype.api.Extractor): with photoshop.maintained_selection(): self.log.info("Extracting %s" % str(list(instance))) with photoshop.maintained_visibility(): + self.log.info("instance.data:: {}".format(instance.data)) + print("instance.data::: {}".format(instance.data)) layer = instance.data.get("layer") + self.log.info("layer:: {}".format(layer)) + print("layer::: {}".format(layer)) + if not layer: + return ids = set([layer.id]) add_ids = instance.data.pop("ids", None) if add_ids: 
From a5ac3ab55b2c67604ef8e2530c57bdf242e6c599 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Mar 2022 16:21:29 +0100 Subject: [PATCH 041/244] OP-2766 - implemented new context methods --- openpype/hosts/photoshop/api/pipeline.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 8d64942c9e..0a99d1779d 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -236,14 +236,21 @@ def containerise( def get_context_data(): - pass + """Get stored values for context (validation enable/disable etc)""" + meta = _get_stub().get_layers_metadata() + for item in meta: + if item.get("id") == "publish_context": + item.pop("id") + return item + + return {} def update_context_data(data, changes): - # item = data - # item["id"] = "publish_context" - # _get_stub().imprint(item["id"], item) - pass + """Store value needed for context""" + item = data + item["id"] = "publish_context" + _get_stub().imprint(item["id"], item) def get_context_title(): From df5fdcc54c6ff125d307036b26a07572671047c9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 10:45:54 +0100 Subject: [PATCH 042/244] OP-2766 - do not store PSItem in metadata PSItem is not serializable --- openpype/hosts/photoshop/plugins/create/create_image.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index a73b79e0fd..4fc9a86635 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -108,6 +108,7 @@ class ImageCreator(Creator): def update_instances(self, update_list): self.log.info("update_list:: {}".format(update_list)) created_inst, changes = update_list[0] + created_inst.pop("layer") # not storing PSItem layer to metadata api.stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) From c422176553ff27cff8d5113958fadf0dc4ddf12e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Mar 2022 11:29:41 +0100 Subject: [PATCH 043/244] OP-2766 - removed hardcoded ftrack, CollectFtrackFamily should be used Added defaults for Ftrack Settings. --- .../plugins/publish/collect_review.py | 25 +++++++++++++------ .../defaults/project_settings/ftrack.json | 12 +++++++++ 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 5ab48b76da..4b6f855a6a 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -1,12 +1,24 @@ +""" +Requires: + None + +Provides: + instance -> family ("review") +""" + import os import pyblish.api class CollectReview(pyblish.api.ContextPlugin): - """Gather the active document as review instance.""" + """Gather the active document as review instance. - label = "Review" + Triggers once even if no 'image' is published as by defaults it creates + flatten image from a workfile. 
+ """ + + label = "Collect Review" order = pyblish.api.CollectorOrder hosts = ["photoshop"] @@ -15,16 +27,13 @@ class CollectReview(pyblish.api.ContextPlugin): task = os.getenv("AVALON_TASK", None) subset = family + task.capitalize() - file_path = context.data["currentFile"] - base_name = os.path.basename(file_path) - instance = context.create_instance(subset) instance.data.update({ "subset": subset, - "label": base_name, - "name": base_name, + "label": subset, + "name": subset, "family": family, - "families": ["ftrack"], + "families": [], "representations": [], "asset": os.environ["AVALON_ASSET"] }) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 01831efad1..015413e64f 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -344,6 +344,18 @@ "tasks": [], "add_ftrack_family": true, "advanced_filtering": [] + }, + { + "hosts": [ + "photoshop" + ], + "families": [ + "review" + ], + "task_types": [], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] } ] }, From a6a1d0fc545d8fc5a8781f40468a95a261ca3b01 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 15:22:46 +0100 Subject: [PATCH 044/244] OP-2766 - fixed broken remove_instance --- openpype/hosts/photoshop/api/ws_stub.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/api/ws_stub.py b/openpype/hosts/photoshop/api/ws_stub.py index dd29ef4e84..fa076ecc7e 100644 --- a/openpype/hosts/photoshop/api/ws_stub.py +++ b/openpype/hosts/photoshop/api/ws_stub.py @@ -77,14 +77,28 @@ class PhotoshopServerStub: layer: (PSItem) layers_meta: full list from Headline (for performance in loops) Returns: + (dict) of layer metadata stored in PS file + + Example: + { + 'id': 'pyblish.avalon.container', + 'loader': 'ImageLoader', + 'members': ['64'], + 'name': 'imageMainMiddle', + 'namespace': 'Hero_imageMainMiddle_001', + 'representation': '6203dc91e80934d9f6ee7d96', + 'schema': 'openpype:container-2.0' + } """ if layers_meta is None: layers_meta = self.get_layers_metadata() for layer_meta in layers_meta: + layer_id = layer_meta.get("uuid") # legacy if layer_meta.get("members"): - if layer.id == layer_meta["members"][0]: - return layer + layer_id = layer_meta["members"][0] + if str(layer.id) == str(layer_id): + return layer_meta print("Unable to find layer metadata for {}".format(layer.id)) def imprint(self, item_id, data, all_layers=None, items_meta=None): @@ -399,7 +413,7 @@ class PhotoshopServerStub: if res: layers_data = json.loads(res) except json.decoder.JSONDecodeError: - pass + raise ValueError("{} cannot be parsed, recreate meta".format(res)) # format of metadata changed from {} to [] because of standardization # keep current implementation logic as its working if isinstance(layers_data, dict): @@ -482,7 +496,7 @@ class PhotoshopServerStub: for item in self.get_layers_metadata(): inst_id = item.get("instance_id") or item.get("uuid") if inst_id != instance_id: - cleaned_data.append(inst_id) + cleaned_data.append(item) payload = json.dumps(cleaned_data, indent=4) From 01f2c8c1044ddeb78912dc2f6e401a4700e1a67d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Mar 2022 15:24:51 +0100 Subject: [PATCH 045/244] OP-2766 - fixed layer and variant keys --- .../hosts/photoshop/plugins/create/create_image.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git 
a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 4fc9a86635..c24d8bde2f 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -16,18 +16,13 @@ class ImageCreator(Creator): description = "Image creator" def collect_instances(self): - import json - self.log.info("ImageCreator: api.list_instances():: {}".format( - json.dumps(api.list_instances(), indent=4))) for instance_data in api.list_instances(): # legacy instances have family=='image' creator_id = (instance_data.get("creator_identifier") or instance_data.get("family")) - self.log.info("ImageCreator: instance_data:: {}".format(json.dumps(instance_data, indent=4))) if creator_id == self.identifier: instance_data = self._handle_legacy(instance_data) - layer = api.stub().get_layer(instance_data["members"][0]) instance_data["layer"] = layer instance = CreatedInstance.from_existing( @@ -106,9 +101,10 @@ class ImageCreator(Creator): stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) def update_instances(self, update_list): - self.log.info("update_list:: {}".format(update_list)) + self.log.debug("update_list:: {}".format(update_list)) created_inst, changes = update_list[0] - created_inst.pop("layer") # not storing PSItem layer to metadata + if created_inst.get("layer"): + created_inst.pop("layer") # not storing PSItem layer to metadata api.stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) @@ -147,6 +143,9 @@ class ImageCreator(Creator): if not instance_data.get("task"): instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + if not instance_data.get("variant"): + instance_data["variant"] = '' + return instance_data def _clean_highlights(self, stub, item): From b71554fe25375af9e87b7c854d3492d9f932de02 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Mar 2022 14:56:29 +0100 Subject: [PATCH 046/244] OP-2765 - fix for update of multiple instances --- openpype/hosts/aftereffects/plugins/create/create_render.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 1a5a826137..e4f1f57b84 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -34,9 +34,9 @@ class RenderCreator(Creator): self._add_instance_to_context(instance) def update_instances(self, update_list): - created_inst, changes = update_list[0] - api.get_stub().imprint(created_inst.get("instance_id"), - created_inst.data_to_store()) + for created_inst, _changes in update_list: + api.get_stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) def remove_instances(self, instances): for instance in instances: From 6fde2110148e62649ae3bd0d25726d5dd9c16859 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:37:31 +0100 Subject: [PATCH 047/244] OP-2766 - fix loaders because of change in imprint signature --- openpype/hosts/photoshop/plugins/load/load_image.py | 4 ++-- openpype/hosts/photoshop/plugins/load/load_reference.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/load/load_image.py b/openpype/hosts/photoshop/plugins/load/load_image.py index 0a9421b8f2..91a9787781 100644 --- a/openpype/hosts/photoshop/plugins/load/load_image.py +++ 
b/openpype/hosts/photoshop/plugins/load/load_image.py @@ -61,7 +61,7 @@ class ImageLoader(photoshop.PhotoshopLoader): ) stub.imprint( - layer, {"representation": str(representation["_id"])} + layer.id, {"representation": str(representation["_id"])} ) def remove(self, container): @@ -73,7 +73,7 @@ class ImageLoader(photoshop.PhotoshopLoader): stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_layer(layer.id) def switch(self, container, representation): diff --git a/openpype/hosts/photoshop/plugins/load/load_reference.py b/openpype/hosts/photoshop/plugins/load/load_reference.py index f5f0545d39..1f32a5d23c 100644 --- a/openpype/hosts/photoshop/plugins/load/load_reference.py +++ b/openpype/hosts/photoshop/plugins/load/load_reference.py @@ -61,7 +61,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader): ) stub.imprint( - layer, {"representation": str(representation["_id"])} + layer.id, {"representation": str(representation["_id"])} ) def remove(self, container): @@ -72,7 +72,7 @@ class ReferenceLoader(photoshop.PhotoshopLoader): """ stub = self.get_stub() layer = container.pop("layer") - stub.imprint(layer, {}) + stub.imprint(layer.id, {}) stub.delete_layer(layer.id) def switch(self, container, representation): From bdc3a05c4d52a29c1aaff99d83c993be48c7563e Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:38:46 +0100 Subject: [PATCH 048/244] OP-2766 - fix wrongly used functions --- openpype/hosts/photoshop/api/pipeline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 2f4343753c..abc4e63bf6 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -179,10 +179,10 @@ def remove_instance(instance): stub.remove_instance(inst_id) if instance.get("members"): - item = stub.get_item(instance["members"][0]) + item = stub.get_layer(instance["members"][0]) if item: - stub.rename_item(item.id, - item.name.replace(stub.PUBLISH_ICON, '')) + stub.rename_layer(item.id, + item.name.replace(stub.PUBLISH_ICON, '')) def _get_stub(): From b8dd330be3f0de72ba1a28652dff2ae4702c3dc2 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 16:40:28 +0100 Subject: [PATCH 049/244] OP-2766 - fix new creator for multiple instance's update --- .../hosts/photoshop/plugins/create/create_image.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index c24d8bde2f..bc0fa6a051 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -102,11 +102,11 @@ class ImageCreator(Creator): def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) - created_inst, changes = update_list[0] - if created_inst.get("layer"): - created_inst.pop("layer") # not storing PSItem layer to metadata - api.stub().imprint(created_inst.get("instance_id"), - created_inst.data_to_store()) + for created_inst, _changes in update_list: + if created_inst.get("layer"): + created_inst.pop("layer") # not storing PSItem layer to metadata + api.stub().imprint(created_inst.get("instance_id"), + created_inst.data_to_store()) def remove_instances(self, instances): for instance in instances: From adc135cb4c1d09eb27d51dae067f054a93c74d77 Mon Sep 17 00:00:00 2001 From: 
Pype Club Date: Tue, 22 Mar 2022 16:59:53 +0100 Subject: [PATCH 050/244] OP-2766 - added newPublishing flag to differentiate old from new --- openpype/plugins/publish/collect_from_create_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 16e3f669c3..09584ab37c 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -25,7 +25,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): # Update global data to context context.data.update(create_context.context_data_to_store()) - + context.data["newPublishing"] = True # Update context data for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"): value = create_context.dbcon.Session.get(key) From 96d88e592d56cb5193a13764aba9f5fcecff9616 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 17:01:35 +0100 Subject: [PATCH 051/244] OP-2766 - renamed collector --- openpype/hosts/photoshop/plugins/publish/collect_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index ee402dcabf..d506b9a5bf 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -13,7 +13,7 @@ class CollectInstances(pyblish.api.ContextPlugin): id (str): "pyblish.avalon.instance" """ - label = "Instances" + label = "Collect Instances" order = pyblish.api.CollectorOrder hosts = ["photoshop"] families_mapping = { From e86dc1acd77b841d36486a594862473e6aaf76a8 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Tue, 22 Mar 2022 19:57:02 +0100 Subject: [PATCH 052/244] OP-2766 - refactored new creator --- .../photoshop/plugins/create/create_image.py | 79 ++++++++----------- 1 file changed, 33 insertions(+), 46 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index bc0fa6a051..cd7e219bd0 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -3,8 +3,7 @@ from openpype.hosts.photoshop import api from openpype.pipeline import ( Creator, CreatedInstance, - lib, - CreatorError + lib ) @@ -30,65 +29,53 @@ class ImageCreator(Creator): ) self._add_instance_to_context(instance) - def create(self, subset_name, data, pre_create_data): - groups = [] - layers = [] - create_group = False + def create(self, subset_name_from_ui, data, pre_create_data): + groups_to_create = [] + top_layers_to_wrap = [] + create_empty_group = False stub = api.stub() # only after PS is up - multiple_instances = pre_create_data.get("create_multiple") - selection = stub.get_selected_layers() + top_level_selected_items = stub.get_selected_layers() if pre_create_data.get("use_selection"): - if len(selection) > 1: - if multiple_instances: - for item in selection: - if item.group: - groups.append(item) - else: - layers.append(item) + only_single_item_selected = len(top_level_selected_items) == 1 + for selected_item in top_level_selected_items: + if only_single_item_selected or pre_create_data.get("create_multiple"): + if selected_item.group: + groups_to_create.append(selected_item) + else: + top_layers_to_wrap.append(selected_item) else: - group = stub.group_selected_layers(subset_name) - 
groups.append(group) - elif len(selection) == 1: - # One selected item. Use group if its a LayerSet (group), else - # create a new group. - selected_item = selection[0] - if selected_item.group: - groups.append(selected_item) - else: - layers.append(selected_item) - elif len(selection) == 0: - # No selection creates an empty group. - create_group = True - else: - group = stub.create_group(subset_name) - groups.append(group) + group = stub.group_selected_layers(subset_name_from_ui) + groups_to_create.append(group) - if create_group: - group = stub.create_group(subset_name) - groups.append(group) + if not groups_to_create and not top_layers_to_wrap: + group = stub.create_group(subset_name_from_ui) + groups_to_create.append(group) - for layer in layers: + # wrap each top level layer into separate new group + for layer in top_layers_to_wrap: stub.select_layers([layer]) group = stub.group_selected_layers(layer.name) - groups.append(group) + groups_to_create.append(group) - for group in groups: - long_names = [] - group.name = self._clean_highlights(stub, group.name) + creating_multiple_groups = len(groups_to_create) > 1 + for group in groups_to_create: + subset_name = subset_name_from_ui # reset to name from creator UI + layer_names_in_hierarchy = [] + created_group_name = self._clean_highlights(stub, group.name) - if len(groups) > 1: + if creating_multiple_groups: + # concatenate with layer name to differentiate subsets subset_name += group.name.title().replace(" ", "") if group.long_name: for directory in group.long_name[::-1]: name = self._clean_highlights(stub, directory) - long_names.append(name) + layer_names_in_hierarchy.append(name) data.update({"subset": subset_name}) - data.update({"layer": group}) data.update({"members": [str(group.id)]}) - data.update({"long_name": "_".join(long_names)}) + data.update({"long_name": "_".join(layer_names_in_hierarchy)}) new_instance = CreatedInstance(self.family, subset_name, data, self) @@ -97,8 +84,8 @@ class ImageCreator(Creator): new_instance.data_to_store()) self._add_instance_to_context(new_instance) # reusing existing group, need to rename afterwards - if not create_group: - stub.rename_layer(group.id, stub.PUBLISH_ICON + group.name) + if not create_empty_group: + stub.rename_layer(group.id, stub.PUBLISH_ICON + created_group_name) def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) @@ -120,7 +107,7 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, label="Use selection"), + lib.BoolDef("use_selection", default=True, label="Create only for selected"), lib.BoolDef("create_multiple", default=True, label="Create separate instance for each selected") From 9be8885bc3845d3fd5a4aed6b9558a3758e38a8b Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 10:47:50 +0100 Subject: [PATCH 053/244] OP-2766 - added support for new publisher NP already collected instances, need to only add layer information --- .../plugins/publish/collect_instances.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index d506b9a5bf..1b30fb053a 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -1,3 +1,4 @@ +import pprint import pyblish.api from openpype.hosts.photoshop import api as photoshop @@ 
-21,9 +22,10 @@ class CollectInstances(pyblish.api.ContextPlugin): } def process(self, context): - if context.data.get("newPublishing"): - self.log.debug("Not applicable for New Publisher, skip") - return + instance_by_layer_id = {} + for instance in context: + if instance.data["family"] == "image" and instance.data.get("members"): + instance_by_layer_id[str(instance.data["members"][0])] = instance stub = photoshop.stub() layers = stub.get_layers() @@ -40,13 +42,10 @@ class CollectInstances(pyblish.api.ContextPlugin): if "container" in layer_data["id"]: continue - # child_layers = [*layer.Layers] - # self.log.debug("child_layers {}".format(child_layers)) - # if not child_layers: - # self.log.info("%s skipped, it was empty." % layer.Name) - # continue + instance = instance_by_layer_id.get(str(layer.id)) + if instance is None: + instance = context.create_instance(layer_data["subset"]) - instance = context.create_instance(layer_data["subset"]) instance.data["layer"] = layer instance.data.update(layer_data) instance.data["families"] = self.families_mapping[ @@ -58,7 +57,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Produce diagnostic message for any graphical # user interface interested in visualising it. self.log.info("Found: \"%s\" " % instance.data["name"]) - self.log.info("instance: {} ".format(instance.data)) + self.log.info("instance: {} ".format(pprint.pformat(instance.data, indent=4))) if len(instance_names) != len(set(instance_names)): self.log.warning("Duplicate instances found. " + From 11a9ad18738ffa9ff036722f699d715663d3fb53 Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 10:53:58 +0100 Subject: [PATCH 054/244] OP-2766 - refactor --- .../plugins/publish/collect_instances.py | 38 ++++++++++--------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 1b30fb053a..9449662067 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -7,8 +7,8 @@ from openpype.hosts.photoshop import api as photoshop class CollectInstances(pyblish.api.ContextPlugin): """Gather instances by LayerSet and file metadata - This collector takes into account assets that are associated with - an LayerSet and marked with a unique identifier; + Collects publishable instances from file metadata or enhance + already collected by creator (family == "image"). Identifier: id (str): "pyblish.avalon.instance" @@ -24,40 +24,44 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): instance_by_layer_id = {} for instance in context: - if instance.data["family"] == "image" and instance.data.get("members"): - instance_by_layer_id[str(instance.data["members"][0])] = instance + if ( + instance.data["family"] == "image" and + instance.data.get("members")): + layer_id = str(instance.data["members"][0]) + instance_by_layer_id[layer_id] = instance stub = photoshop.stub() - layers = stub.get_layers() + layer_items = stub.get_layers() layers_meta = stub.get_layers_metadata() instance_names = [] - for layer in layers: - layer_data = stub.read(layer, layers_meta) + for layer_item in layer_items: + layer_instance_data = stub.read(layer_item, layers_meta) # Skip layers without metadata. - if layer_data is None: + if layer_instance_data is None: continue # Skip containers. 
- if "container" in layer_data["id"]: + if "container" in layer_instance_data["id"]: continue - instance = instance_by_layer_id.get(str(layer.id)) + instance = instance_by_layer_id.get(str(layer_item.id)) if instance is None: - instance = context.create_instance(layer_data["subset"]) + instance = context.create_instance(layer_instance_data["subset"]) - instance.data["layer"] = layer - instance.data.update(layer_data) + instance.data["layer"] = layer_item + instance.data.update(layer_instance_data) instance.data["families"] = self.families_mapping[ - layer_data["family"] + layer_instance_data["family"] ] - instance.data["publish"] = layer.visible - instance_names.append(layer_data["subset"]) + instance.data["publish"] = layer_item.visible + instance_names.append(layer_instance_data["subset"]) # Produce diagnostic message for any graphical # user interface interested in visualising it. self.log.info("Found: \"%s\" " % instance.data["name"]) - self.log.info("instance: {} ".format(pprint.pformat(instance.data, indent=4))) + self.log.info("instance: {} ".format( + pprint.pformat(instance.data, indent=4))) if len(instance_names) != len(set(instance_names)): self.log.warning("Duplicate instances found. " + From 755a6dabfd1ba5d1bb80000ab69140d1a54d9c3d Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 11:47:36 +0100 Subject: [PATCH 055/244] OP-2766 - added NP validators for subset names and uniqueness --- .../plugins/publish/help/validate_naming.xml | 21 +++++++++ .../publish/help/validate_unique_subsets.xml | 14 ++++++ .../plugins/publish/validate_naming.py | 47 +++++++++++-------- .../publish/validate_unique_subsets.py | 9 +++- 4 files changed, 71 insertions(+), 20 deletions(-) create mode 100644 openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml create mode 100644 openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml new file mode 100644 index 0000000000..5a1e266748 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/help/validate_naming.xml @@ -0,0 +1,21 @@ + + + +Subset name + +## Invalid subset or layer name + +Subset or layer name cannot contain specific characters (spaces etc) which could cause issue when subset name is used in a published file name. + {msg} + +### How to repair? + +You can fix this with "repair" button on the right. + + +### __Detailed Info__ (optional) + +Not all characters are available in a file names on all OS. Wrong characters could be configured in Settings. + + + \ No newline at end of file diff --git a/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml new file mode 100644 index 0000000000..4b47973193 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/help/validate_unique_subsets.xml @@ -0,0 +1,14 @@ + + + +Subset not unique + +## Non unique subset name found + + Non unique subset names: '{non_unique}' +### How to repair? + +Remove offending instance, rename it to have unique name. Maybe layer name wasn't used for multiple instances? 
+ + + \ No newline at end of file diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index b40e44d016..c0ca4cfb69 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -2,6 +2,7 @@ import re import pyblish.api import openpype.api +from openpype.pipeline import PublishXmlValidationError from openpype.hosts.photoshop import api as photoshop @@ -22,32 +23,33 @@ class ValidateNamingRepair(pyblish.api.Action): failed.append(result["instance"]) invalid_chars, replace_char = plugin.get_replace_chars() - self.log.info("{} --- {}".format(invalid_chars, replace_char)) + self.log.debug("{} --- {}".format(invalid_chars, replace_char)) # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) stub = photoshop.stub() for instance in instances: - self.log.info("validate_naming instance {}".format(instance)) - metadata = stub.read(instance[0]) - self.log.info("metadata instance {}".format(metadata)) - layer_name = None - if metadata.get("uuid"): - layer_data = stub.get_layer(metadata["uuid"]) - self.log.info("layer_data {}".format(layer_data)) - if layer_data: - layer_name = re.sub(invalid_chars, - replace_char, - layer_data.name) + self.log.debug("validate_naming instance {}".format(instance)) + current_layer_state = stub.get_layer(instance.data["layer"].id) + self.log.debug("current_layer_state instance {}".format(current_layer_state)) - stub.rename_layer(instance.data["uuid"], layer_name) + layer_meta = stub.read(current_layer_state) + instance_id = layer_meta.get("instance_id") or layer_meta.get("uuid") + if not instance_id: + self.log.warning("Unable to repair, cannot find layer") + continue + + layer_name = re.sub(invalid_chars, + replace_char, + current_layer_state.name) + + stub.rename_layer(current_layer_state.id, layer_name) subset_name = re.sub(invalid_chars, replace_char, - instance.data["name"]) + instance.data["subset"]) - instance[0].Name = layer_name or subset_name - metadata["subset"] = subset_name - stub.imprint(instance[0], metadata) + layer_meta["subset"] = subset_name + stub.imprint(instance_id, layer_meta) return True @@ -72,11 +74,18 @@ class ValidateNaming(pyblish.api.InstancePlugin): help_msg = ' Use Repair action (A) in Pyblish to fix it.' 
msg = "Name \"{}\" is not allowed.{}".format(instance.data["name"], help_msg) - assert not re.search(self.invalid_chars, instance.data["name"]), msg + + formatting_data = {"msg": msg} + if re.search(self.invalid_chars, instance.data["name"]): + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) msg = "Subset \"{}\" is not allowed.{}".format(instance.data["subset"], help_msg) - assert not re.search(self.invalid_chars, instance.data["subset"]), msg + formatting_data = {"msg": msg} + if re.search(self.invalid_chars, instance.data["subset"]): + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) @classmethod def get_replace_chars(cls): diff --git a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py index 40abfb1bbd..01f2323157 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_unique_subsets.py @@ -1,6 +1,7 @@ import collections import pyblish.api import openpype.api +from openpype.pipeline import PublishXmlValidationError class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): @@ -27,4 +28,10 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): if count > 1] msg = ("Instance subset names {} are not unique. ".format(non_unique) + "Remove duplicates via SubsetManager.") - assert not non_unique, msg + formatting_data = { + "non_unique": ",".join(non_unique) + } + + if non_unique: + raise PublishXmlValidationError(self, msg, + formatting_data=formatting_data) From 85b49da44e14ec82a93e43bd4f8f1571b403627a Mon Sep 17 00:00:00 2001 From: Pype Club Date: Wed, 23 Mar 2022 12:11:48 +0100 Subject: [PATCH 056/244] OP-2766 - skip non active instances --- .../plugins/publish/collect_instances.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 9449662067..52a8310594 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -35,27 +35,30 @@ class CollectInstances(pyblish.api.ContextPlugin): layers_meta = stub.get_layers_metadata() instance_names = [] for layer_item in layer_items: - layer_instance_data = stub.read(layer_item, layers_meta) + layer_meta_data = stub.read(layer_item, layers_meta) # Skip layers without metadata. - if layer_instance_data is None: + if layer_meta_data is None: continue # Skip containers. - if "container" in layer_instance_data["id"]: + if "container" in layer_meta_data["id"]: + continue + + if not layer_meta_data.get("active", True): # active might not be in legacy meta continue instance = instance_by_layer_id.get(str(layer_item.id)) if instance is None: - instance = context.create_instance(layer_instance_data["subset"]) + instance = context.create_instance(layer_meta_data["subset"]) instance.data["layer"] = layer_item - instance.data.update(layer_instance_data) + instance.data.update(layer_meta_data) instance.data["families"] = self.families_mapping[ - layer_instance_data["family"] + layer_meta_data["family"] ] instance.data["publish"] = layer_item.visible - instance_names.append(layer_instance_data["subset"]) + instance_names.append(layer_meta_data["subset"]) # Produce diagnostic message for any graphical # user interface interested in visualising it. 
From d211471ea099f53d8349f33d7e20ad29da7f178c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 13:49:50 +0100 Subject: [PATCH 057/244] OP-2766 - Hound --- openpype/hosts/photoshop/api/pipeline.py | 4 ++-- .../photoshop/plugins/create/create_image.py | 15 +++++++++------ .../plugins/publish/collect_instances.py | 3 ++- .../photoshop/plugins/publish/validate_naming.py | 5 +++-- 4 files changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 1b471ef1d3..db40e456db 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -151,7 +151,7 @@ def list_instances(): layers_meta = stub.get_layers_metadata() if layers_meta: for instance in layers_meta: - if instance.get("id") == "pyblish.avalon.instance": # TODO only this way? + if instance.get("id") == "pyblish.avalon.instance": instances.append(instance) return instances @@ -266,4 +266,4 @@ def get_context_title(): project_name = avalon.api.Session["AVALON_PROJECT"] asset_name = avalon.api.Session["AVALON_ASSET"] task_name = avalon.api.Session["AVALON_TASK"] - return "{}/{}/{}".format(project_name, asset_name, task_name) \ No newline at end of file + return "{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index cd7e219bd0..e332cfd9c2 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -39,7 +39,9 @@ class ImageCreator(Creator): if pre_create_data.get("use_selection"): only_single_item_selected = len(top_level_selected_items) == 1 for selected_item in top_level_selected_items: - if only_single_item_selected or pre_create_data.get("create_multiple"): + if ( + only_single_item_selected or + pre_create_data.get("create_multiple")): if selected_item.group: groups_to_create.append(selected_item) else: @@ -85,13 +87,15 @@ class ImageCreator(Creator): self._add_instance_to_context(new_instance) # reusing existing group, need to rename afterwards if not create_empty_group: - stub.rename_layer(group.id, stub.PUBLISH_ICON + created_group_name) + stub.rename_layer(group.id, + stub.PUBLISH_ICON + created_group_name) def update_instances(self, update_list): self.log.debug("update_list:: {}".format(update_list)) for created_inst, _changes in update_list: if created_inst.get("layer"): - created_inst.pop("layer") # not storing PSItem layer to metadata + # not storing PSItem layer to metadata + created_inst.pop("layer") api.stub().imprint(created_inst.get("instance_id"), created_inst.data_to_store()) @@ -107,7 +111,8 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, label="Create only for selected"), + lib.BoolDef("use_selection", default=True, + label="Create only for selected"), lib.BoolDef("create_multiple", default=True, label="Create separate instance for each selected") @@ -138,5 +143,3 @@ class ImageCreator(Creator): def _clean_highlights(self, stub, item): return item.replace(stub.PUBLISH_ICON, '').replace(stub.LOADED_ICON, '') - - diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 52a8310594..a7bb2d40c7 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ 
b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -45,7 +45,8 @@ class CollectInstances(pyblish.api.ContextPlugin): if "container" in layer_meta_data["id"]: continue - if not layer_meta_data.get("active", True): # active might not be in legacy meta + # active might not be in legacy meta + if not layer_meta_data.get("active", True): continue instance = instance_by_layer_id.get(str(layer_item.id)) diff --git a/openpype/hosts/photoshop/plugins/publish/validate_naming.py b/openpype/hosts/photoshop/plugins/publish/validate_naming.py index c0ca4cfb69..bcae24108c 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_naming.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_naming.py @@ -31,10 +31,11 @@ class ValidateNamingRepair(pyblish.api.Action): for instance in instances: self.log.debug("validate_naming instance {}".format(instance)) current_layer_state = stub.get_layer(instance.data["layer"].id) - self.log.debug("current_layer_state instance {}".format(current_layer_state)) + self.log.debug("current_layer{}".format(current_layer_state)) layer_meta = stub.read(current_layer_state) - instance_id = layer_meta.get("instance_id") or layer_meta.get("uuid") + instance_id = (layer_meta.get("instance_id") or + layer_meta.get("uuid")) if not instance_id: self.log.warning("Unable to repair, cannot find layer") continue From 49d26ef9593271a6b36dfbdd353f7bed017478ad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:11:39 +0100 Subject: [PATCH 058/244] OP-2766 - changed imports after refactor of attribute definitions --- openpype/hosts/photoshop/plugins/create/create_image.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index e332cfd9c2..12898bb7f4 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,9 +1,9 @@ from avalon import api as avalon_api from openpype.hosts.photoshop import api +from openpype.lib import BoolDef from openpype.pipeline import ( Creator, - CreatedInstance, - lib + CreatedInstance ) @@ -111,9 +111,9 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, + BoolDef("use_selection", default=True, label="Create only for selected"), - lib.BoolDef("create_multiple", + BoolDef("create_multiple", default=True, label="Create separate instance for each selected") ] From 7273fd44daa2ebb266c9f95f9beb0cbfad53258a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:14:35 +0100 Subject: [PATCH 059/244] OP-2765 - changed imports after refactor of attribute definitions --- .../hosts/aftereffects/plugins/create/create_render.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 1eff992fe0..826d438fa3 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,11 +1,11 @@ from avalon import api as avalon_api from openpype import resources +from openpype.lib import BoolDef, UISeparatorDef from openpype.hosts.aftereffects import api from openpype.pipeline import ( Creator, CreatedInstance, - lib, CreatorError ) @@ -86,13 +86,13 @@ class RenderCreator(Creator): ] def get_instance_attr_defs(self): - return 
[lib.BoolDef("farm", label="Render on farm")] + return [BoolDef("farm", label="Render on farm")] def get_pre_create_attr_defs(self): output = [ - lib.BoolDef("use_selection", default=True, label="Use selection"), - lib.UISeparatorDef(), - lib.BoolDef("farm", label="Render on farm") + BoolDef("use_selection", default=True, label="Use selection"), + UISeparatorDef(), + BoolDef("farm", label="Render on farm") ] return output From c829cc19ac675bbc9752980b805b69964cccb6b7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:15:16 +0100 Subject: [PATCH 060/244] OP-2765 - changed default variant --- openpype/hosts/aftereffects/plugins/create/create_render.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 826d438fa3..c43ada84b5 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -79,11 +79,7 @@ class RenderCreator(Creator): self._add_instance_to_context(new_instance) def get_default_variants(self): - return [ - "myVariant", - "variantTwo", - "different_variant" - ] + return ["Main"] def get_instance_attr_defs(self): return [BoolDef("farm", label="Render on farm")] From 1534c878d2e57dad50823d52d434feb2cecd3f10 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 14:33:49 +0100 Subject: [PATCH 061/244] OP-2766 - Hound --- openpype/hosts/photoshop/plugins/create/create_image.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index 12898bb7f4..c2fe8b6c78 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -112,10 +112,10 @@ class ImageCreator(Creator): def get_pre_create_attr_defs(self): output = [ BoolDef("use_selection", default=True, - label="Create only for selected"), + label="Create only for selected"), BoolDef("create_multiple", - default=True, - label="Create separate instance for each selected") + default=True, + label="Create separate instance for each selected") ] return output From c7039e91f8665b1a3f47e317e5b807faee03783c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 15:20:43 +0100 Subject: [PATCH 062/244] OP-2766 - return back uuid for legacy creator --- openpype/hosts/photoshop/plugins/create/create_legacy_image.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 6fa455fa03..9736471a26 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -91,6 +91,7 @@ class CreateImage(create.LegacyCreator): long_names.append(name) self.data.update({"subset": subset_name}) + self.data.update({"uuid": str(group.id)}) self.data.update({"members": [str(group.id)]}) self.data.update({"long_name": "_".join(long_names)}) stub.imprint(group, self.data) From 8964fdb754ff837028f032d6bafbdc3ef160aa31 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:11:25 +0100 Subject: [PATCH 063/244] OP-2766 - clean up import --- openpype/hosts/aftereffects/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py 
b/openpype/hosts/aftereffects/api/pipeline.py index 2a213e1b59..e14b8adc8c 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -11,12 +11,12 @@ from openpype import lib from openpype.api import Logger from openpype.pipeline import ( LegacyCreator, + BaseCreator, register_loader_plugin_path, deregister_loader_plugin_path, AVALON_CONTAINER_ID, ) import openpype.hosts.aftereffects -from openpype.pipeline import BaseCreator from openpype.lib import register_event_callback from .launch_logic import get_stub From 0858ee0ce8483c123a67525342fba6f782c15ae2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:15:46 +0100 Subject: [PATCH 064/244] OP-2765 - remove wrong logging function --- .../aftereffects/plugins/publish/collect_workfile.py | 4 ---- openpype/lib/__init__.py | 3 +-- openpype/lib/log.py | 11 ----------- 3 files changed, 1 insertion(+), 17 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 67f037e6e6..f285ae49e4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,8 +2,6 @@ import os from avalon import api import pyblish.api -from openpype.lib import debug_log_instance - class CollectWorkfile(pyblish.api.ContextPlugin): """ Adds the AE render instances """ @@ -76,5 +74,3 @@ class CollectWorkfile(pyblish.api.ContextPlugin): } instance.data["representations"].append(representation) - - debug_log_instance(self.log, "Workfile instance", instance) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index f02706e44f..e8b6d18f4e 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -63,7 +63,7 @@ from .execute import ( path_to_subprocess_arg, CREATE_NO_WINDOW ) -from .log import PypeLogger, timeit, debug_log_instance +from .log import PypeLogger, timeit from .path_templates import ( merge_dict, @@ -369,7 +369,6 @@ __all__ = [ "OpenPypeMongoConnection", "timeit", - "debug_log_instance", "is_overlapping_otio_ranges", "otio_range_with_handles", diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 991dc3349a..c963807014 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -498,14 +498,3 @@ def timeit(method): print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000)) return result return timed - - -def debug_log_instance(logger, msg, instance): - """Helper function to write instance.data as json""" - def _default_json(value): - return str(value) - - logger.debug(msg) - logger.debug( - json.dumps(instance.data, indent=4, default=_default_json) - ) From 91879de0ad4ed7859b4fa330bcc03685fd3d39ad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:24:24 +0100 Subject: [PATCH 065/244] OP-2765 - revert of unwanted commit --- openpype/modules/log_viewer/log_view_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index 5e141f6aa2..14be6b392e 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -8,7 +8,7 @@ class LogViewModule(OpenPypeModule, ITrayModule): def initialize(self, modules_settings): logging_settings = modules_settings[self.name] - self.enabled = False # logging_settings["enabled"] + self.enabled = logging_settings["enabled"] # Tray attributes self.window = None From 
bfbb2061bcbe900a05ac59ff1e4894f1ae4cefa5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:25:34 +0100 Subject: [PATCH 066/244] OP-2765 - revert of unwanted commit --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index ed932d35b9..eeb1f7744c 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -85,9 +85,7 @@ def inject_openpype_environment(deadlinePlugin): with open(export_url) as fp: contents = json.load(fp) for key, value in contents.items(): - print("key:: {}".format(key)) - if key != 'NUMBER_OF_PROCESSORS': - deadlinePlugin.SetProcessEnvironmentVariable(key, value) + deadlinePlugin.SetProcessEnvironmentVariable(key, value) print(">>> Removing temporary file") os.remove(export_url) From 16c919e93d0d65af801a10dff431058ec1da8203 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:26:40 +0100 Subject: [PATCH 067/244] OP-2765 - revert of unwanted commit --- openpype/hosts/harmony/plugins/publish/extract_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/harmony/plugins/publish/extract_render.py b/openpype/hosts/harmony/plugins/publish/extract_render.py index 49133d9608..2f8169248e 100644 --- a/openpype/hosts/harmony/plugins/publish/extract_render.py +++ b/openpype/hosts/harmony/plugins/publish/extract_render.py @@ -41,7 +41,6 @@ class ExtractRender(pyblish.api.InstancePlugin): func = """function %s(args) { node.setTextAttr(args[0], "DRAWING_NAME", 1, args[1]); - node.setTextAttr(args[0], 'MOVIE_PATH', 1, args[1]); } %s """ % (sig, sig) From 59f2adbf341334fcb0ef239ce082f2c50bfe6a43 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 17:27:48 +0100 Subject: [PATCH 068/244] OP-2765 - revert of unwanted commit --- openpype/lib/log.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index c963807014..f33385e0ba 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -23,7 +23,6 @@ import time import traceback import threading import copy -import json from . import Terminal from .mongo import ( From 881ec1579ec82460734e9bdf93e9d5c968525b1d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 23 Mar 2022 18:10:17 +0100 Subject: [PATCH 069/244] OP-2765 - fix exception if no file opened Should be refactored, merged 2 functions in code and extension. 
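A rough sketch (not part of the actual patch) of the single helper the message above suggests once the two functions are merged. It only reuses behaviour this patch already relies on: `get_stub()` raising `ValueError` when nothing is opened, and the same path normalization `current_file()` applies; the helper name is made up for illustration.

    # sketch only - hypothetical merged helper, not in the repository
    import os
    from .launch_logic import get_stub  # same relative import workio.py uses

    def _safe_document_name(full_path=False):
        """Return active document name/path, or None if nothing is open."""
        try:
            stub = get_stub()
            name = (stub.get_active_document_full_name() if full_path
                    else stub.get_active_document_name())
        except ValueError:
            print("Nothing opened")
            return None
        if not name or name == "null":
            return None
        if full_path:
            return os.path.normpath(name).replace("\\", "/")
        return name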
--- openpype/hosts/aftereffects/api/workio.py | 23 +++++++++++++--------- openpype/hosts/aftereffects/api/ws_stub.py | 2 +- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/aftereffects/api/workio.py b/openpype/hosts/aftereffects/api/workio.py index 5a8f86ead5..d6c732285a 100644 --- a/openpype/hosts/aftereffects/api/workio.py +++ b/openpype/hosts/aftereffects/api/workio.py @@ -5,14 +5,6 @@ from openpype.pipeline import HOST_WORKFILE_EXTENSIONS from .launch_logic import get_stub -def _active_document(): - document_name = get_stub().get_active_document_name() - if not document_name: - return None - - return document_name - - def file_extensions(): return HOST_WORKFILE_EXTENSIONS["aftereffects"] @@ -39,7 +31,8 @@ def current_file(): full_name = get_stub().get_active_document_full_name() if full_name and full_name != "null": return os.path.normpath(full_name).replace("\\", "/") - except Exception: + except ValueError: + print("Nothing opened") pass return None @@ -47,3 +40,15 @@ def current_file(): def work_root(session): return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/") + + +def _active_document(): + # TODO merge with current_file - even in extension + document_name = None + try: + document_name = get_stub().get_active_document_name() + except ValueError: + print("Nothing opened") + pass + + return document_name diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py index 1dfea697a1..9a6462fcd4 100644 --- a/openpype/hosts/aftereffects/api/ws_stub.py +++ b/openpype/hosts/aftereffects/api/ws_stub.py @@ -171,7 +171,7 @@ class AfterEffectsServerStub(): def get_active_document_full_name(self): """ - Returns just a name of active document via ws call + Returns absolute path of active document via ws call Returns(string): file name """ res = self.websocketserver.call(self.client.call( From 41d54727529b8f2b8a1580fd455616cbe5905da7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:50:32 +0100 Subject: [PATCH 070/244] OP-2765 - implemented support for optional validation in new publisher --- .../plugins/publish/validate_scene_settings.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py index 0753e3c09a..14e224fdc2 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py @@ -5,11 +5,15 @@ import re import pyblish.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) from openpype.hosts.aftereffects.api import get_asset_settings -class ValidateSceneSettings(pyblish.api.InstancePlugin): +class ValidateSceneSettings(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): """ Ensures that Composition Settings (right mouse on comp) are same as in FTrack on task. 
@@ -59,6 +63,10 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" + # Skip the instance if is not active by data on the instance + if not self.is_active(instance.data): + return + expected_settings = get_asset_settings() self.log.info("config from DB::{}".format(expected_settings)) From e5f605b1236893c9917a3ea2931f6f3e75650f27 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:51:57 +0100 Subject: [PATCH 071/244] OP-2765 - render.farm is in families not in family Better handling of potentially multiple instances. (Still requiring that there is only one publishable composition at the moment.) --- openpype/hosts/aftereffects/plugins/publish/collect_audio.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_audio.py b/openpype/hosts/aftereffects/plugins/publish/collect_audio.py index 80679725e6..8647ba498b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_audio.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_audio.py @@ -17,12 +17,11 @@ class CollectAudio(pyblish.api.ContextPlugin): def process(self, context): for instance in context: - if instance.data["family"] == 'render.farm': + if 'render.farm' in instance.data.get("families", []): comp_id = instance.data["comp_id"] if not comp_id: self.log.debug("No comp_id filled in instance") - # @iLLiCiTiT QUESTION Should return or continue? - return + continue context.data["audioFile"] = os.path.normpath( get_stub().get_audio_url(comp_id) ).replace("\\", "/") From 71cd7a3fb0aad57e191fb0c520b09921d668d542 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:53:32 +0100 Subject: [PATCH 072/244] OP-2765 - added support for optional validations Asset and Task should be ALWAYS on instance, not on context. (Publishable instance might allow different context than "real context".) 
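A small illustration (hypothetical helper, not part of this patch) of the lookup order the note above argues for: take asset and task from the publishable instance first and only fall back to the ambient context, mirroring the `inst.data["asset"]` / `inst.data["task"]` reads introduced in the diff below.

    # sketch only - instance-first asset/task resolution
    import os

    def resolve_instance_context(inst, context):
        asset = inst.data.get("asset") or context.data["assetEntity"]["name"]
        task = inst.data.get("task") or os.environ.get("AVALON_TASK")
        return asset, task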
--- .../plugins/publish/collect_render.py | 18 +++++++++++------- openpype/lib/abstract_collect_render.py | 1 + 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index aa5bc58ac2..24d08b343e 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -22,6 +22,7 @@ class AERenderInstance(RenderInstance): projectEntity = attr.ib(default=None) stagingDir = attr.ib(default=None) app_version = attr.ib(default=None) + publish_attributes = attr.ib(default=None) class CollectAERender(abstract_collect_render.AbstractCollectRender): @@ -50,16 +51,21 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): current_file = context.data["currentFile"] version = context.data["version"] - asset_entity = context.data["assetEntity"] + project_entity = context.data["projectEntity"] compositions = CollectAERender.get_stub().get_items(True) compositions_by_id = {item.id: item for item in compositions} for inst in context: + if not inst.data["active"]: + continue + family = inst.data["family"] if family not in ["render", "renderLocal"]: # legacy continue + asset_entity = inst.data["assetEntity"] + item_id = inst.data["members"][0] work_area_info = CollectAERender.get_stub().get_work_area( @@ -78,9 +84,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): fps = work_area_info.frameRate # TODO add resolution when supported by extension - if not inst.data["active"]: - continue - subset_name = inst.data["subset"] instance = AERenderInstance( family=family, @@ -90,7 +93,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): source=current_file, label="{} - {}".format(subset_name, family), subset=subset_name, - asset=context.data["assetEntity"]["name"], + asset=inst.data["asset"], + task=inst.data["task"], attachTo=False, setMembers='', publish=True, @@ -112,8 +116,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): toBeRenderedOn='deadline', fps=fps, app_version=app_version, - anatomyData=deepcopy(context.data["anatomyData"]), - context=context + anatomyData=deepcopy(inst.data["anatomyData"]), + publish_attributes=inst.data.get("publish_attributes") ) comp = compositions_by_id.get(int(item_id)) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 029bd3ec39..cce161b51c 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -30,6 +30,7 @@ class RenderInstance(object): source = attr.ib() # path to source scene file label = attr.ib() # label to show in GUI subset = attr.ib() # subset name + task = attr.ib() # task name asset = attr.ib() # asset name (AVALON_ASSET) attachTo = attr.ib() # subset name to attach render to setMembers = attr.ib() # list of nodes/members producing render output From 0506c38e00008d26eb8ce7b8391b6f53844efed3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:54:13 +0100 Subject: [PATCH 073/244] OP-2765 - cleaned up workfile collector --- .../plugins/publish/collect_workfile.py | 66 +++++++++---------- .../plugins/publish/submit_publish_job.py | 12 +++- 2 files changed, 43 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index f285ae49e4..ac552a6a5f 100644 --- 
a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -17,16 +17,37 @@ class CollectWorkfile(pyblish.api.ContextPlugin): existing_instance = instance break - task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) + if existing_instance is None: # old publish + instance = self._get_new_instance(context, scene_file) + else: + instance = existing_instance + + # creating representation + representation = { + 'name': 'aep', + 'ext': 'aep', + 'files': scene_file, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) + + def _get_new_instance(self, context, scene_file): + task = api.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - shared_instance_data = { + # workfile instance + family = "workfile" + subset = family + task.capitalize() # TOOD use method + + instance_data = { "asset": asset_entity["name"], + "task": task, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -40,37 +61,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity["data"]["resolutionHeight"]), "pixelAspect": 1, "step": 1, - "version": version + "version": version, + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() } - # workfile instance - family = "workfile" - subset = family + task.capitalize() - if existing_instance is None: # old publish - # Create instance - instance = context.create_instance(subset) + # Create instance + instance = context.create_instance(subset) + instance.data.update(instance_data) - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() - }) - - # adding basic script data - instance.data.update(shared_instance_data) - else: - instance = existing_instance - instance.data["publish"] = True # for DL - - # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, - "stagingDir": staging_dir, - } - - instance.data["representations"].append(representation) + return instance \ No newline at end of file diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index fad4d14ea0..f624f40635 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -392,6 +392,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): list of instances """ + self.log.info("!!!!! _create_instances_for_aov") task = os.environ["AVALON_TASK"] subset = instance_data["subset"] cameras = instance_data.get("cameras", []) @@ -454,6 +455,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): break if instance_data.get("multipartExr"): + self.log.info("!!!!! _create_instances_for_aov add multipartExr") preview = True new_instance = copy(instance_data) @@ -519,9 +521,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ representations = [] collections, remainders = clique.assemble(exp_files) - + self.log.info("!!!!! 
_get_representations") # create representation for every collected sequento ce for collection in collections: + self.log.info("!!!!! collection") ext = collection.tail.lstrip(".") preview = False # if filtered aov name is found in filename, toggle it for @@ -533,6 +536,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): aov, list(collection)[0] ): + self.log.info("!!!!! add preview") preview = True break @@ -582,6 +586,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # add reminders as representations for remainder in remainders: + self.log.info("!!!!! remainder") ext = remainder.split(".")[-1] staging = os.path.dirname(remainder) @@ -602,7 +607,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } - if "render" in instance.get("families"): + is_render_type = set(["render"]).\ + intersection(instance.get("families")) + if is_render_type: + self.log.info("!!!!! is_render_type") rep.update({ "fps": instance.get("fps"), "tags": ["review"] From 2c20f6832dadcc85c1ae4fda23d952b7ae7d2c92 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 11:59:53 +0100 Subject: [PATCH 074/244] Revert "OP-2765 - cleaned up workfile collector" This reverts commit 0506c38e --- .../plugins/publish/collect_workfile.py | 66 +++++++++---------- .../plugins/publish/submit_publish_job.py | 12 +--- 2 files changed, 35 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index ac552a6a5f..f285ae49e4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -17,37 +17,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): existing_instance = instance break + task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) - if existing_instance is None: # old publish - instance = self._get_new_instance(context, scene_file) - else: - instance = existing_instance - - # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, - "stagingDir": staging_dir, - } - - instance.data["representations"].append(representation) - - def _get_new_instance(self, context, scene_file): - task = api.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - # workfile instance - family = "workfile" - subset = family + task.capitalize() # TOOD use method - - instance_data = { + shared_instance_data = { "asset": asset_entity["name"], - "task": task, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -61,16 +40,37 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity["data"]["resolutionHeight"]), "pixelAspect": 1, "step": 1, - "version": version, - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() + "version": version } - # Create instance - instance = context.create_instance(subset) - instance.data.update(instance_data) + # workfile instance + family = "workfile" + subset = family + task.capitalize() + if existing_instance is None: # old publish + # Create instance + instance = 
context.create_instance(subset) - return instance \ No newline at end of file + # creating instance data + instance.data.update({ + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() + }) + + # adding basic script data + instance.data.update(shared_instance_data) + else: + instance = existing_instance + instance.data["publish"] = True # for DL + + # creating representation + representation = { + 'name': 'aep', + 'ext': 'aep', + 'files': scene_file, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index f624f40635..fad4d14ea0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -392,7 +392,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): list of instances """ - self.log.info("!!!!! _create_instances_for_aov") task = os.environ["AVALON_TASK"] subset = instance_data["subset"] cameras = instance_data.get("cameras", []) @@ -455,7 +454,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): break if instance_data.get("multipartExr"): - self.log.info("!!!!! _create_instances_for_aov add multipartExr") preview = True new_instance = copy(instance_data) @@ -521,10 +519,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): """ representations = [] collections, remainders = clique.assemble(exp_files) - self.log.info("!!!!! _get_representations") + # create representation for every collected sequento ce for collection in collections: - self.log.info("!!!!! collection") ext = collection.tail.lstrip(".") preview = False # if filtered aov name is found in filename, toggle it for @@ -536,7 +533,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): aov, list(collection)[0] ): - self.log.info("!!!!! add preview") preview = True break @@ -586,7 +582,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): # add reminders as representations for remainder in remainders: - self.log.info("!!!!! remainder") ext = remainder.split(".")[-1] staging = os.path.dirname(remainder) @@ -607,10 +602,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "files": os.path.basename(remainder), "stagingDir": os.path.dirname(remainder), } - is_render_type = set(["render"]).\ - intersection(instance.get("families")) - if is_render_type: - self.log.info("!!!!! 
is_render_type") + if "render" in instance.get("families"): rep.update({ "fps": instance.get("fps"), "tags": ["review"] From 349827b3a20a718130c214057081f0fdcaa9e41f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 12:00:37 +0100 Subject: [PATCH 075/244] OP-2765 - cleaned up workfile collector --- .../plugins/publish/collect_workfile.py | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index f285ae49e4..93c7a448c6 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -17,16 +17,37 @@ class CollectWorkfile(pyblish.api.ContextPlugin): existing_instance = instance break - task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) + if existing_instance is None: # old publish + instance = self._get_new_instance(context, scene_file) + else: + instance = existing_instance + + # creating representation + representation = { + 'name': 'aep', + 'ext': 'aep', + 'files': scene_file, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) + + def _get_new_instance(self, context, scene_file): + task = api.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - shared_instance_data = { + # workfile instance + family = "workfile" + subset = family + task.capitalize() # TOOD use method + + instance_data = { "asset": asset_entity["name"], + "task": task, "frameStart": asset_entity["data"]["frameStart"], "frameEnd": asset_entity["data"]["frameEnd"], "handleStart": asset_entity["data"]["handleStart"], @@ -40,37 +61,16 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity["data"]["resolutionHeight"]), "pixelAspect": 1, "step": 1, - "version": version + "version": version, + "subset": subset, + "label": scene_file, + "family": family, + "families": [family], + "representations": list() } - # workfile instance - family = "workfile" - subset = family + task.capitalize() - if existing_instance is None: # old publish - # Create instance - instance = context.create_instance(subset) + # Create instance + instance = context.create_instance(subset) + instance.data.update(instance_data) - # creating instance data - instance.data.update({ - "subset": subset, - "label": scene_file, - "family": family, - "families": [family], - "representations": list() - }) - - # adding basic script data - instance.data.update(shared_instance_data) - else: - instance = existing_instance - instance.data["publish"] = True # for DL - - # creating representation - representation = { - 'name': 'aep', - 'ext': 'aep', - 'files': scene_file, - "stagingDir": staging_dir, - } - - instance.data["representations"].append(representation) + return instance From 8b424f0b013b07c66a17e33d71aee2737c4effb4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 14:58:44 +0100 Subject: [PATCH 076/244] OP-2764 - fixed missed keys for old publishing in AE --- .../hosts/aftereffects/plugins/publish/collect_render.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 
24d08b343e..d64e7abc5f 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -57,7 +57,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): compositions = CollectAERender.get_stub().get_items(True) compositions_by_id = {item.id: item for item in compositions} for inst in context: - if not inst.data["active"]: + if not inst.data.get("active", True): continue family = inst.data["family"] @@ -84,6 +84,9 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): fps = work_area_info.frameRate # TODO add resolution when supported by extension + task_name = (inst.data.get("task") or + list(asset_entity["data"]["tasks"].keys())[0]) # lega + subset_name = inst.data["subset"] instance = AERenderInstance( family=family, @@ -94,7 +97,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): label="{} - {}".format(subset_name, family), subset=subset_name, asset=inst.data["asset"], - task=inst.data["task"], + task=task_name, attachTo=False, setMembers='', publish=True, From 4dcf12ee4c7c77af12c1620c756f4453b31c40c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 25 Mar 2022 15:30:28 +0100 Subject: [PATCH 077/244] OP-2764 - scene should be always saved --- .../aftereffects/plugins/publish/extract_save_scene.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py index e20598b311..eb2977309f 100644 --- a/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py +++ b/openpype/hosts/aftereffects/plugins/publish/extract_save_scene.py @@ -1,15 +1,16 @@ +import pyblish.api + import openpype.api from openpype.hosts.aftereffects.api import get_stub -class ExtractSaveScene(openpype.api.Extractor): +class ExtractSaveScene(pyblish.api.ContextPlugin): """Save scene before extraction.""" order = openpype.api.Extractor.order - 0.48 label = "Extract Save Scene" hosts = ["aftereffects"] - families = ["workfile"] - def process(self, instance): + def process(self, context): stub = get_stub() stub.save() From ea8b3b79b1c3426194a49db7ac5c6d909a0c1d38 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 13:34:25 +0200 Subject: [PATCH 078/244] OP-2951 - added force_only_broken argument to sync methods Cleaned up representation in sync methods --- .../modules/sync_server/sync_server_module.py | 46 +++++++++++-------- openpype/modules/sync_server/utils.py | 5 ++ 2 files changed, 33 insertions(+), 18 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index caf58503f1..9895a6d430 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -23,7 +23,7 @@ from openpype.settings.lib import ( from .providers.local_drive import LocalDriveHandler from .providers import lib -from .utils import time_function, SyncStatus +from .utils import time_function, SyncStatus, SiteAlreadyPresentError log = PypeLogger().get_logger("SyncServer") @@ -129,7 +129,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, collection, representation_id, site_name=None, - force=False): + force=False, force_only_broken=False): """ Adds new site to representation to be synced. 
@@ -143,6 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists + force_only_broken (bool): reset only if "error" present Returns: throws ValueError if any issue @@ -155,7 +156,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(collection, representation_id, - site_name=site_name, force=force) + site_name=site_name, + force=force, + force_only_broken=force_only_broken) # public facing API def remove_site(self, collection, representation_id, site_name, @@ -281,7 +284,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): os.path.getmtime(local_file_path)) elem = {"name": site_name, "created_dt": created_dt} - self._add_site(collection, query, [repre], elem, + self._add_site(collection, query, repre, elem, site_name=site_name, file_id=repre_file["_id"]) sites_added += 1 @@ -819,7 +822,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.log.debug("Adding alternate {} to {}".format( alt_site, representation["_id"])) self._add_site(collection, query, - [representation], elem, + representation, elem, alt_site, file_id=file_id, force=True) """ End of Public API """ @@ -1394,7 +1397,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, collection, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False): + remove=False, pause=None, force=False, + force_only_broken=False): """ Reset information about synchronization for particular 'file_id' and provider. @@ -1417,6 +1421,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site + force_only_broken(bool): reset site only if there is "error" field Returns: throws ValueError @@ -1425,7 +1430,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "_id": ObjectId(representation_id) } - representation = list(self.connection.database[collection].find(query)) + representation = self.connection.database[collection].find_one(query) if not representation: raise ValueError("Representation {} not found in {}". 
format(representation_id, collection)) @@ -1456,7 +1461,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation, site_name, pause) else: # add new site to all files for representation self._add_site(collection, query, representation, elem, site_name, - force) + force=force, force_only_broken=force_only_broken) def _update_site(self, collection, query, update, arr_filter): """ @@ -1511,7 +1516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Throws ValueError if 'site_name' not found on 'representation' """ found = False - for repre_file in representation.pop().get("files"): + for repre_file in representation.get("files"): for site in repre_file.get("sites"): if site.get("name") == site_name: found = True @@ -1537,7 +1542,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ found = False site = None - for repre_file in representation.pop().get("files"): + for repre_file in representation.get("files"): for site in repre_file.get("sites"): if site["name"] == site_name: found = True @@ -1564,34 +1569,39 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self._update_site(collection, query, update, arr_filter) def _add_site(self, collection, query, representation, elem, site_name, - force=False, file_id=None): + force=False, file_id=None, force_only_broken=False): """ Adds 'site_name' to 'representation' on 'collection' Args: - representation (list of 1 dict) + representation (dict) file_id (ObjectId) Use 'force' to remove existing or raises ValueError """ - reseted_existing = False - for repre_file in representation.pop().get("files"): + reset_existing = False + files = representation.get("files", []) + if not files: + log.debug("No files for {}".format(representation["_id"])) + return + + for repre_file in files: if file_id and file_id != repre_file["_id"]: continue for site in repre_file.get("sites"): if site["name"] == site_name: - if force: + if force or (force_only_broken and site.get("error")): self._reset_site_for_file(collection, query, elem, repre_file["_id"], site_name) - reseted_existing = True + reset_existing = True else: msg = "Site {} already present".format(site_name) log.info(msg) - raise ValueError(msg) + raise SiteAlreadyPresentError(msg) - if reseted_existing: + if reset_existing: return if not file_id: diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py index 85e4e03f77..03f362202f 100644 --- a/openpype/modules/sync_server/utils.py +++ b/openpype/modules/sync_server/utils.py @@ -8,6 +8,11 @@ class ResumableError(Exception): pass +class SiteAlreadyPresentError(Exception): + """Representation has already site skeleton present.""" + pass + + class SyncStatus: DO_NOTHING = 0 DO_UPLOAD = 1 From d340d05bf01a5f8beda6cdae1736cb59219c4a07 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 13:37:12 +0200 Subject: [PATCH 079/244] OP-2951 - implemented synching referenced files in workfile When workfile is synched, it checks for referenced files (added by Loader) and tries to sync them too. 
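The flow this commit implements can be summarized with a small sketch (illustration only, not part of the patch; the helper name `add_workfile_with_references` is made up, while `add_site`, `get_linked_ids_for_representations` and `SiteAlreadyPresentError` come from the changes in this series):

from openpype.modules import ModulesManager
from openpype.lib.avalon_context import get_linked_ids_for_representations
from openpype.modules.sync_server.utils import SiteAlreadyPresentError


def add_workfile_with_references(project_name, workfile_repre_id, site_name):
    # Hypothetical helper illustrating the loader logic introduced below.
    sync_server = ModulesManager().modules_by_name["sync_server"]

    # The workfile representation itself is always reset on the target site.
    sync_server.add_site(project_name, workfile_repre_id, site_name,
                         force=True)

    # Representations loaded into the workfile are linked via 'reference'
    # input links; those are only re-downloaded when their site is broken.
    for linked_id in get_linked_ids_for_representations(
            project_name, [workfile_repre_id], link_type="reference"):
        try:
            sync_server.add_site(project_name, linked_id, site_name,
                                 force_only_broken=True)
        except SiteAlreadyPresentError:
            # Site is already present and healthy, nothing to do.
            pass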
--- openpype/plugins/load/add_site.py | 72 ++++++++++++++++++++++++++----- 1 file changed, 61 insertions(+), 11 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 95001691e2..0ddce6e160 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,9 +1,19 @@ from openpype.modules import ModulesManager from openpype.pipeline import load +:from openpype.lib.avalon_context import get_linked_ids_for_representations +from openpype.modules.sync_server.utils import SiteAlreadyPresentError class AddSyncSite(load.LoaderPlugin): - """Add sync site to representation""" + """Add sync site to representation + + If family of synced representation is 'workfile', it looks for all + representations which are referenced (loaded) in workfile with content of + 'inputLinks'. + It doesn't do any checks for site, most common use case is when artist is + downloading workfile to his local site, but it might be helpful when + artist is re-uploading broken representation on remote site also. + """ representations = ["*"] families = ["*"] @@ -12,21 +22,61 @@ class AddSyncSite(load.LoaderPlugin): icon = "download" color = "#999999" + _sync_server = None + + @property + def sync_server(self): + if not self._sync_server: + manager = ModulesManager() + self._sync_server = manager.modules_by_name["sync_server"] + + return self._sync_server + def load(self, context, name=None, namespace=None, data=None): self.log.info("Adding {} to representation: {}".format( data["site_name"], data["_id"])) - self.add_site_to_representation(data["project_name"], - data["_id"], - data["site_name"]) + family = context["representation"]["context"]["family"] + project_name = data["project_name"] + repre_id = data["_id"] + + add_ids = [repre_id] + if family == "workfile": + links = get_linked_ids_for_representations(project_name, + add_ids, + link_type="reference") + add_ids.extend(links) + + add_ids = set(add_ids) + self.log.info("Add to repre_ids {}".format(add_ids)) + is_main = True + for add_repre_id in add_ids: + self.add_site_to_representation(project_name, + add_repre_id, + data["site_name"], + is_main) + is_main = False + self.log.debug("Site added.") - @staticmethod - def add_site_to_representation(project_name, representation_id, site_name): - """Adds new site to representation_id, resets if exists""" - manager = ModulesManager() - sync_server = manager.modules_by_name["sync_server"] - sync_server.add_site(project_name, representation_id, site_name, - force=True) + def add_site_to_representation(self, project_name, representation_id, + site_name, is_main): + """Adds new site to representation_id, resets if exists + + Args: + project_name (str) + representation_id (ObjectId): + site_name (str) + is_main (bool): true for really downloaded, false for references, + force redownload main file always, for references only if + broken + """ + try: + self.sync_server.add_site(project_name, representation_id, + site_name, + force=is_main, + force_only_broken=not is_main) + except SiteAlreadyPresentError: + self.log.debug("Site present", exc_info=True) def filepath_from_context(self, context): """No real file loading""" From a197334a251404d06f89ec3de6940db68c4b1dde Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 13:57:21 +0200 Subject: [PATCH 080/244] OP-2951 - added function to collect referenced representation ids --- openpype/lib/avalon_context.py | 120 +++++++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) diff --git 
a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index b4e6abb72d..e8a365ec39 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1971,3 +1971,123 @@ def get_last_workfile( return os.path.normpath(os.path.join(workdir, filename)) return filename + + +@with_avalon +def get_linked_ids_for_representations(project, repre_ids, dbcon=None, + link_type=None, max_depth=0): + """Returns list of linked ids of particular type (if provided). + + Goes from representations to version, back to representations + Args: + project (str) + repre_ids (list) or (ObjectId) + dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection + with Session. + link_type (str): ['reference', '..] + max_depth (int): limit how many levels of recursion + Returns: + (list) of ObjectId - linked representations + """ + if not dbcon: + log.debug("Using `avalon.io` for query.") + dbcon = avalon.io + # Make sure is installed + dbcon.install() + + if dbcon.Session["AVALON_PROJECT"] != project: + dbcon.Session["AVALON_PROJECT"] = project + + if not isinstance(repre_ids, list): + repre_ids = [repre_ids] + + versions = avalon.io.find( + { + "_id": {"$in": repre_ids}, + "type": "representation" + }, + projection={"parent": True} + ) + version_ids = [version["parent"] for version in versions] + + graph_lookup = { + "from": project, + "startWith": "$data.inputLinks.id", + "connectFromField": "data.inputLinks.id", + "connectToField": "_id", + "as": "outputs_recursive", + "depthField": "depth" + } + if max_depth != 0: + # We offset by -1 since 0 basically means no recursion + # but the recursion only happens after the initial lookup + # for outputs. + graph_lookup["maxDepth"] = max_depth - 1 + + match = { + "_id": {"$in": version_ids}, + "type": "version" + } + + pipeline_ = [ + # Match + {"$match": match}, + # Recursive graph lookup for inputs + {"$graphLookup": graph_lookup} + ] + + result = dbcon.aggregate(pipeline_) + referenced_version_ids = _process_referenced_pipeline_result(result, + link_type) + + representations = avalon.io.find( + { + "parent": {"$in": list(referenced_version_ids)}, + "type": "representation" + }, + projection={"_id": True} + ) + ref_ids = {representation["_id"] for representation in representations} + return list(ref_ids) + + +def _process_referenced_pipeline_result(result, link_type): + """Filters result from pipeline for particular link_type. + + Pipeline cannot use link_type directly in a query. 
+ Returns: + (list) + """ + referenced_version_ids = set() + correctly_linked_ids = set() + for item in result: + correctly_linked_ids = _filter_input_links(item["data"]["inputLinks"], + link_type, + correctly_linked_ids) + + # outputs_recursive in random order, sort by _id + outputs_recursive = sorted(item.get("outputs_recursive", []), + key=lambda d: d["_id"]) + # go from oldest to newest + # only older _id can reference another newer _id + for output in outputs_recursive[::-1]: + if output["_id"] not in correctly_linked_ids: # leaf + continue + + correctly_linked_ids = _filter_input_links( + output["data"].get("inputLinks", []), + link_type, + correctly_linked_ids) + + referenced_version_ids.add(output["_id"]) + + return referenced_version_ids + + +def _filter_input_links(input_links, link_type, correctly_linked_ids): + for input_link in input_links: + if not link_type or input_link["type"] == link_type: + correctly_linked_ids.add(input_link.get("id") or + input_link.get("_id")) # legacy + + return correctly_linked_ids From a0a2e2678e55f449201981b419d9a6a13f8b4a49 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 14:07:57 +0200 Subject: [PATCH 081/244] OP-2951 - fixed typo --- openpype/plugins/load/add_site.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 0ddce6e160..59720eb5b6 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -1,6 +1,6 @@ from openpype.modules import ModulesManager from openpype.pipeline import load -:from openpype.lib.avalon_context import get_linked_ids_for_representations +from openpype.lib.avalon_context import get_linked_ids_for_representations from openpype.modules.sync_server.utils import SiteAlreadyPresentError From af092348e50e1bda0ac6b3a13a58f1908cf5b939 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 16:32:32 +0200 Subject: [PATCH 082/244] OP-2766 - Fix creation of subset names in PS review and workfile --- .../hosts/photoshop/plugins/publish/collect_review.py | 10 +++++++++- .../photoshop/plugins/publish/collect_workfile.py | 10 +++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 4b6f855a6a..dafeb95d0e 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,6 +10,8 @@ import os import pyblish.api +from openpype.lib import get_subset_name + class CollectReview(pyblish.api.ContextPlugin): """Gather the active document as review instance. 
@@ -25,7 +27,13 @@ class CollectReview(pyblish.api.ContextPlugin): def process(self, context): family = "review" task = os.getenv("AVALON_TASK", None) - subset = family + task.capitalize() + subset = get_subset_name( + family, + "", + task, + context.data["assetEntity"]["_id"], + host_name="photoshop" + ) instance = context.create_instance(subset) instance.data.update({ diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index bdbd379a33..1a826c3f2a 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,6 +1,8 @@ import os import pyblish.api +from openpype.lib import get_subset_name + class CollectWorkfile(pyblish.api.ContextPlugin): """Collect current script for publish.""" @@ -19,7 +21,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" task = os.getenv("AVALON_TASK", None) - subset = family + task.capitalize() + subset = get_subset_name( + family, + "", + task, + context.data["assetEntity"]["_id"], + host_name="photoshop" + ) file_path = context.data["currentFile"] staging_dir = os.path.dirname(file_path) From 0f08f3e31df5a6ec54c025776d490343a587ab5b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 29 Mar 2022 17:25:19 +0200 Subject: [PATCH 083/244] OP-2766 - Fix pulling task and project from context --- openpype/hosts/photoshop/plugins/publish/collect_review.py | 5 +++-- openpype/hosts/photoshop/plugins/publish/collect_workfile.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index dafeb95d0e..09fed2df78 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -23,15 +23,16 @@ class CollectReview(pyblish.api.ContextPlugin): label = "Collect Review" order = pyblish.api.CollectorOrder hosts = ["photoshop"] + order = pyblish.api.CollectorOrder + 0.1 def process(self, context): family = "review" - task = os.getenv("AVALON_TASK", None) subset = get_subset_name( family, "", - task, + context.data["anatomyData"]["task"]["name"], context.data["assetEntity"]["_id"], + context.data["anatomyData"]["project"]["name"], host_name="photoshop" ) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 1a826c3f2a..71022a86fd 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -20,12 +20,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin): break family = "workfile" - task = os.getenv("AVALON_TASK", None) subset = get_subset_name( family, "", - task, + context.data["anatomyData"]["task"]["name"], context.data["assetEntity"]["_id"], + context.data["anatomyData"]["project"]["name"], host_name="photoshop" ) From 9e4e6d4b85a1273d1eeab0f473c88cb8b7f62f30 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Mar 2022 13:30:53 +0200 Subject: [PATCH 084/244] OP-2766 Switched subset function according to review comments --- .../hosts/photoshop/plugins/publish/collect_review.py | 8 ++++---- .../hosts/photoshop/plugins/publish/collect_workfile.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py 
b/openpype/hosts/photoshop/plugins/publish/collect_review.py index 09fed2df78..d825950b9e 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -10,7 +10,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name +from openpype.lib import get_subset_name_with_asset_doc class CollectReview(pyblish.api.ContextPlugin): @@ -27,13 +27,13 @@ class CollectReview(pyblish.api.ContextPlugin): def process(self, context): family = "review" - subset = get_subset_name( + subset = get_subset_name_with_asset_doc( family, "", context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"]["_id"], + context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name="photoshop" + host_name=context.data["hostName"] ) instance = context.create_instance(subset) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py index 71022a86fd..e4f0a07b34 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_workfile.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api -from openpype.lib import get_subset_name +from openpype.lib import get_subset_name_with_asset_doc class CollectWorkfile(pyblish.api.ContextPlugin): @@ -20,13 +20,13 @@ class CollectWorkfile(pyblish.api.ContextPlugin): break family = "workfile" - subset = get_subset_name( + subset = get_subset_name_with_asset_doc( family, "", context.data["anatomyData"]["task"]["name"], - context.data["assetEntity"]["_id"], + context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], - host_name="photoshop" + host_name=context.data["hostName"] ) file_path = context.data["currentFile"] From f6fb60bb49bed7a0c26825ef85b5d0f65c4aa6bb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 11:53:58 +0200 Subject: [PATCH 085/244] Update openpype/plugins/load/add_site.py Co-authored-by: Roy Nieterau --- openpype/plugins/load/add_site.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 59720eb5b6..e26ef586e0 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -38,23 +38,20 @@ class AddSyncSite(load.LoaderPlugin): family = context["representation"]["context"]["family"] project_name = data["project_name"] repre_id = data["_id"] + self.add_site_to_representation(project_name, + repre_id, + data["site_name"], + is_main=True) - add_ids = [repre_id] if family == "workfile": links = get_linked_ids_for_representations(project_name, add_ids, link_type="reference") - add_ids.extend(links) - - add_ids = set(add_ids) - self.log.info("Add to repre_ids {}".format(add_ids)) - is_main = True - for add_repre_id in add_ids: - self.add_site_to_representation(project_name, - add_repre_id, - data["site_name"], - is_main) - is_main = False + for link_repre_id in links: + self.add_site_to_representation(project_name, + link_repre_id, + data["site_name"], + is_main=False) self.log.debug("Site added.") From 6b6c466d8b6ca5b587c8ccf1a8e1dac5e9326bfe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 12:00:48 +0200 Subject: [PATCH 086/244] OP-2951 - fix wrong variable --- openpype/plugins/load/add_site.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/load/add_site.py 
b/openpype/plugins/load/add_site.py index e26ef586e0..22d3ebf24b 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -45,7 +45,7 @@ class AddSyncSite(load.LoaderPlugin): if family == "workfile": links = get_linked_ids_for_representations(project_name, - add_ids, + [repre_id], link_type="reference") for link_repre_id in links: self.add_site_to_representation(project_name, From af079897a884538a5af90bbf2301a004fef7233c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 12:07:43 +0200 Subject: [PATCH 087/244] OP-2951 - refactor use better function --- openpype/lib/avalon_context.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index e8a365ec39..496b55a6f2 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2040,14 +2040,14 @@ def get_linked_ids_for_representations(project, repre_ids, dbcon=None, referenced_version_ids = _process_referenced_pipeline_result(result, link_type) - representations = avalon.io.find( - { + ref_ids = avalon.io.distinct( + "_id", + filter={ "parent": {"$in": list(referenced_version_ids)}, "type": "representation" - }, - projection={"_id": True} + } ) - ref_ids = {representation["_id"] for representation in representations} + return list(ref_ids) From b826cfac4115f51d8387daab30e3475445256e0f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Mar 2022 12:14:17 +0200 Subject: [PATCH 088/244] OP-2951 - change sort by depth Previous sorting by _id might not be deterministic, not reliable. The main logic is to have outputs sorted by how they were traversed, which should be denoted by 'depth' field. --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 496b55a6f2..9a5d382c98 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2067,7 +2067,7 @@ def _process_referenced_pipeline_result(result, link_type): # outputs_recursive in random order, sort by _id outputs_recursive = sorted(item.get("outputs_recursive", []), - key=lambda d: d["_id"]) + key=lambda d: d["depth"]) # go from oldest to newest # only older _id can reference another newer _id for output in outputs_recursive[::-1]: From d8c56f0a67cacfc2e05b726efc0e3d8e392c0f78 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 10:39:52 +0200 Subject: [PATCH 089/244] Update openpype/lib/avalon_context.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 9a5d382c98..5ea472f11e 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2001,7 +2001,7 @@ def get_linked_ids_for_representations(project, repre_ids, dbcon=None, if not isinstance(repre_ids, list): repre_ids = [repre_ids] - versions = avalon.io.find( + versions = dbcon.find( { "_id": {"$in": repre_ids}, "type": "representation" From 6f86f78860c795f027ac481b1f6494ddd5b6979c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 10:40:00 +0200 Subject: [PATCH 090/244] Update openpype/lib/avalon_context.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py 
b/openpype/lib/avalon_context.py index 5ea472f11e..68d38acf35 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2040,7 +2040,7 @@ def get_linked_ids_for_representations(project, repre_ids, dbcon=None, referenced_version_ids = _process_referenced_pipeline_result(result, link_type) - ref_ids = avalon.io.distinct( + ref_ids = dbcon.distinct( "_id", filter={ "parent": {"$in": list(referenced_version_ids)}, From d14d739e1cfd312390d9ab880da0a589b3c6d567 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 10:40:08 +0200 Subject: [PATCH 091/244] Update openpype/lib/avalon_context.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/lib/avalon_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 68d38acf35..7d562733fc 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1974,7 +1974,7 @@ def get_last_workfile( @with_avalon -def get_linked_ids_for_representations(project, repre_ids, dbcon=None, +def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, link_type=None, max_depth=0): """Returns list of linked ids of particular type (if provided). From 44afe82d5a21f8ac4bf393fa35b2357df0c583a5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 11:07:54 +0200 Subject: [PATCH 092/244] OP-2951 - refactored distinct version ids Fixed ordering of referenced versions --- openpype/lib/avalon_context.py | 37 +++++++++++++++------------------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 7d562733fc..65575493e0 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1980,7 +1980,7 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, Goes from representations to version, back to representations Args: - project (str) + project_name (str) repre_ids (list) or (ObjectId) dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection with Session. @@ -1995,23 +1995,24 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, # Make sure is installed dbcon.install() - if dbcon.Session["AVALON_PROJECT"] != project: - dbcon.Session["AVALON_PROJECT"] = project + if dbcon.Session["AVALON_PROJECT"] != project_name: + dbcon.Session["AVALON_PROJECT"] = project_name if not isinstance(repre_ids, list): repre_ids = [repre_ids] - versions = dbcon.find( - { - "_id": {"$in": repre_ids}, - "type": "representation" - }, - projection={"parent": True} - ) - version_ids = [version["parent"] for version in versions] + version_ids = dbcon.distinct("parent", { + "_id": {"$in": repre_ids}, + "type": "representation" + }) + + match = { + "_id": {"$in": version_ids}, + "type": "version" + } graph_lookup = { - "from": project, + "from": project_name, "startWith": "$data.inputLinks.id", "connectFromField": "data.inputLinks.id", "connectToField": "_id", @@ -2024,11 +2025,6 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, # for outputs. 
graph_lookup["maxDepth"] = max_depth - 1 - match = { - "_id": {"$in": version_ids}, - "type": "version" - } - pipeline_ = [ # Match {"$match": match}, @@ -2065,12 +2061,11 @@ def _process_referenced_pipeline_result(result, link_type): link_type, correctly_linked_ids) - # outputs_recursive in random order, sort by _id + # outputs_recursive in random order, sort by depth outputs_recursive = sorted(item.get("outputs_recursive", []), key=lambda d: d["depth"]) - # go from oldest to newest - # only older _id can reference another newer _id - for output in outputs_recursive[::-1]: + + for output in outputs_recursive: if output["_id"] not in correctly_linked_ids: # leaf continue From 2694d9d557633e06ed51f684e30056c443a4a401 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Mar 2022 10:20:58 +0100 Subject: [PATCH 093/244] OP-2868 - added configuration for default variant value to Settings --- .../plugins/create/create_render.py | 12 +++++++++- .../project_settings/aftereffects.json | 7 ++++++ .../schema_project_aftereffects.json | 23 +++++++++++++++++++ 3 files changed, 41 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index c43ada84b5..aee660673b 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -18,6 +18,16 @@ class RenderCreator(Creator): create_allow_context_change = False + def __init__( + self, create_context, system_settings, project_settings, headless=False + ): + super(RenderCreator, self).__init__(create_context, system_settings, + project_settings, headless) + self._default_variants = (project_settings["aftereffects"] + ["create"] + ["RenderCreator"] + ["defaults"]) + def get_icon(self): return resources.get_openpype_splash_filepath() @@ -79,7 +89,7 @@ class RenderCreator(Creator): self._add_instance_to_context(new_instance) def get_default_variants(self): - return ["Main"] + return self._default_variants def get_instance_attr_defs(self): return [BoolDef("farm", label="Render on farm")] diff --git a/openpype/settings/defaults/project_settings/aftereffects.json b/openpype/settings/defaults/project_settings/aftereffects.json index 6a9a399069..8083aa0972 100644 --- a/openpype/settings/defaults/project_settings/aftereffects.json +++ b/openpype/settings/defaults/project_settings/aftereffects.json @@ -1,4 +1,11 @@ { + "create": { + "RenderCreator": { + "defaults": [ + "Main" + ] + } + }, "publish": { "ValidateSceneSettings": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json index 4c4cd225ab..1a3eaef540 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_aftereffects.json @@ -5,6 +5,29 @@ "label": "AfterEffects", "is_file": true, "children": [ + { + "type": "dict", + "collapsible": true, + "key": "create", + "label": "Creator plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "RenderCreator", + "label": "Create render", + "children": [ + { + "type": "list", + "key": "defaults", + "label": "Default Variants", + "object_type": "text", + "docstring": "Fill default variant(s) (like 'Main' or 'Default') used in subset name creation." 
+ } + ] + } + ] + }, { "type": "dict", "collapsible": true, From 55246ce4a77e25b6d8f7479f741b64839213f5a2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 17:30:28 +0200 Subject: [PATCH 094/244] Update openpype/lib/avalon_context.py Co-authored-by: Roy Nieterau --- openpype/lib/avalon_context.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 65575493e0..224d8129a7 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1995,8 +1995,7 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, # Make sure is installed dbcon.install() - if dbcon.Session["AVALON_PROJECT"] != project_name: - dbcon.Session["AVALON_PROJECT"] = project_name + dbcon.Session["AVALON_PROJECT"] = project_name if not isinstance(repre_ids, list): repre_ids = [repre_ids] From 80ee8c523ad20df67ddfd763933b47fc4e6a3b0d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 18:07:47 +0200 Subject: [PATCH 095/244] OP-2766 - clean up logging --- openpype/hosts/photoshop/plugins/create/workfile_creator.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index 2a2fda3cc4..d66a05cad7 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -15,7 +15,6 @@ class PSWorkfileCreator(AutoCreator): return [] def collect_instances(self): - print("coll::{}".format(api.list_instances())) for instance_data in api.list_instances(): creator_id = instance_data.get("creator_identifier") if creator_id == self.identifier: @@ -30,7 +29,6 @@ class PSWorkfileCreator(AutoCreator): pass def create(self, options=None): - print("create") existing_instance = None for instance in self.create_context.instances: if instance.family == self.family: From 9efa30d7569f0025cdd8a2d1f9a970edfdbb1aad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 18:09:38 +0200 Subject: [PATCH 096/244] OP-2766 - revert unwanted commit --- .../aftereffects/plugins/publish/collect_workfile.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 1983851028..c1c2be4855 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -38,13 +38,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # workfile instance family = "workfile" - subset = get_subset_name( - family, - "", - task, - context.data["assetEntity"]["_id"], - host_name="photoshop" - ) + subset = family + task.capitalize() # Create instance instance = context.create_instance(subset) From d92ccf8c2ee97e38d236cd20764f9a3432a3e1a3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Apr 2022 18:11:26 +0200 Subject: [PATCH 097/244] OP-2766 - cleanup logging --- openpype/hosts/photoshop/plugins/publish/extract_image.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/extract_image.py b/openpype/hosts/photoshop/plugins/publish/extract_image.py index 75e6323da7..a133e33409 100644 --- a/openpype/hosts/photoshop/plugins/publish/extract_image.py +++ b/openpype/hosts/photoshop/plugins/publish/extract_image.py @@ -16,8 +16,6 @@ class 
ExtractImage(openpype.api.Extractor): formats = ["png", "jpg"] def process(self, instance): - print("PPPPPP") - self.log.info("fdfdsfdfs") staging_dir = self.staging_dir(instance) self.log.info("Outputting image to {}".format(staging_dir)) From b16b1ee5c48df8438cbe716561df437f941e24c1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Apr 2022 16:39:51 +0200 Subject: [PATCH 098/244] OP-2766 - fix broken merge --- openpype/hosts/photoshop/api/pipeline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 54db09be2d..2e2717d420 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -78,8 +78,7 @@ def install(): pyblish.api.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) - avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH) - avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) + register_creator_plugin_path(CREATE_PATH) log.info(PUBLISH_PATH) pyblish.api.register_callback( From 43a6863dc534ab514a91a9ade561c9c82e87f277 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Apr 2022 16:40:16 +0200 Subject: [PATCH 099/244] OP-2766 - added documentation and resources for New Publisher --- website/docs/artist_hosts_photoshop.md | 64 ++++++++++++++++++ ...rtist_photoshop_new_publisher_instance.png | Bin 0 -> 21366 bytes ...otoshop_new_publisher_instance_created.png | Bin 0 -> 27811 bytes ...photoshop_new_publisher_publish_failed.png | Bin 0 -> 27081 bytes ...rtist_photoshop_new_publisher_workfile.png | Bin 0 -> 22231 bytes .../docs/assets/experimental_tools_menu.png | Bin 0 -> 9307 bytes .../assets/experimental_tools_settings.png | Bin 0 -> 8543 bytes 7 files changed, 64 insertions(+) create mode 100644 website/docs/assets/artist_photoshop_new_publisher_instance.png create mode 100644 website/docs/assets/artist_photoshop_new_publisher_instance_created.png create mode 100644 website/docs/assets/artist_photoshop_new_publisher_publish_failed.png create mode 100644 website/docs/assets/artist_photoshop_new_publisher_workfile.png create mode 100644 website/docs/assets/experimental_tools_menu.png create mode 100644 website/docs/assets/experimental_tools_settings.png diff --git a/website/docs/artist_hosts_photoshop.md b/website/docs/artist_hosts_photoshop.md index a140170c49..36670054ee 100644 --- a/website/docs/artist_hosts_photoshop.md +++ b/website/docs/artist_hosts_photoshop.md @@ -111,3 +111,67 @@ You can switch to a previous version of the image or update to the latest. ![Loader](assets/photoshop_manage_switch.gif) ![Loader](assets/photoshop_manage_update.gif) + + +### New Publisher + +All previous screenshots came from the regular [pyblish](https://pyblish.com/) process, but there is also a different UI available. This process extends the existing implementation and adds new functionality. + +To test this in Photoshop, the artist first needs to enable the experimental `New publisher` in Settings. (Tray > Settings > Experimental tools) +![Settings](assets/experimental_tools_settings.png) + +A new dialog opens after clicking on the `Experimental tools` button in the Openpype extension menu. +![Menu](assets/experimental_tools_menu.png) + +After you click on this button, the following dialog will show up. + +![Menu](assets/artist_photoshop_new_publisher_workfile.png) + +You can see the first instance, called `workfileYourTaskName`. (The name depends on the studio naming convention for Photoshop workfiles.)
This instance is a so-called "automatic" instance: +it was created without any action by the artist. You shouldn't delete this instance, as it might hold values necessary for future publishing, but you can choose to skip it +from publishing (by toggling the pill button inside the rectangle denoting the instance). + +The New publisher allows publishing into a different context: just click on the workfile instance, update `Variant`, `Asset` or `Task` in the form in the middle and don't forget to click the 'Confirm' button. + +Similarly to the old publishing approach, you need to create instances for everything you want to publish. You initiate this by clicking on the '+' sign in the bottom left corner. + +![Instance creator](assets/artist_photoshop_new_publisher_instance.png) + +In this dialog you can select the family for the published layer or group. Currently only 'image' is implemented. + +On the right-hand side you can see the creator attributes: +- `Create only for selected` - mimics the `Use selected` option of the regular publish +- `Create separate instance for each selected` - whether a separate instance should be created for each layer when multiple layers are selected + +![Instance created](assets/artist_photoshop_new_publisher_instance_created.png) + +Here you can see a newly created instance of the image family. (The name depends on the studio naming convention for the image family.) You can disable the instance from publishing in the same fashion as the workfile instance. +You can also delete an instance by selecting it and clicking on the trashcan icon (next to the plus button in the bottom left). + +The buttons on the bottom right are for: +- `Refresh publishing` - resets the publishing process to its starting position - useful if a previous publish failed or you changed the configuration of a publish +- `Stop/pause publishing` - pauses the publishing process at any time +- `Validate` - runs only the collecting and validating phases (nothing will be published yet) +- `Publish` - the standard way to kick off the full publishing process + +In the unfortunate case of an error during publishing, you will receive this kind of error dialog. + +![Publish failed](assets/artist_photoshop_new_publisher_publish_failed.png) + +In this case the issue is that you are publishing two or more instances with the same subset name ('imageMaing'). If the error is recoverable by the artist, you should +see helpful information in the `How to repair?` section, or fix it automatically by clicking on the 'Wrench' button on the right if present. + +If you would like to ask an admin or support for help, you can use any of the three buttons on the bottom left: +- `Copy report` - copies the full publishing log to the clipboard +- `Export and save report` - saves the log into a file so it can be sent via mail or any communication tool +- `Show details` - switches to a more detailed list of published instances and plugins, similar to the old pyblish list. + +If you are able to fix the workfile yourself, use the first button on the right to reset the UI to its initial state before publishing. (Click the `Publish` button to start again.) + +The new publishing process should be backward compatible, e.g. if you have a workfile with instances created in the previous publishing approach, they will be translated automatically and +can be used right away. + +Instances created in the New publisher cannot be used in the old approach though! + +If you hit unexpected behaviour with old instances, contact support first; then you can try a few steps to recover your publish.
Delete the instances in the New publisher UI, or try the `Subset manager` in the extension menu. +The nuclear option is to purge the workfile metadata in `File > File Info > Origin > Headline`. This is only for the most determined daredevils though! diff --git a/website/docs/assets/artist_photoshop_new_publisher_instance.png b/website/docs/assets/artist_photoshop_new_publisher_instance.png new file mode 100644 index 0000000000000000000000000000000000000000..723a032c94488abf6ee60e11ab86b178e1cd5ade GIT binary patch literal 21366
diff --git a/website/docs/assets/artist_photoshop_new_publisher_instance.png b/website/docs/assets/artist_photoshop_new_publisher_instance.png
new file mode 100644
index 0000000000000000000000000000000000000000..723a032c94488abf6ee60e11ab86b178e1cd5ade
GIT binary patch
literal 21366
(base85-encoded PNG image data omitted)

literal 0
HcmV?d00001

diff --git a/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png b/website/docs/assets/artist_photoshop_new_publisher_publish_failed.png
new file mode 100644
index 0000000000000000000000000000000000000000..e34497b77da37b31ebbf26882242ed73040eba64
GIT binary patch
literal 27081
(base85-encoded PNG image data omitted)
zy6|)|hE)1ne5_IoNhyE&gRA<^ zh4q5b6u!3Nm4lx|6gm|iKX-FZda3TdGqSb9W|6D6|73Wm=t@~f7n8ebIBvMeu=^xO zsw9}hXIAy3Q8Bn^gd6%?dr@gde}C~HO$C>E4lC3eq-+{iR3S}45wiEU^yn!g>b-5|DU-v1z7$ zgj3A=BGd#v0XR^9yQNuIakTfk*ACj)HMIJL&UK$MY=F2%hebKhLFG~A=pNL`2jj)c zs^W8BZUeB|L*Ye1pRbA2ZZ1X~?&X>Fd{(Zu&0amK3Ow(Ff}_kkLC@7R4Ttgy-F!~| z7JQ4`Ao93%t8q|cRk&~L+4=W7e_3sVvMYy`kOTGzmc}2xe4xOl(+rU}bV&SSW9ZLPH(D{s@yYgmA0O zmwodFB@S9E)=Q2nJM(yTKM^4#V8lPPXdMfkU>CIE+Ry&l?M1q0jk0#3{^fpfhF6~x zyHm}hOD~+KG_Bq zwK)B57h$tMZyHsh+@g0!FvH?It;-m`cFfs)&=aAl$;oebC?Ir-w#=$Ir_Y`_efA9d zVUuqCZt$aG6Cg82qTdsXJ^HT!s=jxw;DsLL6RObcD?E6#tZj%pQu2 zj=Gt15?2!lA_NjHm3?^EgKFaAR-psnP+yGWnx6$5VWHe|q>^`C+Wy<1=|s+?jMy#g z;n2jf<*(JEU^I0ce^JZ#H7D&OE#F(Mrm^KEvjQ0X4Kn+q@1dLM_@`qMsDUqpIg9LM znM@TD6WBxrbP>^*-KPeCd}2}n=%T?T!b$~i0bBrx#72RD94j_f0Vs+7OEdeaKkFTv z{z@+ZNc=ekm+4)O#L5Zm|JQf^hffu6=y6_LT=**JeKix|=eJ6@0}PO3Frkz4^BPv> zVmhGZcaapRe#3A*us+T?YM6x-7PF*57-5e=w*pL<`u; z4i2&F;Q1G9olLmi0TQ2*kib!)D-VJI`MQU|dU8m8SwK(HzoU0-#E(xoaE}AzbJPoJ zdO^`wUfdF|;aG}b=D~!kL#6`w!*=lie99Q1mV?)32Wj2L*WA4D%1JyxV1g|o;)6GW z%OOgMfywDcRHEnuz?yN5-QPRQuGR*uiiWIGw`k&~Y_1nQZITY6T!m|X@qMaiE=3+@ zt3YILdBNsm_dh+|8TF#sUK83nC2}IU&T2red3r3x9xfC~KYF1BxnAzoG2y*(bii5xA@w0#9ObS10%WqQ&zAz9Yc zki$1d3l(s~wP?{-Kff`}QP&J88mB$D@WbjeX9Ugko(TsQ@)Ug`P_9GqcneYVRKyPu2JuwA) z+E4T84Yv%Tp3_C#l>vfU&wl4aI`^NyN9Glb#%>r|vb8&G z|0*VVEL2^V|ItCV5JJ=MrJICZ?V4>|aPIh!xNoDf&^)>m6C+U`qko~W(!3E{7&$Z* zjrRTO$ho-V^;OC~wAM=Ld-kI=>szfeI~?zOxJ2x}n`5tkXM^}}+}TH`VvAE2pU^Cn zywbGV%0BJ$;>gal%twOz_#5I!h1`5M@6OnB`mpNL9V!hrLAr!#p8mm4`gXjfGbSs_ z!U?t6>&Nr=x`ZS--acGvoIN2-ETWX_`k-Qvma@R#CxTHuM^-2dD4aXR9=Efk8sawkUcEZyohG@8G6sZnmy9Ocv+?(Oj zD)EgVd=cYO>vrm zXcYK7fOxs2z{Ip}@VHs7bN5NB619-tw5JV&{7@R}=!c?`~d{6PG}IS>$-uYdFbCs?S>U0l{9_q60WjwoMnS zcXko}3r18yK$1T(Ua@u!cm4&5ppI2|=K&sU3@@fFh8bWrG2d@5yz1gU0gk^%qd^fd zWPrHbvEbmpfKd$ZOpv)c)Y&=S&?vjakuy$i(>NCRH#(O4NCAQ5RS`=p)L8x-{Qd`! z{eNptazd4osUo`~E@LkC4i8~HJui4@B`xDM`kIroz$}2Ad-E7t7J*#Gg1TrZo!@|+ z>f^4aedoNGBsO5i7yd`rU_YTJ0=iV+V@n`{;(tjU|5fPx7b^c#0r_vuszoHu3zv_b zD^O8hn|cOld*`ajsK^U9t`VM2C4SM{7-&my%hwkP!^X!sRRyaODTx-8a=1c*ybka6 zW}7piO<}PFIb3g>5W;k+dNaJQUX<{{4Q$qWPt1K5%JMGt99Xsg`C$S}^(^KOgZbK3Y zktXzo>jCgu@+Xkw+qBQ@XH&t8qhMv&MTyGUAx8y2Dr)+Cw>^92tM@X$HzNzR&*uR@ z=bf3|R>vLAr9<|{!*)2Q>GXnq#TF%zl;*u;KgT9h((#6bMf*RAEXxqXKDu_0n`+Zd z0GS)fWZT0|zPj?_>cAY~8ZQ-6BY16-Rl0sC-gsruWwo{mGk-?DUZ3`ZR9>?%5GemOu0*H@$nS+-~UD!+$n1~pS7rqh^H1Gy|m zcV7k`rsp}EHJBYXEuTrB3n$qk*=Rpf$`MrV_t(Rs!@ueOa=FN?E)JlWN^2Nnew7vQ z3XLo)sd{e@yWI6LO|Mkwk3r|t@&=F4Q+V8B)+Q~Cl{bQZ{xT1rC-kkVgx-8hd&Swn zyqZn`w@Fl_zYZvUK8}z?1axrJ54Ysx@2J^{r`tSy7ut!pUzt$c8BFU%Dfa?9gWc?V zo>NN|p!7b~nY<;qU6x?4L5pjWpWFrVLz`u)9?EJEn1Gr7YZ8oSYbw&#@>0 zjc4MK36c?Wn2FK=xh^3&j;YT_5HU<3X_=wusR!*N}QsRacWOJs83rauF0R26(XfhW|^Wq=- z3QdoT6Lt)u;-%?3-vEEkc)RyhPL7Cmr!b2=Z7wVytY!XdPeS{!@(d_qx0IaEwv>-2 zW8Tn92b;PILw@uTMx{F1Vd~%Il0!5978fAc=Im=qU-fr!eg~!W8NH@btv~od@+vq? 
zGG~9fT|VLCvbRh>k6)+0n2P4VM_->rTBXd5bgk8;5vXqcGCUtKvco_AZS86hBg(rQ zl!K@W1fzQk@+M?PpNZ^vVZ~)ft8`0tj-y=avwO|p2DCp62o|8S$)C0VC)<_7UO{X?*?f&WX2JVq+MEo%SXGcya0sLwVM zqvx*IWE;)(_>`v;*+x@_W_>Up6Viry-}|Yl`ll7E!2z%QW2Kpv>5d^^y#2I(eOMt& zMR@4h2;vxqRb{2=HiUfe-a{ZRuGS;j1NY%q0fEQ75_2#f+26R#bg`$D~-xo6RKsLa9-SOKM?syyxPTWs!|= zrA?`+Qwl7zX1~}jO5G}DvM)C+qT)mJk#e`U3(K_s^aScuDk13p;%jX#eghl)*WbOn(KcP1~xe(a(HsFTI>v~{xauMQeI;klttj=W+dMX5rYG@V<5LKm;4 z2Nm#lD@WG!<-uCZ#S9Qpk;SRp(xs0X%iG@hi)nZ`Y;W2?09BoE$f~Y=G;V^?0H#rIOZy~hPJP|%w zI7kKt?3rJHhb#Fq_iumQd>Fxi!HcP=HAeoKa?2MQ8m)%=5kt+5b2#B&vvLi&yv}80 zOwSv1{Y{0d=hhu7tHP3vr-sDYN^F_a0@FCq5$(5y=LthLJB2!u!Wv?W{jxFkJyBYD z+hV2XIXcRj{{6f4TcPvBgI8bF1aWfRWuCzItFDJ+U30_pD#VWvQ&}r>5RImE1-R~7 zkfnn^9qg3DdFjUvxH3MIU-DOd`vM4gL?DqDdOl~>xwew>r*#ALvb!i6q3$9UKx>eK z_zSrDqF8t^_{PXqK>rr1D zW-J*4~{22FpBG zu}jHbGRj)t=@`uq&S4U^;Pbz0KYvPTyoJA(H;1pB)O(X9&Ht=p=mZuvs^q&IrC#H(k*dDo2p#nzo-u`xZY=m`i`F!!edBN!|J)ap z0bV$fR2x1YdH1JPf2Zp!*O~q(O2r4~} z`Vy@9(6nMatNK(#gIHbXIeVr%`KEFobyf@agyvUGD0g#bWfo!-0a|LpWQ5C6pn`6` zms1HkynjyfQDOF6v8{9dhgz~{ZZEW3Pi{5N`9q3`_~L1098_@u4DV)UQG$LMrV*XZ zvLHvf3)R^#UWGyD0y#nw;OMra2_*-nNQs~Ysdp!>u^axDFuoh9m1kurv4dl3<<1?i zWO0u!sxaWW4(C>D`beC1&WUqexAEnZylwA7@4@aHd*Pj`T^_YM%s|PB&DC=zeevh% z*TxJZa=e{>DeLdqL&@!hp)+o6rhGRe;Eo;Y2}YX|4F~rL=4bvCYC=j*H2V;$XX&-S z<^ziOS)3vIMS8BWnDG*uWyqx8^gwMPzx5~ne8yxm#Whmp_qxVfr=z7K@=Po&=B~HSB-8eP4*Ryy7qg&4FQb~Ln59KltJ|*8n5L^f<{WtdM z29Z0630!$Lmk}FNSQ-5poUSLFuDSjuf8oe_l5#!m8_i^NRxID6i_(jwqYWwUqZBNB_MkXlWO~y;+G$w{&U}r6n}3+j&Gg|NIf{C!zRzj>G-cfDagl)9Af$ij2%-;PC~1&c1>^E!=Xk%J zKwnFLticMDzBV;o1xIej{!V)>d%pOt&TZyUyc|83b=A_l6S@zU=}5Mu;}48~01R&` zcJ7eNBeU)~(=MxuA<=2=1@=pRbUyQ51SQDWV1x<}+IV&t(@`jRyYP0!li`p^&y>r5 zRq=2(p=Dnz%FYGR%Q3?X#TwmP1N#=zhy(;a}8=d2C&lw~CP`k~g#f6HBR(9DK-^UC;tLc8KYYg`2p{L~EK zjn0=72Ny$pfL8nL8pX?UI`pcJ}2X()qI#K6DraF(edtALP|8sEj!1D*4x0!D(^A{GQS5XOI#gx?KR!Xw8?1w zEB$!C9C##s#=7!QAUlt9OJJQ7o68M2TaKJPJmqe?+{fleq3>`Mo9~c0ba|31xW52! 
zVjOOnnoonZ{++&TA)s7+C_pKtT;noRQx2J5!in%t^t@5Nejh#GJgj(fi=)c6n}=`V SEsPCz0W~FU#hMq^Z~p^3&eD4T literal 0 HcmV?d00001 diff --git a/website/docs/assets/experimental_tools_settings.png b/website/docs/assets/experimental_tools_settings.png new file mode 100644 index 0000000000000000000000000000000000000000..4d514e8a8fc63292579bd7f4a8b63b20c7c8e3d9 GIT binary patch literal 8543 zcmbVyWl&tr)-Hhrf?Ehdf`%mM1a}DTE(spoEx1lX@E~Dea0$)~1cKWz1PJaHf-|@c zkbz-vF7JD)Zk_Yhcg{U^f9$<`RqfT?)w|d8JZtri(biNVCZHw2!ong}QI^-m!n$X1 z*WP}Jdv{kR-a5N`-1E{^lEtbVq2IYP?%T^~$Y5brClX#+KDe{-J(P{Su&_wF{<`k< zx|i8tVKJSk$jj*an9cG1K^K9+)TKXQIzo+!P-@g%yN2v7W3ttP#1xe^ zpCSm!m`wEc0r`92VbBu4mmObn#9Sn7A4+6z&BCJka^rhfDK!8=n9(A&|FgR`4}mJp%fGCZ{(3nn zjkP5W-r-;A3@{Y~-fK0H`f-|Sk;NSo{W2p35p<5S&A#p0Y6?~%utD#auLDn>*&aU0 zShtVNWZY8^5|@_W+oBkfmrv2(Jwn1OUMM$MxkQG$;#Q8rY5h_jucU@mi7;1`mXArE za+reNNd?|Cj`XwzxIX3M<73gA{e>Bh?Kqthp(utH=%qck){e4Lj76NOhvXw&x zlE?F~cVTq>gn4Ye)xN79C4fr@Q!Y20kCrzzdo6f)6mQaA?5z06y?x-}jIp=dd!~QH zc<*`*7AqB17nwTHwMZiz(jd4 zf9pq28cnRkL3zv*`&uojTWtIGFFsi1JTwO8lgyJ`)FNS(zE&hyl!n5aPdRa2^XM zG%Mb!Z(rm1qtA@>d4v+pCey5@7ZhQoIOF;jRS>9+>=9{jwq`IquT+O_V6!&rQpScy<=S}nSbdb>Z_Rv5V zeLNGVBbQV~x}EnO+gd1zy>{zjtf7h}^MEk<3Pc{llS znc8o4^G!eLboq~Vp_3zG84X#aknZ{6V_SP?KNr4Ht<`$&-*3HNRmRJQD|!|Kxp9t(g%yZ8GmHr&v|&ydjHsyW1O}DR|nmV1U|@uNsy`E>p4%vUm^#6Z0=# z9gY?{UiKcj6YrW`tKk5Z((n zSkCL%#ZYyDv-&ZSYl*|R^dnQ1q4-@r3;&lB3>4)AH3 zBy^w_RB&s4kDXNo@4>7msO4&FzN22F?p=_0T=Xg{0R-npov)%=@)(Chk_w+>uz^l$H~GIL#59gO%$uX|oTn!oDp&dIB5`ftQ zSjz(w^9P39XvIUOlMWZU^P{A9i*;SDGw1Ji#}9V&L=H%%w?ns!*)^86KTm?D;-Lah z!)Lf7oo>$*U3hzo|5~Q#^t+k8heSOsLbVBK{MwVxS#CVxlM zIL(@sYNtR^=-?=0;%bquC;b<&Ctzm{X-X;0v^t=6NST=Ha)iOSWmNdx_g&Pzag8dK zJ6y?=cI%BO@9EFl;xGcmh|btpAJ=6Bw#{7~00S9wtVh<4_m9syhOoIF1{+oFNkhW< zg)BFB@-IUn7eX=y#TcuxF$^NMAX3#XyVIIiPC|ZN)#zOFNm`5=Wzj{OB6KB2_g-Rq zxbt%n3wAwyLH=LCllOMj$a6otvqp-c6Pw2ranN(A@#FavEW6CMx;Hab>b-epX`gE< z_Ff5d;sG;{sTP7l#wvWSx)ot@JFHv(D~;XjN{jBpjs)DjKxDzfLdX|uH@bldj){(Z!ugl| z1THWC_|=`w_3qy~vIG0AjAbe_a7OC<(nVQVXdfE>7hZo4_y6eL;-+Z*Ge9Eha3Lfw zU_CFp^ZkMpC+WuzMr0(cC#SshPP+nxk+MRnho-Ssjij;Y(7M&W5n6$rjaYlLCh{N* zEFOXBm0pzUK}*^{HZ*+9Ot#|}HbZ9_E*I~A?NKYrgAuh9&^IVd0PdtHGA(mhQ2huU zU^j}S+R!|T?E3aFqEnkRY(uTO=GnZ2vqOa3Qs|JJV}cRyI!asH!490wgtG439aw=c zS>7jmm}=SGiCr7%uukW7odW9}4wjvroyinkS$L6`(fk$+9%Pj+KSjE{2! 
z=9QnB_D8FpIe6@fK{Jw1=WKe9JY9m^UMq5pebmHz6xb1#t)87Dgy+X~Cgzs$^rw$K zFp5-cR9s`@(O2*OU@o}Ew^o`)^}(Lm<-Uds?FHm0!eu0w%k;`;;n$idWs>;ubM;)Z zo_5A_dwOv+gKUOOuqDZ}?u=@ZW&_CJOkY5}2O>B=O z(TonaR&}zOicHcEOOHl+1Po49mrYKr(r#b@?MwHk$S0U~tsx*V>+> zO)1>I5RGvvhR7~<`RlcyvkSh{Ebh__J0ZgtVy#0JY2hb(K~5gq9)jlXN8y%;_%S1L zX!pT|oxt==I@%M#Eh{)((^mIKvG9ITA3?f@mr7RcVD7-C$>rLrLfC8553c))!gBVg*T~Ge>A-fk`QpjIv*`fhXot87hu`o&ZYA&RLjr=zW{Ni7c2um@w4jQ#+ZQ~^-?Dm3t9uI2oQ$G4)BGTAV9A-e=9``@ zsB%l>r;u2p$Dzc=IVf(Id-Xf0f&3DYb|ZD6^k#DN_F;ab^VrXtv`Qv;L{-(YuZJ9y zcDMY_2R3Hn=daEG^u z$^P*RgL<>_QLeC!K`!%^XQ(^?V()flDDw}eS128q#V4?zSm`AOTJ@~I#-mHz(=s_~ zc2|@z&9Xs?)Hm|}*hzv{CKq^PAfi_L?b{C%p!bVQA4X%_hC;ES9Gcn@B#O7*5$w|q zwC+gqQ&si(5jf`woDvPdH?#b#Om}S6j{U1N#Jk|Y70t}1g}KQ}-P*CmPn2=)$*0}w z`sUwJ@l#GFaGB5O;xUm9@JwoRFD>E1X&)?X_BJXrsD)9nY!k~js87z=z67dLn9hZsKp&*Wk3mtNl??vG;}Vi%d7r&I6CAZw1)z-QVh#TNG1|e#YU3`A`k(ysp9vu>rQKXzUjD}V z-?|S1sX{SM%xL&V`?u4yaaW{<9W`;~{@k9(5`n!IJmV+~$&A`IyR3UM`BvP$B$xf8sp zEPbm94$iis1?GRx`>?tJM&w>^=1DKaWrN-?Xe z@R`)EiG)|tHXY^InjF?tnFw6`-?)%CoPe^jI68ac)ck@c+n@o_N2$HLqP2wCJ|Pa) z5d`Lt76VFErL>D>LxDbDVyI>PA~s13nNDCZsf7i{vWs3Zttead8oZbTLEt_G<}IRF zG+T}NmK?l0`8ij@ZSv+sK5Y3JS~u#q8S4@I9ztYt zT;BNAkTch-%SxD3J!9F;sY2|He)+T=XP9oBrvJ@pXhtHA*#JT}#2MR`gd`{&k<`%z{z*3lUHUN|8X2ie%a-Gazd<%QiLyC5z z+DJ)01=j-Zq$n>R!tZh?w+c9E{)Zg<=l%FE<%j<@Zi=qT?d0XxrT@MBIz%Ym#oT|b z?EZsB%WA+*S!%|`Ea@f4*4o&x|0oEGY`GOWRT1DLnmig7lA#vouDKvN0I|n;h{S2y zf0Puh{f^4DeL2jVc>U^$D}|137aT<#WlYYemo`1MC%>5Mw7+yLe+XuQ zTd2XJ>h>C_e%(!-%kL!_vUPAdWC|=B@MDj~`CGS*)XryvY~Si|@shj!G=Ca?;AN}x zS4541^cMN<%f*X?N$B4DHYj>@-${Dbxf{pVt2) zvQo7{=Ub#*MkE0TngNwt0ZQ67r-f2nPJb@?vD}hQbIckGWabj+8m(5h!nXOsrmTL8z*C7eh@8-)yj)ZAZ?6bP z_gCNY$QSl~;8?b~C;oi}O~3^76@ARDKgm_bzj{Xz^!N}1g{HP=CfN45B<9NRcD{Q& zV1w0+sCLt*Hu__ApaM2ijIr};)T1n#WMnJvq|k?OZ`*|?oA2tf<_|z~aB38cppDT{ zi2--Y7p%~?3vx08^vV6fEe05l?*Eb^}Z$Q`PWjyP#pn9B|3~IY1cCJES$I! z`(%uDHxH^<$%f>#weCT(=ry98^N$<-A$kwW2X;Mb&8eB)k7h7aD`N- z*r|%0s_Y7ti&BigSZnr-B&XVlm~O?Pj1+x>NU=ohVA|@E_+Y@tL`Cjpo~Cm~SX*C4 z#Jpra)k}EumSbh<>R>9g8V~X-TRTxRDiTUS7Z=i6dNOfpzE}cj!TVacS8+qrH!vYh zg{j{MX+`PpQ96Ql&(3r3vdK~jntngkWTv%^Ja2povxd8YQ@#P-LK^+cHf{F>OOaH5 zJ&CTHmJRxbpNnX-zp4FrWjrNfHz_`rrHpQVkKlUr`0>}eB6`k^=C&O6%g&h)HFVPt z7*9kl2mhX;x*n2;YgcK5ZpKa>m@`u&D-0bWSXY9gJ*T>L(Xfe5hqsjRhFucDY$Pn! 
z@BnT5nk7OPsPTT8^yd8!%~;grZVGBM&m;{icrqd7yCcFRQ*U}mwh47Vz9Z)t7}Pe+ zE){wbrY3!}->MS7wQKEbo)!$Y<#@R0q&qjPR4DOFOG~ZuE|Tta&deM142|E+vR&Ex zs1Lf|5(_zO19Erspc7sBiu*_(iJa{L!LL=B#9DtW-0!l`(8!QGnc&U#cj4CK0f{&n zIWe%}Kc}+)8{mCG&3ulD?)(37{b2zp(Xo{2mZ)ew05d1;yAUoX=!Mr&})g?%u%1I0alaA(%tSrOi*oVpM@L+96g__5Ym6muGu=h(+UOU^oLYJvw2lTV*(!@6)4;enNnvhKYZkO|*TWSiCe zdyebvN0N4qr2Q9#&!lepzDFM9RU}&%-(-n+V#6=;?9!EV)_N0r*Q9Ump-PrM45k6r zN=xeH%q8L6$eGed)m#TE<`LFoP-MT56Xi3``#ox9V{-!GD!)-$&navgh9=up|E zDrAes6Fj_zvGWVxTkG#5eQskPZ{_zs;;OoztAp%6=?i^#zG-u*CEoyW?P6U*GDef0 zi=bq_hpPJ6&|<5}+x~t*hSr_R!Fif-={kxTOUBd1B4Q$#l z%!Dn(SaT}B%_f0T{&?$aQ7GeSgVx97UK#bAiG)VYbmZ#Pn>Gskja0Ks?L<>`q@biV z`nQxY?FD{)L%yURjp1{Zv-Hy`&y7u4MnypHA<9kdIyochBfc)e&dzSNBYyAC3k$&b zpzu|qPr9C%$>{A`Zb;c2PCKTxJjeooq9+eCBf z4p5H9n_+8_ixU0uM%C!OF{dsi$6{U1k-FymMMg)LWfTFt{le&7n-~2O0%VT%y)qUb zsmnClZ9JytSs8C%DC+ezsK1nh_%dF9KC~Q1E|2ZqE+c4RNA12l0RPV+GLxZjRPjdU*+%1kYh~R9$+3Vs@>uo44@YqjlkI+?K z&UU-&%{Bs$|L~#Z%%{-;*K^9vIungtcbx5NeTW(>^)Fcw9X$1&dJ5Oxn-)_SA*8E0 z$^LD#xEsgiWq=^r>@sCjex-545wy1fYI8TRpVIodlP>jgu1a~JhUe9K>sS;c0>LPi zkOR1OzdEZDTI-A;v?CrLYm&Wf_;hjl;l2|XGmk5f`G<6G%%kfLdh*+Wy1K*~F2Dn} z8xK@MoT$Q>#0gsQ2fcw%${HcUCb&G!sp($0P7WnHE>l?zUXPQVPH zm2?TEne>f|$sa1eI^YDu zg_o&Q^T}Yy^#+=SmOZizrSX1k=phvshIQs!4+BK8GQe#w!&g0KfzLk|9%Cn7(CG2T zz$Iw?R5r319a!LWxvl5SWENu>swKL->@th?FlF0D?){BoynBqZq+Gxs#ELtYID;RQ z+0HSqH)_(eJWrPzI1dE1FCm=Wo9S4Z;hKQA2dH!!O7 zMHRJt17V)$kGI!Ld|3klxA2A7YX;*;23n4rU-1j;3v+F!I42Y`F zE|Y6*(W^F=0+OiiVl3TTO^P4z`lyO8iID^I1bnmv@QoJoia@F|i%(~VT6W6|Fz%Bf z1H%yqvcb#K@bcBhQ{}OSUxiPp<}S7K=&rx&OdLdQ393KSJui2Z{QfPm3y1ewd2E#7 zB;+jB(iU_>wB7`pr-*+fM-7)<7U3U1`UH1vy;gp)-!KP<#IR-Re%iV zW9D`~FPV4Ux9SPTli=A~AQpQc22vj6JiFPo|A^MSoq(@opqeI{bELKDrEQiSDTYa{73r^A2x+TV3m8|FgN_s(amhnb6+) z9^kED4;4rwA!1!@psb&yZi7eStV`yGqderQ{6M}CLJz2gbf-zNRuT1&PXVU#kBL3c z2>i}u%+T*4dMaF9e#tNzgJSXGx<}2fJ|g+$rUs51Rtm*>_?Rz@aZiC$RPXnXJ1a_&Yv{-WQXWUa~E?VK)E8XF&~{h->|e9NB=JkDz# zq7J#BId-S$RI0}8XpJ2 zW;1QL^z=8qqf*K6;8+9qg+Xm$Yx%gjRnMpCCw0)c4dMg0W3&Ba)!6_yrwL5n8=PGD za-iazu=Aw5NgJ-F!gfO;KwC2Xz~3%*=9TN8-sAgjWykc1hKa5Q%}LgCF#6xO$B(Y# zE`@b^Pt}d5%Po2p1Yk$*;$vk$q$}jQh`Z7hBW`X~`fImFEHAwPAA!1{tDe6O`+p3i z1IWi6djIdi|G&w+a}VpnO!@J$9S>i5$;%6n{kK9>j^yk%)B%sTf8*DSyPqPkR1`Gj JD`nq>{U56@0@(ln literal 0 HcmV?d00001 From e150db350f972ac23a337fad14b6bcf8889be12c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 12:38:27 +0200 Subject: [PATCH 100/244] Docs - changed docstring --- openpype/plugins/load/add_site.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index 22d3ebf24b..cfb0ede328 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -63,9 +63,11 @@ class AddSyncSite(load.LoaderPlugin): project_name (str) representation_id (ObjectId): site_name (str) - is_main (bool): true for really downloaded, false for references, - force redownload main file always, for references only if - broken + is_main (bool): true for main representation, false for referenced + loaded repres. Drives if site state should be reset. + (it should be for main, not for referenced as they might be + shared from multiple workfiles). In necessary cases, referenced + repres should be reset (re-downloaded) manually. 
""" try: self.sync_server.add_site(project_name, representation_id, From 4b3405056982e7dea251e6a1d9b4d30d5a979bf6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Apr 2022 20:33:02 +0200 Subject: [PATCH 101/244] OP-2951 - clean up of unnecessary argument force and force_only_broken could be replaced only by force and explicitly catching SiteAlreadyPresentError --- .../modules/sync_server/sync_server_module.py | 48 ++++++++++--------- openpype/plugins/load/add_site.py | 41 +++++----------- 2 files changed, 36 insertions(+), 53 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 9895a6d430..af69e645d5 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -129,24 +129,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, collection, representation_id, site_name=None, - force=False, force_only_broken=False): + force=False): """ - Adds new site to representation to be synced. + Adds new site to representation to be synced. - 'collection' must have synchronization enabled (globally or - project only) + 'collection' must have synchronization enabled (globally or + project only) - Used as a API endpoint from outside applications (Loader etc) + Used as a API endpoint from outside applications (Loader etc). - Args: - collection (string): project name (must match DB) - representation_id (string): MongoDB _id value - site_name (string): name of configured and active site - force (bool): reset site if exists - force_only_broken (bool): reset only if "error" present + Use 'force' to reset existing site. - Returns: - throws ValueError if any issue + Args: + collection (string): project name (must match DB) + representation_id (string): MongoDB _id value + site_name (string): name of configured and active site + force (bool): reset site if exists + + Throws: + SiteAlreadyPresentError - if adding already existing site and + not 'force' + ValueError - other errors (repre not found, misconfiguration) """ if not self.get_sync_project_setting(collection): raise ValueError("Project not configured") @@ -157,8 +160,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(collection, representation_id, site_name=site_name, - force=force, - force_only_broken=force_only_broken) + force=force) # public facing API def remove_site(self, collection, representation_id, site_name, @@ -1397,8 +1399,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, collection, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, - force_only_broken=False): + remove=False, pause=None, force=False): """ Reset information about synchronization for particular 'file_id' and provider. 
@@ -1421,10 +1422,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site - force_only_broken(bool): reset site only if there is "error" field - Returns: - throws ValueError + Throws: + SiteAlreadyPresentError - if adding already existing site and + not 'force' + ValueError - other errors (repre not found, misconfiguration) """ query = { "_id": ObjectId(representation_id) @@ -1461,7 +1463,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation, site_name, pause) else: # add new site to all files for representation self._add_site(collection, query, representation, elem, site_name, - force=force, force_only_broken=force_only_broken) + force=force) def _update_site(self, collection, query, update, arr_filter): """ @@ -1569,7 +1571,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self._update_site(collection, query, update, arr_filter) def _add_site(self, collection, query, representation, elem, site_name, - force=False, file_id=None, force_only_broken=False): + force=False, file_id=None): """ Adds 'site_name' to 'representation' on 'collection' @@ -1591,7 +1593,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): for site in repre_file.get("sites"): if site["name"] == site_name: - if force or (force_only_broken and site.get("error")): + if force or site.get("error"): self._reset_site_for_file(collection, query, elem, repre_file["_id"], site_name) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index cfb0ede328..ae765c81b7 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -38,45 +38,26 @@ class AddSyncSite(load.LoaderPlugin): family = context["representation"]["context"]["family"] project_name = data["project_name"] repre_id = data["_id"] - self.add_site_to_representation(project_name, - repre_id, - data["site_name"], - is_main=True) + site_name = data["site_name"] + + self.sync_server.add_site(project_name, repre_id, site_name, + force=True) if family == "workfile": links = get_linked_ids_for_representations(project_name, [repre_id], link_type="reference") for link_repre_id in links: - self.add_site_to_representation(project_name, - link_repre_id, - data["site_name"], - is_main=False) + try: + self.sync_server.add_site(project_name, link_repre_id, + site_name, + force=False) + except SiteAlreadyPresentError: + # do not add/reset working site for references + self.log.debug("Site present", exc_info=True) self.log.debug("Site added.") - def add_site_to_representation(self, project_name, representation_id, - site_name, is_main): - """Adds new site to representation_id, resets if exists - - Args: - project_name (str) - representation_id (ObjectId): - site_name (str) - is_main (bool): true for main representation, false for referenced - loaded repres. Drives if site state should be reset. - (it should be for main, not for referenced as they might be - shared from multiple workfiles). In necessary cases, referenced - repres should be reset (re-downloaded) manually. 
- """ - try: - self.sync_server.add_site(project_name, representation_id, - site_name, - force=is_main, - force_only_broken=not is_main) - except SiteAlreadyPresentError: - self.log.debug("Site present", exc_info=True) - def filepath_from_context(self, context): """No real file loading""" return "" From ad3dfa872e8fd5b88b1581e0a32eccfe6c2e335c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Apr 2022 12:30:33 +0200 Subject: [PATCH 102/244] OP-2951 - refactor sync loaders Changed structure of loaders bit, cannot use isinstance check though. --- openpype/plugins/load/add_site.py | 1 + openpype/plugins/load/remove_site.py | 26 +++++++++++++++----------- openpype/tools/utils/lib.py | 4 ++-- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py index ae765c81b7..55fda55d17 100644 --- a/openpype/plugins/load/add_site.py +++ b/openpype/plugins/load/add_site.py @@ -23,6 +23,7 @@ class AddSyncSite(load.LoaderPlugin): color = "#999999" _sync_server = None + is_add_site_loader = True @property def sync_server(self): diff --git a/openpype/plugins/load/remove_site.py b/openpype/plugins/load/remove_site.py index adffec9986..c5f442b2f5 100644 --- a/openpype/plugins/load/remove_site.py +++ b/openpype/plugins/load/remove_site.py @@ -12,22 +12,26 @@ class RemoveSyncSite(load.LoaderPlugin): icon = "download" color = "#999999" + _sync_server = None + is_remove_site_loader = True + + @property + def sync_server(self): + if not self._sync_server: + manager = ModulesManager() + self._sync_server = manager.modules_by_name["sync_server"] + + return self._sync_server + def load(self, context, name=None, namespace=None, data=None): self.log.info("Removing {} on representation: {}".format( data["site_name"], data["_id"])) - self.remove_site_on_representation(data["project_name"], - data["_id"], - data["site_name"]) + self.sync_server.remove_site(data["project_name"], + data["_id"], + data["site_name"], + True) self.log.debug("Site added.") - @staticmethod - def remove_site_on_representation(project_name, representation_id, - site_name): - manager = ModulesManager() - sync_server = manager.modules_by_name["sync_server"] - sync_server.remove_site(project_name, representation_id, - site_name, True) - def filepath_from_context(self, context): """No real file loading""" return "" diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 422d0f5389..e5d978c3b2 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -719,11 +719,11 @@ def is_sync_loader(loader): def is_remove_site_loader(loader): - return hasattr(loader, "remove_site_on_representation") + return hasattr(loader, "is_remove_site_loader") def is_add_site_loader(loader): - return hasattr(loader, "add_site_to_representation") + return hasattr(loader, "is_add_site_loader") class WrappedCallbackItem: From b33667255813f009f32d63182a202412fe2a465f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Apr 2022 12:31:08 +0200 Subject: [PATCH 103/244] OP-2951 - safer pulling of inputLinks from legacy records --- openpype/lib/avalon_context.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index d26011e522..7f35694e58 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -2057,7 +2057,8 @@ def _process_referenced_pipeline_result(result, link_type): referenced_version_ids = set() correctly_linked_ids = set() for item in result: - 
correctly_linked_ids = _filter_input_links(item["data"]["inputLinks"], + input_links = item["data"].get("inputLinks", []) + correctly_linked_ids = _filter_input_links(input_links, link_type, correctly_linked_ids) From 50dc946fa1ba5e34d33a7c34cfd65021178c28f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:51:12 +0200 Subject: [PATCH 104/244] copied schema and mongodb to openpype pipeline --- openpype/pipeline/__init__.py | 6 + openpype/pipeline/mongodb.py | 407 ++++++++++++++++++++++++++++++++++ openpype/pipeline/schema.py | 140 ++++++++++++ 3 files changed, 553 insertions(+) create mode 100644 openpype/pipeline/mongodb.py create mode 100644 openpype/pipeline/schema.py diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 308be6da64..2c35ea2d57 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -85,6 +85,10 @@ from .context_tools import ( install = install_host uninstall = uninstall_host +from .mongodb import ( + AvalonMongoDB, +) + __all__ = ( "AVALON_CONTAINER_ID", @@ -170,4 +174,6 @@ __all__ = ( # Backwards compatible function names "install", "uninstall", + + "AvalonMongoDB", ) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py new file mode 100644 index 0000000000..436f6994db --- /dev/null +++ b/openpype/pipeline/mongodb.py @@ -0,0 +1,407 @@ +import os +import time +import functools +import logging +import pymongo +import ctypes +from uuid import uuid4 + +from . import schema + + +def requires_install(func): + func_obj = getattr(func, "__self__", None) + + @functools.wraps(func) + def decorated(*args, **kwargs): + if func_obj is not None: + _obj = func_obj + else: + _obj = args[0] + if not _obj.is_installed(): + if _obj.auto_install: + _obj.install() + else: + raise IOError( + "'{}.{}()' requires to run install() first".format( + _obj.__class__.__name__, func.__name__ + ) + ) + return func(*args, **kwargs) + return decorated + + +def auto_reconnect(func): + """Handling auto reconnect in 3 retry times""" + retry_times = 3 + reconnect_msg = "Reconnecting..." 
+ func_obj = getattr(func, "__self__", None) + + @functools.wraps(func) + def decorated(*args, **kwargs): + if func_obj is not None: + _obj = func_obj + else: + _obj = args[0] + + for retry in range(1, retry_times + 1): + try: + return func(*args, **kwargs) + except pymongo.errors.AutoReconnect: + if hasattr(_obj, "log"): + _obj.log.warning(reconnect_msg) + else: + print(reconnect_msg) + + if retry >= retry_times: + raise + time.sleep(0.1) + return decorated + + +SESSION_CONTEXT_KEYS = ( + # Root directory of projects on disk + "AVALON_PROJECTS", + # Name of current Project + "AVALON_PROJECT", + # Name of current Asset + "AVALON_ASSET", + # Name of current silo + "AVALON_SILO", + # Name of current task + "AVALON_TASK", + # Name of current app + "AVALON_APP", + # Path to working directory + "AVALON_WORKDIR", + # Optional path to scenes directory (see Work Files API) + "AVALON_SCENEDIR" +) + + +def session_data_from_environment(context_keys=False): + session_data = {} + if context_keys: + for key in SESSION_CONTEXT_KEYS: + value = os.environ.get(key) + session_data[key] = value or "" + else: + for key in SESSION_CONTEXT_KEYS: + session_data[key] = None + + for key, default_value in ( + # Name of current Config + # TODO(marcus): Establish a suitable default config + ("AVALON_CONFIG", "no_config"), + + # Name of Avalon in graphical user interfaces + # Use this to customise the visual appearance of Avalon + # to better integrate with your surrounding pipeline + ("AVALON_LABEL", "Avalon"), + + # Used during any connections to the outside world + ("AVALON_TIMEOUT", "1000"), + + # Address to Asset Database + ("AVALON_MONGO", "mongodb://localhost:27017"), + + # Name of database used in MongoDB + ("AVALON_DB", "avalon"), + + # Address to Sentry + ("AVALON_SENTRY", None), + + # Address to Deadline Web Service + # E.g. 
http://192.167.0.1:8082 + ("AVALON_DEADLINE", None), + + # Enable features not necessarily stable, at the user's own risk + ("AVALON_EARLY_ADOPTER", None), + + # Address of central asset repository, contains + # the following interface: + # /upload + # /download + # /manager (optional) + ("AVALON_LOCATION", "http://127.0.0.1"), + + # Boolean of whether to upload published material + # to central asset repository + ("AVALON_UPLOAD", None), + + # Generic username and password + ("AVALON_USERNAME", "avalon"), + ("AVALON_PASSWORD", "secret"), + + # Unique identifier for instances in working files + ("AVALON_INSTANCE_ID", "avalon.instance"), + + # Enable debugging + ("AVALON_DEBUG", None) + ): + value = os.environ.get(key) or default_value + if value is not None: + session_data[key] = value + + return session_data + + +class AvalonMongoConnection: + _mongo_client = None + _is_installed = False + _databases = {} + log = logging.getLogger("AvalonMongoConnection") + + @classmethod + def register_database(cls, dbcon): + if dbcon.id in cls._databases: + return + + cls._databases[dbcon.id] = { + "object": dbcon, + "installed": False + } + + @classmethod + def database(cls): + return cls._mongo_client[str(os.environ["AVALON_DB"])] + + @classmethod + def mongo_client(cls): + return cls._mongo_client + + @classmethod + def install(cls, dbcon): + if not cls._is_installed or cls._mongo_client is None: + cls._mongo_client = cls.create_connection() + cls._is_installed = True + + cls.register_database(dbcon) + cls._databases[dbcon.id]["installed"] = True + + cls.check_db_existence() + + @classmethod + def is_installed(cls, dbcon): + info = cls._databases.get(dbcon.id) + if not info: + return False + return cls._databases[dbcon.id]["installed"] + + @classmethod + def _uninstall(cls): + try: + cls._mongo_client.close() + except AttributeError: + pass + cls._is_installed = False + cls._mongo_client = None + + @classmethod + def uninstall(cls, dbcon, force=False): + if force: + for key in cls._databases: + cls._databases[key]["object"].uninstall() + cls._uninstall() + return + + cls._databases[dbcon.id]["installed"] = False + + cls.check_db_existence() + + any_is_installed = False + for key in cls._databases: + if cls._databases[key]["installed"]: + any_is_installed = True + break + + if not any_is_installed: + cls._uninstall() + + @classmethod + def check_db_existence(cls): + items_to_pop = set() + for db_id, info in cls._databases.items(): + obj = info["object"] + # TODO check if should check for 1 or more + cls.log.info(ctypes.c_long.from_address(id(obj)).value) + if ctypes.c_long.from_address(id(obj)).value == 1: + items_to_pop.add(db_id) + + for db_id in items_to_pop: + cls._databases.pop(db_id, None) + + @classmethod + def create_connection(cls): + from openpype.lib import OpenPypeMongoConnection + + mongo_url = os.environ["AVALON_MONGO"] + + mongo_client = OpenPypeMongoConnection.create_connection(mongo_url) + + return mongo_client + + +class AvalonMongoDB: + def __init__(self, session=None, auto_install=True): + self._id = uuid4() + self._database = None + self.auto_install = auto_install + + if session is None: + session = session_data_from_environment(context_keys=False) + + self.Session = session + + self.log = logging.getLogger(self.__class__.__name__) + + def __getattr__(self, attr_name): + attr = None + if not self.is_installed() and self.auto_install: + self.install() + + if not self.is_installed(): + raise IOError( + "'{}.{}()' requires to run install() first".format( + self.__class__.__name__, 
attr_name + ) + ) + + project_name = self.active_project() + if project_name is None: + raise ValueError( + "Value of 'Session[\"AVALON_PROJECT\"]' is not set." + ) + + collection = self._database[project_name] + not_set = object() + attr = getattr(collection, attr_name, not_set) + + if attr is not_set: + # Raise attribute error + raise AttributeError( + "{} has no attribute '{}'.".format( + collection.__class__.__name__, attr_name + ) + ) + + # Decorate function + if callable(attr): + attr = auto_reconnect(attr) + return attr + + @property + def mongo_client(self): + AvalonMongoConnection.mongo_client() + + @property + def id(self): + return self._id + + @property + def database(self): + if not self.is_installed() and self.auto_install: + self.install() + + if self.is_installed(): + return self._database + + raise IOError( + "'{}.database' requires to run install() first".format( + self.__class__.__name__ + ) + ) + + def is_installed(self): + return AvalonMongoConnection.is_installed(self) + + def install(self): + """Establish a persistent connection to the database""" + if self.is_installed(): + return + + AvalonMongoConnection.install(self) + + self._database = AvalonMongoConnection.database() + + def uninstall(self): + """Close any connection to the database""" + AvalonMongoConnection.uninstall(self) + self._database = None + + @requires_install + def active_project(self): + """Return the name of the active project""" + return self.Session["AVALON_PROJECT"] + + @requires_install + @auto_reconnect + def projects(self, projection=None, only_active=True): + """Iter project documents + + Args: + projection (optional): MongoDB query projection operation + only_active (optional): Skip inactive projects, default True. + + Returns: + Project documents iterator + + """ + query_filter = {"type": "project"} + if only_active: + query_filter.update({ + "$or": [ + {"data.active": {"$exists": 0}}, + {"data.active": True}, + ] + }) + + for project_name in self._database.collection_names(): + if project_name in ("system.indexes",): + continue + + # Each collection will have exactly one project document + + doc = self._database[project_name].find_one( + query_filter, projection=projection + ) + if doc is not None: + yield doc + + @auto_reconnect + def insert_one(self, item, *args, **kwargs): + assert isinstance(item, dict), "item must be of type " + schema.validate(item) + return self._database[self.active_project()].insert_one( + item, *args, **kwargs + ) + + @auto_reconnect + def insert_many(self, items, *args, **kwargs): + # check if all items are valid + assert isinstance(items, list), "`items` must be of type " + for item in items: + assert isinstance(item, dict), "`item` must be of type " + schema.validate(item) + + return self._database[self.active_project()].insert_many( + items, *args, **kwargs + ) + + def parenthood(self, document): + assert document is not None, "This is a bug" + + parents = list() + + while document.get("parent") is not None: + document = self.find_one({"_id": document["parent"]}) + if document is None: + break + + if document.get("type") == "hero_version": + _document = self.find_one({"_id": document["version_id"]}) + document["data"] = _document["data"] + + parents.append(document) + + return parents diff --git a/openpype/pipeline/schema.py b/openpype/pipeline/schema.py new file mode 100644 index 0000000000..26d987b8f3 --- /dev/null +++ b/openpype/pipeline/schema.py @@ -0,0 +1,140 @@ +"""Wrapper around :mod:`jsonschema` + +Schemas are implicitly loaded from the /schema 
directory of this project. + +Attributes: + _cache: Cache of previously loaded schemas + +Resources: + http://json-schema.org/ + http://json-schema.org/latest/json-schema-core.html + http://spacetelescope.github.io/understanding-json-schema/index.html + +""" + +import os +import re +import json +import logging + +import jsonschema +import six + +log_ = logging.getLogger(__name__) + +ValidationError = jsonschema.ValidationError +SchemaError = jsonschema.SchemaError + +_CACHED = False + + +def get_schema_version(schema_name): + """Extract version form schema name. + + It is expected that schema name contain only major and minor version. + + Expected name should match to: + "{name}:{type}-{major version}.{minor version}" + - `name` - must not contain colon + - `type` - must not contain dash + - major and minor versions must be numbers separated by dot + + Args: + schema_name(str): Name of schema that should be parsed. + + Returns: + tuple: Contain two values major version as first and minor version as + second. When schema does not match parsing regex then `(0, 0)` is + returned. + """ + schema_regex = re.compile(r"[^:]+:[^-]+-(\d.\d)") + groups = schema_regex.findall(schema_name) + if not groups: + return 0, 0 + + maj_version, min_version = groups[0].split(".") + return int(maj_version), int(min_version) + + +def validate(data, schema=None): + """Validate `data` with `schema` + + Arguments: + data (dict): JSON-compatible data + schema (str): DEPRECATED Name of schema. Now included in the data. + + Raises: + ValidationError on invalid schema + + """ + if not _CACHED: + _precache() + + root, schema = data["schema"].rsplit(":", 1) + # assert root in ( + # "mindbender-core", # Backwards compatiblity + # "avalon-core", + # "pype" + # ) + + if isinstance(schema, six.string_types): + schema = _cache[schema + ".json"] + + resolver = jsonschema.RefResolver( + "", + None, + store=_cache, + cache_remote=True + ) + + jsonschema.validate(data, + schema, + types={"array": (list, tuple)}, + resolver=resolver) + + +_cache = { + # A mock schema for docstring tests + "_doctest.json": { + "$schema": "http://json-schema.org/schema#", + + "title": "_doctest", + "description": "A test schema", + + "type": "object", + + "additionalProperties": False, + + "required": ["key"], + + "properties": { + "key": { + "description": "A test key", + "type": "string" + } + } + } +} + + +def _precache(): + global _CACHED + + if os.environ.get('AVALON_SCHEMA'): + schema_dir = os.environ['AVALON_SCHEMA'] + else: + current_dir = os.path.dirname(os.path.abspath(__file__)) + schema_dir = os.path.join(current_dir, "schema") + + """Store available schemas in-memory for reduced disk access""" + for schema in os.listdir(schema_dir): + if schema.startswith(("_", ".")): + continue + if not schema.endswith(".json"): + continue + if not os.path.isfile(os.path.join(schema_dir, schema)): + continue + with open(os.path.join(schema_dir, schema)) as f: + log_.debug("Installing schema '%s'.." 
% schema) + _cache[schema] = json.load(f) + _CACHED = True From e953c8602f66e33e9d1ca54e5d7c0b12c25897a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:54:49 +0200 Subject: [PATCH 105/244] replaced AvalonMongoConnection with using OpenPypeMongoConnection --- openpype/pipeline/mongodb.py | 111 +++-------------------------------- 1 file changed, 8 insertions(+), 103 deletions(-) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index 436f6994db..1a1b6f7ce9 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -3,7 +3,6 @@ import time import functools import logging import pymongo -import ctypes from uuid import uuid4 from . import schema @@ -146,107 +145,12 @@ def session_data_from_environment(context_keys=False): return session_data -class AvalonMongoConnection: - _mongo_client = None - _is_installed = False - _databases = {} - log = logging.getLogger("AvalonMongoConnection") - - @classmethod - def register_database(cls, dbcon): - if dbcon.id in cls._databases: - return - - cls._databases[dbcon.id] = { - "object": dbcon, - "installed": False - } - - @classmethod - def database(cls): - return cls._mongo_client[str(os.environ["AVALON_DB"])] - - @classmethod - def mongo_client(cls): - return cls._mongo_client - - @classmethod - def install(cls, dbcon): - if not cls._is_installed or cls._mongo_client is None: - cls._mongo_client = cls.create_connection() - cls._is_installed = True - - cls.register_database(dbcon) - cls._databases[dbcon.id]["installed"] = True - - cls.check_db_existence() - - @classmethod - def is_installed(cls, dbcon): - info = cls._databases.get(dbcon.id) - if not info: - return False - return cls._databases[dbcon.id]["installed"] - - @classmethod - def _uninstall(cls): - try: - cls._mongo_client.close() - except AttributeError: - pass - cls._is_installed = False - cls._mongo_client = None - - @classmethod - def uninstall(cls, dbcon, force=False): - if force: - for key in cls._databases: - cls._databases[key]["object"].uninstall() - cls._uninstall() - return - - cls._databases[dbcon.id]["installed"] = False - - cls.check_db_existence() - - any_is_installed = False - for key in cls._databases: - if cls._databases[key]["installed"]: - any_is_installed = True - break - - if not any_is_installed: - cls._uninstall() - - @classmethod - def check_db_existence(cls): - items_to_pop = set() - for db_id, info in cls._databases.items(): - obj = info["object"] - # TODO check if should check for 1 or more - cls.log.info(ctypes.c_long.from_address(id(obj)).value) - if ctypes.c_long.from_address(id(obj)).value == 1: - items_to_pop.add(db_id) - - for db_id in items_to_pop: - cls._databases.pop(db_id, None) - - @classmethod - def create_connection(cls): - from openpype.lib import OpenPypeMongoConnection - - mongo_url = os.environ["AVALON_MONGO"] - - mongo_client = OpenPypeMongoConnection.create_connection(mongo_url) - - return mongo_client - - class AvalonMongoDB: def __init__(self, session=None, auto_install=True): self._id = uuid4() self._database = None self.auto_install = auto_install + self._installed = False if session is None: session = session_data_from_environment(context_keys=False) @@ -292,7 +196,9 @@ class AvalonMongoDB: @property def mongo_client(self): - AvalonMongoConnection.mongo_client() + from openpype.lib import OpenPypeMongoConnection + + return OpenPypeMongoConnection.get_mongo_client() @property def id(self): @@ -313,20 +219,19 @@ class AvalonMongoDB: ) def is_installed(self): - return 
AvalonMongoConnection.is_installed(self) + return self._installed def install(self): """Establish a persistent connection to the database""" if self.is_installed(): return - AvalonMongoConnection.install(self) - - self._database = AvalonMongoConnection.database() + self._installed = True + self._database = self.mongo_client[str(os.environ["AVALON_DB"])] def uninstall(self): """Close any connection to the database""" - AvalonMongoConnection.uninstall(self) + self._installed = False self._database = None @requires_install From b2c4210920963bc6f85d973b54a5c271e7a391f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:55:18 +0200 Subject: [PATCH 106/244] removed unused environments --- openpype/pipeline/mongodb.py | 37 ------------------------------------ 1 file changed, 37 deletions(-) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index 1a1b6f7ce9..9efd231bb2 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -65,8 +65,6 @@ SESSION_CONTEXT_KEYS = ( "AVALON_PROJECT", # Name of current Asset "AVALON_ASSET", - # Name of current silo - "AVALON_SILO", # Name of current task "AVALON_TASK", # Name of current app @@ -89,10 +87,6 @@ def session_data_from_environment(context_keys=False): session_data[key] = None for key, default_value in ( - # Name of current Config - # TODO(marcus): Establish a suitable default config - ("AVALON_CONFIG", "no_config"), - # Name of Avalon in graphical user interfaces # Use this to customise the visual appearance of Avalon # to better integrate with your surrounding pipeline @@ -106,37 +100,6 @@ def session_data_from_environment(context_keys=False): # Name of database used in MongoDB ("AVALON_DB", "avalon"), - - # Address to Sentry - ("AVALON_SENTRY", None), - - # Address to Deadline Web Service - # E.g. 
http://192.167.0.1:8082 - ("AVALON_DEADLINE", None), - - # Enable features not necessarily stable, at the user's own risk - ("AVALON_EARLY_ADOPTER", None), - - # Address of central asset repository, contains - # the following interface: - # /upload - # /download - # /manager (optional) - ("AVALON_LOCATION", "http://127.0.0.1"), - - # Boolean of whether to upload published material - # to central asset repository - ("AVALON_UPLOAD", None), - - # Generic username and password - ("AVALON_USERNAME", "avalon"), - ("AVALON_PASSWORD", "secret"), - - # Unique identifier for instances in working files - ("AVALON_INSTANCE_ID", "avalon.instance"), - - # Enable debugging - ("AVALON_DEBUG", None) ): value = os.environ.get(key) or default_value if value is not None: From e91d84546e9b3cd69790876bae4d953f06250033 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 17:59:01 +0200 Subject: [PATCH 107/244] use AvalonMongoDB from openpype pipeline --- openpype/hooks/pre_global_host_data.py | 5 ++--- openpype/hosts/hiero/api/lib.py | 3 +-- openpype/hosts/hiero/api/pipeline.py | 15 ++++----------- .../hosts/maya/plugins/publish/collect_ass.py | 11 ++--------- openpype/hosts/nuke/api/pipeline.py | 11 +++++------ .../hosts/nuke/plugins/create/create_read.py | 2 -- openpype/hosts/testhost/run_publish.py | 2 -- openpype/hosts/tvpaint/api/pipeline.py | 1 - .../webserver_service/webpublish_routes.py | 16 +++++++++------- openpype/lib/applications.py | 2 +- openpype/lib/avalon_context.py | 11 +++++------ openpype/lib/plugin_tools.py | 2 +- openpype/lib/project_backpack.py | 2 +- openpype/modules/avalon_apps/rest_api.py | 3 +-- .../action_prepare_project.py | 2 +- .../event_handlers_server/event_sync_links.py | 2 +- .../event_sync_to_avalon.py | 3 +-- .../event_user_assigment.py | 3 +-- .../event_handlers_user/action_applications.py | 3 +-- .../event_handlers_user/action_delete_asset.py | 3 ++- .../action_delete_old_versions.py | 2 +- .../event_handlers_user/action_delivery.py | 2 +- .../action_fill_workfile_attr.py | 2 +- .../action_prepare_project.py | 2 +- .../action_store_thumbnails_to_avalon.py | 3 ++- openpype/modules/ftrack/ftrack_server/lib.py | 2 +- openpype/modules/ftrack/lib/avalon_sync.py | 8 ++------ .../modules/ftrack/scripts/sub_event_storer.py | 2 +- .../modules/sync_server/sync_server_module.py | 8 +++++--- .../modules/timers_manager/timers_manager.py | 3 ++- openpype/pipeline/create/context.py | 11 +++++++---- openpype/plugin.py | 1 - openpype/plugins/load/delete_old_versions.py | 3 +-- openpype/plugins/load/delivery.py | 4 +--- openpype/settings/handlers.py | 2 +- openpype/tools/context_dialog/window.py | 2 +- openpype/tools/launcher/window.py | 3 +-- openpype/tools/libraryloader/app.py | 2 +- openpype/tools/loader/__main__.py | 1 - .../project_manager/project_manager/widgets.py | 2 +- .../project_manager/project_manager/window.py | 2 +- openpype/tools/settings/settings/widgets.py | 15 +-------------- openpype/tools/standalonepublish/app.py | 2 +- openpype/tools/traypublisher/window.py | 6 ++++-- 44 files changed, 77 insertions(+), 115 deletions(-) diff --git a/openpype/hooks/pre_global_host_data.py b/openpype/hooks/pre_global_host_data.py index 4c85a511ed..ea5e290d6f 100644 --- a/openpype/hooks/pre_global_host_data.py +++ b/openpype/hooks/pre_global_host_data.py @@ -5,8 +5,7 @@ from openpype.lib import ( prepare_app_environments, prepare_context_environments ) - -import avalon.api +from openpype.pipeline import AvalonMongoDB class GlobalHostDataHook(PreLaunchHook): @@ -64,7 +63,7 @@ 
class GlobalHostDataHook(PreLaunchHook): self.data["anatomy"] = Anatomy(project_name) # Mongo connection - dbcon = avalon.api.AvalonMongoDB() + dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name dbcon.install() diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index df3b24ff2c..00c30538fc 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -38,8 +38,6 @@ self.pype_tag_name = "openpypeData" self.default_sequence_name = "openpypeSequence" self.default_bin_name = "openpypeBin" -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") - def flatten(_list): for item in _list: @@ -49,6 +47,7 @@ def flatten(_list): else: yield item + def get_current_project(remove_untitled=False): projects = flatten(hiero.core.projects()) if not remove_untitled: diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 616ff53fd8..5001043a74 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -20,8 +20,6 @@ from . import lib, menu, events log = Logger().get_logger(__name__) -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") - # plugin paths API_DIR = os.path.dirname(os.path.abspath(__file__)) HOST_DIR = os.path.dirname(API_DIR) @@ -247,15 +245,10 @@ def reload_config(): import importlib for module in ( - "avalon", - "avalon.lib", - "avalon.pipeline", - "pyblish", - "pypeapp", - "{}.api".format(AVALON_CONFIG), - "{}.hosts.hiero.lib".format(AVALON_CONFIG), - "{}.hosts.hiero.menu".format(AVALON_CONFIG), - "{}.hosts.hiero.tags".format(AVALON_CONFIG) + "openpype.api", + "openpype.hosts.hiero.lib", + "openpype.hosts.hiero.menu", + "openpype.hosts.hiero.tags" ): log.info("Reloading module: {}...".format(module)) try: diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 8e6691120a..7c9a1b76fb 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,23 +1,16 @@ from maya import cmds -import pymel.core as pm import pyblish.api -import avalon.api + class CollectAssData(pyblish.api.InstancePlugin): - """Collect Ass data - - """ + """Collect Ass data.""" order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] def process(self, instance): - - - context = instance.context - objsets = instance.data['setMembers'] for objset in objsets: diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index 6ee3d2ce05..0194acd196 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -38,7 +38,6 @@ from .lib import ( log = Logger.get_logger(__name__) -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.nuke.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") @@ -79,11 +78,11 @@ def reload_config(): """ for module in ( - "{}.api".format(AVALON_CONFIG), - "{}.hosts.nuke.api.actions".format(AVALON_CONFIG), - "{}.hosts.nuke.api.menu".format(AVALON_CONFIG), - "{}.hosts.nuke.api.plugin".format(AVALON_CONFIG), - "{}.hosts.nuke.api.lib".format(AVALON_CONFIG), + "openpype.api", + "openpype.hosts.nuke.api.actions", + "openpype.hosts.nuke.api.menu", + "openpype.hosts.nuke.api.plugin", + "openpype.hosts.nuke.api.lib", ): log.info("Reloading module: {}...".format(module)) diff --git a/openpype/hosts/nuke/plugins/create/create_read.py 
b/openpype/hosts/nuke/plugins/create/create_read.py index bdc67add42..87a9dff0f8 100644 --- a/openpype/hosts/nuke/plugins/create/create_read.py +++ b/openpype/hosts/nuke/plugins/create/create_read.py @@ -2,8 +2,6 @@ from collections import OrderedDict import nuke -import avalon.api -from openpype import api as pype from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import ( set_avalon_knob_data diff --git a/openpype/hosts/testhost/run_publish.py b/openpype/hosts/testhost/run_publish.py index cc80bdc604..c7ad63aafd 100644 --- a/openpype/hosts/testhost/run_publish.py +++ b/openpype/hosts/testhost/run_publish.py @@ -22,13 +22,11 @@ openpype_dir = multi_dirname(current_file, 4) os.environ["OPENPYPE_MONGO"] = mongo_url os.environ["OPENPYPE_ROOT"] = openpype_dir -os.environ["AVALON_MONGO"] = mongo_url os.environ["AVALON_PROJECT"] = project_name os.environ["AVALON_ASSET"] = asset_name os.environ["AVALON_TASK"] = task_name os.environ["AVALON_APP"] = host_name os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" -os.environ["AVALON_CONFIG"] = "openpype" os.environ["AVALON_TIMEOUT"] = "1000" os.environ["AVALON_DB"] = "avalon" os.environ["FTRACK_SERVER"] = ftrack_url diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index 78c10c3dae..d57ec3178a 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -7,7 +7,6 @@ import logging import requests import pyblish.api -import avalon.api from avalon import io diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 1f9089aa27..e82ba7f2b8 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -7,18 +7,20 @@ import collections from aiohttp.web_response import Response import subprocess -from avalon.api import AvalonMongoDB - -from openpype.lib import OpenPypeMongoConnection -from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint -from openpype.settings import get_project_settings - -from openpype.lib import PypeLogger +from openpype.lib import ( + OpenPypeMongoConnection, + PypeLogger, +) from openpype.lib.remote_publish import ( get_task_data, ERROR_STATUS, REPROCESS_STATUS ) +from openpype.pipeline import AvalonMongoDB +from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint +from openpype.settings import get_project_settings + + log = PypeLogger.get_logger("WebServer") diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index 07b91dda03..b52da52dc9 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -1295,7 +1295,7 @@ def get_app_environments_for_context( Returns: dict: Environments for passed context and application. 
""" - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB # Avalon database connection dbcon = AvalonMongoDB() diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index e82dcc558f..d95d1b983f 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -64,8 +64,8 @@ def create_project( """ from openpype.settings import ProjectSettings, SaveWarningExc - from avalon.api import AvalonMongoDB - from avalon.schema import validate + from openpype.pipeline import AvalonMongoDB + from openpype.pipeline.schema import validate if dbcon is None: dbcon = AvalonMongoDB() @@ -333,8 +333,7 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): Args: asset_name (str): Name of asset. subset_name (str): Name of subset. - dbcon (avalon.mongodb.AvalonMongoDB, optional): Avalon Mongo connection - with Session. + dbcon (AvalonMongoDB, optional): Avalon Mongo connection with Session. project_name (str, optional): Find latest version in specific project. Returns: @@ -429,7 +428,7 @@ def get_workfile_template_key_from_context( "`get_workfile_template_key_from_context` requires to pass" " one of 'dbcon' or 'project_name' arguments." )) - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name @@ -1794,7 +1793,7 @@ def get_custom_workfile_template_by_string_context( """ if dbcon is None: - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index f11ba56865..3f78407931 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -136,7 +136,7 @@ def get_subset_name( `get_subset_name_with_asset_doc` where asset document is expected. 
""" if dbcon is None: - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name diff --git a/openpype/lib/project_backpack.py b/openpype/lib/project_backpack.py index 11fd0c0c3e..396479c725 100644 --- a/openpype/lib/project_backpack.py +++ b/openpype/lib/project_backpack.py @@ -25,7 +25,7 @@ from bson.json_util import ( CANONICAL_JSON_OPTIONS ) -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB DOCUMENTS_FILE_NAME = "database" METADATA_FILE_NAME = "metadata" diff --git a/openpype/modules/avalon_apps/rest_api.py b/openpype/modules/avalon_apps/rest_api.py index 533050fc0c..b35f5bf357 100644 --- a/openpype/modules/avalon_apps/rest_api.py +++ b/openpype/modules/avalon_apps/rest_api.py @@ -1,4 +1,3 @@ -import os import json import datetime @@ -6,7 +5,7 @@ from bson.objectid import ObjectId from aiohttp.web_response import Response -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype_modules.webserver.base_routes import RestApiEndpoint diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py index 2e55be2743..975e49cb28 100644 --- a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py @@ -1,8 +1,8 @@ import json -from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project +from openpype.pipeline import AvalonMongoDB from openpype.settings import SaveWarningExc from openpype_modules.ftrack.lib import ( diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_links.py b/openpype/modules/ftrack/event_handlers_server/event_sync_links.py index 9610e7f5de..ae70c6756f 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_links.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_links.py @@ -1,7 +1,7 @@ from pymongo import UpdateOne from bson.objectid import ObjectId -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import ( CUST_ATTR_ID_KEY, diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 46c333c4c4..b5f199b3e4 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -12,8 +12,7 @@ from pymongo import UpdateOne import arrow import ftrack_api -from avalon import schema -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB, schema from openpype_modules.ftrack.lib import ( get_openpype_attr, diff --git a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py index 96243c8c36..593fc5e596 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py +++ b/openpype/modules/ftrack/event_handlers_server/event_user_assigment.py @@ -1,10 +1,9 @@ -import os import re import subprocess from openpype_modules.ftrack.lib import BaseEvent from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from bson.objectid import ObjectId diff --git 
a/openpype/modules/ftrack/event_handlers_user/action_applications.py b/openpype/modules/ftrack/event_handlers_user/action_applications.py index 48a0dea006..b25bc1b5cb 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_applications.py +++ b/openpype/modules/ftrack/event_handlers_user/action_applications.py @@ -1,5 +1,4 @@ import os -from uuid import uuid4 from openpype_modules.ftrack.lib import BaseAction from openpype.lib.applications import ( @@ -8,7 +7,7 @@ from openpype.lib.applications import ( ApplictionExecutableNotFound, CUSTOM_LAUNCH_APP_GROUPS ) -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB class AppplicationsAction(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py index 94385a36c5..ee5c3d0d97 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_asset.py @@ -3,7 +3,8 @@ import uuid from datetime import datetime from bson.objectid import ObjectId -from avalon.api import AvalonMongoDB + +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks diff --git a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py index 5871646b20..f5addde8ae 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delete_old_versions.py @@ -5,10 +5,10 @@ import uuid import clique from pymongo import UpdateOne -from avalon.api import AvalonMongoDB from openpype.api import Anatomy from openpype.lib import StringTemplate, TemplateUnsolved +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import BaseAction, statics_icon diff --git a/openpype/modules/ftrack/event_handlers_user/action_delivery.py b/openpype/modules/ftrack/event_handlers_user/action_delivery.py index 1f28b18900..9ef2a1668e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_delivery.py +++ b/openpype/modules/ftrack/event_handlers_user/action_delivery.py @@ -15,7 +15,7 @@ from openpype.lib.delivery import ( process_single_file, process_sequence ) -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB class Delivery(BaseAction): diff --git a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py index 3888379e04..c7237a1150 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py +++ b/openpype/modules/ftrack/event_handlers_user/action_fill_workfile_attr.py @@ -7,7 +7,6 @@ import datetime import ftrack_api -from avalon.api import AvalonMongoDB from openpype.api import get_project_settings from openpype.lib import ( get_workfile_template_key, @@ -15,6 +14,7 @@ from openpype.lib import ( Anatomy, StringTemplate, ) +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype_modules.ftrack.lib.avalon_sync import create_chunks diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py index 3759bc81ac..0b14e7aa2b 100644 --- 
a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py @@ -1,8 +1,8 @@ import json -from avalon.api import AvalonMongoDB from openpype.api import ProjectSettings from openpype.lib import create_project +from openpype.pipeline import AvalonMongoDB from openpype.settings import SaveWarningExc from openpype_modules.ftrack.lib import ( diff --git a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py b/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py index 4820925844..62fdfa2bdd 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_user/action_store_thumbnails_to_avalon.py @@ -4,9 +4,10 @@ import json import requests from bson.objectid import ObjectId + from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import Anatomy -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype_modules.ftrack.lib.avalon_sync import CUST_ATTR_ID_KEY diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index f8319b67d4..e89113a86c 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -192,7 +192,7 @@ class ProcessEventHub(SocketBaseEventHub): except pymongo.errors.AutoReconnect: self.pypelog.error(( "Mongo server \"{}\" is not responding, exiting." - ).format(os.environ["AVALON_MONGO"])) + ).format(os.environ["OPENPYPE_MONGO"])) sys.exit(0) # Additional special processing of events. if event['topic'] == 'ftrack.meta.disconnected': diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index c5b58ca94d..124787e467 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -6,16 +6,12 @@ import numbers import six -from avalon.api import AvalonMongoDB - -import avalon - from openpype.api import ( Logger, - Anatomy, get_anatomy_settings ) from openpype.lib import ApplicationManager +from openpype.pipeline import AvalonMongoDB, schema from .constants import CUST_ATTR_ID_KEY, FPS_KEYS from .custom_attributes import get_openpype_attr, query_custom_attributes @@ -175,7 +171,7 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None): if not name_pattern: default_pattern = "^[a-zA-Z0-9_.]*$" - schema_obj = avalon.schema._cache.get(schema_name + ".json") + schema_obj = schema._cache.get(schema_name + ".json") if not schema_obj: name_pattern = default_pattern else: diff --git a/openpype/modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/ftrack/scripts/sub_event_storer.py index 5543ed74e2..946ecbff79 100644 --- a/openpype/modules/ftrack/scripts/sub_event_storer.py +++ b/openpype/modules/ftrack/scripts/sub_event_storer.py @@ -67,7 +67,7 @@ def launch(event): except pymongo.errors.AutoReconnect: log.error("Mongo server \"{}\" is not responding, exiting.".format( - os.environ["AVALON_MONGO"] + os.environ["OPENPYPE_MONGO"] )) sys.exit(0) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 2c27571f9f..7d4e3ccc96 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -6,7 +6,6 @@ import platform import copy from collections import deque -from avalon.api import 
AvalonMongoDB from openpype.modules import OpenPypeModule from openpype_interfaces import ITrayModule @@ -14,11 +13,14 @@ from openpype.api import ( Anatomy, get_project_settings, get_system_settings, - get_local_site_id) + get_local_site_id +) from openpype.lib import PypeLogger +from openpype.pipeline import AvalonMongoDB from openpype.settings.lib import ( get_default_anatomy_settings, - get_anatomy_settings) + get_anatomy_settings +) from .providers.local_drive import LocalDriveHandler from .providers import lib diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index 47d020104b..3f77a2b7dc 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -1,13 +1,14 @@ import os import platform -from avalon.api import AvalonMongoDB from openpype.modules import OpenPypeModule from openpype_interfaces import ( ITrayService, ILaunchHookPaths ) +from openpype.pipeline import AvalonMongoDB + from .exceptions import InvalidContextError diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 0cc2819172..584752e38a 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,6 +6,11 @@ import inspect from uuid import uuid4 from contextlib import contextmanager +from openpype.pipeline.mongodb import ( + AvalonMongoDB, + session_data_from_environment, +) + from .creator_plugins import ( BaseCreator, Creator, @@ -659,10 +664,8 @@ class CreateContext: ): # Create conncetion if is not passed if dbcon is None: - import avalon.api - - session = avalon.api.session_data_from_environment(True) - dbcon = avalon.api.AvalonMongoDB(session) + session = session_data_from_environment(True) + dbcon = AvalonMongoDB(session) dbcon.install() self.dbcon = dbcon diff --git a/openpype/plugin.py b/openpype/plugin.py index 3569936dac..bb9bc2ff85 100644 --- a/openpype/plugin.py +++ b/openpype/plugin.py @@ -1,7 +1,6 @@ import tempfile import os import pyblish.api -import avalon.api ValidatePipelineOrder = pyblish.api.ValidatorOrder + 0.05 ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1 diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index 2789f4ea23..c3e9e9fa0a 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -8,9 +8,8 @@ import ftrack_api import qargparse from Qt import QtWidgets, QtCore -from avalon.api import AvalonMongoDB from openpype import style -from openpype.pipeline import load +from openpype.pipeline import load, AvalonMongoDB from openpype.lib import StringTemplate from openpype.api import Anatomy diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py index 04080053e3..7df07e3f64 100644 --- a/openpype/plugins/load/delivery.py +++ b/openpype/plugins/load/delivery.py @@ -3,9 +3,7 @@ from collections import defaultdict from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB - -from openpype.pipeline import load +from openpype.pipeline import load, AvalonMongoDB from openpype.api import Anatomy, config from openpype import resources, style diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 2109b53b09..0c94623a64 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -337,7 +337,7 @@ class MongoSettingsHandler(SettingsHandler): def __init__(self): # Get mongo connection from openpype.lib 
import OpenPypeMongoConnection - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB settings_collection = OpenPypeMongoConnection.get_mongo_client() diff --git a/openpype/tools/context_dialog/window.py b/openpype/tools/context_dialog/window.py index 9e030853bf..3b544bd375 100644 --- a/openpype/tools/context_dialog/window.py +++ b/openpype/tools/context_dialog/window.py @@ -2,9 +2,9 @@ import os import json from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB from openpype import style +from openpype.pipeline import AvalonMongoDB from openpype.tools.utils.lib import center_window from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.constants import ( diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index d80b3eabf0..dab6949613 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -3,10 +3,9 @@ import logging from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB - from openpype import style from openpype.api import resources +from openpype.pipeline import AvalonMongoDB import qtawesome from .models import ( diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py index 328e16205c..7fda6bd6f9 100644 --- a/openpype/tools/libraryloader/app.py +++ b/openpype/tools/libraryloader/app.py @@ -2,8 +2,8 @@ import sys from Qt import QtWidgets, QtCore, QtGui -from avalon.api import AvalonMongoDB from openpype import style +from openpype.pipeline import AvalonMongoDB from openpype.tools.utils import lib as tools_lib from openpype.tools.loader.widgets import ( ThumbnailWidget, diff --git a/openpype/tools/loader/__main__.py b/openpype/tools/loader/__main__.py index 146ba7fd10..400a034a76 100644 --- a/openpype/tools/loader/__main__.py +++ b/openpype/tools/loader/__main__.py @@ -24,7 +24,6 @@ if __name__ == '__main__': os.environ["AVALON_DB"] = "avalon" os.environ["AVALON_TIMEOUT"] = "1000" os.environ["OPENPYPE_DEBUG"] = "1" - os.environ["AVALON_CONFIG"] = "pype" os.environ["AVALON_ASSET"] = "Jungle" # Set the exception hook to our wrapping function diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 39ea833961..dc75b30bd7 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -10,11 +10,11 @@ from openpype.lib import ( PROJECT_NAME_REGEX ) from openpype.style import load_stylesheet +from openpype.pipeline import AvalonMongoDB from openpype.tools.utils import ( PlaceholderLineEdit, get_warning_pixmap ) -from avalon.api import AvalonMongoDB from Qt import QtWidgets, QtCore, QtGui diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index bdf32c7415..c281479d4f 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -16,6 +16,7 @@ from .style import ResourceCache from openpype.style import load_stylesheet from openpype.lib import is_admin_password_required from openpype.widgets import PasswordDialog +from openpype.pipeline import AvalonMongoDB from openpype import resources from openpype.api import ( @@ -23,7 +24,6 @@ from openpype.api import ( create_project_folders, Logger ) -from avalon.api import AvalonMongoDB class ProjectManagerWindow(QtWidgets.QWidget): diff --git 
a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index 6db001f2f6..45c21d5685 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1,13 +1,9 @@ -import os import copy import uuid from Qt import QtWidgets, QtCore, QtGui import qtawesome -from avalon.mongodb import ( - AvalonMongoConnection, - AvalonMongoDB -) +from openpype.pipeline import AvalonMongoDB from openpype.style import get_objected_colors from openpype.tools.utils.widgets import ImageButton from openpype.tools.utils.lib import paint_image_with_color @@ -1209,15 +1205,6 @@ class ProjectListWidget(QtWidgets.QWidget): selected_project = index.data(PROJECT_NAME_ROLE) break - mongo_url = os.environ["OPENPYPE_MONGO"] - - # Force uninstall of whole avalon connection if url does not match - # to current environment and set it as environment - if mongo_url != os.environ["AVALON_MONGO"]: - AvalonMongoConnection.uninstall(self.dbcon, force=True) - os.environ["AVALON_MONGO"] = mongo_url - self.dbcon = None - if not self.dbcon: try: self.dbcon = AvalonMongoDB() diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index 3630d92c83..1ad5cd119e 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -12,7 +12,7 @@ from .widgets import ( from .widgets.constants import HOST_NAME from openpype import style from openpype.api import resources -from avalon.api import AvalonMongoDB +from openpype.pipeline import AvalonMongoDB from openpype.modules import ModulesManager diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index a550c88ead..972e89a3ae 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -8,8 +8,10 @@ publishing plugins. from Qt import QtWidgets, QtCore -from avalon.api import AvalonMongoDB -from openpype.pipeline import install_host +from openpype.pipeline import ( + install_host, + AvalonMongoDB, +) from openpype.hosts.traypublisher import ( api as traypublisher ) From a05755e8327c279ea373914a68595d731df187bd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 13 Apr 2022 18:10:40 +0200 Subject: [PATCH 108/244] use schema from openpype --- openpype/hosts/blender/api/pipeline.py | 3 ++- openpype/hosts/hiero/api/pipeline.py | 2 +- openpype/hosts/maya/api/setdress.py | 4 ++-- openpype/hosts/resolve/api/pipeline.py | 3 +-- openpype/pipeline/load/utils.py | 3 ++- openpype/plugins/publish/integrate_hero_version.py | 3 ++- openpype/tools/loader/model.py | 6 ++++-- openpype/tools/sceneinventory/model.py | 3 ++- 8 files changed, 16 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index 0ea579970e..9420a10228 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -11,9 +11,10 @@ from . 
import ops import pyblish.api import avalon.api -from avalon import io, schema +from avalon import io from openpype.pipeline import ( + schema, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 5001043a74..8025ebff05 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -5,10 +5,10 @@ import os import contextlib from collections import OrderedDict -from avalon import schema from pyblish import api as pyblish from openpype.api import Logger from openpype.pipeline import ( + schema, register_creator_plugin_path, register_loader_plugin_path, deregister_creator_plugin_path, diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py index 0b60564e5e..018ea4558c 100644 --- a/openpype/hosts/maya/api/setdress.py +++ b/openpype/hosts/maya/api/setdress.py @@ -12,6 +12,7 @@ from maya import cmds from avalon import io from openpype.pipeline import ( + schema, discover_loader_plugins, loaders_from_representation, load_container, @@ -253,7 +254,6 @@ def get_contained_containers(container): """ - import avalon.schema from .pipeline import parse_container # Get avalon containers in this package setdress container @@ -263,7 +263,7 @@ def get_contained_containers(container): try: member_container = parse_container(node) containers.append(member_container) - except avalon.schema.ValidationError: + except schema.ValidationError: pass return containers diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index 636c826a11..4a7d1c5bea 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -7,10 +7,9 @@ from collections import OrderedDict from pyblish import api as pyblish -from avalon import schema - from openpype.api import Logger from openpype.pipeline import ( + schema, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index cb7c76f133..ca04f79ae6 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -9,10 +9,11 @@ import numbers import six from bson.objectid import ObjectId -from avalon import io, schema +from avalon import io from avalon.api import Session from openpype.lib import Anatomy +from openpype.pipeline import schema log = logging.getLogger(__name__) diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index ded149bdd0..76720fc9a3 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -8,11 +8,12 @@ from bson.objectid import ObjectId from pymongo import InsertOne, ReplaceOne import pyblish.api -from avalon import api, io, schema +from avalon import api, io from openpype.lib import ( create_hard_link, filter_profiles ) +from openpype.pipeline import schema class IntegrateHeroVersion(pyblish.api.InstancePlugin): diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py index 6cc6fae1fb..8cb8f30013 100644 --- a/openpype/tools/loader/model.py +++ b/openpype/tools/loader/model.py @@ -6,8 +6,10 @@ from uuid import uuid4 from Qt import QtCore, QtGui import qtawesome -from avalon import schema -from openpype.pipeline import HeroVersionType +from openpype.pipeline import ( + HeroVersionType, + schema, +) from openpype.style 
import get_default_entity_icon_color from openpype.tools.utils.models import TreeModel, Item diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index f8fd8a911a..2c47381751 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -7,8 +7,9 @@ from Qt import QtCore, QtGui import qtawesome from bson.objectid import ObjectId -from avalon import io, schema +from avalon import io from openpype.pipeline import ( + schema, HeroVersionType, registered_host, ) From 2591e81877985b86ab1afd15d58d0f39acf112e5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:56:41 +0200 Subject: [PATCH 109/244] replaced avalon imports in blender --- openpype/hosts/blender/api/ops.py | 14 ++++----- openpype/hosts/blender/api/pipeline.py | 31 +++---------------- .../blender/plugins/create/create_action.py | 4 +-- .../plugins/create/create_animation.py | 4 +-- .../blender/plugins/create/create_camera.py | 4 +-- .../blender/plugins/create/create_layout.py | 4 +-- .../blender/plugins/create/create_model.py | 4 +-- .../plugins/create/create_pointcache.py | 4 +-- .../blender/plugins/create/create_rig.py | 4 +-- .../blender/plugins/publish/extract_layout.py | 8 ++--- .../plugins/publish/integrate_animation.py | 1 - 11 files changed, 29 insertions(+), 53 deletions(-) diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index 29d6d356c8..c1b5add518 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -15,9 +15,9 @@ from Qt import QtWidgets, QtCore import bpy import bpy.utils.previews -import avalon.api -from openpype.tools.utils import host_tools from openpype import style +from openpype.pipeline import legacy_io +from openpype.tools.utils import host_tools from .workio import OpenFileCacher @@ -279,7 +279,7 @@ class LaunchLoader(LaunchQtApp): def before_window_show(self): self._window.set_context( - {"asset": avalon.api.Session["AVALON_ASSET"]}, + {"asset": legacy_io.Session["AVALON_ASSET"]}, refresh=True ) @@ -327,8 +327,8 @@ class LaunchWorkFiles(LaunchQtApp): def execute(self, context): result = super().execute(context) self._window.set_context({ - "asset": avalon.api.Session["AVALON_ASSET"], - "task": avalon.api.Session["AVALON_TASK"] + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] }) return result @@ -358,8 +358,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu): else: pyblish_menu_icon_id = 0 - asset = avalon.api.Session['AVALON_ASSET'] - task = avalon.api.Session['AVALON_TASK'] + asset = legacy_io.Session['AVALON_ASSET'] + task = legacy_io.Session['AVALON_TASK'] context_label = f"{asset}, {task}" context_label_item = layout.row() context_label_item.operator( diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py index 9420a10228..5b81764644 100644 --- a/openpype/hosts/blender/api/pipeline.py +++ b/openpype/hosts/blender/api/pipeline.py @@ -1,6 +1,5 @@ import os import sys -import importlib import traceback from typing import Callable, Dict, Iterator, List, Optional @@ -10,17 +9,15 @@ from . import lib from . 
import ops import pyblish.api -import avalon.api -from avalon import io from openpype.pipeline import ( schema, + legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, - uninstall_host, ) from openpype.api import Logger from openpype.lib import ( @@ -86,8 +83,8 @@ def uninstall(): def set_start_end_frames(): - asset_name = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ + asset_name = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -191,7 +188,7 @@ def _on_task_changed(): # `directory` attribute, so it opens in that directory (does it?). # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add - workdir = avalon.api.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] log.debug("New working directory: %s", workdir) @@ -202,26 +199,6 @@ def _register_events(): log.info("Installed event callback for 'taskChanged'...") -def reload_pipeline(*args): - """Attempt to reload pipeline at run-time. - - Warning: - This is primarily for development and debugging purposes and not well - tested. - - """ - - uninstall_host() - - for module in ( - "avalon.io", - "avalon.pipeline", - "avalon.api", - ): - module = importlib.import_module(module) - importlib.reload(module) - - def _discover_gui() -> Optional[Callable]: """Return the most desirable of the currently registered GUIs""" diff --git a/openpype/hosts/blender/plugins/create/create_action.py b/openpype/hosts/blender/plugins/create/create_action.py index 5f66f5da6e..54b3a501a7 100644 --- a/openpype/hosts/blender/plugins/create/create_action.py +++ b/openpype/hosts/blender/plugins/create/create_action.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io import openpype.hosts.blender.api.plugin from openpype.hosts.blender.api import lib @@ -22,7 +22,7 @@ class CreateAction(openpype.hosts.blender.api.plugin.Creator): name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py index b88010ae90..a0e9e5e399 100644 --- a/openpype/hosts/blender/plugins/create/create_animation.py +++ b/openpype/hosts/blender/plugins/create/create_animation.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -37,7 +37,7 @@ class CreateAnimation(plugin.Creator): # asset_group.empty_display_type = 'SINGLE_ARROW' asset_group = bpy.data.collections.new(name=name) instances.children.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) if (self.options or {}).get("useSelection"): diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py index 
cc796d464d..1a3c008069 100644 --- a/openpype/hosts/blender/plugins/create/create_camera.py +++ b/openpype/hosts/blender/plugins/create/create_camera.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -40,7 +40,7 @@ class CreateCamera(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') print(f"self.data: {self.data}") lib.imprint(asset_group, self.data) diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py index f62cbc52ba..5949a4b86e 100644 --- a/openpype/hosts/blender/plugins/create/create_layout.py +++ b/openpype/hosts/blender/plugins/create/create_layout.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -34,7 +34,7 @@ class CreateLayout(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) # Add selected objects to instance diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index 75c90f9bb1..fedc708943 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -34,7 +34,7 @@ class CreateModel(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) # Add selected objects to instance diff --git a/openpype/hosts/blender/plugins/create/create_pointcache.py b/openpype/hosts/blender/plugins/create/create_pointcache.py index bf5a84048f..38707fd3b1 100644 --- a/openpype/hosts/blender/plugins/create/create_pointcache.py +++ b/openpype/hosts/blender/plugins/create/create_pointcache.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io import openpype.hosts.blender.api.plugin from openpype.hosts.blender.api import lib @@ -22,7 +22,7 @@ class CreatePointcache(openpype.hosts.blender.api.plugin.Creator): name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) collection = bpy.data.collections.new(name=name) bpy.context.scene.collection.children.link(collection) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(collection, self.data) if (self.options or {}).get("useSelection"): diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 
65f5061924..0abd306c6b 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -2,7 +2,7 @@ import bpy -from avalon import api +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin, lib, ops from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES @@ -34,7 +34,7 @@ class CreateRig(plugin.Creator): asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) - self.data['task'] = api.Session.get('AVALON_TASK') + self.data['task'] = legacy_io.Session.get('AVALON_TASK') lib.imprint(asset_group, self.data) # Add selected objects to instance diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index b78a193d81..8ecc78a2c6 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -7,7 +7,7 @@ import bpy import bpy_extras import bpy_extras.anim_utils -from avalon import io +from openpype.pipeline import legacy_io from openpype.hosts.blender.api import plugin from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY import openpype.api @@ -139,7 +139,7 @@ class ExtractLayout(openpype.api.Extractor): self.log.debug("Parent: {}".format(parent)) # Get blend reference - blend = io.find_one( + blend = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), @@ -150,7 +150,7 @@ class ExtractLayout(openpype.api.Extractor): if blend: blend_id = blend["_id"] # Get fbx reference - fbx = io.find_one( + fbx = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), @@ -161,7 +161,7 @@ class ExtractLayout(openpype.api.Extractor): if fbx: fbx_id = fbx["_id"] # Get abc reference - abc = io.find_one( + abc = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), diff --git a/openpype/hosts/blender/plugins/publish/integrate_animation.py b/openpype/hosts/blender/plugins/publish/integrate_animation.py index 90e94a4aac..d9a85bc79b 100644 --- a/openpype/hosts/blender/plugins/publish/integrate_animation.py +++ b/openpype/hosts/blender/plugins/publish/integrate_animation.py @@ -1,6 +1,5 @@ import json -from avalon import io import pyblish.api From 3edce9456ed4bd6adf1fdb0db3368ed28d110b9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:56:54 +0200 Subject: [PATCH 110/244] replced avalon imports in aftereffects --- openpype/hosts/aftereffects/api/launch_logic.py | 9 ++++----- openpype/hosts/aftereffects/api/pipeline.py | 3 --- .../aftereffects/plugins/publish/collect_workfile.py | 3 +-- .../plugins/publish/validate_instance_asset.py | 11 +++++++---- 4 files changed, 12 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py index c549268978..30a3e1f1c3 100644 --- a/openpype/hosts/aftereffects/api/launch_logic.py +++ b/openpype/hosts/aftereffects/api/launch_logic.py @@ -12,9 +12,8 @@ from wsrpc_aiohttp import ( from Qt import QtCore +from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools - -from avalon import api from openpype.tools.adobe_webserver.app import WebServerTool from .ws_stub import AfterEffectsServerStub @@ -271,13 +270,13 @@ class AfterEffectsRoute(WebSocketRoute): log.info("Setting context change") log.info("project {} asset {} ".format(project, asset)) if project: - 
api.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project os.environ["AVALON_PROJECT"] = project if asset: - api.Session["AVALON_ASSET"] = asset + legacy_io.Session["AVALON_ASSET"] = asset os.environ["AVALON_ASSET"] = asset if task: - api.Session["AVALON_TASK"] = task + legacy_io.Session["AVALON_TASK"] = task os.environ["AVALON_TASK"] = task async def read(self): diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 3ed2de0e9d..73aea2da11 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -2,10 +2,8 @@ import os import sys from Qt import QtWidgets -from bson.objectid import ObjectId import pyblish.api -from avalon import io from openpype import lib from openpype.api import Logger @@ -15,7 +13,6 @@ from openpype.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, - registered_host, ) import openpype.hosts.aftereffects from openpype.lib import register_event_callback diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index cb5a2bad4f..21a0cd7a1b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -1,5 +1,5 @@ import os -from avalon import api + import pyblish.api from openpype.lib import get_subset_name_with_asset_doc @@ -11,7 +11,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): - task = api.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py index 37cecfbcc4..1a303f5da4 100644 --- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py @@ -1,7 +1,10 @@ -from avalon import api import pyblish.api + import openpype.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + legacy_io, +) from openpype.hosts.aftereffects.api import get_stub @@ -27,7 +30,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): for instance in instances: data = stub.read(instance[0]) - data["asset"] = api.Session["AVALON_ASSET"] + data["asset"] = legacy_io.Session["AVALON_ASSET"] stub.imprint(instance[0], data) @@ -51,7 +54,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): def process(self, instance): instance_asset = instance.data["asset"] - current_asset = api.Session["AVALON_ASSET"] + current_asset = legacy_io.Session["AVALON_ASSET"] msg = ( f"Instance asset {instance_asset} is not the same " f"as current context {current_asset}." 
From d5c52df5ce35cc4bcae79a175b91c5384fa02622 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:57:13 +0200 Subject: [PATCH 111/244] replaced avalon imports in celaction --- .../hosts/celaction/plugins/publish/collect_audio.py | 10 +++++----- .../plugins/publish/collect_celaction_instances.py | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py index 80c1c37d7e..8acda5fc7c 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_audio.py +++ b/openpype/hosts/celaction/plugins/publish/collect_audio.py @@ -1,10 +1,10 @@ import os import collections +from pprint import pformat import pyblish.api -from avalon import io -from pprint import pformat +from openpype.pipeline import legacy_io class AppendCelactionAudio(pyblish.api.ContextPlugin): @@ -60,7 +60,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): """ # Query all subsets for asset - subset_docs = io.find({ + subset_docs = legacy_io.find({ "type": "subset", "parent": asset_doc["_id"] }) @@ -93,7 +93,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): }} ] last_versions_by_subset_id = dict() - for doc in io.aggregate(pipeline): + for doc in legacy_io.aggregate(pipeline): doc["parent"] = doc["_id"] doc["_id"] = doc.pop("_version_id") last_versions_by_subset_id[doc["parent"]] = doc @@ -102,7 +102,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): for version_doc in last_versions_by_subset_id.values(): version_docs_by_id[version_doc["_id"]] = version_doc - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "parent": {"$in": list(version_docs_by_id.keys())}, "name": {"$in": representations} diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index f393e471c4..1d2d9da1af 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -1,6 +1,6 @@ import os -from avalon import api import pyblish.api +from openpype.pipeline import legacy_io class CollectCelactionInstances(pyblish.api.ContextPlugin): @@ -10,7 +10,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder + 0.1 def process(self, context): - task = api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] current_file = context.data["currentFile"] staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) From 480029f6828867124cb0eb650b8600bf976d8c8f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:57:25 +0200 Subject: [PATCH 112/244] replaced avalon imports in flame --- .../hosts/flame/plugins/publish/collect_timeline_otio.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index c6aeae7730..f2ae1f62a9 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -1,6 +1,7 @@ import pyblish.api -import avalon.api as avalon + import openpype.lib as oplib +from openpype.pipeline import legacy_io import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export @@ -18,7 +19,7 @@ class 
CollecTimelineOTIO(pyblish.api.ContextPlugin): # main asset_doc = context.data["assetEntity"] - task_name = avalon.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) From 93fa04e1da15c4819051dc86a5e495f8c5ba5270 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:58:19 +0200 Subject: [PATCH 113/244] replaced avalon imports in fusion --- openpype/hosts/fusion/api/lib.py | 20 +++++++++++-------- .../fusion/plugins/load/load_sequence.py | 9 +++++---- .../fusion/plugins/publish/submit_deadline.py | 8 ++++---- .../fusion/scripts/fusion_switch_shot.py | 13 +++++------- .../hosts/fusion/utility_scripts/switch_ui.py | 8 +++++--- 5 files changed, 31 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py index f7a2360bfa..29f3a3a3eb 100644 --- a/openpype/hosts/fusion/api/lib.py +++ b/openpype/hosts/fusion/api/lib.py @@ -6,8 +6,10 @@ import contextlib from bson.objectid import ObjectId from Qt import QtGui -from avalon import io -from openpype.pipeline import switch_container +from openpype.pipeline import ( + switch_container, + legacy_io, +) from .pipeline import get_current_comp, comp_lock_and_undo_chunk self = sys.modules[__name__] @@ -94,8 +96,10 @@ def switch_item(container, # so we can use the original name from those. if any(not x for x in [asset_name, subset_name, representation_name]): _id = ObjectId(container["representation"]) - representation = io.find_one({"type": "representation", "_id": _id}) - version, subset, asset, project = io.parenthood(representation) + representation = legacy_io.find_one({ + "type": "representation", "_id": _id + }) + version, subset, asset, project = legacy_io.parenthood(representation) if asset_name is None: asset_name = asset["name"] @@ -107,14 +111,14 @@ def switch_item(container, representation_name = representation["name"] # Find the new one - asset = io.find_one({ + asset = legacy_io.find_one({ "name": asset_name, "type": "asset" }) assert asset, ("Could not find asset in the database with the name " "'%s'" % asset_name) - subset = io.find_one({ + subset = legacy_io.find_one({ "name": subset_name, "type": "subset", "parent": asset["_id"] @@ -122,7 +126,7 @@ def switch_item(container, assert subset, ("Could not find subset in the database with the name " "'%s'" % subset_name) - version = io.find_one( + version = legacy_io.find_one( { "type": "version", "parent": subset["_id"] @@ -134,7 +138,7 @@ def switch_item(container, asset_name, subset_name ) - representation = io.find_one({ + representation = legacy_io.find_one({ "name": representation_name, "type": "representation", "parent": version["_id"]} diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index 075820de35..b860abd88b 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -1,10 +1,9 @@ import os import contextlib -from avalon import io - from openpype.pipeline import ( load, + legacy_io, get_representation_path, ) from openpype.hosts.fusion.api import ( @@ -212,8 +211,10 @@ class FusionLoadSequence(load.LoaderPlugin): path = self._get_first_image(root) # Get start frame from version data - version = io.find_one({"type": "version", - "_id": representation["parent"]}) + version = legacy_io.find_one({ + "type": "version", + "_id": representation["parent"] + }) start = 
version["data"].get("frameStart") if start is None: self.log.warning("Missing start frame for updated version" diff --git a/openpype/hosts/fusion/plugins/publish/submit_deadline.py b/openpype/hosts/fusion/plugins/publish/submit_deadline.py index 9da99dd9e2..8570c759bc 100644 --- a/openpype/hosts/fusion/plugins/publish/submit_deadline.py +++ b/openpype/hosts/fusion/plugins/publish/submit_deadline.py @@ -4,10 +4,10 @@ import getpass import requests -from avalon import api - import pyblish.api +from openpype.pipeline import legacy_io + class FusionSubmitDeadline(pyblish.api.InstancePlugin): """Submit current Comp to Deadline @@ -133,7 +133,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin): "FUSION9_MasterPrefs" ] environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( @@ -146,7 +146,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin): self.log.info(json.dumps(payload, indent=4, sort_keys=True)) # E.g. http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(DEADLINE_REST_URL) + url = "{}/api/jobs".format(deadline_url) response = requests.post(url, json=payload) if not response.ok: raise Exception(response.text) diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index ca8e5c9e37..704f420796 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -4,10 +4,8 @@ import sys import logging # Pipeline imports -import avalon.api -from avalon import io - from openpype.pipeline import ( + legacy_io, install_host, registered_host, ) @@ -167,7 +165,7 @@ def update_frame_range(comp, representations): """ version_ids = [r["parent"] for r in representations] - versions = io.find({"type": "version", "_id": {"$in": version_ids}}) + versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}}) versions = list(versions) versions = [v for v in versions @@ -205,12 +203,11 @@ def switch(asset_name, filepath=None, new=True): # Assert asset name exists # It is better to do this here then to wait till switch_shot does it - asset = io.find_one({"type": "asset", "name": asset_name}) + asset = legacy_io.find_one({"type": "asset", "name": asset_name}) assert asset, "Could not find '%s' in the database" % asset_name # Get current project - self._project = io.find_one({"type": "project", - "name": avalon.api.Session["AVALON_PROJECT"]}) + self._project = legacy_io.find_one({"type": "project"}) # Go to comp if not filepath: @@ -241,7 +238,7 @@ def switch(asset_name, filepath=None, new=True): current_comp.Print(message) # Build the session to switch to - switch_to_session = avalon.api.Session.copy() + switch_to_session = legacy_io.Session.copy() switch_to_session["AVALON_ASSET"] = asset['name'] if new: diff --git a/openpype/hosts/fusion/utility_scripts/switch_ui.py b/openpype/hosts/fusion/utility_scripts/switch_ui.py index 37306c7a2a..70eb3d0a19 100644 --- a/openpype/hosts/fusion/utility_scripts/switch_ui.py +++ b/openpype/hosts/fusion/utility_scripts/switch_ui.py @@ -5,11 +5,13 @@ import logging from Qt import QtWidgets, QtCore -from avalon import io import qtawesome as qta from openpype import style -from openpype.pipeline import install_host +from openpype.pipeline import ( + install_host, + legacy_io, +) from openpype.hosts.fusion import api from openpype.lib.avalon_context import 
get_workdir_from_session @@ -164,7 +166,7 @@ class App(QtWidgets.QWidget): return items def collect_assets(self): - return list(io.find({"type": "asset"}, {"name": True})) + return list(legacy_io.find({"type": "asset"}, {"name": True})) def populate_comp_box(self, files): """Ensure we display the filename only but the path is stored as well From 5bded18fbd709b1f61e8c2f40e400845bbe9cf99 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:59:13 +0200 Subject: [PATCH 114/244] replaced avalon imports in harmony --- openpype/hosts/harmony/api/README.md | 3 +-- openpype/hosts/harmony/api/pipeline.py | 5 ++--- .../harmony/plugins/publish/collect_farm_render.py | 12 ++++++------ 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/harmony/api/README.md b/openpype/hosts/harmony/api/README.md index e8d354e1e6..dd45eb14dd 100644 --- a/openpype/hosts/harmony/api/README.md +++ b/openpype/hosts/harmony/api/README.md @@ -419,7 +419,6 @@ class ExtractImage(pyblish.api.InstancePlugin): ```python import os -from avalon import api, io import openpype.hosts.harmony.api as harmony signature = str(uuid4()).replace("-", "_") @@ -611,7 +610,7 @@ class ImageSequenceLoader(load.LoaderPlugin): def update(self, container, representation): node = container.pop("node") - version = io.find_one({"_id": representation["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) files = [] for f in version["data"]["files"]: files.append( diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py index 88f11dd16f..b953d0e984 100644 --- a/openpype/hosts/harmony/api/pipeline.py +++ b/openpype/hosts/harmony/api/pipeline.py @@ -5,11 +5,10 @@ import logging from bson.objectid import ObjectId import pyblish.api -from avalon import io - from openpype import lib from openpype.lib import register_event_callback from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, @@ -111,7 +110,7 @@ def check_inventory(): outdated_containers = [] for container in ls(): representation = container['representation'] - representation_doc = io.find_one( + representation_doc = legacy_io.find_one( { "_id": ObjectId(representation), "type": "representation" diff --git a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py index 35b123f97d..f5bf051243 100644 --- a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py +++ b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py @@ -3,13 +3,13 @@ from pathlib import Path import attr -from avalon import api -from openpype.lib import get_formatted_current_time -import openpype.lib.abstract_collect_render -import openpype.hosts.harmony.api as harmony -from openpype.lib.abstract_collect_render import RenderInstance import openpype.lib +import openpype.lib.abstract_collect_render +from openpype.lib.abstract_collect_render import RenderInstance +from openpype.lib import get_formatted_current_time +from openpype.pipeline import legacy_io +import openpype.hosts.harmony.api as harmony @attr.s @@ -143,7 +143,7 @@ class CollectFarmRender(openpype.lib.abstract_collect_render. 
source=context.data["currentFile"], label=node.split("/")[1], subset=subset_name, - asset=api.Session["AVALON_ASSET"], + asset=legacy_io.Session["AVALON_ASSET"], attachTo=False, setMembers=[node], publish=info[4], From cea55ccc715e2aeb10cd6890f8c09377cbb1fef6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 11:59:43 +0200 Subject: [PATCH 115/244] replaced avalon imports in hiero --- openpype/hosts/hiero/api/lib.py | 14 ++++++-------- openpype/hosts/hiero/api/menu.py | 19 ++++++++++++------- openpype/hosts/hiero/api/tags.py | 8 ++++---- .../hosts/hiero/plugins/load/load_clip.py | 10 ++++++---- .../plugins/publish/precollect_workfile.py | 19 +++++++++++-------- .../collect_assetbuilds.py | 4 ++-- .../precollect_workfile.py | 4 ++-- 7 files changed, 43 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 00c30538fc..0e64ddcaf5 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -12,8 +12,7 @@ import hiero from Qt import QtWidgets from bson.objectid import ObjectId -import avalon.api as avalon -import avalon.io +from openpype.pipeline import legacy_io from openpype.api import (Logger, Anatomy, get_anatomy_settings) from . import tags @@ -383,7 +382,7 @@ def get_publish_attribute(tag): def sync_avalon_data_to_workfile(): # import session to get project dir - project_name = avalon.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] anatomy = Anatomy(project_name) work_template = anatomy.templates["work"]["path"] @@ -408,7 +407,7 @@ def sync_avalon_data_to_workfile(): project.setProjectRoot(active_project_root) # get project data from avalon db - project_doc = avalon.io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) project_data = project_doc["data"] log.debug("project_data: {}".format(project_data)) @@ -994,7 +993,6 @@ def check_inventory_versions(): it to red. """ from . import parse_container - from avalon import io # presets clip_color_last = "green" @@ -1006,19 +1004,19 @@ def check_inventory_versions(): if container: # get representation from io - representation = io.find_one({ + representation = legacy_io.find_one({ "type": "representation", "_id": ObjectId(container["representation"]) }) # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py index de20b86f30..e262abec00 100644 --- a/openpype/hosts/hiero/api/menu.py +++ b/openpype/hosts/hiero/api/menu.py @@ -1,14 +1,16 @@ import os import sys + import hiero.core -from openpype.api import Logger -from openpype.tools.utils import host_tools -from avalon.api import Session from hiero.ui import findMenuAction +from openpype.api import Logger +from openpype.pipeline import legacy_io +from openpype.tools.utils import host_tools + from . 
import tags -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) self = sys.modules[__name__] self._change_context_menu = None @@ -24,8 +26,10 @@ def update_menu_task_label(): log.warning("Can't find menuItem: {}".format(object_name)) return - label = "{}, {}".format(Session["AVALON_ASSET"], - Session["AVALON_TASK"]) + label = "{}, {}".format( + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] + ) menu = found_menu.menu() self._change_context_menu = label @@ -51,7 +55,8 @@ def menu_install(): menu_name = os.environ['AVALON_LABEL'] context_label = "{0}, {1}".format( - Session["AVALON_ASSET"], Session["AVALON_TASK"] + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) self._change_context_menu = context_label diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index fe5c0d5257..e15e3119a6 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -3,9 +3,9 @@ import os import hiero from openpype.api import Logger -from avalon import io +from openpype.pipeline import legacy_io -log = Logger().get_logger(__name__) +log = Logger.get_logger(__name__) def tag_data(): @@ -141,7 +141,7 @@ def add_tags_to_workfile(): nks_pres_tags = tag_data() # Get project task types. - tasks = io.find_one({"type": "project"})["config"]["tasks"] + tasks = legacy_io.find_one({"type": "project"})["config"]["tasks"] nks_pres_tags["[Tasks]"] = {} log.debug("__ tasks: {}".format(tasks)) for task_type in tasks.keys(): @@ -159,7 +159,7 @@ def add_tags_to_workfile(): # asset builds and shots. if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) == 1: nks_pres_tags["[AssetBuilds]"] = {} - for asset in io.find({"type": "asset"}): + for asset in legacy_io.find({"type": "asset"}): if asset["data"]["entityType"] == "AssetBuild": nks_pres_tags["[AssetBuilds]"][asset["name"]] = { "editable": "1", diff --git a/openpype/hosts/hiero/plugins/load/load_clip.py b/openpype/hosts/hiero/plugins/load/load_clip.py index d3908695a2..da4326c8c1 100644 --- a/openpype/hosts/hiero/plugins/load/load_clip.py +++ b/openpype/hosts/hiero/plugins/load/load_clip.py @@ -1,5 +1,7 @@ -from avalon import io -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) import openpype.hosts.hiero.api as phiero # from openpype.hosts.hiero.api import plugin, lib # reload(lib) @@ -105,7 +107,7 @@ class LoadClip(phiero.SequenceLoader): namespace = container['namespace'] track_item = phiero.get_track_items( track_item_name=namespace) - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -174,7 +176,7 @@ class LoadClip(phiero.SequenceLoader): # define version name version_name = version.get("name", None) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py index d48d6949bd..29c0397f79 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py @@ -1,12 +1,15 @@ import os -import pyblish.api -import hiero.ui -from openpype.hosts.hiero import api as phiero -from avalon import api as avalon -from pprint import pformat -from openpype.hosts.hiero.api.otio import hiero_export -from Qt.QtGui import QPixmap import tempfile +from 
pprint import pformat + +import pyblish.api +from Qt.QtGui import QPixmap + +import hiero.ui + +from openpype.pipeline import legacy_io +from openpype.hosts.hiero import api as phiero +from openpype.hosts.hiero.api.otio import hiero_export class PrecollectWorkfile(pyblish.api.ContextPlugin): @@ -17,7 +20,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): def process(self, context): - asset = avalon.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" project = phiero.get_current_project() active_timeline = hiero.ui.activeSequence() diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py index a90856c6fd..10baf25803 100644 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py @@ -1,5 +1,5 @@ from pyblish import api -from avalon import io +from openpype.pipeline import legacy_io class CollectAssetBuilds(api.ContextPlugin): @@ -18,7 +18,7 @@ class CollectAssetBuilds(api.ContextPlugin): def process(self, context): asset_builds = {} - for asset in io.find({"type": "asset"}): + for asset in legacy_io.find({"type": "asset"}): if asset["data"]["entityType"] == "AssetBuild": self.log.debug("Found \"{}\" in database.".format(asset)) asset_builds[asset["name"]] = asset diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py index ef7d07421b..693e151f6f 100644 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py @@ -1,7 +1,7 @@ import os import pyblish.api from openpype.hosts.hiero import api as phiero -from avalon import api as avalon +from openpype.pipeline import legacy_io class PreCollectWorkfile(pyblish.api.ContextPlugin): @@ -11,7 +11,7 @@ class PreCollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.51 def process(self, context): - asset = avalon.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" project = phiero.get_current_project() From 785bdb09c21ba9987d2258d5195f44354c2dc250 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:00:59 +0200 Subject: [PATCH 116/244] replaced avalon imports in houdini --- openpype/hosts/houdini/api/lib.py | 16 ++++++----- openpype/hosts/houdini/api/usd.py | 10 ++++--- .../houdini/plugins/create/create_hda.py | 14 ++++++---- .../plugins/publish/collect_usd_bootstrap.py | 12 ++++++--- .../plugins/publish/extract_usd_layered.py | 15 ++++++----- .../validate_usd_shade_model_exists.py | 14 ++++++---- .../avalon_uri_processor.py | 27 ++++++++++--------- 7 files changed, 65 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..7b8a3dc46c 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -4,8 +4,8 @@ from contextlib import contextmanager import six -from avalon import api, io from openpype.api import get_asset +from openpype.pipeline import legacy_io import hou @@ -75,9 +75,13 @@ def generate_ids(nodes, asset_id=None): if asset_id is None: # Get the asset ID from the database for the asset of current context - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}, - projection={"_id": True}) 
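A minimal sketch of the context-asset lookup these hunks converge on, assuming an installed legacy_io connection and an AVALON_ASSET key set in the session (both shown elsewhere in this series):

    from openpype.pipeline import legacy_io

    # Resolve the current context asset to its database id only.
    asset_data = legacy_io.find_one(
        {"type": "asset", "name": legacy_io.Session["AVALON_ASSET"]},
        projection={"_id": True},
    )
    asset_id = asset_data["_id"]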
+ asset_data = legacy_io.find_one( + { + "type": "asset", + "name": legacy_io.Session["AVALON_ASSET"] + }, + projection={"_id": True} + ) assert asset_data, "No current asset found in Session" asset_id = asset_data['_id'] @@ -424,8 +428,8 @@ def maintained_selection(): def reset_framerange(): """Set frame range to current asset""" - asset_name = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset_name, "type": "asset"}) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": asset_name, "type": "asset"}) frame_start = asset["data"].get("frameStart") frame_end = asset["data"].get("frameEnd") diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index a992f1d082..e9991e38ec 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -1,11 +1,12 @@ """Houdini-specific USD Library functions.""" import contextlib - import logging + from Qt import QtWidgets, QtCore, QtGui -from avalon import io + from openpype import style +from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from pxr import Sdf @@ -20,11 +21,12 @@ class SelectAssetDialog(QtWidgets.QWidget): Args: parm: Parameter where selected asset name is set. """ + def __init__(self, parm): self.setWindowTitle("Pick Asset") self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) - assets_widget = SingleSelectAssetsWidget(io, parent=self) + assets_widget = SingleSelectAssetsWidget(legacy_io, parent=self) layout = QtWidgets.QHBoxLayout(self) layout.addWidget(assets_widget) @@ -44,7 +46,7 @@ class SelectAssetDialog(QtWidgets.QWidget): select_id = None name = self._parm.eval() if name: - db_asset = io.find_one( + db_asset = legacy_io.find_one( {"name": name, "type": "asset"}, {"_id": True} ) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 0a9c1bad1e..5fc78c7539 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import hou -from avalon import io + +from openpype.pipeline import legacy_io from openpype.hosts.houdini.api import lib from openpype.hosts.houdini.api import plugin @@ -22,13 +23,16 @@ class CreateHDA(plugin.Creator): # type: (str) -> bool """Check if existing subset name versions already exists.""" # Get all subsets of the current asset - asset_id = io.find_one({"name": self.data["asset"], "type": "asset"}, - projection={"_id": True})['_id'] - subset_docs = io.find( + asset_id = legacy_io.find_one( + {"name": self.data["asset"], "type": "asset"}, + projection={"_id": True} + )['_id'] + subset_docs = legacy_io.find( { "type": "subset", "parent": asset_id - }, {"name": 1} + }, + {"name": 1} ) existing_subset_names = set(subset_docs.distinct("name")) existing_subset_names_low = { diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index 66dfba64df..3f0d10e0ba 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from avalon import io +from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib @@ -50,7 +50,10 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): self.log.debug("Add bootstrap for: %s" % bootstrap) - asset = io.find_one({"name": 
instance.data["asset"], "type": "asset"}) + asset = legacy_io.find_one({ + "name": instance.data["asset"], + "type": "asset" + }) assert asset, "Asset must exist: %s" % asset # Check which are not about to be created and don't exist yet @@ -104,7 +107,8 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): # Or, if they already exist in the database we can # skip them too. return bool( - io.find_one( - {"name": subset, "type": "subset", "parent": asset["_id"]} + legacy_io.find_one( + {"name": subset, "type": "subset", "parent": asset["_id"]}, + {"_id": True} ) ) diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 3e842ae766..bfcd93c1cb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -7,7 +7,10 @@ from collections import deque import pyblish.api import openpype.api -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.hosts.houdini.api.lib import render_rop @@ -266,8 +269,6 @@ class ExtractUSDLayered(openpype.api.Extractor): instance.data["files"].append(fname) def _compare_with_latest_publish(self, dependency, new_file): - - from avalon import api, io import filecmp _, ext = os.path.splitext(new_file) @@ -275,10 +276,10 @@ class ExtractUSDLayered(openpype.api.Extractor): # Compare this dependency with the latest published version # to detect whether we should make this into a new publish # version. If not, skip it. - asset = io.find_one( + asset = legacy_io.find_one( {"name": dependency.data["asset"], "type": "asset"} ) - subset = io.find_one( + subset = legacy_io.find_one( { "name": dependency.data["subset"], "type": "subset", @@ -290,7 +291,7 @@ class ExtractUSDLayered(openpype.api.Extractor): self.log.debug("No existing subset..") return False - version = io.find_one( + version = legacy_io.find_one( {"type": "version", "parent": subset["_id"], }, sort=[("name", -1)] ) @@ -298,7 +299,7 @@ class ExtractUSDLayered(openpype.api.Extractor): self.log.debug("No existing version..") return False - representation = io.find_one( + representation = legacy_io.find_one( { "name": ext.lstrip("."), "type": "representation", diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index fcfbf6b22d..44719ae488 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,9 +1,9 @@ import re import pyblish.api -import openpype.api -from avalon import io +import openpype.api +from openpype.pipeline import legacy_io class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -23,16 +23,20 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): shade_subset = subset.split(".", 1)[0] model_subset = re.sub("^usdShade", "usdModel", shade_subset) - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one( + {"name": asset, "type": "asset"}, + {"_id": True} + ) if not asset_doc: raise RuntimeError("Asset does not exist: %s" % asset) - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "name": model_subset, "type": "subset", "parent": asset_doc["_id"], - } + }, + {"_id": True} ) if not subset_doc: raise 
RuntimeError( diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py index 8cd51e6641..01a29472e7 100644 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py @@ -1,17 +1,21 @@ +import os import hou import husdoutputprocessors.base as base -import os -import re -import logging import colorbleed.usdlib as usdlib +from openpype.pipeline import ( + legacy_io, + registered_root, +) + def _get_project_publish_template(): """Return publish template from database for current project""" - from avalon import io - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = legacy_io.find_one( + {"type": "project"}, + projection={"config.template.publish": True} + ) return project["config"]["template"]["publish"] @@ -133,12 +137,11 @@ class AvalonURIOutputProcessor(base.OutputProcessorBase): """ - from avalon import api, io - from openpype.pipeline import registered_root - - PROJECT = api.Session["AVALON_PROJECT"] - asset_doc = io.find_one({"name": asset, - "type": "asset"}) + PROJECT = legacy_io.Session["AVALON_PROJECT"] + asset_doc = legacy_io.find_one({ + "name": asset, + "type": "asset" + }) if not asset_doc: raise RuntimeError("Invalid asset name: '%s'" % asset) From 4a5f4c16f4bcdf1f2de341615fc8badce8ed6237 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:12:41 +0200 Subject: [PATCH 117/244] replace avalon import in maya --- openpype/hosts/maya/api/action.py | 8 ++- openpype/hosts/maya/api/commands.py | 15 ++-- openpype/hosts/maya/api/lib.py | 72 +++++++++++-------- openpype/hosts/maya/api/menu.py | 13 ++-- openpype/hosts/maya/api/pipeline.py | 12 ++-- openpype/hosts/maya/api/setdress.py | 20 +++--- .../maya/plugins/create/create_render.py | 9 +-- .../create/create_unreal_skeletalmesh.py | 4 +- .../create/create_unreal_staticmesh.py | 6 +- .../maya/plugins/create/create_vrayscene.py | 9 +-- .../plugins/inventory/import_modelrender.py | 9 +-- .../hosts/maya/plugins/load/load_audio.py | 11 +-- .../maya/plugins/load/load_image_plane.py | 8 +-- openpype/hosts/maya/plugins/load/load_look.py | 8 ++- .../hosts/maya/plugins/load/load_reference.py | 8 ++- .../hosts/maya/plugins/load/load_vrayproxy.py | 13 ++-- .../maya/plugins/load/load_yeti_cache.py | 6 +- .../maya/plugins/publish/collect_render.py | 4 +- .../maya/plugins/publish/collect_review.py | 5 +- .../maya/plugins/publish/collect_vrayscene.py | 5 +- .../maya/plugins/publish/collect_workfile.py | 7 +- .../maya/plugins/publish/extract_look.py | 4 +- .../plugins/publish/submit_maya_muster.py | 5 +- .../plugins/publish/validate_model_name.py | 11 +-- .../publish/validate_node_ids_in_database.py | 5 +- .../publish/validate_node_ids_related.py | 5 +- .../publish/validate_renderlayer_aovs.py | 6 +- .../validate_unreal_staticmesh_naming.py | 10 +-- 28 files changed, 164 insertions(+), 134 deletions(-) diff --git a/openpype/hosts/maya/api/action.py b/openpype/hosts/maya/api/action.py index ab26748c8a..ca1006b6aa 100644 --- a/openpype/hosts/maya/api/action.py +++ b/openpype/hosts/maya/api/action.py @@ -2,8 +2,8 @@ from __future__ import absolute_import import pyblish.api -from avalon import io +from openpype.pipeline import legacy_io from openpype.api import get_errored_instances_from_context @@ -75,8 +75,10 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): from . 
import lib asset = instance.data['asset'] - asset_id = io.find_one({"name": asset, "type": "asset"}, - projection={"_id": True})['_id'] + asset_id = legacy_io.find_one( + {"name": asset, "type": "asset"}, + projection={"_id": True} + )['_id'] for node, _id in lib.generate_ids(nodes, asset_id=asset_id): lib.set_id(node, _id, overwrite=True) diff --git a/openpype/hosts/maya/api/commands.py b/openpype/hosts/maya/api/commands.py index a1e0be2cfe..dd616b6dd6 100644 --- a/openpype/hosts/maya/api/commands.py +++ b/openpype/hosts/maya/api/commands.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- """OpenPype script commands to be used directly in Maya.""" from maya import cmds -from avalon import api, io + +from openpype.pipeline import legacy_io class ToolWindows: @@ -73,13 +74,13 @@ def reset_frame_range(): 59.94: '59.94fps', 44100: '44100fps', 48000: '48000fps' - }.get(float(api.Session.get("AVALON_FPS", 25)), "pal") + }.get(float(legacy_io.Session.get("AVALON_FPS", 25)), "pal") cmds.currentUnit(time=fps) # Set frame start/end - asset_name = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset_name, "type": "asset"}) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": asset_name, "type": "asset"}) frame_start = asset["data"].get("frameStart") frame_end = asset["data"].get("frameEnd") @@ -144,8 +145,8 @@ def reset_resolution(): resolution_height = 1080 # Get resolution from asset - asset_name = api.Session["AVALON_ASSET"] - asset_doc = io.find_one({"name": asset_name, "type": "asset"}) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({"name": asset_name, "type": "asset"}) resolution = _resolution_from_document(asset_doc) # Try get resolution from project if resolution is None: @@ -154,7 +155,7 @@ def reset_resolution(): "Asset \"{}\" does not have set resolution." 
" Trying to get resolution from project" ).format(asset_name)) - project_doc = io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) resolution = _resolution_from_document(project_doc) if resolution is None: diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 9e99b96477..cf09c39b21 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -17,11 +17,10 @@ import bson from maya import cmds, mel import maya.api.OpenMaya as om -from avalon import api, io - from openpype import lib from openpype.api import get_anatomy_settings from openpype.pipeline import ( + legacy_io, discover_loader_plugins, loaders_from_representation, get_representation_path, @@ -1388,9 +1387,13 @@ def generate_ids(nodes, asset_id=None): if asset_id is None: # Get the asset ID from the database for the asset of current context - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}, - projection={"_id": True}) + asset_data = legacy_io.find_one( + { + "type": "asset", + "name": legacy_io.Session["AVALON_ASSET"] + }, + projection={"_id": True} + ) assert asset_data, "No current asset found in Session" asset_id = asset_data['_id'] @@ -1545,9 +1548,11 @@ def list_looks(asset_id): # # get all subsets with look leading in # the name associated with the asset - subset = io.find({"parent": bson.ObjectId(asset_id), - "type": "subset", - "name": {"$regex": "look*"}}) + subset = legacy_io.find({ + "parent": bson.ObjectId(asset_id), + "type": "subset", + "name": {"$regex": "look*"} + }) return list(subset) @@ -1566,13 +1571,17 @@ def assign_look_by_version(nodes, version_id): """ # Get representations of shader file and relationships - look_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "ma"}) + look_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "ma" + }) - json_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "json"}) + json_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "json" + }) # See if representation is already loaded, if so reuse it. 
host = registered_host() @@ -1637,9 +1646,11 @@ def assign_look(nodes, subset="lookDefault"): except bson.errors.InvalidId: log.warning("Asset ID is not compatible with bson") continue - subset_data = io.find_one({"type": "subset", - "name": subset, - "parent": asset_id}) + subset_data = legacy_io.find_one({ + "type": "subset", + "name": subset, + "parent": asset_id + }) if not subset_data: log.warning("No subset '{}' found for {}".format(subset, asset_id)) @@ -1647,13 +1658,18 @@ def assign_look(nodes, subset="lookDefault"): # get last version # with backwards compatibility - version = io.find_one({"parent": subset_data['_id'], - "type": "version", - "data.families": - {"$in": ["look"]} - }, - sort=[("name", -1)], - projection={"_id": True, "name": True}) + version = legacy_io.find_one( + { + "parent": subset_data['_id'], + "type": "version", + "data.families": {"$in": ["look"]} + }, + sort=[("name", -1)], + projection={ + "_id": True, + "name": True + } + ) log.debug("Assigning look '{}' ".format(subset, version["name"])) @@ -2136,7 +2152,7 @@ def reset_scene_resolution(): None """ - project_doc = io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) project_data = project_doc["data"] asset_data = lib.get_asset()["data"] @@ -2169,13 +2185,13 @@ def set_context_settings(): """ # Todo (Wijnand): apply renderer and resolution of project - project_doc = io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) project_data = project_doc["data"] asset_data = lib.get_asset()["data"] # Set project fps fps = asset_data.get("fps", project_data.get("fps", 25)) - api.Session["AVALON_FPS"] = str(fps) + legacy_io.Session["AVALON_FPS"] = str(fps) set_scene_fps(fps) reset_scene_resolution() @@ -2935,7 +2951,7 @@ def update_content_on_context_change(): This will update scene content to match new asset on context change """ scene_sets = cmds.listSets(allSets=True) - new_asset = api.Session["AVALON_ASSET"] + new_asset = legacy_io.Session["AVALON_ASSET"] new_data = lib.get_asset()["data"] for s in scene_sets: try: diff --git a/openpype/hosts/maya/api/menu.py b/openpype/hosts/maya/api/menu.py index 5f0fc39bf3..97f06c43af 100644 --- a/openpype/hosts/maya/api/menu.py +++ b/openpype/hosts/maya/api/menu.py @@ -6,10 +6,9 @@ from Qt import QtWidgets, QtGui import maya.utils import maya.cmds as cmds -import avalon.api - from openpype.api import BuildWorkfile from openpype.settings import get_project_settings +from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools from openpype.hosts.maya.api import lib from .lib import get_main_window, IS_HEADLESS @@ -40,15 +39,15 @@ def install(): parent_widget = get_main_window() cmds.menu( MENU_NAME, - label=avalon.api.Session["AVALON_LABEL"], + label=legacy_io.Session["AVALON_LABEL"], tearOff=True, parent="MayaWindow" ) # Create context menu context_label = "{}, {}".format( - avalon.api.Session["AVALON_ASSET"], - avalon.api.Session["AVALON_TASK"] + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) cmds.menuItem( "currentContext", @@ -211,7 +210,7 @@ def update_menu_task_label(): return label = "{}, {}".format( - avalon.api.Session["AVALON_ASSET"], - avalon.api.Session["AVALON_TASK"] + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) cmds.menuItem(object_name, edit=True, label=label) diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f6f3472eef..dd05bfbb21 100644 --- 
a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -7,7 +7,6 @@ from maya import utils, cmds, OpenMaya import maya.api.OpenMaya as om import pyblish.api -import avalon.api import openpype.hosts.maya from openpype.tools.utils import host_tools @@ -18,6 +17,7 @@ from openpype.lib import ( ) from openpype.lib.path_tools import HostDirmap from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, register_inventory_action_path, register_creator_plugin_path, @@ -93,7 +93,7 @@ def _set_project(): None """ - workdir = avalon.api.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] try: os.makedirs(workdir) @@ -473,7 +473,7 @@ def on_task_changed(): # Run menu.update_menu_task_label() - workdir = avalon.api.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] if os.path.exists(workdir): log.info("Updating Maya workspace for task change to %s", workdir) @@ -494,9 +494,9 @@ def on_task_changed(): lib.update_content_on_context_change() msg = " project: {}\n asset: {}\n task:{}".format( - avalon.api.Session["AVALON_PROJECT"], - avalon.api.Session["AVALON_ASSET"], - avalon.api.Session["AVALON_TASK"] + legacy_io.Session["AVALON_PROJECT"], + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"] ) lib.show_message( diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py index 018ea4558c..f8d3ed79b8 100644 --- a/openpype/hosts/maya/api/setdress.py +++ b/openpype/hosts/maya/api/setdress.py @@ -10,9 +10,9 @@ from bson.objectid import ObjectId from maya import cmds -from avalon import io from openpype.pipeline import ( schema, + legacy_io, discover_loader_plugins, loaders_from_representation, load_container, @@ -283,21 +283,23 @@ def update_package_version(container, version): """ # Versioning (from `core.maya.pipeline`) - current_representation = io.find_one({ + current_representation = legacy_io.find_one({ "_id": ObjectId(container["representation"]) }) assert current_representation is not None, "This is a bug" - version_, subset, asset, project = io.parenthood(current_representation) + version_, subset, asset, project = legacy_io.parenthood( + current_representation + ) if version == -1: - new_version = io.find_one({ + new_version = legacy_io.find_one({ "type": "version", "parent": subset["_id"] }, sort=[("name", -1)]) else: - new_version = io.find_one({ + new_version = legacy_io.find_one({ "type": "version", "parent": subset["_id"], "name": version, @@ -306,7 +308,7 @@ def update_package_version(container, version): assert new_version is not None, "This is a bug" # Get the new representation (new file) - new_representation = io.find_one({ + new_representation = legacy_io.find_one({ "type": "representation", "parent": new_version["_id"], "name": current_representation["name"] @@ -328,7 +330,7 @@ def update_package(set_container, representation): """ # Load the original package data - current_representation = io.find_one({ + current_representation = legacy_io.find_one({ "_id": ObjectId(set_container['representation']), "type": "representation" }) @@ -479,10 +481,10 @@ def update_scene(set_container, containers, current_data, new_data, new_file): # Check whether the conversion can be done by the Loader. # They *must* use the same asset, subset and Loader for # `update_container` to make sense. 
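The comparison that follows fetches both representation documents by id before deciding whether the update can reuse the same Loader; a minimal sketch of that lookup, assuming legacy_io is installed and the two ids are valid representation ObjectId strings taken from the containers:

    from bson.objectid import ObjectId

    old = legacy_io.find_one({"_id": ObjectId(representation_current)})
    new = legacy_io.find_one({"_id": ObjectId(representation_new)})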
- old = io.find_one({ + old = legacy_io.find_one({ "_id": ObjectId(representation_current) }) - new = io.find_one({ + new = legacy_io.find_one({ "_id": ObjectId(representation_new) }) is_valid = compare_representations(old=old, new=new) diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 4f0a394f85..1e3fc3f0ae 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -19,9 +19,10 @@ from openpype.api import ( get_project_settings, get_asset) from openpype.modules import ModulesManager -from openpype.pipeline import CreatorError - -from avalon.api import Session +from openpype.pipeline import ( + CreatorError, + legacy_io, +) class CreateRender(plugin.Creator): @@ -104,7 +105,7 @@ class CreateRender(plugin.Creator): self.deadline_servers = {} return self._project_settings = get_project_settings( - Session["AVALON_PROJECT"]) + legacy_io.Session["AVALON_PROJECT"]) # project_settings/maya/create/CreateRender/aov_separator try: diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index a6deeeee2e..1a8e84c80d 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator for Unreal Skeletal Meshes.""" from openpype.hosts.maya.api import plugin, lib -from avalon.api import Session +from openpype.pipeline import legacy_io from maya import cmds # noqa @@ -26,7 +26,7 @@ class CreateUnrealSkeletalMesh(plugin.Creator): dynamic_data = super(CreateUnrealSkeletalMesh, cls).get_dynamic_data( variant, task_name, asset_id, project_name, host_name ) - dynamic_data["asset"] = Session.get("AVALON_ASSET") + dynamic_data["asset"] = legacy_io.Session.get("AVALON_ASSET") return dynamic_data def process(self): diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index f62d15fe62..4e4417ff34 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- """Creator for Unreal Static Meshes.""" from openpype.hosts.maya.api import plugin, lib -from avalon.api import Session from openpype.api import get_project_settings +from openpype.pipeline import legacy_io from maya import cmds # noqa @@ -18,7 +18,7 @@ class CreateUnrealStaticMesh(plugin.Creator): """Constructor.""" super(CreateUnrealStaticMesh, self).__init__(*args, **kwargs) self._project_settings = get_project_settings( - Session["AVALON_PROJECT"]) + legacy_io.Session["AVALON_PROJECT"]) @classmethod def get_dynamic_data( @@ -27,7 +27,7 @@ class CreateUnrealStaticMesh(plugin.Creator): dynamic_data = super(CreateUnrealStaticMesh, cls).get_dynamic_data( variant, task_name, asset_id, project_name, host_name ) - dynamic_data["asset"] = Session.get("AVALON_ASSET") + dynamic_data["asset"] = legacy_io.Session.get("AVALON_ASSET") return dynamic_data def process(self): diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py index fa9c59e016..38cf5818a6 100644 --- a/openpype/hosts/maya/plugins/create/create_vrayscene.py +++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py @@ -19,11 +19,12 @@ from openpype.api import ( 
get_project_settings ) -from openpype.pipeline import CreatorError +from openpype.pipeline import ( + CreatorError, + legacy_io, +) from openpype.modules import ModulesManager -from avalon.api import Session - class CreateVRayScene(plugin.Creator): """Create Vray Scene.""" @@ -44,7 +45,7 @@ class CreateVRayScene(plugin.Creator): self.deadline_servers = {} return self._project_settings = get_project_settings( - Session["AVALON_PROJECT"]) + legacy_io.Session["AVALON_PROJECT"]) try: default_servers = deadline_settings["deadline_urls"] diff --git a/openpype/hosts/maya/plugins/inventory/import_modelrender.py b/openpype/hosts/maya/plugins/inventory/import_modelrender.py index c2e43f196f..a5367f16e5 100644 --- a/openpype/hosts/maya/plugins/inventory/import_modelrender.py +++ b/openpype/hosts/maya/plugins/inventory/import_modelrender.py @@ -1,9 +1,10 @@ import json -from avalon import io from bson.objectid import ObjectId + from openpype.pipeline import ( InventoryAction, get_representation_context, + legacy_io, ) from openpype.hosts.maya.api.lib import ( maintained_selection, @@ -39,7 +40,7 @@ class ImportModelRender(InventoryAction): else: nodes.append(n) - repr_doc = io.find_one({ + repr_doc = legacy_io.find_one({ "_id": ObjectId(container["representation"]), }) version_id = repr_doc["parent"] @@ -63,7 +64,7 @@ class ImportModelRender(InventoryAction): from maya import cmds # Get representations of shader file and relationships - look_repr = io.find_one({ + look_repr = legacy_io.find_one({ "type": "representation", "parent": version_id, "name": {"$regex": self.scene_type_regex}, @@ -72,7 +73,7 @@ class ImportModelRender(InventoryAction): print("No model render sets for this model version..") return - json_repr = io.find_one({ + json_repr = legacy_io.find_one({ "type": "representation", "parent": version_id, "name": self.look_data_type, diff --git a/openpype/hosts/maya/plugins/load/load_audio.py b/openpype/hosts/maya/plugins/load/load_audio.py index d8844ffea6..ce814e1299 100644 --- a/openpype/hosts/maya/plugins/load/load_audio.py +++ b/openpype/hosts/maya/plugins/load/load_audio.py @@ -1,8 +1,9 @@ from maya import cmds, mel -from avalon import io + from openpype.pipeline import ( + legacy_io, load, - get_representation_path + get_representation_path, ) from openpype.hosts.maya.api.pipeline import containerise from openpype.hosts.maya.api.lib import unique_namespace @@ -64,9 +65,9 @@ class AudioLoader(load.LoaderPlugin): ) # Set frame range. - version = io.find_one({"_id": representation["parent"]}) - subset = io.find_one({"_id": version["parent"]}) - asset = io.find_one({"_id": subset["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) + subset = legacy_io.find_one({"_id": version["parent"]}) + asset = legacy_io.find_one({"_id": subset["parent"]}) audio_node.sourceStart.set(1 - asset["data"]["frameStart"]) audio_node.sourceEnd.set(asset["data"]["frameEnd"]) diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index b250986489..b67c2cb209 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -1,7 +1,7 @@ from Qt import QtWidgets, QtCore -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -216,9 +216,9 @@ class ImagePlaneLoader(load.LoaderPlugin): ) # Set frame range. 
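Frame ranges in these loaders are read by walking the parent chain from the loaded representation up to its asset; a minimal sketch of that walk, assuming legacy_io is installed and representation is the loaded representation document:

    version = legacy_io.find_one({"_id": representation["parent"]})
    subset = legacy_io.find_one({"_id": version["parent"]})
    asset = legacy_io.find_one({"_id": subset["parent"]})
    frame_start = asset["data"]["frameStart"]
    frame_end = asset["data"]["frameEnd"]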
- version = io.find_one({"_id": representation["parent"]}) - subset = io.find_one({"_id": version["parent"]}) - asset = io.find_one({"_id": subset["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) + subset = legacy_io.find_one({"_id": version["parent"]}) + asset = legacy_io.find_one({"_id": subset["parent"]}) start_frame = asset["data"]["frameStart"] end_frame = asset["data"]["frameEnd"] image_plane_shape.frameOffset.set(1 - start_frame) diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 8f02ed59b8..80eac8e0b5 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -5,8 +5,10 @@ from collections import defaultdict from Qt import QtWidgets -from avalon import io -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api import lib from openpype.widgets.message_window import ScrollMessageBox @@ -71,7 +73,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): shader_nodes = cmds.ls(members, type='shadingEngine') nodes = set(self._get_nodes_with_shader(shader_nodes)) - json_representation = io.find_one({ + json_representation = legacy_io.find_one({ "type": "representation", "parent": representation['parent'], "name": "json" diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index a7222edfd4..a8875cf216 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -1,10 +1,12 @@ import os from maya import cmds -from avalon import api from openpype.api import get_project_settings from openpype.lib import get_creator_by_name -from openpype.pipeline import legacy_create +from openpype.pipeline import ( + legacy_io, + legacy_create, +) import openpype.hosts.maya.api.plugin from openpype.hosts.maya.api.lib import maintained_selection @@ -143,7 +145,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): roots = cmds.ls(self[:], assemblies=True, long=True) assert roots, "No root nodes in rig, this is a bug." 
- asset = api.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] dependency = str(context["representation"]["_id"]) self.log.info("Creating subset: {}".format(namespace)) diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index 69d54df62b..22d56139f6 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -11,9 +11,9 @@ from bson.objectid import ObjectId import maya.cmds as cmds -from avalon import io from openpype.api import get_project_settings from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -185,12 +185,11 @@ class VRayProxyLoader(load.LoaderPlugin): """ self.log.debug( "Looking for abc in published representations of this version.") - abc_rep = io.find_one( - { - "type": "representation", - "parent": ObjectId(version_id), - "name": "abc" - }) + abc_rep = legacy_io.find_one({ + "type": "representation", + "parent": ObjectId(version_id), + "name": "abc" + }) if abc_rep: self.log.debug("Found, we'll link alembic to vray proxy.") diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index c64e1c540b..fb903785ae 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -7,9 +7,9 @@ from pprint import pprint from maya import cmds -from avalon import io from openpype.api import get_project_settings from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -111,11 +111,11 @@ class YetiCacheLoader(load.LoaderPlugin): def update(self, container, representation): - io.install() + legacy_io.install() namespace = container["namespace"] container_node = container["objectName"] - fur_settings = io.find_one( + fur_settings = legacy_io.find_one( {"parent": representation["parent"], "name": "fursettings"} ) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index a525b562f3..2ce7c02737 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -49,8 +49,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup import pyblish.api -from avalon import api from openpype.lib import get_formatted_current_time +from openpype.pipeline import legacy_io from openpype.hosts.maya.api.lib_renderproducts import get as get_layer_render_products # noqa: E501 from openpype.hosts.maya.api import lib @@ -93,7 +93,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): render_globals = render_instance collected_render_layers = render_instance.data["setMembers"] filepath = context.data["currentFile"].replace("\\", "/") - asset = api.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] workspace = context.data["workspaceDir"] deadline_settings = ( diff --git a/openpype/hosts/maya/plugins/publish/collect_review.py b/openpype/hosts/maya/plugins/publish/collect_review.py index 60183341f9..1af92c3bfc 100644 --- a/openpype/hosts/maya/plugins/publish/collect_review.py +++ b/openpype/hosts/maya/plugins/publish/collect_review.py @@ -2,7 +2,8 @@ from maya import cmds, mel import pymel.core as pm import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CollectReview(pyblish.api.InstancePlugin): @@ -19,7 +20,7 @@ class CollectReview(pyblish.api.InstancePlugin): self.log.debug('instance: {}'.format(instance)) - 
task = avalon.api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] # get cameras members = instance.data['setMembers'] diff --git a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py index 327fc836dc..afdb570cbc 100644 --- a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py @@ -6,7 +6,8 @@ import maya.app.renderSetup.model.renderSetup as renderSetup from maya import cmds import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io from openpype.lib import get_formatted_current_time from openpype.hosts.maya.api import lib @@ -117,7 +118,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin): # instance subset "family": "vrayscene_layer", "families": ["vrayscene_layer"], - "asset": api.Session["AVALON_ASSET"], + "asset": legacy_io.Session["AVALON_ASSET"], "time": get_formatted_current_time(), "author": context.data["user"], # Add source to allow tracing back to the scene from diff --git a/openpype/hosts/maya/plugins/publish/collect_workfile.py b/openpype/hosts/maya/plugins/publish/collect_workfile.py index ee676f50d0..12d86869ea 100644 --- a/openpype/hosts/maya/plugins/publish/collect_workfile.py +++ b/openpype/hosts/maya/plugins/publish/collect_workfile.py @@ -1,7 +1,8 @@ -import pyblish.api -import avalon.api import os +import pyblish.api + from maya import cmds +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): @@ -19,7 +20,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = avalon.api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] data = {} diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 6fcc308f78..881705b92c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -12,9 +12,9 @@ from collections import OrderedDict from maya import cmds # noqa import pyblish.api -from avalon import io import openpype.api +from openpype.pipeline import legacy_io from openpype.hosts.maya.api import lib # Modes for transfer @@ -40,7 +40,7 @@ def find_paths_by_hash(texture_hash): """ key = "data.sourceHashes.{0}".format(texture_hash) - return io.distinct(key, {"type": "version"}) + return legacy_io.distinct(key, {"type": "version"}) def maketx(source, destination, *args): diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index f852904580..3ce9ec714c 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -8,10 +8,9 @@ import requests from maya import cmds -from avalon import api - import pyblish.api from openpype.hosts.maya.api import lib +from openpype.pipeline import legacy_io from openpype.api import get_system_settings @@ -503,7 +502,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "TOOL_ENV" ] environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) # self.log.debug("enviro: {}".format(pprint(environment))) for path in os.environ: if path.lower().startswith('pype_'): diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py 
b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 3757e13a9b..50acf2b8b7 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -1,16 +1,17 @@ # -*- coding: utf-8 -*- """Validate model nodes names.""" +import os +import re from maya import cmds import pyblish.api + import openpype.api -import avalon.api +from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action from openpype.hosts.maya.api.shader_definition_editor import ( DEFINITION_FILENAME) from openpype.lib.mongo import OpenPypeMongoConnection import gridfs -import re -import os class ValidateModelName(pyblish.api.InstancePlugin): @@ -68,7 +69,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): invalid.append(top_group) else: if "asset" in r.groupindex: - if m.group("asset") != avalon.api.Session["AVALON_ASSET"]: + if m.group("asset") != legacy_io.Session["AVALON_ASSET"]: cls.log.error("Invalid asset name in top level group.") return top_group if "subset" in r.groupindex: @@ -76,7 +77,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): cls.log.error("Invalid subset name in top level group.") return top_group if "project" in r.groupindex: - if m.group("project") != avalon.api.Session["AVALON_PROJECT"]: + if m.group("project") != legacy_io.Session["AVALON_PROJECT"]: cls.log.error("Invalid project name in top level group.") return top_group diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py index c5f675c8ca..068d6b38a1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_in_database.py @@ -1,8 +1,7 @@ import pyblish.api -from avalon import io - import openpype.api +from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action from openpype.hosts.maya.api import lib @@ -43,7 +42,7 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): nodes=instance[:]) # check ids against database ids - db_asset_ids = io.find({"type": "asset"}).distinct("_id") + db_asset_ids = legacy_io.find({"type": "asset"}).distinct("_id") db_asset_ids = set(str(i) for i in db_asset_ids) # Get all asset IDs diff --git a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py index 276b6713f4..38407e4176 100644 --- a/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/openpype/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,9 +1,8 @@ import pyblish.api import openpype.api -from avalon import io +from openpype.pipeline import legacy_io import openpype.hosts.maya.api.action - from openpype.hosts.maya.api import lib @@ -38,7 +37,7 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin): invalid = list() asset = instance.data['asset'] - asset_data = io.find_one( + asset_data = legacy_io.find_one( { "name": asset, "type": "asset" diff --git a/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py b/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py index 4eb445ac68..e65150eb0f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py +++ b/openpype/hosts/maya/plugins/publish/validate_renderlayer_aovs.py @@ -1,7 +1,7 @@ import pyblish.api import openpype.hosts.maya.api.action -from avalon import io +from openpype.pipeline import legacy_io import openpype.api @@ -48,8 +48,8 @@ 
class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): def validate_subset_registered(self, asset_name, subset_name): """Check if subset is registered in the database under the asset""" - asset = io.find_one({"type": "asset", "name": asset_name}) - is_valid = io.find_one({ + asset = legacy_io.find_one({"type": "asset", "name": asset_name}) + is_valid = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset["_id"] diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index 43f6c85827..33788d1835 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- """Validator for correct naming of Static Meshes.""" -from maya import cmds # noqa +import re + import pyblish.api import openpype.api import openpype.hosts.maya.api.action -from avalon.api import Session +from openpype.pipeline import legacy_io from openpype.api import get_project_settings -import re class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): @@ -63,7 +63,9 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): invalid = [] - project_settings = get_project_settings(Session["AVALON_PROJECT"]) + project_settings = get_project_settings( + legacy_io.Session["AVALON_PROJECT"] + ) collision_prefixes = ( project_settings ["maya"] From dc0c46dff9e121b333f98b0511dd45bb1920a344 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:17:36 +0200 Subject: [PATCH 118/244] replaced avalon imports in nuke --- openpype/hosts/nuke/api/command.py | 15 ++++----- openpype/hosts/nuke/api/lib.py | 32 +++++++++++-------- .../hosts/nuke/plugins/load/load_backdrop.py | 6 ++-- .../nuke/plugins/load/load_camera_abc.py | 6 ++-- openpype/hosts/nuke/plugins/load/load_clip.py | 10 +++--- .../hosts/nuke/plugins/load/load_effects.py | 7 ++-- .../nuke/plugins/load/load_effects_ip.py | 7 ++-- .../hosts/nuke/plugins/load/load_gizmo.py | 7 ++-- .../hosts/nuke/plugins/load/load_gizmo_ip.py | 6 ++-- .../hosts/nuke/plugins/load/load_image.py | 6 ++-- .../hosts/nuke/plugins/load/load_model.py | 7 ++-- .../nuke/plugins/load/load_script_precomp.py | 7 ++-- .../nuke/plugins/publish/collect_reads.py | 9 ++++-- .../plugins/publish/precollect_instances.py | 7 ++-- .../nuke/plugins/publish/precollect_writes.py | 9 ++++-- .../nuke/plugins/publish/validate_script.py | 5 +-- 16 files changed, 78 insertions(+), 68 deletions(-) diff --git a/openpype/hosts/nuke/api/command.py b/openpype/hosts/nuke/api/command.py index 6f74c08e97..c756c48a12 100644 --- a/openpype/hosts/nuke/api/command.py +++ b/openpype/hosts/nuke/api/command.py @@ -3,8 +3,7 @@ import contextlib import nuke from bson.objectid import ObjectId -from avalon import api, io - +from openpype.pipeline import legacy_io log = logging.getLogger(__name__) @@ -15,11 +14,11 @@ def reset_frame_range(): displayed handles """ - fps = float(api.Session.get("AVALON_FPS", 25)) + fps = float(legacy_io.Session.get("AVALON_FPS", 25)) nuke.root()["fps"].setValue(fps) - name = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": name, "type": "asset"}) + name = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": name, "type": "asset"}) asset_data = asset["data"] handles = get_handles(asset) @@ -71,10 +70,10 @@ def get_handles(asset): if "visualParent" in data: vp = data["visualParent"] if vp is not None: - 
parent_asset = io.find_one({"_id": ObjectId(vp)}) + parent_asset = legacy_io.find_one({"_id": ObjectId(vp)}) if parent_asset is None: - parent_asset = io.find_one({"_id": ObjectId(asset["parent"])}) + parent_asset = legacy_io.find_one({"_id": ObjectId(asset["parent"])}) if parent_asset is not None: return get_handles(parent_asset) @@ -84,7 +83,7 @@ def get_handles(asset): def reset_resolution(): """Set resolution to project resolution.""" - project = io.find_one({"type": "project"}) + project = legacy_io.find_one({"type": "project"}) p_data = project["data"] width = p_data.get("resolution_width", diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e05c6aecbd..eafb707249 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -10,8 +10,6 @@ from bson.objectid import ObjectId import nuke -from avalon import api, io - from openpype.api import ( Logger, Anatomy, @@ -26,7 +24,10 @@ from openpype.tools.utils import host_tools from openpype.lib.path_tools import HostDirmap from openpype.settings import get_project_settings from openpype.modules import ModulesManager -from openpype.pipeline import discover_legacy_creator_plugins +from openpype.pipeline import ( + discover_legacy_creator_plugins, + legacy_io, +) from .workio import ( save_file, @@ -569,7 +570,7 @@ def check_inventory_versions(): avalon_knob_data = read(node) # get representation from io - representation = io.find_one({ + representation = legacy_io.find_one({ "type": "representation", "_id": ObjectId(avalon_knob_data["representation"]) }) @@ -583,13 +584,13 @@ def check_inventory_versions(): continue # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') @@ -726,8 +727,8 @@ def format_anatomy(data): file = script_name() data["version"] = get_version_from_path(file) - project_doc = io.find_one({"type": "project"}) - asset_doc = io.find_one({ + project_doc = legacy_io.find_one({"type": "project"}) + asset_doc = legacy_io.find_one({ "type": "asset", "name": data["avalon"]["asset"] }) @@ -1138,8 +1139,11 @@ class WorkfileSettings(object): nodes=None, **kwargs): Context._project_doc = kwargs.get( - "project") or io.find_one({"type": "project"}) - self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"] + "project") or legacy_io.find_one({"type": "project"}) + self._asset = ( + kwargs.get("asset_name") + or legacy_io.Session["AVALON_ASSET"] + ) self._asset_entity = get_asset(self._asset) self._root_node = root_node or nuke.root() self._nodes = self.get_nodes(nodes=nodes) @@ -1486,9 +1490,9 @@ class WorkfileSettings(object): def reset_resolution(self): """Set resolution to project resolution.""" log.info("Resetting resolution") - project = io.find_one({"type": "project"}) - asset = api.Session["AVALON_ASSET"] - asset = io.find_one({"name": asset, "type": "asset"}) + project = legacy_io.find_one({"type": "project"}) + asset = legacy_io.Session["AVALON_ASSET"] + asset = legacy_io.find_one({"name": asset, "type": "asset"}) asset_data = asset.get('data', {}) data = { @@ -1608,7 +1612,7 @@ def get_hierarchical_attr(entity, attr, default=None): ): parent_id = entity['data']['visualParent'] - parent = io.find_one({'_id': parent_id}) + parent = legacy_io.find_one({'_id': parent_id}) return get_hierarchical_attr(parent, attr) diff --git 
a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 36cec6f4c5..91f1c80b2a 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -1,8 +1,8 @@ -from avalon import io import nuke import nukescripts from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -188,7 +188,7 @@ class LoadBackdropNodes(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -237,7 +237,7 @@ class LoadBackdropNodes(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index fb5f7f8ede..964ca5ec90 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -1,7 +1,7 @@ import nuke -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -102,7 +102,7 @@ class AlembicCameraLoader(load.LoaderPlugin): None """ # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -175,7 +175,7 @@ class AlembicCameraLoader(load.LoaderPlugin): """ Coloring a node by correct color by actual version """ # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 9b0588feac..681561e303 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -1,8 +1,10 @@ import nuke import qargparse -from avalon import io -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) from openpype.hosts.nuke.api.lib import ( get_imageio_input_colorspace, maintained_selection @@ -194,7 +196,7 @@ class LoadClip(plugin.NukeLoader): start_at_workfile = bool("start at" in read_node['frame_mode'].value()) - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -264,7 +266,7 @@ class LoadClip(plugin.NukeLoader): # change color of read_node # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 56c5acbb0a..6a30330ed0 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -3,9 +3,8 @@ from collections import OrderedDict import nuke import six -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -149,7 +148,7 @@ class LoadEffects(load.LoaderPlugin): """ # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -245,7 +244,7 @@ class LoadEffects(load.LoaderPlugin): self.connect_read_node(GN, namespace, json_f["assignTo"]) # get all versions in list 
- versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 0bc5f5a514..eaf151b3b8 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -3,9 +3,8 @@ from collections import OrderedDict import six import nuke -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -154,7 +153,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -252,7 +251,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): # return # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo.py b/openpype/hosts/nuke/plugins/load/load_gizmo.py index 6f2b191be9..4ea9d64d7d 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo.py @@ -1,8 +1,7 @@ import nuke -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -102,7 +101,7 @@ class LoadGizmo(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -150,7 +149,7 @@ class LoadGizmo(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py index 46134afcf0..38dd70935e 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -1,8 +1,8 @@ import nuke import six -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -108,7 +108,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): # get main variables # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -156,7 +156,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 9a175a0cba..6df286a4f7 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -1,9 +1,9 @@ import nuke import qargparse -from avalon import io from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -186,13 +186,13 @@ class LoadImage(load.LoaderPlugin): format(frame_number, "0{}".format(padding))) # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git 
a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index e445beca05..9788bb25d2 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -1,6 +1,7 @@ import nuke -from avalon import io + from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -99,7 +100,7 @@ class AlembicModelLoader(load.LoaderPlugin): None """ # Get version from io - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -172,7 +173,7 @@ class AlembicModelLoader(load.LoaderPlugin): """ Coloring a node by correct color by actual version """ # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 779f101682..bd351ad785 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -1,8 +1,7 @@ import nuke -from avalon import io - from openpype.pipeline import ( + legacy_io, load, get_representation_path, ) @@ -117,13 +116,13 @@ class LinkAsGroup(load.LoaderPlugin): root = get_representation_path(representation).replace("\\", "/") # Get start frame from version data - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index 45e9969eb9..4d6944f523 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -2,7 +2,8 @@ import os import re import nuke import pyblish.api -from avalon import io, api + +from openpype.pipeline import legacy_io @pyblish.api.log @@ -15,8 +16,10 @@ class CollectNukeReads(pyblish.api.InstancePlugin): families = ["source"] def process(self, instance): - asset_data = io.find_one({"type": "asset", - "name": api.Session["AVALON_ASSET"]}) + asset_data = legacy_io.find_one({ + "type": "asset", + "name": legacy_io.Session["AVALON_ASSET"] + }) self.log.debug("asset_data: {}".format(asset_data["data"])) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 29c706f302..d778421bde 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -1,6 +1,7 @@ import nuke import pyblish.api -from avalon import io, api + +from openpype.pipeline import legacy_io from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data @@ -19,9 +20,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): sync_workfile_version_on_families = [] def process(self, context): - asset_data = io.find_one({ + asset_data = legacy_io.find_one({ "type": "asset", - "name": api.Session["AVALON_ASSET"] + "name": legacy_io.Session["AVALON_ASSET"] }) self.log.debug("asset_data: {}".format(asset_data["data"])) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index 4826b2788f..8669f4f485 100644 --- 
a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -3,9 +3,12 @@ import re from pprint import pformat import nuke import pyblish.api -from avalon import io + import openpype.api as pype -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + legacy_io, + get_representation_path, +) @pyblish.api.log @@ -180,7 +183,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): repre_doc = None if version_doc: # Try to find it's representation (Expected there is only one) - repre_doc = io.find_one( + repre_doc = legacy_io.find_one( {"type": "representation", "parent": version_doc["_id"]} ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py index c35d09dcde..10c9e93f8b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script.py @@ -1,6 +1,7 @@ import pyblish.api -from avalon import io + from openpype import lib +from openpype.pipeline import legacy_io @pyblish.api.log @@ -115,7 +116,7 @@ class ValidateScript(pyblish.api.InstancePlugin): def check_parent_hierarchical(self, entityId, attr): if entityId is None: return None - entity = io.find_one({"_id": entityId}) + entity = legacy_io.find_one({"_id": entityId}) if attr in entity['data']: self.log.info(attr) return entity['data'][attr] From f13c2d287f49688e31701f67104c4c6516fcb9a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:18:08 +0200 Subject: [PATCH 119/244] replaced avalon imports in photoshop --- openpype/hosts/photoshop/api/launch_logic.py | 9 ++++----- openpype/hosts/photoshop/api/pipeline.py | 4 ++-- .../hosts/photoshop/plugins/publish/collect_instances.py | 4 ++-- .../photoshop/plugins/publish/validate_instance_asset.py | 7 ++++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 0021905cb5..0bbb19523d 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -11,9 +11,8 @@ from wsrpc_aiohttp import ( from Qt import QtCore from openpype.api import Logger +from openpype.pipeline import legacy_io from openpype.tools.utils import host_tools - -from avalon import api from openpype.tools.adobe_webserver.app import WebServerTool from .ws_stub import PhotoshopServerStub @@ -320,13 +319,13 @@ class PhotoshopRoute(WebSocketRoute): log.info("Setting context change") log.info("project {} asset {} ".format(project, asset)) if project: - api.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project os.environ["AVALON_PROJECT"] = project if asset: - api.Session["AVALON_ASSET"] = asset + legacy_io.Session["AVALON_ASSET"] = asset os.environ["AVALON_ASSET"] = asset if task: - api.Session["AVALON_TASK"] = task + legacy_io.Session["AVALON_TASK"] = task os.environ["AVALON_TASK"] = task async def read(self): diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 1f069c2636..906418aced 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -3,11 +3,11 @@ from Qt import QtWidgets from bson.objectid import ObjectId import pyblish.api -from avalon import io from openpype.api import Logger from openpype.lib import register_event_callback from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, 
register_creator_plugin_path, deregister_loader_plugin_path, @@ -37,7 +37,7 @@ def check_inventory(): outdated_containers = [] for container in host.ls(): representation = container['representation'] - representation_doc = io.find_one( + representation_doc = legacy_io.find_one( { "_id": ObjectId(representation), "type": "representation" diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 6198ed0156..50b50f86d9 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -1,9 +1,9 @@ -from avalon import api import pyblish.api from openpype.settings import get_project_settings from openpype.hosts.photoshop import api as photoshop from openpype.lib import prepare_template_data +from openpype.pipeline import legacy_io class CollectInstances(pyblish.api.ContextPlugin): @@ -79,7 +79,7 @@ class CollectInstances(pyblish.api.ContextPlugin): "CreateImage", {}).get( "defaults", ['']) family = "image" - task_name = api.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] asset_name = context.data["assetEntity"]["name"] fill_pairs = { diff --git a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py index ebe9cc21ea..b65f9d259f 100644 --- a/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/openpype/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -1,6 +1,7 @@ -from avalon import api import pyblish.api + import openpype.api +from openpype.pipeline import legacy_io from openpype.hosts.photoshop import api as photoshop @@ -26,7 +27,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action): for instance in instances: data = stub.read(instance[0]) - data["asset"] = api.Session["AVALON_ASSET"] + data["asset"] = legacy_io.Session["AVALON_ASSET"] stub.imprint(instance[0], data) @@ -48,7 +49,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin): def process(self, instance): instance_asset = instance.data["asset"] - current_asset = api.Session["AVALON_ASSET"] + current_asset = legacy_io.Session["AVALON_ASSET"] msg = ( f"Instance asset {instance_asset} is not the same " f"as current context {current_asset}. 
PLEASE DO:\n" From c13a4cd7c4d22263dfabbfc496084b32f66be0fe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:18:49 +0200 Subject: [PATCH 120/244] replacead avalon imports in resolve --- openpype/hosts/resolve/plugins/load/load_clip.py | 10 ++++++---- .../resolve/plugins/publish/precollect_workfile.py | 9 ++++----- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/resolve/plugins/load/load_clip.py b/openpype/hosts/resolve/plugins/load/load_clip.py index 71850d95f6..cf88b14e81 100644 --- a/openpype/hosts/resolve/plugins/load/load_clip.py +++ b/openpype/hosts/resolve/plugins/load/load_clip.py @@ -1,9 +1,11 @@ from copy import deepcopy from importlib import reload -from avalon import io from openpype.hosts import resolve -from openpype.pipeline import get_representation_path +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) from openpype.hosts.resolve.api import lib, plugin reload(plugin) reload(lib) @@ -94,7 +96,7 @@ class LoadClip(resolve.TimelineItemLoader): namespace = container['namespace'] timeline_item_data = resolve.get_pype_timeline_item_by_name(namespace) timeline_item = timeline_item_data["clip"]["item"] - version = io.find_one({ + version = legacy_io.find_one({ "type": "version", "_id": representation["parent"] }) @@ -140,7 +142,7 @@ class LoadClip(resolve.TimelineItemLoader): # define version name version_name = version.get("name", None) # get all versions in list - versions = io.find({ + versions = legacy_io.find({ "type": "version", "parent": version["parent"] }).distinct('name') diff --git a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py index 1333516177..a58f288770 100644 --- a/openpype/hosts/resolve/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py @@ -1,10 +1,9 @@ import pyblish.api -from openpype.hosts import resolve -from avalon import api as avalon from pprint import pformat - -# dev from importlib import reload + +from openpype.hosts import resolve +from openpype.pipeline import legacy_io from openpype.hosts.resolve.otio import davinci_export reload(davinci_export) @@ -17,7 +16,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): def process(self, context): - asset = avalon.Session["AVALON_ASSET"] + asset = legacy_io.Session["AVALON_ASSET"] subset = "workfile" project = resolve.get_current_project() fps = project.GetSetting("timelineFrameRate") From 4eb6f09b8d46500349613e5f827b075f2a716679 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:19:56 +0200 Subject: [PATCH 121/244] replace avalon imports in standalone publisher --- .../plugins/publish/collect_bulk_mov_instances.py | 6 +++--- .../plugins/publish/collect_context.py | 5 +++-- .../plugins/publish/collect_hierarchy.py | 13 ++++++++----- .../plugins/publish/collect_matching_asset.py | 5 +++-- .../plugins/publish/extract_bg_for_compositing.py | 7 ++++--- .../plugins/publish/extract_bg_main_groups.py | 10 ++++++---- .../plugins/publish/extract_images_from_psd.py | 9 +++++---- .../plugins/publish/validate_task_existence.py | 8 +++++--- 8 files changed, 37 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py index 9f075d66cf..3e7fb19c00 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py +++ 
b/openpype/hosts/standalonepublisher/plugins/publish/collect_bulk_mov_instances.py @@ -2,8 +2,8 @@ import copy import json import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectBulkMovInstances(pyblish.api.InstancePlugin): @@ -26,7 +26,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): context = instance.context asset_name = instance.data["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -52,7 +52,7 @@ class CollectBulkMovInstances(pyblish.api.InstancePlugin): self.subset_name_variant, task_name, asset_doc, - io.Session["AVALON_PROJECT"] + legacy_io.Session["AVALON_PROJECT"] ) instance_name = f"{asset_name}_{subset_name}" diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py index 6913e0836d..bfa9dcf73a 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py @@ -19,7 +19,8 @@ import copy from pprint import pformat import clique import pyblish.api -from avalon import io + +from openpype.pipeline import legacy_io class CollectContextDataSAPublish(pyblish.api.ContextPlugin): @@ -37,7 +38,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): def process(self, context): # get json paths from os and load them - io.install() + legacy_io.install() # get json file context input_json_path = os.environ.get("SAPUBLISH_INPATH") diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py index b2735f3428..77163651c4 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py @@ -1,8 +1,10 @@ -import pyblish.api -import re import os -from avalon import io +import re from copy import deepcopy +import pyblish.api + +from openpype.pipeline import legacy_io + class CollectHierarchyInstance(pyblish.api.ContextPlugin): """Collecting hierarchy context from `parents` and `hierarchy` data @@ -63,7 +65,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): hierarchy = list() visual_hierarchy = [instance.context.data["assetEntity"]] while True: - visual_parent = io.find_one( + visual_parent = legacy_io.find_one( {"_id": visual_hierarchy[-1]["data"]["visualParent"]} ) if visual_parent: @@ -129,7 +131,8 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): if self.shot_add_tasks: tasks_to_add = dict() - project_tasks = io.find_one({"type": "project"})["config"]["tasks"] + project_doc = legacy_io.find_one({"type": "project"}) + project_tasks = project_doc["config"]["tasks"] for task_name, task_data in self.shot_add_tasks.items(): _task_data = deepcopy(task_data) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py index 0d629b1b44..9d94bfdc91 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_matching_asset.py @@ -2,9 +2,10 @@ import os import re import collections import pyblish.api -from avalon import io from pprint import pformat +from openpype.pipeline import legacy_io + class 
CollectMatchingAssetToInstance(pyblish.api.InstancePlugin): """ @@ -119,7 +120,7 @@ class CollectMatchingAssetToInstance(pyblish.api.InstancePlugin): def _asset_docs_by_parent_id(self, instance): # Query all assets for project and store them by parent's id to list asset_docs_by_parent_id = collections.defaultdict(list) - for asset_doc in io.find({"type": "asset"}): + for asset_doc in legacy_io.find({"type": "asset"}): parent_id = asset_doc["data"]["visualParent"] asset_docs_by_parent_id[parent_id].append(asset_doc) return asset_docs_by_parent_id diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py index f07499c15d..9621d70739 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_for_compositing.py @@ -1,8 +1,9 @@ import os import json import copy + import openpype.api -from avalon import io +from openpype.pipeline import legacy_io PSDImage = None @@ -221,7 +222,7 @@ class ExtractBGForComp(openpype.api.Extractor): self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) def find_last_version(self, subset_name, asset_doc): - subset_doc = io.find_one({ + subset_doc = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset_doc["_id"] @@ -230,7 +231,7 @@ class ExtractBGForComp(openpype.api.Extractor): if subset_doc is None: self.log.debug("Subset entity does not exist yet.") else: - version_doc = io.find_one( + version_doc = legacy_io.find_one( { "type": "version", "parent": subset_doc["_id"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py index 2c92366ae9..b45f04e574 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_bg_main_groups.py @@ -1,9 +1,11 @@ import os import copy import json -import openpype.api + import pyblish.api -from avalon import io + +import openpype.api +from openpype.pipeline import legacy_io PSDImage = None @@ -225,7 +227,7 @@ class ExtractBGMainGroups(openpype.api.Extractor): self.log.debug("resourcesDir: \"{}\"".format(resources_folder)) def find_last_version(self, subset_name, asset_doc): - subset_doc = io.find_one({ + subset_doc = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset_doc["_id"] @@ -234,7 +236,7 @@ class ExtractBGMainGroups(openpype.api.Extractor): if subset_doc is None: self.log.debug("Subset entity does not exist yet.") else: - version_doc = io.find_one( + version_doc = legacy_io.find_one( { "type": "version", "parent": subset_doc["_id"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py index e3094b2e3f..8485fa0915 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_images_from_psd.py @@ -1,8 +1,9 @@ import os import copy -import openpype.api import pyblish.api -from avalon import io + +import openpype.api +from openpype.pipeline import legacy_io PSDImage = None @@ -149,7 +150,7 @@ class ExtractImagesFromPSD(openpype.api.Extractor): new_instance.data["representations"] = [new_repre] def find_last_version(self, subset_name, asset_doc): 
- subset_doc = io.find_one({ + subset_doc = legacy_io.find_one({ "type": "subset", "name": subset_name, "parent": asset_doc["_id"] @@ -158,7 +159,7 @@ class ExtractImagesFromPSD(openpype.api.Extractor): if subset_doc is None: self.log.debug("Subset entity does not exist yet.") else: - version_doc = io.find_one( + version_doc = legacy_io.find_one( { "type": "version", "parent": subset_doc["_id"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py index 825092c81b..4c761c7a4c 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_task_existence.py @@ -1,7 +1,9 @@ import pyblish.api -from avalon import io -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + legacy_io, +) class ValidateTaskExistence(pyblish.api.ContextPlugin): @@ -18,7 +20,7 @@ class ValidateTaskExistence(pyblish.api.ContextPlugin): for instance in context: asset_names.add(instance.data["asset"]) - asset_docs = io.find( + asset_docs = legacy_io.find( { "type": "asset", "name": {"$in": list(asset_names)} From 32e02701a17ab058a9efeb9d66017a5673922531 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:22:43 +0200 Subject: [PATCH 122/244] replaced avalon imports in testhost --- openpype/hosts/testhost/api/__init__.py | 6 +++--- openpype/hosts/testhost/api/pipeline.py | 5 ++--- .../testhost/plugins/create/auto_creator.py | 20 ++++++++++++------- openpype/hosts/traypublisher/api/pipeline.py | 11 +++++----- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/testhost/api/__init__.py b/openpype/hosts/testhost/api/__init__.py index 7840b25892..a929a891aa 100644 --- a/openpype/hosts/testhost/api/__init__.py +++ b/openpype/hosts/testhost/api/__init__.py @@ -1,8 +1,8 @@ import os import logging import pyblish.api -import avalon.api -from openpype.pipeline import BaseCreator + +from openpype.pipeline import register_creator_plugin_path from .pipeline import ( ls, @@ -27,7 +27,7 @@ def install(): log.info("OpenPype - Installing TestHost integration") pyblish.api.register_host("testhost") pyblish.api.register_plugin_path(PUBLISH_PATH) - avalon.api.register_plugin_path(BaseCreator, CREATE_PATH) + register_creator_plugin_path(CREATE_PATH) __all__ = ( diff --git a/openpype/hosts/testhost/api/pipeline.py b/openpype/hosts/testhost/api/pipeline.py index 1f5d680705..285fe8f8d6 100644 --- a/openpype/hosts/testhost/api/pipeline.py +++ b/openpype/hosts/testhost/api/pipeline.py @@ -1,5 +1,6 @@ import os import json +from openpype.pipeline import legacy_io class HostContext: @@ -16,9 +17,7 @@ class HostContext: if not asset_name: return project_name - from avalon import io - - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": asset_name}, {"data.parents": 1} ) diff --git a/openpype/hosts/testhost/plugins/create/auto_creator.py b/openpype/hosts/testhost/plugins/create/auto_creator.py index 4c22eea9dd..06b95375b1 100644 --- a/openpype/hosts/testhost/plugins/create/auto_creator.py +++ b/openpype/hosts/testhost/plugins/create/auto_creator.py @@ -1,7 +1,7 @@ -from avalon import io from openpype.lib import NumberDef from openpype.hosts.testhost.api import pipeline from openpype.pipeline import ( + legacy_io, AutoCreator, CreatedInstance, ) @@ -38,13 +38,16 @@ class MyAutoCreator(AutoCreator): 
break variant = "Main" - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -66,7 +69,10 @@ class MyAutoCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/traypublisher/api/pipeline.py b/openpype/hosts/traypublisher/api/pipeline.py index 24175883d9..954a0bae47 100644 --- a/openpype/hosts/traypublisher/api/pipeline.py +++ b/openpype/hosts/traypublisher/api/pipeline.py @@ -3,11 +3,12 @@ import json import tempfile import atexit -from avalon import io -import avalon.api import pyblish.api -from openpype.pipeline import register_creator_plugin_path +from openpype.pipeline import ( + register_creator_plugin_path, + legacy_io, +) ROOT_DIR = os.path.dirname(os.path.dirname( os.path.abspath(__file__) @@ -175,6 +176,6 @@ def install(): def set_project_name(project_name): # TODO Deregister project specific plugins and register new project plugins os.environ["AVALON_PROJECT"] = project_name - avalon.api.Session["AVALON_PROJECT"] = project_name - io.install() + legacy_io.Session["AVALON_PROJECT"] = project_name + legacy_io.install() HostContext.set_project_name(project_name) From 98ba730a9932685e93825f515e489867b127b5cf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:26:02 +0200 Subject: [PATCH 123/244] replace avalon imports in tvpaint --- openpype/hosts/tvpaint/api/lib.py | 2 -- openpype/hosts/tvpaint/api/pipeline.py | 13 ++++++------- openpype/hosts/tvpaint/api/workio.py | 12 +++++++----- openpype/hosts/tvpaint/hooks/pre_launch_args.py | 6 ------ .../hosts/tvpaint/plugins/load/load_workfile.py | 16 +++++++++------- .../tvpaint/plugins/publish/collect_instances.py | 11 +++++------ .../plugins/publish/collect_scene_render.py | 4 ++-- .../tvpaint/plugins/publish/collect_workfile.py | 6 +++--- .../plugins/publish/collect_workfile_data.py | 11 ++++++----- 9 files changed, 38 insertions(+), 43 deletions(-) diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py index 9e6404e72f..0c63dbe5be 100644 --- a/openpype/hosts/tvpaint/api/lib.py +++ b/openpype/hosts/tvpaint/api/lib.py @@ -2,8 +2,6 @@ import os import logging import tempfile -import avalon.io - from . 
import CommunicationWrapper log = logging.getLogger(__name__) diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index d57ec3178a..f473f51457 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -8,12 +8,11 @@ import requests import pyblish.api -from avalon import io - from openpype.hosts import tvpaint from openpype.api import get_current_project_settings from openpype.lib import register_event_callback from openpype.pipeline import ( + legacy_io, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, @@ -69,10 +68,10 @@ def install(): """Install TVPaint-specific functionality.""" log.info("OpenPype - Installing TVPaint integration") - io.install() + legacy_io.install() # Create workdir folder if does not exist yet - workdir = io.Session["AVALON_WORKDIR"] + workdir = legacy_io.Session["AVALON_WORKDIR"] if not os.path.exists(workdir): os.makedirs(workdir) @@ -445,12 +444,12 @@ def set_context_settings(asset_doc=None): """ if asset_doc is None: # Use current session asset if not passed - asset_doc = avalon.io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", - "name": avalon.io.Session["AVALON_ASSET"] + "name": legacy_io.Session["AVALON_ASSET"] }) - project_doc = avalon.io.find_one({"type": "project"}) + project_doc = legacy_io.find_one({"type": "project"}) framerate = asset_doc["data"].get("fps") if framerate is None: diff --git a/openpype/hosts/tvpaint/api/workio.py b/openpype/hosts/tvpaint/api/workio.py index 88bdd7117e..1a5ad00ca8 100644 --- a/openpype/hosts/tvpaint/api/workio.py +++ b/openpype/hosts/tvpaint/api/workio.py @@ -3,8 +3,10 @@ has_unsaved_changes """ -from avalon import api -from openpype.pipeline import HOST_WORKFILE_EXTENSIONS +from openpype.pipeline import ( + HOST_WORKFILE_EXTENSIONS, + legacy_io, +) from .lib import ( execute_george, execute_george_through_file @@ -24,9 +26,9 @@ def save_file(filepath): """Save the open scene file.""" # Store context to workfile before save context = { - "project": api.Session["AVALON_PROJECT"], - "asset": api.Session["AVALON_ASSET"], - "task": api.Session["AVALON_TASK"] + "project": legacy_io.Session["AVALON_PROJECT"], + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } save_current_workfile_context(context) diff --git a/openpype/hosts/tvpaint/hooks/pre_launch_args.py b/openpype/hosts/tvpaint/hooks/pre_launch_args.py index 2a8f49d5b0..c31403437a 100644 --- a/openpype/hosts/tvpaint/hooks/pre_launch_args.py +++ b/openpype/hosts/tvpaint/hooks/pre_launch_args.py @@ -1,14 +1,8 @@ -import os -import shutil - -from openpype.hosts import tvpaint from openpype.lib import ( PreLaunchHook, get_openpype_execute_args ) -import avalon - class TvpaintPrelaunchHook(PreLaunchHook): """Launch arguments preparation. 
diff --git a/openpype/hosts/tvpaint/plugins/load/load_workfile.py b/openpype/hosts/tvpaint/plugins/load/load_workfile.py index 1ce5449065..0eab083c22 100644 --- a/openpype/hosts/tvpaint/plugins/load/load_workfile.py +++ b/openpype/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,13 +1,15 @@ import os -from avalon import io from openpype.lib import ( StringTemplate, get_workfile_template_key_from_context, get_workdir_data, get_last_workfile_with_version, ) -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from openpype.api import Anatomy from openpype.hosts.tvpaint.api import lib, pipeline, plugin @@ -46,13 +48,13 @@ class LoadWorkfile(plugin.Loader): task_name = context.get("task") # Far cases when there is workfile without context if not asset_name: - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] - project_doc = io.find_one({ + project_doc = legacy_io.find_one({ "type": "project" }) - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -63,7 +65,7 @@ class LoadWorkfile(plugin.Loader): task_name, host_name, project_name=project_name, - dbcon=io + dbcon=legacy_io ) anatomy = Anatomy(project_name) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 5e8d13592c..188aa8c41a 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -1,10 +1,9 @@ -import os import json import copy import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectInstances(pyblish.api.ContextPlugin): @@ -82,7 +81,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # - not sure if it's good idea to require asset id in # get_subset_name? 
asset_name = context.data["workfile_context"]["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -93,7 +92,7 @@ class CollectInstances(pyblish.api.ContextPlugin): host_name = context.data["hostName"] # Use empty variant value variant = "" - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] new_subset_name = get_subset_name_with_asset_doc( family, variant, @@ -157,7 +156,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Change subset name # Final family of an instance will be `render` new_family = "render" - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] new_subset_name = "{}{}_{}_Beauty".format( new_family, task_name.capitalize(), name ) @@ -202,7 +201,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Final family of an instance will be `render` new_family = "render" old_subset_name = instance_data["subset"] - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] new_subset_name = "{}{}_{}_{}".format( new_family, task_name.capitalize(), render_layer, pass_name ) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py index 0af9a9a400..1c042a62fb 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_scene_render.py @@ -1,9 +1,9 @@ import json import copy import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectRenderScene(pyblish.api.ContextPlugin): @@ -57,7 +57,7 @@ class CollectRenderScene(pyblish.api.ContextPlugin): # get_subset_name? workfile_context = context.data["workfile_context"] asset_name = workfile_context["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 89348037d3..70d92f82e9 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -1,9 +1,9 @@ import os import json import pyblish.api -from avalon import io from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): @@ -28,7 +28,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # get_subset_name? 
family = "workfile" asset_name = context.data["workfile_context"]["asset"] - asset_doc = io.find_one({ + asset_doc = legacy_io.find_one({ "type": "asset", "name": asset_name }) @@ -39,7 +39,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): host_name = os.environ["AVALON_APP"] # Use empty variant value variant = "" - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] subset_name = get_subset_name_with_asset_doc( family, variant, diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py index f5c86c613b..c59ef82f85 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -3,7 +3,8 @@ import json import tempfile import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io from openpype.hosts.tvpaint.api import pipeline, lib @@ -49,9 +50,9 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): # Collect and store current context to have reference current_context = { - "project": avalon.api.Session["AVALON_PROJECT"], - "asset": avalon.api.Session["AVALON_ASSET"], - "task": avalon.api.Session["AVALON_TASK"] + "project": legacy_io.Session["AVALON_PROJECT"], + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } context.data["previous_context"] = current_context self.log.debug("Current context is: {}".format(current_context)) @@ -69,7 +70,7 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): ("AVALON_TASK", "task") ) for env_key, key in key_map: - avalon.api.Session[env_key] = workfile_context[key] + legacy_io.Session[env_key] = workfile_context[key] os.environ[env_key] = workfile_context[key] self.log.info("Context changed to: {}".format(workfile_context)) From ee93213cf113dea428e9a4dc6a26ac2364e37105 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:27:06 +0200 Subject: [PATCH 124/244] replace avalon imports in unreal --- openpype/hosts/unreal/plugins/load/load_camera.py | 14 ++++++++------ .../hosts/unreal/plugins/publish/extract_layout.py | 4 ++-- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/unreal/plugins/load/load_camera.py b/openpype/hosts/unreal/plugins/load/load_camera.py index 40bca0b0c7..63c0845ec2 100644 --- a/openpype/hosts/unreal/plugins/load/load_camera.py +++ b/openpype/hosts/unreal/plugins/load/load_camera.py @@ -2,8 +2,10 @@ """Load camera from FBX.""" import os -from avalon import io -from openpype.pipeline import AVALON_CONTAINER_ID +from openpype.pipeline import ( + AVALON_CONTAINER_ID, + legacy_io, +) from openpype.hosts.unreal.api import plugin from openpype.hosts.unreal.api import pipeline as unreal_pipeline import unreal # noqa @@ -87,8 +89,8 @@ class CameraLoader(plugin.Loader): factory=unreal.LevelSequenceFactoryNew() ) - io_asset = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ + io_asset = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({ "type": "asset", "name": io_asset }) @@ -172,8 +174,8 @@ class CameraLoader(plugin.Loader): factory=unreal.LevelSequenceFactoryNew() ) - io_asset = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ + io_asset = legacy_io.Session["AVALON_ASSET"] + asset_doc = legacy_io.find_one({ "type": "asset", "name": io_asset }) diff --git a/openpype/hosts/unreal/plugins/publish/extract_layout.py b/openpype/hosts/unreal/plugins/publish/extract_layout.py index f34a47b89f..87e6693a97 100644 
--- a/openpype/hosts/unreal/plugins/publish/extract_layout.py +++ b/openpype/hosts/unreal/plugins/publish/extract_layout.py @@ -10,7 +10,7 @@ from unreal import EditorLevelLibrary as ell from unreal import EditorAssetLibrary as eal import openpype.api -from avalon import io +from openpype.pipeline import legacy_io class ExtractLayout(openpype.api.Extractor): @@ -61,7 +61,7 @@ class ExtractLayout(openpype.api.Extractor): family = eal.get_metadata_tag(asset_container, "family") self.log.info("Parent: {}".format(parent)) - blend = io.find_one( + blend = legacy_io.find_one( { "type": "representation", "parent": ObjectId(parent), From c1246f5349e61891d70dd84135308e0ab303bad0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:27:19 +0200 Subject: [PATCH 125/244] replace avalon imports in webpublisher --- openpype/hosts/webpublisher/api/__init__.py | 4 ++-- .../webpublisher/plugins/publish/collect_batch_data.py | 7 ++++--- .../plugins/publish/collect_published_files.py | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py index 72bbffd099..18e3a16cf5 100644 --- a/openpype/hosts/webpublisher/api/__init__.py +++ b/openpype/hosts/webpublisher/api/__init__.py @@ -1,9 +1,9 @@ import os import logging -from avalon import io from pyblish import api as pyblish import openpype.hosts.webpublisher +from openpype.pipeline import legacy_io log = logging.getLogger("openpype.hosts.webpublisher") @@ -19,7 +19,7 @@ def install(): pyblish.register_plugin_path(PUBLISH_PATH) log.info(PUBLISH_PATH) - io.install() + legacy_io.install() def uninstall(): diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index ca14538d7d..d954c04c60 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -7,12 +7,13 @@ Provides: import os import pyblish.api -from avalon import io + from openpype.lib.plugin_tools import ( parse_json, get_batch_asset_task_info ) from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS +from openpype.pipeline import legacy_io class CollectBatchData(pyblish.api.ContextPlugin): @@ -52,9 +53,9 @@ class CollectBatchData(pyblish.api.ContextPlugin): ) os.environ["AVALON_ASSET"] = asset_name - io.Session["AVALON_ASSET"] = asset_name + legacy_io.Session["AVALON_ASSET"] = asset_name os.environ["AVALON_TASK"] = task_name - io.Session["AVALON_TASK"] = task_name + legacy_io.Session["AVALON_TASK"] = task_name context.data["asset"] = asset_name context.data["task"] = task_name diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 8edaf4f67b..84a1f63418 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -12,7 +12,6 @@ import clique import tempfile import math -from avalon import io import pyblish.api from openpype.lib import ( prepare_template_data, @@ -24,6 +23,7 @@ from openpype.lib.plugin_tools import ( parse_json, get_subset_name_with_asset_doc ) +from openpype.pipeline import legacy_io class CollectPublishedFiles(pyblish.api.ContextPlugin): @@ -261,7 +261,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): } } ] - version = list(io.aggregate(query)) + 
version = list(legacy_io.aggregate(query)) if version: return version[0].get("version") or 0 From 066d6123d6763b6a90737d9e69cda7c2151c69cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:29:41 +0200 Subject: [PATCH 126/244] replace avalon imports in openpype.lib --- openpype/lib/abstract_collect_render.py | 5 +- openpype/lib/avalon_context.py | 149 ++++++++++++------------ openpype/lib/plugin_tools.py | 4 +- openpype/lib/usdlib.py | 18 +-- 4 files changed, 87 insertions(+), 89 deletions(-) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 7c768e280c..fe202824a7 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -9,9 +9,10 @@ from abc import abstractmethod import attr import six -from avalon import api import pyblish.api +from openpype.pipeline import legacy_io + from .abstract_metaplugins import AbstractMetaContextPlugin @@ -127,7 +128,7 @@ class AbstractCollectRender(pyblish.api.ContextPlugin): """Constructor.""" super(AbstractCollectRender, self).__init__(*args, **kwargs) self._file_path = None - self._asset = api.Session["AVALON_ASSET"] + self._asset = legacy_io.Session["AVALON_ASSET"] self._context = None def process(self, context): diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index d95d1b983f..139fb7edde 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -20,9 +20,7 @@ from .profiles_filtering import filter_profiles from .events import emit_event from .path_templates import StringTemplate -# avalon module is not imported at the top -# - may not be in path at the time of pype.lib initialization -avalon = None +legacy_io = None log = logging.getLogger("AvalonContext") @@ -120,17 +118,17 @@ def create_project( return project_doc -def with_avalon(func): +def with_pipeline_io(func): @functools.wraps(func) - def wrap_avalon(*args, **kwargs): - global avalon - if avalon is None: - import avalon + def wrapped(*args, **kwargs): + global legacy_io + if legacy_io is None: + from openpype.pipeline import legacy_io return func(*args, **kwargs) - return wrap_avalon + return wrapped -@with_avalon +@with_pipeline_io def is_latest(representation): """Return whether the representation is from latest version @@ -142,12 +140,12 @@ def is_latest(representation): """ - version = avalon.io.find_one({"_id": representation['parent']}) + version = legacy_io.find_one({"_id": representation['parent']}) if version["type"] == "hero_version": return True # Get highest version under the parent - highest_version = avalon.io.find_one({ + highest_version = legacy_io.find_one({ "type": "version", "parent": version["parent"] }, sort=[("name", -1)], projection={"name": True}) @@ -158,7 +156,7 @@ def is_latest(representation): return False -@with_avalon +@with_pipeline_io def any_outdated(): """Return whether the current scene has any outdated content""" from openpype.pipeline import registered_host @@ -170,7 +168,7 @@ def any_outdated(): if representation in checked: continue - representation_doc = avalon.io.find_one( + representation_doc = legacy_io.find_one( { "_id": ObjectId(representation), "type": "representation" @@ -189,7 +187,7 @@ def any_outdated(): return False -@with_avalon +@with_pipeline_io def get_asset(asset_name=None): """ Returning asset document from database by its name. 
@@ -202,9 +200,9 @@ def get_asset(asset_name=None): (MongoDB document) """ if not asset_name: - asset_name = avalon.api.Session["AVALON_ASSET"] + asset_name = legacy_io.Session["AVALON_ASSET"] - asset_document = avalon.io.find_one({ + asset_document = legacy_io.find_one({ "name": asset_name, "type": "asset" }) @@ -215,7 +213,7 @@ def get_asset(asset_name=None): return asset_document -@with_avalon +@with_pipeline_io def get_hierarchy(asset_name=None): """ Obtain asset hierarchy path string from mongo db @@ -228,12 +226,12 @@ def get_hierarchy(asset_name=None): """ if not asset_name: - asset_name = avalon.io.Session.get( + asset_name = legacy_io.Session.get( "AVALON_ASSET", os.environ["AVALON_ASSET"] ) - asset_entity = avalon.io.find_one({ + asset_entity = legacy_io.find_one({ "type": 'asset', "name": asset_name }) @@ -252,13 +250,13 @@ def get_hierarchy(asset_name=None): parent_id = entity.get("data", {}).get("visualParent") if not parent_id: break - entity = avalon.io.find_one({"_id": parent_id}) + entity = legacy_io.find_one({"_id": parent_id}) hierarchy_items.append(entity["name"]) # Add parents to entity data for next query entity_data = asset_entity.get("data", {}) entity_data["parents"] = hierarchy_items - avalon.io.update_many( + legacy_io.update_many( {"_id": asset_entity["_id"]}, {"$set": {"data": entity_data}} ) @@ -305,7 +303,7 @@ def get_linked_asset_ids(asset_doc): return output -@with_avalon +@with_pipeline_io def get_linked_assets(asset_doc): """Return linked assets for `asset_doc` from DB @@ -319,10 +317,10 @@ def get_linked_assets(asset_doc): if not link_ids: return [] - return list(avalon.io.find({"_id": {"$in": link_ids}})) + return list(legacy_io.find({"_id": {"$in": link_ids}})) -@with_avalon +@with_pipeline_io def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): """Retrieve latest version from `asset_name`, and `subset_name`. @@ -342,13 +340,13 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None): """ if not dbcon: - log.debug("Using `avalon.io` for query.") - dbcon = avalon.io + log.debug("Using `legacy_io` for query.") + dbcon = legacy_io # Make sure is installed dbcon.install() if project_name and project_name != dbcon.Session.get("AVALON_PROJECT"): - # `avalon.io` has only `_database` attribute + # `legacy_io` has only `_database` attribute # but `AvalonMongoDB` has `database` database = getattr(dbcon, "database", dbcon._database) collection = database[project_name] @@ -648,6 +646,7 @@ def get_workdir( ) +@with_pipeline_io def template_data_from_session(session=None): """ Return dictionary with template from session keys. @@ -657,15 +656,15 @@ def template_data_from_session(session=None): Returns: dict: All available data from session. 
""" - from avalon import io - import avalon.api if session is None: - session = avalon.api.Session + session = legacy_io.Session project_name = session["AVALON_PROJECT"] - project_doc = io._database[project_name].find_one({"type": "project"}) - asset_doc = io._database[project_name].find_one({ + project_doc = legacy_io.database[project_name].find_one({ + "type": "project" + }) + asset_doc = legacy_io.database[project_name].find_one({ "type": "asset", "name": session["AVALON_ASSET"] }) @@ -674,6 +673,7 @@ def template_data_from_session(session=None): return get_workdir_data(project_doc, asset_doc, task_name, host_name) +@with_pipeline_io def compute_session_changes( session, task=None, asset=None, app=None, template_key=None ): @@ -712,10 +712,8 @@ def compute_session_changes( asset = asset["name"] if not asset_document or not asset_tasks: - from avalon import io - # Assume asset name - asset_document = io.find_one( + asset_document = legacy_io.find_one( { "name": asset, "type": "asset" @@ -747,11 +745,10 @@ def compute_session_changes( return changes +@with_pipeline_io def get_workdir_from_session(session=None, template_key=None): - import avalon.api - if session is None: - session = avalon.api.Session + session = legacy_io.Session project_name = session["AVALON_PROJECT"] host_name = session["AVALON_APP"] anatomy = Anatomy(project_name) @@ -768,6 +765,7 @@ def get_workdir_from_session(session=None, template_key=None): return anatomy_filled[template_key]["folder"] +@with_pipeline_io def update_current_task(task=None, asset=None, app=None, template_key=None): """Update active Session to a new task work area. @@ -782,10 +780,8 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): dict: The changed key, values in the current Session. """ - import avalon.api - changes = compute_session_changes( - avalon.api.Session, + legacy_io.Session, task=task, asset=asset, app=app, @@ -795,7 +791,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): # Update the Session and environments. Pop from environments all keys with # value set to None. for key, value in changes.items(): - avalon.api.Session[key] = value + legacy_io.Session[key] = value if value is None: os.environ.pop(key, None) else: @@ -807,7 +803,7 @@ def update_current_task(task=None, asset=None, app=None, template_key=None): return changes -@with_avalon +@with_pipeline_io def get_workfile_doc(asset_id, task_name, filename, dbcon=None): """Return workfile document for entered context. @@ -819,14 +815,14 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): task_name (str): Name of task under which the workfile belongs. filename (str): Name of a workfile. dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and - `avalon.io` is used if not entered. + `legacy_io` is used if not entered. Returns: dict: Workfile document or None. """ - # Use avalon.io if dbcon is not entered + # Use legacy_io if dbcon is not entered if not dbcon: - dbcon = avalon.io + dbcon = legacy_io return dbcon.find_one({ "type": "workfile", @@ -836,7 +832,7 @@ def get_workfile_doc(asset_id, task_name, filename, dbcon=None): }) -@with_avalon +@with_pipeline_io def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): """Creates or replace workfile document in mongo. @@ -849,11 +845,11 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): filename (str): Filename of workfile. workdir (str): Path to directory where `filename` is located. 
dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and - `avalon.io` is used if not entered. + `legacy_io` is used if not entered. """ - # Use avalon.io if dbcon is not entered + # Use legacy_io if dbcon is not entered if not dbcon: - dbcon = avalon.io + dbcon = legacy_io # Filter of workfile document doc_filter = { @@ -898,7 +894,7 @@ def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None): ) -@with_avalon +@with_pipeline_io def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not workfile_doc: # TODO add log message @@ -907,9 +903,9 @@ def save_workfile_data_to_doc(workfile_doc, data, dbcon=None): if not data: return - # Use avalon.io if dbcon is not entered + # Use legacy_io if dbcon is not entered if not dbcon: - dbcon = avalon.io + dbcon = legacy_io # Convert data to mongo modification keys/values # - this is naive implementation which does not expect nested @@ -959,7 +955,7 @@ class BuildWorkfile: return containers - @with_avalon + @with_pipeline_io def build_workfile(self): """Prepares and load containers into workfile. @@ -986,8 +982,8 @@ class BuildWorkfile: from openpype.pipeline import discover_loader_plugins # Get current asset name and entity - current_asset_name = avalon.io.Session["AVALON_ASSET"] - current_asset_entity = avalon.io.find_one({ + current_asset_name = legacy_io.Session["AVALON_ASSET"] + current_asset_entity = legacy_io.find_one({ "type": "asset", "name": current_asset_name }) @@ -1015,7 +1011,7 @@ class BuildWorkfile: return # Get current task name - current_task_name = avalon.io.Session["AVALON_TASK"] + current_task_name = legacy_io.Session["AVALON_TASK"] # Load workfile presets for task self.build_presets = self.get_build_presets( @@ -1103,7 +1099,7 @@ class BuildWorkfile: # Return list of loaded containers return loaded_containers - @with_avalon + @with_pipeline_io def get_build_presets(self, task_name, asset_doc): """ Returns presets to build workfile for task name. @@ -1119,7 +1115,7 @@ class BuildWorkfile: """ host_name = os.environ["AVALON_APP"] project_settings = get_project_settings( - avalon.io.Session["AVALON_PROJECT"] + legacy_io.Session["AVALON_PROJECT"] ) host_settings = project_settings.get(host_name) or {} @@ -1369,7 +1365,7 @@ class BuildWorkfile: "containers": containers } - @with_avalon + @with_pipeline_io def _load_containers( self, repres_by_subset_id, subsets_by_id, profiles_per_subset_id, loaders_by_name @@ -1495,7 +1491,7 @@ class BuildWorkfile: return loaded_containers - @with_avalon + @with_pipeline_io def _collect_last_version_repres(self, asset_entities): """Collect subsets, versions and representations for asset_entities. 
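# Usage sketch for the builder above (BuildWorkfile is the class defined in
# this module): once legacy_io.Session holds a valid project, asset and task,
# the workfile can be pre-populated in one call. build_workfile() may return
# None when there is nothing to load, hence the `or []` guard.
from openpype.pipeline import legacy_io

legacy_io.install()
builder = BuildWorkfile()
loaded_containers = builder.build_workfile() or []
for container in loaded_containers:
    print(container)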
@@ -1534,13 +1530,13 @@ class BuildWorkfile: asset_entity_by_ids = {asset["_id"]: asset for asset in asset_entities} - subsets = list(avalon.io.find({ + subsets = list(legacy_io.find({ "type": "subset", "parent": {"$in": asset_entity_by_ids.keys()} })) subset_entity_by_ids = {subset["_id"]: subset for subset in subsets} - sorted_versions = list(avalon.io.find({ + sorted_versions = list(legacy_io.find({ "type": "version", "parent": {"$in": subset_entity_by_ids.keys()} }).sort("name", -1)) @@ -1554,7 +1550,7 @@ class BuildWorkfile: subset_id_with_latest_version.append(subset_id) last_versions_by_id[version["_id"]] = version - repres = avalon.io.find({ + repres = legacy_io.find({ "type": "representation", "parent": {"$in": last_versions_by_id.keys()} }) @@ -1592,7 +1588,7 @@ class BuildWorkfile: return output -@with_avalon +@with_pipeline_io def get_creator_by_name(creator_name, case_sensitive=False): """Find creator plugin by name. @@ -1622,7 +1618,7 @@ def get_creator_by_name(creator_name, case_sensitive=False): return None -@with_avalon +@with_pipeline_io def change_timer_to_current_context(): """Called after context change to change timers. @@ -1641,9 +1637,9 @@ def change_timer_to_current_context(): log.warning("Couldn't start timer") return data = { - "project_name": avalon.io.Session["AVALON_PROJECT"], - "asset_name": avalon.io.Session["AVALON_ASSET"], - "task_name": avalon.io.Session["AVALON_TASK"] + "project_name": legacy_io.Session["AVALON_PROJECT"], + "asset_name": legacy_io.Session["AVALON_ASSET"], + "task_name": legacy_io.Session["AVALON_TASK"] } requests.post(rest_api_url, json=data) @@ -1827,10 +1823,11 @@ def get_custom_workfile_template_by_string_context( ) +@with_pipeline_io def get_custom_workfile_template(template_profiles): """Filter and fill workfile template profiles by current context. - Current context is defined by `avalon.api.Session`. That's why this + Current context is defined by `legacy_io.Session`. That's why this function should be used only inside host where context is set and stable. Args: @@ -1840,15 +1837,13 @@ def get_custom_workfile_template(template_profiles): str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) 
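# Usage sketch for get_creator_by_name() as patched above; "CreateRender" is
# a hypothetical creator name, and the call needs an installed legacy_io
# connection because of the @with_pipeline_io decorator.
creator_cls = get_creator_by_name("CreateRender", case_sensitive=False)
if creator_cls is None:
    print("No creator named 'CreateRender' was discovered")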
""" - # Use `avalon.io` as Mongo connection - from avalon import io return get_custom_workfile_template_by_string_context( template_profiles, - io.Session["AVALON_PROJECT"], - io.Session["AVALON_ASSET"], - io.Session["AVALON_TASK"], - io + legacy_io.Session["AVALON_PROJECT"], + legacy_io.Session["AVALON_ASSET"], + legacy_io.Session["AVALON_TASK"], + legacy_io ) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 3f78407931..bcbf06a0e8 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -72,9 +72,9 @@ def get_subset_name_with_asset_doc( family = family.rsplit(".", 1)[-1] if project_name is None: - import avalon.api + from openpype.pipeline import legacy_io - project_name = avalon.api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] asset_tasks = asset_doc.get("data", {}).get("tasks") or {} task_info = asset_tasks.get(task_name) or {} diff --git a/openpype/lib/usdlib.py b/openpype/lib/usdlib.py index 7b3b7112de..86de19b4be 100644 --- a/openpype/lib/usdlib.py +++ b/openpype/lib/usdlib.py @@ -8,8 +8,10 @@ except ImportError: # Allow to fall back on Multiverse 6.3.0+ pxr usd library from mvpxr import Usd, UsdGeom, Sdf, Kind -from avalon import io, api -from openpype.pipeline import registered_root +from openpype.pipeline import ( + registered_root, + legacy_io, +) log = logging.getLogger(__name__) @@ -126,7 +128,7 @@ def create_model(filename, asset, variant_subsets): """ - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) assert asset_doc, "Asset not found: %s" % asset variants = [] @@ -176,7 +178,7 @@ def create_shade(filename, asset, variant_subsets): """ - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) assert asset_doc, "Asset not found: %s" % asset variants = [] @@ -211,7 +213,7 @@ def create_shade_variation(filename, asset, model_variant, shade_variants): """ - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) assert asset_doc, "Asset not found: %s" % asset variants = [] @@ -311,7 +313,7 @@ def get_usd_master_path(asset, subset, representation): """ - project = io.find_one( + project = legacy_io.find_one( {"type": "project"}, projection={"config.template.publish": True} ) template = project["config"]["template"]["publish"] @@ -320,12 +322,12 @@ def get_usd_master_path(asset, subset, representation): # Allow explicitly passing asset document asset_doc = asset else: - asset_doc = io.find_one({"name": asset, "type": "asset"}) + asset_doc = legacy_io.find_one({"name": asset, "type": "asset"}) path = template.format( **{ "root": registered_root(), - "project": api.Session["AVALON_PROJECT"], + "project": legacy_io.Session["AVALON_PROJECT"], "asset": asset_doc["name"], "subset": subset, "representation": representation, From 213ab8a811bb800d62b514cbe4258f0813d1dac6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:32:24 +0200 Subject: [PATCH 127/244] replaced avalon imports in tools --- openpype/tools/adobe_webserver/app.py | 8 +- openpype/tools/creator/window.py | 36 ++---- openpype/tools/loader/app.py | 48 ++++---- openpype/tools/mayalookassigner/app.py | 12 +- openpype/tools/mayalookassigner/commands.py | 9 +- .../tools/mayalookassigner/vray_proxies.py | 37 ++++--- openpype/tools/sceneinventory/model.py | 16 +-- .../tools/sceneinventory/switch_dialog.py | 104 
+++++++++--------- openpype/tools/sceneinventory/view.py | 27 +++-- openpype/tools/sceneinventory/window.py | 15 +-- .../widgets/widget_components.py | 24 ++-- openpype/tools/texture_copy/app.py | 16 ++- openpype/tools/utils/host_tools.py | 15 ++- openpype/tools/workfiles/app.py | 15 +-- openpype/tools/workfiles/files_widget.py | 22 ++-- openpype/tools/workfiles/save_as_dialog.py | 15 +-- openpype/tools/workfiles/window.py | 23 ++-- 17 files changed, 232 insertions(+), 210 deletions(-) diff --git a/openpype/tools/adobe_webserver/app.py b/openpype/tools/adobe_webserver/app.py index b79d6c6c60..3911baf7ac 100644 --- a/openpype/tools/adobe_webserver/app.py +++ b/openpype/tools/adobe_webserver/app.py @@ -16,7 +16,7 @@ from wsrpc_aiohttp import ( WSRPCClient ) -from avalon import api +from openpype.pipeline import legacy_io log = logging.getLogger(__name__) @@ -80,9 +80,9 @@ class WebServerTool: loop=asyncio.get_event_loop()) await client.connect() - project = api.Session["AVALON_PROJECT"] - asset = api.Session["AVALON_ASSET"] - task = api.Session["AVALON_TASK"] + project = legacy_io.Session["AVALON_PROJECT"] + asset = legacy_io.Session["AVALON_ASSET"] + task = legacy_io.Session["AVALON_TASK"] log.info("Sending context change to {}-{}-{}".format(project, asset, task)) diff --git a/openpype/tools/creator/window.py b/openpype/tools/creator/window.py index 51cc66e715..e0c329fb78 100644 --- a/openpype/tools/creator/window.py +++ b/openpype/tools/creator/window.py @@ -4,16 +4,14 @@ import re from Qt import QtWidgets, QtCore -from avalon import api, io - from openpype import style from openpype.api import get_current_project_settings from openpype.tools.utils.lib import qt_app_context +from openpype.pipeline import legacy_io from openpype.pipeline.create import ( SUBSET_NAME_ALLOWED_SYMBOLS, legacy_create, CreatorError, - LegacyCreator, ) from .model import CreatorsModel @@ -220,7 +218,7 @@ class CreatorWindow(QtWidgets.QDialog): asset_doc = None if creator_plugin: # Get the asset from the database which match with the name - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"name": asset_name, "type": "asset"}, projection={"_id": 1} ) @@ -237,9 +235,9 @@ class CreatorWindow(QtWidgets.QDialog): self._set_valid_state(False) return - project_name = io.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] asset_id = asset_doc["_id"] - task_name = io.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] # Calculate subset name with Creator plugin subset_name = creator_plugin.get_subset_name( @@ -271,7 +269,7 @@ class CreatorWindow(QtWidgets.QDialog): self._subset_name_input.setText(subset_name) # Get all subsets of the current asset - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "parent": asset_id @@ -372,7 +370,7 @@ class CreatorWindow(QtWidgets.QDialog): self.setStyleSheet(style.load_stylesheet()) def refresh(self): - self._asset_name_input.setText(io.Session["AVALON_ASSET"]) + self._asset_name_input.setText(legacy_io.Session["AVALON_ASSET"]) self._creators_model.reset() @@ -385,7 +383,7 @@ class CreatorWindow(QtWidgets.QDialog): ) current_index = None family = None - task_name = io.Session.get("AVALON_TASK", None) + task_name = legacy_io.Session.get("AVALON_TASK", None) lowered_task_name = task_name.lower() if task_name: for _family, _task_names in pype_project_setting.items(): @@ -471,7 +469,7 @@ class CreatorWindow(QtWidgets.QDialog): self._msg_timer.start() -def show(debug=False, parent=None): +def show(parent=None): 
"""Display asset creator GUI Arguments: @@ -488,24 +486,6 @@ def show(debug=False, parent=None): except (AttributeError, RuntimeError): pass - if debug: - from avalon import mock - for creator in mock.creators: - api.register_plugin(LegacyCreator, creator) - - import traceback - sys.excepthook = lambda typ, val, tb: traceback.print_last() - - io.install() - - any_project = next( - project for project in io.projects() - if project.get("active", True) is not False - ) - - api.Session["AVALON_PROJECT"] = any_project["name"] - module.project = any_project["name"] - with qt_app_context(): window = CreatorWindow(parent) window.refresh() diff --git a/openpype/tools/loader/app.py b/openpype/tools/loader/app.py index fad284d82b..bb589c199d 100644 --- a/openpype/tools/loader/app.py +++ b/openpype/tools/loader/app.py @@ -1,11 +1,14 @@ import sys +import traceback from Qt import QtWidgets, QtCore -from avalon import api, io from openpype import style from openpype.lib import register_event_callback -from openpype.pipeline import install_openpype_plugins +from openpype.pipeline import ( + install_openpype_plugins, + legacy_io, +) from openpype.tools.utils import ( lib, PlaceholderLineEdit @@ -36,14 +39,14 @@ class LoaderWindow(QtWidgets.QDialog): def __init__(self, parent=None): super(LoaderWindow, self).__init__(parent) title = "Asset Loader 2.1" - project_name = api.Session.get("AVALON_PROJECT") + project_name = legacy_io.Session.get("AVALON_PROJECT") if project_name: title += " - {}".format(project_name) self.setWindowTitle(title) # Groups config - self.groups_config = lib.GroupsConfig(io) - self.family_config_cache = lib.FamilyConfigCache(io) + self.groups_config = lib.GroupsConfig(legacy_io) + self.family_config_cache = lib.FamilyConfigCache(legacy_io) # Enable minimize and maximize for app window_flags = QtCore.Qt.Window @@ -60,13 +63,13 @@ class LoaderWindow(QtWidgets.QDialog): # Assets widget assets_widget = MultiSelectAssetsWidget( - io, parent=left_side_splitter + legacy_io, parent=left_side_splitter ) assets_widget.set_current_asset_btn_visibility(True) # Families widget families_filter_view = FamilyListView( - io, self.family_config_cache, left_side_splitter + legacy_io, self.family_config_cache, left_side_splitter ) left_side_splitter.addWidget(assets_widget) left_side_splitter.addWidget(families_filter_view) @@ -76,7 +79,7 @@ class LoaderWindow(QtWidgets.QDialog): # --- Middle part --- # Subsets widget subsets_widget = SubsetWidget( - io, + legacy_io, self.groups_config, self.family_config_cache, tool_name=self.tool_name, @@ -87,8 +90,12 @@ class LoaderWindow(QtWidgets.QDialog): thumb_ver_splitter = QtWidgets.QSplitter(main_splitter) thumb_ver_splitter.setOrientation(QtCore.Qt.Vertical) - thumbnail_widget = ThumbnailWidget(io, parent=thumb_ver_splitter) - version_info_widget = VersionWidget(io, parent=thumb_ver_splitter) + thumbnail_widget = ThumbnailWidget( + legacy_io, parent=thumb_ver_splitter + ) + version_info_widget = VersionWidget( + legacy_io, parent=thumb_ver_splitter + ) thumb_ver_splitter.addWidget(thumbnail_widget) thumb_ver_splitter.addWidget(version_info_widget) @@ -105,7 +112,7 @@ class LoaderWindow(QtWidgets.QDialog): repres_widget = None if sync_server_enabled: repres_widget = RepresentationWidget( - io, self.tool_name, parent=thumb_ver_splitter + legacy_io, self.tool_name, parent=thumb_ver_splitter ) thumb_ver_splitter.addWidget(repres_widget) @@ -259,13 +266,15 @@ class LoaderWindow(QtWidgets.QDialog): # Refresh families config self._families_filter_view.refresh() # 
Change to context asset on context change - self._assets_widget.select_asset_by_name(io.Session["AVALON_ASSET"]) + self._assets_widget.select_asset_by_name( + legacy_io.Session["AVALON_ASSET"] + ) def _refresh(self): """Load assets from database""" # Ensure a project is loaded - project = io.find_one({"type": "project"}, {"type": 1}) + project = legacy_io.find_one({"type": "project"}, {"type": 1}) assert project, "Project was not found! This is a bug" self._assets_widget.refresh() @@ -562,17 +571,16 @@ def show(debug=False, parent=None, use_context=False): module.window = None if debug: - import traceback sys.excepthook = lambda typ, val, tb: traceback.print_last() - io.install() + legacy_io.install() any_project = next( - project for project in io.projects() + project for project in legacy_io.projects() if project.get("active", True) is not False ) - api.Session["AVALON_PROJECT"] = any_project["name"] + legacy_io.Session["AVALON_PROJECT"] = any_project["name"] module.project = any_project["name"] with lib.qt_app_context(): @@ -580,7 +588,7 @@ def show(debug=False, parent=None, use_context=False): window.show() if use_context: - context = {"asset": api.Session["AVALON_ASSET"]} + context = {"asset": legacy_io.Session["AVALON_ASSET"]} window.set_context(context, refresh=True) else: window.refresh() @@ -604,10 +612,10 @@ def cli(args): print("Entering Project: %s" % project) - io.install() + legacy_io.install() # Store settings - api.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project install_openpype_plugins(project) diff --git a/openpype/tools/mayalookassigner/app.py b/openpype/tools/mayalookassigner/app.py index 0e633a21e3..1b6cad77a8 100644 --- a/openpype/tools/mayalookassigner/app.py +++ b/openpype/tools/mayalookassigner/app.py @@ -4,8 +4,8 @@ import logging from Qt import QtWidgets, QtCore -from avalon import io from openpype import style +from openpype.pipeline import legacy_io from openpype.tools.utils.lib import qt_app_context from openpype.hosts.maya.api.lib import assign_look_by_version @@ -227,9 +227,13 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): continue # Get the latest version of this asset's look subset - version = io.find_one({"type": "version", - "parent": assign_look["_id"]}, - sort=[("name", -1)]) + version = legacy_io.find_one( + { + "type": "version", + "parent": assign_look["_id"] + }, + sort=[("name", -1)] + ) subset_name = assign_look["name"] self.echo("{} Assigning {} to {}\t".format(prefix, diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index 8fd592d347..d41d8ca5a2 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -5,9 +5,8 @@ import os from bson.objectid import ObjectId import maya.cmds as cmds -from avalon import io - from openpype.pipeline import ( + legacy_io, remove_container, registered_host, ) @@ -161,8 +160,10 @@ def create_items_from_nodes(nodes): return asset_view_items for _id, id_nodes in id_hashes.items(): - asset = io.find_one({"_id": ObjectId(_id)}, - projection={"name": True}) + asset = legacy_io.find_one( + {"_id": ObjectId(_id)}, + projection={"name": True} + ) # Skip if asset id is not found if not asset: diff --git a/openpype/tools/mayalookassigner/vray_proxies.py b/openpype/tools/mayalookassigner/vray_proxies.py index c97664f3cb..3523b24bf3 100644 --- a/openpype/tools/mayalookassigner/vray_proxies.py +++ b/openpype/tools/mayalookassigner/vray_proxies.py @@ -11,9 +11,8 @@ from bson.objectid 
import ObjectId import alembic.Abc from maya import cmds -from avalon import io - from openpype.pipeline import ( + legacy_io, load_container, loaders_from_representation, discover_loader_plugins, @@ -158,9 +157,11 @@ def get_look_relationships(version_id): dict: Dictionary of relations. """ - json_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "json"}) + json_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "json" + }) # Load relationships shader_relation = get_representation_path(json_representation) @@ -184,9 +185,11 @@ def load_look(version_id): """ # Get representations of shader file and relationships - look_representation = io.find_one({"type": "representation", - "parent": version_id, - "name": "ma"}) + look_representation = legacy_io.find_one({ + "type": "representation", + "parent": version_id, + "name": "ma" + }) # See if representation is already loaded, if so reuse it. host = registered_host() @@ -232,15 +235,21 @@ def get_latest_version(asset_id, subset): RuntimeError: When subset or version doesn't exist. """ - subset = io.find_one({"name": subset, - "parent": ObjectId(asset_id), - "type": "subset"}) + subset = legacy_io.find_one({ + "name": subset, + "parent": ObjectId(asset_id), + "type": "subset" + }) if not subset: raise RuntimeError("Subset does not exist: %s" % subset) - version = io.find_one({"type": "version", - "parent": subset["_id"]}, - sort=[("name", -1)]) + version = legacy_io.find_one( + { + "type": "version", + "parent": subset["_id"] + }, + sort=[("name", -1)] + ) if not version: raise RuntimeError("Version does not exist.") diff --git a/openpype/tools/sceneinventory/model.py b/openpype/tools/sceneinventory/model.py index 2c47381751..8d72020c98 100644 --- a/openpype/tools/sceneinventory/model.py +++ b/openpype/tools/sceneinventory/model.py @@ -7,8 +7,8 @@ from Qt import QtCore, QtGui import qtawesome from bson.objectid import ObjectId -from avalon import io from openpype.pipeline import ( + legacy_io, schema, HeroVersionType, registered_host, @@ -55,7 +55,7 @@ class InventoryModel(TreeModel): if not self.sync_enabled: return - project_name = io.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] active_site = sync_server.get_active_site(project_name) remote_site = sync_server.get_remote_site(project_name) @@ -304,32 +304,32 @@ class InventoryModel(TreeModel): for repre_id, group_dict in sorted(grouped.items()): group_items = group_dict["items"] # Get parenthood per group - representation = io.find_one({"_id": ObjectId(repre_id)}) + representation = legacy_io.find_one({"_id": ObjectId(repre_id)}) if not representation: not_found["representation"].append(group_items) not_found_ids.append(repre_id) continue - version = io.find_one({"_id": representation["parent"]}) + version = legacy_io.find_one({"_id": representation["parent"]}) if not version: not_found["version"].append(group_items) not_found_ids.append(repre_id) continue elif version["type"] == "hero_version": - _version = io.find_one({ + _version = legacy_io.find_one({ "_id": version["version_id"] }) version["name"] = HeroVersionType(_version["name"]) version["data"] = _version["data"] - subset = io.find_one({"_id": version["parent"]}) + subset = legacy_io.find_one({"_id": version["parent"]}) if not subset: not_found["subset"].append(group_items) not_found_ids.append(repre_id) continue - asset = io.find_one({"_id": subset["parent"]}) + asset = legacy_io.find_one({"_id": subset["parent"]}) 
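# Condensed sketch of the parenthood lookup the model performs above for a
# loaded container: representation -> version -> subset -> asset. The
# `repre_id` value is assumed to come from the container data, as in the
# surrounding loop; error handling is omitted.
from bson.objectid import ObjectId

from openpype.pipeline import legacy_io

repre_doc = legacy_io.find_one({"_id": ObjectId(repre_id)})
version_doc = legacy_io.find_one({"_id": repre_doc["parent"]})
subset_doc = legacy_io.find_one({"_id": version_doc["parent"]})
asset_doc = legacy_io.find_one({"_id": subset_doc["parent"]})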
if not asset: not_found["asset"].append(group_items) not_found_ids.append(repre_id) @@ -390,7 +390,7 @@ class InventoryModel(TreeModel): # Store the highest available version so the model can know # whether current version is currently up-to-date. - highest_version = io.find_one({ + highest_version = legacy_io.find_one({ "type": "version", "parent": version["parent"] }, sort=[("name", -1)]) diff --git a/openpype/tools/sceneinventory/switch_dialog.py b/openpype/tools/sceneinventory/switch_dialog.py index bb3e2615ac..b2d770330f 100644 --- a/openpype/tools/sceneinventory/switch_dialog.py +++ b/openpype/tools/sceneinventory/switch_dialog.py @@ -4,7 +4,7 @@ from Qt import QtWidgets, QtCore import qtawesome from bson.objectid import ObjectId -from avalon import io +from openpype.pipeline import legacy_io from openpype.pipeline.load import ( discover_loader_plugins, switch_container, @@ -151,7 +151,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): repre_ids.add(ObjectId(item["representation"])) content_loaders.add(item["loader"]) - repres = list(io.find({ + repres = list(legacy_io.find({ "type": {"$in": ["representation", "archived_representation"]}, "_id": {"$in": list(repre_ids)} })) @@ -179,7 +179,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): content_repres[repre_id] = repres_by_id[repre_id] version_ids.append(repre["parent"]) - versions = io.find({ + versions = legacy_io.find({ "type": {"$in": ["version", "hero_version"]}, "_id": {"$in": list(set(version_ids))} }) @@ -198,7 +198,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): else: subset_ids.append(content_versions[version_id]["parent"]) - subsets = io.find({ + subsets = legacy_io.find({ "type": {"$in": ["subset", "archived_subset"]}, "_id": {"$in": subset_ids} }) @@ -220,7 +220,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): asset_ids.append(subset["parent"]) content_subsets[subset_id] = subset - assets = io.find({ + assets = legacy_io.find({ "type": {"$in": ["asset", "archived_asset"]}, "_id": {"$in": list(asset_ids)} }) @@ -472,7 +472,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # Prepare asset document if asset is selected asset_doc = None if selected_asset: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": True} ) @@ -523,7 +523,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _get_current_output_repre_ids_xxx( self, asset_doc, selected_subset, selected_repre ): - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "name": selected_subset, @@ -537,7 +537,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if not version_doc: return [] - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": version_doc["_id"], @@ -548,7 +548,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_current_output_repre_ids_xxo(self, asset_doc, selected_subset): - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "parent": asset_doc["_id"], @@ -563,7 +563,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): for repre_doc in self.content_repres.values(): repre_names.add(repre_doc["name"]) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": subset_doc["_id"], @@ -578,7 +578,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): for subset_doc in self.content_subsets.values(): susbet_names.add(subset_doc["name"]) - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "name": {"$in": 
list(susbet_names)}, @@ -587,7 +587,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): {"_id": True} ) subset_ids = [subset_doc["_id"] for subset_doc in subset_docs] - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": {"$in": subset_ids}, @@ -606,7 +606,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_name = subset_doc["name"] repres_by_subset_name[subset_name].add(repre_name) - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": asset_doc["_id"], @@ -637,7 +637,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): "parent": version_id, "name": {"$in": list(repre_names)} }) - repre_docs = io.find( + repre_docs = legacy_io.find( {"$or": repre_or_query}, {"_id": True} ) @@ -646,7 +646,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _get_current_output_repre_ids_oxx( self, selected_subset, selected_repre ): - subset_docs = list(io.find({ + subset_docs = list(legacy_io.find({ "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, "name": selected_subset @@ -657,7 +657,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): last_version["_id"] for last_version in last_versions_by_subset_id.values() ] - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "parent": {"$in": last_version_ids}, "name": selected_repre @@ -666,7 +666,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_current_output_repre_ids_oxo(self, selected_subset): - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, @@ -713,7 +713,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): "parent": last_version_id, "name": {"$in": list(repre_names)} }) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "$or": repre_or_query @@ -724,7 +724,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_current_output_repre_ids_oox(self, selected_repre): - repre_docs = io.find( + repre_docs = legacy_io.find( { "name": selected_repre, "parent": {"$in": list(self.content_versions.keys())} @@ -734,7 +734,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return [repre_doc["_id"] for repre_doc in repre_docs] def _get_asset_box_values(self): - asset_docs = io.find( + asset_docs = legacy_io.find( {"type": "asset"}, {"_id": 1, "name": 1} ) @@ -742,7 +742,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): asset_doc["_id"]: asset_doc["name"] for asset_doc in asset_docs } - subsets = io.find( + subsets = legacy_io.find( { "type": "subset", "parent": {"$in": list(asset_names_by_id.keys())} @@ -762,12 +762,15 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _get_subset_box_values(self): selected_asset = self._assets_box.get_valid_value() if selected_asset: - asset_doc = io.find_one({"type": "asset", "name": selected_asset}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": selected_asset + }) asset_ids = [asset_doc["_id"]] else: asset_ids = list(self.content_assets.keys()) - subsets = io.find( + subsets = legacy_io.find( { "type": "subset", "parent": {"$in": asset_ids} @@ -804,7 +807,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [ ] [ ] [?] 
if not selected_asset and not selected_subset: # Find all representations of selection's subsets - possible_repres = list(io.find( + possible_repres = list(legacy_io.find( { "type": "representation", "parent": {"$in": list(self.content_versions.keys())} @@ -833,11 +836,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [x] [?] if selected_asset and selected_subset: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "name": selected_subset, @@ -848,7 +851,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_id = subset_doc["_id"] last_versions_by_subset_id = self.find_last_versions([subset_id]) version_doc = last_versions_by_subset_id.get(subset_id) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": version_doc["_id"] @@ -865,7 +868,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [ ] [?] # If asset only is selected if selected_asset: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) @@ -876,7 +879,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_names = set() for subset_doc in self.content_subsets.values(): subset_names.add(subset_doc["name"]) - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "parent": asset_doc["_id"], @@ -900,7 +903,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if not subset_id_by_version_id: return list() - repre_docs = list(io.find( + repre_docs = list(legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -930,7 +933,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): return list(available_repres) # [ ] [x] [?] - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, @@ -957,7 +960,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if not subset_id_by_version_id: return list() - repre_docs = list(io.find( + repre_docs = list(legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -1013,11 +1016,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): return # [x] [ ] [?] 
- asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_docs = io.find( + subset_docs = legacy_io.find( {"type": "subset", "parent": asset_doc["_id"]}, {"name": 1} ) @@ -1048,7 +1051,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): }} ] last_versions_by_subset_id = dict() - for doc in io.aggregate(_pipeline): + for doc in legacy_io.aggregate(_pipeline): doc["parent"] = doc["_id"] doc["_id"] = doc.pop("_version_id") last_versions_by_subset_id[doc["parent"]] = doc @@ -1076,11 +1079,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [x] [ ] if selected_asset is not None and selected_subset is not None: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_doc = io.find_one( + subset_doc = legacy_io.find_one( { "type": "subset", "parent": asset_doc["_id"], @@ -1096,7 +1099,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): validation_state.repre_ok = False return - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": last_version["_id"] @@ -1116,11 +1119,11 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [ ] [ ] if selected_asset is not None: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( {"type": "asset", "name": selected_asset}, {"_id": 1} ) - subset_docs = list(io.find( + subset_docs = list(legacy_io.find( { "type": "subset", "parent": asset_doc["_id"] @@ -1142,7 +1145,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): version_id = last_version["_id"] subset_id_by_version_id[version_id] = subset_id - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -1173,7 +1176,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [ ] [x] [ ] # Subset documents - subset_docs = io.find( + subset_docs = legacy_io.find( { "type": "subset", "parent": {"$in": list(self.content_assets.keys())}, @@ -1194,7 +1197,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): version_id = last_version["_id"] subset_id_by_version_id[version_id] = subset_id - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "parent": {"$in": list(subset_id_by_version_id.keys())} @@ -1225,7 +1228,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): def _on_current_asset(self): # Set initial asset as current. 
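# Simplified illustration of the "last version per subset" aggregation that
# find_last_versions() above feeds into legacy_io.aggregate(). Only the tail
# of the real _pipeline is visible in this hunk, so the $match/$sort/$group
# stages here are an assumption; the post-processing mirrors the code above.
# `subset_ids` is assumed to be a list of subset ObjectIds.
pipeline = [
    {"$match": {"type": "version", "parent": {"$in": list(subset_ids)}}},
    {"$sort": {"name": -1}},
    {"$group": {"_id": "$parent", "_version_id": {"$first": "$_id"}}},
]
last_versions_by_subset_id = dict()
for doc in legacy_io.aggregate(pipeline):
    doc["parent"] = doc["_id"]
    doc["_id"] = doc.pop("_version_id")
    last_versions_by_subset_id[doc["parent"]] = doc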
- asset_name = io.Session["AVALON_ASSET"] + asset_name = legacy_io.Session["AVALON_ASSET"] index = self._assets_box.findText( asset_name, QtCore.Qt.MatchFixedString ) @@ -1243,7 +1246,10 @@ class SwitchAssetDialog(QtWidgets.QDialog): selected_representation = self._representations_box.get_valid_value() if selected_asset: - asset_doc = io.find_one({"type": "asset", "name": selected_asset}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": selected_asset + }) asset_docs_by_id = {asset_doc["_id"]: asset_doc} else: asset_docs_by_id = self.content_assets @@ -1262,7 +1268,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): if selected_subset: subset_query["name"] = selected_subset - subset_docs = list(io.find(subset_query)) + subset_docs = list(legacy_io.find(subset_query)) subset_ids = [] subset_docs_by_parent_and_name = collections.defaultdict(dict) for subset in subset_docs: @@ -1272,12 +1278,12 @@ class SwitchAssetDialog(QtWidgets.QDialog): subset_docs_by_parent_and_name[parent_id][name] = subset # versions - version_docs = list(io.find({ + version_docs = list(legacy_io.find({ "type": "version", "parent": {"$in": subset_ids} }, sort=[("name", -1)])) - hero_version_docs = list(io.find({ + hero_version_docs = list(legacy_io.find({ "type": "hero_version", "parent": {"$in": subset_ids} })) @@ -1297,7 +1303,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): parent_id = hero_version_doc["parent"] hero_version_docs_by_parent_id[parent_id] = hero_version_doc - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "parent": {"$in": version_ids} }) diff --git a/openpype/tools/sceneinventory/view.py b/openpype/tools/sceneinventory/view.py index 2df6d00406..448e3f4e6f 100644 --- a/openpype/tools/sceneinventory/view.py +++ b/openpype/tools/sceneinventory/view.py @@ -6,10 +6,9 @@ from Qt import QtWidgets, QtCore import qtawesome from bson.objectid import ObjectId -from avalon import io - from openpype import style from openpype.pipeline import ( + legacy_io, HeroVersionType, update_container, remove_container, @@ -84,7 +83,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if item_id not in repre_ids: repre_ids.append(item_id) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "_id": {"$in": repre_ids} @@ -98,7 +97,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if version_id not in version_ids: version_ids.append(version_id) - loaded_versions = io.find({ + loaded_versions = legacy_io.find({ "_id": {"$in": version_ids}, "type": {"$in": ["version", "hero_version"]} }) @@ -115,7 +114,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if parent_id not in version_parents: version_parents.append(parent_id) - all_versions = io.find({ + all_versions = legacy_io.find({ "type": {"$in": ["hero_version", "version"]}, "parent": {"$in": version_parents} }) @@ -151,7 +150,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if item_id not in repre_ids: repre_ids.append(item_id) - repre_docs = io.find( + repre_docs = legacy_io.find( { "type": "representation", "_id": {"$in": repre_ids} @@ -166,7 +165,7 @@ class SceneInventoryView(QtWidgets.QTreeView): version_id_by_repre_id[repre_doc["_id"]] = version_id if version_id not in version_ids: version_ids.append(version_id) - hero_versions = io.find( + hero_versions = legacy_io.find( { "_id": {"$in": version_ids}, "type": "hero_version" @@ -184,7 +183,7 @@ class SceneInventoryView(QtWidgets.QTreeView): if current_version_id == hero_version_id: version_id_by_repre_id[_repre_id] = version_id - version_docs 
= io.find( + version_docs = legacy_io.find( { "_id": {"$in": list(version_ids)}, "type": "version" @@ -367,11 +366,11 @@ class SceneInventoryView(QtWidgets.QTreeView): repre_ids (list) side (str): 'active_site'|'remote_site' """ - project_name = io.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] active_site = self.sync_server.get_active_site(project_name) remote_site = self.sync_server.get_remote_site(project_name) - repre_docs = io.find({ + repre_docs = legacy_io.find({ "type": "representation", "_id": {"$in": repre_ids} }) @@ -661,12 +660,12 @@ class SceneInventoryView(QtWidgets.QTreeView): # Get available versions for active representation representation_id = ObjectId(active["representation"]) - representation = io.find_one({"_id": representation_id}) - version = io.find_one({ + representation = legacy_io.find_one({"_id": representation_id}) + version = legacy_io.find_one({ "_id": representation["parent"] }) - versions = list(io.find( + versions = list(legacy_io.find( { "parent": version["parent"], "type": "version" @@ -674,7 +673,7 @@ class SceneInventoryView(QtWidgets.QTreeView): sort=[("name", 1)] )) - hero_version = io.find_one({ + hero_version = legacy_io.find_one({ "parent": version["parent"], "type": "hero_version" }) diff --git a/openpype/tools/sceneinventory/window.py b/openpype/tools/sceneinventory/window.py index b40fbb69e4..054c2a2daa 100644 --- a/openpype/tools/sceneinventory/window.py +++ b/openpype/tools/sceneinventory/window.py @@ -3,8 +3,8 @@ import sys from Qt import QtWidgets, QtCore import qtawesome -from avalon import io, api +from openpype.pipeline import legacy_io from openpype import style from openpype.tools.utils.delegates import VersionDelegate from openpype.tools.utils.lib import ( @@ -72,7 +72,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): control_layout.addWidget(refresh_button) # endregion control - family_config_cache = FamilyConfigCache(io) + family_config_cache = FamilyConfigCache(legacy_io) model = InventoryModel(family_config_cache) proxy = FilterProxyModel() @@ -91,7 +91,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view.setColumnWidth(4, 100) # namespace # apply delegates - version_delegate = VersionDelegate(io, self) + version_delegate = VersionDelegate(legacy_io, self) column = model.Columns.index("version") view.setItemDelegateForColumn(column, version_delegate) @@ -191,17 +191,18 @@ def show(root=None, debug=False, parent=None, items=None): pass if debug is True: - io.install() + legacy_io.install() if not os.environ.get("AVALON_PROJECT"): any_project = next( - project for project in io.projects() + project for project in legacy_io.projects() if project.get("active", True) is not False ) - api.Session["AVALON_PROJECT"] = any_project["name"] + project_name = any_project["name"] else: - api.Session["AVALON_PROJECT"] = os.environ.get("AVALON_PROJECT") + project_name = os.environ.get("AVALON_PROJECT") + legacy_io.Session["AVALON_PROJECT"] = project_name with qt_app_context(): window = SceneInventoryWindow(parent) diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index 4d7f94f825..fbafc7142a 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -5,16 +5,18 @@ import random import string from Qt import QtWidgets, QtCore -from . 
import DropDataFrame -from .constants import HOST_NAME -from avalon import io + from openpype.api import execute, Logger +from openpype.pipeline import legacy_io from openpype.lib import ( get_openpype_execute_args, apply_project_environments_value ) -log = Logger().get_logger("standalonepublisher") +from . import DropDataFrame +from .constants import HOST_NAME + +log = Logger.get_logger("standalonepublisher") class ComponentsWidget(QtWidgets.QWidget): @@ -152,18 +154,18 @@ def set_context(project, asset, task): :type asset: str ''' os.environ["AVALON_PROJECT"] = project - io.Session["AVALON_PROJECT"] = project + legacy_io.Session["AVALON_PROJECT"] = project os.environ["AVALON_ASSET"] = asset - io.Session["AVALON_ASSET"] = asset + legacy_io.Session["AVALON_ASSET"] = asset if not task: task = '' os.environ["AVALON_TASK"] = task - io.Session["AVALON_TASK"] = task + legacy_io.Session["AVALON_TASK"] = task - io.Session["current_dir"] = os.path.normpath(os.getcwd()) + legacy_io.Session["current_dir"] = os.path.normpath(os.getcwd()) os.environ["AVALON_APP"] = HOST_NAME - io.Session["AVALON_APP"] = HOST_NAME + legacy_io.Session["AVALON_APP"] = HOST_NAME def cli_publish(data, publish_paths, gui=True): @@ -171,7 +173,7 @@ def cli_publish(data, publish_paths, gui=True): os.path.dirname(os.path.dirname(__file__)), "publish.py" ) - io.install() + legacy_io.install() # Create hash name folder in temp chars = "".join([random.choice(string.ascii_letters) for i in range(15)]) @@ -203,6 +205,6 @@ def cli_publish(data, publish_paths, gui=True): log.info(f"Publish result: {result}") - io.uninstall() + legacy_io.uninstall() return False diff --git a/openpype/tools/texture_copy/app.py b/openpype/tools/texture_copy/app.py index 0c3c260e51..fd8d6dc02e 100644 --- a/openpype/tools/texture_copy/app.py +++ b/openpype/tools/texture_copy/app.py @@ -1,14 +1,12 @@ import os import re import click -from avalon import io, api -from pprint import pprint + +import speedcopy from openpype.lib import Terminal from openpype.api import Anatomy - -import shutil -import speedcopy +from openpype.pipeline import legacy_io t = Terminal() @@ -20,8 +18,8 @@ texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga', class TextureCopy: def __init__(self): - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() def _get_textures(self, path): textures = [] @@ -32,14 +30,14 @@ class TextureCopy: return textures def _get_project(self, project_name): - project = io.find_one({ + project = legacy_io.find_one({ 'type': 'project', 'name': project_name }) return project def _get_asset(self, asset_name): - asset = io.find_one({ + asset = legacy_io.find_one({ 'type': 'asset', 'name': asset_name }) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index b0c30f6dfb..d8f4570120 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -4,9 +4,14 @@ It is possible to create `HostToolsHelper` in host implementation or use singleton approach with global functions (using helper anyway). 
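# Generic launch pattern shared by the tools in this commit: install
# legacy_io, then build the Qt UI inside qt_app_context(). The dialog here
# is only a stand-in for the real tool windows shown in these diffs.
from Qt import QtWidgets

from openpype.pipeline import legacy_io
from openpype.tools.utils.lib import qt_app_context

legacy_io.install()
with qt_app_context():
    dialog = QtWidgets.QDialog()
    dialog.setWindowTitle(legacy_io.Session["AVALON_PROJECT"])
    dialog.show()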
""" import os -import avalon.api + import pyblish.api -from openpype.pipeline import registered_host + +from openpype.pipeline import ( + registered_host, + legacy_io, +) + from .lib import qt_app_context @@ -73,8 +78,8 @@ class HostToolsHelper: if use_context: context = { - "asset": avalon.api.Session["AVALON_ASSET"], - "task": avalon.api.Session["AVALON_TASK"] + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } workfiles_tool.set_context(context) @@ -105,7 +110,7 @@ class HostToolsHelper: use_context = False if use_context: - context = {"asset": avalon.api.Session["AVALON_ASSET"]} + context = {"asset": legacy_io.Session["AVALON_ASSET"]} loader_tool.set_context(context, refresh=True) else: loader_tool.refresh() diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py index 38e1911060..352847ede8 100644 --- a/openpype/tools/workfiles/app.py +++ b/openpype/tools/workfiles/app.py @@ -1,9 +1,10 @@ import sys import logging -from avalon import api - -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from openpype.tools.utils import qt_app_context from .window import Window @@ -52,8 +53,8 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True): validate_host_requirements(host) if debug: - api.Session["AVALON_ASSET"] = "Mock" - api.Session["AVALON_TASK"] = "Testing" + legacy_io.Session["AVALON_ASSET"] = "Mock" + legacy_io.Session["AVALON_TASK"] = "Testing" with qt_app_context(): window = Window(parent=parent) @@ -61,8 +62,8 @@ def show(root=None, debug=False, parent=None, use_context=True, save=True): if use_context: context = { - "asset": api.Session["AVALON_ASSET"], - "task": api.Session["AVALON_TASK"] + "asset": legacy_io.Session["AVALON_ASSET"], + "task": legacy_io.Session["AVALON_TASK"] } window.set_context(context) diff --git a/openpype/tools/workfiles/files_widget.py b/openpype/tools/workfiles/files_widget.py index bb2ded3b94..977111b71b 100644 --- a/openpype/tools/workfiles/files_widget.py +++ b/openpype/tools/workfiles/files_widget.py @@ -4,7 +4,6 @@ import shutil import Qt from Qt import QtWidgets, QtCore -from avalon import io, api from openpype.tools.utils import PlaceholderLineEdit from openpype.tools.utils.delegates import PrettyTimeDelegate @@ -18,7 +17,10 @@ from openpype.lib.avalon_context import ( update_current_task, compute_session_changes ) -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from .model import ( WorkAreaFilesModel, PublishFilesModel, @@ -87,7 +89,7 @@ class FilesWidget(QtWidgets.QWidget): self._task_type = None # Pype's anatomy object for current project - self.anatomy = Anatomy(io.Session["AVALON_PROJECT"]) + self.anatomy = Anatomy(legacy_io.Session["AVALON_PROJECT"]) # Template key used to get work template from anatomy templates self.template_key = "work" @@ -147,7 +149,9 @@ class FilesWidget(QtWidgets.QWidget): workarea_files_view.setColumnWidth(0, 330) # --- Publish files view --- - publish_files_model = PublishFilesModel(extensions, io, self.anatomy) + publish_files_model = PublishFilesModel( + extensions, legacy_io, self.anatomy + ) publish_proxy_model = QtCore.QSortFilterProxyModel() publish_proxy_model.setSourceModel(publish_files_model) @@ -380,13 +384,13 @@ class FilesWidget(QtWidgets.QWidget): return None if self._asset_doc is None: - self._asset_doc = io.find_one({"_id": self._asset_id}) + self._asset_doc = legacy_io.find_one({"_id": 
self._asset_id}) return self._asset_doc def _get_session(self): """Return a modified session for the current asset and task""" - session = api.Session.copy() + session = legacy_io.Session.copy() self.template_key = get_workfile_template_key( self._task_type, session["AVALON_APP"], @@ -405,7 +409,7 @@ class FilesWidget(QtWidgets.QWidget): def _enter_session(self): """Enter the asset and task session currently selected""" - session = api.Session.copy() + session = legacy_io.Session.copy() changes = compute_session_changes( session, asset=self._get_asset_doc(), @@ -595,10 +599,10 @@ class FilesWidget(QtWidgets.QWidget): # Create extra folders create_workdir_extra_folders( self._workdir_path, - api.Session["AVALON_APP"], + legacy_io.Session["AVALON_APP"], self._task_type, self._task_name, - api.Session["AVALON_PROJECT"] + legacy_io.Session["AVALON_PROJECT"] ) # Trigger after save events emit_event( diff --git a/openpype/tools/workfiles/save_as_dialog.py b/openpype/tools/workfiles/save_as_dialog.py index 0a7c7821ba..3e97d6c938 100644 --- a/openpype/tools/workfiles/save_as_dialog.py +++ b/openpype/tools/workfiles/save_as_dialog.py @@ -5,13 +5,14 @@ import logging from Qt import QtWidgets, QtCore -from avalon import api, io - from openpype.lib import ( get_last_workfile_with_version, get_workdir_data, ) -from openpype.pipeline import registered_host +from openpype.pipeline import ( + registered_host, + legacy_io, +) from openpype.tools.utils import PlaceholderLineEdit log = logging.getLogger(__name__) @@ -24,7 +25,7 @@ def build_workfile_data(session): asset_name = session["AVALON_ASSET"] task_name = session["AVALON_TASK"] host_name = session["AVALON_APP"] - project_doc = io.find_one( + project_doc = legacy_io.find_one( {"type": "project"}, { "name": True, @@ -33,7 +34,7 @@ def build_workfile_data(session): } ) - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( { "type": "asset", "name": asset_name @@ -208,7 +209,7 @@ class SaveAsDialog(QtWidgets.QDialog): if not session: # Fallback to active session - session = api.Session + session = legacy_io.Session self.data = build_workfile_data(session) @@ -283,7 +284,7 @@ class SaveAsDialog(QtWidgets.QDialog): if current_filepath: # We match the current filename against the current session # instead of the session where the user is saving to. 
- current_data = build_workfile_data(api.Session) + current_data = build_workfile_data(legacy_io.Session) matcher = CommentMatcher(anatomy, template_key, current_data) comment = matcher.parse_comment(current_filepath) if comment: diff --git a/openpype/tools/workfiles/window.py b/openpype/tools/workfiles/window.py index 73e63d30b5..02a22af26c 100644 --- a/openpype/tools/workfiles/window.py +++ b/openpype/tools/workfiles/window.py @@ -2,14 +2,13 @@ import os import datetime from Qt import QtCore, QtWidgets -from avalon import io - from openpype import style from openpype.lib import ( get_workfile_doc, create_workfile_doc, save_workfile_data_to_doc, ) +from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget from openpype.tools.utils.tasks_widget import TasksWidget @@ -158,10 +157,12 @@ class Window(QtWidgets.QMainWindow): home_page_widget = QtWidgets.QWidget(pages_widget) home_body_widget = QtWidgets.QWidget(home_page_widget) - assets_widget = SingleSelectAssetsWidget(io, parent=home_body_widget) + assets_widget = SingleSelectAssetsWidget( + legacy_io, parent=home_body_widget + ) assets_widget.set_current_asset_btn_visibility(True) - tasks_widget = TasksWidget(io, home_body_widget) + tasks_widget = TasksWidget(legacy_io, home_body_widget) files_widget = FilesWidget(home_body_widget) side_panel = SidePanelWidget(home_body_widget) @@ -250,7 +251,7 @@ class Window(QtWidgets.QMainWindow): if asset_id and task_name and filepath: filename = os.path.split(filepath)[1] workfile_doc = get_workfile_doc( - asset_id, task_name, filename, io + asset_id, task_name, filename, legacy_io ) self.side_panel.set_context( asset_id, task_name, filepath, workfile_doc @@ -272,7 +273,7 @@ class Window(QtWidgets.QMainWindow): self._create_workfile_doc(filepath, force=True) workfile_doc = self._get_current_workfile_doc() - save_workfile_data_to_doc(workfile_doc, data, io) + save_workfile_data_to_doc(workfile_doc, data, legacy_io) def _get_current_workfile_doc(self, filepath=None): if filepath is None: @@ -284,7 +285,7 @@ class Window(QtWidgets.QMainWindow): filename = os.path.split(filepath)[1] return get_workfile_doc( - asset_id, task_name, filename, io + asset_id, task_name, filename, legacy_io ) def _create_workfile_doc(self, filepath, force=False): @@ -295,9 +296,11 @@ class Window(QtWidgets.QMainWindow): if not workfile_doc: workdir, filename = os.path.split(filepath) asset_id = self.assets_widget.get_selected_asset_id() - asset_doc = io.find_one({"_id": asset_id}) + asset_doc = legacy_io.find_one({"_id": asset_id}) task_name = self.tasks_widget.get_selected_task_name() - create_workfile_doc(asset_doc, task_name, filename, workdir, io) + create_workfile_doc( + asset_doc, task_name, filename, workdir, legacy_io + ) def refresh(self): # Refresh asset widget @@ -319,7 +322,7 @@ class Window(QtWidgets.QMainWindow): self._context_to_set, context = None, self._context_to_set if "asset" in context: - asset_doc = io.find_one( + asset_doc = legacy_io.find_one( { "name": context["asset"], "type": "asset" From 2f02e0399589e35dd3434450f8e068016daedad6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:34:03 +0200 Subject: [PATCH 128/244] replace avalon imports in global plugins --- openpype/plugins/publish/cleanup_farm.py | 5 ++- .../publish/collect_anatomy_context_data.py | 7 ++-- .../publish/collect_anatomy_instance_data.py | 9 +++-- .../publish/collect_avalon_entities.py | 15 +++---- .../publish/collect_from_create_context.py | 5 ++- 
openpype/plugins/publish/collect_hierarchy.py | 5 ++- .../plugins/publish/collect_rendered_files.py | 5 ++- .../plugins/publish/collect_resources_path.py | 5 ++- .../publish/collect_scene_loaded_versions.py | 12 ++++-- .../publish/extract_hierarchy_avalon.py | 25 ++++++------ .../plugins/publish/integrate_hero_version.py | 21 +++++----- .../plugins/publish/integrate_inputlinks.py | 8 ++-- openpype/plugins/publish/integrate_new.py | 39 ++++++++++--------- .../plugins/publish/integrate_thumbnail.py | 14 +++---- .../publish/validate_editorial_asset_name.py | 12 +++--- 15 files changed, 106 insertions(+), 81 deletions(-) diff --git a/openpype/plugins/publish/cleanup_farm.py b/openpype/plugins/publish/cleanup_farm.py index ab0c6e469e..2c6c1625bb 100644 --- a/openpype/plugins/publish/cleanup_farm.py +++ b/openpype/plugins/publish/cleanup_farm.py @@ -3,7 +3,8 @@ import os import shutil import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CleanUpFarm(pyblish.api.ContextPlugin): @@ -22,7 +23,7 @@ class CleanUpFarm(pyblish.api.ContextPlugin): def process(self, context): # Get source host from which farm publishing was started - src_host_name = avalon.api.Session.get("AVALON_APP") + src_host_name = legacy_io.Session.get("AVALON_APP") self.log.debug("Host name from session is {}".format(src_host_name)) # Skip process if is not in list of source hosts in which this # plugin should run diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index bd8d9e50c4..0794adfb67 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -13,11 +13,12 @@ Provides: """ import json +import pyblish.api + from openpype.lib import ( get_system_general_anatomy_data ) -from avalon import api -import pyblish.api +from openpype.pipeline import legacy_io class CollectAnatomyContextData(pyblish.api.ContextPlugin): @@ -65,7 +66,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): asset_entity = context.data.get("assetEntity") if asset_entity: - task_name = api.Session["AVALON_TASK"] + task_name = legacy_io.Session["AVALON_TASK"] asset_tasks = asset_entity["data"]["tasks"] task_type = asset_tasks.get(task_name, {}).get("type") diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index 42836e796b..6a6ea170b5 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -25,9 +25,10 @@ import copy import json import collections -from avalon import io import pyblish.api +from openpype.pipeline import legacy_io + class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): """Collect Instance specific Anatomy data. 
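# Hypothetical minimal collector showing the pattern shared by the publish
# plugins in this commit: the active context is read from legacy_io.Session
# inside process(). This plugin is an illustration only, not part of
# OpenPype.
import pyblish.api

from openpype.pipeline import legacy_io


class CollectCurrentTaskExample(pyblish.api.ContextPlugin):
    """Store the current task name on the publish context (example)."""

    order = pyblish.api.CollectorOrder
    label = "Collect Current Task (example)"

    def process(self, context):
        context.data["exampleTaskName"] = legacy_io.Session["AVALON_TASK"]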
@@ -83,7 +84,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): self.log.debug("Querying asset documents with names: {}".format( ", ".join(["\"{}\"".format(name) for name in asset_names]) )) - asset_docs = io.find({ + asset_docs = legacy_io.find({ "type": "asset", "name": {"$in": asset_names} }) @@ -153,7 +154,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): subset_docs = [] if subset_filters: - subset_docs = list(io.find({ + subset_docs = list(legacy_io.find({ "type": "subset", "$or": subset_filters })) @@ -202,7 +203,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): ] last_version_by_subset_id = {} - for doc in io.aggregate(_pipeline): + for doc in legacy_io.aggregate(_pipeline): subset_id = doc["_id"] last_version_by_subset_id[subset_id] = doc["name"] diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py index c099a2cf75..3e7843407f 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_avalon_entities.py @@ -8,9 +8,10 @@ Provides: context -> assetEntity - asset entity from database """ -from avalon import io, api import pyblish.api +from openpype.pipeline import legacy_io + class CollectAvalonEntities(pyblish.api.ContextPlugin): """Collect Anatomy into Context""" @@ -19,12 +20,12 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin): label = "Collect Avalon Entities" def process(self, context): - io.install() - project_name = api.Session["AVALON_PROJECT"] - asset_name = api.Session["AVALON_ASSET"] - task_name = api.Session["AVALON_TASK"] + legacy_io.install() + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] - project_entity = io.find_one({ + project_entity = legacy_io.find_one({ "type": "project", "name": project_name }) @@ -38,7 +39,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin): if not asset_name: self.log.info("Context is not set. 
Can't collect global data.") return - asset_entity = io.find_one({ + asset_entity = legacy_io.find_one({ "type": "asset", "name": asset_name, "parent": project_entity["_id"] diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index 16e3f669c3..b2f757f108 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -3,7 +3,8 @@ """ import os import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CollectFromCreateContext(pyblish.api.ContextPlugin): @@ -30,7 +31,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"): value = create_context.dbcon.Session.get(key) if value is not None: - avalon.api.Session[key] = value + legacy_io.Session[key] = value os.environ[key] = value def create_instance(self, context, in_data): diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index efb40407d9..4e94acce4a 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -1,5 +1,6 @@ import pyblish.api -import avalon.api as avalon + +from openpype.pipeline import legacy_io class CollectHierarchy(pyblish.api.ContextPlugin): @@ -19,7 +20,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): def process(self, context): temp_context = {} - project_name = avalon.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] final_context = {} final_context[project_name] = {} final_context[project_name]['entity_type'] = 'Project' diff --git a/openpype/plugins/publish/collect_rendered_files.py b/openpype/plugins/publish/collect_rendered_files.py index 1005c38b9d..670e57ed10 100644 --- a/openpype/plugins/publish/collect_rendered_files.py +++ b/openpype/plugins/publish/collect_rendered_files.py @@ -11,7 +11,8 @@ import os import json import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io class CollectRenderedFiles(pyblish.api.ContextPlugin): @@ -150,7 +151,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): session_data["AVALON_WORKDIR"] = remapped self.log.info("Setting session using data from file") - api.Session.update(session_data) + legacy_io.Session.update(session_data) os.environ.update(session_data) session_is_set = True self._process_path(data, anatomy) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 1f509365c7..89df031fb0 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -12,7 +12,8 @@ import os import copy import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io class CollectResourcesPath(pyblish.api.InstancePlugin): @@ -84,7 +85,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." 
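
The collector hunks above and below all apply the same substitution: session keys and database queries that used to go through avalon.api / avalon.io are routed through openpype.pipeline.legacy_io. A minimal sketch of the resulting collector shape, with an invented plugin name, data key and query purely for illustration:

    import pyblish.api

    from openpype.pipeline import legacy_io


    class CollectExampleAsset(pyblish.api.ContextPlugin):
        """Hypothetical collector illustrating the legacy_io access pattern."""

        order = pyblish.api.CollectorOrder
        label = "Collect Example Asset"

        def process(self, context):
            # Session keys come from legacy_io.Session instead of
            # avalon.api.Session.
            project_name = legacy_io.Session["AVALON_PROJECT"]
            asset_name = legacy_io.Session["AVALON_ASSET"]

            # Queries go through the same module-level wrapper as before,
            # only the import changed.
            asset_doc = legacy_io.find_one({
                "type": "asset",
                "name": asset_name
            })
            context.data["exampleAssetDoc"] = asset_doc
            self.log.debug(
                "Collected asset {} from project {}".format(
                    asset_name, project_name
                )
            )
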
diff --git a/openpype/plugins/publish/collect_scene_loaded_versions.py b/openpype/plugins/publish/collect_scene_loaded_versions.py index e54592abb8..f2ade1ac28 100644 --- a/openpype/plugins/publish/collect_scene_loaded_versions.py +++ b/openpype/plugins/publish/collect_scene_loaded_versions.py @@ -1,8 +1,11 @@ from bson.objectid import ObjectId import pyblish.api -from avalon import io -from openpype.pipeline import registered_host + +from openpype.pipeline import ( + registered_host, + legacy_io, +) class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): @@ -40,7 +43,10 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): _repr_ids = [ObjectId(c["representation"]) for c in _containers] version_by_repr = { str(doc["_id"]): doc["parent"] for doc in - io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1}) + legacy_io.find( + {"_id": {"$in": _repr_ids}}, + projection={"parent": 1} + ) } for con in _containers: diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index b062a9c4b5..2f528d4469 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,7 +1,10 @@ -import pyblish.api -from avalon import io from copy import deepcopy +import pyblish.api + +from openpype.pipeline import legacy_io + + class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): """Create entities in Avalon based on collected data.""" @@ -16,8 +19,8 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): return hierarchy_context = deepcopy(context.data["hierarchyContext"]) - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() active_assets = [] # filter only the active publishing insatnces @@ -78,7 +81,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): update_data = True # Process project if entity_type.lower() == "project": - entity = io.find_one({"type": "project"}) + entity = legacy_io.find_one({"type": "project"}) # TODO: should be in validator? assert (entity is not None), "Did not find project in DB" @@ -95,7 +98,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): ) # Else process assset else: - entity = io.find_one({"type": "asset", "name": name}) + entity = legacy_io.find_one({"type": "asset", "name": name}) if entity: # Do not override data, only update cur_entity_data = entity.get("data") or {} @@ -119,7 +122,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): # Skip updating data update_data = False - archived_entities = io.find({ + archived_entities = legacy_io.find({ "type": "archived_asset", "name": name }) @@ -143,7 +146,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if update_data: # Update entity data with input data - io.update_many( + legacy_io.update_many( {"_id": entity["_id"]}, {"$set": {"data": data}} ) @@ -161,7 +164,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): "type": "asset", "data": data } - io.replace_one( + legacy_io.replace_one( {"_id": entity["_id"]}, new_entity ) @@ -176,9 +179,9 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): "data": data } self.log.debug("Creating asset: {}".format(item)) - entity_id = io.insert_one(item).inserted_id + entity_id = legacy_io.insert_one(item).inserted_id - return io.find_one({"_id": entity_id}) + return legacy_io.find_one({"_id": entity_id}) def _get_assets(self, input_dict): """ Returns only asset dictionary. 
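
ExtractHierarchyToAvalon above keeps its original install guard and find-or-create flow, only pointed at legacy_io. A stripped-down sketch of that flow; the helper name and the document fields it writes are invented for illustration:

    from openpype.pipeline import legacy_io


    def ensure_asset(name, project_id, data=None):
        """Hypothetical find-or-create helper following the flow above."""
        # Plugins guard against an uninstalled connection before querying.
        if not legacy_io.Session:
            legacy_io.install()

        asset_doc = legacy_io.find_one({"type": "asset", "name": name})
        if asset_doc:
            # Existing entities only get their data updated.
            legacy_io.update_many(
                {"_id": asset_doc["_id"]},
                {"$set": {"data": data or {}}}
            )
            return legacy_io.find_one({"_id": asset_doc["_id"]})

        # Otherwise a new asset document is inserted and re-read.
        asset_id = legacy_io.insert_one({
            "type": "asset",
            "name": name,
            "parent": project_id,
            "data": data or {}
        }).inserted_id
        return legacy_io.find_one({"_id": asset_id})
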
diff --git a/openpype/plugins/publish/integrate_hero_version.py b/openpype/plugins/publish/integrate_hero_version.py index 76720fc9a3..a706b653c4 100644 --- a/openpype/plugins/publish/integrate_hero_version.py +++ b/openpype/plugins/publish/integrate_hero_version.py @@ -8,12 +8,14 @@ from bson.objectid import ObjectId from pymongo import InsertOne, ReplaceOne import pyblish.api -from avalon import api, io from openpype.lib import ( create_hard_link, filter_profiles ) -from openpype.pipeline import schema +from openpype.pipeline import ( + schema, + legacy_io, +) class IntegrateHeroVersion(pyblish.api.InstancePlugin): @@ -63,7 +65,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): template_key = self._get_template_key(instance) anatomy = instance.context.data["anatomy"] - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] if template_key not in anatomy.templates: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" @@ -221,7 +223,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): if old_repres_by_name: old_repres_to_delete = old_repres_by_name - archived_repres = list(io.find({ + archived_repres = list(legacy_io.find({ # Check what is type of archived representation "type": "archived_repsentation", "parent": new_version_id @@ -442,7 +444,8 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): ) if bulk_writes: - io._database[io.Session["AVALON_PROJECT"]].bulk_write( + project_name = legacy_io.Session["AVALON_PROJECT"] + legacy_io.database[project_name].bulk_write( bulk_writes ) @@ -504,7 +507,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_filled = anatomy.format(template_data) # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." 
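
The bulk-write hunk above is the same change integrate_new.py receives further down: the private io._database handle becomes the public legacy_io.database attribute, with the collection name resolved explicitly from the session. A small sketch of that call shape; the helper and the operations it builds are invented for illustration:

    from pymongo import InsertOne, DeleteOne

    from openpype.pipeline import legacy_io


    def flush_bulk_writes(new_docs, obsolete_ids):
        """Hypothetical helper mirroring the bulk-write pattern above."""
        bulk_writes = [InsertOne(doc) for doc in new_docs]
        bulk_writes.extend(DeleteOne({"_id": _id}) for _id in obsolete_ids)
        if not bulk_writes:
            return

        # Collections are named after the project, so the name is taken
        # from the session rather than from the removed io._database handle.
        project_name = legacy_io.Session["AVALON_PROJECT"]
        legacy_io.database[project_name].bulk_write(bulk_writes)
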
@@ -585,12 +588,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): def version_from_representations(self, repres): for repre in repres: - version = io.find_one({"_id": repre["parent"]}) + version = legacy_io.find_one({"_id": repre["parent"]}) if version: return version def current_hero_ents(self, version): - hero_version = io.find_one({ + hero_version = legacy_io.find_one({ "parent": version["parent"], "type": "hero_version" }) @@ -598,7 +601,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): if not hero_version: return (None, []) - hero_repres = list(io.find({ + hero_repres = list(legacy_io.find({ "parent": hero_version["_id"], "type": "representation" })) diff --git a/openpype/plugins/publish/integrate_inputlinks.py b/openpype/plugins/publish/integrate_inputlinks.py index 11cffc4638..6964f2d938 100644 --- a/openpype/plugins/publish/integrate_inputlinks.py +++ b/openpype/plugins/publish/integrate_inputlinks.py @@ -3,7 +3,7 @@ from collections import OrderedDict from bson.objectid import ObjectId import pyblish.api -from avalon import io +from openpype.pipeline import legacy_io class IntegrateInputLinks(pyblish.api.ContextPlugin): @@ -129,5 +129,7 @@ class IntegrateInputLinks(pyblish.api.ContextPlugin): if input_links is None: continue - io.update_one({"_id": version_doc["_id"]}, - {"$set": {"data.inputLinks": input_links}}) + legacy_io.update_one( + {"_id": version_doc["_id"]}, + {"$set": {"data.inputLinks": input_links}} + ) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 5dcbb8fabd..891d47f471 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -9,14 +9,13 @@ import six import re import shutil from collections import deque, defaultdict +from datetime import datetime from bson.objectid import ObjectId from pymongo import DeleteOne, InsertOne import pyblish.api -from avalon import io + import openpype.api -from datetime import datetime -# from pype.modules import ModulesManager from openpype.lib.profiles_filtering import filter_profiles from openpype.lib import ( prepare_template_data, @@ -24,6 +23,7 @@ from openpype.lib import ( StringTemplate, TemplateUnsolved ) +from openpype.pipeline import legacy_io # this is needed until speedcopy for linux is fixed if sys.platform == "win32": @@ -152,7 +152,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Required environment variables anatomy_data = instance.data["anatomyData"] - io.install() + legacy_io.install() context = instance.context @@ -166,7 +166,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): asset_name = instance.data["asset"] asset_entity = instance.data.get("assetEntity") if not asset_entity or asset_entity["name"] != context_asset_name: - asset_entity = io.find_one({ + asset_entity = legacy_io.find_one({ "type": "asset", "name": asset_name, "parent": project_entity["_id"] @@ -259,14 +259,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): new_repre_names_low = [_repre["name"].lower() for _repre in repres] - existing_version = io.find_one({ + existing_version = legacy_io.find_one({ 'type': 'version', 'parent': subset["_id"], 'name': version_number }) if existing_version is None: - version_id = io.insert_one(version).inserted_id + version_id = legacy_io.insert_one(version).inserted_id else: # Check if instance have set `append` mode which cause that # only replicated representations are set to archive @@ -274,7 +274,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # 
Update version data # TODO query by _id and - io.update_many({ + legacy_io.update_many({ 'type': 'version', 'parent': subset["_id"], 'name': version_number @@ -284,7 +284,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): version_id = existing_version['_id'] # Find representations of existing version and archive them - current_repres = list(io.find({ + current_repres = list(legacy_io.find({ "type": "representation", "parent": version_id })) @@ -307,14 +307,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # bulk updates if bulk_writes: - io._database[io.Session["AVALON_PROJECT"]].bulk_write( + project_name = legacy_io.Session["AVALON_PROJECT"] + legacy_io.database[project_name].bulk_write( bulk_writes ) - version = io.find_one({"_id": version_id}) + version = legacy_io.find_one({"_id": version_id}) instance.data["versionEntity"] = version - existing_repres = list(io.find({ + existing_repres = list(legacy_io.find({ "parent": version_id, "type": "archived_representation" })) @@ -654,12 +655,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre_ids_to_remove = [] for repre in existing_repres: repre_ids_to_remove.append(repre["_id"]) - io.delete_many({"_id": {"$in": repre_ids_to_remove}}) + legacy_io.delete_many({"_id": {"$in": repre_ids_to_remove}}) for rep in instance.data["representations"]: self.log.debug("__ rep: {}".format(rep)) - io.insert_many(representations) + legacy_io.insert_many(representations) instance.data["published_representations"] = ( published_representations ) @@ -761,7 +762,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): def get_subset(self, asset, instance): subset_name = instance.data["subset"] - subset = io.find_one({ + subset = legacy_io.find_one({ "type": "subset", "parent": asset["_id"], "name": subset_name @@ -782,7 +783,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): if _family not in families: families.append(_family) - _id = io.insert_one({ + _id = legacy_io.insert_one({ "schema": "openpype:subset-3.0", "type": "subset", "name": subset_name, @@ -792,7 +793,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "parent": asset["_id"] }).inserted_id - subset = io.find_one({"_id": _id}) + subset = legacy_io.find_one({"_id": _id}) # QUESTION Why is changing of group and updating it's # families in 'get_subset'? @@ -801,7 +802,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # Update families on subset. 
families = [instance.data["family"]] families.extend(instance.data.get("families", [])) - io.update_many( + legacy_io.update_many( {"type": "subset", "_id": ObjectId(subset["_id"])}, {"$set": {"data.families": families}} ) @@ -825,7 +826,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): subset_group = self._get_subset_group(instance) if subset_group: - io.update_many({ + legacy_io.update_many({ 'type': 'subset', '_id': ObjectId(subset_id) }, {'$set': {'data.subsetGroup': subset_group}}) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 28a93efb9a..5d6fc561ea 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -8,7 +8,7 @@ import six import pyblish.api from bson.objectid import ObjectId -from avalon import api, io +from openpype.pipeline import legacy_io class IntegrateThumbnails(pyblish.api.InstancePlugin): @@ -38,7 +38,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] anatomy = instance.context.data["anatomy"] if "publish" not in anatomy.templates: @@ -66,11 +66,11 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): ) return - io.install() + legacy_io.install() thumbnail_template = anatomy.templates["publish"]["thumbnail"] - version = io.find_one({"_id": thumb_repre["parent"]}) + version = legacy_io.find_one({"_id": thumb_repre["parent"]}) if not version: raise AssertionError( "There does not exist version with id {}".format( @@ -137,12 +137,12 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): } } # Create thumbnail entity - io.insert_one(thumbnail_entity) + legacy_io.insert_one(thumbnail_entity) self.log.debug( "Creating entity in database {}".format(str(thumbnail_entity)) ) # Set thumbnail id for version - io.update_many( + legacy_io.update_many( {"_id": version["_id"]}, {"$set": {"data.thumbnail_id": thumbnail_id}} ) @@ -151,7 +151,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin): )) asset_entity = instance.data["assetEntity"] - io.update_many( + legacy_io.update_many( {"_id": asset_entity["_id"]}, {"$set": {"data.thumbnail_id": thumbnail_id}} ) diff --git a/openpype/plugins/publish/validate_editorial_asset_name.py b/openpype/plugins/publish/validate_editorial_asset_name.py index 4a65f3c64a..f9cdaebf0c 100644 --- a/openpype/plugins/publish/validate_editorial_asset_name.py +++ b/openpype/plugins/publish/validate_editorial_asset_name.py @@ -1,7 +1,9 @@ -import pyblish.api -from avalon import io from pprint import pformat +import pyblish.api + +from openpype.pipeline import legacy_io + class ValidateEditorialAssetName(pyblish.api.ContextPlugin): """ Validating if editorial's asset names are not already created in db. 
@@ -24,10 +26,10 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin): asset_and_parents = self.get_parents(context) self.log.debug("__ asset_and_parents: {}".format(asset_and_parents)) - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() - db_assets = list(io.find( + db_assets = list(legacy_io.find( {"type": "asset"}, {"name": 1, "data.parents": 1})) self.log.debug("__ db_assets: {}".format(db_assets)) From e75170d5c6fe99ccd4a58a8dafd5602bf73c3f9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:35:27 +0200 Subject: [PATCH 129/244] replace avalon imports in modules --- .../publish/submit_aftereffects_deadline.py | 5 ++-- .../publish/submit_harmony_deadline.py | 4 ++-- .../publish/submit_houdini_remote_publish.py | 10 ++++---- .../publish/submit_houdini_render_deadline.py | 8 +++---- .../plugins/publish/submit_maya_deadline.py | 4 ++-- .../plugins/publish/submit_nuke_deadline.py | 6 ++--- .../plugins/publish/submit_publish_job.py | 23 ++++++++++--------- .../ftrack/event_handlers_user/action_rv.py | 21 +++++++++-------- .../plugins/publish/collect_ftrack_api.py | 9 ++++---- .../plugins/publish/collect_ftrack_family.py | 6 ++--- .../publish/integrate_hierarchy_ftrack.py | 9 ++++---- .../publish/collect_sequences_from_job.py | 9 +++++--- .../plugins/publish/collect_slack_family.py | 4 ++-- 13 files changed, 63 insertions(+), 55 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index c499c14d40..ba79e1ed4d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -3,10 +3,9 @@ import attr import getpass import pyblish.api -from avalon import api - from openpype.lib import env_value_to_bool from openpype.lib.delivery import collect_frames +from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -89,7 +88,7 @@ class AfterEffectsSubmitDeadline( keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) for key in keys: val = environment.get(key) if val: diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 918efb6630..dda7f7f3aa 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -8,8 +8,8 @@ import re import attr import pyblish.api -from avalon import api +from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo @@ -282,7 +282,7 @@ class HarmonySubmitDeadline( keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) for key in keys: val = environment.get(key) if val: diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index c683eb68a8..f834ae7e92 100644 --- 
a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -4,10 +4,10 @@ import json import requests import hou -from avalon import api, io - import pyblish.api +from openpype.pipeline import legacy_io + class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): """Submit Houdini scene to perform a local publish in Deadline. @@ -35,7 +35,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): ), "Errors found, aborting integration.." # Deadline connection - AVALON_DEADLINE = api.Session.get( + AVALON_DEADLINE = legacy_io.Session.get( "AVALON_DEADLINE", "http://localhost:8082" ) assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" @@ -55,7 +55,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): scenename = os.path.basename(scene) # Get project code - project = io.find_one({"type": "project"}) + project = legacy_io.find_one({"type": "project"}) code = project["data"].get("code", project["name"]) job_name = "{scene} [PUBLISH]".format(scene=scenename) @@ -137,7 +137,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): environment = dict( {key: os.environ[key] for key in keys if key in os.environ}, - **api.Session + **legacy_io.Session ) environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances) diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 59aeb68b79..b94ad24397 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -3,12 +3,12 @@ import json import getpass import requests -from avalon import api - import pyblish.api import hou +from openpype.pipeline import legacy_io + class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): """Submit Solaris USD Render ROPs to Deadline. 
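
Each Deadline submitter touched by this commit rebuilds the render job environment the same way: a whitelist of variables is copied from os.environ and merged with the publish session, which now comes from legacy_io. A condensed sketch of that merge; the helper and the simplified payload keys are for illustration only:

    import os

    from openpype.pipeline import legacy_io


    def build_job_environment(keys):
        """Hypothetical helper showing the environment merge used above."""
        # Only whitelisted variables that actually exist are forwarded and
        # the session values take precedence on conflicting keys.
        environment = dict(
            {key: os.environ[key] for key in keys if key in os.environ},
            **legacy_io.Session
        )
        # Deadline expects indexed EnvironmentKeyValue entries in the job info.
        return {
            "EnvironmentKeyValue{}".format(index): "{}={}".format(key, value)
            for index, (key, value) in enumerate(environment.items())
        }
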
@@ -106,7 +106,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( @@ -140,7 +140,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): def submit(self, instance, payload): - AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + AVALON_DEADLINE = legacy_io.Session.get("AVALON_DEADLINE", "http://localhost:8082") assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 34147712bc..37bdaede1c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -32,10 +32,10 @@ import requests from maya import cmds -from avalon import api import pyblish.api from openpype.hosts.maya.api import lib +from openpype.pipeline import legacy_io # Documentation for keys available at: # https://docs.thinkboxsoftware.com @@ -488,7 +488,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): keys.append("OPENPYPE_MONGO") environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True) # to recognize job from PYPE for turning Event On/Off diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 9b5800c33f..942d442c25 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -4,10 +4,10 @@ import json import getpass import requests - -from avalon import api import pyblish.api + import nuke +from openpype.pipeline import legacy_io class NukeSubmitDeadline(pyblish.api.InstancePlugin): @@ -266,7 +266,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): keys += self.env_allowed_keys environment = dict({key: os.environ[key] for key in keys - if key in os.environ}, **api.Session) + if key in os.environ}, **legacy_io.Session) for _path in os.environ: if _path.lower().startswith('openpype_'): diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 3c4e0d2913..78e05d80fc 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -7,13 +7,14 @@ import re from copy import copy, deepcopy import requests import clique -import openpype.api - -from avalon import api, io import pyblish.api -from openpype.pipeline import get_representation_path +import openpype.api +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) def get_resources(version, extension=None): @@ -22,7 +23,7 @@ def get_resources(version, extension=None): if extension: query["name"] = extension - representation = io.find_one(query) + representation = legacy_io.find_one(query) assert representation, "This is a bug" directory = get_representation_path(representation) @@ -221,9 +222,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): 
self._create_metadata_path(instance) environment = job["Props"].get("Env", {}) - environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"] - environment["AVALON_ASSET"] = io.Session["AVALON_ASSET"] - environment["AVALON_TASK"] = io.Session["AVALON_TASK"] + environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] + environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] + environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] @@ -663,7 +664,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if hasattr(instance, "_log"): data['_log'] = instance._log - asset = data.get("asset") or api.Session["AVALON_ASSET"] + asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"] subset = data.get("subset") start = instance.data.get("frameStart") @@ -955,7 +956,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "intent": context.data.get("intent"), "comment": context.data.get("comment"), "job": render_job or None, - "session": api.Session.copy(), + "session": legacy_io.Session.copy(), "instances": instances } @@ -1063,7 +1064,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: # solve deprecated situation when `folder` key is not underneath # `publish` anatomy - project_name = api.Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] self.log.warning(( "Deprecation warning: Anatomy does not have set `folder`" " key underneath `publish` (in global of for project `{}`)." diff --git a/openpype/modules/ftrack/event_handlers_user/action_rv.py b/openpype/modules/ftrack/event_handlers_user/action_rv.py index bdb0eaf250..040ca75582 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_rv.py +++ b/openpype/modules/ftrack/event_handlers_user/action_rv.py @@ -4,8 +4,11 @@ import traceback import json import ftrack_api -from avalon import io, api -from openpype.pipeline import get_representation_path + +from openpype.pipeline import ( + get_representation_path, + legacy_io, +) from openpype_modules.ftrack.lib import BaseAction, statics_icon @@ -253,8 +256,8 @@ class RVAction(BaseAction): )["version"]["asset"]["parent"]["link"][0] project = session.get(link["type"], link["id"]) os.environ["AVALON_PROJECT"] = project["name"] - api.Session["AVALON_PROJECT"] = project["name"] - io.install() + legacy_io.Session["AVALON_PROJECT"] = project["name"] + legacy_io.install() location = ftrack_api.Session().pick_location() @@ -278,22 +281,22 @@ class RVAction(BaseAction): if online_source: continue - asset = io.find_one({"type": "asset", "name": parent_name}) - subset = io.find_one( + asset = legacy_io.find_one({"type": "asset", "name": parent_name}) + subset = legacy_io.find_one( { "type": "subset", "name": component["version"]["asset"]["name"], "parent": asset["_id"] } ) - version = io.find_one( + version = legacy_io.find_one( { "type": "version", "name": component["version"]["version"], "parent": subset["_id"] } ) - representation = io.find_one( + representation = legacy_io.find_one( { "type": "representation", "parent": version["_id"], @@ -301,7 +304,7 @@ class RVAction(BaseAction): } ) if representation is None: - representation = io.find_one( + representation = legacy_io.find_one( { "type": "representation", "parent": version["_id"], diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py 
b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py index 436a61cc18..14da188150 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_api.py @@ -1,6 +1,7 @@ import logging import pyblish.api -import avalon.api + +from openpype.pipeline import legacy_io class CollectFtrackApi(pyblish.api.ContextPlugin): @@ -23,9 +24,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): self.log.debug("Ftrack user: \"{0}\"".format(session.api_user)) # Collect task - project_name = avalon.api.Session["AVALON_PROJECT"] - asset_name = avalon.api.Session["AVALON_ASSET"] - task_name = avalon.api.Session["AVALON_TASK"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] # Find project entity project_query = 'Project where full_name is "{0}"'.format(project_name) diff --git a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py index 95987fe42e..820390b1f0 100644 --- a/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py +++ b/openpype/modules/ftrack/plugins/publish/collect_ftrack_family.py @@ -6,8 +6,8 @@ Provides: instance -> families ([]) """ import pyblish.api -import avalon.api +from openpype.pipeline import legacy_io from openpype.lib.plugin_tools import filter_profiles @@ -35,8 +35,8 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin): return task_name = instance.data.get("task", - avalon.api.Session["AVALON_TASK"]) - host_name = avalon.api.Session["AVALON_APP"] + legacy_io.Session["AVALON_TASK"]) + host_name = legacy_io.Session["AVALON_APP"] family = instance.data["family"] filtering_criteria = { diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 61892240d7..cf90c11b65 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -2,7 +2,8 @@ import sys import collections import six import pyblish.api -from avalon import io + +from openpype.pipeline import legacy_io # Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" @@ -80,8 +81,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): auto_sync_state = project[ "custom_attributes"][CUST_ATTR_AUTO_SYNC] - if not io.Session: - io.install() + if not legacy_io.Session: + legacy_io.install() self.ft_project = None @@ -271,7 +272,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # Create new links. 
for input in entity_data.get("inputs", []): - input_id = io.find_one({"_id": input})["data"]["ftrackId"] + input_id = legacy_io.find_one({"_id": input})["data"]["ftrackId"] assetbuild = self.session.get("AssetBuild", input_id) self.log.debug( "Creating link from {0} to {1}".format( diff --git a/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py b/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py index 4d216c1c0a..65af90e8a6 100644 --- a/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py +++ b/openpype/modules/royalrender/plugins/publish/collect_sequences_from_job.py @@ -7,7 +7,8 @@ import json from pprint import pformat import pyblish.api -from avalon import api + +from openpype.pipeline import legacy_io def collect(root, @@ -127,7 +128,7 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): session = metadata.get("session") if session: self.log.info("setting session using metadata") - api.Session.update(session) + legacy_io.Session.update(session) os.environ.update(session) else: @@ -187,7 +188,9 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin): "family": families[0], # backwards compatibility / pyblish "families": list(families), "subset": subset, - "asset": data.get("asset", api.Session["AVALON_ASSET"]), + "asset": data.get( + "asset", legacy_io.Session["AVALON_ASSET"] + ), "stagingDir": root, "frameStart": start, "frameEnd": end, diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 7475bdc89e..39b05937dc 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -1,7 +1,7 @@ -from avalon import io import pyblish.api from openpype.lib.profiles_filtering import filter_profiles +from openpype.pipeline import legacy_io class CollectSlackFamilies(pyblish.api.InstancePlugin): @@ -18,7 +18,7 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None def process(self, instance): - task_name = io.Session.get("AVALON_TASK") + task_name = legacy_io.Session.get("AVALON_TASK") family = self.main_family_from_instance(instance) key_values = { "families": family, From 3772e1d68cec4399f94d01a148ba5177bbb7b021 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:36:53 +0200 Subject: [PATCH 130/244] replace avalon imports in pipeline --- openpype/pipeline/__init__.py | 13 ++++++------ openpype/pipeline/context_tools.py | 15 +++++++------- openpype/pipeline/create/context.py | 12 +++++------ openpype/pipeline/load/utils.py | 32 ++++++++++++++--------------- openpype/pipeline/thumbnail.py | 4 ++-- 5 files changed, 36 insertions(+), 40 deletions(-) diff --git a/openpype/pipeline/__init__.py b/openpype/pipeline/__init__.py index 2c35ea2d57..e67b21105c 100644 --- a/openpype/pipeline/__init__.py +++ b/openpype/pipeline/__init__.py @@ -3,6 +3,10 @@ from .constants import ( HOST_WORKFILE_EXTENSIONS, ) +from .mongodb import ( + AvalonMongoDB, +) + from .create import ( BaseCreator, Creator, @@ -85,16 +89,13 @@ from .context_tools import ( install = install_host uninstall = uninstall_host -from .mongodb import ( - AvalonMongoDB, -) - __all__ = ( "AVALON_CONTAINER_ID", "HOST_WORKFILE_EXTENSIONS", - "attribute_definitions", + # --- MongoDB --- + "AvalonMongoDB", # --- Create --- "BaseCreator", @@ -174,6 +175,4 @@ __all__ = ( # Backwards compatible function names "install", "uninstall", - - "AvalonMongoDB", ) diff 
--git a/openpype/pipeline/context_tools.py b/openpype/pipeline/context_tools.py index 1bef260ec9..06bd639776 100644 --- a/openpype/pipeline/context_tools.py +++ b/openpype/pipeline/context_tools.py @@ -11,8 +11,6 @@ import platform import pyblish.api from pyblish.lib import MessageHandler -from avalon import io, Session - import openpype from openpype.modules import load_modules from openpype.settings import get_project_settings @@ -24,6 +22,7 @@ from openpype.lib import ( ) from . import ( + legacy_io, register_loader_plugin_path, register_inventory_action, register_creator_plugin_path, @@ -57,7 +56,7 @@ def registered_root(): if root: return root - root = Session.get("AVALON_PROJECTS") + root = legacy_io.Session.get("AVALON_PROJECTS") if root: return os.path.normpath(root) return "" @@ -74,20 +73,20 @@ def install_host(host): _is_installed = True - io.install() + legacy_io.install() missing = list() for key in ("AVALON_PROJECT", "AVALON_ASSET"): - if key not in Session: + if key not in legacy_io.Session: missing.append(key) assert not missing, ( "%s missing from environment, %s" % ( ", ".join(missing), - json.dumps(Session, indent=4, sort_keys=True) + json.dumps(legacy_io.Session, indent=4, sort_keys=True) )) - project_name = Session["AVALON_PROJECT"] + project_name = legacy_io.Session["AVALON_PROJECT"] log.info("Activating %s.." % project_name) # Optional host install function @@ -170,7 +169,7 @@ def uninstall_host(): deregister_host() - io.uninstall() + legacy_io.uninstall() log.info("Successfully uninstalled Avalon!") diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 584752e38a..6f862e0588 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -6,13 +6,13 @@ import inspect from uuid import uuid4 from contextlib import contextmanager +from openpype.pipeline import legacy_io from openpype.pipeline.mongodb import ( AvalonMongoDB, session_data_from_environment, ) from .creator_plugins import ( - BaseCreator, Creator, AutoCreator, discover_creator_plugins, @@ -773,12 +773,11 @@ class CreateContext: """Give ability to reset avalon context. Reset is based on optional host implementation of `get_current_context` - function or using `avalon.api.Session`. + function or using `legacy_io.Session`. Some hosts have ability to change context file without using workfiles tool but that change is not propagated to """ - import avalon.api project_name = asset_name = task_name = None if hasattr(self.host, "get_current_context"): @@ -789,11 +788,11 @@ class CreateContext: task_name = host_context.get("task_name") if not project_name: - project_name = avalon.api.Session.get("AVALON_PROJECT") + project_name = legacy_io.Session.get("AVALON_PROJECT") if not asset_name: - asset_name = avalon.api.Session.get("AVALON_ASSET") + asset_name = legacy_io.Session.get("AVALON_ASSET") if not task_name: - task_name = avalon.api.Session.get("AVALON_TASK") + task_name = legacy_io.Session.get("AVALON_TASK") if project_name: self.dbcon.Session["AVALON_PROJECT"] = project_name @@ -808,7 +807,6 @@ class CreateContext: Reloads creators from preregistered paths and can load publish plugins if it's enabled on context. 
""" - import avalon.api import pyblish.logic from openpype.pipeline import OpenPypePyblishPluginMixin diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index ca04f79ae6..99e5d11f82 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -9,11 +9,11 @@ import numbers import six from bson.objectid import ObjectId -from avalon import io -from avalon.api import Session - from openpype.lib import Anatomy -from openpype.pipeline import schema +from openpype.pipeline import ( + schema, + legacy_io, +) log = logging.getLogger(__name__) @@ -60,7 +60,7 @@ def get_repres_contexts(representation_ids, dbcon=None): """ if not dbcon: - dbcon = io + dbcon = legacy_io contexts = {} if not representation_ids: @@ -167,7 +167,7 @@ def get_subset_contexts(subset_ids, dbcon=None): dict: The full representation context by representation id. """ if not dbcon: - dbcon = io + dbcon = legacy_io contexts = {} if not subset_ids: @@ -230,10 +230,10 @@ def get_representation_context(representation): assert representation is not None, "This is a bug" if isinstance(representation, (six.string_types, ObjectId)): - representation = io.find_one( + representation = legacy_io.find_one( {"_id": ObjectId(str(representation))}) - version, subset, asset, project = io.parenthood(representation) + version, subset, asset, project = legacy_io.parenthood(representation) assert all([representation, version, subset, asset, project]), ( "This is a bug" @@ -405,17 +405,17 @@ def update_container(container, version=-1): """Update a container""" # Compute the different version from 'representation' - current_representation = io.find_one({ + current_representation = legacy_io.find_one({ "_id": ObjectId(container["representation"]) }) assert current_representation is not None, "This is a bug" - current_version, subset, asset, project = io.parenthood( + current_version, subset, asset, project = legacy_io.parenthood( current_representation) if version == -1: - new_version = io.find_one({ + new_version = legacy_io.find_one({ "type": "version", "parent": subset["_id"] }, sort=[("name", -1)]) @@ -431,11 +431,11 @@ def update_container(container, version=-1): "type": "version", "name": version } - new_version = io.find_one(version_query) + new_version = legacy_io.find_one(version_query) assert new_version is not None, "This is a bug" - new_representation = io.find_one({ + new_representation = legacy_io.find_one({ "type": "representation", "parent": new_version["_id"], "name": current_representation["name"] @@ -482,7 +482,7 @@ def switch_container(container, representation, loader_plugin=None): )) # Get the new representation to switch to - new_representation = io.find_one({ + new_representation = legacy_io.find_one({ "type": "representation", "_id": representation["_id"], }) @@ -501,7 +501,7 @@ def get_representation_path_from_context(context): representation = context['representation'] project_doc = context.get("project") root = None - session_project = Session.get("AVALON_PROJECT") + session_project = legacy_io.Session.get("AVALON_PROJECT") if project_doc and project_doc["name"] != session_project: anatomy = Anatomy(project_doc["name"]) root = anatomy.roots @@ -530,7 +530,7 @@ def get_representation_path(representation, root=None, dbcon=None): from openpype.lib import StringTemplate, TemplateUnsolved if dbcon is None: - dbcon = io + dbcon = legacy_io if root is None: from openpype.pipeline import registered_root diff --git a/openpype/pipeline/thumbnail.py b/openpype/pipeline/thumbnail.py 
index c09dab70eb..ec97b36954 100644 --- a/openpype/pipeline/thumbnail.py +++ b/openpype/pipeline/thumbnail.py @@ -2,6 +2,7 @@ import os import copy import logging +from . import legacy_io from .plugin_discover import ( discover, register_plugin, @@ -17,8 +18,7 @@ def get_thumbnail_binary(thumbnail_entity, thumbnail_type, dbcon=None): resolvers = discover_thumbnail_resolvers() resolvers = sorted(resolvers, key=lambda cls: cls.priority) if dbcon is None: - from avalon import io - dbcon = io + dbcon = legacy_io for Resolver in resolvers: available_types = Resolver.thumbnail_types From eacfaa7f11d0d96bd1c037c2bdbc16a1d0e62dae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:37:03 +0200 Subject: [PATCH 131/244] added missing legacy_io --- openpype/pipeline/legacy_io.py | 146 +++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 openpype/pipeline/legacy_io.py diff --git a/openpype/pipeline/legacy_io.py b/openpype/pipeline/legacy_io.py new file mode 100644 index 0000000000..c41406b208 --- /dev/null +++ b/openpype/pipeline/legacy_io.py @@ -0,0 +1,146 @@ +"""Wrapper around interactions with the database""" + +import sys +import logging +import functools + +from . import schema +from .mongodb import AvalonMongoDB, session_data_from_environment + +module = sys.modules[__name__] + +Session = {} +_is_installed = False +_connection_object = AvalonMongoDB(Session) +_mongo_client = None +_database = database = None + +log = logging.getLogger(__name__) + + +def install(): + """Establish a persistent connection to the database""" + if module._is_installed: + return + + session = session_data_from_environment(context_keys=True) + + session["schema"] = "openpype:session-2.0" + try: + schema.validate(session) + except schema.ValidationError as e: + # TODO(marcus): Make this mandatory + log.warning(e) + + _connection_object.Session.update(session) + _connection_object.install() + + module._mongo_client = _connection_object.mongo_client + module._database = module.database = _connection_object.database + + module._is_installed = True + + +def uninstall(): + """Close any connection to the database""" + module._mongo_client = None + module._database = module.database = None + module._is_installed = False + try: + module._connection_object.uninstall() + except AttributeError: + pass + + +def requires_install(func): + @functools.wraps(func) + def decorated(*args, **kwargs): + if not module._is_installed: + install() + return func(*args, **kwargs) + return decorated + + +@requires_install +def projects(*args, **kwargs): + return _connection_object.projects(*args, **kwargs) + + +@requires_install +def insert_one(doc, *args, **kwargs): + return _connection_object.insert_one(doc, *args, **kwargs) + + +@requires_install +def insert_many(docs, *args, **kwargs): + return _connection_object.insert_many(docs, *args, **kwargs) + + +@requires_install +def update_one(*args, **kwargs): + return _connection_object.update_one(*args, **kwargs) + + +@requires_install +def update_many(*args, **kwargs): + return _connection_object.update_many(*args, **kwargs) + + +@requires_install +def replace_one(*args, **kwargs): + return _connection_object.replace_one(*args, **kwargs) + + +@requires_install +def replace_many(*args, **kwargs): + return _connection_object.replace_many(*args, **kwargs) + + +@requires_install +def delete_one(*args, **kwargs): + return _connection_object.delete_one(*args, **kwargs) + + +@requires_install +def delete_many(*args, **kwargs): + return 
_connection_object.delete_many(*args, **kwargs) + + +@requires_install +def find(*args, **kwargs): + return _connection_object.find(*args, **kwargs) + + +@requires_install +def find_one(*args, **kwargs): + return _connection_object.find_one(*args, **kwargs) + + +@requires_install +def distinct(*args, **kwargs): + return _connection_object.distinct(*args, **kwargs) + + +@requires_install +def aggregate(*args, **kwargs): + return _connection_object.aggregate(*args, **kwargs) + + +@requires_install +def save(*args, **kwargs): + return _connection_object.save(*args, **kwargs) + + +@requires_install +def drop(*args, **kwargs): + return _connection_object.drop(*args, **kwargs) + + +@requires_install +def parenthood(*args, **kwargs): + return _connection_object.parenthood(*args, **kwargs) + + +@requires_install +def bulk_write(*args, **kwargs): + return _connection_object.bulk_write(*args, **kwargs) From b334a4251b36f986b16c49b52629831c8a81747d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 12:53:18 +0200 Subject: [PATCH 132/244] replace avalon imports on remaining places --- .../clockify/launcher_actions/ClockifyStart.py | 9 +++++---- .../clockify/launcher_actions/ClockifySync.py | 11 ++++++----- openpype/scripts/fusion_switch_shot.py | 12 ++++++------ 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/openpype/modules/clockify/launcher_actions/ClockifyStart.py b/openpype/modules/clockify/launcher_actions/ClockifyStart.py index 6428d5e7aa..4669f98b01 100644 --- a/openpype/modules/clockify/launcher_actions/ClockifyStart.py +++ b/openpype/modules/clockify/launcher_actions/ClockifyStart.py @@ -1,7 +1,8 @@ -from avalon import io - from openpype.api import Logger -from openpype.pipeline import LauncherAction +from openpype.pipeline import ( + legacy_io, + LauncherAction, +) from openpype_modules.clockify.clockify_api import ClockifyAPI @@ -28,7 +29,7 @@ class ClockifyStart(LauncherAction): task_name = session['AVALON_TASK'] description = asset_name - asset = io.find_one({ + asset = legacy_io.find_one({ 'type': 'asset', 'name': asset_name }) diff --git a/openpype/modules/clockify/launcher_actions/ClockifySync.py b/openpype/modules/clockify/launcher_actions/ClockifySync.py index 3c81e2766c..356bbd0306 100644 --- a/openpype/modules/clockify/launcher_actions/ClockifySync.py +++ b/openpype/modules/clockify/launcher_actions/ClockifySync.py @@ -1,8 +1,9 @@ -from avalon import io - from openpype_modules.clockify.clockify_api import ClockifyAPI from openpype.api import Logger -from openpype.pipeline import LauncherAction +from openpype.pipeline import ( + legacy_io, + LauncherAction, +) log = Logger.get_logger(__name__) @@ -25,10 +26,10 @@ class ClockifySync(LauncherAction): projects_to_sync = [] if project_name.strip() == '' or project_name is None: - for project in io.projects(): + for project in legacy_io.projects(): projects_to_sync.append(project) else: - project = io.find_one({'type': 'project'}) + project = legacy_io.find_one({'type': 'project'}) projects_to_sync.append(project) projects_info = {} diff --git a/openpype/scripts/fusion_switch_shot.py b/openpype/scripts/fusion_switch_shot.py index 3ba150902e..245fc665f0 100644 --- a/openpype/scripts/fusion_switch_shot.py +++ b/openpype/scripts/fusion_switch_shot.py @@ -4,7 +4,6 @@ import sys import logging # Pipeline imports -from avalon import io from openpype.hosts.fusion import api import openpype.hosts.fusion.api.lib as fusion_lib @@ -13,6 +12,7 @@ from openpype.lib import version_up from openpype.pipeline import ( 
install_host, registered_host, + legacy_io, ) from openpype.lib.avalon_context import get_workdir_from_session @@ -131,7 +131,7 @@ def update_frame_range(comp, representations): """ version_ids = [r["parent"] for r in representations] - versions = io.find({"type": "version", "_id": {"$in": version_ids}}) + versions = legacy_io.find({"type": "version", "_id": {"$in": version_ids}}) versions = list(versions) start = min(v["data"]["frameStart"] for v in versions) @@ -162,13 +162,13 @@ def switch(asset_name, filepath=None, new=True): # Assert asset name exists # It is better to do this here then to wait till switch_shot does it - asset = io.find_one({"type": "asset", "name": asset_name}) + asset = legacy_io.find_one({"type": "asset", "name": asset_name}) assert asset, "Could not find '%s' in the database" % asset_name # Get current project - self._project = io.find_one({ + self._project = legacy_io.find_one({ "type": "project", - "name": io.Session["AVALON_PROJECT"] + "name": legacy_io.Session["AVALON_PROJECT"] }) # Go to comp @@ -198,7 +198,7 @@ def switch(asset_name, filepath=None, new=True): current_comp.Print(message) # Build the session to switch to - switch_to_session = io.Session.copy() + switch_to_session = legacy_io.Session.copy() switch_to_session["AVALON_ASSET"] = asset['name'] if new: From 14fbabd4771508d5088bfe30f9cd4cc46f64fd1d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 13:37:15 +0200 Subject: [PATCH 133/244] OP-3021 - refactor CreateWriteRender as a base for other write creators There are 3 creators which contained almost same code. CreateWriteRender was chosen as a base implementation, which other 2 could inherit and modify necessary parts --- .../plugins/create/create_write_render.py | 35 +++++++++++++------ 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index a9c4b5341e..52edd85e41 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -97,9 +97,28 @@ class CreateWriteRender(plugin.OpenPypeCreator): else: self.log.info("Adding template path from plugin") write_data.update({ - "fpath_template": ("{work}/renders/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) + "fpath_template": + ("{work}/{}s/nuke/{subset}".format(self.family) + + "/{subset}.{frame}.{ext}")}) + write_node = self._create_write_node(selected_node, + inputs, outputs, + write_data) + + # relinking to collected connections + for i, input in enumerate(inputs): + write_node.setInput(i, input) + + write_node.autoplace() + + for output in outputs: + output.setInput(0, write_node) + + write_node = self._modify_write_node(write_node) + + return write_node + + def _create_write_node(self, selected_node, inputs, outputs, write_data): # add reformat node to cut off all outside of format bounding box # get width and height try: @@ -126,13 +145,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): input=selected_node, prenodes=_prenodes) - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - return write_node + + def _modify_write_node(self, write_node): + return write_node \ No newline at end of file From 0b423fc6b5964a4ab4f749564ebbea7b3706516c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 13:57:28 +0200 Subject: [PATCH 134/244] 
OP-3021 - refactor CreateWriteRender as a base for other write creators There are 3 creators which contained almost same code. CreateWriteRender was chosen as a base implementation, which other 2 could inherit and modify necessary parts --- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 52edd85e41..4c494a58be 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -17,7 +17,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): defaults = ["Main", "Mask"] def __init__(self, *args, **kwargs): - super(CreateWriteRender, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) data = OrderedDict() From 87a99d6d029b1c5ac88063616abfc74629395bc9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 16:36:21 +0200 Subject: [PATCH 135/244] removed AVALON_MONGO --- openpype/pipeline/mongodb.py | 3 --- openpype/tools/loader/__main__.py | 1 - 2 files changed, 4 deletions(-) diff --git a/openpype/pipeline/mongodb.py b/openpype/pipeline/mongodb.py index 9efd231bb2..565e26b966 100644 --- a/openpype/pipeline/mongodb.py +++ b/openpype/pipeline/mongodb.py @@ -95,9 +95,6 @@ def session_data_from_environment(context_keys=False): # Used during any connections to the outside world ("AVALON_TIMEOUT", "1000"), - # Address to Asset Database - ("AVALON_MONGO", "mongodb://localhost:27017"), - # Name of database used in MongoDB ("AVALON_DB", "avalon"), ): diff --git a/openpype/tools/loader/__main__.py b/openpype/tools/loader/__main__.py index 400a034a76..acf357aa97 100644 --- a/openpype/tools/loader/__main__.py +++ b/openpype/tools/loader/__main__.py @@ -19,7 +19,6 @@ def my_exception_hook(exctype, value, traceback): if __name__ == '__main__': - os.environ["AVALON_MONGO"] = "mongodb://localhost:27017" os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017" os.environ["AVALON_DB"] = "avalon" os.environ["AVALON_TIMEOUT"] = "1000" From b0da2a07f80110d92dde21eb8e7e3da667f7d0c8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 14 Apr 2022 16:39:44 +0200 Subject: [PATCH 136/244] removed avalon-core submodule --- .gitmodules | 3 --- repos/avalon-core | 1 - 2 files changed, 4 deletions(-) delete mode 160000 repos/avalon-core diff --git a/.gitmodules b/.gitmodules index 9920ceaad6..e69de29bb2 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +0,0 @@ -[submodule "repos/avalon-core"] - path = repos/avalon-core - url = https://github.com/pypeclub/avalon-core.git \ No newline at end of file diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 2fa14cea6f..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From 6bf92ef0f6dee5d6475e169df73294d41c62befa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 17:20:50 +0200 Subject: [PATCH 137/244] OP-3021 - refactor CreateWritePrerender --- openpype/hosts/nuke/plugins/__init__.py | 0 .../hosts/nuke/plugins/create/__init__.py | 0 .../plugins/create/create_write_prerender.py | 96 +------------------ .../plugins/create/create_write_render.py | 2 +- 4 files changed, 6 insertions(+), 92 deletions(-) create mode 100644 openpype/hosts/nuke/plugins/__init__.py create mode 100644 openpype/hosts/nuke/plugins/create/__init__.py diff --git 
a/openpype/hosts/nuke/plugins/__init__.py b/openpype/hosts/nuke/plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/plugins/create/__init__.py b/openpype/hosts/nuke/plugins/create/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 761439fdb2..e9309d8170 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,12 +1,10 @@ -from collections import OrderedDict - import nuke -from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWritePrerender(plugin.OpenPypeCreator): +class CreateWritePrerender(create_write_render.CreateWriteRender): # change this to template preset name = "WritePrerender" label = "Create Write Prerender" @@ -19,85 +17,7 @@ class CreateWritePrerender(plugin.OpenPypeCreator): def __init__(self, *args, **kwargs): super(CreateWritePrerender, self).__init__(*args, **kwargs) - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) - - def process(self): - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. The node " - "you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - - if len(nodes) == 0: - msg = ( - "No nodes selected. 
Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - if self.presets.get('fpath_template'): - self.log.info("Adding template path from preset") - write_data.update( - {"fpath_template": self.presets["fpath_template"]} - ) - else: - self.log.info("Adding template path from plugin") - write_data.update({ - "fpath_template": ("{work}/prerenders/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) - - self.log.info("write_data: {}".format(write_data)) + def _create_write_node(self, selected_node, inputs, outputs, write_data): reviewable = self.presets.get("reviewable") write_node = create_write_node( self.data["subset"], @@ -107,15 +27,9 @@ class CreateWritePrerender(plugin.OpenPypeCreator): review=reviewable, linked_knobs=["channels", "___", "first", "last", "use_limit"]) - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) + return write_node + def _modify_write_node(self, write_node): # open group node write_node.begin() for n in nuke.allNodes(): diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 4c494a58be..52edd85e41 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -17,7 +17,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): defaults = ["Main", "Mask"] def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + super(CreateWriteRender, self).__init__(*args, **kwargs) data = OrderedDict() From 4d1345f2e4790b876eed9e4a55f547ff67c450c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Apr 2022 17:39:10 +0200 Subject: [PATCH 138/244] OP-3021 - fix template value Cannot use format function --- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 52edd85e41..8204c6420d 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -98,7 +98,7 @@ class CreateWriteRender(plugin.OpenPypeCreator): self.log.info("Adding template path from plugin") write_data.update({ "fpath_template": - ("{work}/{}s/nuke/{subset}".format(self.family) + + ("{work}/" + self.family + "s/nuke/{subset}" "/{subset}.{frame}.{ext}")}) write_node = self._create_write_node(selected_node, From dc88fb91679521c581bc4e8e16a5c6a23a81868e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Apr 2022 10:22:14 +0000 Subject: [PATCH 139/244] 
Bump async from 2.6.3 to 2.6.4 in /website Bumps [async](https://github.com/caolan/async) from 2.6.3 to 2.6.4. - [Release notes](https://github.com/caolan/async/releases) - [Changelog](https://github.com/caolan/async/blob/v2.6.4/CHANGELOG.md) - [Commits](https://github.com/caolan/async/compare/v2.6.3...v2.6.4) --- updated-dependencies: - dependency-name: async dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index e01f0c4ef2..04b9dd658b 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2311,9 +2311,9 @@ asap@~2.0.3: integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= async@^2.6.2: - version "2.6.3" - resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" - integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== + version "2.6.4" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221" + integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA== dependencies: lodash "^4.17.14" From d87e70b3c90490e160d314373bf45da8c255fd93 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Apr 2022 15:21:05 +0200 Subject: [PATCH 140/244] OP-3103 - added plugin to parse batch file to PS Plugin in webpublisher folder doesn't get triggered in PS processing. --- .../plugins/publish/collect_batch_data.py | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 openpype/hosts/photoshop/plugins/publish/collect_batch_data.py diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py new file mode 100644 index 0000000000..5e6e916611 --- /dev/null +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -0,0 +1,73 @@ +"""Parses batch context from json and continues in publish process. + +Provides: + context -> Loaded batch file. + - asset + - task (task name) + - taskType + - project_name + - variant + +Code is practically copy of `openype/hosts/webpublish/collect_batch_data` as +webpublisher should be eventually ejected as an addon, eg. mentioned plugin +shouldn't be pushed into general publish plugins. +""" + +import os + +import pyblish.api +from avalon import io +from openpype.lib.plugin_tools import ( + parse_json, + get_batch_asset_task_info +) + + +class CollectBatchData(pyblish.api.ContextPlugin): + """Collect batch data from json stored in 'OPENPYPE_PUBLISH_DATA' env dir. + + The directory must contain 'manifest.json' file where batch data should be + stored. + """ + # must be really early, context values are only in json file + order = pyblish.api.CollectorOrder - 0.495 + label = "Collect batch data" + hosts = ["photoshop"] + targets = ["remotepublish"] + + def process(self, context): + self.log.info("CollectBatchData") + batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") + + assert batch_dir, ( + "Missing `OPENPYPE_PUBLISH_DATA`") + + assert os.path.exists(batch_dir), \ + "Folder {} doesn't exist".format(batch_dir) + + project_name = os.environ.get("AVALON_PROJECT") + if project_name is None: + raise AssertionError( + "Environment `AVALON_PROJECT` was not found." + "Could not set project `root` which may cause issues." 
+ ) + + batch_data = parse_json(os.path.join(batch_dir, "manifest.json")) + + context.data["batchDir"] = batch_dir + context.data["batchData"] = batch_data + + asset_name, task_name, task_type = get_batch_asset_task_info( + batch_data["context"] + ) + + os.environ["AVALON_ASSET"] = asset_name + io.Session["AVALON_ASSET"] = asset_name + os.environ["AVALON_TASK"] = task_name + io.Session["AVALON_TASK"] = task_name + + context.data["asset"] = asset_name + context.data["task"] = task_name + context.data["taskType"] = task_type + context.data["project_name"] = project_name + context.data["variant"] = batch_data["variant"] From 40d426ed00abdc558797f193e5b2ae66b9b67a1b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Apr 2022 15:22:29 +0200 Subject: [PATCH 141/244] OP-3103 - fixes hosts, update variant location --- .../webpublisher/plugins/publish/collect_batch_data.py | 10 ++++++++-- .../plugins/publish/collect_published_files.py | 5 +++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index ca14538d7d..c9ba903007 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -1,7 +1,12 @@ -"""Loads batch context from json and continues in publish process. +"""Parses batch context from json and continues in publish process. Provides: context -> Loaded batch file. + - asset + - task (task name) + - taskType + - project_name + - variant """ import os @@ -24,7 +29,7 @@ class CollectBatchData(pyblish.api.ContextPlugin): # must be really early, context values are only in json file order = pyblish.api.CollectorOrder - 0.495 label = "Collect batch data" - host = ["webpublisher"] + hosts = ["webpublisher"] def process(self, context): batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") @@ -60,6 +65,7 @@ class CollectBatchData(pyblish.api.ContextPlugin): context.data["task"] = task_name context.data["taskType"] = task_type context.data["project_name"] = project_name + context.data["variant"] = batch_data["variant"] self._set_ctx_path(batch_data) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 8edaf4f67b..65db9d7e2e 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -40,7 +40,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): # must be really early, context values are only in json file order = pyblish.api.CollectorOrder - 0.490 label = "Collect rendered frames" - host = ["webpublisher"] + hosts = ["webpublisher"] targets = ["filespublish"] # from Settings @@ -61,6 +61,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): task_name = context.data["task"] task_type = context.data["taskType"] project_name = context.data["project_name"] + variant = context.data["variant"] for task_dir in task_subfolders: task_data = parse_json(os.path.join(task_dir, "manifest.json")) @@ -76,7 +77,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): extension.replace(".", '')) subset_name = get_subset_name_with_asset_doc( - family, task_data["variant"], task_name, asset_doc, + family, variant, task_name, asset_doc, project_name=project_name, host_name="webpublisher" ) version = self._get_last_version(asset_name, subset_name) + 1 From 
df2f7f7c590064a6d3d91d4c4593e20cc141cb2d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Apr 2022 15:24:09 +0200 Subject: [PATCH 142/244] OP-3103 - use variant from context for PS --- .../publish/collect_color_coded_instances.py | 25 +++---------------- .../plugins/publish/collect_instances.py | 3 ++- .../plugins/publish/collect_review.py | 2 +- 3 files changed, 7 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index 7d44d55a80..122428eea0 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -4,7 +4,6 @@ import re import pyblish.api from openpype.lib import prepare_template_data -from openpype.lib.plugin_tools import parse_json, get_batch_asset_task_info from openpype.hosts.photoshop import api as photoshop @@ -46,7 +45,10 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): existing_subset_names = self._get_existing_subset_names(context) - asset_name, task_name, variant = self._parse_batch(batch_dir) + # from CollectBatchData + asset_name = context.data["asset"] + task_name = context.data["task"] + variant = context.data["variant"] stub = photoshop.stub() layers = stub.get_layers() @@ -130,25 +132,6 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): return existing_subset_names - def _parse_batch(self, batch_dir): - """Parses asset_name, task_name, variant from batch manifest.""" - task_data = None - if batch_dir and os.path.exists(batch_dir): - task_data = parse_json(os.path.join(batch_dir, - "manifest.json")) - if not task_data: - raise ValueError( - "Cannot parse batch meta in {} folder".format(batch_dir)) - variant = task_data["variant"] - - asset, task_name, task_type = get_batch_asset_task_info( - task_data["context"]) - - if not task_name: - task_name = task_type - - return asset, task_name, variant - def _create_instance(self, context, layer, family, asset, subset, task_name): instance = context.create_instance(layer.name) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_instances.py index 6198ed0156..9f95441e6f 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_instances.py @@ -82,8 +82,9 @@ class CollectInstances(pyblish.api.ContextPlugin): task_name = api.Session["AVALON_TASK"] asset_name = context.data["assetEntity"]["name"] + variant = context.data.get("variant") or variants[0] fill_pairs = { - "variant": variants[0], + "variant": variant, "family": family, "task": task_name } diff --git a/openpype/hosts/photoshop/plugins/publish/collect_review.py b/openpype/hosts/photoshop/plugins/publish/collect_review.py index f3842b9ee5..89432553c5 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_review.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_review.py @@ -16,7 +16,7 @@ class CollectReview(pyblish.api.ContextPlugin): family = "review" subset = get_subset_name_with_asset_doc( family, - "", + context.data.get("variant", ''), context.data["anatomyData"]["task"]["name"], context.data["assetEntity"], context.data["anatomyData"]["project"]["name"], From 094729c3f86db58dd6f28cff75173933f7603f07 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Apr 2022 17:28:08 +0200 Subject: [PATCH 143/244] OP-3021 - refactored 
still image creator Not tested yet as it is not working in regular develop either. --- .../nuke/plugins/create/create_write_still.py | 90 ++----------------- 1 file changed, 6 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index 0037b64ce3..3361bc2602 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,12 +1,10 @@ -from collections import OrderedDict - import nuke -from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node +from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWriteStill(plugin.OpenPypeCreator): +class CreateWriteStill(create_write_render.CreateWriteRender): # change this to template preset name = "WriteStillFrame" label = "Create Write Still Image" @@ -23,77 +21,8 @@ class CreateWriteStill(plugin.OpenPypeCreator): def __init__(self, *args, **kwargs): super(CreateWriteStill, self).__init__(*args, **kwargs) - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) - - def process(self): - - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. " - "The node you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - return - - if len(nodes) == 0: - msg = ( - "No nodes selected. 
Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - return - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - self.log.info("Adding template path from plugin") + def _create_write_node(self, selected_node, inputs, outputs, write_data): + # explicitly reset template to 'renders', not same as other 2 writes write_data.update({ "fpath_template": ( "{work}/renders/nuke/{subset}/{subset}.{ext}")}) @@ -118,16 +47,9 @@ class CreateWriteStill(plugin.OpenPypeCreator): farm=False, linked_knobs=["channels", "___", "first", "last", "use_limit"]) - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) + return write_node - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - - # link frame hold to group node + def _modify_write_node(self, write_node): write_node.begin() for n in nuke.allNodes(): # get write node From 8680e841787aa3f51392a9a57859fe482923219b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:05:48 +0200 Subject: [PATCH 144/244] removed usage of AVALON_SCHEMA --- .../hosts/maya/plugins/publish/submit_maya_muster.py | 2 -- .../deadline/plugins/publish/submit_nuke_deadline.py | 1 - openpype/pipeline/schema.py | 9 +++------ 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index 43a01fe542..c4250a20bd 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -488,7 +488,6 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "MAYA_RENDER_DESC_PATH", "MAYA_MODULE_PATH", "ARNOLD_PLUGIN_PATH", - "AVALON_SCHEMA", "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", @@ -547,4 +546,3 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): "%f=%d was rounded off to nearest integer" % (value, int(value)) ) - diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index ed0041b153..94c703d66d 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -242,7 +242,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): keys = [ "PYTHONPATH", "PATH", - "AVALON_SCHEMA", "AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK", diff --git a/openpype/pipeline/schema.py b/openpype/pipeline/schema.py index 26d987b8f3..7e96bfe1b1 100644 --- a/openpype/pipeline/schema.py +++ b/openpype/pipeline/schema.py @@ -118,15 +118,12 @@ _cache = { def _precache(): + """Store available schemas in-memory for reduced disk access""" global _CACHED - if os.environ.get('AVALON_SCHEMA'): - schema_dir = os.environ['AVALON_SCHEMA'] - else: - current_dir = 
os.path.dirname(os.path.abspath(__file__)) - schema_dir = os.path.join(current_dir, "schema") + repos_root = os.environ["OPENPYPE_REPOS_ROOT"] + schema_dir = os.path.join(repos_root, "schema") - """Store available schemas in-memory for reduced disk access""" for schema in os.listdir(schema_dir): if schema.startswith(("_", ".")): continue From 3babf06542241dadf38b3fe32e3a1b457e781b56 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:17:05 +0200 Subject: [PATCH 145/244] changed how and if are repos added to sys path --- start.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/start.py b/start.py index f8a01dd9ab..c7dc251f5f 100644 --- a/start.py +++ b/start.py @@ -320,6 +320,7 @@ def run_disk_mapping_commands(settings): destination)) raise + def set_avalon_environments(): """Set avalon specific environments. @@ -838,8 +839,14 @@ def _bootstrap_from_code(use_version, use_staging): version_path = Path(_openpype_root) os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root - repos = os.listdir(os.path.join(_openpype_root, "repos")) - repos = [os.path.join(_openpype_root, "repos", repo) for repo in repos] + repos = [] + # Check for "openpype/repos" directory for sumodules + # NOTE: Is not used at this moment but can be re-used in future + repos_dir = os.path.join(_openpype_root, "repos") + if os.path.exists(repos_dir): + for name in os.listdir(repos_dir): + repos.append(os.path.join(repos_dir, name)) + # add self to python paths repos.insert(0, _openpype_root) for repo in repos: From 89eeb4b31b5c7ad661237ad68d68f0c1fd92be76 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:17:24 +0200 Subject: [PATCH 146/244] don't set all environments --- start.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/start.py b/start.py index c7dc251f5f..35a14a059e 100644 --- a/start.py +++ b/start.py @@ -328,28 +328,12 @@ def set_avalon_environments(): before avalon module is imported because avalon works with globals set with environment variables. 
""" - from openpype import PACKAGE_DIR - # Path to OpenPype's schema - schema_path = os.path.join( - os.path.dirname(PACKAGE_DIR), - "schema" - ) - # Avalon mongo URL - avalon_mongo_url = ( - os.environ.get("AVALON_MONGO") - or os.environ["OPENPYPE_MONGO"] - ) avalon_db = os.environ.get("AVALON_DB") or "avalon" # for tests os.environ.update({ - # Mongo url (use same as OpenPype has) - "AVALON_MONGO": avalon_mongo_url, - - "AVALON_SCHEMA": schema_path, # Mongo DB name where avalon docs are stored "AVALON_DB": avalon_db, # Name of config - "AVALON_CONFIG": "openpype", "AVALON_LABEL": "OpenPype" }) From 15d59e1f00fdf30aa2f7fd292a2a723f838ae9e3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:45:55 +0200 Subject: [PATCH 147/244] use 'OPENPYPE_LOG_LEVEL' if available instead of 'OPENPYPE_DEBUG' --- openpype/lib/log.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index f33385e0ba..51afac6d8d 100644 --- a/openpype/lib/log.py +++ b/openpype/lib/log.py @@ -216,8 +216,8 @@ class PypeLogger: # Collection name under database in Mongo log_collection_name = "logs" - # OPENPYPE_DEBUG - pype_debug = 0 + # Logging level - OPENPYPE_LOG_LEVEL + log_level = None # Data same for all record documents process_data = None @@ -231,10 +231,7 @@ class PypeLogger: logger = logging.getLogger(name or "__main__") - if cls.pype_debug > 0: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) + logger.setLevel(cls.log_level) add_mongo_handler = cls.use_mongo_logging add_console_handler = True @@ -357,8 +354,16 @@ class PypeLogger: # Store result to class definition cls.use_mongo_logging = use_mongo_logging - # Define if is in OPENPYPE_DEBUG mode - cls.pype_debug = int(os.getenv("OPENPYPE_DEBUG") or "0") + # Define what is logging level + log_level = os.getenv("OPENPYPE_LOG_LEVEL") + if not log_level: + # Check OPENPYPE_DEBUG for backwards compatibility + op_debug = os.getenv("OPENPYPE_DEBUG") + if op_debug and int(op_debug) > 0: + log_level = 10 + else: + log_level = 20 + cls.log_level = int(log_level) if not os.environ.get("OPENPYPE_MONGO"): cls.use_mongo_logging = False From 9ff8f3011b6f6e8eae0336fdb0e829b5d3d0a5ce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:49:54 +0200 Subject: [PATCH 148/244] added global verbose argument which can change log level --- start.py | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/start.py b/start.py index f8a01dd9ab..c61d72dabf 100644 --- a/start.py +++ b/start.py @@ -191,6 +191,46 @@ else: if os.getenv("OPENPYPE_HEADLESS_MODE") != "1": os.environ.pop("OPENPYPE_HEADLESS_MODE", None) +# Enabled logging debug mode when "--debug" is passed +if "--verbose" in sys.argv: + expected_values = ( + "Expected: notset, debug, info, warning, error, critical" + " or integer [0-50]." + ) + idx = sys.argv.index("--verbose") + sys.argv.pop(idx) + if idx < len(sys.argv): + value = sys.argv.pop(idx) + else: + raise RuntimeError(( + "Expect value after \"--verbose\" argument. 
{}" + ).format(expected_values)) + + log_level = None + low_value = value.lower() + if low_value.isdigit(): + log_level = int(low_value) + elif low_value == "notset": + log_level = 0 + elif low_value == "debug": + log_level = 10 + elif low_value == "info": + log_level = 20 + elif low_value == "warning": + log_level = 30 + elif low_value == "error": + log_level = 40 + elif low_value == "critical": + log_level = 50 + + if log_level is None: + raise RuntimeError(( + "Unexpected value after \"--verbose\" argument \"{}\". {}" + ).format(value, expected_values)) + + os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) + + import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 from igniter.tools import ( From d71baa839006c68360401c40b5573a6f65a93d25 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:50:11 +0200 Subject: [PATCH 149/244] added debug argument to global sys argv handling --- start.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/start.py b/start.py index c61d72dabf..541a28f6c9 100644 --- a/start.py +++ b/start.py @@ -230,6 +230,11 @@ if "--verbose" in sys.argv: os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level) +# Enable debug mode, may affect log level if log level is not defined +if "--debug" in sys.argv: + sys.argv.remove("--debug") + os.environ["OPENPYPE_DEBUG"] = "1" + import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 From 4c495d0aa6b256a8d81ed00bdd62455d7840d54d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:50:40 +0200 Subject: [PATCH 150/244] added settings which defines if logs are added to mongo --- openpype/settings/defaults/system_settings/general.json | 1 + .../entities/schemas/system_schema/schema_general.json | 5 +++++ openpype/settings/handlers.py | 1 + 3 files changed, 7 insertions(+) diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index e1785f8709..a06947ba77 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -7,6 +7,7 @@ "global": [] } }, + "log_to_server": true, "disk_mapping": { "windows": [], "linux": [], diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index fcab4cd5d8..0090c54386 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -40,6 +40,11 @@ { "type": "splitter" }, + { + "type": "boolean", + "key": "log_to_server", + "label": "Log to mongo" + }, { "type": "dict", "key": "disk_mapping", diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py index 2109b53b09..af54946d5e 100644 --- a/openpype/settings/handlers.py +++ b/openpype/settings/handlers.py @@ -324,6 +324,7 @@ class MongoSettingsHandler(SettingsHandler): global_general_keys = ( "openpype_path", "admin_password", + "log_to_server", "disk_mapping", "production_version", "staging_version" From a118c1e98014477bc008dd73b5c58a858bb33aca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 19 Apr 2022 18:51:05 +0200 Subject: [PATCH 151/244] check global settings on start if logs should be send to mongo --- openpype/lib/log.py | 3 +++ start.py | 10 ++++++++++ 2 files changed, 13 insertions(+) diff --git a/openpype/lib/log.py b/openpype/lib/log.py index 51afac6d8d..2cdb7ec8e4 100644 --- a/openpype/lib/log.py +++ 
b/openpype/lib/log.py @@ -330,6 +330,9 @@ class PypeLogger: # Define if should logging to mongo be used use_mongo_logging = bool(log4mongo is not None) + if use_mongo_logging: + use_mongo_logging = os.environ.get("OPENPYPE_LOG_TO_SERVER") == "1" + # Set mongo id for process (ONLY ONCE) if use_mongo_logging and cls.mongo_process_id is None: try: diff --git a/start.py b/start.py index 541a28f6c9..0c4cfb90ea 100644 --- a/start.py +++ b/start.py @@ -972,6 +972,16 @@ def boot(): _print(">>> run disk mapping command ...") run_disk_mapping_commands(global_settings) + # Logging to server enabled/disabled + log_to_server = global_settings.get("log_to_server", True) + if log_to_server: + os.environ["OPENPYPE_LOG_TO_SERVER"] = "1" + log_to_server_msg = "ON" + else: + os.environ.pop("OPENPYPE_LOG_TO_SERVER", None) + log_to_server_msg = "OFF" + _print(f">>> Logging to server is turned {log_to_server_msg}") + # Get openpype path from database and set it to environment so openpype can # find its versions there and bootstrap them. openpype_path = get_openpype_path_from_settings(global_settings) From e93fb7f4f3d72586da4e673c28b18881478e61ef Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 11:45:00 +0200 Subject: [PATCH 152/244] Removed obsolete openpype.install --- tests/lib/testing_classes.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 0a9da1aca8..7dfbf6fd0d 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -273,8 +273,6 @@ class PublishTest(ModuleUnitTest): ) os.environ["AVALON_SCHEMA"] = schema_path - import openpype - openpype.install() os.environ["OPENPYPE_EXECUTABLE"] = sys.executable from openpype.lib import ApplicationManager From 76d50cf6aad40d0e066ea2510d127c27fe9f7edd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 11:45:39 +0200 Subject: [PATCH 153/244] Fixed unwanted pop Counted without type. 
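The helper in DBAssert builds a Mongo filter that still contains the "type" key, but the old
code popped "type" out of that filter before calling count_documents(), so the assertion
counted matching documents regardless of their type. A rough sketch of the ordering fix
(illustrative only, simplified from the real helper; assumes dbcon is the pymongo collection
wrapper and args the filter dict built above):

    # old order - "type" is removed before the query runs
    args.pop("type")
    no_of_docs = dbcon.count_documents(args)   # counts across all types

    # fixed order - count first, then strip "type" only for the readable detail message
    no_of_docs = dbcon.count_documents(args)
    args.pop("type")
    detail_str = " with '{}'".format(args) if args else " "
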
--- tests/lib/assert_classes.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/lib/assert_classes.py b/tests/lib/assert_classes.py index 7f4d8efc10..9a94f89fd0 100644 --- a/tests/lib/assert_classes.py +++ b/tests/lib/assert_classes.py @@ -24,13 +24,14 @@ class DBAssert: else: args[key] = val + no_of_docs = dbcon.count_documents(args) + + msg = None args.pop("type") detail_str = " " if args: detail_str = " with '{}'".format(args) - msg = None - no_of_docs = dbcon.count_documents(args) if expected != no_of_docs: msg = "Not expected no of '{}'{}."\ "Expected {}, found {}".format(queried_type, From fc4f7ed5d93b5bdf8c8738bdc7264bb1e7ae627a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 12:09:22 +0200 Subject: [PATCH 154/244] Update openpype/modules/sync_server/sync_server_module.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index af69e645d5..fb81791da2 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1423,7 +1423,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site - Throws: + Raises: SiteAlreadyPresentError - if adding already existing site and not 'force' ValueError - other errors (repre not found, misconfiguration) From eb1f72fccd192d5e6d6b326d4fac18206e454aa9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 12:29:02 +0200 Subject: [PATCH 155/244] OP-2951 - refactored to use AvalonMongoDB --- openpype/lib/avalon_context.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 7f35694e58..35ccb1b68d 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1990,13 +1990,14 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, Returns: (list) of ObjectId - linked representations """ + # Create new dbcon if not passed and use passed project name if not dbcon: - log.debug("Using `avalon.io` for query.") - dbcon = avalon.io - # Make sure is installed - dbcon.install() - - dbcon.Session["AVALON_PROJECT"] = project_name + from avalon.api import AvalonMongoDB + dbcon = AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = project_name + # Validate that passed dbcon has same project + elif dbcon.Session["AVALON_PROJECT"] != project_name: + raise ValueError("Passed connection does not have right project") if not isinstance(repre_ids, list): repre_ids = [repre_ids] From c86f62e2d2afc7302b2765c4c750c6c94bbed941 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 13:25:34 +0200 Subject: [PATCH 156/244] ignore missing repos folder --- igniter/bootstrap_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index ad49f868d5..2e47f549d7 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1097,7 +1097,7 @@ class BootstrapRepos: sys.path.insert(0, directory.as_posix()) directory /= "repos" if not directory.exists() and not directory.is_dir(): - raise ValueError("directory is invalid") + return roots = [] for item in directory.iterdir(): From 
b9b199c61de0ef07d1b9fb340ea28db56f152545 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 13:26:28 +0200 Subject: [PATCH 157/244] ignore repos dir in include files if not available --- setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bf42602b52..dc6c003ed6 100644 --- a/setup.py +++ b/setup.py @@ -123,12 +123,15 @@ bin_includes = [ include_files = [ "igniter", "openpype", - "repos", "schema", "LICENSE", "README.md" ] +repos_path = openpype_root / "repos" +if repos_path.exists(): + include_files.append("repos") + if IS_WINDOWS: install_requires.extend([ # `pywin32` packages From 0dd46fe51329b15dd6a73e68534904d11d194587 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 14:40:22 +0200 Subject: [PATCH 158/244] added new function convert_input_paths_for_ffmpeg which converts list of input paths to output dir keeping the source filenames --- openpype/lib/transcoding.py | 122 ++++++++++++++++++++++++++++++++++++ 1 file changed, 122 insertions(+) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index c2fecf6628..f379f81dec 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -516,6 +516,128 @@ def convert_for_ffmpeg( run_subprocess(oiio_cmd, logger=logger) +def convert_input_paths_for_ffmpeg( + input_paths, + output_dir, + logger=None +): + """Contert source file to format supported in ffmpeg. + + Currently can convert only exrs. The input filepaths should be files + with same type. Information about input is loaded only from first found + file. + + Filenames of input files are kept so make sure that output directory + is not the same directory as input files have. + + Args: + input_paths (str): Paths that should be converted. It is expected that + contains single file or image sequence of samy type. + output_dir (str): Path to directory where output will be rendered. + Must not be same as input's directory. + logger (logging.Logger): Logger used for logging. + + Raises: + ValueError: If input filepath has extension not supported by function. + Currently is supported only ".exr" extension. + """ + if logger is None: + logger = logging.getLogger(__name__) + + first_input_path = input_paths[0] + ext = os.path.splitext(first_input_path)[1].lower() + if ext != ".exr": + raise ValueError(( + "Function 'convert_for_ffmpeg' currently support only" + " \".exr\" extension. Got \"{}\"." + ).format(ext)) + + input_info = get_oiio_info_for_input(first_input_path) + + # Change compression only if source compression is "dwaa" or "dwab" + # - they're not supported in ffmpeg + compression = input_info["attribs"].get("compression") + if compression in ("dwaa", "dwab"): + compression = "none" + + # Collect channels to export + channel_names = input_info["channelnames"] + review_channels = get_convert_rgb_channels(channel_names) + if review_channels is None: + raise ValueError( + "Couldn't find channels that can be used for conversion." 
+ ) + + red, green, blue, alpha = review_channels + input_channels = [red, green, blue] + channels_arg = "R={},G={},B={}".format(red, green, blue) + if alpha is not None: + channels_arg += ",A={}".format(alpha) + input_channels.append(alpha) + input_channels_str = ",".join(input_channels) + + for input_path in input_paths: + # Prepare subprocess arguments + oiio_cmd = [ + get_oiio_tools_path(), + + # Don't add any additional attributes + "--nosoftwareattrib", + ] + # Add input compression if available + if compression: + oiio_cmd.extend(["--compression", compression]) + + oiio_cmd.extend([ + # Tell oiiotool which channels should be loaded + # - other channels are not loaded to memory so helps to + # avoid memory leak issues + "-i:ch={}".format(input_channels_str), input_path, + # Tell oiiotool which channels should be put to top stack + # (and output) + "--ch", channels_arg + ]) + + for attr_name, attr_value in input_info["attribs"].items(): + if not isinstance(attr_value, str): + continue + + # Remove attributes that have string value longer than allowed length + # for ffmpeg or when containt unallowed symbols + erase_reason = "Missing reason" + erase_attribute = False + if len(attr_value) > MAX_FFMPEG_STRING_LEN: + erase_reason = "has too long value ({} chars).".format( + len(attr_value) + ) + + if erase_attribute: + for char in NOT_ALLOWED_FFMPEG_CHARS: + if char in attr_value: + erase_attribute = True + erase_reason = ( + "contains unsupported character \"{}\"." + ).format(char) + break + + if erase_attribute: + # Set attribute to empty string + logger.info(( + "Removed attribute \"{}\" from metadata because {}." + ).format(attr_name, erase_reason)) + oiio_cmd.extend(["--eraseattrib", attr_name]) + + # Add last argument - path to output + base_filename = os.path.basename(first_input_path) + output_path = os.path.join(output_dir, base_filename) + oiio_cmd.extend([ + "-o", output_path + ]) + + logger.debug("Conversion command: {}".format(" ".join(oiio_cmd))) + run_subprocess(oiio_cmd, logger=logger) + + # FFMPEG functions def get_ffprobe_data(path_to_file, logger=None): """Load data about entered filepath via ffprobe. 
From 91d2eb7355a2934e1afeb2371cb9bb97b501f7ae Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 14:41:03 +0200 Subject: [PATCH 159/244] replace convert_for_ffmpeg with new function --- .../plugins/publish/extract_thumbnail.py | 8 +++----- openpype/lib/__init__.py | 2 ++ openpype/plugins/publish/extract_burnin.py | 13 ++++++++----- openpype/plugins/publish/extract_jpeg_exr.py | 8 +++----- openpype/plugins/publish/extract_review.py | 18 +++++++++--------- 5 files changed, 25 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py b/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py index cb6ed8481c..a56521891b 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_thumbnail.py @@ -8,7 +8,7 @@ from openpype.lib import ( run_subprocess, get_transcode_temp_directory, - convert_for_ffmpeg, + convert_input_paths_for_ffmpeg, should_convert_for_ffmpeg ) @@ -59,11 +59,9 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): if do_convert: convert_dir = get_transcode_temp_directory() filename = os.path.basename(full_input_path) - convert_for_ffmpeg( - full_input_path, + convert_input_paths_for_ffmpeg( + [full_input_path], convert_dir, - None, - None, self.log ) full_input_path = os.path.join(convert_dir, filename) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index b57e469f5b..29719b63bd 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -105,6 +105,7 @@ from .transcoding import ( get_transcode_temp_directory, should_convert_for_ffmpeg, convert_for_ffmpeg, + convert_input_paths_for_ffmpeg, get_ffprobe_data, get_ffprobe_streams, get_ffmpeg_codec_args, @@ -276,6 +277,7 @@ __all__ = [ "get_transcode_temp_directory", "should_convert_for_ffmpeg", "convert_for_ffmpeg", + "convert_input_paths_for_ffmpeg", "get_ffprobe_data", "get_ffprobe_streams", "get_ffmpeg_codec_args", diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 41c84103a6..544c763b52 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -16,7 +16,7 @@ from openpype.lib import ( run_openpype_process, get_transcode_temp_directory, - convert_for_ffmpeg, + convert_input_paths_for_ffmpeg, should_convert_for_ffmpeg, CREATE_NO_WINDOW @@ -187,8 +187,13 @@ class ExtractBurnin(openpype.api.Extractor): repre_files = repre["files"] if isinstance(repre_files, (tuple, list)): filename = repre_files[0] + src_filepaths = [ + os.path.join(src_repre_staging_dir, filename) + for filename in repre_files + ] else: filename = repre_files + src_filepaths = [os.path.join(src_repre_staging_dir, filename)] first_input_path = os.path.join(src_repre_staging_dir, filename) # Determine if representation requires pre conversion for ffmpeg @@ -209,11 +214,9 @@ class ExtractBurnin(openpype.api.Extractor): new_staging_dir = get_transcode_temp_directory() repre["stagingDir"] = new_staging_dir - convert_for_ffmpeg( - first_input_path, + convert_input_paths_for_ffmpeg( + src_filepaths, new_staging_dir, - _temp_data["frameStart"], - _temp_data["frameEnd"], self.log ) diff --git a/openpype/plugins/publish/extract_jpeg_exr.py b/openpype/plugins/publish/extract_jpeg_exr.py index 468ed96199..d6d6854092 100644 --- a/openpype/plugins/publish/extract_jpeg_exr.py +++ b/openpype/plugins/publish/extract_jpeg_exr.py @@ -8,7 +8,7 @@ from openpype.lib import ( path_to_subprocess_arg, 
get_transcode_temp_directory, - convert_for_ffmpeg, + convert_input_paths_for_ffmpeg, should_convert_for_ffmpeg ) @@ -79,11 +79,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin): if do_convert: convert_dir = get_transcode_temp_directory() filename = os.path.basename(full_input_path) - convert_for_ffmpeg( - full_input_path, + convert_input_paths_for_ffmpeg( + [full_input_path], convert_dir, - None, - None, self.log ) full_input_path = os.path.join(convert_dir, filename) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index d569d82762..f2473839d9 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -18,7 +18,7 @@ from openpype.lib import ( path_to_subprocess_arg, should_convert_for_ffmpeg, - convert_for_ffmpeg, + convert_input_paths_for_ffmpeg, get_transcode_temp_directory ) import speedcopy @@ -194,16 +194,20 @@ class ExtractReview(pyblish.api.InstancePlugin): src_repre_staging_dir = repre["stagingDir"] # Receive filepath to first file in representation first_input_path = None + input_filepaths = [] if not self.input_is_sequence(repre): first_input_path = os.path.join( src_repre_staging_dir, repre["files"] ) + input_filepaths.append(first_input_path) else: for filename in repre["files"]: - first_input_path = os.path.join( + filepath = os.path.join( src_repre_staging_dir, filename ) - break + input_filepaths.append(filepath) + if first_input_path is None: + first_input_path = filepath # Skip if file is not set if first_input_path is None: @@ -230,13 +234,9 @@ class ExtractReview(pyblish.api.InstancePlugin): new_staging_dir = get_transcode_temp_directory() repre["stagingDir"] = new_staging_dir - frame_start = instance.data["frameStart"] - frame_end = instance.data["frameEnd"] - convert_for_ffmpeg( - first_input_path, + convert_input_paths_for_ffmpeg( + input_filepaths, new_staging_dir, - frame_start, - frame_end, self.log ) From d9d772db72f8e085817d7e93b267ea8f70d75399 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 14:41:15 +0200 Subject: [PATCH 160/244] added deprecation warning to convert_to_ffmpeg --- openpype/lib/transcoding.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index f379f81dec..b75ae2baf1 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -409,6 +409,12 @@ def convert_for_ffmpeg( if logger is None: logger = logging.getLogger(__name__) + logger.warning(( + "DEPRECATED: 'openpype.lib.transcoding.convert_for_ffmpeg' is" + " deprecated function of conversion for FFMpeg. Please replace usage" + " with 'openpype.lib.transcoding.convert_input_paths_for_ffmpeg'" + )) + ext = os.path.splitext(first_input_path)[1].lower() if ext != ".exr": raise ValueError(( From 7deed5e74d8124b6f1d52cc45d1c9c06e4e0bd76 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 15:28:15 +0200 Subject: [PATCH 161/244] added few more comments --- openpype/lib/transcoding.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index b75ae2baf1..7cc345fcc0 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -382,6 +382,11 @@ def should_convert_for_ffmpeg(src_filepath): return False +# Deprecated since 2022 4 20 +# - Reason - Doesn't convert sequences right way: Can't handle gaps, reuse +# first frame for all frames and changes filenames when input +# is sequence. 
+# - use 'convert_input_paths_for_ffmpeg' instead def convert_for_ffmpeg( first_input_path, output_dir, @@ -535,6 +540,8 @@ def convert_input_paths_for_ffmpeg( Filenames of input files are kept so make sure that output directory is not the same directory as input files have. + - This way it can handle gaps and can keep input filenames without handling + frame template Args: input_paths (str): Paths that should be converted. It is expected that From 52a1450df2a4c9f6604899add111670aafe7bd79 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 15:28:49 +0200 Subject: [PATCH 162/244] fix line length --- openpype/lib/transcoding.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 7cc345fcc0..cdfe240e68 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -615,8 +615,8 @@ def convert_input_paths_for_ffmpeg( if not isinstance(attr_value, str): continue - # Remove attributes that have string value longer than allowed length - # for ffmpeg or when containt unallowed symbols + # Remove attributes that have string value longer than allowed + # length for ffmpeg or when containt unallowed symbols erase_reason = "Missing reason" erase_attribute = False if len(attr_value) > MAX_FFMPEG_STRING_LEN: From 9de22092cfd516d8ba4a96eb8fc83d750387064e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 16:58:24 +0200 Subject: [PATCH 163/244] OP-3021 - added Suspend publish knob to Nuke Added to Deadline tab of Write node. --- openpype/hosts/nuke/api/lib.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e05c6aecbd..b859454e8f 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1062,6 +1062,14 @@ def add_deadline_tab(node): knob.setValue(0) node.addKnob(knob) + knob = nuke.Text_Knob("divd", '') + knob.setValue('') + node.addKnob(knob) + + knob = nuke.Boolean_Knob("suspend_publish", "Suspend publish") + knob.setValue(False) + node.addKnob(knob) + def get_deadline_knob_names(): return [ From cf362dc50266fe9ae656f3aa5c100711fa8e47a2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 16:58:59 +0200 Subject: [PATCH 164/244] OP-3021 - collect suspend_publish --- openpype/hosts/nuke/plugins/publish/precollect_instances.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 29c706f302..76d402164c 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -69,6 +69,11 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): instance = context.create_instance(subset) instance.append(node) + suspend_publish = False + if "suspend_publish" in node.knobs(): + suspend_publish = node["suspend_publish"].value() + instance.data["suspend_publish"] = suspend_publish + # get review knob value review = False if "review" in node.knobs(): From 2c7b1aab50e191f8dda0d2dcf67bd549d5ed34fd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 16:59:40 +0200 Subject: [PATCH 165/244] OP-3021 - check if suspend_publish exists If exists and set to True, suspend publish job. Artist need to enable it manually. 
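Deadline reads the starting state of a submitted job from the "InitialStatus" key of the
JobInfo payload, so setting it to "Suspended" parks the publish job in the queue until it is
resumed by hand. The gist of the change (sketch only; the real code sits in
ProcessSubmittedJobOnFarm below, and instance/payload are the objects already available there):

    # "suspend_publish" comes from the write node knob collected
    # by precollect_instances.py above
    if instance.data.get("suspend_publish"):
        # job is created but waits in the queue until resumed manually
        payload["JobInfo"]["InitialStatus"] = "Suspended"
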
--- .../modules/deadline/plugins/publish/submit_publish_job.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 4f781de62d..18d01555e5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -284,6 +284,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: payload["JobInfo"]["JobDependency0"] = job["_id"] + self.log.info("suspend {}".format(instance.data.get("suspend_publish"))) + if instance.data.get("suspend_publish"): + payload["JobInfo"]["InitialStatus"] = "Suspended" + index = 0 for key in environment: if key.upper() in self.enviro_filter: From 5e4b292a86ea7221d12776850b9680b47955b23f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 17:51:37 +0200 Subject: [PATCH 166/244] change openpype install to install openpype plugins --- openpype/tools/standalonepublish/publish.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/standalonepublish/publish.py b/openpype/tools/standalonepublish/publish.py index 582e7eccf8..e1e9edebb9 100644 --- a/openpype/tools/standalonepublish/publish.py +++ b/openpype/tools/standalonepublish/publish.py @@ -1,14 +1,14 @@ import os import sys -import openpype import pyblish.api +from openpype.pipeline import install_openpype_plugins from openpype.tools.utils.host_tools import show_publish def main(env): # Registers pype's Global pyblish plugins - openpype.install() + install_openpype_plugins() # Register additional paths addition_paths_str = env.get("PUBLISH_PATHS") or "" From acaa3cad9f20aaa6445917f9ea077b63d1d6921b Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 17:54:35 +0200 Subject: [PATCH 167/244] Fix Houdini FPS + outdated content pop-ups --- openpype/hosts/houdini/api/lib.py | 4 +- openpype/hosts/maya/api/lib.py | 4 +- openpype/hosts/maya/api/pipeline.py | 2 +- openpype/widgets/popup.py | 81 +++++++++++++++-------------- 4 files changed, 47 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..7ee72d0b9f 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -155,7 +155,7 @@ def validate_fps(): if parent is None: pass else: - dialog = popup.Popup(parent=parent) + dialog = popup.PopupUpdateKeys(parent=parent) dialog.setModal(True) dialog.setWindowTitle("Houdini scene does not match project FPS") dialog.setMessage("Scene %i FPS does not match project %i FPS" % @@ -163,7 +163,7 @@ def validate_fps(): dialog.setButtonText("Fix") # on_show is the Fix button clicked callback - dialog.on_clicked.connect(lambda: set_scene_fps(fps)) + dialog.on_clicked_state.connect(lambda: set_scene_fps(fps)) dialog.show() diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 9e99b96477..b5b4f4b7d1 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2210,7 +2210,7 @@ def validate_fps(): parent = get_main_window() - dialog = popup.Popup2(parent=parent) + dialog = popup.PopupUpdateKeys(parent=parent) dialog.setModal(True) dialog.setWindowTitle("Maya scene not in line with project") dialog.setMessage("The FPS is out of sync, please fix") @@ -2218,7 +2218,7 @@ def validate_fps(): # Set new text for button (add optional argument for the popup?) 
toggle = dialog.widgets["toggle"] update = toggle.isChecked() - dialog.on_show.connect(lambda: set_scene_fps(fps, update)) + dialog.on_clicked_state.connect(lambda: set_scene_fps(fps, update)) dialog.show() diff --git a/openpype/hosts/maya/api/pipeline.py b/openpype/hosts/maya/api/pipeline.py index f6f3472eef..d5887adb24 100644 --- a/openpype/hosts/maya/api/pipeline.py +++ b/openpype/hosts/maya/api/pipeline.py @@ -448,7 +448,7 @@ def on_open(): dialog.setWindowTitle("Maya scene has outdated content") dialog.setMessage("There are outdated containers in " "your Maya scene.") - dialog.on_show.connect(_on_show_inventory) + dialog.on_clicked.connect(_on_show_inventory) dialog.show() diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index e661d3d293..6c9e4c1b51 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -3,14 +3,20 @@ import logging import contextlib -from Qt import QtCore, QtWidgets +from avalon.vendor.Qt import QtCore, QtWidgets, QtGui log = logging.getLogger(__name__) class Popup(QtWidgets.QDialog): + """A Popup that moves itself to bottom right of screen on show event. - on_show = QtCore.Signal() + The UI contains a message label and a red highlighted button to "show" + or perform another custom action from this pop-up. + + """ + + on_clicked = QtCore.Signal() def __init__(self, parent=None, *args, **kwargs): super(Popup, self).__init__(parent=parent, *args, **kwargs) @@ -19,32 +25,34 @@ class Popup(QtWidgets.QDialog): # Layout layout = QtWidgets.QHBoxLayout(self) layout.setContentsMargins(10, 5, 10, 10) + + # Increase spacing slightly for readability + layout.setSpacing(10) + message = QtWidgets.QLabel("") message.setStyleSheet(""" QLabel { font-size: 12px; } """) - show = QtWidgets.QPushButton("Show") - show.setSizePolicy(QtWidgets.QSizePolicy.Maximum, + button = QtWidgets.QPushButton("Show") + button.setSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum) - show.setStyleSheet("""QPushButton { background-color: #BB0000 }""") + button.setStyleSheet("""QPushButton { background-color: #BB0000 }""") layout.addWidget(message) - layout.addWidget(show) + layout.addWidget(button) - # Size + # Default size self.resize(400, 40) - geometry = self.calculate_window_geometry() - self.setGeometry(geometry) self.widgets = { "message": message, - "show": show, + "button": button, } # Signals - show.clicked.connect(self._on_show_clicked) + button.clicked.connect(self._on_clicked) # Set default title self.setWindowTitle("Popup") @@ -52,7 +60,10 @@ class Popup(QtWidgets.QDialog): def setMessage(self, message): self.widgets['message'].setText(message) - def _on_show_clicked(self): + def setButtonText(self, text): + self.widgets["button"].setText(text) + + def _on_clicked(self): """Callback for when the 'show' button is clicked. 
Raises the parent (if any) @@ -63,11 +74,19 @@ class Popup(QtWidgets.QDialog): self.close() # Trigger the signal - self.on_show.emit() + self.on_clicked.emit() if parent: parent.raise_() + def showEvent(self, event): + + # Position popup based on contents on show event + geo = self.calculate_window_geometry() + self.setGeometry(geo) + + return super(Popup, self).showEvent(event) + def calculate_window_geometry(self): """Respond to status changes @@ -104,45 +123,29 @@ class Popup(QtWidgets.QDialog): return QtCore.QRect(x, y, width, height) -class Popup2(Popup): +class PopupUpdateKeys(Popup): + """Popup with Update Keys checkbox (intended for Maya)""" - on_show = QtCore.Signal() + on_clicked_state = QtCore.Signal(bool) def __init__(self, parent=None, *args, **kwargs): Popup.__init__(self, parent=parent, *args, **kwargs) layout = self.layout() - # Add toggle + # Insert toggle for Update keys toggle = QtWidgets.QCheckBox("Update Keys") layout.insertWidget(1, toggle) self.widgets["toggle"] = toggle + self.on_clicked.connect(self.emit_click_with_state) + layout.insertStretch(1, 1) - # Update button text - fix = self.widgets["show"] - fix.setText("Fix") - - def calculate_window_geometry(self): - """Respond to status changes - - On creation, align window with screen bottom right. - - """ - parent_widget = self.parent() - - desktop = QtWidgets.QApplication.desktop() - if parent_widget: - screen = desktop.screenNumber(parent_widget) - else: - screen = desktop.screenNumber(desktop.cursor().pos()) - center_point = desktop.screenGeometry(screen).center() - - frame_geo = self.frameGeometry() - frame_geo.moveCenter(center_point) - - return frame_geo + def emit_click_with_state(self): + """Emit the on_clicked_state signal with the toggled state""" + checked = self.widgets["toggle"].isChecked() + self.on_clicked_state.emit(checked) @contextlib.contextmanager From 08afa8b088f1dfa97710a3cd056cf289d1cd57f7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 17:55:43 +0200 Subject: [PATCH 168/244] Remove unused import --- openpype/widgets/popup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index 6c9e4c1b51..670f6d8d8a 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -3,7 +3,7 @@ import logging import contextlib -from avalon.vendor.Qt import QtCore, QtWidgets, QtGui +from avalon.vendor.Qt import QtCore, QtWidgets log = logging.getLogger(__name__) From 98dc1f0a43e2ba7c0eb63190343b312027ac36a8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Apr 2022 18:08:42 +0200 Subject: [PATCH 169/244] OP-3021 - Hound --- openpype/hosts/nuke/plugins/create/create_write_render.py | 2 +- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 8204c6420d..79766929ac 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -148,4 +148,4 @@ class CreateWriteRender(plugin.OpenPypeCreator): return write_node def _modify_write_node(self, write_node): - return write_node \ No newline at end of file + return write_node diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 18d01555e5..715d9a8336 100644 --- 
a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -284,7 +284,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: payload["JobInfo"]["JobDependency0"] = job["_id"] - self.log.info("suspend {}".format(instance.data.get("suspend_publish"))) if instance.data.get("suspend_publish"): payload["JobInfo"]["InitialStatus"] = "Suspended" From fca3645a7afaa3725b81a37a5494f0eb100ec6e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 20 Apr 2022 18:10:05 +0200 Subject: [PATCH 170/244] add support for bgeo and vdb add support for standalone publisher to publish bgeo and vdb sequences --- .../plugins/publish/collect_context.py | 3 ++- .../project_settings/standalonepublisher.json | 11 ++++++++++- .../standalonepublish/widgets/widget_drop_frame.py | 8 ++++++-- .../standalonepublish/widgets/widget_family_desc.py | 1 + 4 files changed, 19 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py index 6913e0836d..aabccc0328 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_context.py @@ -247,7 +247,8 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin): self.log.debug("collecting sequence: {}".format(collections)) instance.data["frameStart"] = int(component["frameStart"]) instance.data["frameEnd"] = int(component["frameEnd"]) - instance.data["fps"] = int(component["fps"]) + if component.get("fps"): + instance.data["fps"] = int(component["fps"]) ext = component["ext"] if ext.startswith("."): diff --git a/openpype/settings/defaults/project_settings/standalonepublisher.json b/openpype/settings/defaults/project_settings/standalonepublisher.json index bc91a5ea8a..e36232d3f7 100644 --- a/openpype/settings/defaults/project_settings/standalonepublisher.json +++ b/openpype/settings/defaults/project_settings/standalonepublisher.json @@ -141,6 +141,14 @@ "defaults": [], "help": "Texture files with Unreal naming convention" }, + "create_vdb": { + "name": "vdb", + "label": "VDB Volumetric Data", + "family": "vdbcache", + "icon": "cloud", + "defaults": [], + "help": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids" + }, "__dynamic_keys_labels__": { "create_workfile": "Workfile", "create_model": "Model", @@ -154,7 +162,8 @@ "create_render": "Render", "create_mov_batch": "Batch Mov", "create_texture_batch": "Batch Texture", - "create_simple_unreal_texture": "Simple Unreal Texture" + "create_simple_unreal_texture": "Simple Unreal Texture", + "create_vdb": "VDB Cache" } }, "publish": { diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index c1c59d65b6..e6c7328e88 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -37,6 +37,10 @@ class DropDataFrame(QtWidgets.QFrame): "video_file": video_extensions } + sequence_types = [ + ".bgeo", ".vdb" + ] + def __init__(self, parent): super().__init__() self.parent_widget = parent @@ -176,7 +180,7 @@ class DropDataFrame(QtWidgets.QFrame): non_collectionable_paths = [] for path in in_paths: ext = os.path.splitext(path)[1] - if ext in self.image_extensions: + 
if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) else: non_collectionable_paths.append(path) @@ -289,7 +293,7 @@ class DropDataFrame(QtWidgets.QFrame): def get_file_data(self, data): filepath = data['files'][0] ext = data['ext'].lower() - output = {} + output = {"fps": None} file_info = None if 'file_info' in data: diff --git a/openpype/tools/standalonepublish/widgets/widget_family_desc.py b/openpype/tools/standalonepublish/widgets/widget_family_desc.py index 79681615b9..2095b332bd 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family_desc.py +++ b/openpype/tools/standalonepublish/widgets/widget_family_desc.py @@ -52,6 +52,7 @@ class FamilyDescriptionWidget(QtWidgets.QWidget): family.setAlignment(QtCore.Qt.AlignBottom | QtCore.Qt.AlignLeft) help = QtWidgets.QLabel("help") + help.setWordWrap(True) help.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft) label_layout.addWidget(family) From c426c8a1566541c99bcafb0c60905373985a0586 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 18:15:11 +0200 Subject: [PATCH 171/244] Remove unused logger --- openpype/widgets/popup.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index 670f6d8d8a..1a975376f8 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -1,12 +1,9 @@ import sys -import logging import contextlib from avalon.vendor.Qt import QtCore, QtWidgets -log = logging.getLogger(__name__) - class Popup(QtWidgets.QDialog): """A Popup that moves itself to bottom right of screen on show event. From 5438decc1d1a169e1139d2a6d67eb62eae32e486 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 18:16:05 +0200 Subject: [PATCH 172/244] Refactor Qt import --- openpype/widgets/popup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/widgets/popup.py b/openpype/widgets/popup.py index 1a975376f8..9fc33ccbb8 100644 --- a/openpype/widgets/popup.py +++ b/openpype/widgets/popup.py @@ -2,7 +2,7 @@ import sys import contextlib -from avalon.vendor.Qt import QtCore, QtWidgets +from Qt import QtCore, QtWidgets class Popup(QtWidgets.QDialog): From 2c9a5998b3b1725023d606ab871bdb11b1128920 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 20 Apr 2022 18:26:37 +0200 Subject: [PATCH 173/244] Improve Maya FPS pop-up message similar to Houdini --- openpype/hosts/maya/api/lib.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index b5b4f4b7d1..801cdb16f4 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2212,8 +2212,10 @@ def validate_fps(): dialog = popup.PopupUpdateKeys(parent=parent) dialog.setModal(True) - dialog.setWindowTitle("Maya scene not in line with project") - dialog.setMessage("The FPS is out of sync, please fix") + dialog.setWindowTitle("Maya scene does not match project FPS") + dialog.setMessage("Scene %i FPS does not match project %i FPS" % + (current_fps, fps)) + dialog.setButtonText("Fix") # Set new text for button (add optional argument for the popup?) 
toggle = dialog.widgets["toggle"] From 5142cd613935e3e6b9c514c4a08371dad2555009 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 20 Apr 2022 19:07:56 +0200 Subject: [PATCH 174/244] fix keeping of filenames --- openpype/lib/transcoding.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index cdfe240e68..fcec5d4216 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -641,7 +641,7 @@ def convert_input_paths_for_ffmpeg( oiio_cmd.extend(["--eraseattrib", attr_name]) # Add last argument - path to output - base_filename = os.path.basename(first_input_path) + base_filename = os.path.basename(input_path) output_path = os.path.join(output_dir, base_filename) oiio_cmd.extend([ "-o", output_path From 4e0a3259ed87971481bb33d8fbd1a077350ddfeb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 10:05:28 +0200 Subject: [PATCH 175/244] query parent and data.parents from asset document --- .../tools/project_manager/project_manager/model.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 1c3ec089f6..b3fd7fa0c7 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -1819,12 +1819,16 @@ class AssetItem(BaseItem): } query_projection = { "_id": 1, - "data.tasks": 1, - "data.visualParent": 1, - "schema": 1, - "name": 1, + "schema": 1, "type": 1, + "parent": 1, + + "data.visualParent": 1, + "data.parents": 1, + + "data.tasks": 1, + "data.frameStart": 1, "data.frameEnd": 1, "data.fps": 1, @@ -1835,7 +1839,7 @@ class AssetItem(BaseItem): "data.clipIn": 1, "data.clipOut": 1, "data.pixelAspect": 1, - "data.tools_env": 1 + "data.tools_env": 1, } def __init__(self, asset_doc): From 589666682c7a71b6d948c6c765fbfb7b433bcf95 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 10:23:17 +0200 Subject: [PATCH 176/244] added info logs --- .../project_manager/project_manager/model.py | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index b3fd7fa0c7..5fd06ef442 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -7,6 +7,11 @@ from pymongo import UpdateOne, DeleteOne from Qt import QtCore, QtGui +from openpype.lib import ( + CURRENT_DOC_SCHEMAS, + PypeLogger, +) + from .constants import ( IDENTIFIER_ROLE, ITEM_TYPE_ROLE, @@ -18,8 +23,6 @@ from .constants import ( ) from .style import ResourceCache -from openpype.lib import CURRENT_DOC_SCHEMAS - class ProjectModel(QtGui.QStandardItemModel): """Load possible projects to modify from MongoDB. 
@@ -185,6 +188,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): for key in self.multiselection_columns } + self._log = None # TODO Reset them on project change self._current_project = None self._root_item = None @@ -194,6 +198,12 @@ class HierarchyModel(QtCore.QAbstractItemModel): self._reset_root_item() + @property + def log(self): + if self._log is None: + self._log = PypeLogger.get_logger("ProjectManagerModel") + return self._log + @property def items_by_id(self): return self._items_by_id @@ -1367,6 +1377,9 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process = collections.deque() to_process.append(project_item) + updated_count = 0 + created_count = 0 + removed_count = 0 bulk_writes = [] while to_process: parent = to_process.popleft() @@ -1378,9 +1391,11 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process.append(item) if item.is_new: + created_count += 1 insert_list.append(item) elif item.data(REMOVED_ROLE): + removed_count += 1 if item.data(HIERARCHY_CHANGE_ABLE_ROLE): bulk_writes.append(DeleteOne( {"_id": item.asset_id} @@ -1394,6 +1409,7 @@ class HierarchyModel(QtCore.QAbstractItemModel): else: update_data = item.update_data() if update_data: + updated_count += 1 bulk_writes.append(UpdateOne( {"_id": item.asset_id}, update_data @@ -1408,8 +1424,15 @@ class HierarchyModel(QtCore.QAbstractItemModel): for idx, mongo_id in enumerate(result.inserted_ids): insert_list[idx].mongo_id = mongo_id - if bulk_writes: - project_col.bulk_write(bulk_writes) + if not bulk_writes: + self.log.info("Nothing has changed") + return + + project_col.bulk_write(bulk_writes) + self.log.info(( + "Save finished." + " Created {} | Updated {} | Removed {} asset documents" + ).format(created_count, updated_count, removed_count)) self.refresh_project() From 7d2dc0b0ea0835c3ccde25cb41ba095811581982 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 11:34:53 +0200 Subject: [PATCH 177/244] fixed changes check --- .../tools/project_manager/project_manager/model.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 5fd06ef442..871704e13c 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -1377,8 +1377,8 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process = collections.deque() to_process.append(project_item) - updated_count = 0 created_count = 0 + updated_count = 0 removed_count = 0 bulk_writes = [] while to_process: @@ -1391,7 +1391,6 @@ class HierarchyModel(QtCore.QAbstractItemModel): to_process.append(item) if item.is_new: - created_count += 1 insert_list.append(item) elif item.data(REMOVED_ROLE): @@ -1422,13 +1421,16 @@ class HierarchyModel(QtCore.QAbstractItemModel): result = project_col.insert_many(new_docs) for idx, mongo_id in enumerate(result.inserted_ids): + created_count += 1 insert_list[idx].mongo_id = mongo_id - if not bulk_writes: + if sum([created_count, updated_count, removed_count]) == 0: self.log.info("Nothing has changed") return - project_col.bulk_write(bulk_writes) + if bulk_writes: + project_col.bulk_write(bulk_writes) + self.log.info(( "Save finished." 
" Created {} | Updated {} | Removed {} asset documents" From 1b3026bdc7f0e73ba1cac50a655119e1fa30e86f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 21 Apr 2022 12:05:24 +0200 Subject: [PATCH 178/244] replaced prints with logs --- openpype/modules/ftrack/ftrack_server/lib.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index f8319b67d4..bc595430fc 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -31,10 +31,13 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status" TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result" -def check_ftrack_url(url, log_errors=True): +def check_ftrack_url(url, log_errors=True, logger=None): """Checks if Ftrack server is responding""" + if logger is None: + logger = Logger.get_logger(__name__) + if not url: - print('ERROR: Ftrack URL is not set!') + logger.error("Ftrack URL is not set!") return None url = url.strip('/ ') @@ -48,15 +51,15 @@ def check_ftrack_url(url, log_errors=True): result = requests.get(url, allow_redirects=False) except requests.exceptions.RequestException: if log_errors: - print('ERROR: Entered Ftrack URL is not accesible!') + logger.error("Entered Ftrack URL is not accesible!") return False if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers): if log_errors: - print('ERROR: Entered Ftrack URL is not accesible!') + logger.error("Entered Ftrack URL is not accesible!") return False - print('DEBUG: Ftrack server {} is accessible.'.format(url)) + logger.debug("Ftrack server {} is accessible.".format(url)) return url @@ -133,7 +136,7 @@ class ProcessEventHub(SocketBaseEventHub): hearbeat_msg = b"processor" is_collection_created = False - pypelog = Logger().get_logger("Session Processor") + pypelog = Logger.get_logger("Session Processor") def __init__(self, *args, **kwargs): self.mongo_url = None From 87878cf9538b07c9c8336ee6fb121e81275a17fd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 21 Apr 2022 12:06:46 +0200 Subject: [PATCH 179/244] OP-2765 - minor update of validation message --- .../plugins/publish/help/validate_scene_settings.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml b/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml index 36fa90456e..0591020ed3 100644 --- a/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml +++ b/openpype/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml @@ -12,6 +12,8 @@ One of the settings in a scene doesn't match to asset settings in database. ### How to repair? Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there. + + In the scene it is right mouse click on published composition > `Composition Settings`. 
### __Detailed Info__ (optional) From 613e14c012430d7df3f9494cd0acccbed20165ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:25:40 +0200 Subject: [PATCH 180/244] don't look into repos directory to be added to sys path --- start.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/start.py b/start.py index 35a14a059e..c066fa3ab7 100644 --- a/start.py +++ b/start.py @@ -823,23 +823,14 @@ def _bootstrap_from_code(use_version, use_staging): version_path = Path(_openpype_root) os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root - repos = [] - # Check for "openpype/repos" directory for sumodules - # NOTE: Is not used at this moment but can be re-used in future - repos_dir = os.path.join(_openpype_root, "repos") - if os.path.exists(repos_dir): - for name in os.listdir(repos_dir): - repos.append(os.path.join(repos_dir, name)) - - # add self to python paths - repos.insert(0, _openpype_root) - for repo in repos: - sys.path.insert(0, repo) + # add self to sys.path of current process + sys.path.insert(0, _openpype_root) # add venv 'site-packages' to PYTHONPATH python_path = os.getenv("PYTHONPATH", "") split_paths = python_path.split(os.pathsep) - # Add repos as first in list - split_paths = repos + split_paths + # add self to python paths + split_paths.insert(0, _openpype_root) + # last one should be venv site-packages # this is slightly convoluted as we can get here from frozen code too # in case when we are running without any version installed. From bf45122d8cf013418b16d90bdc242ccb5daea33c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:25:51 +0200 Subject: [PATCH 181/244] don't handle repos directory in setup --- setup.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/setup.py b/setup.py index dc6c003ed6..899e9375c0 100644 --- a/setup.py +++ b/setup.py @@ -128,10 +128,6 @@ include_files = [ "README.md" ] -repos_path = openpype_root / "repos" -if repos_path.exists(): - include_files.append("repos") - if IS_WINDOWS: install_requires.extend([ # `pywin32` packages From a616611b6cdffba1a8fab080d22fe27be306b21a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:27:30 +0200 Subject: [PATCH 182/244] don't use repos subdir to create zip --- igniter/bootstrap_repos.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 2e47f549d7..0638ee2341 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -668,9 +668,9 @@ class BootstrapRepos: self._progress_callback = progress_callback if getattr(sys, "frozen", False): - self.live_repo_dir = Path(sys.executable).parent / "repos" + self.live_repo_dir = Path(sys.executable).parent else: - self.live_repo_dir = Path(Path(__file__).parent / ".." 
/ "repos") + self.live_repo_dir = Path(Path(__file__).parent / "..") @staticmethod def get_version_path_from_list( @@ -756,7 +756,7 @@ class BootstrapRepos: Path(temp_dir) / f"openpype-v{version}.zip" self._print(f"creating zip: {temp_zip}") - self._create_openpype_zip(temp_zip, repo_dir.parent) + self._create_openpype_zip(temp_zip, repo_dir) if not os.path.exists(temp_zip): self._print("make archive failed.", LOG_ERROR) return None From db10343a292be770cdc20ae73d11659caf11b81d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:27:40 +0200 Subject: [PATCH 183/244] removed repos from filter list --- igniter/bootstrap_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 0638ee2341..c882ec6e49 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -654,7 +654,7 @@ class BootstrapRepos: self.registry = OpenPypeSettingsRegistry() self.zip_filter = [".pyc", "__pycache__"] self.openpype_filter = [ - "openpype", "repos", "schema", "LICENSE" + "openpype", "schema", "LICENSE" ] self._message = message From 38d93c1b46b74ea5596543747058841f7d384bf2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:30:40 +0200 Subject: [PATCH 184/244] don't look for repos directory in add_paths_from_directory --- igniter/bootstrap_repos.py | 18 +----------------- start.py | 1 + 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index c882ec6e49..50b46c36ab 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1094,24 +1094,8 @@ class BootstrapRepos: directory (Path): path to directory. """ + sys.path.insert(0, directory.as_posix()) - directory /= "repos" - if not directory.exists() and not directory.is_dir(): - return - - roots = [] - for item in directory.iterdir(): - if item.is_dir(): - root = item.as_posix() - if root not in roots: - roots.append(root) - sys.path.insert(0, root) - - pythonpath = os.getenv("PYTHONPATH", "") - paths = pythonpath.split(os.pathsep) - paths += roots - - os.environ["PYTHONPATH"] = os.pathsep.join(paths) @staticmethod def find_openpype_version(version, staging): diff --git a/start.py b/start.py index c066fa3ab7..8944da4ba0 100644 --- a/start.py +++ b/start.py @@ -824,6 +824,7 @@ def _bootstrap_from_code(use_version, use_staging): os.environ["OPENPYPE_REPOS_ROOT"] = _openpype_root # add self to sys.path of current process + # NOTE: this seems to be duplicate of 'add_paths_from_directory' sys.path.insert(0, _openpype_root) # add venv 'site-packages' to PYTHONPATH python_path = os.getenv("PYTHONPATH", "") From 4bae7484faf8e5777ba2e45a046dae1324409810 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:32:48 +0200 Subject: [PATCH 185/244] modified adding paths from archive to sys path --- igniter/bootstrap_repos.py | 22 +++------------------- tests/unit/igniter/test_bootstrap_repos.py | 2 -- 2 files changed, 3 insertions(+), 21 deletions(-) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 50b46c36ab..e9fb6fa0ec 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1057,27 +1057,11 @@ class BootstrapRepos: if not archive.is_file() and not archive.exists(): raise ValueError("Archive is not file.") - with ZipFile(archive, "r") as zip_file: - name_list = zip_file.namelist() - - roots = [] - paths = [] - for item in name_list: - if not item.startswith("repos/"): - continue - - root = item.split("/")[1] - 
- if root not in roots: - roots.append(root) - paths.append( - f"{archive}{os.path.sep}repos{os.path.sep}{root}") - sys.path.insert(0, paths[-1]) - - sys.path.insert(0, f"{archive}") + archive_path = str(archive) + sys.path.insert(0, archive_path) pythonpath = os.getenv("PYTHONPATH", "") python_paths = pythonpath.split(os.pathsep) - python_paths += paths + python_paths.insert(0, archive_path) os.environ["PYTHONPATH"] = os.pathsep.join(python_paths) diff --git a/tests/unit/igniter/test_bootstrap_repos.py b/tests/unit/igniter/test_bootstrap_repos.py index 65cd5a2399..10278c4928 100644 --- a/tests/unit/igniter/test_bootstrap_repos.py +++ b/tests/unit/igniter/test_bootstrap_repos.py @@ -152,8 +152,6 @@ def test_install_live_repos(fix_bootstrap, printer, monkeypatch, pytestconfig): openpype_version = fix_bootstrap.create_version_from_live_code() sep = os.path.sep expected_paths = [ - f"{openpype_version.path}{sep}repos{sep}avalon-core", - f"{openpype_version.path}{sep}repos{sep}avalon-unreal-integration", f"{openpype_version.path}" ] printer("testing zip creation") From fc73f253b27b6d4d4faa672112bf744a34e0e8c8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 11:35:50 +0200 Subject: [PATCH 186/244] remove copied zip from version repository after extraction --- igniter/bootstrap_repos.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index e9fb6fa0ec..6392517cda 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -1405,6 +1405,7 @@ class BootstrapRepos: # create destination parent directories even if they don't exist. destination.mkdir(parents=True) + remove_source_file = False # version is directory if openpype_version.path.is_dir(): # create zip inside temporary directory. 
@@ -1438,6 +1439,8 @@ class BootstrapRepos: self._progress_callback(35) openpype_version.path = self._copy_zip( openpype_version.path, destination) + # Mark zip to be deleted when done + remove_source_file = True # extract zip there self._print("extracting zip to destination ...") @@ -1446,6 +1449,10 @@ class BootstrapRepos: zip_ref.extractall(destination) self._progress_callback(100) + # Remove zip file copied to local app data + if remove_source_file: + os.remove(openpype_version.path) + return destination def _copy_zip(self, source: Path, destination: Path) -> Path: From e5b6105476e293759e07aa85294238669de3b122 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 11:46:40 +0200 Subject: [PATCH 187/244] OP-2765 - bump order to run after precollect --- openpype/hosts/aftereffects/plugins/publish/collect_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index 58aa01ad87..adbbe7eee9 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -27,7 +27,7 @@ class AERenderInstance(RenderInstance): class CollectAERender(abstract_collect_render.AbstractCollectRender): - order = pyblish.api.CollectorOrder + 0.400 + order = pyblish.api.CollectorOrder + 0.405 label = "Collect After Effects Render Layers" hosts = ["aftereffects"] From 01e045d9abeae87accaaa19cd14cd5c17d519323 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 12:37:52 +0200 Subject: [PATCH 188/244] OP-2765 - removed obsolete code Replaced lower by get_subset_name_with_asset_doc --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 0f09b5fdf8..e96541e47b 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -42,10 +42,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] - # workfile instance - family = "workfile" - subset = family + task.capitalize() # TOOD use method - instance_data = { "asset": asset_entity["name"], "task": task, From e1995e8828ffa7178149813905407c2a08718ebf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 12:52:11 +0200 Subject: [PATCH 189/244] added verbose and debug to global click options and removed debug from each individual callback --- openpype/cli.py | 55 +++++++++++++-------------------------- openpype/pype_commands.py | 2 +- 2 files changed, 19 insertions(+), 38 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index cbeb7fef9b..2aa4a46929 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -20,6 +20,10 @@ from .pype_commands import PypeCommands "to list staging versions.")) @click.option("--validate-version", expose_value=False, help="validate given version integrity") +@click.option("--debug", is_flag=True, expose_value=False, + help=("Enable debug")) +@click.option("--verbose", expose_value=False, + help=("Change OpenPype log level (debug - critical or 0-50)")) def main(ctx): """Pype is main command serving as entry point to pipeline system. 
@@ -49,18 +53,13 @@ def traypublisher(): @main.command() -@click.option("-d", "--debug", - is_flag=True, help=("Run pype tray in debug mode")) -def tray(debug=False): +def tray(): """Launch pype tray. Default action of pype command is to launch tray widget to control basic aspects of pype. See documentation for more information. - - Running pype with `--debug` will result in lot of information useful for - debugging to be shown in console. """ - PypeCommands().launch_tray(debug) + PypeCommands().launch_tray() @PypeCommands.add_modules @@ -75,7 +74,6 @@ def module(ctx): @main.command() -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("--ftrack-url", envvar="FTRACK_SERVER", help="Ftrack server url") @click.option("--ftrack-user", envvar="FTRACK_API_USER", @@ -88,8 +86,7 @@ def module(ctx): help="Clockify API key.") @click.option("--clockify-workspace", envvar="CLOCKIFY_WORKSPACE", help="Clockify workspace") -def eventserver(debug, - ftrack_url, +def eventserver(ftrack_url, ftrack_user, ftrack_api_key, legacy, @@ -100,8 +97,6 @@ def eventserver(debug, This should be ideally used by system service (such us systemd or upstart on linux and window service). """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_eventservercli( ftrack_url, @@ -114,12 +109,11 @@ def eventserver(debug, @main.command() -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-h", "--host", help="Host", default=None) @click.option("-p", "--port", help="Port", default=None) @click.option("-e", "--executable", help="Executable") @click.option("-u", "--upload_dir", help="Upload dir") -def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): +def webpublisherwebserver(executable, upload_dir, host=None, port=None): """Starts webserver for communication with Webpublish FR via command line OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND @@ -127,8 +121,6 @@ def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None): Expect "pype.club" user created on Ftrack. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" PypeCommands().launch_webpublisher_webservercli( upload_dir=upload_dir, @@ -164,38 +156,34 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup): @main.command() @click.argument("paths", nargs=-1) -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-t", "--targets", help="Targets module", default=None, multiple=True) @click.option("-g", "--gui", is_flag=True, help="Show Publish UI", default=False) -def publish(debug, paths, targets, gui): +def publish(paths, targets, gui): """Start CLI publishing. Publish collects json from paths provided as an argument. More than one path is allowed. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands.publish(list(paths), targets, gui) @main.command() @click.argument("path") -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-h", "--host", help="Host") @click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def remotepublishfromapp(debug, project, path, host, user=None, targets=None): +def remotepublishfromapp(project, path, host, user=None, targets=None): """Start CLI publishing. Publish collects json from paths provided as an argument. More than one path is allowed. 
""" - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands.remotepublishfromapp( project, path, host, user, targets=targets ) @@ -203,24 +191,21 @@ def remotepublishfromapp(debug, project, path, host, user=None, targets=None): @main.command() @click.argument("path") -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-u", "--user", help="User email address") @click.option("-p", "--project", help="Project") @click.option("-t", "--targets", help="Targets", default=None, multiple=True) -def remotepublish(debug, project, path, user=None, targets=None): +def remotepublish(project, path, user=None, targets=None): """Start CLI publishing. Publish collects json from paths provided as an argument. More than one path is allowed. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands.remotepublish(project, path, user, targets=targets) @main.command() -@click.option("-d", "--debug", is_flag=True, help="Print debug messages") @click.option("-p", "--project", required=True, help="name of project asset is under") @click.option("-a", "--asset", required=True, @@ -228,7 +213,7 @@ def remotepublish(debug, project, path, user=None, targets=None): @click.option("--path", required=True, help="path where textures are found", type=click.Path(exists=True)) -def texturecopy(debug, project, asset, path): +def texturecopy(project, asset, path): """Copy specified textures to provided asset path. It validates if project and asset exists. Then it will use speedcopy to @@ -239,8 +224,7 @@ def texturecopy(debug, project, asset, path): Result will be copied without directory structure so it will be flat then. Nothing is written to database. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands().texture_copy(project, asset, path) @@ -389,11 +373,9 @@ def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant, @main.command() -@click.option("-d", "--debug", - is_flag=True, help=("Run process in debug mode")) @click.option("-a", "--active_site", required=True, help="Name of active stie") -def syncserver(debug, active_site): +def syncserver(active_site): """Run sync site server in background. Some Site Sync use cases need to expose site to another one. @@ -408,8 +390,7 @@ def syncserver(debug, active_site): Settings (configured by starting OP Tray with env var OPENPYPE_LOCAL_ID set to 'active_site'. """ - if debug: - os.environ["OPENPYPE_DEBUG"] = "1" + PypeCommands().syncserver(active_site) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index e0c8847040..7dcfc001f0 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -25,7 +25,7 @@ class PypeCommands: Most of its methods are called by :mod:`cli` module. 
""" @staticmethod - def launch_tray(debug=False): + def launch_tray(): PypeLogger.set_process_name("Tray") from openpype.tools import tray From 4ad395953e6818142ae764e73066e4eb5baea04f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 13:10:39 +0200 Subject: [PATCH 190/244] OP-2765 - add publish value Fix wrong recreation of legacy instance --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index e96541e47b..450a4540b8 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -36,6 +36,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data["representations"].append(representation) + instance.data["publish"] = instance.data["active"] # for DL + def _get_new_instance(self, context, scene_file): task = api.Session["AVALON_TASK"] version = context.data["version"] @@ -83,8 +85,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "representations": list() }) - # Create instance - instance = context.create_instance(subset) instance.data.update(instance_data) return instance From 5dd449e31bf9a65b4d07aa7e8a816b30b59ddd9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 13:55:20 +0200 Subject: [PATCH 191/244] added args to documentation --- website/docs/admin_openpype_commands.md | 21 ++++++++++++--------- website/docs/admin_use.md | 16 ++++++++++++++++ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md index 74cb895ac9..53b4799d6e 100644 --- a/website/docs/admin_openpype_commands.md +++ b/website/docs/admin_openpype_commands.md @@ -24,7 +24,11 @@ openpype_console --use-version=3.0.0-foo+bar `--list-versions [--use-staging]` - to list available versions. -`--validate-version` to validate integrity of given version +`--validate-version` - to validate integrity of given version + +`--verbose` `` - change log verbose level of OpenPype loggers + +`--debug` - set debug flag affects logging For more information [see here](admin_use.md#run-openpype). @@ -47,13 +51,9 @@ For more information [see here](admin_use.md#run-openpype). --- ### `tray` arguments {#tray-arguments} -| Argument | Description | -| --- | --- | -| `--debug` | print verbose information useful for debugging (works with `openpype_console`) | -To launch Tray with debugging information: ```shell -openpype_console tray --debug +openpype_console tray ``` --- ### `launch` arguments {#eventserver-arguments} @@ -62,7 +62,6 @@ option to specify them. | Argument | Description | | --- | --- | -| `--debug` | print debug info | | `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) | | `--ftrack-user` |user name to log in to ftrack (can be set with `FTRACK_API_USER`) | | `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) | @@ -98,12 +97,16 @@ pype launch --app python --project my_project --asset my_asset --task my_task --- ### `publish` arguments {#publish-arguments} +Run publishing based on metadata passed in json file e.g. on farm. + | Argument | Description | | --- | --- | -| `--debug` | print more verbose information | +| `--targets` | define publishing targets (e.g. 
"farm") | +| `--gui` (`-g`) | Show publishing | +| Positional argument | Path to metadata json file | ```shell -pype publish +openpype publish --targes farm ``` --- diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md index 178241ad19..f84905c486 100644 --- a/website/docs/admin_use.md +++ b/website/docs/admin_use.md @@ -69,6 +69,22 @@ stored in `checksums` file. Add `--headless` to run OpenPype without graphical UI (useful on server or on automated tasks, etc.) ::: +`--verbose` `` - change log verbose level of OpenPype loggers. + +Level value can be integer in range `0-50` or one of enum strings `"notset" (0)`, `"debug" (10)`, `"info" (20)`, `"warning" (30)`, `"error" (40)`, `"ciritcal" (50)`. Value is stored to `OPENPYPE_LOG_LEVEL` environment variable for next processes. + +```shell +openpype_console --verbose debug +``` + +`--debug` - set debug flag affects logging + +Enable debug flag for OpenPype process. Change value of environment variable `OPENPYPE_DEBUG` to `"1"`. At this moment affects only OpenPype loggers. Argument `--verbose` or environment variable `OPENPYPE_LOG_LEVEL` are used in preference to affect log level. + +```shell +openpype_console --debug +``` + ### Details When you run OpenPype from executable, few check are made: From 91e2ffb8dcdc40254fc751f9683d8dd747d10ff5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 14:56:06 +0200 Subject: [PATCH 192/244] OP-2765 - fix missing representation for disabled workfile --- .../hosts/aftereffects/plugins/publish/collect_workfile.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 450a4540b8..64a81b58eb 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -34,6 +34,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin): "stagingDir": staging_dir, } + if not instance.data.get("representations"): + instance.data["representations"] = [] instance.data["representations"].append(representation) instance.data["publish"] = instance.data["active"] # for DL @@ -45,6 +47,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): project_entity = context.data["projectEntity"] instance_data = { + "active": True, "asset": asset_entity["name"], "task": task, "frameStart": asset_entity["data"]["frameStart"], From 3bc4d98c98e7b3054f4d254b3aa42ac61f0cde1b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 14:59:03 +0200 Subject: [PATCH 193/244] OP-2765 - removed unwanted assetEntity AssetEntity not available after change of order. anatomyData not available after change of order. Added pulling of resolution from workfile. 
--- .../plugins/publish/collect_render.py | 33 +++++++------------ 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index adbbe7eee9..fa23bf92b0 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -2,7 +2,6 @@ import os import re import tempfile import attr -from copy import deepcopy import pyblish.api @@ -23,6 +22,7 @@ class AERenderInstance(RenderInstance): stagingDir = attr.ib(default=None) app_version = attr.ib(default=None) publish_attributes = attr.ib(default=None) + file_name = attr.ib(default=None) class CollectAERender(abstract_collect_render.AbstractCollectRender): @@ -64,8 +64,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): if family not in ["render", "renderLocal"]: # legacy continue - asset_entity = inst.data["assetEntity"] - item_id = inst.data["members"][0] work_area_info = CollectAERender.get_stub().get_work_area( @@ -84,8 +82,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): fps = work_area_info.frameRate # TODO add resolution when supported by extension - task_name = (inst.data.get("task") or - list(asset_entity["data"]["tasks"].keys())[0]) # lega + task_name = inst.data.get("task") # legacy + + render_q = CollectAERender.get_stub().get_render_info() + if not render_q: + raise ValueError("No file extension set in Render Queue") subset_name = inst.data["subset"] instance = AERenderInstance( @@ -103,12 +104,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): publish=True, renderer='aerender', name=subset_name, - resolutionWidth=asset_entity["data"].get( - "resolutionWidth", - project_entity["data"]["resolutionWidth"]), - resolutionHeight=asset_entity["data"].get( - "resolutionHeight", - project_entity["data"]["resolutionHeight"]), + resolutionWidth=render_q.width, + resolutionHeight=render_q.height, pixelAspect=1, tileRendering=False, tilesX=0, @@ -119,8 +116,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): toBeRenderedOn='deadline', fps=fps, app_version=app_version, - anatomyData=deepcopy(inst.data["anatomyData"]), - publish_attributes=inst.data.get("publish_attributes") + publish_attributes=inst.data.get("publish_attributes"), + file_name=render_q.file_name ) comp = compositions_by_id.get(int(item_id)) @@ -165,15 +162,11 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): start = render_instance.frameStart end = render_instance.frameEnd - # pull file name from Render Queue Output module - render_q = CollectAERender.get_stub().get_render_info() - if not render_q: - raise ValueError("No file extension set in Render Queue") - _, ext = os.path.splitext(os.path.basename(render_q.file_name)) + _, ext = os.path.splitext(os.path.basename(render_instance.file_name)) base_dir = self._get_output_dir(render_instance) expected_files = [] - if "#" not in render_q.file_name: # single frame (mov)W + if "#" not in render_instance.file_name: # single frame (mov)W path = os.path.join(base_dir, "{}_{}_{}.{}".format( render_instance.asset, render_instance.subset, @@ -216,8 +209,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): def _update_for_local(self, instance, project_entity): """Update old saved instances to current publishing format""" - instance.anatomyData["version"] = instance.version - 
instance.anatomyData["subset"] = instance.subset instance.stagingDir = tempfile.mkdtemp() instance.projectEntity = project_entity fam = "render.local" From ac1eeca9060008578472d82a6ec4439b37df090a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 15:00:12 +0200 Subject: [PATCH 194/244] OP-2765 - added pulling resolution from scene --- openpype/hosts/aftereffects/api/extension.zxp | Bin 100982 -> 101003 bytes .../api/extension/CSXS/manifest.xml | 2 +- .../api/extension/jsx/hostscript.jsx | 4 +++- openpype/hosts/aftereffects/api/ws_stub.py | 6 +++++- 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/aftereffects/api/extension.zxp b/openpype/hosts/aftereffects/api/extension.zxp index 389d74505dfc45ecba6313435056634a64b27c20..0ed799991e9c791565a92d3899f3db67b9fea910 100644 GIT binary patch delta 9400 zcmaKy1yoy0x5pFQT}p9xDDLi7Tw0(McekQ};O+z|?(R~cc#Av5trRWpQ0$d^*Zpq$ zy}q}yPO_5U?Ah}_IkRWh?0uOC7m);qs-gf5g988n5CCCv)^VtAD26bMM%j5rMlz_d zMl~@?KStZy(1^i}KcMmm)`j_1xhVSo01Jex%-Tz)TVFW7{C8Zha zKd$O5v4v5gy`7U2O3BIT+GIpa$$8-gh7c{s4SMNV z7uZ`dA(C7I+A@U=5n3)S^(A^KrcMR(hj=1ckdO8QfZ%=L(syX%kIO!-_S6+8ED*=Q z%n}X70@j4rh-qQJ{Gjkf)7<$&)C4>TG9uuLJB9Jt+kZ6bTx9~{(Ns~$pL@4qR8ZzcsG^3sdKs4uN0+zIDSzNT zFm#!mV}hTzh~X5-E93?(k$>&XTGGbqHWU=(|Dhtjk{3R5!!qxguqfYH7zc8ZU(u}%&kf<^^ zkg713TAMqXms1)YD-f?-b(i*J}_*@N*`)R zUQuZ~w>SkAqxtLisNJkYIerjvkzU>0)?76X^&2Gy?GtY z@ZL1S+&{u8Y@rg33!Pw{>9R{OMJn%3qPh}!yo1HJOAxJ<#ETFu7*4 ziV;t5JUfk+a7p6xs>7?*1HsMRl8lFu(6(6Y)@*JQYvY7j`#gP1fn!28de+c=6yb+!9ok&e*-+!|;IGnH zyt7kQ4q@&1iW^5QwX@e~Fc+JUWKYB!dDmJg59%Sp9Ye2SoE|%_{8LPAjsd2u(=p3! z(x(ZB^fM#?0D%tj2}IQI2%ciWKQZCo>G(8_E2u+%pT=S|A2I)$;6UE+-wA|w;BR6q zAN+riF1^6tiK>3e-!}Vp|7GKAKZyBPh*9wO{5`}>_)|hd_xuc6SAnhsBfmtm-*p8A zdHz4To(2DxztUiQ+P`%5Rz&v>p)rd!I)7 zz;PI8Mb~5qml4p7syg>i5%wq6K<@6tO8B`X2x>ycjDYSazqW*^83UDI-SbW)@|yCE zfzm)wV{Up!H4|fQTzr&Sd1+o3l6}0CX8FLb>&PjLMpp90+v%^>CKr~z1pcLJ{6i^Q zc`c6;C}XMjg1u|BT2p~D)3T1o$2svo$jPf<8DGk`3?QX-W)B?8@Egy^GO593YB3?C zTnh1y#3nSXo9KB~oNUSiU&q@8>DfK1%iVko#8+@KH*;PYd^|c1!F->*PF8lo@(|WEtDNgD#W7 zL(Qt1OKVR9WaNr{YsgVuw%B0wwtP?&&eAnrRwNx&VquLAKH@MXA zK%-md{F|=%tx~jP4b1jY z8zWcR2@y;5`xItPVKKSYd~r`d%IRzD*9oC_kHwQVqM?$Gu&s0TR}&FqMOK=;*?6IA zwS4X#b6-osco%hbyQs4$#i7B*Mpfiy02(E$Hr=gas`VZ|t(8ivsf*uSGrM5eFusmW zkac~51p|W+BKzW8&rp?tSU5y0eski@mCOw?j-hI_wEwXe$|}ozozV&ajPV^6-UvG^ zD3q_h2T>H3OAhCQWu=sK9s*P!hf&*RAXb}U>tQs|aO&AD=|MEGNK;c|VoftA5ro7~ zc2)>=u4Q#_35B(l^f*~QiwiA3uQyu;0{HuqEIJiAE!sph3)!vOMD>7-aYeDPhPc`{GE|`k1RUzC$w;C~p~i z?FtY_V?`8**ofG-`G#tk@4ZvvH1u=>3@a2%TH zP>jm9kN)snY@+9@YHKzUbP->brLg&$5&P!JWL=&F3%uaDZPoc2p#;6h@rl{Eb!au2 zndg42WIbZ&`s;OPQ$C==5e|%g4rS1Bj4~!rq;fYqCI-6KJwU>dxf--Gw;7owmbQau ziRBp$RfZ>iO^DBA>=(9)#-wi(NYcU!+ZZOLN!TWoyc8(;eZHP92tCBCTVX<6Y>Q4m zAVYS9S6Vkx%fpeYyF|0yI-lR%yg$-y-dBfyj-bC`QxI$f81L>{yXfW19B(p7v8jsl}iLSriLK%Y6Ff(Gtu! 
zt8s8DwmFnPNx#f`i))Lt91i3BSZX4QySt39vA6Fq>qeWa%;^iQFnUt-6+T@c^X#}C zNAJ7+RzW^qz?VUlr3`|l z(nL*lL$eO2Slw(=(t`>GP}Ni=e*7aHZ=IW=`sq3SO|uhl}XN@%bCovSgYCQ4sZ=N?} z7bLgW`Aks%2zlcO2eP2X-1t;`jnKX&g=(#zKzq_T8Kc1(W^x_FyhYy&B1Q*FyA-MOLVahQ%zDO`dq##Pd6%#&^XO$#zUTSb!)jHkCUYw+v9|*HesC2!MQ0U=uAZxlL zZSEYY#7hN4c5>XawaFnQ=rwZTZVx46WlTUGt!KWlyAlwA+p~-eXzZ3&*kFLYdM-$j ziNn(_=l4o_8w#co?nK3$SCedqw`+q-T;be$g1#HZF=(-(P?73A40ls!tUQ!)S+7D( z)F+#qNlx@bFi5yTje#EZBK{l^JJEd;sK4!VZ~#&K@DFPe2NgN2T}f;XH^6*+|p zDU$z;}HX_D1bMrT}!|(1` zpLO1=__eUz;A0|o+Pen=yt@n1!&XAV?dXi^3%O|J1wb_Ir1{tzodgw~oFy5Zl)e2_ zW(WQ0?{DwkxYqM9FhVAL?hD!YfPxwV!3ZR9Lf9i@!*;JfxoaxD5sNH-2FDxc$n4}W z_!=pgjimkULF9g2fp}%3A@-x*u>yUI(cXG(23ki{Fr@sUpjenlJ2FkSnUXYfDq)-<*{F=Di8>)OLUT2zl#FE>dy|fpHSx#*u#RCVv zcDq`efbg}n2NEyAemx6Tr@E2J9NTe#sE&<;p@|M{4`#3XNTsCv+n0%c$V;x>OGY^e zl%lN1sFjJ`v|JTd8IlTWXofzNheA~_NC)IzGa4Xqk6<-@FS+I}s1I#$<2i(BgghM; zF(;KVQDJ$3mul!}c+$+a=y+S;V1!kPro?L;!GIz}NlC|xw#uD5=nsk|C*@UdFQV^R z)G*mI$VqMvzKB-1dG7nF=OGHJ%&dZRktc_a`R-syj@v2Sk*U*AE8n`nD59~9W@gCU z!8w8SHKp2#kl)fZPDRp?6GVI1)59YUff3&{rR5n^0)$aXGCJ_S?o(gCuSf`~zQM3`_Et!aX(9*c~vD?E%~Djrvjk_{!(tj<~*YzqfHPH5IPAwslO zgmvkm-Zeu)JKZGb+STd6YGC;_ujix?EJr;EQyy-As_hWCVWdlS_6HYjs3vf5>J^TA@`kBuNxHj1p|k|Y&Oes zo$KI&sT*z5QnMyeqvQw0&m3R zqbsSA1Y zU3XBd!P|`e#K!)6sk&rSv5Zo{a)nCuCEqKq4Lq0~G*byi;<9s+VUfMdlW?b;!}fB1 z)-hV%xW$y3J>Cb7B^7_UTz%YM%F%i(}YKXm-M9=!XeTjF)2W13p&|)myx4 zL>!iJg)(w&TVC;W`*CwFU}MM?0dhc%mTmn;71d=h+|}+lFKT7o(a}hF?9Y&W(t9(= zL9UT}3~j@ljQf4%vJ%9$ zYyBiR>m&3&!QT->C!as8C?|velAKg+9#5+<*EyWV29_J4y?SQ4JkAZg z7&H=M1?YszD6iOpqWD)H<}WmP^RvWEW4R+i*Tu^Ad(HYE%wFR-z6W@u}@CFYlHnQI7*$kHrB*ey>2R4)Gs91uVO7 zsG{qai`U?W4S>N*?@Fi3^e9_8avMESh%SaGR6?Ik5>SW)24!aprJGKam=L#4sewGr z>UO?}^0+r(VdzTTSI0{te;_N~GW8s(HI80P6V9bIK3&dQe^hBb!>x$-h@zLQusI6s@h3ZTE8Rk0M~BmviD0W8&ZOf@DzN}BDwdrYB88G@ z7w}9_>K&OljGd8IN>*N(f3I)_v(jyI_1-DKeO`3PAl;=3sHgc(T*bSq7`r^91WQb0 z!B-wd$LYAyDWNA0+IEJkF*IW^lvT!}*NA5&wwYa|A$OIh?8k|>FR!#Zm=WUck??rG zPi*WXWMae z%bFAU&)AIIe?UvL!AkqKV2ZeXO0ipdbs&mtJjqyy(ZHd^1*&s+g%J%K(2?QZIPZgW zKEtpVwCdE>6ufr5+j(61%tux)whsvxX5#um(o(14`PZP&9mrcZQ6iW-9!@LooIM#- zN9sFapLc{J;9DNAD`vU1-xZ3U9ICKkP)k;N$)w78p_G;hEvT#b&jzOr*~*s=*CavF zUXQv(_^)-L34$cR4Vs5?H3V_N*jk}_vW&fLsd7S6F@0NR(jNC}Qnf(dhAS_qU}fzB z_mhM}%nOOr+|G@4B$_Wyrf=%7;fGImG$e%M#1x%rp;uq_-M47>Gt_=xuP8b^#Z|p4 zAVV%3Owvi3k?R!D^-Q)v_cC38Ul7waW{1u?G1ZozDh6$4L5Gkry3c>HE(($WK)JkO zux!AEqtFuJ$z;GSt>!)7nJ7K7Ce$5Lr#(BYvhwklESg^A^fl&v+1#On1;P4>+g4JeD|lC_DVFz8GE^i0Kd23uU5v{tWnOqTd+ z-B;BGh>Ah@m2f;XTJuW>P0$y%t;?g|3b;jP>8Vr(l#Vby(|5R`=h2Lo>zuEJ|;CmUD!59bwiotb3q!{8(&*D@t9sWQvK9 zfSYk}H6PJAI%hMptg*Dw7Vq+egSD!xqpCDK@yzfgDv^ zQZv`iO7Ot^HEYvN|M=i}%7#r1vtRN1iY97r)@`mkefuGp_9ExI0K#$P7pxw&%>Euy z_{Z*`hKgj4JL5%$ykq!BNCdM%p)MNA9@X-ULQ92M{PWfh#huIe*~L8(md4q1gLwo{ zFH4YVlHfa}A+GWt41|ByOTTh#0ge*dH<3V@6ov8;7qsc(_Ut#4BKMS^ zfiRm};-js&`ET`f&=35TidJVG4GtV$_uG^`M$35R5chnZLWAa0I5uZT3${Yv-OsOR z;uq)EN@#9nR6WY{-j5%mrM<559fk78P6Q^maQQlMJb6j zO9Y(NNBDn_3Wt9lm@T*iz389LKmWP@Jp=uTHBf`5AKKsn02(|103TotISvCphorm% z>Vy7i|MdAK<&|@~B3|&3{^nz>6N@HyYjO#%rj0mR4W;N3xy1}zFNOd49=t#nwm?E! 
GIT binary patch (literal data omitted)

diff --git a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
index 8f82c9709d..91df433908 100644
--- a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
+++ b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
@@ -417,7 +417,9 @@ function getRenderInfo(){
     var file_url = item.file.toString();
 
     return JSON.stringify({
-        "file_name": file_url
+        "file_name": file_url,
+        "width": render_item.comp.width,
+        "height": render_item.comp.height
     })
 }
 
diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py
index 9a6462fcd4..8719a8f46e 100644
--- a/openpype/hosts/aftereffects/api/ws_stub.py
+++ b/openpype/hosts/aftereffects/api/ws_stub.py
@@ -29,6 +29,8 @@ class AEItem(object):
     frameRate = attr.ib(default=None)
     file_name = attr.ib(default=None)
     instance_id = attr.ib(default=None) # New Publisher
+    width = attr.ib(default=None)
+    height = attr.ib(default=None)
 
 
 class AfterEffectsServerStub():
@@ -609,7 +611,9 @@ class AfterEffectsServerStub():
                           d.get('workAreaDuration'),
                           d.get('frameRate'),
                           d.get('file_name'),
-                          d.get("instance_id"))
+                          d.get("instance_id"),
+                          d.get("width"),
+                          d.get("height"))
             ret.append(item)
 
         return ret

From 1adec078d88952bf0dcbb6705e904c5dbab09182 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 22 Apr 2022 15:47:06 +0200
Subject: [PATCH 195/244] OP-3021 - refactored base class into abstract class

Moved to api.plugin to make it clearer.
--- openpype/hosts/nuke/api/plugin.py | 138 ++++++++++++++++++ .../plugins/create/create_write_prerender.py | 4 +- .../plugins/create/create_write_render.py | 103 +------------ .../nuke/plugins/create/create_write_still.py | 4 +- 4 files changed, 143 insertions(+), 106 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 3ac750a48f..eaf0ab6911 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,6 +1,8 @@ import os import random import string +from collections import OrderedDict +from abc import abstractmethod import nuke @@ -594,3 +596,139 @@ class ExporterReviewMov(ExporterReview): nuke.scriptSave() return self.data + + +class AbstractWriteRender(OpenPypeCreator): + """Abstract creator to gather similar implementation for Write creators""" + name = "" + label = "" + hosts = ["nuke"] + n_class = "Write" + family = "render" + icon = "sign-out" + defaults = ["Main", "Mask"] + + def __init__(self, *args, **kwargs): + super(AbstractWriteRender, self).__init__(*args, **kwargs) + + data = OrderedDict() + + data["family"] = self.family + data["families"] = self.n_class + + for k, v in self.data.items(): + if k not in data.keys(): + data.update({k: v}) + + self.data = data + self.nodes = nuke.selectedNodes() + self.log.debug("_ self.data: '{}'".format(self.data)) + + def process(self): + + inputs = [] + outputs = [] + instance = nuke.toNode(self.data["subset"]) + selected_node = None + + # use selection + if (self.options or {}).get("useSelection"): + nodes = self.nodes + + if not (len(nodes) < 2): + msg = ("Select only one node. " + "The node you want to connect to, " + "or tick off `Use selection`") + self.log.error(msg) + nuke.message(msg) + return + + if len(nodes) == 0: + msg = ( + "No nodes selected. 
Please select a single node to connect" + " to or tick off `Use selection`" + ) + self.log.error(msg) + nuke.message(msg) + return + + selected_node = nodes[0] + inputs = [selected_node] + outputs = selected_node.dependent() + + if instance: + if (instance.name() in selected_node.name()): + selected_node = instance.dependencies()[0] + + # if node already exist + if instance: + # collect input / outputs + inputs = instance.dependencies() + outputs = instance.dependent() + selected_node = inputs[0] + # remove old one + nuke.delete(instance) + + # recreate new + write_data = { + "nodeclass": self.n_class, + "families": [self.family], + "avalon": self.data + } + + # add creator data + creator_data = {"creator": self.__class__.__name__} + self.data.update(creator_data) + write_data.update(creator_data) + + if self.presets.get('fpath_template'): + self.log.info("Adding template path from preset") + write_data.update( + {"fpath_template": self.presets["fpath_template"]} + ) + else: + self.log.info("Adding template path from plugin") + write_data.update({ + "fpath_template": + ("{work}/" + self.family + "s/nuke/{subset}" + "/{subset}.{frame}.{ext}")}) + + write_node = self._create_write_node(selected_node, + inputs, outputs, + write_data) + + # relinking to collected connections + for i, input in enumerate(inputs): + write_node.setInput(i, input) + + write_node.autoplace() + + for output in outputs: + output.setInput(0, write_node) + + write_node = self._modify_write_node(write_node) + + return write_node + + @abstractmethod + def _create_write_node(self, selected_node, inputs, outputs, write_data): + """Family dependent implementation of Write node creation + + Args: + selected_node (nuke.Node) + inputs (list of nuke.Node) - input dependencies (what is connected) + outputs (list of nuke.Node) - output dependencies + write_data (dict) - values used to fill Knobs + Returns: + node (nuke.Node): group node with data as Knobs + """ + pass + + @abstractmethod + def _modify_write_node(self, write_node): + """Family dependent modification of created 'write_node' + + Returns: + node (nuke.Node): group node with data as Knobs + """ + pass diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index e9309d8170..7297f74c13 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,10 +1,10 @@ import nuke +from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node -from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWritePrerender(create_write_render.CreateWriteRender): +class CreateWritePrerender(plugin.AbstractWriteRender): # change this to template preset name = "WritePrerender" label = "Create Write Prerender" diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 79766929ac..18a101546f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,12 +1,10 @@ -from collections import OrderedDict - import nuke from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node -class CreateWriteRender(plugin.OpenPypeCreator): +class CreateWriteRender(plugin.AbstractWriteRender): # change this to template preset name = "WriteRender" label = "Create Write Render" @@ -19,105 +17,6 @@ 
class CreateWriteRender(plugin.OpenPypeCreator): def __init__(self, *args, **kwargs): super(CreateWriteRender, self).__init__(*args, **kwargs) - data = OrderedDict() - - data["family"] = self.family - data["families"] = self.n_class - - for k, v in self.data.items(): - if k not in data.keys(): - data.update({k: v}) - - self.data = data - self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) - - def process(self): - - inputs = [] - outputs = [] - instance = nuke.toNode(self.data["subset"]) - selected_node = None - - # use selection - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if not (len(nodes) < 2): - msg = ("Select only one node. " - "The node you want to connect to, " - "or tick off `Use selection`") - self.log.error(msg) - nuke.message(msg) - return - - if len(nodes) == 0: - msg = ( - "No nodes selected. Please select a single node to connect" - " to or tick off `Use selection`" - ) - self.log.error(msg) - nuke.message(msg) - return - - selected_node = nodes[0] - inputs = [selected_node] - outputs = selected_node.dependent() - - if instance: - if (instance.name() in selected_node.name()): - selected_node = instance.dependencies()[0] - - # if node already exist - if instance: - # collect input / outputs - inputs = instance.dependencies() - outputs = instance.dependent() - selected_node = inputs[0] - # remove old one - nuke.delete(instance) - - # recreate new - write_data = { - "nodeclass": self.n_class, - "families": [self.family], - "avalon": self.data - } - - # add creator data - creator_data = {"creator": self.__class__.__name__} - self.data.update(creator_data) - write_data.update(creator_data) - - if self.presets.get('fpath_template'): - self.log.info("Adding template path from preset") - write_data.update( - {"fpath_template": self.presets["fpath_template"]} - ) - else: - self.log.info("Adding template path from plugin") - write_data.update({ - "fpath_template": - ("{work}/" + self.family + "s/nuke/{subset}" - "/{subset}.{frame}.{ext}")}) - - write_node = self._create_write_node(selected_node, - inputs, outputs, - write_data) - - # relinking to collected connections - for i, input in enumerate(inputs): - write_node.setInput(i, input) - - write_node.autoplace() - - for output in outputs: - output.setInput(0, write_node) - - write_node = self._modify_write_node(write_node) - - return write_node - def _create_write_node(self, selected_node, inputs, outputs, write_data): # add reformat node to cut off all outside of format bounding box # get width and height diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index 3361bc2602..d22b5eab3f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,10 +1,10 @@ import nuke +from openpype.hosts.nuke.api import plugin from openpype.hosts.nuke.api.lib import create_write_node -from openpype.hosts.nuke.plugins.create import create_write_render -class CreateWriteStill(create_write_render.CreateWriteRender): +class CreateWriteStill(plugin.AbstractWriteRender): # change this to template preset name = "WriteStillFrame" label = "Create Write Still Image" From 37e43de5dee0418dc95d5135aeb81d6567fcbd14 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 16:24:35 +0200 Subject: [PATCH 196/244] updated create folders action to use task as dictionary --- .../action_create_folders.py | 180 +++++++++++------- 1 file changed, 
116 insertions(+), 64 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py index 0ed12bd03e..8104818195 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py @@ -1,6 +1,8 @@ import os -from openpype_modules.ftrack.lib import BaseAction, statics_icon +import collections +import copy from openpype.api import Anatomy +from openpype_modules.ftrack.lib import BaseAction, statics_icon class CreateFolders(BaseAction): @@ -86,10 +88,19 @@ class CreateFolders(BaseAction): 'message': 'Nothing was created' } - all_entities = [] - all_entities.append(entity) - if with_childrens: - all_entities = self.get_notask_children(entity) + task_entities = [] + other_entities = [] + self.get_all_entities( + session, entities, task_entities, other_entities + ) + hierarchy = self.get_entities_hierarchy( + session, task_entities, other_entities + ) + task_types = session.query("select id, name from Type").all() + task_type_names_by_id = { + task_type["id"]: task_type["name"] + for task_type in task_types + } anatomy = Anatomy(project_name) @@ -97,77 +108,67 @@ class CreateFolders(BaseAction): work_template = anatomy.templates for key in work_keys: work_template = work_template[key] - work_has_apps = "{app" in work_template publish_keys = ["publish", "folder"] publish_template = anatomy.templates for key in publish_keys: publish_template = publish_template[key] - publish_has_apps = "{app" in publish_template + + project_data = { + "project": { + "name": project_name, + "code": project_code + } + } collected_paths = [] - for entity in all_entities: - if entity.entity_type.lower() == "project": - continue - ent_data = { - "project": { - "name": project_name, - "code": project_code - } - } + for item in hierarchy: + parent_entity, task_entities = item - ent_data["asset"] = entity["name"] + parent_data = copy.deepcopy(project_data) - parents = entity["link"][1:-1] + parents = parent_entity["link"][1:-1] hierarchy_names = [p["name"] for p in parents] - hierarchy = "" + hierarchy = "/".join(hierarchy_names) + if hierarchy_names: - hierarchy = os.path.sep.join(hierarchy_names) - ent_data["hierarchy"] = hierarchy + parent_name = hierarchy_names[-1] + else: + parent_name = project_name - tasks_created = False - for child in entity["children"]: - if child["object_type"]["name"].lower() != "task": - continue - tasks_created = True - task_data = ent_data.copy() - task_data["task"] = child["name"] + parent_data.update({ + "asset": parent_entity["name"], + "hierarchy": hierarchy, + "parent": parent_name + }) - apps = [] - - # Template wok - if work_has_apps: - app_data = task_data.copy() - for app in apps: - app_data["app"] = app - collected_paths.append(self.compute_template( - anatomy, app_data, work_keys - )) - else: - collected_paths.append(self.compute_template( - anatomy, task_data, work_keys - )) - - # Template publish - if publish_has_apps: - app_data = task_data.copy() - for app in apps: - app_data["app"] = app - collected_paths.append(self.compute_template( - anatomy, app_data, publish_keys - )) - else: - collected_paths.append(self.compute_template( - anatomy, task_data, publish_keys - )) - - if not tasks_created: + if not task_entities: # create path for entity collected_paths.append(self.compute_template( - anatomy, ent_data, work_keys + anatomy, parent_data, work_keys )) 
collected_paths.append(self.compute_template( - anatomy, ent_data, publish_keys + anatomy, parent_data, publish_keys + )) + continue + + for task_entity in task_entities: + task_type_id = task_entity["type_id"] + task_type_name = task_type_names_by_id[task_type_id] + task_data = copy.deepcopy(parent_data) + task_data["task"] = { + "name": task_entity["name"], + "type": task_type_name + } + + # Template wok + collected_paths.append(self.compute_template( + anatomy, task_data, work_keys + )) + + # Template publish + collected_paths.append(self.compute_template( + anatomy, task_data, publish_keys )) if len(collected_paths) == 0: @@ -188,14 +189,65 @@ class CreateFolders(BaseAction): "message": "Successfully created project folders." } - def get_notask_children(self, entity): + def get_all_entities( + self, session, entities, task_entities, other_entities + ): + if not entities: + return + + no_task_entities = [] + for entity in entities: + if entity.entity_type.lower() == "task": + task_entities.append(entity) + else: + no_task_entities.append(entity) + + if not no_task_entities: + return task_entities + + other_entities.extend(no_task_entities) + + no_task_entity_ids = [entity["id"] for entity in no_task_entities] + next_entities = session.query(( + "select id, object_type_id, parent_id" + " from TypedContext where parent_id in ({})" + ).format(self.join_query_keys(no_task_entity_ids))).all() + + self.get_all_entities( + session, next_entities, task_entities, other_entities + ) + + def get_entities_hierarchy(self, session, task_entities, other_entities): + task_entity_ids = [entity["id"] for entity in task_entities] + full_task_entities = session.query(( + "select id, name, type_id, parent_id" + " from TypedContext where id in ({})" + ).format(self.join_query_keys(task_entity_ids))) + task_entities_by_parent_id = collections.defaultdict(list) + for entity in full_task_entities: + parent_id = entity["parent_id"] + task_entities_by_parent_id[parent_id].append(entity) + output = [] - if entity.entity_type.lower() == "task": + if not task_entities_by_parent_id: return output - output.append(entity) - for child in entity["children"]: - output.extend(self.get_notask_children(child)) + other_ids = set() + for entity in other_entities: + other_ids.add(entity["id"]) + other_ids |= set(task_entities_by_parent_id.keys()) + + parent_entities = session.query(( + "select id, name from TypedContext where id in ({})" + ).format(self.join_query_keys(other_ids))).all() + + for parent_entity in parent_entities: + parent_id = parent_entity["id"] + output.append(( + parent_entity, + task_entities_by_parent_id[parent_id] + )) + return output def compute_template(self, anatomy, data, anatomy_keys): From 730f79a316a41d9b8c0e4b70fc3934c26a3790fa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Apr 2022 16:32:48 +0200 Subject: [PATCH 197/244] OP-2765 - render creator can change context --- openpype/hosts/aftereffects/plugins/create/create_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index c43ada84b5..8bddbb2e99 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -16,7 +16,7 @@ class RenderCreator(Creator): family = "render" description = "Render creator" - create_allow_context_change = False + create_allow_context_change = True def get_icon(self): return 
resources.get_openpype_splash_filepath() From f20551854f639c758cdce1f187ca3b88c70d4f11 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 22 Apr 2022 16:56:57 +0200 Subject: [PATCH 198/244] change default app name hack --- .../plugins/control_job/perjob/m50__openpype_publish_render.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py index 82a79daf3b..7f5b514253 100644 --- a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py +++ b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py @@ -119,7 +119,7 @@ class OpenPypeContextSelector: # app names and versions, but since app_name is not used # currently down the line (but it is required by OP publish command # right now). - self.context["app_name"] = "maya/2020" + self.context["app_name"] = "celaction/local" return True @staticmethod From 6968d2fdfdd293dd757dc0cddaed7aa4ec2bd37d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 17:04:20 +0200 Subject: [PATCH 199/244] allow multiselection --- .../action_create_folders.py | 106 ++++++++++-------- 1 file changed, 59 insertions(+), 47 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py index 8104818195..81f38e0c39 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_folders.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_folders.py @@ -11,55 +11,59 @@ class CreateFolders(BaseAction): icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg") def discover(self, session, entities, event): - if len(entities) != 1: - return False - - not_allowed = ["assetversion", "project"] - if entities[0].entity_type.lower() in not_allowed: - return False - - return True + for entity_item in event["data"]["selection"]: + if entity_item.get("entityType").lower() in ("task", "show"): + return True + return False def interface(self, session, entities, event): if event["data"].get("values", {}): return - entity = entities[0] - without_interface = True - for child in entity["children"]: - if child["object_type"]["name"].lower() != "task": - without_interface = False + + with_interface = False + for entity in entities: + if entity.entity_type.lower() != "task": + with_interface = True break - self.without_interface = without_interface - if without_interface: + + if "values" not in event["data"]: + event["data"]["values"] = {} + + event["data"]["values"]["with_interface"] = with_interface + if not with_interface: return + title = "Create folders" entity_name = entity["name"] msg = ( "

Do you want create folders also" - " for all children of \"{}\"? " + " for all children of your selection?" ) if entity.entity_type.lower() == "project": entity_name = entity["full_name"] msg = msg.replace(" also", "") msg += " (Project root won't be created if not checked)
    " - items = [] - item_msg = { - "type": "label", - "value": msg.format(entity_name) - } - item_label = { - "type": "label", - "value": "With all chilren entities" - } - item = { - "name": "children_included", - "type": "boolean", - "value": False - } - items.append(item_msg) - items.append(item_label) - items.append(item) + items = [ + { + "type": "label", + "value": msg.format(entity_name) + }, + { + "type": "label", + "value": "With all chilren entities" + }, + { + "name": "children_included", + "type": "boolean", + "value": False + }, + { + "type": "hidden", + "name": "with_interface", + "value": with_interface + } + ] return { "items": items, @@ -68,26 +72,34 @@ class CreateFolders(BaseAction): def launch(self, session, entities, event): '''Callback method for custom action.''' + + if "values" not in event["data"]: + return + + with_interface = event["data"]["values"]["with_interface"] with_childrens = True - if self.without_interface is False: - if "values" not in event["data"]: - return + if with_interface: with_childrens = event["data"]["values"]["children_included"] - entity = entities[0] - if entity.entity_type.lower() == "project": - proj = entity - else: - proj = entity["project"] - project_name = proj["full_name"] - project_code = proj["name"] + filtered_entities = [] + for entity in entities: + low_context_type = entity["context_type"].lower() + if low_context_type in ("task", "show"): + if not with_childrens and low_context_type == "show": + continue + filtered_entities.append(entity) - if entity.entity_type.lower() == 'project' and with_childrens is False: + if not filtered_entities: return { - 'success': True, - 'message': 'Nothing was created' + "success": True, + "message": 'Nothing was created' } + project_entity = self.get_project_from_entity(filtered_entities[0]) + + project_name = project_entity["full_name"] + project_code = project_entity["name"] + task_entities = [] other_entities = [] self.get_all_entities( @@ -209,7 +221,7 @@ class CreateFolders(BaseAction): no_task_entity_ids = [entity["id"] for entity in no_task_entities] next_entities = session.query(( - "select id, object_type_id, parent_id" + "select id, parent_id" " from TypedContext where parent_id in ({})" ).format(self.join_query_keys(no_task_entity_ids))).all() From 015d0b3e15d4144f79414c77631db85d271ed0d0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 22 Apr 2022 17:06:38 +0200 Subject: [PATCH 200/244] removed unused imports --- .../event_handlers_user/action_create_project_structure.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py index 94f359c317..ebea8872f9 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_project_structure.py @@ -1,6 +1,4 @@ -import os import re -import json from openpype_modules.ftrack.lib import BaseAction, statics_icon from openpype.api import get_project_basic_paths, create_project_folders From d4bc73cad4e9428e8e817d2f59bffe620316caeb Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:10:51 +0200 Subject: [PATCH 201/244] Add @mkolar as a contributor --- .all-contributorsrc | 26 ++++++++++++++++++++++++++ README.md | 23 +++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 .all-contributorsrc diff --git a/.all-contributorsrc b/.all-contributorsrc new file mode 100644 
index 0000000000..c5e3d06746 --- /dev/null +++ b/.all-contributorsrc @@ -0,0 +1,26 @@ +{ + "projectName": "OpenPype", + "projectOwner": "pypeclub", + "repoType": "github", + "repoHost": "https://github.com", + "files": [ + "README.md" + ], + "imageSize": 80, + "commit": true, + "commitConvention": "none", + "contributors": [ + { + "login": "mkolar", + "name": "Milan Kolar", + "avatar_url": "https://avatars.githubusercontent.com/u/3333008?v=4", + "profile": "http://pype.club/", + "contributions": [ + "code", + "doc", + "infra" + ] + } + ], + "contributorsPerLine": 7 +} diff --git a/README.md b/README.md index 0e450fc48d..5f3b98a339 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ + +[![All Contributors](https://img.shields.io/badge/all_contributors-1-orange.svg?style=flat-square)](#contributors-) + OpenPype ==== @@ -283,3 +286,23 @@ Running tests To run tests, execute `.\tools\run_tests(.ps1|.sh)`. **Note that it needs existing virtual environment.** + +## Contributors ✨ + +Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)): + + + + + + + + +

    Milan Kolar

    πŸ’» πŸ“– πŸš‡
    + + + + + + +This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! \ No newline at end of file From 832c2ab7eb1b9ebd8bff392943fdad080229dded Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:11:05 +0200 Subject: [PATCH 202/244] Add @antirotor as a contributor --- .all-contributorsrc | 11 +++++++++++ README.md | 3 ++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index c5e3d06746..1ca62667ee 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -20,6 +20,17 @@ "doc", "infra" ] + }, + { + "login": "antirotor", + "name": "OndΕ™ej Samohel", + "avatar_url": "https://avatars.githubusercontent.com/u/33513211?v=4", + "profile": "https://github.com/antirotor", + "contributions": [ + "code", + "doc", + "infra" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 5f3b98a339..4ff733eea5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-1-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-2-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -297,6 +297,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d +

    Milan Kolar

    πŸ’» πŸ“– πŸš‡

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡
    From 6e1cc50c0dbf10235df2af3b9a3c1b5bd5e86628 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:11:14 +0200 Subject: [PATCH 203/244] Add @iLLiCiTiT as a contributor --- .all-contributorsrc | 11 +++++++++++ README.md | 3 ++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1ca62667ee..15c43a502e 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -31,6 +31,17 @@ "doc", "infra" ] + }, + { + "login": "iLLiCiTiT", + "name": "Jakub Trllo", + "avatar_url": "https://avatars.githubusercontent.com/u/43494761?v=4", + "profile": "https://github.com/iLLiCiTiT", + "contributions": [ + "code", + "doc", + "infra" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 4ff733eea5..01ca3f26c7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-2-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-3-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -298,6 +298,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Milan Kolar

    πŸ’» πŸ“– πŸš‡
    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ +
    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ From 912af06a94d741bb2c7b6c7eb99f4e67bd057411 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:12:47 +0200 Subject: [PATCH 204/244] Update @mkolar as a contributor --- .all-contributorsrc | 9 ++++++++- README.md | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 15c43a502e..1a2287b39d 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -18,7 +18,14 @@ "contributions": [ "code", "doc", - "infra" + "infra", + "business", + "content", + "fundingFinding", + "ideas", + "maintenance", + "projectManagement", + "review" ] }, { diff --git a/README.md b/README.md index 01ca3f26c7..157cba8391 100644 --- a/README.md +++ b/README.md @@ -296,7 +296,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - + From 2bf0a76898c494ed4c527172a865a31038fcd200 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:15:27 +0200 Subject: [PATCH 205/244] Add @jakubjezek001 as a contributor --- .all-contributorsrc | 23 +++++++++++++++++++---- README.md | 9 +++++---- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1a2287b39d..bbc613aa1a 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -22,10 +22,10 @@ "business", "content", "fundingFinding", - "ideas", "maintenance", "projectManagement", - "review" + "review", + "mentoring" ] }, { @@ -36,7 +36,11 @@ "contributions": [ "code", "doc", - "infra" + "infra", + "content", + "review", + "maintenance", + "mentoring" ] }, { @@ -47,7 +51,18 @@ "contributions": [ "code", "doc", - "infra" + "infra", + "review", + "maintenance" + ] + }, + { + "login": "jakubjezek001", + "name": "Jakub JeΕΎek", + "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", + "profile": "https://www.linkedin.com/in/jakubjezek79", + "contributions": [ + "code" ] } ], diff --git a/README.md b/README.md index 157cba8391..b313b85da5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-3-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-4-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -296,9 +296,10 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d

    Milan Kolar

    πŸ’» πŸ“– πŸš‡

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” πŸ€” 🚧 πŸ“† πŸ‘€

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡
    - - - + + + +

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” πŸ€” 🚧 πŸ“† πŸ‘€

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” 🚧 πŸ“† πŸ‘€ πŸ§‘β€πŸ«

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ«

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧

    Jakub JeΕΎek

    πŸ’»
    From 2709e0c86db952936e30ae097c10f35ad42d051f Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:16:39 +0200 Subject: [PATCH 206/244] Add @kalisp as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index bbc613aa1a..a52e278a1d 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -64,6 +64,15 @@ "contributions": [ "code" ] + }, + { + "login": "kalisp", + "name": "Petr Kalis", + "avatar_url": "https://avatars.githubusercontent.com/u/4457962?v=4", + "profile": "https://github.com/kalisp", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index b313b85da5..cc60f9a194 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-4-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -300,6 +300,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ«
    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧
    Jakub JeΕΎek

    πŸ’» +
    Petr Kalis

    πŸ’» From 8be18076f611d79a0761d3a2aa57621dc8c9f358 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:16:49 +0200 Subject: [PATCH 207/244] Add @aardschok as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index a52e278a1d..422a3112f8 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -73,6 +73,15 @@ "contributions": [ "code" ] + }, + { + "login": "aardschok", + "name": "Wijnand Koreman", + "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", + "profile": "https://github.com/aardschok", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index cc60f9a194..2272882092 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-6-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -301,6 +301,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧
    Jakub JeΕΎek

    πŸ’»
    Petr Kalis

    πŸ’» +
    Wijnand Koreman

    πŸ’» From 258af755f4872a0a26c4f991a0e799897c7552f2 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:16:57 +0200 Subject: [PATCH 208/244] Add @BigRoy as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 422a3112f8..ed0a4d729c 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -82,6 +82,15 @@ "contributions": [ "code" ] + }, + { + "login": "BigRoy", + "name": "Roy Nieterau", + "avatar_url": "https://avatars.githubusercontent.com/u/2439881?v=4", + "profile": "http://www.colorbleed.nl/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 2272882092..df652f97e7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-6-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-7-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -302,6 +302,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jakub JeΕΎek

    πŸ’»
    Petr Kalis

    πŸ’»
    Wijnand Koreman

    πŸ’» +
    Roy Nieterau

    πŸ’» From b8f8f3ca1946893193c1dfafe570d1dbb3521c67 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:35 +0200 Subject: [PATCH 209/244] Add @tokejepsen as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 5 ++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index ed0a4d729c..793b03cc78 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -91,6 +91,15 @@ "contributions": [ "code" ] + }, + { + "login": "tokejepsen", + "name": "Toke Jepsen", + "avatar_url": "https://avatars.githubusercontent.com/u/1860085?v=4", + "profile": "https://github.com/tokejepsen", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index df652f97e7..d86ec93ae8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-7-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-8-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -304,6 +304,9 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Wijnand Koreman

    πŸ’»
    Roy Nieterau

    πŸ’» + +
    Toke Jepsen

    πŸ’» + From 8433d789f1cdb7130278df5c4fefe9bc188bdc69 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:46 +0200 Subject: [PATCH 210/244] Add @simonebarbieri as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 793b03cc78..1e800ad238 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -100,6 +100,15 @@ "contributions": [ "code" ] + }, + { + "login": "simonebarbieri", + "name": "Simone Barbieri", + "avatar_url": "https://avatars.githubusercontent.com/u/1087869?v=4", + "profile": "https://barbierisimone.com/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index d86ec93ae8..b7bc0651c5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-8-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-9-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -306,6 +306,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Toke Jepsen

    πŸ’» +
    Simone Barbieri

    πŸ’» From 37e29dd7ae7ed16b3eeabf16a214812a82e7a9ce Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:52 +0200 Subject: [PATCH 211/244] Add @karimmozilla as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1e800ad238..6b7d654848 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -109,6 +109,15 @@ "contributions": [ "code" ] + }, + { + "login": "karimmozilla", + "name": "karimmozilla", + "avatar_url": "https://avatars.githubusercontent.com/u/82811760?v=4", + "profile": "http://karimmozilla.xyz/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index b7bc0651c5..8c41ce69a9 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-9-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-10-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -307,6 +307,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Toke Jepsen

    πŸ’»
    Simone Barbieri

    πŸ’» +
    karimmozilla

    πŸ’» From f70e67feb6200d5f56359410794cf3c3e7c1d122 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:17:59 +0200 Subject: [PATCH 212/244] Add @zhoub as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 6b7d654848..cbcafe14d2 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -118,6 +118,15 @@ "contributions": [ "code" ] + }, + { + "login": "zhoub", + "name": "Bo Zhou", + "avatar_url": "https://avatars.githubusercontent.com/u/1798206?v=4", + "profile": "http://jedimaster.cnblogs.com/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 8c41ce69a9..ad6dfb52fb 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-10-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-11-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -308,6 +308,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Toke Jepsen

    πŸ’»
    Simone Barbieri

    πŸ’»
    karimmozilla

    πŸ’» +
    Bo Zhou

    πŸ’» From 35ae3e08d07dd274de30833405ac0bacbd973923 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:24:22 +0200 Subject: [PATCH 213/244] Add @ClementHector as a contributor --- .all-contributorsrc | 43 ++++++++++++++++++++++++++++++++----------- README.md | 9 +++++---- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index cbcafe14d2..4a21a7d3fb 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -28,6 +28,22 @@ "mentoring" ] }, + { + "login": "jakubjezek001", + "name": "Jakub JeΕΎek", + "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", + "profile": "https://www.linkedin.com/in/jakubjezek79", + "contributions": [ + "code", + "doc", + "infra", + "content", + "review", + "maintenance", + "mentoring", + "projectManagement" + ] + }, { "login": "antirotor", "name": "OndΕ™ej Samohel", @@ -40,7 +56,8 @@ "content", "review", "maintenance", - "mentoring" + "mentoring", + "projectManagement" ] }, { @@ -56,22 +73,17 @@ "maintenance" ] }, - { - "login": "jakubjezek001", - "name": "Jakub JeΕΎek", - "avatar_url": "https://avatars.githubusercontent.com/u/40640033?v=4", - "profile": "https://www.linkedin.com/in/jakubjezek79", - "contributions": [ - "code" - ] - }, { "login": "kalisp", "name": "Petr Kalis", "avatar_url": "https://avatars.githubusercontent.com/u/4457962?v=4", "profile": "https://github.com/kalisp", "contributions": [ - "code" + "code", + "doc", + "infra", + "review", + "maintenance" ] }, { @@ -127,6 +139,15 @@ "contributions": [ "code" ] + }, + { + "login": "ClementHector", + "name": "ClΓ©ment Hector", + "avatar_url": "https://avatars.githubusercontent.com/u/7068597?v=4", + "profile": "https://www.linkedin.com/in/clementhector/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index ad6dfb52fb..83e117ea40 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-11-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-12-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -297,10 +297,10 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - + + - - + @@ -309,6 +309,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d +

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” 🚧 πŸ“† πŸ‘€ πŸ§‘β€πŸ«

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ«

    Jakub JeΕΎek

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“†

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“†

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧

    Jakub JeΕΎek

    πŸ’»

    Petr Kalis

    πŸ’»

    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧

    Wijnand Koreman

    πŸ’»

    Roy Nieterau

    πŸ’»

    Simone Barbieri

    πŸ’»

    karimmozilla

    πŸ’»

    Bo Zhou

    πŸ’»

    ClΓ©ment Hector

    πŸ’»
    From c60dd2e75963f0aa185d3c2f8b0297f6341ecc2f Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:24:38 +0200 Subject: [PATCH 214/244] Add @ClementHector as a contributor --- .all-contributorsrc | 30 ++++++++++++++++++++++-------- README.md | 16 ++++++++-------- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 4a21a7d3fb..47d38fa911 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -25,7 +25,8 @@ "maintenance", "projectManagement", "review", - "mentoring" + "mentoring", + "question" ] }, { @@ -41,7 +42,8 @@ "review", "maintenance", "mentoring", - "projectManagement" + "projectManagement", + "question" ] }, { @@ -57,7 +59,8 @@ "review", "maintenance", "mentoring", - "projectManagement" + "projectManagement", + "question" ] }, { @@ -70,7 +73,8 @@ "doc", "infra", "review", - "maintenance" + "maintenance", + "question" ] }, { @@ -83,7 +87,8 @@ "doc", "infra", "review", - "maintenance" + "maintenance", + "question" ] }, { @@ -101,7 +106,11 @@ "avatar_url": "https://avatars.githubusercontent.com/u/2439881?v=4", "profile": "http://www.colorbleed.nl/", "contributions": [ - "code" + "code", + "doc", + "review", + "mentoring", + "question" ] }, { @@ -110,7 +119,11 @@ "avatar_url": "https://avatars.githubusercontent.com/u/1860085?v=4", "profile": "https://github.com/tokejepsen", "contributions": [ - "code" + "code", + "doc", + "review", + "mentoring", + "question" ] }, { @@ -119,7 +132,8 @@ "avatar_url": "https://avatars.githubusercontent.com/u/1087869?v=4", "profile": "https://barbierisimone.com/", "contributions": [ - "code" + "code", + "doc" ] }, { diff --git a/README.md b/README.md index 83e117ea40..06659f26d3 100644 --- a/README.md +++ b/README.md @@ -296,17 +296,17 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - - - - - + + + + + - + - - + + From 60d8dafa30f111bfc5d6f964c3ae90243d8266c9 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:25:20 +0200 Subject: [PATCH 215/244] Add @davidlatwe as a contributor --- .all-contributorsrc | 13 ++++++++++++- README.md | 5 +++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 47d38fa911..ac51161163 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -160,7 +160,18 @@ "avatar_url": "https://avatars.githubusercontent.com/u/7068597?v=4", "profile": "https://www.linkedin.com/in/clementhector/", "contributions": [ - "code" + "code", + "review" + ] + }, + { + "login": "davidlatwe", + "name": "David Lai", + "avatar_url": "https://avatars.githubusercontent.com/u/3357009?v=4", + "profile": "https://twitter.com/davidlatwe", + "contributions": [ + "code", + "review" ] } ], diff --git a/README.md b/README.md index 06659f26d3..7621bd5798 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-12-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-13-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -309,7 +309,8 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - + +

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” 🚧 πŸ“† πŸ‘€ πŸ§‘β€πŸ«

    Jakub JeΕΎek

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“†

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“†

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧

    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” 🚧 πŸ“† πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Jakub JeΕΎek

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬

    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬

    Wijnand Koreman

    πŸ’»

    Roy Nieterau

    πŸ’»

    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Toke Jepsen

    πŸ’»

    Simone Barbieri

    πŸ’»

    Toke Jepsen

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Simone Barbieri

    πŸ’» πŸ“–

    karimmozilla

    πŸ’»

    Bo Zhou

    πŸ’»

    ClΓ©ment Hector

    πŸ’»

    Simone Barbieri

    πŸ’» πŸ“–

    karimmozilla

    πŸ’»

    Bo Zhou

    πŸ’»

    ClΓ©ment Hector

    πŸ’»

    ClΓ©ment Hector

    πŸ’» πŸ‘€

    David Lai

    πŸ’» πŸ‘€
    From f8ce0055dff275d4011ad97a8aa625e75d25a3b6 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:26:32 +0200 Subject: [PATCH 216/244] Add @2-REC as a contributor --- .all-contributorsrc | 10 ++++++++++ README.md | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index ac51161163..2dbd36c2ff 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -173,6 +173,16 @@ "code", "review" ] + }, + { + "login": "2-REC", + "name": "Derek ", + "avatar_url": "https://avatars.githubusercontent.com/u/42170307?v=4", + "profile": "https://github.com/2-REC", + "contributions": [ + "code", + "review" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 7621bd5798..66189dd430 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-13-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-14-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -311,6 +311,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Bo Zhou

    πŸ’»
    ClΓ©ment Hector

    πŸ’» πŸ‘€
    David Lai

    πŸ’» πŸ‘€ +
    Derek

    πŸ’» πŸ‘€ From ad8e91dd918775eb1b3e05359e6ca02d137442a5 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:26:56 +0200 Subject: [PATCH 217/244] Add @Allan-I as a contributor --- .all-contributorsrc | 14 +++++++++++++- README.md | 7 +++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 2dbd36c2ff..32e664e0fc 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -181,7 +181,19 @@ "profile": "https://github.com/2-REC", "contributions": [ "code", - "review" + "review", + "doc" + ] + }, + { + "login": "Allan-I", + "name": "Allan I. A.", + "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", + "profile": "https://github.com/Allan-I", + "contributions": [ + "code", + "review", + "doc" ] } ], diff --git a/README.md b/README.md index 66189dd430..44d23e2039 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-14-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-15-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -311,7 +311,10 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Bo Zhou

    πŸ’»
    ClΓ©ment Hector

    πŸ’» πŸ‘€
    David Lai

    πŸ’» πŸ‘€ -
    Derek

    πŸ’» πŸ‘€ +
    Derek

    πŸ’» πŸ‘€ πŸ“– + + +
    Allan I. A.

    πŸ’» πŸ‘€ πŸ“– From b8c37088f4c70851d570d9363c5c5c608463ad32 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:28:35 +0200 Subject: [PATCH 218/244] Add @gabormarinov as a contributor --- .all-contributorsrc | 10 ++++++++++ README.md | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 32e664e0fc..6564be1cd1 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -195,6 +195,16 @@ "review", "doc" ] + }, + { + "login": "gabormarinov", + "name": "GΓ‘bor Marinov", + "avatar_url": "https://avatars.githubusercontent.com/u/8620515?v=4", + "profile": "https://github.com/gabormarinov", + "contributions": [ + "code", + "doc" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 44d23e2039..8da8fae349 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-15-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-16-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -315,6 +315,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Allan I. A.

    πŸ’» πŸ‘€ πŸ“– +
    GΓ‘bor Marinov

    πŸ’» πŸ“– From 43e29a3a2eb3c0ae9e7af7a16c4e10418eb8986d Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:28:47 +0200 Subject: [PATCH 219/244] Add @gabormarinov as a contributor --- .all-contributorsrc | 4 +--- README.md | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 6564be1cd1..4260b2fb4b 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -191,9 +191,7 @@ "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", "profile": "https://github.com/Allan-I", "contributions": [ - "code", - "review", - "doc" + "code" ] }, { diff --git a/README.md b/README.md index 8da8fae349..6524868bae 100644 --- a/README.md +++ b/README.md @@ -314,7 +314,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Derek

    πŸ’» πŸ‘€ πŸ“– -
    Allan I. A.

    πŸ’» πŸ‘€ πŸ“– +
    Allan I. A.

    πŸ’»
    GΓ‘bor Marinov

    πŸ’» πŸ“– From 8dbdf655a986cfe5b7545a551c990e73b0ffd886 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:29:24 +0200 Subject: [PATCH 220/244] Add @icyvapor as a contributor --- .all-contributorsrc | 10 ++++++++++ README.md | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 4260b2fb4b..fbca23716c 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -203,6 +203,16 @@ "code", "doc" ] + }, + { + "login": "icyvapor", + "name": "icyvapor", + "avatar_url": "https://avatars.githubusercontent.com/u/1195278?v=4", + "profile": "https://github.com/icyvapor", + "contributions": [ + "code", + "doc" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 6524868bae..6f252f3035 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-16-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-17-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -316,6 +316,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Allan I. A.

    πŸ’»
    GΓ‘bor Marinov

    πŸ’» πŸ“– +
    icyvapor

    πŸ’» πŸ“– From 8265882b22b907fdaf98a069a5d3952e4e36b1ea Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:29:48 +0200 Subject: [PATCH 221/244] Add @jlorrain as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index fbca23716c..13b7a627f7 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -213,6 +213,15 @@ "code", "doc" ] + }, + { + "login": "jlorrain", + "name": "JΓ©rΓ΄me LORRAIN", + "avatar_url": "https://avatars.githubusercontent.com/u/7955673?v=4", + "profile": "https://github.com/jlorrain", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 6f252f3035..d01308c843 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-17-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-18-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -317,6 +317,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Allan I. A.

    πŸ’»
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“– +
    JΓ©rΓ΄me LORRAIN

    πŸ’» From 3377a9e05dc25c516846d448f1a0cd8d2551d3b1 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:30:12 +0200 Subject: [PATCH 222/244] Add @dmo-j-cube as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 13b7a627f7..1f969a61bb 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -222,6 +222,15 @@ "contributions": [ "code" ] + }, + { + "login": "dmo-j-cube", + "name": "David Morris-Oliveros", + "avatar_url": "https://avatars.githubusercontent.com/u/89823400?v=4", + "profile": "https://github.com/dmo-j-cube", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index d01308c843..e853001aec 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-18-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-19-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -318,6 +318,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’» +
    David Morris-Oliveros

    πŸ’» From 25ca9c0e8aa4f5f38beb25ee2c9ab709e9c00deb Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:30:29 +0200 Subject: [PATCH 223/244] Add @BenoitConnan as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 1f969a61bb..15e0dbca5b 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -231,6 +231,15 @@ "contributions": [ "code" ] + }, + { + "login": "BenoitConnan", + "name": "BenoitConnan", + "avatar_url": "https://avatars.githubusercontent.com/u/82808268?v=4", + "profile": "https://github.com/BenoitConnan", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index e853001aec..5778969ac1 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-19-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-20-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -319,6 +319,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’»
    David Morris-Oliveros

    πŸ’» +
    BenoitConnan

    πŸ’» From 6ab23cd3a4b02ad1d0d8c50983cb54b5da666aa2 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:30:41 +0200 Subject: [PATCH 224/244] Add @Malthaldar as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 15e0dbca5b..f5650dd82c 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -240,6 +240,15 @@ "contributions": [ "code" ] + }, + { + "login": "Malthaldar", + "name": "Malthaldar", + "avatar_url": "https://avatars.githubusercontent.com/u/33671694?v=4", + "profile": "https://github.com/Malthaldar", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 5778969ac1..aaaf719caa 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-20-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-21-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -320,6 +320,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    JΓ©rΓ΄me LORRAIN

    πŸ’»
    David Morris-Oliveros

    πŸ’»
    BenoitConnan

    πŸ’» +
    Malthaldar

    πŸ’» From 6d4c29831c859c6cf1006b9c56637b54fb34505d Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:31:22 +0200 Subject: [PATCH 225/244] Add @64qam as a contributor --- .all-contributorsrc | 15 +++++++++++++++ README.md | 5 ++++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index f5650dd82c..9b186f2a40 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -249,6 +249,21 @@ "contributions": [ "code" ] + }, + { + "login": "64qam", + "name": "64qam", + "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", + "profile": "https://github.com/64qam", + "contributions": [ + "code", + "review", + "doc", + "infra", + "projectManagement", + "maintenance", + "content" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index aaaf719caa..56bbc92768 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-21-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-22-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -322,6 +322,9 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    BenoitConnan

    πŸ’»
    Malthaldar

    πŸ’» + +
    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ + From 46d425bd8dbc5e12984638073a559ad87f69c117 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:33:17 +0200 Subject: [PATCH 226/244] Add @jrsndl as a contributor --- .all-contributorsrc | 30 ++++++++++++++++++++++-------- README.md | 11 ++++++----- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 9b186f2a40..ea1d162c4e 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -91,6 +91,21 @@ "question" ] }, + { + "login": "64qam", + "name": "64qam", + "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", + "profile": "https://github.com/64qam", + "contributions": [ + "code", + "review", + "doc", + "infra", + "projectManagement", + "maintenance", + "content" + ] + }, { "login": "aardschok", "name": "Wijnand Koreman", @@ -251,18 +266,17 @@ ] }, { - "login": "64qam", - "name": "64qam", - "avatar_url": "https://avatars.githubusercontent.com/u/26925793?v=4", - "profile": "https://github.com/64qam", + "login": "jrsndl", + "name": "Jiri Sindelar", + "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", + "profile": "https://github.com/jrsndl", "contributions": [ "code", "review", "doc", - "infra", - "projectManagement", - "maintenance", - "content" + "content", + "tutorial", + "userTesting" ] } ], diff --git a/README.md b/README.md index 56bbc92768..0c8bc93aa8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-22-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-23-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -301,29 +301,30 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬
    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬
    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬ +
    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹
    Wijnand Koreman

    πŸ’» -
    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬ +
    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬
    Toke Jepsen

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬
    Simone Barbieri

    πŸ’» πŸ“–
    karimmozilla

    πŸ’»
    Bo Zhou

    πŸ’»
    ClΓ©ment Hector

    πŸ’» πŸ‘€
    David Lai

    πŸ’» πŸ‘€ -
    Derek

    πŸ’» πŸ‘€ πŸ“– +
    Derek

    πŸ’» πŸ‘€ πŸ“–
    Allan I. A.

    πŸ’»
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’»
    David Morris-Oliveros

    πŸ’»
    BenoitConnan

    πŸ’» -
    Malthaldar

    πŸ’» -
    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ +
    Malthaldar

    πŸ’» +
    Jiri Sindelar

    πŸ’» πŸ‘€ πŸ“– πŸ–‹ βœ… πŸ““ From 5c793bb8ecdbf97be39ace78d90ea5b3187c2d41 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:33:24 +0200 Subject: [PATCH 227/244] Add @jrsndl as a contributor --- .all-contributorsrc | 36 ++++++++++++++++++------------------ README.md | 8 ++++---- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index ea1d162c4e..737bf7e174 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -106,15 +106,6 @@ "content" ] }, - { - "login": "aardschok", - "name": "Wijnand Koreman", - "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", - "profile": "https://github.com/aardschok", - "contributions": [ - "code" - ] - }, { "login": "BigRoy", "name": "Roy Nieterau", @@ -160,6 +151,24 @@ "code" ] }, + { + "login": "Allan-I", + "name": "Allan I. A.", + "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", + "profile": "https://github.com/Allan-I", + "contributions": [ + "code" + ] + }, + { + "login": "aardschok", + "name": "Wijnand Koreman", + "avatar_url": "https://avatars.githubusercontent.com/u/26920875?v=4", + "profile": "https://github.com/aardschok", + "contributions": [ + "code" + ] + }, { "login": "zhoub", "name": "Bo Zhou", @@ -200,15 +209,6 @@ "doc" ] }, - { - "login": "Allan-I", - "name": "Allan I. A.", - "avatar_url": "https://avatars.githubusercontent.com/u/76656700?v=4", - "profile": "https://github.com/Allan-I", - "contributions": [ - "code" - ] - }, { "login": "gabormarinov", "name": "GΓ‘bor Marinov", diff --git a/README.md b/README.md index 0c8bc93aa8..1a239d4f03 100644 --- a/README.md +++ b/README.md @@ -302,20 +302,20 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬
    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬
    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ -
    Wijnand Koreman

    πŸ’» +
    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬ -
    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬
    Toke Jepsen

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬
    Simone Barbieri

    πŸ’» πŸ“–
    karimmozilla

    πŸ’» +
    Allan I. A.

    πŸ’» +
    Wijnand Koreman

    πŸ’»
    Bo Zhou

    πŸ’»
    ClΓ©ment Hector

    πŸ’» πŸ‘€ -
    David Lai

    πŸ’» πŸ‘€ +
    David Lai

    πŸ’» πŸ‘€
    Derek

    πŸ’» πŸ‘€ πŸ“– -
    Allan I. A.

    πŸ’»
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’» From 6f7e9b749e7e3ff6853ad68cd7e7855374d4f626 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:34:35 +0200 Subject: [PATCH 228/244] Update @jrsndl as a contributor --- .all-contributorsrc | 31 ++++++++++++++++--------------- README.md | 8 ++++---- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 737bf7e174..156cda9324 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -103,7 +103,8 @@ "infra", "projectManagement", "maintenance", - "content" + "content", + "userTesting" ] }, { @@ -132,6 +133,20 @@ "question" ] }, + { + "login": "jrsndl", + "name": "Jiri Sindelar", + "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", + "profile": "https://github.com/jrsndl", + "contributions": [ + "code", + "review", + "doc", + "content", + "tutorial", + "userTesting" + ] + }, { "login": "simonebarbieri", "name": "Simone Barbieri", @@ -264,20 +279,6 @@ "contributions": [ "code" ] - }, - { - "login": "jrsndl", - "name": "Jiri Sindelar", - "avatar_url": "https://avatars.githubusercontent.com/u/45896205?v=4", - "profile": "https://github.com/jrsndl", - "contributions": [ - "code", - "review", - "doc", - "content", - "tutorial", - "userTesting" - ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 1a239d4f03..b29d5c0c9a 100644 --- a/README.md +++ b/README.md @@ -301,30 +301,30 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬
    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬
    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬ -
    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ +
    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ πŸ““
    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬
    Toke Jepsen

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬ +
    Jiri Sindelar

    πŸ’» πŸ‘€ πŸ“– πŸ–‹ βœ… πŸ““
    Simone Barbieri

    πŸ’» πŸ“–
    karimmozilla

    πŸ’»
    Allan I. A.

    πŸ’»
    Wijnand Koreman

    πŸ’»
    Bo Zhou

    πŸ’» -
    ClΓ©ment Hector

    πŸ’» πŸ‘€ +
    ClΓ©ment Hector

    πŸ’» πŸ‘€
    David Lai

    πŸ’» πŸ‘€
    Derek

    πŸ’» πŸ‘€ πŸ“–
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’»
    David Morris-Oliveros

    πŸ’» -
    BenoitConnan

    πŸ’» +
    BenoitConnan

    πŸ’»
    Malthaldar

    πŸ’» -
    Jiri Sindelar

    πŸ’» πŸ‘€ πŸ“– πŸ–‹ βœ… πŸ““ From 54d5240efa54617e44290b9d8bfbad1f1004fc18 Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:34:47 +0200 Subject: [PATCH 229/244] Add @svenneve as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 156cda9324..b8f621afcb 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -279,6 +279,15 @@ "contributions": [ "code" ] + }, + { + "login": "svenneve", + "name": "Sven Neve", + "avatar_url": "https://avatars.githubusercontent.com/u/2472863?v=4", + "profile": "http://www.svenneve.com/", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index b29d5c0c9a..061b4ec707 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-23-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-24-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -325,6 +325,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    BenoitConnan

    πŸ’»
    Malthaldar

    πŸ’» +
    Sven Neve

    πŸ’» From 1d5406b378771bf88360a94a72b2b378945e94ca Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:34:58 +0200 Subject: [PATCH 230/244] Add @zafrs as a contributor --- .all-contributorsrc | 9 +++++++++ README.md | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index b8f621afcb..2578651ee4 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -288,6 +288,15 @@ "contributions": [ "code" ] + }, + { + "login": "zafrs", + "name": "zafrs", + "avatar_url": "https://avatars.githubusercontent.com/u/26890002?v=4", + "profile": "https://github.com/zafrs", + "contributions": [ + "code" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 061b4ec707..2abb7791ff 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-24-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-25-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -326,6 +326,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    BenoitConnan

    πŸ’»
    Malthaldar

    πŸ’»
    Sven Neve

    πŸ’» +
    zafrs

    πŸ’»

From 6c560ac3f7d514fd90ec4ebd5e65cd6093f1e985 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Fri, 22 Apr 2022 17:35:48 +0200
Subject: [PATCH 231/244] ignore node.js files

---
 .gitignore | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitignore b/.gitignore
index fa3fae1ad2..28cfb4b1e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -70,6 +70,8 @@ coverage.xml
 
 ##################
 node_modules
 package-lock.json
+package.json
+yarn.lock
 
 openpype/premiere/ppro/js/debug.log

From 8d45b649ec97d4dd041ab6617ba67658141d64a9 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 22 Apr 2022 17:37:31 +0200
Subject: [PATCH 232/244] remove the need to set AVALON_APP_NAME

---
 .../perjob/m50__openpype_publish_render.py |  7 ++++---
 openpype/pype_commands.py                  | 15 ++++++++-------
 2 files changed, 12 insertions(+), 10 deletions(-)

diff --git a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py
index 7f5b514253..cdc37588cd 100644
--- a/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py
+++ b/openpype/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py
@@ -119,7 +119,7 @@ class OpenPypeContextSelector:
         # app names and versions, but since app_name is not used
         # currently down the line (but it is required by OP publish command
         # right now).
-        self.context["app_name"] = "celaction/local"
+        # self.context["app_name"] = "maya/2022"
         return True
 
     @staticmethod
@@ -139,7 +139,8 @@ class OpenPypeContextSelector:
         env = {"AVALON_PROJECT": str(self.context.get("project")),
                "AVALON_ASSET": str(self.context.get("asset")),
                "AVALON_TASK": str(self.context.get("task")),
-               "AVALON_APP_NAME": str(self.context.get("app_name"))}
+               # "AVALON_APP_NAME": str(self.context.get("app_name"))
+               }
 
         print(">>> setting environment:")
         for k, v in env.items():
@@ -184,7 +185,7 @@ selector = OpenPypeContextSelector()
 selector.context["project"] = os.getenv("AVALON_PROJECT")
 selector.context["asset"] = os.getenv("AVALON_ASSET")
 selector.context["task"] = os.getenv("AVALON_TASK")
-selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
+# selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
 
 # if anything inside is None, scratch the whole thing and
 # ask user for context.
diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index e0c8847040..bd2008e144 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -125,13 +125,14 @@ class PypeCommands: if not any(paths): raise RuntimeError("No publish paths specified") - env = get_app_environments_for_context( - os.environ["AVALON_PROJECT"], - os.environ["AVALON_ASSET"], - os.environ["AVALON_TASK"], - os.environ["AVALON_APP_NAME"] - ) - os.environ.update(env) + if os.getenv("AVALON_APP_NAME"): + env = get_app_environments_for_context( + os.environ["AVALON_PROJECT"], + os.environ["AVALON_ASSET"], + os.environ["AVALON_TASK"], + os.environ["AVALON_APP_NAME"] + ) + os.environ.update(env) pyblish.api.register_host("shell") From 61fa6b23ac94ac158df290d22066999bee21514e Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:41:36 +0200 Subject: [PATCH 233/244] update style --- .all-contributorsrc | 2 +- README.md | 50 ++++++++++++++++++++++----------------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 2578651ee4..492d6f2f1f 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -6,7 +6,7 @@ "files": [ "README.md" ], - "imageSize": 80, + "imageSize": 100, "commit": true, "commitConvention": "none", "contributors": [ diff --git a/README.md b/README.md index 2abb7791ff..448ca1a263 100644 --- a/README.md +++ b/README.md @@ -296,37 +296,37 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d - - - - - - - + + + + + + + - - - - - - - + + + + + + + - - - - - - - + + + + + + + - - - - + + + +

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” 🚧 πŸ“† πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Jakub JeΕΎek

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬

    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬

    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ πŸ““

    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Milan Kolar

    πŸ’» πŸ“– πŸš‡ πŸ’Ό πŸ–‹ πŸ” 🚧 πŸ“† πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Jakub JeΕΎek

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬

    OndΕ™ej Samohel

    πŸ’» πŸ“– πŸš‡ πŸ–‹ πŸ‘€ 🚧 πŸ§‘β€πŸ« πŸ“† πŸ’¬

    Jakub Trllo

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬

    Petr Kalis

    πŸ’» πŸ“– πŸš‡ πŸ‘€ 🚧 πŸ’¬

    64qam

    πŸ’» πŸ‘€ πŸ“– πŸš‡ πŸ“† 🚧 πŸ–‹ πŸ““

    Roy Nieterau

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Toke Jepsen

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Jiri Sindelar

    πŸ’» πŸ‘€ πŸ“– πŸ–‹ βœ… πŸ““

    Simone Barbieri

    πŸ’» πŸ“–

    karimmozilla

    πŸ’»

    Allan I. A.

    πŸ’»

    Wijnand Koreman

    πŸ’»

    Bo Zhou

    πŸ’»

    Toke Jepsen

    πŸ’» πŸ“– πŸ‘€ πŸ§‘β€πŸ« πŸ’¬

    Jiri Sindelar

    πŸ’» πŸ‘€ πŸ“– πŸ–‹ βœ… πŸ““

    Simone Barbieri

    πŸ’» πŸ“–

    karimmozilla

    πŸ’»

    Allan I. A.

    πŸ’»

    Wijnand Koreman

    πŸ’»

    Bo Zhou

    πŸ’»

    ClΓ©ment Hector

    πŸ’» πŸ‘€

    David Lai

    πŸ’» πŸ‘€

    Derek

    πŸ’» πŸ‘€ πŸ“–

    GΓ‘bor Marinov

    πŸ’» πŸ“–

    icyvapor

    πŸ’» πŸ“–

    JΓ©rΓ΄me LORRAIN

    πŸ’»

    David Morris-Oliveros

    πŸ’»

    ClΓ©ment Hector

    πŸ’» πŸ‘€

    David Lai

    πŸ’» πŸ‘€

    Derek

    πŸ’» πŸ‘€ πŸ“–

    GΓ‘bor Marinov

    πŸ’» πŸ“–

    icyvapor

    πŸ’» πŸ“–

    JΓ©rΓ΄me LORRAIN

    πŸ’»

    David Morris-Oliveros

    πŸ’»

    BenoitConnan

    πŸ’»

    Malthaldar

    πŸ’»

    Sven Neve

    πŸ’»

    zafrs

    πŸ’»

    BenoitConnan

    πŸ’»

    Malthaldar

    πŸ’»

    Sven Neve

    πŸ’»

    zafrs

    πŸ’»
    From e74a295c0f17defaa8cf59d02c9541c834769eaa Mon Sep 17 00:00:00 2001 From: Milan Date: Fri, 22 Apr 2022 17:46:03 +0200 Subject: [PATCH 234/244] Add @m-u-r-p-h-y as a contributor --- .all-contributorsrc | 14 +++++++++++++- README.md | 5 +++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 492d6f2f1f..3277d64485 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -220,7 +220,6 @@ "profile": "https://github.com/2-REC", "contributions": [ "code", - "review", "doc" ] }, @@ -297,6 +296,19 @@ "contributions": [ "code" ] + }, + { + "login": "m-u-r-p-h-y", + "name": "murphy", + "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", + "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", + "contributions": [ + "code", + "review", + "userTesting", + "doc", + "projectManagement" + ] } ], "contributorsPerLine": 7 diff --git a/README.md b/README.md index 448ca1a263..52a3556a4e 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![All Contributors](https://img.shields.io/badge/all_contributors-25-orange.svg?style=flat-square)](#contributors-) +[![All Contributors](https://img.shields.io/badge/all_contributors-26-orange.svg?style=flat-square)](#contributors-) OpenPype ==== @@ -316,7 +316,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    ClΓ©ment Hector

    πŸ’» πŸ‘€
    David Lai

    πŸ’» πŸ‘€ -
    Derek

    πŸ’» πŸ‘€ πŸ“– +
    Derek

    πŸ’» πŸ“–
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’» @@ -327,6 +327,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Malthaldar

    πŸ’»
    Sven Neve

    πŸ’»
    zafrs

    πŸ’» +
    murphy

    πŸ’» πŸ‘€ πŸ““ πŸ“– πŸ“† From 910e875d3388718f78f71c1d981be534504326b1 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:46:34 +0200 Subject: [PATCH 235/244] update readme --- .all-contributorsrc | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 3277d64485..81447b3dfe 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -175,6 +175,19 @@ "code" ] }, + { + "login": "m-u-r-p-h-y", + "name": "murphy", + "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", + "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", + "contributions": [ + "code", + "review", + "userTesting", + "doc", + "projectManagement" + ] + } { "login": "aardschok", "name": "Wijnand Koreman", @@ -296,19 +309,6 @@ "contributions": [ "code" ] - }, - { - "login": "m-u-r-p-h-y", - "name": "murphy", - "avatar_url": "https://avatars.githubusercontent.com/u/352795?v=4", - "profile": "https://www.linkedin.com/in/mmuurrpphhyy/", - "contributions": [ - "code", - "review", - "userTesting", - "doc", - "projectManagement" - ] } ], "contributorsPerLine": 7 From 85027923b29be628f7b3ab658209229e739d3799 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 22 Apr 2022 17:51:30 +0200 Subject: [PATCH 236/244] change order --- .all-contributorsrc | 4 ++-- README.md | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.all-contributorsrc b/.all-contributorsrc index 81447b3dfe..a3b85cae68 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -187,7 +187,7 @@ "doc", "projectManagement" ] - } + }, { "login": "aardschok", "name": "Wijnand Koreman", @@ -312,4 +312,4 @@ } ], "contributorsPerLine": 7 -} +} \ No newline at end of file diff --git a/README.md b/README.md index 52a3556a4e..b6966adbc4 100644 --- a/README.md +++ b/README.md @@ -310,24 +310,24 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    Simone Barbieri

    πŸ’» πŸ“–
    karimmozilla

    πŸ’»
    Allan I. A.

    πŸ’» +
    murphy

    πŸ’» πŸ‘€ πŸ““ πŸ“– πŸ“†
    Wijnand Koreman

    πŸ’» -
    Bo Zhou

    πŸ’» +
    Bo Zhou

    πŸ’»
    ClΓ©ment Hector

    πŸ’» πŸ‘€
    David Lai

    πŸ’» πŸ‘€
    Derek

    πŸ’» πŸ“–
    GΓ‘bor Marinov

    πŸ’» πŸ“–
    icyvapor

    πŸ’» πŸ“–
    JΓ©rΓ΄me LORRAIN

    πŸ’» -
    David Morris-Oliveros

    πŸ’» +
    David Morris-Oliveros

    πŸ’»
    BenoitConnan

    πŸ’»
    Malthaldar

    πŸ’»
    Sven Neve

    πŸ’»
    zafrs

    πŸ’» -
    murphy

    πŸ’» πŸ‘€ πŸ““ πŸ“– πŸ“†

From a003bceb166dac3884388c2cf0fb0d07fc999766 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 22 Apr 2022 18:59:14 +0200
Subject: [PATCH 237/244] removed live_repo_dir usage

---
 igniter/bootstrap_repos.py | 20 +++++++++-----------
 1 file changed, 9 insertions(+), 11 deletions(-)

diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py
index 6392517cda..fc814f871a 100644
--- a/igniter/bootstrap_repos.py
+++ b/igniter/bootstrap_repos.py
@@ -627,8 +627,6 @@ class BootstrapRepos:
 
     Attributes:
         data_dir (Path): local OpenPype installation directory.
-        live_repo_dir (Path): path to repos directory if running live,
-            otherwise `None`.
         registry (OpenPypeSettingsRegistry): OpenPype registry object.
         zip_filter (list): List of files to exclude from zip
         openpype_filter (list): list of top level directories to
@@ -667,11 +665,6 @@ class BootstrapRepos:
             progress_callback = empty_progress
         self._progress_callback = progress_callback
 
-        if getattr(sys, "frozen", False):
-            self.live_repo_dir = Path(sys.executable).parent
-        else:
-            self.live_repo_dir = Path(Path(__file__).parent / "..")
-
     @staticmethod
     def get_version_path_from_list(
             version: str, version_list: list) -> Union[Path, None]:
@@ -736,11 +729,16 @@ class BootstrapRepos:
         # if repo dir is not set, we detect local "live" OpenPype repository
         # version and use it as a source. Otherwise repo_dir is user
         # entered location.
-        if not repo_dir:
-            version = OpenPypeVersion.get_installed_version_str()
-            repo_dir = self.live_repo_dir
-        else:
+        if repo_dir:
             version = self.get_version(repo_dir)
+        else:
+            version = OpenPypeVersion.get_installed_version_str()
+            # QUESTION Can we use 'OPENPYPE_ROOT' env variable or it may
+            # not be defined yet?
+            if getattr(sys, "frozen", False):
+                repo_dir = Path(sys.executable).parent
+            else:
+                repo_dir = Path(Path(__file__).parent / "..")
 
         if not version:
             self._print("OpenPype not found.", LOG_ERROR)

From 581647a65cf912c09eb79684af40e49fffe60bde Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Fri, 22 Apr 2022 19:04:30 +0200
Subject: [PATCH 238/244] simplified repository resolving using OpenPypeVersion.get_installed_version

---
 igniter/bootstrap_repos.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py
index fc814f871a..08333885c0 100644
--- a/igniter/bootstrap_repos.py
+++ b/igniter/bootstrap_repos.py
@@ -732,13 +732,9 @@ class BootstrapRepos:
         if repo_dir:
             version = self.get_version(repo_dir)
         else:
-            version = OpenPypeVersion.get_installed_version_str()
-            # QUESTION Can we use 'OPENPYPE_ROOT' env variable or it may
-            # not be defined yet?
- if getattr(sys, "frozen", False): - repo_dir = Path(sys.executable).parent - else: - repo_dir = Path(Path(__file__).parent / "..") + installed_version = OpenPypeVersion.get_installed_version() + version = str(installed_version) + repo_dir = installed_version.path if not version: self._print("OpenPype not found.", LOG_ERROR) From 19a2ef4cb69f2fc0dcf3247ffa25c628c480e1ff Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 23 Apr 2022 03:40:51 +0000 Subject: [PATCH 239/244] [Automated] Bump version --- CHANGELOG.md | 39 ++++++++++++++++++--------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 20 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a48e9ee806..e2ff9f919c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,25 +1,39 @@ # Changelog -## [3.10.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.10.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.4...HEAD) ### πŸ“– Documentation +- Docs: add all-contributors config and initial list [\#3094](https://github.com/pypeclub/OpenPype/pull/3094) - Nuke docs with videos [\#3052](https://github.com/pypeclub/OpenPype/pull/3052) **πŸš€ Enhancements** +- Standalone publisher: add support for bgeo and vdb [\#3080](https://github.com/pypeclub/OpenPype/pull/3080) - Update collect\_render.py [\#3055](https://github.com/pypeclub/OpenPype/pull/3055) +- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) **πŸ› Bug fixes** +- RoyalRender Control Submission - AVALON\_APP\_NAME default [\#3091](https://github.com/pypeclub/OpenPype/pull/3091) +- Ftrack: Update Create Folders action [\#3089](https://github.com/pypeclub/OpenPype/pull/3089) +- Project Manager: Avoid unnecessary updates of asset documents [\#3083](https://github.com/pypeclub/OpenPype/pull/3083) +- Standalone publisher: Fix plugins install [\#3077](https://github.com/pypeclub/OpenPype/pull/3077) +- General: Extract review sequence is not converted with same names [\#3076](https://github.com/pypeclub/OpenPype/pull/3076) +- Webpublisher: Use variant value [\#3068](https://github.com/pypeclub/OpenPype/pull/3068) - Nuke: Add aov matching even for remainder and prerender [\#3060](https://github.com/pypeclub/OpenPype/pull/3060) **πŸ”€ Refactored code** - General: Move host install [\#3009](https://github.com/pypeclub/OpenPype/pull/3009) +**Merged pull requests:** + +- Nuke: added suspend\_publish knob [\#3078](https://github.com/pypeclub/OpenPype/pull/3078) +- Bump async from 2.6.3 to 2.6.4 in /website [\#3065](https://github.com/pypeclub/OpenPype/pull/3065) + ## [3.9.4](https://github.com/pypeclub/OpenPype/tree/3.9.4) (2022-04-15) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.9.4-nightly.2...3.9.4) @@ -53,6 +67,7 @@ - LibraryLoader: Use current project for asset query in families filter [\#3042](https://github.com/pypeclub/OpenPype/pull/3042) - SiteSync: Providers ignore that site is disabled [\#3041](https://github.com/pypeclub/OpenPype/pull/3041) - Unreal: Creator import fixes [\#3040](https://github.com/pypeclub/OpenPype/pull/3040) +- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) - SiteSync: fix transitive alternate sites, fix dropdown in Local Settings [\#3018](https://github.com/pypeclub/OpenPype/pull/3018) **Merged pull requests:** @@ -72,7 +87,6 @@ - Ftrack: Add 
description integrator [\#3027](https://github.com/pypeclub/OpenPype/pull/3027) - Publishing textures for Unreal [\#2988](https://github.com/pypeclub/OpenPype/pull/2988) -- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) **πŸš€ Enhancements** @@ -80,14 +94,11 @@ - Console Interpreter: Changed how console splitter size are reused on show [\#3016](https://github.com/pypeclub/OpenPype/pull/3016) - Deadline: Use more suitable name for sequence review logic [\#3015](https://github.com/pypeclub/OpenPype/pull/3015) - General: default workfile subset name for workfile [\#3011](https://github.com/pypeclub/OpenPype/pull/3011) -- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) - Deadline: priority configurable in Maya jobs [\#2995](https://github.com/pypeclub/OpenPype/pull/2995) -- Workfiles tool: Save as published workfiles [\#2937](https://github.com/pypeclub/OpenPype/pull/2937) **πŸ› Bug fixes** - Deadline: Fixed default value of use sequence for review [\#3033](https://github.com/pypeclub/OpenPype/pull/3033) -- Settings UI: Version column can be extended so version are visible [\#3032](https://github.com/pypeclub/OpenPype/pull/3032) - General: Fix validate asset docs plug-in filename and class name [\#3029](https://github.com/pypeclub/OpenPype/pull/3029) - General: Fix import after movements [\#3028](https://github.com/pypeclub/OpenPype/pull/3028) - Harmony: Added creating subset name for workfile from template [\#3024](https://github.com/pypeclub/OpenPype/pull/3024) @@ -99,10 +110,6 @@ - Nuke: fixing unicode type detection in effect loaders [\#3002](https://github.com/pypeclub/OpenPype/pull/3002) - Nuke: removing redundant Ftrack asset when farm publishing [\#2996](https://github.com/pypeclub/OpenPype/pull/2996) -**πŸ”€ Refactored code** - -- General: Move plugins register and discover [\#2935](https://github.com/pypeclub/OpenPype/pull/2935) - **Merged pull requests:** - Maya: Allow to select invalid camera contents if no cameras found [\#3030](https://github.com/pypeclub/OpenPype/pull/3030) @@ -120,19 +127,17 @@ **πŸ†• New features** - nuke: bypass baking [\#2992](https://github.com/pypeclub/OpenPype/pull/2992) +- Maya to Unreal: Static and Skeletal Meshes [\#2978](https://github.com/pypeclub/OpenPype/pull/2978) **πŸš€ Enhancements** +- Nuke: add concurrency attr to deadline job [\#3005](https://github.com/pypeclub/OpenPype/pull/3005) - Photoshop: create image without instance [\#3001](https://github.com/pypeclub/OpenPype/pull/3001) - TVPaint: Render scene family [\#3000](https://github.com/pypeclub/OpenPype/pull/3000) - Nuke: ReviewDataMov Read RAW attribute [\#2985](https://github.com/pypeclub/OpenPype/pull/2985) -- SiteSync: Added compute\_resource\_sync\_sites to sync\_server\_module [\#2983](https://github.com/pypeclub/OpenPype/pull/2983) - General: `METADATA\_KEYS` constant as `frozenset` for optimal immutable lookup [\#2980](https://github.com/pypeclub/OpenPype/pull/2980) - General: Tools with host filters [\#2975](https://github.com/pypeclub/OpenPype/pull/2975) - Hero versions: Use custom templates [\#2967](https://github.com/pypeclub/OpenPype/pull/2967) -- Slack: Added configurable maximum file size of review upload to Slack [\#2945](https://github.com/pypeclub/OpenPype/pull/2945) -- NewPublisher: Prepared implementation of optional pyblish plugin [\#2943](https://github.com/pypeclub/OpenPype/pull/2943) -- TVPaint: Extractor to convert PNG into EXR 
[\#2942](https://github.com/pypeclub/OpenPype/pull/2942) **πŸ› Bug fixes** @@ -148,14 +153,6 @@ - General: OIIO conversion for ffmeg can handle sequences [\#2958](https://github.com/pypeclub/OpenPype/pull/2958) - Settings: Conditional dictionary avoid invalid logs [\#2956](https://github.com/pypeclub/OpenPype/pull/2956) - General: Smaller fixes and typos [\#2950](https://github.com/pypeclub/OpenPype/pull/2950) -- LogViewer: Don't refresh on initialization [\#2949](https://github.com/pypeclub/OpenPype/pull/2949) -- nuke: python3 compatibility issue with `iteritems` [\#2948](https://github.com/pypeclub/OpenPype/pull/2948) -- General: anatomy data with correct task short key [\#2947](https://github.com/pypeclub/OpenPype/pull/2947) -- SceneInventory: Fix imports in UI [\#2944](https://github.com/pypeclub/OpenPype/pull/2944) -- Slack: add generic exception [\#2941](https://github.com/pypeclub/OpenPype/pull/2941) -- General: Python specific vendor paths on env injection [\#2939](https://github.com/pypeclub/OpenPype/pull/2939) -- General: More fail safe delete old versions [\#2936](https://github.com/pypeclub/OpenPype/pull/2936) -- Settings UI: Collapsed of collapsible wrapper works as expected [\#2934](https://github.com/pypeclub/OpenPype/pull/2934) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index 9e2525e3b8..662adf28ca 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.10.0-nightly.1" +__version__ = "3.10.0-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 4c65ac9bda..f32e385e80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.10.0-nightly.1" # OpenPype +version = "3.10.0-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 3f976f00d2ef46612a770a96bacde8d8d3df039d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 25 Apr 2022 12:30:42 +0200 Subject: [PATCH 240/244] OP-2951 - refactored Validate projects Checks also if set in DB and not physically present --- .../modules/sync_server/sync_server_module.py | 83 ++++++++++--------- 1 file changed, 44 insertions(+), 39 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index fb81791da2..7afbdc2e9b 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -212,36 +212,38 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def create_validate_project_task(self, collection, site_name): """Adds metadata about project files validation on a queue. - This process will loop through all representation and check if - their files actually exist on an active site. + This process will loop through all representation and check if + their files actually exist on an active site. - This might be useful for edge cases when artists is switching - between sites, remote site is actually physically mounted and - active site has same file urls etc. + It also checks if site is set in DB, but file is physically not + present - Task will run on a asyncio loop, shouldn't be blocking. + This might be useful for edge cases when artists is switching + between sites, remote site is actually physically mounted and + active site has same file urls etc. + + Task will run on a asyncio loop, shouldn't be blocking. 
""" task = { "type": "validate", "project_name": collection, - "func": lambda: self.validate_project(collection, site_name) + "func": lambda: self.validate_project(collection, site_name, + reset_missing=True) } self.projects_processed.add(collection) self.long_running_tasks.append(task) - def validate_project(self, collection, site_name, remove_missing=False): - """ - Validate 'collection' of 'site_name' and its local files + def validate_project(self, collection, site_name, reset_missing=False): + """Validate 'collection' of 'site_name' and its local files - If file present and not marked with a 'site_name' in DB, DB is - updated with site name and file modified date. + If file present and not marked with a 'site_name' in DB, DB is + updated with site name and file modified date. - Args: - module (SyncServerModule) - collection (string): project name - site_name (string): active site name - remove_missing (bool): if True remove sites in DB if missing - physically + Args: + collection (string): project name + site_name (string): active site name + reset_missing (bool): if True reset site in DB if missing + physically """ self.log.debug("Validation of {} for {} started".format(collection, site_name)) @@ -256,29 +258,32 @@ class SyncServerModule(OpenPypeModule, ITrayModule): return sites_added = 0 - sites_removed = 0 + sites_reset = 0 for repre in representations: repre_id = repre["_id"] for repre_file in repre.get("files", []): try: - has_site = site_name in [site["name"] - for site in repre_file["sites"]] - except TypeError: + is_on_site = site_name in [site["name"] + for site in repre_file["sites"] + if (site.get("created_dt") and + not site.get("error"))] + except (TypeError, AttributeError): self.log.debug("Structure error in {}".format(repre_id)) continue - if has_site and not remove_missing: - continue - file_path = repre_file.get("path", "") local_file_path = self.get_local_file_path(collection, site_name, file_path) - if local_file_path and os.path.exists(local_file_path): - self.log.debug("Adding site {} for {}".format(site_name, - repre_id)) - if not has_site: + file_exists = (local_file_path and + os.path.exists(local_file_path)) + if not is_on_site: + if file_exists: + self.log.debug( + "Adding site {} for {}".format(site_name, + repre_id)) + query = { "_id": repre_id } @@ -288,25 +293,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule): "created_dt": created_dt} self._add_site(collection, query, repre, elem, site_name=site_name, - file_id=repre_file["_id"]) + file_id=repre_file["_id"], + force=True) sites_added += 1 else: - if has_site and remove_missing: - self.log.debug("Removing site {} for {}". + if not file_exists and reset_missing: + self.log.debug("Resetting site {} for {}". 
format(site_name, repre_id)) - self.reset_provider_for_file(collection, - repre_id, - file_id=repre_file["_id"], - remove=True) - sites_removed += 1 + self.reset_site_on_representation( + collection, repre_id, site_name=site_name, + file_id=repre_file["_id"]) + sites_reset += 1 if sites_added % 100 == 0: self.log.debug("Sites added {}".format(sites_added)) self.log.debug("Validation of {} for {} ended".format(collection, site_name)) - self.log.info("Sites added {}, sites removed {}".format(sites_added, - sites_removed)) + self.log.info("Sites added {}, sites reset {}".format(sites_added, + reset_missing)) def pause_representation(self, collection, representation_id, site_name): """ From 581647a65cf912c09eb79684af40e49fffe60bde Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 25 Apr 2022 13:33:23 +0200 Subject: [PATCH 241/244] OP-2951 - safer querying of tries --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 7afbdc2e9b..ccd0fd111d 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1772,7 +1772,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): (int) - number of failed attempts """ _, rec = self._get_site_rec(file.get("sites", []), provider) - return rec.get("tries", 0) + return self._get_tries_count_from_rec(rec) def _get_progress_dict(self, progress): """ From 5e4d618be47515e9088329958ed6ea8aaee9456c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 18:19:20 +0200 Subject: [PATCH 242/244] fixed new imports of avalon --- openpype/hosts/aftereffects/api/pipeline.py | 12 +++++----- .../plugins/create/create_render.py | 7 +++--- .../plugins/create/workfile_creator.py | 20 ++++++++-------- .../plugins/publish/collect_workfile.py | 3 ++- openpype/hosts/photoshop/api/pipeline.py | 7 +++--- .../photoshop/plugins/create/create_image.py | 6 ++--- .../plugins/create/workfile_creator.py | 23 +++++++++++-------- .../plugins/publish/collect_batch_data.py | 7 +++--- openpype/lib/avalon_context.py | 4 ++-- 9 files changed, 48 insertions(+), 41 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 3a41b4f26d..0d739df748 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -13,6 +13,7 @@ from openpype.pipeline import ( deregister_loader_plugin_path, deregister_creator_plugin_path, AVALON_CONTAINER_ID, + legacy_io, ) import openpype.hosts.aftereffects from openpype.lib import register_event_callback @@ -142,9 +143,9 @@ def check_inventory(): outdated_containers = [] for container in host.ls(): representation = container['representation'] - representation_doc = io.find_one( + representation_doc = legacy_io.find_one( { - "_id": io.ObjectId(representation), + "_id": legacy_io.ObjectId(representation), "type": "representation" }, projection={"parent": True} @@ -280,11 +281,10 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" - import avalon.api - project_name = avalon.api.Session["AVALON_PROJECT"] - asset_name = avalon.api.Session["AVALON_ASSET"] - task_name = avalon.api.Session["AVALON_TASK"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] return 
"{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py index 78d43d259a..215c148f37 100644 --- a/openpype/hosts/aftereffects/plugins/create/create_render.py +++ b/openpype/hosts/aftereffects/plugins/create/create_render.py @@ -1,12 +1,11 @@ -from avalon import api as avalon_api - from openpype import resources from openpype.lib import BoolDef, UISeparatorDef from openpype.hosts.aftereffects import api from openpype.pipeline import ( Creator, CreatedInstance, - CreatorError + CreatorError, + legacy_io, ) @@ -116,7 +115,7 @@ class RenderCreator(Creator): instance_data.pop("uuid") if not instance_data.get("task"): - instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + instance_data["task"] = legacy_io.Session.get("AVALON_TASK") if not instance_data.get("creator_attributes"): is_old_farm = instance_data["family"] != "renderLocal" diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index 2d9d42ee8c..7cc9bb54d4 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -1,9 +1,8 @@ -from avalon import io - import openpype.hosts.aftereffects.api as api from openpype.pipeline import ( AutoCreator, - CreatedInstance + CreatedInstance, + legacy_io, ) @@ -36,13 +35,13 @@ class AEWorkfileCreator(AutoCreator): break variant = '' - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({"type": "asset", "name": asset_name}) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -67,7 +66,10 @@ class AEWorkfileCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py index 06b73f4b5d..9cb6900b0a 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py @@ -2,6 +2,7 @@ import os import pyblish.api from openpype.lib import get_subset_name_with_asset_doc +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): @@ -41,7 +42,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance.data["publish"] = instance.data["active"] # for DL def _get_new_instance(self, context, scene_file): - task = api.Session["AVALON_TASK"] + task = legacy_io.Session["AVALON_TASK"] version = context.data["version"] asset_entity = context.data["assetEntity"] project_entity = context.data["projectEntity"] diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py 
index fc90be8716..6db4470428 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -260,9 +260,8 @@ def update_context_data(data, changes): def get_context_title(): """Returns title for Creator window""" - import avalon.api - project_name = avalon.api.Session["AVALON_PROJECT"] - asset_name = avalon.api.Session["AVALON_ASSET"] - task_name = avalon.api.Session["AVALON_TASK"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] return "{}/{}/{}".format(project_name, asset_name, task_name) diff --git a/openpype/hosts/photoshop/plugins/create/create_image.py b/openpype/hosts/photoshop/plugins/create/create_image.py index c2fe8b6c78..f15068b031 100644 --- a/openpype/hosts/photoshop/plugins/create/create_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_image.py @@ -1,9 +1,9 @@ -from avalon import api as avalon_api from openpype.hosts.photoshop import api from openpype.lib import BoolDef from openpype.pipeline import ( Creator, - CreatedInstance + CreatedInstance, + legacy_io ) @@ -133,7 +133,7 @@ class ImageCreator(Creator): instance_data.pop("uuid") if not instance_data.get("task"): - instance_data["task"] = avalon_api.Session.get("AVALON_TASK") + instance_data["task"] = legacy_io.Session.get("AVALON_TASK") if not instance_data.get("variant"): instance_data["variant"] = '' diff --git a/openpype/hosts/photoshop/plugins/create/workfile_creator.py b/openpype/hosts/photoshop/plugins/create/workfile_creator.py index d66a05cad7..875a9b8a94 100644 --- a/openpype/hosts/photoshop/plugins/create/workfile_creator.py +++ b/openpype/hosts/photoshop/plugins/create/workfile_creator.py @@ -1,9 +1,8 @@ -from avalon import io - import openpype.hosts.photoshop.api as api from openpype.pipeline import ( AutoCreator, - CreatedInstance + CreatedInstance, + legacy_io ) @@ -36,12 +35,15 @@ class PSWorkfileCreator(AutoCreator): break variant = '' - project_name = io.Session["AVALON_PROJECT"] - asset_name = io.Session["AVALON_ASSET"] - task_name = io.Session["AVALON_TASK"] - host_name = io.Session["AVALON_APP"] + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) @@ -65,7 +67,10 @@ class PSWorkfileCreator(AutoCreator): existing_instance["asset"] != asset_name or existing_instance["task"] != task_name ): - asset_doc = io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 5e6e916611..448493d370 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -16,11 +16,12 @@ shouldn't be pushed into general publish plugins. 
import os import pyblish.api -from avalon import io + from openpype.lib.plugin_tools import ( parse_json, get_batch_asset_task_info ) +from openpype.pipeline import legacy_io class CollectBatchData(pyblish.api.ContextPlugin): @@ -62,9 +63,9 @@ class CollectBatchData(pyblish.api.ContextPlugin): ) os.environ["AVALON_ASSET"] = asset_name - io.Session["AVALON_ASSET"] = asset_name os.environ["AVALON_TASK"] = task_name - io.Session["AVALON_TASK"] = task_name + legacy_io.Session["AVALON_ASSET"] = asset_name + legacy_io.Session["AVALON_TASK"] = task_name context.data["asset"] = asset_name context.data["task"] = task_name diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 3d57ee4b91..3fcddef745 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1969,7 +1969,7 @@ def get_last_workfile( return filename -@with_avalon +@with_pipeline_io def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, link_type=None, max_depth=0): """Returns list of linked ids of particular type (if provided). @@ -1987,7 +1987,7 @@ def get_linked_ids_for_representations(project_name, repre_ids, dbcon=None, """ # Create new dbcon if not passed and use passed project name if not dbcon: - from avalon.api import AvalonMongoDB + from openpype.pipeline import AvalonMongoDB dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = project_name # Validate that passed dbcon has same project From b30db92921aee1998c5cb57fb5a5e3ea6f4a9129 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 18:20:42 +0200 Subject: [PATCH 243/244] fix line length --- .../hosts/aftereffects/plugins/create/workfile_creator.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py index 7cc9bb54d4..88e55e21b5 100644 --- a/openpype/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/openpype/hosts/aftereffects/plugins/create/workfile_creator.py @@ -41,7 +41,10 @@ class AEWorkfileCreator(AutoCreator): host_name = legacy_io.Session["AVALON_APP"] if existing_instance is None: - asset_doc = legacy_io.find_one({"type": "asset", "name": asset_name}) + asset_doc = legacy_io.find_one({ + "type": "asset", + "name": asset_name + }) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name ) From 25848817d92b47fc7e7b3f44cc68799e5d0cfa3e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 25 Apr 2022 18:33:53 +0200 Subject: [PATCH 244/244] removed redundant code from aftereffects and photoshop --- openpype/hosts/aftereffects/api/pipeline.py | 20 ++++---------------- openpype/hosts/photoshop/api/pipeline.py | 19 ++++--------------- 2 files changed, 8 insertions(+), 31 deletions(-) diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 0d739df748..a428a1470d 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -139,23 +139,11 @@ def check_inventory(): if not lib.any_outdated(): return - host = pyblish.api.registered_host() - outdated_containers = [] - for container in host.ls(): - representation = container['representation'] - representation_doc = legacy_io.find_one( - { - "_id": legacy_io.ObjectId(representation), - "type": "representation" - }, - projection={"parent": True} - ) - if representation_doc and not lib.is_latest(representation_doc): - outdated_containers.append(container) - 
     # Warn about outdated containers.
-    print("Starting new QApplication..")
-    _app = QtWidgets.QApplication(sys.argv)
+    _app = QtWidgets.QApplication.instance()
+    if not _app:
+        print("Starting new QApplication..")
+        _app = QtWidgets.QApplication([])
 
     message_box = QtWidgets.QMessageBox()
     message_box.setIcon(QtWidgets.QMessageBox.Warning)
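The guard introduced above relies on one Qt detail: `QtWidgets.QApplication.instance()` returns the application object that is already running (or `None`), so a host that owns its own Qt event loop never gets a second, conflicting `QApplication`. Below is a minimal standalone sketch of the same pattern, assuming the Qt bindings used by OpenPype (the Qt.py shim or PySide2) are importable; the function name and message text are illustrative only, not part of the patches.

```python
from Qt import QtWidgets  # assumption: Qt.py shim (or PySide2) is available


def warn_outdated_containers():
    # Reuse the host's QApplication when one exists; only create our own
    # (with an empty argument list) when running outside a Qt event loop.
    app = QtWidgets.QApplication.instance()
    if not app:
        print("Starting new QApplication..")
        app = QtWidgets.QApplication([])

    message_box = QtWidgets.QMessageBox()
    message_box.setIcon(QtWidgets.QMessageBox.Warning)
    message_box.setText("There are outdated containers in the scene.")
    message_box.exec_()
```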