From 535aa5b29be4112ce94f7f25a2ecf19486b35266 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 8 Dec 2021 19:04:02 +0100 Subject: [PATCH 001/160] handle invalid file type --- openpype/tools/mayalookassigner/vray_proxies.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/tools/mayalookassigner/vray_proxies.py b/openpype/tools/mayalookassigner/vray_proxies.py index d2f345e628..9252f22d8f 100644 --- a/openpype/tools/mayalookassigner/vray_proxies.py +++ b/openpype/tools/mayalookassigner/vray_proxies.py @@ -41,7 +41,12 @@ def get_alembic_paths_by_property(filename, attr, verbose=False): filename = filename.replace("\\", "/") filename = str(filename) # path must be string - archive = alembic.Abc.IArchive(filename) + try: + archive = alembic.Abc.IArchive(filename) + except RuntimeError: + # invalid alembic file - probably vrmesh + log.warning("{} is not an alembic file".format(filename)) + return {} root = archive.getTop() iterator = list(root.children) From 1812a05dde7fa0c37aaa4bc25dde1197040d5a86 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 13 Dec 2021 10:11:27 +0100 Subject: [PATCH 002/160] fix get all assets --- openpype/tools/mayalookassigner/commands.py | 5 ++--- openpype/tools/mayalookassigner/widgets.py | 6 +++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index f7d26f9adb..9449d042f1 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -106,7 +106,7 @@ def create_asset_id_hash(nodes): # iterate over content of reference node if cmds.nodeType(node) == "reference": ref_hashes = create_asset_id_hash( - cmds.referenceQuery(node, nodes=True)) + cmds.referenceQuery(node, nodes=True, dp=True)) for asset_id, ref_nodes in ref_hashes.items(): node_id_hash[asset_id] += ref_nodes else: @@ -151,8 +151,7 @@ def create_items_from_nodes(nodes): for k, _ in ids.items(): pid = k.split(":")[0] if not parent_id.get(pid): - parent_id.update({pid: [vp]}) - + parent_id[pid] = [vp] print("Adding ids from alembic {}".format(path)) id_hashes.update(parent_id) diff --git a/openpype/tools/mayalookassigner/widgets.py b/openpype/tools/mayalookassigner/widgets.py index 625e9ef8c6..bc19913b8b 100644 --- a/openpype/tools/mayalookassigner/widgets.py +++ b/openpype/tools/mayalookassigner/widgets.py @@ -90,8 +90,8 @@ class AssetOutliner(QtWidgets.QWidget): return items def get_all_assets(self): - """Add all items from the current scene""" - + """Add all items from the current scene.""" + items = [] with lib.preserve_expanded_rows(self.view): with lib.preserve_selection(self.view): self.clear() @@ -237,7 +237,7 @@ class LookOutliner(QtWidgets.QWidget): """ datas = [i.data(TreeModel.ItemRole) for i in self.view.get_indices()] - items = [d for d in datas if d is not None] # filter Nones + items = [d for d in datas if d is not None] # filter Nones return items From 5d06c85a34826c040ecc62c47622719444e053f0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 17 Dec 2021 18:38:56 +0100 Subject: [PATCH 003/160] fixing vray look assigning --- openpype/tools/mayalookassigner/app.py | 12 ++++---- openpype/tools/mayalookassigner/commands.py | 30 +++++++++++++++---- .../tools/mayalookassigner/vray_proxies.py | 10 ++++--- openpype/tools/mayalookassigner/widgets.py | 15 ++++------ 4 files changed, 42 insertions(+), 25 deletions(-) diff --git a/openpype/tools/mayalookassigner/app.py 
b/openpype/tools/mayalookassigner/app.py index fb99333f87..31bb455f95 100644 --- a/openpype/tools/mayalookassigner/app.py +++ b/openpype/tools/mayalookassigner/app.py @@ -24,7 +24,6 @@ from .commands import ( ) from .vray_proxies import vrayproxy_assign_look - module = sys.modules[__name__] module.window = None @@ -210,7 +209,7 @@ class App(QtWidgets.QWidget): # Assign the first matching look relevant for this asset # (since assigning multiple to the same nodes makes no sense) assign_look = next((subset for subset in item["looks"] - if subset["name"] in looks), None) + if subset["name"] in looks), None) if not assign_look: self.echo("{} No matching selected " "look for {}".format(prefix, asset)) @@ -229,11 +228,14 @@ class App(QtWidgets.QWidget): if cmds.pluginInfo('vrayformaya', query=True, loaded=True): self.echo("Getting vray proxy nodes ...") - vray_proxies = set(cmds.ls(type="VRayProxy")) - nodes = list(set(item["nodes"]).difference(vray_proxies)) + vray_proxies = set(cmds.ls(type="VRayProxy", long=True)) + if vray_proxies: for vp in vray_proxies: - vrayproxy_assign_look(vp, subset_name) + if vp in nodes: + vrayproxy_assign_look(vp, subset_name) + + nodes = list(set(item["nodes"]).difference(vray_proxies)) # Assign look if nodes: diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index f7d26f9adb..740e4fa6fe 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -8,7 +8,6 @@ from openpype.hosts.maya.api import lib from avalon import io, api - from .vray_proxies import get_alembic_ids_cache log = logging.getLogger(__name__) @@ -68,7 +67,9 @@ def get_selected_nodes(): selection = cmds.ls(selection=True, long=True) hierarchy = list_descendents(selection) - return list(set(selection + hierarchy)) + selected_nodes = list(set(selection + hierarchy)) + log.warning("selected nodes: {}".format(selected_nodes)) + return selected_nodes def get_all_asset_nodes(): @@ -79,17 +80,23 @@ def get_all_asset_nodes(): """ host = api.registered_host() + containers = host.ls() nodes = [] + log.debug("got {}".format(containers)) for container in host.ls(): # We are not interested in looks but assets! 
if container["loader"] == "LookLoader": + log.warning("skipping {}".format(container)) continue # Gather all information container_name = container["objectName"] + log.warning("--- listing: {}".format(container_name)) nodes += cmds.sets(container_name, query=True, nodesOnly=True) or [] + nodes = list(set(nodes)) + log.warning("returning {}".format(nodes)) return nodes @@ -102,13 +109,24 @@ def create_asset_id_hash(nodes): dict """ node_id_hash = defaultdict(list) + + # log.warning(pformat(nodes)) for node in nodes: # iterate over content of reference node if cmds.nodeType(node) == "reference": ref_hashes = create_asset_id_hash( - cmds.referenceQuery(node, nodes=True)) + list(set(cmds.referenceQuery(node, nodes=True, dp=True)))) for asset_id, ref_nodes in ref_hashes.items(): node_id_hash[asset_id] += ref_nodes + elif cmds.pluginInfo('vrayformaya', query=True, + loaded=True) and cmds.nodeType( + node) == "VRayProxy": + path = cmds.getAttr("{}.fileName".format(node)) + ids = get_alembic_ids_cache(path) + for k, _ in ids.items(): + pid = k.split(":")[0] + if not node_id_hash.get(pid): + node_id_hash[pid] = [node] else: value = lib.get_id(node) if value is None: @@ -151,12 +169,12 @@ def create_items_from_nodes(nodes): for k, _ in ids.items(): pid = k.split(":")[0] if not parent_id.get(pid): - parent_id.update({pid: [vp]}) - - print("Adding ids from alembic {}".format(path)) + parent_id[pid] = [vp] + log.warning("Adding ids from alembic {}".format(path)) id_hashes.update(parent_id) if not id_hashes: + log.warning("No id hashes") return asset_view_items for _id, id_nodes in id_hashes.items(): diff --git a/openpype/tools/mayalookassigner/vray_proxies.py b/openpype/tools/mayalookassigner/vray_proxies.py index d2f345e628..fe36894466 100644 --- a/openpype/tools/mayalookassigner/vray_proxies.py +++ b/openpype/tools/mayalookassigner/vray_proxies.py @@ -41,7 +41,11 @@ def get_alembic_paths_by_property(filename, attr, verbose=False): filename = filename.replace("\\", "/") filename = str(filename) # path must be string - archive = alembic.Abc.IArchive(filename) + try: + archive = alembic.Abc.IArchive(filename) + except RuntimeError: + # invalid file format + return {} root = archive.getTop() iterator = list(root.children) @@ -201,9 +205,7 @@ def load_look(version_id): with avalon.maya.maintained_selection(): container_node = api.load(loader, look_representation) - # Get container members - shader_nodes = cmds.sets(container_node, query=True) - return shader_nodes + return cmds.sets(container_node, query=True) def get_latest_version(asset_id, subset): diff --git a/openpype/tools/mayalookassigner/widgets.py b/openpype/tools/mayalookassigner/widgets.py index 625e9ef8c6..fceaf27244 100644 --- a/openpype/tools/mayalookassigner/widgets.py +++ b/openpype/tools/mayalookassigner/widgets.py @@ -20,7 +20,6 @@ MODELINDEX = QtCore.QModelIndex() class AssetOutliner(QtWidgets.QWidget): - refreshed = QtCore.Signal() selection_changed = QtCore.Signal() @@ -84,14 +83,13 @@ class AssetOutliner(QtWidgets.QWidget): """ selection_model = self.view.selectionModel() - items = [row.data(TreeModel.ItemRole) for row in - selection_model.selectedRows(0)] - - return items + return [row.data(TreeModel.ItemRole) + for row in selection_model.selectedRows(0)] def get_all_assets(self): """Add all items from the current scene""" + items = [] with lib.preserve_expanded_rows(self.view): with lib.preserve_selection(self.view): self.clear() @@ -118,7 +116,7 @@ class AssetOutliner(QtWidgets.QWidget): # Collect all nodes by hash (optimization) 
if not selection: - nodes = cmds.ls(dag=True, long=True) + nodes = cmds.ls(dag=True, long=True) else: nodes = commands.get_selected_nodes() id_nodes = commands.create_asset_id_hash(nodes) @@ -187,7 +185,6 @@ class AssetOutliner(QtWidgets.QWidget): class LookOutliner(QtWidgets.QWidget): - menu_apply_action = QtCore.Signal() def __init__(self, parent=None): @@ -237,9 +234,7 @@ class LookOutliner(QtWidgets.QWidget): """ datas = [i.data(TreeModel.ItemRole) for i in self.view.get_indices()] - items = [d for d in datas if d is not None] # filter Nones - - return items + return [d for d in datas if d is not None] def right_mouse_menu(self, pos): """Build RMB menu for look view""" From f820602caae96c61a8ab4fdeb9767982fe5765a9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 17 Dec 2021 18:43:51 +0100 Subject: [PATCH 004/160] remove debug prints --- openpype/tools/mayalookassigner/commands.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index 740e4fa6fe..9f6f244a35 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -67,9 +67,7 @@ def get_selected_nodes(): selection = cmds.ls(selection=True, long=True) hierarchy = list_descendents(selection) - selected_nodes = list(set(selection + hierarchy)) - log.warning("selected nodes: {}".format(selected_nodes)) - return selected_nodes + return list(set(selection + hierarchy)) def get_all_asset_nodes(): @@ -83,20 +81,16 @@ def get_all_asset_nodes(): containers = host.ls() nodes = [] - log.debug("got {}".format(containers)) for container in host.ls(): # We are not interested in looks but assets! if container["loader"] == "LookLoader": - log.warning("skipping {}".format(container)) continue # Gather all information container_name = container["objectName"] - log.warning("--- listing: {}".format(container_name)) nodes += cmds.sets(container_name, query=True, nodesOnly=True) or [] nodes = list(set(nodes)) - log.warning("returning {}".format(nodes)) return nodes @@ -109,8 +103,6 @@ def create_asset_id_hash(nodes): dict """ node_id_hash = defaultdict(list) - - # log.warning(pformat(nodes)) for node in nodes: # iterate over content of reference node if cmds.nodeType(node) == "reference": From 6199f6e6654ee0f672935f767e5bde22dbf2c25f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 5 Jan 2022 10:36:36 +0100 Subject: [PATCH 005/160] Collect 'fps' animation data only for "review" instances --- openpype/hosts/maya/api/lib.py | 7 ++++--- openpype/hosts/maya/plugins/create/create_review.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 52ebcaff64..d1054988d1 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -280,7 +280,7 @@ def shape_from_element(element): return node -def collect_animation_data(): +def collect_animation_data(fps=False): """Get the basic animation data Returns: @@ -291,7 +291,6 @@ def collect_animation_data(): # get scene values as defaults start = cmds.playbackOptions(query=True, animationStartTime=True) end = cmds.playbackOptions(query=True, animationEndTime=True) - fps = mel.eval('currentTimeUnitToFPS()') # build attributes data = OrderedDict() @@ -299,7 +298,9 @@ def collect_animation_data(): data["frameEnd"] = end data["handles"] = 0 data["step"] = 1.0 - data["fps"] = fps + + if fps: + data["fps"] = mel.eval('currentTimeUnitToFPS()') 
return data diff --git a/openpype/hosts/maya/plugins/create/create_review.py b/openpype/hosts/maya/plugins/create/create_review.py index 05b05be7a5..ae636ec691 100644 --- a/openpype/hosts/maya/plugins/create/create_review.py +++ b/openpype/hosts/maya/plugins/create/create_review.py @@ -22,7 +22,7 @@ class CreateReview(plugin.Creator): # get basic animation data : start / end / handles / steps data = OrderedDict(**self.data) - animation_data = lib.collect_animation_data() + animation_data = lib.collect_animation_data(fps=True) for key, value in animation_data.items(): data[key] = value From 1be9a4112a7baff6ae91f324f048b5af849bb32a Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 9 Jan 2022 20:42:40 +0100 Subject: [PATCH 006/160] Improve FusionPreLaunch hook error readability + make it a pop-up from the launcher. - I've removed the usage of ` in the string as they would convert into special characters in the pop-up. So those are changed to '. --- .../hosts/fusion/hooks/pre_fusion_setup.py | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index a0c16a6700..9da7237505 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,6 +1,6 @@ import os import importlib -from openpype.lib import PreLaunchHook +from openpype.lib import PreLaunchHook, ApplicationLaunchFailed from openpype.hosts.fusion.api import utils @@ -14,24 +14,26 @@ class FusionPrelaunch(PreLaunchHook): def execute(self): # making sure pyton 3.6 is installed at provided path py36_dir = os.path.normpath(self.launch_context.env.get("PYTHON36", "")) - assert os.path.isdir(py36_dir), ( - "Python 3.6 is not installed at the provided folder path. Either " - "make sure the `environments\resolve.json` is having correctly " - "set `PYTHON36` or make sure Python 3.6 is installed " - f"in given path. \nPYTHON36E: `{py36_dir}`" + if not os.path.isdir(py36_dir): + raise ApplicationLaunchFailed( + "Python 3.6 is not installed at the provided path.\n" + "Either make sure the 'environments/fusion.json' has " + "'PYTHON36' set corectly or make sure Python 3.6 is installed " + f"in the given path.\n\nPYTHON36: {py36_dir}" ) - self.log.info(f"Path to Fusion Python folder: `{py36_dir}`...") + self.log.info(f"Path to Fusion Python folder: '{py36_dir}'...") self.launch_context.env["PYTHON36"] = py36_dir # setting utility scripts dir for scripts syncing us_dir = os.path.normpath( self.launch_context.env.get("FUSION_UTILITY_SCRIPTS_DIR", "") ) - assert os.path.isdir(us_dir), ( - "Fusion utility script dir does not exists. Either make sure " - "the `environments\fusion.json` is having correctly set " - "`FUSION_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve. \n" - f"FUSION_UTILITY_SCRIPTS_DIR: `{us_dir}`" + if not os.path.isdir(us_dir): + raise ApplicationLaunchFailed( + "Fusion utility script dir does not exist. 
Either make sure " + "the 'environments/fusion.json' has 'FUSION_UTILITY_SCRIPTS_DIR' " + "set correctly or reinstall DaVinci Resolve.\n\n" + f"FUSION_UTILITY_SCRIPTS_DIR: '{us_dir}'" ) try: From ff8643a128e57bb72ad42c8e31ad9925026c2e81 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 9 Jan 2022 20:48:39 +0100 Subject: [PATCH 007/160] Fix indentations --- .../hosts/fusion/hooks/pre_fusion_setup.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index 9da7237505..906c1e7b8a 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -16,11 +16,11 @@ class FusionPrelaunch(PreLaunchHook): py36_dir = os.path.normpath(self.launch_context.env.get("PYTHON36", "")) if not os.path.isdir(py36_dir): raise ApplicationLaunchFailed( - "Python 3.6 is not installed at the provided path.\n" - "Either make sure the 'environments/fusion.json' has " - "'PYTHON36' set corectly or make sure Python 3.6 is installed " - f"in the given path.\n\nPYTHON36: {py36_dir}" - ) + "Python 3.6 is not installed at the provided path.\n" + "Either make sure the 'environments/fusion.json' has " + "'PYTHON36' set corectly or make sure Python 3.6 is installed " + f"in the given path.\n\nPYTHON36: {py36_dir}" + ) self.log.info(f"Path to Fusion Python folder: '{py36_dir}'...") self.launch_context.env["PYTHON36"] = py36_dir @@ -30,11 +30,12 @@ class FusionPrelaunch(PreLaunchHook): ) if not os.path.isdir(us_dir): raise ApplicationLaunchFailed( - "Fusion utility script dir does not exist. Either make sure " - "the 'environments/fusion.json' has 'FUSION_UTILITY_SCRIPTS_DIR' " - "set correctly or reinstall DaVinci Resolve.\n\n" - f"FUSION_UTILITY_SCRIPTS_DIR: '{us_dir}'" - ) + "Fusion utility script dir does not exist. Either make sure " + "the 'environments/fusion.json' has " + "'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall " + "DaVinci Resolve.\n\n" + f"FUSION_UTILITY_SCRIPTS_DIR: '{us_dir}'" + ) try: __import__("avalon.fusion") From 425dbad2ac33cdcb960aa1ed539f2caf9532543e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 9 Jan 2022 20:49:24 +0100 Subject: [PATCH 008/160] Refactor mention of Resolve to Fusion. --- openpype/hosts/fusion/hooks/pre_fusion_setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index 906c1e7b8a..8c4973cf43 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -33,8 +33,7 @@ class FusionPrelaunch(PreLaunchHook): "Fusion utility script dir does not exist. 
Either make sure " "the 'environments/fusion.json' has " "'FUSION_UTILITY_SCRIPTS_DIR' set correctly or reinstall " - "DaVinci Resolve.\n\n" - f"FUSION_UTILITY_SCRIPTS_DIR: '{us_dir}'" + f"Fusion.\n\nFUSION_UTILITY_SCRIPTS_DIR: '{us_dir}'" ) try: From 3e53a45bfadc50ae5dd2167f714ebba657edeec2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 12:20:27 +0100 Subject: [PATCH 009/160] Flame: collect timeline ocio plugin --- .../plugins/publish/precollect_workfile.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/precollect_workfile.py diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py new file mode 100644 index 0000000000..0533d01e00 --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -0,0 +1,26 @@ +import pyblish.api +import openpype.hosts.flame.api as opfapi +from openpype.hosts.flame.otio import flame_export + + +class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): + """Inject the current working context into publish context""" + + label = "Precollect Timeline OTIO" + order = pyblish.api.CollectorOrder - 0.5 + + def process(self, context): + project = opfapi.get_current_project() + sequence = opfapi.get_current_sequence(opfapi.CTX.selection) + + # adding otio timeline to context + otio_timeline = flame_export.create_otio_timeline(sequence) + + # update context with main project attributes + context.data.update({ + "otioTimeline": otio_timeline, + "currentFile": "Flame/{}/{}".format( + project.name, sequence.name + ), + "fps": float(str(sequence.frame_rate)[:-4]) + }) From 104b57120c64d3095c492848adca11a47a958749 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 12:32:35 +0100 Subject: [PATCH 010/160] Flame: collect instance in otio timeline plugin --- .../flame/plugins/publish/precollect_workfile.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 0533d01e00..3497d19d15 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -1,4 +1,5 @@ import pyblish.api +import avalon.api as avalon import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export @@ -10,12 +11,25 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 def process(self, context): + asset = avalon.Session["AVALON_ASSET"] + subset = "otioTimeline" project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # adding otio timeline to context otio_timeline = flame_export.create_otio_timeline(sequence) + instance_data = { + "name": "{}_{}".format(asset, subset), + "asset": asset, + "subset": "{}{}".format(asset, subset.capitalize()), + "family": "workfile" + } + + # create instance with workfile + instance = context.create_instance(**instance_data) + self.log.info("Creating instance: {}".format(instance)) + # update context with main project attributes context.data.update({ "otioTimeline": otio_timeline, From 9e70f67f4716d8af3956af3486ffc47256b9db96 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 12:32:53 +0100 Subject: [PATCH 011/160] Flame: exctracting otio file --- .../plugins/publish/extract_otio_file.py | 43 +++++++++++++++++++ 1 file changed, 43 
insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/extract_otio_file.py diff --git a/openpype/hosts/flame/plugins/publish/extract_otio_file.py b/openpype/hosts/flame/plugins/publish/extract_otio_file.py new file mode 100644 index 0000000000..7dd75974fc --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/extract_otio_file.py @@ -0,0 +1,43 @@ +import os +import pyblish.api +import openpype.api +import opentimelineio as otio + + +class ExtractOTIOFile(openpype.api.Extractor): + """ + Extractor export OTIO file + """ + + label = "Extract OTIO file" + order = pyblish.api.ExtractorOrder - 0.45 + families = ["workfile"] + hosts = ["flame"] + + def process(self, instance): + # create representation data + if "representations" not in instance.data: + instance.data["representations"] = [] + + name = instance.data["name"] + staging_dir = self.staging_dir(instance) + + otio_timeline = instance.context.data["otioTimeline"] + # create otio timeline representation + otio_file_name = name + ".otio" + otio_file_path = os.path.join(staging_dir, otio_file_name) + + # export otio file to temp dir + otio.adapters.write_to_file(otio_timeline, otio_file_path) + + representation_otio = { + 'name': "otio", + 'ext': "otio", + 'files': otio_file_name, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation_otio) + + self.log.info("Added OTIO file representation: {}".format( + representation_otio)) From 32ceb9e9a98fa662bab525be4b8a007f4e8624f6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:24:09 +0100 Subject: [PATCH 012/160] flame: enhancing code of api lib --- openpype/hosts/flame/api/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 2cc9fee173..787ecf4569 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -448,6 +448,8 @@ def get_sequence_segments(sequence, selected=False): for segment in track.segments: if segment.name.get_value() == "": continue + if segment.hidden: + continue if ( selected is True and segment.selected.get_value() is not True @@ -522,7 +524,7 @@ def _get_shot_tokens_values(clip, tokens): def get_segment_attributes(segment): - if str(segment.name)[1:-1] == "": + if segment.name.get_value() == "": return None # Add timeline segment to tree From 02af9b69a195dca87a109fbfd28880372f4feaf4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:25:16 +0100 Subject: [PATCH 013/160] flame: adding flameSequnce attribute to publishing context --- openpype/hosts/flame/plugins/publish/precollect_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 3497d19d15..3d2ce97755 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -32,6 +32,7 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): # update context with main project attributes context.data.update({ + "flameSequence": sequence, "otioTimeline": otio_timeline, "currentFile": "Flame/{}/{}".format( project.name, sequence.name From 281ae76794f2c04ba9081c402b8632bb37b3cafc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:55:18 +0100 Subject: [PATCH 014/160] flame: adding functions to lib and api --- openpype/hosts/flame/api/__init__.py | 10 ++- openpype/hosts/flame/api/lib.py | 114 
+++++++++++++++++++++++++++ 2 files changed, 123 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index dc47488dc1..308682b884 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -23,7 +23,11 @@ from .lib import ( get_sequence_segments, maintained_segment_selection, reset_segment_selection, - get_segment_attributes + get_segment_attributes, + get_clips_in_reels, + get_reformated_path, + get_frame_from_path, + get_padding_from_path ) from .utils import ( setup @@ -80,6 +84,10 @@ __all__ = [ "maintained_segment_selection", "reset_segment_selection", "get_segment_attributes", + "get_clips_in_reels", + "get_reformated_path", + "get_frame_from_path", + "get_padding_from_path", # pipeline "install", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 787ecf4569..4404f7a612 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -537,6 +537,12 @@ def get_segment_attributes(segment): "PySegment": segment } + # head and tail with forward compatibility + if segment.head: + clip_data["segment_head"] = int(segment.head) + if segment.tail: + clip_data["segment_tail"] = int(segment.tail) + # add all available shot tokens shot_tokens = _get_shot_tokens_values(segment, [ "", "", "", "", "", @@ -564,3 +570,111 @@ def get_segment_attributes(segment): clip_data["segment_timecodes"] = segment_attrs_data return clip_data + + +def get_clips_in_reels(project): + output_clips = [] + project_desktop = project.current_workspace.desktop + + for reel_group in project_desktop.reel_groups: + for reel in reel_group.reels: + for clip in reel.clips: + clip_data = { + "PyClip": clip, + "fps": float(str(clip.frame_rate)[:-4]) + } + + attrs = [ + "name", "width", "height", + "ratio", "sample_rate", "bit_depth" + ] + + for attr in attrs: + val = getattr(clip, attr) + clip_data[attr] = val + + version = clip.versions[-1] + track = version.tracks[-1] + for segment in track.segments: + segment_data = get_segment_attributes(segment) + clip_data.update(segment_data) + + output_clips.append(clip_data) + + return output_clips + + +def get_reformated_path(path, padded=True): + """ + Return fixed python expression path + + Args: + path (str): path url or simple file name + + Returns: + type: string with reformated path + + Example: + get_reformated_path("plate.1001.exr") > plate.%04d.exr + + """ + padding = get_padding_from_path(path) + found = get_frame_from_path(path) + + if not found: + log.info("Path is not sequence: {}".format(path)) + return path + + if padded: + path = path.replace(found, "%0{}d".format(padding)) + else: + path = path.replace(found, "%d") + + return path + + +def get_padding_from_path(path): + """ + Return padding number from Flame path style + + Args: + path (str): path url or simple file name + + Returns: + int: padding number + + Example: + get_padding_from_path("plate.0001.exr") > 4 + + """ + found = get_frame_from_path(path) + + if found: + return len(found) + else: + return None + + +def get_frame_from_path(path): + """ + Return sequence number from Flame path style + + Args: + path (str): path url or simple file name + + Returns: + int: sequence frame number + + Example: + def get_frame_from_path(path): + ("plate.0001.exr") > 0001 + + """ + frame_pattern = re.compile(r"[._](\d+)[.]") + + found = re.findall(frame_pattern, path) + + if found: + return found.pop() + else: + return None From 50e1cbf31e38e7923aceba97da4d1d37eee7c47c Mon 
Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:55:40 +0100 Subject: [PATCH 015/160] flame: adding flameProject to publishing context attributes --- openpype/hosts/flame/plugins/publish/precollect_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 3d2ce97755..e7383ddec8 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -32,6 +32,7 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): # update context with main project attributes context.data.update({ + "flameProject": project, "flameSequence": sequence, "otioTimeline": otio_timeline, "currentFile": "Flame/{}/{}".format( From 460048ef4c1a5b6c90ef8161f6394acb85a95d0c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:58:12 +0100 Subject: [PATCH 016/160] flame: collect instances wip --- .../plugins/publish/precollect_instances.py | 251 ++++++++++++++++++ 1 file changed, 251 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/precollect_instances.py diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py new file mode 100644 index 0000000000..5f3b71eba4 --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -0,0 +1,251 @@ +import pyblish +# import openpype +import openpype.hosts.flame.api as opfapi + +# # developer reload modules +from pprint import pformat + + +class PrecollectInstances(pyblish.api.ContextPlugin): + """Collect all Track items selection.""" + + order = pyblish.api.CollectorOrder - 0.49 + label = "Precollect Instances" + hosts = ["flame"] + + audio_track_items = [] + + def process(self, context): + project = context.data["flameProject"] + sequence = context.data["flameSequence"] + self.otio_timeline = context.data["otioTimeline"] + self.clips_in_reels = opfapi.get_clips_in_reels(project) + + # return only actually selected and enabled segments + selected_segments = opfapi.get_sequence_segments(sequence, True) + + # only return enabled segments + if not selected_segments: + selected_segments = opfapi.get_sequence_segments( + sequence) + + self.log.info( + "Processing following segments: {}".format( + [s.name for s in selected_segments])) + + # process all sellected timeline track items + for segment in selected_segments: + + clip_data = opfapi.get_segment_attributes(segment) + clip_name = clip_data["segment_name"] + self.log.debug("clip_name: {}".format(clip_name)) + + # get openpype tag data + marker_data = opfapi.get_segment_data_marker(segment) + self.log.debug("__ marker_data: {}".format(pformat(marker_data))) + + if not marker_data: + continue + + if marker_data.get("id") != "pyblish.avalon.instance": + continue + + file_path = clip_data["fpath"] + first_frame = opfapi.get_frame_from_path(file_path) or 0 + + # calculate head and tail with forward compatibility + head = clip_data.get("segment_head") + tail = clip_data.get("segment_tail") + + if not head: + head = int(clip_data["source_in"]) - int(first_frame) + if not tail: + tail = int( + clip_data["source_duration"] - ( + head + clip_data["record_duration"] + ) + ) + + # solve handles length + marker_data["handleStart"] = min( + marker_data["handleStart"], head) + marker_data["handleEnd"] = min( + marker_data["handleEnd"], tail) + + # add audio to families + with_audio = False + if 
marker_data.pop("audio"): + with_audio = True + + # add tag data to instance data + data = { + k: v for k, v in marker_data.items() + if k not in ("id", "applieswhole", "label") + } + + asset = marker_data["asset"] + subset = marker_data["subset"] + + # insert family into families + family = marker_data["family"] + families = [str(f) for f in marker_data["families"]] + families.insert(0, str(family)) + + # form label + label = asset + if asset != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(subset) + label += " {}".format("[" + ", ".join(families) + "]") + + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "asset": asset, + "item": segment, + "families": families, + "publish": marker_data["publish"], + "fps": context.data["fps"], + }) + + # # otio clip data + # otio_data = self.get_otio_clip_instance_data(segment) or {} + # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) + # data.update(otio_data) + # self.log.debug("__ data: {}".format(pformat(data))) + + # # add resolution + # self.get_resolution_to_data(data, context) + + # create instance + instance = context.create_instance(**data) + + # add colorspace data + instance.data.update({ + "versionData": { + "colorspace": clip_data["colour_space"], + } + }) + + # create shot instance for shot attributes create/update + self.create_shot_instance(context, clip_name, **data) + + self.log.info("Creating instance: {}".format(instance)) + self.log.info( + "_ instance.data: {}".format(pformat(instance.data))) + + if not with_audio: + continue + + # add audioReview attribute to plate instance data + # if reviewTrack is on + if marker_data.get("reviewTrack") is not None: + instance.data["reviewAudio"] = True + + def get_resolution_to_data(self, data, context): + assert data.get("otioClip"), "Missing `otioClip` data" + + # solve source resolution option + if data.get("sourceResolution", None): + otio_clip_metadata = data[ + "otioClip"].media_reference.metadata + data.update({ + "resolutionWidth": otio_clip_metadata[ + "openpype.source.width"], + "resolutionHeight": otio_clip_metadata[ + "openpype.source.height"], + "pixelAspect": otio_clip_metadata[ + "openpype.source.pixelAspect"] + }) + else: + otio_tl_metadata = context.data["otioTimeline"].metadata + data.update({ + "resolutionWidth": otio_tl_metadata["openpype.timeline.width"], + "resolutionHeight": otio_tl_metadata[ + "openpype.timeline.height"], + "pixelAspect": otio_tl_metadata[ + "openpype.timeline.pixelAspect"] + }) + + def create_shot_instance(self, context, clip_name, **data): + master_layer = data.get("heroTrack") + hierarchy_data = data.get("hierarchyData") + asset = data.get("asset") + + if not master_layer: + return + + if not hierarchy_data: + return + + asset = data["asset"] + subset = "shotMain" + + # insert family into families + family = "shot" + + # form label + label = asset + if asset != clip_name: + label += " ({}) ".format(clip_name) + label += " {}".format(subset) + label += " [{}]".format(family) + + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "subset": subset, + "asset": asset, + "family": family, + "families": [] + }) + + instance = context.create_instance(**data) + self.log.info("Creating instance: {}".format(instance)) + self.log.debug( + "_ instance.data: {}".format(pformat(instance.data))) + + # def get_otio_clip_instance_data(self, segment): + # """ + # Return otio objects for timeline, track and clip + + # Args: + # timeline_item_data (dict): timeline_item_data from 
list returned by + # resolve.get_current_timeline_items() + # otio_timeline (otio.schema.Timeline): otio object + + # Returns: + # dict: otio clip object + + # """ + # ti_track_name = segment.parent().name() + # timeline_range = self.create_otio_time_range_from_timeline_item_data( + # segment) + # for otio_clip in self.otio_timeline.each_clip(): + # track_name = otio_clip.parent().name + # parent_range = otio_clip.range_in_parent() + # if ti_track_name not in track_name: + # continue + # if otio_clip.name not in segment.name(): + # continue + # if openpype.lib.is_overlapping_otio_ranges( + # parent_range, timeline_range, strict=True): + + # # add pypedata marker to otio_clip metadata + # for marker in otio_clip.markers: + # if phiero.pype_tag_name in marker.name: + # otio_clip.metadata.update(marker.metadata) + # return {"otioClip": otio_clip} + + # return None + + # @staticmethod + # def create_otio_time_range_from_timeline_item_data(segment): + # speed = segment.playbackSpeed() + # timeline = phiero.get_current_sequence() + # frame_start = int(segment.timelineIn()) + # frame_duration = int(segment.sourceDuration() / speed) + # fps = timeline.framerate().toFloat() + + # return hiero_export.create_otio_time_range( + # frame_start, frame_duration, fps) From 4fa7eb25ffabc6f83a7af09400f42d3e61addbb3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 16:32:00 +0100 Subject: [PATCH 017/160] flame: fix selection --- openpype/hosts/flame/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 4404f7a612..a409e731e3 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -448,7 +448,7 @@ def get_sequence_segments(sequence, selected=False): for segment in track.segments: if segment.name.get_value() == "": continue - if segment.hidden: + if segment.hidden.get_value() is True: continue if ( selected is True From a326ab429040c799ac6b45683b326aba65da3fc4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 17:01:39 +0100 Subject: [PATCH 018/160] flame: deactivating test plugin --- openpype/hosts/flame/plugins/publish/collect_test_selection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/collect_test_selection.py b/openpype/hosts/flame/plugins/publish/collect_test_selection.py index 0c75b3204f..84fd4fafe8 100644 --- a/openpype/hosts/flame/plugins/publish/collect_test_selection.py +++ b/openpype/hosts/flame/plugins/publish/collect_test_selection.py @@ -14,6 +14,7 @@ class CollectTestSelection(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder label = "test selection" hosts = ["flame"] + active = False def process(self, context): self.log.info( From 1669f1782b08c2906dc2c0a705e66bda8031e73c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 17:03:03 +0100 Subject: [PATCH 019/160] flame: adding maintained selection to publish plugins --- .../plugins/publish/precollect_instances.py | 198 ++++++++---------- .../plugins/publish/precollect_workfile.py | 3 +- 2 files changed, 95 insertions(+), 106 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py index 5f3b71eba4..e302bc42a4 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -21,126 +21,114 @@ class PrecollectInstances(pyblish.api.ContextPlugin): 
self.otio_timeline = context.data["otioTimeline"] self.clips_in_reels = opfapi.get_clips_in_reels(project) - # return only actually selected and enabled segments - selected_segments = opfapi.get_sequence_segments(sequence, True) + # process all sellected + with opfapi.maintained_segment_selection(sequence) as selected_segments: + for segment in selected_segments: + clip_data = opfapi.get_segment_attributes(segment) + clip_name = clip_data["segment_name"] + self.log.debug("clip_name: {}".format(clip_name)) - # only return enabled segments - if not selected_segments: - selected_segments = opfapi.get_sequence_segments( - sequence) + # get openpype tag data + marker_data = opfapi.get_segment_data_marker(segment) + self.log.debug("__ marker_data: {}".format(pformat(marker_data))) - self.log.info( - "Processing following segments: {}".format( - [s.name for s in selected_segments])) + if not marker_data: + continue - # process all sellected timeline track items - for segment in selected_segments: + if marker_data.get("id") != "pyblish.avalon.instance": + continue - clip_data = opfapi.get_segment_attributes(segment) - clip_name = clip_data["segment_name"] - self.log.debug("clip_name: {}".format(clip_name)) + file_path = clip_data["fpath"] + first_frame = opfapi.get_frame_from_path(file_path) or 0 - # get openpype tag data - marker_data = opfapi.get_segment_data_marker(segment) - self.log.debug("__ marker_data: {}".format(pformat(marker_data))) + # calculate head and tail with forward compatibility + head = clip_data.get("segment_head") + tail = clip_data.get("segment_tail") - if not marker_data: - continue - - if marker_data.get("id") != "pyblish.avalon.instance": - continue - - file_path = clip_data["fpath"] - first_frame = opfapi.get_frame_from_path(file_path) or 0 - - # calculate head and tail with forward compatibility - head = clip_data.get("segment_head") - tail = clip_data.get("segment_tail") - - if not head: - head = int(clip_data["source_in"]) - int(first_frame) - if not tail: - tail = int( - clip_data["source_duration"] - ( - head + clip_data["record_duration"] + if not head: + head = int(clip_data["source_in"]) - int(first_frame) + if not tail: + tail = int( + clip_data["source_duration"] - ( + head + clip_data["record_duration"] + ) ) - ) - # solve handles length - marker_data["handleStart"] = min( - marker_data["handleStart"], head) - marker_data["handleEnd"] = min( - marker_data["handleEnd"], tail) + # solve handles length + marker_data["handleStart"] = min( + marker_data["handleStart"], head) + marker_data["handleEnd"] = min( + marker_data["handleEnd"], tail) - # add audio to families - with_audio = False - if marker_data.pop("audio"): - with_audio = True + # add audio to families + with_audio = False + if marker_data.pop("audio"): + with_audio = True - # add tag data to instance data - data = { - k: v for k, v in marker_data.items() - if k not in ("id", "applieswhole", "label") - } - - asset = marker_data["asset"] - subset = marker_data["subset"] - - # insert family into families - family = marker_data["family"] - families = [str(f) for f in marker_data["families"]] - families.insert(0, str(family)) - - # form label - label = asset - if asset != clip_name: - label += " ({})".format(clip_name) - label += " {}".format(subset) - label += " {}".format("[" + ", ".join(families) + "]") - - data.update({ - "name": "{}_{}".format(asset, subset), - "label": label, - "asset": asset, - "item": segment, - "families": families, - "publish": marker_data["publish"], - "fps": context.data["fps"], 
- }) - - # # otio clip data - # otio_data = self.get_otio_clip_instance_data(segment) or {} - # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) - # data.update(otio_data) - # self.log.debug("__ data: {}".format(pformat(data))) - - # # add resolution - # self.get_resolution_to_data(data, context) - - # create instance - instance = context.create_instance(**data) - - # add colorspace data - instance.data.update({ - "versionData": { - "colorspace": clip_data["colour_space"], + # add tag data to instance data + data = { + k: v for k, v in marker_data.items() + if k not in ("id", "applieswhole", "label") } - }) - # create shot instance for shot attributes create/update - self.create_shot_instance(context, clip_name, **data) + asset = marker_data["asset"] + subset = marker_data["subset"] - self.log.info("Creating instance: {}".format(instance)) - self.log.info( - "_ instance.data: {}".format(pformat(instance.data))) + # insert family into families + family = marker_data["family"] + families = [str(f) for f in marker_data["families"]] + families.insert(0, str(family)) - if not with_audio: - continue + # form label + label = asset + if asset != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(subset) + label += " {}".format("[" + ", ".join(families) + "]") - # add audioReview attribute to plate instance data - # if reviewTrack is on - if marker_data.get("reviewTrack") is not None: - instance.data["reviewAudio"] = True + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "asset": asset, + "item": segment, + "families": families, + "publish": marker_data["publish"], + "fps": context.data["fps"], + }) + + # # otio clip data + # otio_data = self.get_otio_clip_instance_data(segment) or {} + # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) + # data.update(otio_data) + # self.log.debug("__ data: {}".format(pformat(data))) + + # # add resolution + # self.get_resolution_to_data(data, context) + + # create instance + instance = context.create_instance(**data) + + # add colorspace data + instance.data.update({ + "versionData": { + "colorspace": clip_data["colour_space"], + } + }) + + # create shot instance for shot attributes create/update + self.create_shot_instance(context, clip_name, **data) + + self.log.info("Creating instance: {}".format(instance)) + self.log.info( + "_ instance.data: {}".format(pformat(instance.data))) + + if not with_audio: + continue + + # add audioReview attribute to plate instance data + # if reviewTrack is on + if marker_data.get("reviewTrack") is not None: + instance.data["reviewAudio"] = True def get_resolution_to_data(self, data, context): assert data.get("otioClip"), "Missing `otioClip` data" diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index e7383ddec8..aff85e22e6 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -17,7 +17,8 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # adding otio timeline to context - otio_timeline = flame_export.create_otio_timeline(sequence) + with opfapi.maintained_segment_selection(sequence): + otio_timeline = flame_export.create_otio_timeline(sequence) instance_data = { "name": "{}_{}".format(asset, subset), From 74958ba642643dc78988ad1b6b9fbfcaa2127148 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 
12:20:27 +0100 Subject: [PATCH 020/160] Flame: collect timeline ocio plugin --- .../plugins/publish/precollect_workfile.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/precollect_workfile.py diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py new file mode 100644 index 0000000000..0533d01e00 --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -0,0 +1,26 @@ +import pyblish.api +import openpype.hosts.flame.api as opfapi +from openpype.hosts.flame.otio import flame_export + + +class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): + """Inject the current working context into publish context""" + + label = "Precollect Timeline OTIO" + order = pyblish.api.CollectorOrder - 0.5 + + def process(self, context): + project = opfapi.get_current_project() + sequence = opfapi.get_current_sequence(opfapi.CTX.selection) + + # adding otio timeline to context + otio_timeline = flame_export.create_otio_timeline(sequence) + + # update context with main project attributes + context.data.update({ + "otioTimeline": otio_timeline, + "currentFile": "Flame/{}/{}".format( + project.name, sequence.name + ), + "fps": float(str(sequence.frame_rate)[:-4]) + }) From 4f4efea936d4a198fe1b220c07e71ae77a065621 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 12:32:35 +0100 Subject: [PATCH 021/160] Flame: collect instance in otio timeline plugin --- .../flame/plugins/publish/precollect_workfile.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 0533d01e00..3497d19d15 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -1,4 +1,5 @@ import pyblish.api +import avalon.api as avalon import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export @@ -10,12 +11,25 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 def process(self, context): + asset = avalon.Session["AVALON_ASSET"] + subset = "otioTimeline" project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # adding otio timeline to context otio_timeline = flame_export.create_otio_timeline(sequence) + instance_data = { + "name": "{}_{}".format(asset, subset), + "asset": asset, + "subset": "{}{}".format(asset, subset.capitalize()), + "family": "workfile" + } + + # create instance with workfile + instance = context.create_instance(**instance_data) + self.log.info("Creating instance: {}".format(instance)) + # update context with main project attributes context.data.update({ "otioTimeline": otio_timeline, From 65fe3a28bb74b26055913fe909208fd6e97becdf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 12:32:53 +0100 Subject: [PATCH 022/160] Flame: exctracting otio file --- .../plugins/publish/extract_otio_file.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/extract_otio_file.py diff --git a/openpype/hosts/flame/plugins/publish/extract_otio_file.py b/openpype/hosts/flame/plugins/publish/extract_otio_file.py new file mode 100644 index 0000000000..7dd75974fc --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/extract_otio_file.py @@ -0,0 +1,43 @@ 
+import os +import pyblish.api +import openpype.api +import opentimelineio as otio + + +class ExtractOTIOFile(openpype.api.Extractor): + """ + Extractor export OTIO file + """ + + label = "Extract OTIO file" + order = pyblish.api.ExtractorOrder - 0.45 + families = ["workfile"] + hosts = ["flame"] + + def process(self, instance): + # create representation data + if "representations" not in instance.data: + instance.data["representations"] = [] + + name = instance.data["name"] + staging_dir = self.staging_dir(instance) + + otio_timeline = instance.context.data["otioTimeline"] + # create otio timeline representation + otio_file_name = name + ".otio" + otio_file_path = os.path.join(staging_dir, otio_file_name) + + # export otio file to temp dir + otio.adapters.write_to_file(otio_timeline, otio_file_path) + + representation_otio = { + 'name': "otio", + 'ext': "otio", + 'files': otio_file_name, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation_otio) + + self.log.info("Added OTIO file representation: {}".format( + representation_otio)) From 42bdd8db7f3c1127f376372f9c5c12bc70daad89 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:24:09 +0100 Subject: [PATCH 023/160] flame: enhancing code of api lib --- openpype/hosts/flame/api/lib.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 7788a6b3f4..b5c7f2031b 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -445,6 +445,8 @@ def get_sequence_segments(sequence, selected=False): for segment in track.segments: if segment.name.get_value() == "": continue + if segment.hidden: + continue if ( selected is True and segment.selected.get_value() is not True @@ -519,7 +521,7 @@ def _get_shot_tokens_values(clip, tokens): def get_segment_attributes(segment): - if str(segment.name)[1:-1] == "": + if segment.name.get_value() == "": return None # Add timeline segment to tree From eb6c6a5c9fc96cd8596484f06ab91b5bbad1db64 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:25:16 +0100 Subject: [PATCH 024/160] flame: adding flameSequnce attribute to publishing context --- openpype/hosts/flame/plugins/publish/precollect_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 3497d19d15..3d2ce97755 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -32,6 +32,7 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): # update context with main project attributes context.data.update({ + "flameSequence": sequence, "otioTimeline": otio_timeline, "currentFile": "Flame/{}/{}".format( project.name, sequence.name From 093015bf34b438d66c6b773c248bf7d67168a6ae Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:55:18 +0100 Subject: [PATCH 025/160] flame: adding functions to lib and api --- openpype/hosts/flame/api/__init__.py | 10 ++- openpype/hosts/flame/api/lib.py | 114 +++++++++++++++++++++++++++ 2 files changed, 123 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index dc47488dc1..308682b884 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -23,7 +23,11 @@ from .lib import ( get_sequence_segments, 
maintained_segment_selection, reset_segment_selection, - get_segment_attributes + get_segment_attributes, + get_clips_in_reels, + get_reformated_path, + get_frame_from_path, + get_padding_from_path ) from .utils import ( setup @@ -80,6 +84,10 @@ __all__ = [ "maintained_segment_selection", "reset_segment_selection", "get_segment_attributes", + "get_clips_in_reels", + "get_reformated_path", + "get_frame_from_path", + "get_padding_from_path", # pipeline "install", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index b5c7f2031b..b204230d9a 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -534,6 +534,12 @@ def get_segment_attributes(segment): "PySegment": segment } + # head and tail with forward compatibility + if segment.head: + clip_data["segment_head"] = int(segment.head) + if segment.tail: + clip_data["segment_tail"] = int(segment.tail) + # add all available shot tokens shot_tokens = _get_shot_tokens_values(segment, [ "", "", "", "", "", @@ -561,3 +567,111 @@ def get_segment_attributes(segment): clip_data["segment_timecodes"] = segment_attrs_data return clip_data + + +def get_clips_in_reels(project): + output_clips = [] + project_desktop = project.current_workspace.desktop + + for reel_group in project_desktop.reel_groups: + for reel in reel_group.reels: + for clip in reel.clips: + clip_data = { + "PyClip": clip, + "fps": float(str(clip.frame_rate)[:-4]) + } + + attrs = [ + "name", "width", "height", + "ratio", "sample_rate", "bit_depth" + ] + + for attr in attrs: + val = getattr(clip, attr) + clip_data[attr] = val + + version = clip.versions[-1] + track = version.tracks[-1] + for segment in track.segments: + segment_data = get_segment_attributes(segment) + clip_data.update(segment_data) + + output_clips.append(clip_data) + + return output_clips + + +def get_reformated_path(path, padded=True): + """ + Return fixed python expression path + + Args: + path (str): path url or simple file name + + Returns: + type: string with reformated path + + Example: + get_reformated_path("plate.1001.exr") > plate.%04d.exr + + """ + padding = get_padding_from_path(path) + found = get_frame_from_path(path) + + if not found: + log.info("Path is not sequence: {}".format(path)) + return path + + if padded: + path = path.replace(found, "%0{}d".format(padding)) + else: + path = path.replace(found, "%d") + + return path + + +def get_padding_from_path(path): + """ + Return padding number from Flame path style + + Args: + path (str): path url or simple file name + + Returns: + int: padding number + + Example: + get_padding_from_path("plate.0001.exr") > 4 + + """ + found = get_frame_from_path(path) + + if found: + return len(found) + else: + return None + + +def get_frame_from_path(path): + """ + Return sequence number from Flame path style + + Args: + path (str): path url or simple file name + + Returns: + int: sequence frame number + + Example: + def get_frame_from_path(path): + ("plate.0001.exr") > 0001 + + """ + frame_pattern = re.compile(r"[._](\d+)[.]") + + found = re.findall(frame_pattern, path) + + if found: + return found.pop() + else: + return None From 402b18640967070ad8fb2079f7ec0d92fb5a222b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:55:40 +0100 Subject: [PATCH 026/160] flame: adding flameProject to publishing context attributes --- openpype/hosts/flame/plugins/publish/precollect_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py 
b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 3d2ce97755..e7383ddec8 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -32,6 +32,7 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): # update context with main project attributes context.data.update({ + "flameProject": project, "flameSequence": sequence, "otioTimeline": otio_timeline, "currentFile": "Flame/{}/{}".format( From 70d31f2ef16b9a97e03c5f956b344c64bb25c1df Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 15:58:12 +0100 Subject: [PATCH 027/160] flame: collect instances wip --- .../plugins/publish/precollect_instances.py | 251 ++++++++++++++++++ 1 file changed, 251 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/precollect_instances.py diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py new file mode 100644 index 0000000000..5f3b71eba4 --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -0,0 +1,251 @@ +import pyblish +# import openpype +import openpype.hosts.flame.api as opfapi + +# # developer reload modules +from pprint import pformat + + +class PrecollectInstances(pyblish.api.ContextPlugin): + """Collect all Track items selection.""" + + order = pyblish.api.CollectorOrder - 0.49 + label = "Precollect Instances" + hosts = ["flame"] + + audio_track_items = [] + + def process(self, context): + project = context.data["flameProject"] + sequence = context.data["flameSequence"] + self.otio_timeline = context.data["otioTimeline"] + self.clips_in_reels = opfapi.get_clips_in_reels(project) + + # return only actually selected and enabled segments + selected_segments = opfapi.get_sequence_segments(sequence, True) + + # only return enabled segments + if not selected_segments: + selected_segments = opfapi.get_sequence_segments( + sequence) + + self.log.info( + "Processing following segments: {}".format( + [s.name for s in selected_segments])) + + # process all sellected timeline track items + for segment in selected_segments: + + clip_data = opfapi.get_segment_attributes(segment) + clip_name = clip_data["segment_name"] + self.log.debug("clip_name: {}".format(clip_name)) + + # get openpype tag data + marker_data = opfapi.get_segment_data_marker(segment) + self.log.debug("__ marker_data: {}".format(pformat(marker_data))) + + if not marker_data: + continue + + if marker_data.get("id") != "pyblish.avalon.instance": + continue + + file_path = clip_data["fpath"] + first_frame = opfapi.get_frame_from_path(file_path) or 0 + + # calculate head and tail with forward compatibility + head = clip_data.get("segment_head") + tail = clip_data.get("segment_tail") + + if not head: + head = int(clip_data["source_in"]) - int(first_frame) + if not tail: + tail = int( + clip_data["source_duration"] - ( + head + clip_data["record_duration"] + ) + ) + + # solve handles length + marker_data["handleStart"] = min( + marker_data["handleStart"], head) + marker_data["handleEnd"] = min( + marker_data["handleEnd"], tail) + + # add audio to families + with_audio = False + if marker_data.pop("audio"): + with_audio = True + + # add tag data to instance data + data = { + k: v for k, v in marker_data.items() + if k not in ("id", "applieswhole", "label") + } + + asset = marker_data["asset"] + subset = marker_data["subset"] + + # insert family into families + family = marker_data["family"] + families = [str(f) 
for f in marker_data["families"]] + families.insert(0, str(family)) + + # form label + label = asset + if asset != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(subset) + label += " {}".format("[" + ", ".join(families) + "]") + + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "asset": asset, + "item": segment, + "families": families, + "publish": marker_data["publish"], + "fps": context.data["fps"], + }) + + # # otio clip data + # otio_data = self.get_otio_clip_instance_data(segment) or {} + # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) + # data.update(otio_data) + # self.log.debug("__ data: {}".format(pformat(data))) + + # # add resolution + # self.get_resolution_to_data(data, context) + + # create instance + instance = context.create_instance(**data) + + # add colorspace data + instance.data.update({ + "versionData": { + "colorspace": clip_data["colour_space"], + } + }) + + # create shot instance for shot attributes create/update + self.create_shot_instance(context, clip_name, **data) + + self.log.info("Creating instance: {}".format(instance)) + self.log.info( + "_ instance.data: {}".format(pformat(instance.data))) + + if not with_audio: + continue + + # add audioReview attribute to plate instance data + # if reviewTrack is on + if marker_data.get("reviewTrack") is not None: + instance.data["reviewAudio"] = True + + def get_resolution_to_data(self, data, context): + assert data.get("otioClip"), "Missing `otioClip` data" + + # solve source resolution option + if data.get("sourceResolution", None): + otio_clip_metadata = data[ + "otioClip"].media_reference.metadata + data.update({ + "resolutionWidth": otio_clip_metadata[ + "openpype.source.width"], + "resolutionHeight": otio_clip_metadata[ + "openpype.source.height"], + "pixelAspect": otio_clip_metadata[ + "openpype.source.pixelAspect"] + }) + else: + otio_tl_metadata = context.data["otioTimeline"].metadata + data.update({ + "resolutionWidth": otio_tl_metadata["openpype.timeline.width"], + "resolutionHeight": otio_tl_metadata[ + "openpype.timeline.height"], + "pixelAspect": otio_tl_metadata[ + "openpype.timeline.pixelAspect"] + }) + + def create_shot_instance(self, context, clip_name, **data): + master_layer = data.get("heroTrack") + hierarchy_data = data.get("hierarchyData") + asset = data.get("asset") + + if not master_layer: + return + + if not hierarchy_data: + return + + asset = data["asset"] + subset = "shotMain" + + # insert family into families + family = "shot" + + # form label + label = asset + if asset != clip_name: + label += " ({}) ".format(clip_name) + label += " {}".format(subset) + label += " [{}]".format(family) + + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "subset": subset, + "asset": asset, + "family": family, + "families": [] + }) + + instance = context.create_instance(**data) + self.log.info("Creating instance: {}".format(instance)) + self.log.debug( + "_ instance.data: {}".format(pformat(instance.data))) + + # def get_otio_clip_instance_data(self, segment): + # """ + # Return otio objects for timeline, track and clip + + # Args: + # timeline_item_data (dict): timeline_item_data from list returned by + # resolve.get_current_timeline_items() + # otio_timeline (otio.schema.Timeline): otio object + + # Returns: + # dict: otio clip object + + # """ + # ti_track_name = segment.parent().name() + # timeline_range = self.create_otio_time_range_from_timeline_item_data( + # segment) + # for otio_clip in 
self.otio_timeline.each_clip(): + # track_name = otio_clip.parent().name + # parent_range = otio_clip.range_in_parent() + # if ti_track_name not in track_name: + # continue + # if otio_clip.name not in segment.name(): + # continue + # if openpype.lib.is_overlapping_otio_ranges( + # parent_range, timeline_range, strict=True): + + # # add pypedata marker to otio_clip metadata + # for marker in otio_clip.markers: + # if phiero.pype_tag_name in marker.name: + # otio_clip.metadata.update(marker.metadata) + # return {"otioClip": otio_clip} + + # return None + + # @staticmethod + # def create_otio_time_range_from_timeline_item_data(segment): + # speed = segment.playbackSpeed() + # timeline = phiero.get_current_sequence() + # frame_start = int(segment.timelineIn()) + # frame_duration = int(segment.sourceDuration() / speed) + # fps = timeline.framerate().toFloat() + + # return hiero_export.create_otio_time_range( + # frame_start, frame_duration, fps) From da1bb80b62d8e606e5c1b5bdc1fa0a53685c3fba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 16:32:00 +0100 Subject: [PATCH 028/160] flame: fix selection --- openpype/hosts/flame/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index b204230d9a..e53127503b 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -445,7 +445,7 @@ def get_sequence_segments(sequence, selected=False): for segment in track.segments: if segment.name.get_value() == "": continue - if segment.hidden: + if segment.hidden.get_value() is True: continue if ( selected is True From 8f786f325541e5b8282eef515789333044727a8e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 17:01:39 +0100 Subject: [PATCH 029/160] flame: deactivating test plugin --- openpype/hosts/flame/plugins/publish/collect_test_selection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/collect_test_selection.py b/openpype/hosts/flame/plugins/publish/collect_test_selection.py index 73401368b1..9f982321cc 100644 --- a/openpype/hosts/flame/plugins/publish/collect_test_selection.py +++ b/openpype/hosts/flame/plugins/publish/collect_test_selection.py @@ -16,6 +16,7 @@ class CollectTestSelection(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder label = "test selection" hosts = ["flame"] + active = False def process(self, context): self.log.info( From 28341de97f283f51043530b581fb7a34ffb6337a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 11 Jan 2022 17:03:03 +0100 Subject: [PATCH 030/160] flame: adding maintained selection to publish plugins --- .../plugins/publish/precollect_instances.py | 198 ++++++++---------- .../plugins/publish/precollect_workfile.py | 3 +- 2 files changed, 95 insertions(+), 106 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py index 5f3b71eba4..e302bc42a4 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -21,126 +21,114 @@ class PrecollectInstances(pyblish.api.ContextPlugin): self.otio_timeline = context.data["otioTimeline"] self.clips_in_reels = opfapi.get_clips_in_reels(project) - # return only actually selected and enabled segments - selected_segments = opfapi.get_sequence_segments(sequence, True) + # process all sellected + with opfapi.maintained_segment_selection(sequence) as 
selected_segments: + for segment in selected_segments: + clip_data = opfapi.get_segment_attributes(segment) + clip_name = clip_data["segment_name"] + self.log.debug("clip_name: {}".format(clip_name)) - # only return enabled segments - if not selected_segments: - selected_segments = opfapi.get_sequence_segments( - sequence) + # get openpype tag data + marker_data = opfapi.get_segment_data_marker(segment) + self.log.debug("__ marker_data: {}".format(pformat(marker_data))) - self.log.info( - "Processing following segments: {}".format( - [s.name for s in selected_segments])) + if not marker_data: + continue - # process all sellected timeline track items - for segment in selected_segments: + if marker_data.get("id") != "pyblish.avalon.instance": + continue - clip_data = opfapi.get_segment_attributes(segment) - clip_name = clip_data["segment_name"] - self.log.debug("clip_name: {}".format(clip_name)) + file_path = clip_data["fpath"] + first_frame = opfapi.get_frame_from_path(file_path) or 0 - # get openpype tag data - marker_data = opfapi.get_segment_data_marker(segment) - self.log.debug("__ marker_data: {}".format(pformat(marker_data))) + # calculate head and tail with forward compatibility + head = clip_data.get("segment_head") + tail = clip_data.get("segment_tail") - if not marker_data: - continue - - if marker_data.get("id") != "pyblish.avalon.instance": - continue - - file_path = clip_data["fpath"] - first_frame = opfapi.get_frame_from_path(file_path) or 0 - - # calculate head and tail with forward compatibility - head = clip_data.get("segment_head") - tail = clip_data.get("segment_tail") - - if not head: - head = int(clip_data["source_in"]) - int(first_frame) - if not tail: - tail = int( - clip_data["source_duration"] - ( - head + clip_data["record_duration"] + if not head: + head = int(clip_data["source_in"]) - int(first_frame) + if not tail: + tail = int( + clip_data["source_duration"] - ( + head + clip_data["record_duration"] + ) ) - ) - # solve handles length - marker_data["handleStart"] = min( - marker_data["handleStart"], head) - marker_data["handleEnd"] = min( - marker_data["handleEnd"], tail) + # solve handles length + marker_data["handleStart"] = min( + marker_data["handleStart"], head) + marker_data["handleEnd"] = min( + marker_data["handleEnd"], tail) - # add audio to families - with_audio = False - if marker_data.pop("audio"): - with_audio = True + # add audio to families + with_audio = False + if marker_data.pop("audio"): + with_audio = True - # add tag data to instance data - data = { - k: v for k, v in marker_data.items() - if k not in ("id", "applieswhole", "label") - } - - asset = marker_data["asset"] - subset = marker_data["subset"] - - # insert family into families - family = marker_data["family"] - families = [str(f) for f in marker_data["families"]] - families.insert(0, str(family)) - - # form label - label = asset - if asset != clip_name: - label += " ({})".format(clip_name) - label += " {}".format(subset) - label += " {}".format("[" + ", ".join(families) + "]") - - data.update({ - "name": "{}_{}".format(asset, subset), - "label": label, - "asset": asset, - "item": segment, - "families": families, - "publish": marker_data["publish"], - "fps": context.data["fps"], - }) - - # # otio clip data - # otio_data = self.get_otio_clip_instance_data(segment) or {} - # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) - # data.update(otio_data) - # self.log.debug("__ data: {}".format(pformat(data))) - - # # add resolution - # self.get_resolution_to_data(data, context) 
- - # create instance - instance = context.create_instance(**data) - - # add colorspace data - instance.data.update({ - "versionData": { - "colorspace": clip_data["colour_space"], + # add tag data to instance data + data = { + k: v for k, v in marker_data.items() + if k not in ("id", "applieswhole", "label") } - }) - # create shot instance for shot attributes create/update - self.create_shot_instance(context, clip_name, **data) + asset = marker_data["asset"] + subset = marker_data["subset"] - self.log.info("Creating instance: {}".format(instance)) - self.log.info( - "_ instance.data: {}".format(pformat(instance.data))) + # insert family into families + family = marker_data["family"] + families = [str(f) for f in marker_data["families"]] + families.insert(0, str(family)) - if not with_audio: - continue + # form label + label = asset + if asset != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(subset) + label += " {}".format("[" + ", ".join(families) + "]") - # add audioReview attribute to plate instance data - # if reviewTrack is on - if marker_data.get("reviewTrack") is not None: - instance.data["reviewAudio"] = True + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "asset": asset, + "item": segment, + "families": families, + "publish": marker_data["publish"], + "fps": context.data["fps"], + }) + + # # otio clip data + # otio_data = self.get_otio_clip_instance_data(segment) or {} + # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) + # data.update(otio_data) + # self.log.debug("__ data: {}".format(pformat(data))) + + # # add resolution + # self.get_resolution_to_data(data, context) + + # create instance + instance = context.create_instance(**data) + + # add colorspace data + instance.data.update({ + "versionData": { + "colorspace": clip_data["colour_space"], + } + }) + + # create shot instance for shot attributes create/update + self.create_shot_instance(context, clip_name, **data) + + self.log.info("Creating instance: {}".format(instance)) + self.log.info( + "_ instance.data: {}".format(pformat(instance.data))) + + if not with_audio: + continue + + # add audioReview attribute to plate instance data + # if reviewTrack is on + if marker_data.get("reviewTrack") is not None: + instance.data["reviewAudio"] = True def get_resolution_to_data(self, data, context): assert data.get("otioClip"), "Missing `otioClip` data" diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index e7383ddec8..aff85e22e6 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -17,7 +17,8 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # adding otio timeline to context - otio_timeline = flame_export.create_otio_timeline(sequence) + with opfapi.maintained_segment_selection(sequence): + otio_timeline = flame_export.create_otio_timeline(sequence) instance_data = { "name": "{}_{}".format(asset, subset), From e4368e69b1088ea3345932b9109a20a5c0d83de7 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Jan 2022 12:25:50 +0100 Subject: [PATCH 031/160] moved nuke implementation from avalon --- openpype/hosts/nuke/api/__init__.py | 164 ++--- openpype/hosts/nuke/api/actions.py | 5 +- openpype/hosts/nuke/api/command.py | 135 ++++ openpype/hosts/nuke/api/lib.py | 616 ++++++++++++++++-- openpype/hosts/nuke/api/menu.py | 166 ----- 
openpype/hosts/nuke/api/pipeline.py | 421 ++++++++++++ openpype/hosts/nuke/api/plugin.py | 67 +- openpype/hosts/nuke/api/utils.py | 5 +- openpype/hosts/nuke/api/workio.py | 55 ++ .../nuke/plugins/create/create_backdrop.py | 15 +- .../nuke/plugins/create/create_camera.py | 12 +- .../hosts/nuke/plugins/create/create_gizmo.py | 26 +- .../hosts/nuke/plugins/create/create_model.py | 12 +- .../hosts/nuke/plugins/create/create_read.py | 15 +- .../plugins/create/create_write_prerender.py | 11 +- .../plugins/create/create_write_render.py | 11 +- .../nuke/plugins/create/create_write_still.py | 11 +- .../plugins/inventory/repair_old_loaders.py | 9 +- .../plugins/inventory/select_containers.py | 4 +- .../hosts/nuke/plugins/load/load_backdrop.py | 40 +- .../nuke/plugins/load/load_camera_abc.py | 18 +- openpype/hosts/nuke/plugins/load/load_clip.py | 13 +- .../hosts/nuke/plugins/load/load_effects.py | 17 +- .../nuke/plugins/load/load_effects_ip.py | 17 +- .../hosts/nuke/plugins/load/load_gizmo.py | 23 +- .../hosts/nuke/plugins/load/load_gizmo_ip.py | 31 +- .../hosts/nuke/plugins/load/load_image.py | 17 +- .../hosts/nuke/plugins/load/load_model.py | 15 +- .../nuke/plugins/load/load_script_precomp.py | 17 +- .../nuke/plugins/publish/extract_backdrop.py | 25 +- .../nuke/plugins/publish/extract_camera.py | 10 +- .../nuke/plugins/publish/extract_gizmo.py | 20 +- .../nuke/plugins/publish/extract_model.py | 13 +- .../plugins/publish/extract_ouput_node.py | 2 +- .../publish/extract_review_data_lut.py | 6 +- .../publish/extract_review_data_mov.py | 6 +- .../plugins/publish/extract_slate_frame.py | 4 +- .../nuke/plugins/publish/extract_thumbnail.py | 4 +- .../plugins/publish/precollect_instances.py | 9 +- .../plugins/publish/precollect_workfile.py | 15 +- .../nuke/plugins/publish/validate_backdrop.py | 6 +- .../nuke/plugins/publish/validate_gizmo.py | 6 +- .../publish/validate_instance_in_context.py | 13 +- .../plugins/publish/validate_write_legacy.py | 5 +- .../plugins/publish/validate_write_nodes.py | 15 +- openpype/hosts/nuke/startup/init.py | 2 + openpype/hosts/nuke/startup/menu.py | 15 +- 47 files changed, 1581 insertions(+), 563 deletions(-) create mode 100644 openpype/hosts/nuke/api/command.py delete mode 100644 openpype/hosts/nuke/api/menu.py create mode 100644 openpype/hosts/nuke/api/pipeline.py create mode 100644 openpype/hosts/nuke/api/workio.py diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 1567189ed1..d3b7f74d6d 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -1,130 +1,52 @@ -import os -import nuke +from .workio import ( + file_extensions, + has_unsaved_changes, + save_file, + open_file, + current_file, + work_root, +) -import avalon.api -import pyblish.api -import openpype -from . 
import lib, menu +from .command import ( + reset_frame_range, + get_handles, + reset_resolution, + viewer_update_and_undo_stop +) -log = openpype.api.Logger().get_logger(__name__) +from .plugin import OpenPypeCreator +from .pipeline import ( + install, + uninstall, -AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.nuke.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + ls, + + containerise, + parse_container, + update_container, +) -# registering pyblish gui regarding settings in presets -if os.getenv("PYBLISH_GUI", None): - pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) +__all__ = ( + "file_extensions", + "has_unsaved_changes", + "save_file", + "open_file", + "current_file", + "work_root", + "reset_frame_range", + "get_handles", + "reset_resolution", + "viewer_update_and_undo_stop", -def reload_config(): - """Attempt to reload pipeline at run-time. + "OpenPypeCreator", + "install", + "uninstall", - CAUTION: This is primarily for development and debugging purposes. + "ls", - """ - - import importlib - - for module in ( - "{}.api".format(AVALON_CONFIG), - "{}.hosts.nuke.api.actions".format(AVALON_CONFIG), - "{}.hosts.nuke.api.menu".format(AVALON_CONFIG), - "{}.hosts.nuke.api.plugin".format(AVALON_CONFIG), - "{}.hosts.nuke.api.lib".format(AVALON_CONFIG), - ): - log.info("Reloading module: {}...".format(module)) - - module = importlib.import_module(module) - - try: - importlib.reload(module) - except AttributeError as e: - from importlib import reload - log.warning("Cannot reload module: {}".format(e)) - reload(module) - - -def install(): - ''' Installing all requarements for Nuke host - ''' - - # remove all registred callbacks form avalon.nuke - from avalon import pipeline - pipeline._registered_event_handlers.clear() - - log.info("Registering Nuke plug-ins..") - pyblish.api.register_plugin_path(PUBLISH_PATH) - avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) - avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) - - # Register Avalon event for workfiles loading. - avalon.api.on("workio.open_file", lib.check_inventory_versions) - avalon.api.on("taskChanged", menu.change_context_label) - - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled) - workfile_settings = lib.WorkfileSettings() - # Disable all families except for the ones we explicitly want to see - family_states = [ - "write", - "review", - "nukenodes", - "model", - "gizmo" - ] - - avalon.api.data["familiesStateDefault"] = False - avalon.api.data["familiesStateToggled"] = family_states - - # Set context settings. 
- nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") - nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - nuke.addOnCreate(lib.process_workfile_builder, nodeClass="Root") - nuke.addOnCreate(lib.launch_workfiles_app, nodeClass="Root") - menu.install() - - -def uninstall(): - '''Uninstalling host's integration - ''' - log.info("Deregistering Nuke plug-ins..") - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) - avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) - - pyblish.api.deregister_callback( - "instanceToggled", on_pyblish_instance_toggled) - - reload_config() - menu.uninstall() - - -def on_pyblish_instance_toggled(instance, old_value, new_value): - """Toggle node passthrough states on instance toggles.""" - - log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( - instance, old_value, new_value)) - - from avalon.nuke import ( - viewer_update_and_undo_stop, - add_publish_knob - ) - - # Whether instances should be passthrough based on new value - - with viewer_update_and_undo_stop(): - n = instance[0] - try: - n["publish"].value() - except ValueError: - n = add_publish_knob(n) - log.info(" `Publish` knob was added to write node..") - - n["publish"].setValue(new_value) + "containerise", + "parse_container", + "update_container", +) diff --git a/openpype/hosts/nuke/api/actions.py b/openpype/hosts/nuke/api/actions.py index fd18c787c4..c4a6f0fb84 100644 --- a/openpype/hosts/nuke/api/actions.py +++ b/openpype/hosts/nuke/api/actions.py @@ -1,12 +1,11 @@ import pyblish.api -from avalon.nuke.lib import ( +from openpype.api import get_errored_instances_from_context +from .lib import ( reset_selection, select_nodes ) -from openpype.api import get_errored_instances_from_context - class SelectInvalidAction(pyblish.api.Action): """Select invalid nodes in Nuke when plug-in failed. 
diff --git a/openpype/hosts/nuke/api/command.py b/openpype/hosts/nuke/api/command.py new file mode 100644 index 0000000000..212d4757c6 --- /dev/null +++ b/openpype/hosts/nuke/api/command.py @@ -0,0 +1,135 @@ +import logging +import contextlib +import nuke + +from avalon import api, io + + +log = logging.getLogger(__name__) + + +def reset_frame_range(): + """ Set frame range to current asset + Also it will set a Viewer range with + displayed handles + """ + + fps = float(api.Session.get("AVALON_FPS", 25)) + + nuke.root()["fps"].setValue(fps) + name = api.Session["AVALON_ASSET"] + asset = io.find_one({"name": name, "type": "asset"}) + asset_data = asset["data"] + + handles = get_handles(asset) + + frame_start = int(asset_data.get( + "frameStart", + asset_data.get("edit_in"))) + + frame_end = int(asset_data.get( + "frameEnd", + asset_data.get("edit_out"))) + + if not all([frame_start, frame_end]): + missing = ", ".join(["frame_start", "frame_end"]) + msg = "'{}' are not set for asset '{}'!".format(missing, name) + log.warning(msg) + nuke.message(msg) + return + + frame_start -= handles + frame_end += handles + + nuke.root()["first_frame"].setValue(frame_start) + nuke.root()["last_frame"].setValue(frame_end) + + # setting active viewers + vv = nuke.activeViewer().node() + vv["frame_range_lock"].setValue(True) + vv["frame_range"].setValue("{0}-{1}".format( + int(asset_data["frameStart"]), + int(asset_data["frameEnd"])) + ) + + +def get_handles(asset): + """ Gets handles data + + Arguments: + asset (dict): avalon asset entity + + Returns: + handles (int) + """ + data = asset["data"] + if "handles" in data and data["handles"] is not None: + return int(data["handles"]) + + parent_asset = None + if "visualParent" in data: + vp = data["visualParent"] + if vp is not None: + parent_asset = io.find_one({"_id": io.ObjectId(vp)}) + + if parent_asset is None: + parent_asset = io.find_one({"_id": io.ObjectId(asset["parent"])}) + + if parent_asset is not None: + return get_handles(parent_asset) + else: + return 0 + + +def reset_resolution(): + """Set resolution to project resolution.""" + project = io.find_one({"type": "project"}) + p_data = project["data"] + + width = p_data.get("resolution_width", + p_data.get("resolutionWidth")) + height = p_data.get("resolution_height", + p_data.get("resolutionHeight")) + + if not all([width, height]): + missing = ", ".join(["width", "height"]) + msg = "No resolution information `{0}` found for '{1}'.".format( + missing, + project["name"]) + log.warning(msg) + nuke.message(msg) + return + + current_width = nuke.root()["format"].value().width() + current_height = nuke.root()["format"].value().height() + + if width != current_width or height != current_height: + + fmt = None + for f in nuke.formats(): + if f.width() == width and f.height() == height: + fmt = f.name() + + if not fmt: + nuke.addFormat( + "{0} {1} {2}".format(int(width), int(height), project["name"]) + ) + fmt = project["name"] + + nuke.root()["format"].setValue(fmt) + + +@contextlib.contextmanager +def viewer_update_and_undo_stop(): + """Lock viewer from updating and stop recording undo steps""" + try: + # stop active viewer to update any change + viewer = nuke.activeViewer() + if viewer: + viewer.stop() + else: + log.warning("No available active Viewer") + nuke.Undo.disable() + yield + finally: + nuke.Undo.enable() diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e36a5aa5ba..0508de9f1d 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ 
-3,15 +3,15 @@ import re import sys import six import platform +import contextlib from collections import OrderedDict +import clique + +import nuke from avalon import api, io, lib -import avalon.nuke -from avalon.nuke import lib as anlib -from avalon.nuke import ( - save_file, open_file -) + from openpype.api import ( Logger, Anatomy, @@ -28,21 +28,476 @@ from openpype.lib.path_tools import HostDirmap from openpype.settings import get_project_settings from openpype.modules import ModulesManager -import nuke +from .workio import ( + save_file, + open_file +) -from .utils import set_context_favorites +log = Logger.get_logger(__name__) -log = Logger().get_logger(__name__) +_NODE_TAB_NAME = "{}".format(os.getenv("AVALON_LABEL") or "Avalon") +AVALON_LABEL = os.getenv("AVALON_LABEL") or "Avalon" +AVALON_TAB = "{}".format(AVALON_LABEL) +AVALON_DATA_GROUP = "{}DataGroup".format(AVALON_LABEL.capitalize()) +EXCLUDED_KNOB_TYPE_ON_READ = ( + 20, # Tab Knob + 26, # Text Knob (But for backward compatibility, still be read + # if value is not an empty string.) +) -opnl = sys.modules[__name__] -opnl._project = None -opnl.project_name = os.getenv("AVALON_PROJECT") -opnl.workfiles_launched = False -opnl._node_tab_name = "{}".format(os.getenv("AVALON_LABEL") or "Avalon") + +class Context: + main_window = None + context_label = None + project_name = os.getenv("AVALON_PROJECT") + workfiles_launched = False + # Seems unused + _project_doc = None + + +class Knobby(object): + """For creating knob which it's type isn't mapped in `create_knobs` + + Args: + type (string): Nuke knob type name + value: Value to be set with `Knob.setValue`, put `None` if not required + flags (list, optional): Knob flags to be set with `Knob.setFlag` + *args: Args other than knob name for initializing knob class + + """ + + def __init__(self, type, value, flags=None, *args): + self.type = type + self.value = value + self.flags = flags or [] + self.args = args + + def create(self, name, nice=None): + knob_cls = getattr(nuke, self.type) + knob = knob_cls(name, nice, *self.args) + if self.value is not None: + knob.setValue(self.value) + for flag in self.flags: + knob.setFlag(flag) + return knob + + +def create_knobs(data, tab=None): + """Create knobs by data + + Depending on the type of each dict value and creates the correct Knob. + + Mapped types: + bool: nuke.Boolean_Knob + int: nuke.Int_Knob + float: nuke.Double_Knob + list: nuke.Enumeration_Knob + six.string_types: nuke.String_Knob + + dict: If it's a nested dict (all values are dict), will turn into + A tabs group. Or just a knobs group. 
+ + Args: + data (dict): collection of attributes and their value + tab (string, optional): Knobs' tab name + + Returns: + list: A list of `nuke.Knob` objects + + """ + def nice_naming(key): + """Convert camelCase name into UI Display Name""" + words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) + return " ".join(words) + + # Turn key-value pairs into knobs + knobs = list() + + if tab: + knobs.append(nuke.Tab_Knob(tab)) + + for key, value in data.items(): + # Knob name + if isinstance(key, tuple): + name, nice = key + else: + name, nice = key, nice_naming(key) + + # Create knob by value type + if isinstance(value, Knobby): + knobby = value + knob = knobby.create(name, nice) + + elif isinstance(value, float): + knob = nuke.Double_Knob(name, nice) + knob.setValue(value) + + elif isinstance(value, bool): + knob = nuke.Boolean_Knob(name, nice) + knob.setValue(value) + knob.setFlag(nuke.STARTLINE) + + elif isinstance(value, int): + knob = nuke.Int_Knob(name, nice) + knob.setValue(value) + + elif isinstance(value, six.string_types): + knob = nuke.String_Knob(name, nice) + knob.setValue(value) + + elif isinstance(value, list): + knob = nuke.Enumeration_Knob(name, nice, value) + + elif isinstance(value, dict): + if all(isinstance(v, dict) for v in value.values()): + # Create a group of tabs + begain = nuke.BeginTabGroup_Knob() + end = nuke.EndTabGroup_Knob() + begain.setName(name) + end.setName(name + "_End") + knobs.append(begain) + for k, v in value.items(): + knobs += create_knobs(v, tab=k) + knobs.append(end) + else: + # Create a group of knobs + knobs.append(nuke.Tab_Knob( + name, nice, nuke.TABBEGINCLOSEDGROUP)) + knobs += create_knobs(value) + knobs.append( + nuke.Tab_Knob(name + "_End", nice, nuke.TABENDGROUP)) + continue + + else: + raise TypeError("Unsupported type: %r" % type(value)) + + knobs.append(knob) + + return knobs + + +def imprint(node, data, tab=None): + """Store attributes with value on node + + Parse user data into Node knobs. + Use `collections.OrderedDict` to ensure knob order. 
+ + Args: + node(nuke.Node): node object from Nuke + data(dict): collection of attributes and their value + + Returns: + None + + Examples: + ``` + import nuke + from avalon.nuke import lib + + node = nuke.createNode("NoOp") + data = { + # Regular type of attributes + "myList": ["x", "y", "z"], + "myBool": True, + "myFloat": 0.1, + "myInt": 5, + + # Creating non-default imprint type of knob + "MyFilePath": lib.Knobby("File_Knob", "/file/path"), + "divider": lib.Knobby("Text_Knob", ""), + + # Manual nice knob naming + ("my_knob", "Nice Knob Name"): "some text", + + # dict type will be created as knob group + "KnobGroup": { + "knob1": 5, + "knob2": "hello", + "knob3": ["a", "b"], + }, + + # Nested dict will be created as tab group + "TabGroup": { + "tab1": {"count": 5}, + "tab2": {"isGood": True}, + "tab3": {"direction": ["Left", "Right"]}, + }, + } + lib.imprint(node, data, tab="Demo") + + ``` + + """ + for knob in create_knobs(data, tab): + node.addKnob(knob) + + +def add_publish_knob(node): + """Add Publish knob to node + + Arguments: + node (nuke.Node): nuke node to be processed + + Returns: + node (nuke.Node): processed nuke node + + """ + if "publish" not in node.knobs(): + body = OrderedDict() + body[("divd", "Publishing")] = Knobby("Text_Knob", '') + body["publish"] = True + imprint(node, body) + return node + + +def set_avalon_knob_data(node, data=None, prefix="avalon:"): + """ Sets data into nodes's avalon knob + + Arguments: + node (nuke.Node): Nuke node to imprint with data, + data (dict, optional): Data to be imprinted into AvalonTab + prefix (str, optional): filtering prefix + + Returns: + node (nuke.Node) + + Examples: + data = { + 'asset': 'sq020sh0280', + 'family': 'render', + 'subset': 'subsetMain' + } + """ + data = data or dict() + create = OrderedDict() + + tab_name = AVALON_TAB + editable = ["asset", "subset", "name", "namespace"] + + existed_knobs = node.knobs() + + for key, value in data.items(): + knob_name = prefix + key + gui_name = key + + if knob_name in existed_knobs: + # Set value + try: + node[knob_name].setValue(value) + except TypeError: + node[knob_name].setValue(str(value)) + else: + # New knob + name = (knob_name, gui_name) # Hide prefix on GUI + if key in editable: + create[name] = value + else: + create[name] = Knobby("String_Knob", + str(value), + flags=[nuke.READ_ONLY]) + if tab_name in existed_knobs: + tab_name = None + else: + tab = OrderedDict() + warn = Knobby("Text_Knob", "Warning! 
Do not change following data!") + divd = Knobby("Text_Knob", "") + head = [ + (("warn", ""), warn), + (("divd", ""), divd), + ] + tab[AVALON_DATA_GROUP] = OrderedDict(head + list(create.items())) + create = tab + + imprint(node, create, tab=tab_name) + return node + + +def get_avalon_knob_data(node, prefix="avalon:"): + """ Gets a data from nodes's avalon knob + + Arguments: + node (obj): Nuke node to search for data, + prefix (str, optional): filtering prefix + + Returns: + data (dict) + """ + + # check if lists + if not isinstance(prefix, list): + prefix = list([prefix]) + + data = dict() + + # loop prefix + for p in prefix: + # check if the node is avalon tracked + if AVALON_TAB not in node.knobs(): + continue + try: + # check if data available on the node + test = node[AVALON_DATA_GROUP].value() + log.debug("Only testing if data avalable: `{}`".format(test)) + except NameError as e: + # if it doesn't then create it + log.debug("Creating avalon knob: `{}`".format(e)) + node = set_avalon_knob_data(node) + return get_avalon_knob_data(node) + + # get data from filtered knobs + data.update({k.replace(p, ''): node[k].value() + for k in node.knobs().keys() + if p in k}) + + return data + + +def fix_data_for_node_create(data): + """Fixing data to be used for nuke knobs + """ + for k, v in data.items(): + if isinstance(v, six.text_type): + data[k] = str(v) + if str(v).startswith("0x"): + data[k] = int(v, 16) + return data + + +def add_write_node(name, **kwarg): + """Adding nuke write node + + Arguments: + name (str): nuke node name + kwarg (attrs): data for nuke knobs + + Returns: + node (obj): nuke write node + """ + frame_range = kwarg.get("frame_range", None) + + w = nuke.createNode( + "Write", + "name {}".format(name)) + + w["file"].setValue(kwarg["file"]) + + for k, v in kwarg.items(): + if "frame_range" in k: + continue + log.info([k, v]) + try: + w[k].setValue(v) + except KeyError as e: + log.debug(e) + continue + + if frame_range: + w["use_limit"].setValue(True) + w["first"].setValue(frame_range[0]) + w["last"].setValue(frame_range[1]) + + return w + + +def read(node): + """Return user-defined knobs from given `node` + + Args: + node (nuke.Node): Nuke node object + + Returns: + list: A list of nuke.Knob object + + """ + def compat_prefixed(knob_name): + if knob_name.startswith("avalon:"): + return knob_name[len("avalon:"):] + elif knob_name.startswith("ak:"): + return knob_name[len("ak:"):] + else: + return knob_name + + data = dict() + + pattern = ("(?<=addUserKnob {)" + "([0-9]*) (\\S*)" # Matching knob type and knob name + "(?=[ |}])") + tcl_script = node.writeKnobs(nuke.WRITE_USER_KNOB_DEFS) + result = re.search(pattern, tcl_script) + + if result: + first_user_knob = result.group(2) + # Collect user knobs from the end of the knob list + for knob in reversed(node.allKnobs()): + knob_name = knob.name() + if not knob_name: + # Ignore unnamed knob + continue + + knob_type = nuke.knob(knob.fullyQualifiedName(), type=True) + value = knob.value() + + if ( + knob_type not in EXCLUDED_KNOB_TYPE_ON_READ or + # For compating read-only string data that imprinted + # by `nuke.Text_Knob`. + (knob_type == 26 and value) + ): + key = compat_prefixed(knob_name) + data[key] = value + + if knob_name == first_user_knob: + break + + return data + + +def get_node_path(path, padding=4): + """Get filename for the Nuke write with padded number as '#' + + Arguments: + path (str): The path to render to. 
+ + Returns: + tuple: head, padding, tail (extension) + + Examples: + >>> get_frame_path("test.exr") + ('test', 4, '.exr') + + >>> get_frame_path("filename.#####.tif") + ('filename.', 5, '.tif') + + >>> get_frame_path("foobar##.tif") + ('foobar', 2, '.tif') + + >>> get_frame_path("foobar_%08d.tif") + ('foobar_', 8, '.tif') + """ + filename, ext = os.path.splitext(path) + + # Find a final number group + if '%' in filename: + match = re.match('.*?(%[0-9]+d)$', filename) + if match: + padding = int(match.group(1).replace('%', '').replace('d', '')) + # remove number from end since fusion + # will swap it with the frame number + filename = filename.replace(match.group(1), '') + elif '#' in filename: + match = re.match('.*?(#+)$', filename) + + if match: + padding = len(match.group(1)) + # remove number from end since fusion + # will swap it with the frame number + filename = filename.replace(match.group(1), '') + + return filename, padding, ext def get_nuke_imageio_settings(): - return get_anatomy_settings(opnl.project_name)["imageio"]["nuke"] + return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] def get_created_node_imageio_setting(**kwarg): @@ -103,14 +558,15 @@ def check_inventory_versions(): and check if the node is having actual version. If not then it will color it to red. """ + from .pipeline import parse_container + # get all Loader nodes by avalon attribute metadata for each in nuke.allNodes(): - container = avalon.nuke.parse_container(each) + container = parse_container(each) if container: node = nuke.toNode(container["objectName"]) - avalon_knob_data = avalon.nuke.read( - node) + avalon_knob_data = read(node) # get representation from io representation = io.find_one({ @@ -163,11 +619,10 @@ def writes_version_sync(): for each in nuke.allNodes(filter="Write"): # check if the node is avalon tracked - if opnl._node_tab_name not in each.knobs(): + if _NODE_TAB_NAME not in each.knobs(): continue - avalon_knob_data = avalon.nuke.read( - each) + avalon_knob_data = read(each) try: if avalon_knob_data['families'] not in ["render"]: @@ -209,14 +664,14 @@ def check_subsetname_exists(nodes, subset_name): bool: True of False """ return next((True for n in nodes - if subset_name in avalon.nuke.read(n).get("subset", "")), + if subset_name in read(n).get("subset", "")), False) def get_render_path(node): ''' Generate Render path from presets regarding avalon knob data ''' - data = {'avalon': avalon.nuke.read(node)} + data = {'avalon': read(node)} data_preset = { "nodeclass": data['avalon']['family'], "families": [data['avalon']['families']], @@ -385,7 +840,7 @@ def create_write_node(name, data, input=None, prenodes=None, for knob in imageio_writes["knobs"]: _data.update({knob["name"]: knob["value"]}) - _data = anlib.fix_data_for_node_create(_data) + _data = fix_data_for_node_create(_data) log.debug("_data: `{}`".format(_data)) @@ -466,7 +921,7 @@ def create_write_node(name, data, input=None, prenodes=None, prev_node = now_node # creating write node - write_node = now_node = anlib.add_write_node( + write_node = now_node = add_write_node( "inside_{}".format(name), **_data ) @@ -484,8 +939,8 @@ def create_write_node(name, data, input=None, prenodes=None, now_node.setInput(0, prev_node) # imprinting group node - anlib.set_avalon_knob_data(GN, data["avalon"]) - anlib.add_publish_knob(GN) + set_avalon_knob_data(GN, data["avalon"]) + add_publish_knob(GN) add_rendering_knobs(GN, farm) if review: @@ -537,7 +992,7 @@ def create_write_node(name, data, input=None, prenodes=None, 
add_deadline_tab(GN) # open the our Tab as default - GN[opnl._node_tab_name].setFlag(0) + GN[_NODE_TAB_NAME].setFlag(0) # set tile color tile_color = _data.get("tile_color", "0xff0000ff") @@ -663,7 +1118,7 @@ class WorkfileSettings(object): root_node=None, nodes=None, **kwargs): - opnl._project = kwargs.get( + Context._project_doc = kwargs.get( "project") or io.find_one({"type": "project"}) self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"] self._asset_entity = get_asset(self._asset) @@ -804,8 +1259,6 @@ class WorkfileSettings(object): ''' Adds correct colorspace to write node dict ''' - from avalon.nuke import read - for node in nuke.allNodes(filter="Group"): # get data from avalon knob @@ -1005,7 +1458,7 @@ class WorkfileSettings(object): node['frame_range_lock'].setValue(True) # adding handle_start/end to root avalon knob - if not anlib.set_avalon_knob_data(self._root_node, { + if not set_avalon_knob_data(self._root_node, { "handleStart": int(handle_start), "handleEnd": int(handle_end) }): @@ -1089,6 +1542,8 @@ class WorkfileSettings(object): self.set_colorspace() def set_favorites(self): + from .utils import set_context_favorites + work_dir = os.getenv("AVALON_WORKDIR") asset = os.getenv("AVALON_ASSET") favorite_items = OrderedDict() @@ -1096,9 +1551,9 @@ class WorkfileSettings(object): # project # get project's root and split to parts projects_root = os.path.normpath(work_dir.split( - opnl.project_name)[0]) + Context.project_name)[0]) # add project name - project_dir = os.path.join(projects_root, opnl.project_name) + "/" + project_dir = os.path.join(projects_root, Context.project_name) + "/" # add to favorites favorite_items.update({"Project dir": project_dir.replace("\\", "/")}) @@ -1145,8 +1600,7 @@ def get_write_node_template_attr(node): ''' # get avalon data from node data = dict() - data['avalon'] = avalon.nuke.read( - node) + data['avalon'] = read(node) data_preset = { "nodeclass": data['avalon']['family'], "families": [data['avalon']['families']], @@ -1167,7 +1621,7 @@ def get_write_node_template_attr(node): if k not in ["_id", "_previous"]} # fix badly encoded data - return anlib.fix_data_for_node_create(correct_data) + return fix_data_for_node_create(correct_data) def get_dependent_nodes(nodes): @@ -1274,13 +1728,53 @@ def find_free_space_to_paste_nodes( return xpos, ypos +@contextlib.contextmanager +def maintained_selection(): + """Maintain selection during context + + Example: + >>> with maintained_selection(): + ... 
node['selected'].setValue(True) + >>> print(node['selected'].value()) + False + """ + previous_selection = nuke.selectedNodes() + try: + yield + finally: + # unselect all selection in case there is some + current_seletion = nuke.selectedNodes() + [n['selected'].setValue(False) for n in current_seletion] + # and select all previously selected nodes + if previous_selection: + [n['selected'].setValue(True) for n in previous_selection] + + +def reset_selection(): + """Deselect all selected nodes""" + for node in nuke.selectedNodes(): + node["selected"].setValue(False) + + +def select_nodes(nodes): + """Selects all inputed nodes + + Arguments: + nodes (list): nuke nodes to be selected + """ + assert isinstance(nodes, (list, tuple)), "nodes has to be list or tuple" + + for node in nodes: + node["selected"].setValue(True) + + def launch_workfiles_app(): '''Function letting start workfiles after start of host ''' from openpype.lib import ( env_value_to_bool ) - from avalon.nuke.pipeline import get_main_window + from .pipeline import get_main_window # get all imortant settings open_at_start = env_value_to_bool( @@ -1291,8 +1785,8 @@ def launch_workfiles_app(): if not open_at_start: return - if not opnl.workfiles_launched: - opnl.workfiles_launched = True + if not Context.workfiles_launched: + Context.workfiles_launched = True main_window = get_main_window() host_tools.show_workfiles(parent=main_window) @@ -1378,7 +1872,7 @@ def recreate_instance(origin_node, avalon_data=None): knobs_wl = ["render", "publish", "review", "ypos", "use_limit", "first", "last"] # get data from avalon knobs - data = anlib.get_avalon_knob_data( + data = get_avalon_knob_data( origin_node) # add input data to avalon data @@ -1494,3 +1988,45 @@ def dirmap_file_name_filter(file_name): if os.path.exists(dirmap_processor.file_name): return dirmap_processor.file_name return file_name + + +# ------------------------------------ +# This function seems to be deprecated +# ------------------------------------ +def ls_img_sequence(path): + """Listing all available coherent image sequence from path + + Arguments: + path (str): A nuke's node object + + Returns: + data (dict): with nuke formated path and frameranges + """ + file = os.path.basename(path) + dirpath = os.path.dirname(path) + base, ext = os.path.splitext(file) + name, padding = os.path.splitext(base) + + # populate list of files + files = [ + f for f in os.listdir(dirpath) + if name in f + if ext in f + ] + + # create collection from list of files + collections, reminder = clique.assemble(files) + + if len(collections) > 0: + head = collections[0].format("{head}") + padding = collections[0].format("{padding}") % 1 + padding = "#" * len(padding) + tail = collections[0].format("{tail}") + file = head + padding + tail + + return { + "path": os.path.join(dirpath, file).replace("\\", "/"), + "frames": collections[0].format("[{ranges}]") + } + + return False diff --git a/openpype/hosts/nuke/api/menu.py b/openpype/hosts/nuke/api/menu.py deleted file mode 100644 index 86293edb99..0000000000 --- a/openpype/hosts/nuke/api/menu.py +++ /dev/null @@ -1,166 +0,0 @@ -import os -import nuke -from avalon.nuke.pipeline import get_main_window - -from .lib import WorkfileSettings -from openpype.api import Logger, BuildWorkfile, get_current_project_settings -from openpype.tools.utils import host_tools - - -log = Logger().get_logger(__name__) - -menu_label = os.environ["AVALON_LABEL"] -context_label = None - - -def change_context_label(*args): - global context_label - menubar = nuke.menu("Nuke") 
- menu = menubar.findItem(menu_label) - - label = "{0}, {1}".format( - os.environ["AVALON_ASSET"], os.environ["AVALON_TASK"] - ) - - rm_item = [ - (i, item) for i, item in enumerate(menu.items()) - if context_label in item.name() - ][0] - - menu.removeItem(rm_item[1].name()) - - context_action = menu.addCommand( - label, - index=(rm_item[0]) - ) - context_action.setEnabled(False) - - log.info("Task label changed from `{}` to `{}`".format( - context_label, label)) - - context_label = label - - - -def install(): - from openpype.hosts.nuke.api import reload_config - - global context_label - - # uninstall original avalon menu - uninstall() - - main_window = get_main_window() - menubar = nuke.menu("Nuke") - menu = menubar.addMenu(menu_label) - - label = "{0}, {1}".format( - os.environ["AVALON_ASSET"], os.environ["AVALON_TASK"] - ) - context_label = label - context_action = menu.addCommand(label) - context_action.setEnabled(False) - - menu.addSeparator() - menu.addCommand( - "Work Files...", - lambda: host_tools.show_workfiles(parent=main_window) - ) - - menu.addSeparator() - menu.addCommand( - "Create...", - lambda: host_tools.show_creator(parent=main_window) - ) - menu.addCommand( - "Load...", - lambda: host_tools.show_loader( - parent=main_window, - use_context=True - ) - ) - menu.addCommand( - "Publish...", - lambda: host_tools.show_publish(parent=main_window) - ) - menu.addCommand( - "Manage...", - lambda: host_tools.show_scene_inventory(parent=main_window) - ) - - menu.addSeparator() - menu.addCommand( - "Set Resolution", - lambda: WorkfileSettings().reset_resolution() - ) - menu.addCommand( - "Set Frame Range", - lambda: WorkfileSettings().reset_frame_range_handles() - ) - menu.addCommand( - "Set Colorspace", - lambda: WorkfileSettings().set_colorspace() - ) - menu.addCommand( - "Apply All Settings", - lambda: WorkfileSettings().set_context_settings() - ) - - menu.addSeparator() - menu.addCommand( - "Build Workfile", - lambda: BuildWorkfile().process() - ) - - menu.addSeparator() - menu.addCommand( - "Experimental tools...", - lambda: host_tools.show_experimental_tools_dialog(parent=main_window) - ) - - # add reload pipeline only in debug mode - if bool(os.getenv("NUKE_DEBUG")): - menu.addSeparator() - menu.addCommand("Reload Pipeline", reload_config) - - # adding shortcuts - add_shortcuts_from_presets() - - -def uninstall(): - - menubar = nuke.menu("Nuke") - menu = menubar.findItem(menu_label) - - for item in menu.items(): - log.info("Removing menu item: {}".format(item.name())) - menu.removeItem(item.name()) - - -def add_shortcuts_from_presets(): - menubar = nuke.menu("Nuke") - nuke_presets = get_current_project_settings()["nuke"]["general"] - - if nuke_presets.get("menu"): - menu_label_mapping = { - "manage": "Manage...", - "create": "Create...", - "load": "Load...", - "build_workfile": "Build Workfile", - "publish": "Publish..." 
- } - - for command_name, shortcut_str in nuke_presets.get("menu").items(): - log.info("menu_name `{}` | menu_label `{}`".format( - command_name, menu_label - )) - log.info("Adding Shortcut `{}` to `{}`".format( - shortcut_str, command_name - )) - try: - menu = menubar.findItem(menu_label) - item_label = menu_label_mapping[command_name] - menuitem = menu.findItem(item_label) - menuitem.setShortcut(shortcut_str) - except AttributeError as e: - log.error(e) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py new file mode 100644 index 0000000000..c47187666b --- /dev/null +++ b/openpype/hosts/nuke/api/pipeline.py @@ -0,0 +1,421 @@ +import os +import importlib +from collections import OrderedDict + +import nuke + +import pyblish.api +import avalon.api +from avalon import pipeline + +import openpype +from openpype.api import ( + Logger, + BuildWorkfile, + get_current_project_settings +) +from openpype.tools.utils import host_tools + +from .command import viewer_update_and_undo_stop +from .lib import ( + add_publish_knob, + WorkfileSettings, + process_workfile_builder, + launch_workfiles_app, + check_inventory_versions, + set_avalon_knob_data, + read, + Context +) + +log = Logger.get_logger(__name__) + +AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") +HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.nuke.__file__)) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + +MENU_LABEL = os.environ["AVALON_LABEL"] + + +# registering pyblish gui regarding settings in presets +if os.getenv("PYBLISH_GUI", None): + pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) + + +def get_main_window(): + """Acquire Nuke's main window""" + if Context.main_window is None: + from Qt import QtWidgets + + top_widgets = QtWidgets.QApplication.topLevelWidgets() + name = "Foundry::UI::DockMainWindow" + for widget in top_widgets: + if ( + widget.inherits("QMainWindow") + and widget.metaObject().className() == name + ): + Context.main_window = widget + break + return Context.main_window + + +def reload_config(): + """Attempt to reload pipeline at run-time. + + CAUTION: This is primarily for development and debugging purposes. + + """ + + for module in ( + "{}.api".format(AVALON_CONFIG), + "{}.hosts.nuke.api.actions".format(AVALON_CONFIG), + "{}.hosts.nuke.api.menu".format(AVALON_CONFIG), + "{}.hosts.nuke.api.plugin".format(AVALON_CONFIG), + "{}.hosts.nuke.api.lib".format(AVALON_CONFIG), + ): + log.info("Reloading module: {}...".format(module)) + + module = importlib.import_module(module) + + try: + importlib.reload(module) + except AttributeError as e: + from importlib import reload + log.warning("Cannot reload module: {}".format(e)) + reload(module) + + +def install(): + ''' Installing all requarements for Nuke host + ''' + + pyblish.api.register_host("nuke") + + log.info("Registering Nuke plug-ins..") + pyblish.api.register_plugin_path(PUBLISH_PATH) + avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) + avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) + + # Register Avalon event for workfiles loading. 
+ avalon.api.on("workio.open_file", check_inventory_versions) + avalon.api.on("taskChanged", change_context_label) + + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled) + workfile_settings = WorkfileSettings() + # Disable all families except for the ones we explicitly want to see + family_states = [ + "write", + "review", + "nukenodes", + "model", + "gizmo" + ] + + avalon.api.data["familiesStateDefault"] = False + avalon.api.data["familiesStateToggled"] = family_states + + # Set context settings. + nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") + nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") + nuke.addOnCreate(process_workfile_builder, nodeClass="Root") + nuke.addOnCreate(launch_workfiles_app, nodeClass="Root") + _install_menu() + + +def uninstall(): + '''Uninstalling host's integration + ''' + log.info("Deregistering Nuke plug-ins..") + pyblish.deregister_host("nuke") + pyblish.api.deregister_plugin_path(PUBLISH_PATH) + avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) + avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) + + pyblish.api.deregister_callback( + "instanceToggled", on_pyblish_instance_toggled) + + reload_config() + _uninstall_menu() + + +def _install_menu(): + # uninstall original avalon menu + main_window = get_main_window() + menubar = nuke.menu("Nuke") + menu = menubar.addMenu(MENU_LABEL) + + label = "{0}, {1}".format( + os.environ["AVALON_ASSET"], os.environ["AVALON_TASK"] + ) + Context.context_label = label + context_action = menu.addCommand(label) + context_action.setEnabled(False) + + menu.addSeparator() + menu.addCommand( + "Work Files...", + lambda: host_tools.show_workfiles(parent=main_window) + ) + + menu.addSeparator() + menu.addCommand( + "Create...", + lambda: host_tools.show_creator(parent=main_window) + ) + menu.addCommand( + "Load...", + lambda: host_tools.show_loader( + parent=main_window, + use_context=True + ) + ) + menu.addCommand( + "Publish...", + lambda: host_tools.show_publish(parent=main_window) + ) + menu.addCommand( + "Manage...", + lambda: host_tools.show_scene_inventory(parent=main_window) + ) + + menu.addSeparator() + menu.addCommand( + "Set Resolution", + lambda: WorkfileSettings().reset_resolution() + ) + menu.addCommand( + "Set Frame Range", + lambda: WorkfileSettings().reset_frame_range_handles() + ) + menu.addCommand( + "Set Colorspace", + lambda: WorkfileSettings().set_colorspace() + ) + menu.addCommand( + "Apply All Settings", + lambda: WorkfileSettings().set_context_settings() + ) + + menu.addSeparator() + menu.addCommand( + "Build Workfile", + lambda: BuildWorkfile().process() + ) + + menu.addSeparator() + menu.addCommand( + "Experimental tools...", + lambda: host_tools.show_experimental_tools_dialog(parent=main_window) + ) + + # add reload pipeline only in debug mode + if bool(os.getenv("NUKE_DEBUG")): + menu.addSeparator() + menu.addCommand("Reload Pipeline", reload_config) + + # adding shortcuts + add_shortcuts_from_presets() + + +def _uninstall_menu(): + menubar = nuke.menu("Nuke") + menu = menubar.findItem(MENU_LABEL) + + for item in menu.items(): + log.info("Removing menu item: {}".format(item.name())) + menu.removeItem(item.name()) + + +def change_context_label(*args): + menubar = nuke.menu("Nuke") + menu = menubar.findItem(MENU_LABEL) + + label = "{0}, {1}".format( + os.environ["AVALON_ASSET"], os.environ["AVALON_TASK"] + ) + + rm_item = [ + (i, item) for i, item in enumerate(menu.items()) + if Context.context_label in 
item.name() + ][0] + + menu.removeItem(rm_item[1].name()) + + context_action = menu.addCommand( + label, + index=(rm_item[0]) + ) + context_action.setEnabled(False) + + log.info("Task label changed from `{}` to `{}`".format( + Context.context_label, label)) + + +def add_shortcuts_from_presets(): + menubar = nuke.menu("Nuke") + nuke_presets = get_current_project_settings()["nuke"]["general"] + + if nuke_presets.get("menu"): + menu_label_mapping = { + "manage": "Manage...", + "create": "Create...", + "load": "Load...", + "build_workfile": "Build Workfile", + "publish": "Publish..." + } + + for command_name, shortcut_str in nuke_presets.get("menu").items(): + log.info("menu_name `{}` | menu_label `{}`".format( + command_name, MENU_LABEL + )) + log.info("Adding Shortcut `{}` to `{}`".format( + shortcut_str, command_name + )) + try: + menu = menubar.findItem(MENU_LABEL) + item_label = menu_label_mapping[command_name] + menuitem = menu.findItem(item_label) + menuitem.setShortcut(shortcut_str) + except AttributeError as e: + log.error(e) + + +def on_pyblish_instance_toggled(instance, old_value, new_value): + """Toggle node passthrough states on instance toggles.""" + + log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( + instance, old_value, new_value)) + + # Whether instances should be passthrough based on new value + + with viewer_update_and_undo_stop(): + n = instance[0] + try: + n["publish"].value() + except ValueError: + n = add_publish_knob(n) + log.info(" `Publish` knob was added to write node..") + + n["publish"].setValue(new_value) + + +def containerise(node, + name, + namespace, + context, + loader=None, + data=None): + """Bundle `node` into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + node (nuke.Node): Nuke's node object to imprint as container + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + context (dict): Asset information + loader (str, optional): Name of node used to produce this container. + + Returns: + node (nuke.Node): containerised nuke's node object + + """ + data = OrderedDict( + [ + ("schema", "openpype:container-2.0"), + ("id", pipeline.AVALON_CONTAINER_ID), + ("name", name), + ("namespace", namespace), + ("loader", str(loader)), + ("representation", context["representation"]["_id"]), + ], + + **data or dict() + ) + + set_avalon_knob_data(node, data) + + return node + + +def parse_container(node): + """Returns containerised data of a node + + Reads the imprinted data from `containerise`. + + Arguments: + node (nuke.Node): Nuke's node object to read imprinted data + + Returns: + dict: The container schema data for this container node. + + """ + data = read(node) + + # (TODO) Remove key validation when `ls` has re-implemented. 
+ # + # If not all required data return the empty container + required = ["schema", "id", "name", + "namespace", "loader", "representation"] + if not all(key in data for key in required): + return + + # Store the node's name + data["objectName"] = node["name"].value() + + return data + + +def update_container(node, keys=None): + """Returns node with updateted containder data + + Arguments: + node (nuke.Node): The node in Nuke to imprint as container, + keys (dict, optional): data which should be updated + + Returns: + node (nuke.Node): nuke node with updated container data + + Raises: + TypeError on given an invalid container node + + """ + keys = keys or dict() + + container = parse_container(node) + if not container: + raise TypeError("Not a valid container node.") + + container.update(keys) + node = set_avalon_knob_data(node, container) + + return node + + +def ls(): + """List available containers. + + This function is used by the Container Manager in Nuke. You'll + need to implement a for-loop that then *yields* one Container at + a time. + + See the `container.json` schema for details on how it should look, + and the Maya equivalent, which is in `avalon.maya.pipeline` + """ + all_nodes = nuke.allNodes(recurseGroups=False) + + # TODO: add readgeo, readcamera, readimage + nodes = [n for n in all_nodes] + + for n in nodes: + log.debug("name: `{}`".format(n.name())) + container = parse_container(n) + if container: + yield container diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 82299dd354..66b42f7bb1 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -2,23 +2,30 @@ import os import random import string -import avalon.nuke -from avalon.nuke import lib as anlib -from avalon import api +import nuke + +import avalon.api from openpype.api import ( get_current_project_settings, PypeCreatorMixin ) -from .lib import check_subsetname_exists -import nuke +from .lib import ( + Knobby, + check_subsetname_exists, + reset_selection, + maintained_selection, + set_avalon_knob_data, + add_publish_knob +) -class PypeCreator(PypeCreatorMixin, avalon.nuke.pipeline.Creator): - """Pype Nuke Creator class wrapper - """ +class OpenPypeCreator(PypeCreatorMixin, avalon.api.Creator): + """Pype Nuke Creator class wrapper""" + node_color = "0xdfea5dff" + def __init__(self, *args, **kwargs): - super(PypeCreator, self).__init__(*args, **kwargs) + super(OpenPypeCreator, self).__init__(*args, **kwargs) self.presets = get_current_project_settings()["nuke"]["create"].get( self.__class__.__name__, {} ) @@ -31,6 +38,38 @@ class PypeCreator(PypeCreatorMixin, avalon.nuke.pipeline.Creator): raise NameError("`{0}: {1}".format(__name__, msg)) return + def process(self): + from nukescripts import autoBackdrop + + instance = None + + if (self.options or {}).get("useSelection"): + + nodes = nuke.selectedNodes() + if not nodes: + nuke.message("Please select nodes that you " + "wish to add to a container") + return + + elif len(nodes) == 1: + # only one node is selected + instance = nodes[0] + + if not instance: + # Not using selection or multiple nodes selected + bckd_node = autoBackdrop() + bckd_node["tile_color"].setValue(int(self.node_color, 16)) + bckd_node["note_font_size"].setValue(24) + bckd_node["label"].setValue("[{}]".format(self.name)) + + instance = bckd_node + + # add avalon knobs + set_avalon_knob_data(instance, self.data) + add_publish_knob(instance) + + return instance + def get_review_presets_config(): settings = 
get_current_project_settings() @@ -48,7 +87,7 @@ def get_review_presets_config(): return [str(name) for name, _prop in outputs.items()] -class NukeLoader(api.Loader): +class NukeLoader(avalon.api.Loader): container_id_knob = "containerId" container_id = None @@ -74,7 +113,7 @@ class NukeLoader(api.Loader): node[self.container_id_knob].setValue(source_id) else: HIDEN_FLAG = 0x00040000 - _knob = anlib.Knobby( + _knob = Knobby( "String_Knob", self.container_id, flags=[ @@ -183,7 +222,7 @@ class ExporterReview(object): Returns: nuke.Node: copy node of Input Process node """ - anlib.reset_selection() + reset_selection() ipn_orig = None for v in nuke.allNodes(filter="Viewer"): ip = v["input_process"].getValue() @@ -196,7 +235,7 @@ class ExporterReview(object): # copy selected to clipboard nuke.nodeCopy("%clipboard%") # reset selection - anlib.reset_selection() + reset_selection() # paste node and selection is on it only nuke.nodePaste("%clipboard%") # assign to variable @@ -396,7 +435,7 @@ class ExporterReviewMov(ExporterReview): def save_file(self): import shutil - with anlib.maintained_selection(): + with maintained_selection(): self.log.info("Saving nodes as file... ") # create nk path path = os.path.splitext(self.path)[0] + ".nk" diff --git a/openpype/hosts/nuke/api/utils.py b/openpype/hosts/nuke/api/utils.py index e43c11a380..f8f248357b 100644 --- a/openpype/hosts/nuke/api/utils.py +++ b/openpype/hosts/nuke/api/utils.py @@ -1,7 +1,8 @@ import os import nuke -from avalon.nuke import lib as anlib + from openpype.api import resources +from .lib import maintained_selection def set_context_favorites(favorites=None): @@ -55,7 +56,7 @@ def bake_gizmos_recursively(in_group=nuke.Root()): is_group (nuke.Node)[optonal]: group node or all nodes """ # preserve selection after all is done - with anlib.maintained_selection(): + with maintained_selection(): # jump to the group with in_group: for node in nuke.allNodes(): diff --git a/openpype/hosts/nuke/api/workio.py b/openpype/hosts/nuke/api/workio.py new file mode 100644 index 0000000000..dbc24fdc9b --- /dev/null +++ b/openpype/hosts/nuke/api/workio.py @@ -0,0 +1,55 @@ +"""Host API required Work Files tool""" +import os +import nuke +import avalon.api + + +def file_extensions(): + return avalon.api.HOST_WORKFILE_EXTENSIONS["nuke"] + + +def has_unsaved_changes(): + return nuke.root().modified() + + +def save_file(filepath): + path = filepath.replace("\\", "/") + nuke.scriptSaveAs(path) + nuke.Root()["name"].setValue(path) + nuke.Root()["project_directory"].setValue(os.path.dirname(path)) + nuke.Root().setModified(False) + + +def open_file(filepath): + filepath = filepath.replace("\\", "/") + + # To remain in the same window, we have to clear the script and read + # in the contents of the workfile. 
+ nuke.scriptClear() + nuke.scriptReadFile(filepath) + nuke.Root()["name"].setValue(filepath) + nuke.Root()["project_directory"].setValue(os.path.dirname(filepath)) + nuke.Root().setModified(False) + return True + + +def current_file(): + current_file = nuke.root().name() + + # Unsaved current file + if current_file == 'Root': + return None + + return os.path.normpath(current_file).replace("\\", "/") + + +def work_root(session): + + work_dir = session["AVALON_WORKDIR"] + scene_dir = session.get("AVALON_SCENEDIR") + if scene_dir: + path = os.path.join(work_dir, scene_dir) + else: + path = work_dir + + return os.path.normpath(path).replace("\\", "/") diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py index cda2629587..0c11b3f274 100644 --- a/openpype/hosts/nuke/plugins/create/create_backdrop.py +++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py @@ -1,9 +1,12 @@ -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import plugin import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import ( + select_nodes, + set_avalon_knob_data +) -class CreateBackdrop(plugin.PypeCreator): +class CreateBackdrop(plugin.OpenPypeCreator): """Add Publishable Backdrop""" name = "nukenodes" @@ -25,14 +28,14 @@ class CreateBackdrop(plugin.PypeCreator): nodes = self.nodes if len(nodes) >= 1: - anlib.select_nodes(nodes) + select_nodes(nodes) bckd_node = autoBackdrop() bckd_node["name"].setValue("{}_BDN".format(self.name)) bckd_node["tile_color"].setValue(int(self.node_color, 16)) bckd_node["note_font_size"].setValue(24) bckd_node["label"].setValue("[{}]".format(self.name)) # add avalon knobs - instance = anlib.set_avalon_knob_data(bckd_node, self.data) + instance = set_avalon_knob_data(bckd_node, self.data) return instance else: @@ -48,6 +51,6 @@ class CreateBackdrop(plugin.PypeCreator): bckd_node["note_font_size"].setValue(24) bckd_node["label"].setValue("[{}]".format(self.name)) # add avalon knobs - instance = anlib.set_avalon_knob_data(bckd_node, self.data) + instance = set_avalon_knob_data(bckd_node, self.data) return instance diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index 359086d48f..3b13c80dc4 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -1,9 +1,11 @@ -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import plugin import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import ( + set_avalon_knob_data +) -class CreateCamera(plugin.PypeCreator): +class CreateCamera(plugin.OpenPypeCreator): """Add Publishable Backdrop""" name = "camera" @@ -36,7 +38,7 @@ class CreateCamera(plugin.PypeCreator): # change node color n["tile_color"].setValue(int(self.node_color, 16)) # add avalon knobs - anlib.set_avalon_knob_data(n, data) + set_avalon_knob_data(n, data) return True else: msg = str("Please select nodes you " @@ -49,5 +51,5 @@ class CreateCamera(plugin.PypeCreator): camera_node = nuke.createNode("Camera2") camera_node["tile_color"].setValue(int(self.node_color, 16)) # add avalon knobs - instance = anlib.set_avalon_knob_data(camera_node, self.data) + instance = set_avalon_knob_data(camera_node, self.data) return instance diff --git a/openpype/hosts/nuke/plugins/create/create_gizmo.py b/openpype/hosts/nuke/plugins/create/create_gizmo.py index c59713cff1..de73623a1e 100644 --- 
a/openpype/hosts/nuke/plugins/create/create_gizmo.py +++ b/openpype/hosts/nuke/plugins/create/create_gizmo.py @@ -1,9 +1,14 @@ -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import plugin import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import ( + maintained_selection, + select_nodes, + set_avalon_knob_data +) -class CreateGizmo(plugin.PypeCreator): + +class CreateGizmo(plugin.OpenPypeCreator): """Add Publishable "gizmo" group The name is symbolically gizmo as presumably @@ -28,13 +33,13 @@ class CreateGizmo(plugin.PypeCreator): nodes = self.nodes self.log.info(len(nodes)) if len(nodes) == 1: - anlib.select_nodes(nodes) + select_nodes(nodes) node = nodes[-1] # check if Group node if node.Class() in "Group": node["name"].setValue("{}_GZM".format(self.name)) node["tile_color"].setValue(int(self.node_color, 16)) - return anlib.set_avalon_knob_data(node, self.data) + return set_avalon_knob_data(node, self.data) else: msg = ("Please select a group node " "you wish to publish as the gizmo") @@ -42,7 +47,7 @@ class CreateGizmo(plugin.PypeCreator): nuke.message(msg) if len(nodes) >= 2: - anlib.select_nodes(nodes) + select_nodes(nodes) nuke.makeGroup() gizmo_node = nuke.selectedNode() gizmo_node["name"].setValue("{}_GZM".format(self.name)) @@ -57,16 +62,15 @@ class CreateGizmo(plugin.PypeCreator): "- create User knobs on the group") # add avalon knobs - return anlib.set_avalon_knob_data(gizmo_node, self.data) + return set_avalon_knob_data(gizmo_node, self.data) else: - msg = ("Please select nodes you " - "wish to add to the gizmo") + msg = "Please select nodes you wish to add to the gizmo" self.log.error(msg) nuke.message(msg) return else: - with anlib.maintained_selection(): + with maintained_selection(): gizmo_node = nuke.createNode("Group") gizmo_node["name"].setValue("{}_GZM".format(self.name)) gizmo_node["tile_color"].setValue(int(self.node_color, 16)) @@ -80,4 +84,4 @@ class CreateGizmo(plugin.PypeCreator): "- create User knobs on the group") # add avalon knobs - return anlib.set_avalon_knob_data(gizmo_node, self.data) + return set_avalon_knob_data(gizmo_node, self.data) diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py index 4e30860e05..15a4e3ab8a 100644 --- a/openpype/hosts/nuke/plugins/create/create_model.py +++ b/openpype/hosts/nuke/plugins/create/create_model.py @@ -1,9 +1,11 @@ -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import plugin import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import ( + set_avalon_knob_data +) -class CreateModel(plugin.PypeCreator): +class CreateModel(plugin.OpenPypeCreator): """Add Publishable Model Geometry""" name = "model" @@ -68,7 +70,7 @@ class CreateModel(plugin.PypeCreator): # change node color n["tile_color"].setValue(int(self.node_color, 16)) # add avalon knobs - anlib.set_avalon_knob_data(n, data) + set_avalon_knob_data(n, data) return True else: msg = str("Please select nodes you " @@ -81,5 +83,5 @@ class CreateModel(plugin.PypeCreator): model_node = nuke.createNode("WriteGeo") model_node["tile_color"].setValue(int(self.node_color, 16)) # add avalon knobs - instance = anlib.set_avalon_knob_data(model_node, self.data) + instance = set_avalon_knob_data(model_node, self.data) return instance diff --git a/openpype/hosts/nuke/plugins/create/create_read.py b/openpype/hosts/nuke/plugins/create/create_read.py index bf5de23346..bdc67add42 100644 --- 
a/openpype/hosts/nuke/plugins/create/create_read.py +++ b/openpype/hosts/nuke/plugins/create/create_read.py @@ -1,13 +1,16 @@ from collections import OrderedDict -import avalon.api -import avalon.nuke -from openpype import api as pype -from openpype.hosts.nuke.api import plugin import nuke +import avalon.api +from openpype import api as pype +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import ( + set_avalon_knob_data +) -class CrateRead(plugin.PypeCreator): + +class CrateRead(plugin.OpenPypeCreator): # change this to template preset name = "ReadCopy" label = "Create Read Copy" @@ -45,7 +48,7 @@ class CrateRead(plugin.PypeCreator): continue avalon_data = self.data avalon_data['subset'] = "{}".format(self.name) - avalon.nuke.lib.set_avalon_knob_data(node, avalon_data) + set_avalon_knob_data(node, avalon_data) node['tile_color'].setValue(16744935) count_reads += 1 diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 1b925014ad..3285e5f92d 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,11 +1,12 @@ from collections import OrderedDict -from openpype.hosts.nuke.api import ( - plugin, - lib) + import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import create_write_node -class CreateWritePrerender(plugin.PypeCreator): + +class CreateWritePrerender(plugin.OpenPypeCreator): # change this to template preset name = "WritePrerender" label = "Create Write Prerender" @@ -98,7 +99,7 @@ class CreateWritePrerender(plugin.PypeCreator): self.log.info("write_data: {}".format(write_data)) - write_node = lib.create_write_node( + write_node = create_write_node( self.data["subset"], write_data, input=selected_node, diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 5f13fddf4e..a9c4b5341e 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,11 +1,12 @@ from collections import OrderedDict -from openpype.hosts.nuke.api import ( - plugin, - lib) + import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import create_write_node -class CreateWriteRender(plugin.PypeCreator): + +class CreateWriteRender(plugin.OpenPypeCreator): # change this to template preset name = "WriteRender" label = "Create Write Render" @@ -119,7 +120,7 @@ class CreateWriteRender(plugin.PypeCreator): } ] - write_node = lib.create_write_node( + write_node = create_write_node( self.data["subset"], write_data, input=selected_node, diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py index eebb5613c3..0037b64ce3 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ b/openpype/hosts/nuke/plugins/create/create_write_still.py @@ -1,11 +1,12 @@ from collections import OrderedDict -from openpype.hosts.nuke.api import ( - plugin, - lib) + import nuke +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import create_write_node -class CreateWriteStill(plugin.PypeCreator): + +class CreateWriteStill(plugin.OpenPypeCreator): # change this to template preset name = "WriteStillFrame" label = "Create Write Still Image" @@ -108,7 +109,7 @@ class CreateWriteStill(plugin.PypeCreator): } ] - 
write_node = lib.create_write_node( + write_node = create_write_node( self.name, write_data, input=selected_node, diff --git a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py index e7ae51fa86..49405fd213 100644 --- a/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py +++ b/openpype/hosts/nuke/plugins/inventory/repair_old_loaders.py @@ -1,7 +1,6 @@ from avalon import api, style -from avalon.nuke import lib as anlib -from openpype.api import ( - Logger) +from openpype.api import Logger +from openpype.hosts.nuke.api.lib import set_avalon_knob_data class RepairOldLoaders(api.InventoryAction): @@ -10,7 +9,7 @@ class RepairOldLoaders(api.InventoryAction): icon = "gears" color = style.colors.alert - log = Logger().get_logger(__name__) + log = Logger.get_logger(__name__) def process(self, containers): import nuke @@ -34,4 +33,4 @@ class RepairOldLoaders(api.InventoryAction): }) node["name"].setValue(new_name) # get data from avalon knob - anlib.set_avalon_knob_data(node, cdata) + set_avalon_knob_data(node, cdata) diff --git a/openpype/hosts/nuke/plugins/inventory/select_containers.py b/openpype/hosts/nuke/plugins/inventory/select_containers.py index bd00983172..3f174b3562 100644 --- a/openpype/hosts/nuke/plugins/inventory/select_containers.py +++ b/openpype/hosts/nuke/plugins/inventory/select_containers.py @@ -1,4 +1,5 @@ from avalon import api +from openpype.hosts.nuke.api.commands import viewer_update_and_undo_stop class SelectContainers(api.InventoryAction): @@ -9,11 +10,10 @@ class SelectContainers(api.InventoryAction): def process(self, containers): import nuke - import avalon.nuke nodes = [nuke.toNode(i["objectName"]) for i in containers] - with avalon.nuke.viewer_update_and_undo_stop(): + with viewer_update_and_undo_stop(): # clear previous_selection [n['selected'].setValue(False) for n in nodes] # Select tool diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 9148260e9e..a2bd458948 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -1,9 +1,18 @@ from avalon import api, style, io import nuke import nukescripts -from openpype.hosts.nuke.api import lib as pnlib -from avalon.nuke import lib as anlib -from avalon.nuke import containerise, update_container + +from openpype.hosts.nuke.api.lib import ( + find_free_space_to_paste_nodes, + maintained_selection, + reset_selection, + select_nodes, + get_avalon_knob_data, + set_avalon_knob_data +) +from openpype.hosts.nuke.api.commands import viewer_update_and_undo_stop +from openpype.hosts.nuke.api import containerise, update_container + class LoadBackdropNodes(api.Loader): """Loading Published Backdrop nodes (workfile, nukenodes)""" @@ -66,12 +75,12 @@ class LoadBackdropNodes(api.Loader): # Get mouse position n = nuke.createNode("NoOp") xcursor, ycursor = (n.xpos(), n.ypos()) - anlib.reset_selection() + reset_selection() nuke.delete(n) bdn_frame = 50 - with anlib.maintained_selection(): + with maintained_selection(): # add group from nk nuke.nodePaste(file) @@ -81,11 +90,13 @@ class LoadBackdropNodes(api.Loader): nodes = nuke.selectedNodes() # get pointer position in DAG - xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(nodes, direction="right", offset=200+bdn_frame) + xpointer, ypointer = find_free_space_to_paste_nodes( + nodes, direction="right", offset=200 + bdn_frame + ) # reset position to all nodes and replace 
inputs and output for n in nodes: - anlib.reset_selection() + reset_selection() xpos = (n.xpos() - xcursor) + xpointer ypos = (n.ypos() - ycursor) + ypointer n.setXYpos(xpos, ypos) @@ -108,7 +119,7 @@ class LoadBackdropNodes(api.Loader): d.setInput(index, dot) # remove Input node - anlib.reset_selection() + reset_selection() nuke.delete(n) continue @@ -127,15 +138,15 @@ class LoadBackdropNodes(api.Loader): dot.setInput(0, dep) # remove Input node - anlib.reset_selection() + reset_selection() nuke.delete(n) continue else: new_nodes.append(n) # reselect nodes with new Dot instead of Inputs and Output - anlib.reset_selection() - anlib.select_nodes(new_nodes) + reset_selection() + select_nodes(new_nodes) # place on backdrop bdn = nukescripts.autoBackdrop() @@ -208,16 +219,16 @@ class LoadBackdropNodes(api.Loader): # just in case we are in group lets jump out of it nuke.endGroup() - with anlib.maintained_selection(): + with maintained_selection(): xpos = GN.xpos() ypos = GN.ypos() - avalon_data = anlib.get_avalon_knob_data(GN) + avalon_data = get_avalon_knob_data(GN) nuke.delete(GN) # add group from nk nuke.nodePaste(file) GN = nuke.selectedNode() - anlib.set_avalon_knob_data(GN, avalon_data) + set_avalon_knob_data(GN, avalon_data) GN.setXYpos(xpos, ypos) GN["name"].setValue(object_name) @@ -243,7 +254,6 @@ class LoadBackdropNodes(api.Loader): self.update(container, representation) def remove(self, container): - from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index 377d60e84b..b9d4bb358f 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -1,8 +1,15 @@ -from avalon import api, io -from avalon.nuke import lib as anlib -from avalon.nuke import containerise, update_container import nuke +from avalon import api, io +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) +from openpype.hosts.nuke.api.lib import ( + maintained_selection +) + class AlembicCameraLoader(api.Loader): """ @@ -43,7 +50,7 @@ class AlembicCameraLoader(api.Loader): # getting file path file = self.fname.replace("\\", "/") - with anlib.maintained_selection(): + with maintained_selection(): camera_node = nuke.createNode( "Camera2", "name {} file {} read_from_file True".format( @@ -122,7 +129,7 @@ class AlembicCameraLoader(api.Loader): # getting file path file = api.get_representation_path(representation).replace("\\", "/") - with anlib.maintained_selection(): + with maintained_selection(): camera_node = nuke.toNode(object_name) camera_node['selected'].setValue(True) @@ -181,7 +188,6 @@ class AlembicCameraLoader(api.Loader): self.update(container, representation) def remove(self, container): - from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 9ce72c0519..712cdf213f 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -3,13 +3,13 @@ from avalon.vendor import qargparse from avalon import api, io from openpype.hosts.nuke.api.lib import ( - get_imageio_input_colorspace + get_imageio_input_colorspace, + maintained_selection ) 
-from avalon.nuke import ( +from openpype.hosts.nuke.api import ( containerise, update_container, - viewer_update_and_undo_stop, - maintained_selection + viewer_update_and_undo_stop ) from openpype.hosts.nuke.api import plugin @@ -280,9 +280,6 @@ class LoadClip(plugin.NukeLoader): self.set_as_member(read_node) def remove(self, container): - - from avalon.nuke import viewer_update_and_undo_stop - read_node = nuke.toNode(container['objectName']) assert read_node.Class() == "Read", "Must be Read" @@ -378,4 +375,4 @@ class LoadClip(plugin.NukeLoader): "class_name": self.__class__.__name__ } - return self.node_name_template.format(**name_data) \ No newline at end of file + return self.node_name_template.format(**name_data) diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 8ba1b6b7c1..8b8867feba 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -1,7 +1,12 @@ -from avalon import api, style, io -import nuke import json from collections import OrderedDict +import nuke +from avalon import api, style, io +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class LoadEffects(api.Loader): @@ -30,9 +35,6 @@ class LoadEffects(api.Loader): Returns: nuke node: containerised nuke node object """ - # import dependencies - from avalon.nuke import containerise - # get main variables version = context['version'] version_data = version.get("data", {}) @@ -138,10 +140,6 @@ class LoadEffects(api.Loader): inputs: """ - - from avalon.nuke import ( - update_container - ) # get main variables # Get version from io version = io.find_one({ @@ -338,7 +336,6 @@ class LoadEffects(api.Loader): self.update(container, representation) def remove(self, container): - from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index d0cab26842..7948cbba9a 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -1,8 +1,15 @@ -from avalon import api, style, io -import nuke import json from collections import OrderedDict + +import nuke + +from avalon import api, style, io from openpype.hosts.nuke.api import lib +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class LoadEffectsInputProcess(api.Loader): @@ -30,8 +37,6 @@ class LoadEffectsInputProcess(api.Loader): Returns: nuke node: containerised nuke node object """ - # import dependencies - from avalon.nuke import containerise # get main variables version = context['version'] @@ -142,9 +147,6 @@ class LoadEffectsInputProcess(api.Loader): """ - from avalon.nuke import ( - update_container - ) # get main variables # Get version from io version = io.find_one({ @@ -355,7 +357,6 @@ class LoadEffectsInputProcess(api.Loader): self.update(container, representation) def remove(self, container): - from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo.py b/openpype/hosts/nuke/plugins/load/load_gizmo.py index c6228b95f6..f549623b88 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo.py +++ 
b/openpype/hosts/nuke/plugins/load/load_gizmo.py @@ -1,7 +1,15 @@ -from avalon import api, style, io import nuke -from avalon.nuke import lib as anlib -from avalon.nuke import containerise, update_container +from avalon import api, style, io +from openpype.hosts.nuke.api.lib import ( + maintained_selection, + get_avalon_knob_data, + set_avalon_knob_data +) +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class LoadGizmo(api.Loader): @@ -61,7 +69,7 @@ class LoadGizmo(api.Loader): # just in case we are in group lets jump out of it nuke.endGroup() - with anlib.maintained_selection(): + with maintained_selection(): # add group from nk nuke.nodePaste(file) @@ -122,16 +130,16 @@ class LoadGizmo(api.Loader): # just in case we are in group lets jump out of it nuke.endGroup() - with anlib.maintained_selection(): + with maintained_selection(): xpos = GN.xpos() ypos = GN.ypos() - avalon_data = anlib.get_avalon_knob_data(GN) + avalon_data = get_avalon_knob_data(GN) nuke.delete(GN) # add group from nk nuke.nodePaste(file) GN = nuke.selectedNode() - anlib.set_avalon_knob_data(GN, avalon_data) + set_avalon_knob_data(GN, avalon_data) GN.setXYpos(xpos, ypos) GN["name"].setValue(object_name) @@ -157,7 +165,6 @@ class LoadGizmo(api.Loader): self.update(container, representation) def remove(self, container): - from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py index 5ca101d6cb..4f17446673 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -1,8 +1,16 @@ from avalon import api, style, io import nuke -from openpype.hosts.nuke.api import lib as pnlib -from avalon.nuke import lib as anlib -from avalon.nuke import containerise, update_container +from openpype.hosts.nuke.api.lib import ( + maintained_selection, + create_backdrop, + get_avalon_knob_data, + set_avalon_knob_data +) +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class LoadGizmoInputProcess(api.Loader): @@ -62,7 +70,7 @@ class LoadGizmoInputProcess(api.Loader): # just in case we are in group lets jump out of it nuke.endGroup() - with anlib.maintained_selection(): + with maintained_selection(): # add group from nk nuke.nodePaste(file) @@ -128,16 +136,16 @@ class LoadGizmoInputProcess(api.Loader): # just in case we are in group lets jump out of it nuke.endGroup() - with anlib.maintained_selection(): + with maintained_selection(): xpos = GN.xpos() ypos = GN.ypos() - avalon_data = anlib.get_avalon_knob_data(GN) + avalon_data = get_avalon_knob_data(GN) nuke.delete(GN) # add group from nk nuke.nodePaste(file) GN = nuke.selectedNode() - anlib.set_avalon_knob_data(GN, avalon_data) + set_avalon_knob_data(GN, avalon_data) GN.setXYpos(xpos, ypos) GN["name"].setValue(object_name) @@ -197,8 +205,12 @@ class LoadGizmoInputProcess(api.Loader): viewer["input_process_node"].setValue(group_node_name) # put backdrop under - pnlib.create_backdrop(label="Input Process", layer=2, - nodes=[viewer, group_node], color="0x7c7faaff") + create_backdrop( + label="Input Process", + layer=2, + nodes=[viewer, group_node], + color="0x7c7faaff" + ) return True @@ -234,7 +246,6 @@ class LoadGizmoInputProcess(api.Loader): self.update(container, representation) def remove(self, container): - 
from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 02a5b55c18..427167ca98 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -7,6 +7,11 @@ from avalon import api, io from openpype.hosts.nuke.api.lib import ( get_imageio_input_colorspace ) +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class LoadImage(api.Loader): @@ -46,10 +51,6 @@ class LoadImage(api.Loader): return cls.representations + cls._representations def load(self, context, name, namespace, options): - from avalon.nuke import ( - containerise, - viewer_update_and_undo_stop - ) self.log.info("__ options: `{}`".format(options)) frame_number = options.get("frame_number", 1) @@ -154,11 +155,6 @@ class LoadImage(api.Loader): inputs: """ - - from avalon.nuke import ( - update_container - ) - node = nuke.toNode(container["objectName"]) frame_number = node["first"].value() @@ -234,9 +230,6 @@ class LoadImage(api.Loader): self.log.info("udated to version: {}".format(version.get("name"))) def remove(self, container): - - from avalon.nuke import viewer_update_and_undo_stop - node = nuke.toNode(container['objectName']) assert node.Class() == "Read", "Must be Read" diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 15fa4fa35c..8c8dc7f37d 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -1,7 +1,11 @@ -from avalon import api, io -from avalon.nuke import lib as anlib -from avalon.nuke import containerise, update_container import nuke +from avalon import api, io +from openpype.hosts.nuke.api.lib import maintained_selection +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class AlembicModelLoader(api.Loader): @@ -43,7 +47,7 @@ class AlembicModelLoader(api.Loader): # getting file path file = self.fname.replace("\\", "/") - with anlib.maintained_selection(): + with maintained_selection(): model_node = nuke.createNode( "ReadGeo2", "name {} file {} ".format( @@ -122,7 +126,7 @@ class AlembicModelLoader(api.Loader): # getting file path file = api.get_representation_path(representation).replace("\\", "/") - with anlib.maintained_selection(): + with maintained_selection(): model_node = nuke.toNode(object_name) model_node['selected'].setValue(True) @@ -181,7 +185,6 @@ class AlembicModelLoader(api.Loader): self.update(container, representation) def remove(self, container): - from avalon.nuke import viewer_update_and_undo_stop node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 7444dd6e96..8489283e8c 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -1,6 +1,11 @@ -from avalon import api, style, io -from avalon.nuke import get_avalon_knob_data import nuke +from avalon import api, style, io +from openpype.hosts.nuke.api.lib import get_avalon_knob_data +from openpype.hosts.nuke.api import ( + containerise, + update_container, + viewer_update_and_undo_stop +) class 
LinkAsGroup(api.Loader):
@@ -15,8 +20,6 @@ class LinkAsGroup(api.Loader):
     color = style.colors.alert

     def load(self, context, name, namespace, data):
-
-        from avalon.nuke import containerise
         # for k, v in context.items():
         #     log.info("key: `{}`, value: {}\n".format(k, v))
         version = context['version']
@@ -103,11 +106,6 @@ class LinkAsGroup(api.Loader):
             inputs:
         """
-
-        from avalon.nuke import (
-            update_container
-        )
-
         node = nuke.toNode(container['objectName'])

         root = api.get_representation_path(representation).replace("\\", "/")
@@ -155,7 +153,6 @@ class LinkAsGroup(api.Loader):
         self.log.info("updated to version: {}".format(version.get("name")))

     def remove(self, container):
-        from avalon.nuke import viewer_update_and_undo_stop
         node = nuke.toNode(container['objectName'])
         with viewer_update_and_undo_stop():
             nuke.delete(node)
diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py
index 0747c15ea7..0a2df0898e 100644
--- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py
+++ b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py
@@ -1,9 +1,16 @@
-import pyblish.api
-from avalon.nuke import lib as anlib
-from openpype.hosts.nuke.api import lib as pnlib
-import nuke
 import os
+
+import nuke
+
+import pyblish.api
+
 import openpype
+from openpype.hosts.nuke.api.lib import (
+    maintained_selection,
+    reset_selection,
+    select_nodes
+)
+

 class ExtractBackdropNode(openpype.api.Extractor):
     """Extracting content of backdrop nodes
@@ -27,7 +34,7 @@ class ExtractBackdropNode(openpype.api.Extractor):
         path = os.path.join(stagingdir, filename)

         # maintain selection
-        with anlib.maintained_selection():
+        with maintained_selection():
             # all connections outside of backdrop
             connections_in = instance.data["nodeConnectionsIn"]
             connections_out = instance.data["nodeConnectionsOut"]
@@ -44,7 +51,7 @@ class ExtractBackdropNode(openpype.api.Extractor):
                 nodes.append(inpn)
                 tmp_nodes.append(inpn)
-                anlib.reset_selection()
+                reset_selection()

             # connect output node
             for n, output in connections_out.items():
@@ -58,11 +65,11 @@ class ExtractBackdropNode(openpype.api.Extractor):
                 opn.autoplace()
                 nodes.append(opn)
                 tmp_nodes.append(opn)
-                anlib.reset_selection()
+                reset_selection()

             # select nodes to copy
-            anlib.reset_selection()
-            anlib.select_nodes(nodes)
+            reset_selection()
+            select_nodes(nodes)
             # create tmp nk file
             # save file to the path
             nuke.nodeCopy(path)
diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py
index bc50dac108..942cdc537d 100644
--- a/openpype/hosts/nuke/plugins/publish/extract_camera.py
+++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py
@@ -1,10 +1,12 @@
-import nuke
 import os
 import math
+from pprint import pformat
+
+import nuke
+
 import pyblish.api
 import openpype.api
-from avalon.nuke import lib as anlib
-from pprint import pformat
+from openpype.hosts.nuke.api.lib import maintained_selection


 class ExtractCamera(openpype.api.Extractor):
@@ -52,7 +54,7 @@ class ExtractCamera(openpype.api.Extractor):
         filename = subset + ".{}".format(extension)
         file_path = os.path.join(staging_dir, filename).replace("\\", "/")

-        with anlib.maintained_selection():
+        with maintained_selection():
             # bake camera with axes onto world coordinate XYZ
             rm_n = bakeCameraWithAxeses(
                 nuke.toNode(instance.data["name"]), output_range)
diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py
index 78bf9c998d..2d5bfdeb5e
100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -1,9 +1,15 @@ -import pyblish.api -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import utils as pnutils -import nuke import os +import nuke + +import pyblish.api + import openpype +from openpype.hosts.nuke.api import utils as pnutils +from openpype.hosts.nuke.api.lib import ( + maintained_selection, + reset_selection, + select_nodes +) class ExtractGizmo(openpype.api.Extractor): @@ -26,17 +32,17 @@ class ExtractGizmo(openpype.api.Extractor): path = os.path.join(stagingdir, filename) # maintain selection - with anlib.maintained_selection(): + with maintained_selection(): orig_grpn_name = orig_grpn.name() tmp_grpn_name = orig_grpn_name + "_tmp" # select original group node - anlib.select_nodes([orig_grpn]) + select_nodes([orig_grpn]) # copy to clipboard nuke.nodeCopy("%clipboard%") # reset selection to none - anlib.reset_selection() + reset_selection() # paste clipboard nuke.nodePaste("%clipboard%") diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index 43214bf3e9..0375263338 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -1,9 +1,12 @@ -import nuke import os +from pprint import pformat +import nuke import pyblish.api import openpype.api -from avalon.nuke import lib as anlib -from pprint import pformat +from openpype.hosts.nuke.api.lib import ( + maintained_selection, + select_nodes +) class ExtractModel(openpype.api.Extractor): @@ -49,9 +52,9 @@ class ExtractModel(openpype.api.Extractor): filename = subset + ".{}".format(extension) file_path = os.path.join(staging_dir, filename).replace("\\", "/") - with anlib.maintained_selection(): + with maintained_selection(): # select model node - anlib.select_nodes([model_node]) + select_nodes([model_node]) # create write geo node wg_n = nuke.createNode("WriteGeo") diff --git a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py index c3a6a3b167..e38927c3a7 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py +++ b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py @@ -1,6 +1,6 @@ import nuke import pyblish.api -from avalon.nuke import maintained_selection +from openpype.hosts.nuke.api.lib import maintained_selection class CreateOutputNode(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py index 8ba746a3c4..4cf2fd7d9f 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_lut.py @@ -1,8 +1,8 @@ import os import pyblish.api -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import plugin import openpype +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import maintained_selection class ExtractReviewDataLut(openpype.api.Extractor): @@ -37,7 +37,7 @@ class ExtractReviewDataLut(openpype.api.Extractor): "StagingDir `{0}`...".format(instance.data["stagingDir"])) # generate data - with anlib.maintained_selection(): + with maintained_selection(): exporter = plugin.ExporterReviewLut( self, instance ) diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py 
b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py index 32962b57a6..13d23ffb9c 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -1,8 +1,8 @@ import os import pyblish.api -from avalon.nuke import lib as anlib -from openpype.hosts.nuke.api import plugin import openpype +from openpype.hosts.nuke.api import plugin +from openpype.hosts.nuke.api.lib import maintained_selection class ExtractReviewDataMov(openpype.api.Extractor): @@ -41,7 +41,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): self.log.info(self.outputs) # generate data - with anlib.maintained_selection(): + with maintained_selection(): generated_repres = [] for o_name, o_data in self.outputs.items(): f_families = o_data["filter"]["families"] diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index 0f68680742..50e5f995f4 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -1,8 +1,8 @@ import os import nuke -from avalon.nuke import lib as anlib import pyblish.api import openpype +from openpype.hosts.nuke.api.lib import maintained_selection class ExtractSlateFrame(openpype.api.Extractor): @@ -25,7 +25,7 @@ class ExtractSlateFrame(openpype.api.Extractor): else: self.viewer_lut_raw = False - with anlib.maintained_selection(): + with maintained_selection(): self.log.debug("instance: {}".format(instance)) self.log.debug("instance.data[families]: {}".format( instance.data["families"])) diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index 0c9af66435..ef6d486ca2 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -1,9 +1,9 @@ import sys import os import nuke -from avalon.nuke import lib as anlib import pyblish.api import openpype +from openpype.hosts.nuke.api.lib import maintained_selection if sys.version_info[0] >= 3: @@ -30,7 +30,7 @@ class ExtractThumbnail(openpype.api.Extractor): if "render.farm" in instance.data["families"]: return - with anlib.maintained_selection(): + with maintained_selection(): self.log.debug("instance: {}".format(instance)) self.log.debug("instance.data[families]: {}".format( instance.data["families"])) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 5c30df9a62..97ddef0a59 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -1,7 +1,10 @@ import nuke import pyblish.api from avalon import io, api -from avalon.nuke import lib as anlib +from openpype.hosts.nuke.api.lib import ( + add_publish_knob, + get_avalon_knob_data +) @pyblish.api.log @@ -39,7 +42,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): self.log.warning(E) # get data from avalon knob - avalon_knob_data = anlib.get_avalon_knob_data( + avalon_knob_data = get_avalon_knob_data( node, ["avalon:", "ak:"]) self.log.debug("avalon_knob_data: {}".format(avalon_knob_data)) @@ -115,7 +118,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): # get publish knob value if "publish" not in node.knobs(): - anlib.add_publish_knob(node) + add_publish_knob(node) # sync workfile version _families_test = [family] + families 
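A minimal usage sketch of the relocated knob helpers, assuming a running Nuke session in which the OpenPype Nuke API is importable; the choice of nuke.root() as the target node and the knob-prefix list are illustrative only and simply mirror the calls the pre-collect plug-ins above make after the move from avalon.nuke.lib to openpype.hosts.nuke.api.lib:

    import nuke
    from openpype.hosts.nuke.api.lib import (
        add_publish_knob,
        get_avalon_knob_data,
        maintained_selection,
    )

    node = nuke.root()
    with maintained_selection():
        # read the instance data imprinted on the node (both knob prefixes are scanned)
        knob_data = get_avalon_knob_data(node, ["avalon:", "ak:"])
        # make sure the node carries the "publish" toggle the collectors rely on
        if "publish" not in node.knobs():
            add_publish_knob(node)

This is the same pattern the workfile and instance pre-collectors follow once the imports point at the OpenPype module instead of avalon.nuke.
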
diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py index 0e27273ceb..a2d1c80628 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py @@ -1,8 +1,13 @@ -import nuke -import pyblish.api import os + +import nuke + +import pyblish.api import openpype.api as pype -from avalon.nuke import lib as anlib +from openpype.hosts.nuke.api.lib import ( + add_publish_knob, + get_avalon_knob_data +) class CollectWorkfile(pyblish.api.ContextPlugin): @@ -17,9 +22,9 @@ class CollectWorkfile(pyblish.api.ContextPlugin): current_file = os.path.normpath(nuke.root().name()) - knob_data = anlib.get_avalon_knob_data(root) + knob_data = get_avalon_knob_data(root) - anlib.add_publish_knob(root) + add_publish_knob(root) family = "workfile" task = os.getenv("AVALON_TASK", None) diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index f280ad4af1..7694c3d2ba 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -1,6 +1,6 @@ -import pyblish -from avalon.nuke import lib as anlib import nuke +import pyblish +from openpype.hosts.nuke.api.lib import maintained_selection class SelectCenterInNodeGraph(pyblish.api.Action): @@ -28,7 +28,7 @@ class SelectCenterInNodeGraph(pyblish.api.Action): all_yC = list() # maintain selection - with anlib.maintained_selection(): + with maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: bdn = instance[0] diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index 9c94ea88ef..d0d930f50c 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -1,6 +1,6 @@ -import pyblish -from avalon.nuke import lib as anlib import nuke +import pyblish +from openpype.hosts.nuke.api.lib import maintained_selection class OpenFailedGroupNode(pyblish.api.Action): @@ -25,7 +25,7 @@ class OpenFailedGroupNode(pyblish.api.Action): instances = pyblish.api.instances_by_plugin(failed, plugin) # maintain selection - with anlib.maintained_selection(): + with maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: grpn = instance[0] diff --git a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py b/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py index ddf46a0873..842f74b6f6 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py +++ b/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py @@ -6,8 +6,11 @@ import nuke import pyblish.api import openpype.api -import avalon.nuke.lib -import openpype.hosts.nuke.api as nuke_api +from openpype.hosts.nuke.api.lib import ( + recreate_instance, + reset_selection, + select_nodes +) class SelectInvalidInstances(pyblish.api.Action): @@ -47,12 +50,12 @@ class SelectInvalidInstances(pyblish.api.Action): self.deselect() def select(self, instances): - avalon.nuke.lib.select_nodes( + select_nodes( [nuke.toNode(str(x)) for x in instances] ) def deselect(self): - avalon.nuke.lib.reset_selection() + reset_selection() class RepairSelectInvalidInstances(pyblish.api.Action): @@ -82,7 +85,7 @@ class RepairSelectInvalidInstances(pyblish.api.Action): context_asset = 
context.data["assetEntity"]["name"] for instance in instances: origin_node = instance[0] - nuke_api.lib.recreate_instance( + recreate_instance( origin_node, avalon_data={"asset": context_asset} ) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py index ba34ec8338..a73bed8edd 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py @@ -1,13 +1,12 @@ -import toml import os +import toml import nuke from avalon import api -import re import pyblish.api import openpype.api -from avalon.nuke import get_avalon_knob_data +from openpype.hosts.nuke.api.lib import get_avalon_knob_data class ValidateWriteLegacy(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 732f321b85..c0d5c8f402 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -1,8 +1,11 @@ import os import pyblish.api import openpype.utils -import openpype.hosts.nuke.lib as nukelib -import avalon.nuke +from openpype.hosts.nuke.api.lib import ( + get_write_node_template_attr, + get_node_path +) + @pyblish.api.log class RepairNukeWriteNodeAction(pyblish.api.Action): @@ -15,7 +18,7 @@ class RepairNukeWriteNodeAction(pyblish.api.Action): for instance in instances: node = instance[1] - correct_data = nukelib.get_write_node_template_attr(node) + correct_data = get_write_node_template_attr(node) for k, v in correct_data.items(): node[k].setValue(v) self.log.info("Node attributes were fixed") @@ -34,14 +37,14 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): def process(self, instance): node = instance[1] - correct_data = nukelib.get_write_node_template_attr(node) + correct_data = get_write_node_template_attr(node) check = [] for k, v in correct_data.items(): if k is 'file': padding = len(v.split('#')) - ref_path = avalon.nuke.lib.get_node_path(v, padding) - n_path = avalon.nuke.lib.get_node_path(node[k].value(), padding) + ref_path = get_node_path(v, padding) + n_path = get_node_path(node[k].value(), padding) isnt = False for i, p in enumerate(ref_path): if str(n_path[i]) not in str(p): diff --git a/openpype/hosts/nuke/startup/init.py b/openpype/hosts/nuke/startup/init.py index 0ea5d1ad7d..d7560814bf 100644 --- a/openpype/hosts/nuke/startup/init.py +++ b/openpype/hosts/nuke/startup/init.py @@ -1,2 +1,4 @@ +import nuke + # default write mov nuke.knobDefault('Write.mov.colorspace', 'sRGB') diff --git a/openpype/hosts/nuke/startup/menu.py b/openpype/hosts/nuke/startup/menu.py index b7ed35b3b4..2cac6d09e7 100644 --- a/openpype/hosts/nuke/startup/menu.py +++ b/openpype/hosts/nuke/startup/menu.py @@ -1,14 +1,19 @@ +import nuke +import avalon.api + +from openpype.api import Logger +from openpype.hosts.nuke import api from openpype.hosts.nuke.api.lib import ( on_script_load, check_inventory_versions, - WorkfileSettings + WorkfileSettings, + dirmap_file_name_filter ) -import nuke -from openpype.api import Logger -from openpype.hosts.nuke.api.lib import dirmap_file_name_filter +log = Logger.get_logger(__name__) -log = Logger().get_logger(__name__) + +avalon.api.install(api) # fix ffmpeg settings on script nuke.addOnScriptLoad(on_script_load) From 26d8304fd9704f04bd9ac076d193dc1646e4a38b Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Jan 2022 12:27:09 +0100 Subject: 
[PATCH 032/160] removed avalon nuke path from add implementation environments

---
 openpype/hosts/nuke/__init__.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/openpype/hosts/nuke/__init__.py b/openpype/hosts/nuke/__init__.py
index 366f704dd8..60b37ce1dd 100644
--- a/openpype/hosts/nuke/__init__.py
+++ b/openpype/hosts/nuke/__init__.py
@@ -6,10 +6,7 @@ def add_implementation_envs(env, _app):
     # Add requirements to NUKE_PATH
     pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
     new_nuke_paths = [
-        os.path.join(pype_root, "openpype", "hosts", "nuke", "startup"),
-        os.path.join(
-            pype_root, "repos", "avalon-core", "setup", "nuke", "nuke_path"
-        )
+        os.path.join(pype_root, "openpype", "hosts", "nuke", "startup")
     ]
     old_nuke_path = env.get("NUKE_PATH") or ""
     for path in old_nuke_path.split(os.pathsep):

From 9980aa90fa196eb07e57ea7155b7ce98469d81e9 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Wed, 12 Jan 2022 12:32:21 +0100
Subject: [PATCH 033/160] fix default value of function argument

---
 openpype/hosts/nuke/api/utils.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/nuke/api/utils.py b/openpype/hosts/nuke/api/utils.py
index f8f248357b..205b23efe6 100644
--- a/openpype/hosts/nuke/api/utils.py
+++ b/openpype/hosts/nuke/api/utils.py
@@ -49,12 +49,14 @@ def gizmo_is_nuke_default(gizmo):
     return gizmo.filename().startswith(plug_dir)


-def bake_gizmos_recursively(in_group=nuke.Root()):
+def bake_gizmos_recursively(in_group=None):
     """Converting a gizmo to group

     Arguments:
         in_group (nuke.Node)[optional]: group node or all nodes
     """
+    if in_group is None:
+        in_group = nuke.Root()
     # preserve selection after all is done
     with maintained_selection():
         # jump to the group

From 197b2d33a672e4ece48bfe7b6f5b38076c2209bd Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 12 Jan 2022 13:30:49 +0100
Subject: [PATCH 034/160] flame: instance collector update

---
 .../plugins/publish/precollect_instances.py | 151 ++++++++++--------
 1 file changed, 88 insertions(+), 63 deletions(-)

diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py
index e302bc42a4..fa007b3efd 100644
--- a/openpype/hosts/flame/plugins/publish/precollect_instances.py
+++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py
@@ -1,6 +1,7 @@
 import pyblish
-# import openpype
+import openpype
 import openpype.hosts.flame.api as opfapi
+from openpype.hosts.flame.otio import flame_export
 # # developer reload modules
 from pprint import pformat
@@ -20,10 +21,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         sequence = context.data["flameSequence"]
         self.otio_timeline = context.data["otioTimeline"]
         self.clips_in_reels = opfapi.get_clips_in_reels(project)
+        self.fps = context.data["fps"]

         # process all selected
-        with opfapi.maintained_segment_selection(sequence) as selected_segments:
-            for segment in selected_segments:
+        with opfapi.maintained_segment_selection(sequence) as segments:
+            for segment in segments:
                 clip_data = opfapi.get_segment_attributes(segment)
                 clip_name = clip_data["segment_name"]
                 self.log.debug("clip_name: {}".format(clip_name))
@@ -38,21 +40,15 @@
                 if marker_data.get("id") != "pyblish.avalon.instance":
                     continue

+                # get file path
                 file_path = clip_data["fpath"]
+
+                # get source clip
+                source_clip = self._get_reel_clip(file_path)
+
                 first_frame = opfapi.get_frame_from_path(file_path) or 0

-                # calculate head and tail with forward
compatibility - head = clip_data.get("segment_head") - tail = clip_data.get("segment_tail") - - if not head: - head = int(clip_data["source_in"]) - int(first_frame) - if not tail: - tail = int( - clip_data["source_duration"] - ( - head + clip_data["record_duration"] - ) - ) + head, tail = self._get_head_tail(clip_data, first_frame) # solve handles length marker_data["handleStart"] = min( @@ -93,17 +89,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "item": segment, "families": families, "publish": marker_data["publish"], - "fps": context.data["fps"], + "fps": self.fps, + "flameSourceClip": source_clip, + "sourceFirstFrame": first_frame }) - # # otio clip data - # otio_data = self.get_otio_clip_instance_data(segment) or {} - # self.log.debug("__ otio_data: {}".format(pformat(otio_data))) - # data.update(otio_data) - # self.log.debug("__ data: {}".format(pformat(data))) + # otio clip data + otio_data = self._get_otio_clip_instance_data(clip_data) or {} + self.log.debug("__ otio_data: {}".format(pformat(otio_data))) + data.update(otio_data) + self.log.debug("__ data: {}".format(pformat(data))) - # # add resolution - # self.get_resolution_to_data(data, context) + # add resolution + self._get_resolution_to_data(data, context) # create instance instance = context.create_instance(**data) @@ -116,7 +114,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }) # create shot instance for shot attributes create/update - self.create_shot_instance(context, clip_name, **data) + self._create_shot_instance(context, clip_name, **data) self.log.info("Creating instance: {}".format(instance)) self.log.info( @@ -130,7 +128,30 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if marker_data.get("reviewTrack") is not None: instance.data["reviewAudio"] = True - def get_resolution_to_data(self, data, context): + def _get_head_tail(self, clip_data, first_frame): + # calculate head and tail with forward compatibility + head = clip_data.get("segment_head") + tail = clip_data.get("segment_tail") + + if not head: + head = int(clip_data["source_in"]) - int(first_frame) + if not tail: + tail = int( + clip_data["source_duration"] - ( + head + clip_data["record_duration"] + ) + ) + return head, tail + + def _get_reel_clip(self, path): + match_reel_clip = [ + clip for clip in self.clips_in_reels + if clip["fpath"] == path + ] + if match_reel_clip: + return match_reel_clip.pop() + + def _get_resolution_to_data(self, data, context): assert data.get("otioClip"), "Missing `otioClip` data" # solve source resolution option @@ -155,7 +176,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "openpype.timeline.pixelAspect"] }) - def create_shot_instance(self, context, clip_name, **data): + def _create_shot_instance(self, context, clip_name, **data): master_layer = data.get("heroTrack") hierarchy_data = data.get("hierarchyData") asset = data.get("asset") @@ -193,47 +214,51 @@ class PrecollectInstances(pyblish.api.ContextPlugin): self.log.debug( "_ instance.data: {}".format(pformat(instance.data))) - # def get_otio_clip_instance_data(self, segment): - # """ - # Return otio objects for timeline, track and clip + def _get_otio_clip_instance_data(self, clip_data): + """ + Return otio objects for timeline, track and clip - # Args: - # timeline_item_data (dict): timeline_item_data from list returned by - # resolve.get_current_timeline_items() - # otio_timeline (otio.schema.Timeline): otio object + Args: + timeline_item_data (dict): timeline_item_data from list returned by + 
resolve.get_current_timeline_items() + otio_timeline (otio.schema.Timeline): otio object - # Returns: - # dict: otio clip object + Returns: + dict: otio clip object - # """ - # ti_track_name = segment.parent().name() - # timeline_range = self.create_otio_time_range_from_timeline_item_data( - # segment) - # for otio_clip in self.otio_timeline.each_clip(): - # track_name = otio_clip.parent().name - # parent_range = otio_clip.range_in_parent() - # if ti_track_name not in track_name: - # continue - # if otio_clip.name not in segment.name(): - # continue - # if openpype.lib.is_overlapping_otio_ranges( - # parent_range, timeline_range, strict=True): + """ + segment = clip_data["PySegment"] - # # add pypedata marker to otio_clip metadata - # for marker in otio_clip.markers: - # if phiero.pype_tag_name in marker.name: - # otio_clip.metadata.update(marker.metadata) - # return {"otioClip": otio_clip} + self.log.debug( + ">> flame Track.dir: {}".format(dir(segment.parent))) + s_track_name = segment.parent.name.get_value() - # return None + timeline_range = self._create_otio_time_range_from_timeline_item_data( + clip_data) - # @staticmethod - # def create_otio_time_range_from_timeline_item_data(segment): - # speed = segment.playbackSpeed() - # timeline = phiero.get_current_sequence() - # frame_start = int(segment.timelineIn()) - # frame_duration = int(segment.sourceDuration() / speed) - # fps = timeline.framerate().toFloat() + for otio_clip in self.otio_timeline.each_clip(): + self.log.debug( + ">> OTIO Track.dir: {}".format(dir(otio_clip.parent()))) + track_name = otio_clip.parent().name + parent_range = otio_clip.range_in_parent() + if s_track_name not in track_name: + continue + if otio_clip.name not in segment.name.get_value(): + continue + if openpype.lib.is_overlapping_otio_ranges( + parent_range, timeline_range, strict=True): - # return hiero_export.create_otio_time_range( - # frame_start, frame_duration, fps) + # add pypedata marker to otio_clip metadata + for marker in otio_clip.markers: + if opfapi.MARKER_NAME in marker.name: + otio_clip.metadata.update(marker.metadata) + return {"otioClip": otio_clip} + + return None + + def _create_otio_time_range_from_timeline_item_data(self, clip_data): + frame_start = int(clip_data["record_in"]) + frame_duration = int(clip_data["record_duration"]) + + return flame_export.create_otio_time_range( + frame_start, frame_duration, self.fps) From ac4302793b58c0e62f89ddee1764b3697bfc097b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 12 Jan 2022 13:34:27 +0100 Subject: [PATCH 035/160] fix multiple copies of loaded proxy assignment --- openpype/tools/mayalookassigner/commands.py | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/openpype/tools/mayalookassigner/commands.py b/openpype/tools/mayalookassigner/commands.py index a1f26f01ab..b9402d8ea1 100644 --- a/openpype/tools/mayalookassigner/commands.py +++ b/openpype/tools/mayalookassigner/commands.py @@ -116,8 +116,9 @@ def create_asset_id_hash(nodes): ids = get_alembic_ids_cache(path) for k, _ in ids.items(): pid = k.split(":")[0] - if not node_id_hash.get(pid): - node_id_hash[pid] = [node] + if node not in node_id_hash[pid]: + node_id_hash[pid].append(node) + else: value = lib.get_id(node) if value is None: @@ -150,20 +151,6 @@ def create_items_from_nodes(nodes): id_hashes = create_asset_id_hash(nodes) - # get ids from alembic - if cmds.pluginInfo('vrayformaya', query=True, loaded=True): - vray_proxy_nodes = cmds.ls(nodes, type="VRayProxy") - for vp in 
vray_proxy_nodes: - path = cmds.getAttr("{}.fileName".format(vp)) - ids = get_alembic_ids_cache(path) - parent_id = {} - for k, _ in ids.items(): - pid = k.split(":")[0] - if not parent_id.get(pid): - parent_id[pid] = [vp] - print("Adding ids from alembic {}".format(path)) - id_hashes.update(parent_id) - if not id_hashes: log.warning("No id hashes") return asset_view_items From 34a9269688290169d072fe1786c89674d5015047 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Jan 2022 14:33:31 +0100 Subject: [PATCH 036/160] Refactor publish families `mayaAscii` -> `mayaScene` --- .../projects_schema/schemas/template_publish_families.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json index 9db1427562..b5e33e2cf9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json @@ -17,7 +17,7 @@ {"editorial": "editorial"}, {"layout": "layout"}, {"look": "look"}, - {"mayaAscii": "mayaAscii"}, + {"mayaScene": "mayaScene"}, {"model": "model"}, {"pointcache": "pointcache"}, {"reference": "reference"}, From 67138f2787bb871b79ab865e162e53a542414779 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Jan 2022 14:35:20 +0100 Subject: [PATCH 037/160] flame: fix correct search condition --- openpype/hosts/flame/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index e53127503b..b963a1cb39 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -559,7 +559,7 @@ def get_segment_attributes(segment): attr = getattr(segment, attr_name) segment_attrs_data[attr] = str(attr).replace("+", ":") - if attr in ["record_in", "record_out"]: + if attr_name in ["record_in", "record_out"]: clip_data[attr_name] = attr.relative_frame else: clip_data[attr_name] = attr.frame From 56035a1dbbf7a91953ed85e685e188c2fc068b85 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Jan 2022 14:43:05 +0100 Subject: [PATCH 038/160] Allow space in filenames when converting with `maketx` --- openpype/hosts/maya/plugins/publish/extract_look.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 953539f65c..a101a627cf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -75,8 +75,12 @@ def maketx(source, destination, *args): "--filter lanczos3", ] + def _escape(path): + # Ensure path is enclosed by quotes to allow paths with spaces + return '"{}"'.format(path) + cmd.extend(args) - cmd.extend(["-o", destination, source]) + cmd.extend(["-o", _escape(destination), _escape(source)]) cmd = " ".join(cmd) From 36a4261db7e35c86c906adebce86321b2eb3bd10 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Jan 2022 14:56:50 +0100 Subject: [PATCH 039/160] Escape space in filename in sourceHash too by enclosing in quotes --- .../hosts/maya/plugins/publish/extract_look.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index a101a627cf..6f7b438408 
100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -22,6 +22,11 @@ COPY = 1 HARDLINK = 2 +def escape_space(path): + """Ensure path is enclosed by quotes to allow paths with spaces""" + return '"{}"'.format(path) if " " in path else path + + def find_paths_by_hash(texture_hash): """Find the texture hash key in the dictionary. @@ -57,7 +62,7 @@ def maketx(source, destination, *args): """ from openpype.lib import get_oiio_tools_path - maketx_path = get_oiio_tools_path("maketx") + maketx_path = get_oiio_tools_path("maketx") + ".exe" if not os.path.exists(maketx_path): print( "OIIO tool not found in {}".format(maketx_path)) @@ -75,12 +80,8 @@ def maketx(source, destination, *args): "--filter lanczos3", ] - def _escape(path): - # Ensure path is enclosed by quotes to allow paths with spaces - return '"{}"'.format(path) - cmd.extend(args) - cmd.extend(["-o", _escape(destination), _escape(source)]) + cmd.extend(["-o", escape_space(destination), escape_space(source)]) cmd = " ".join(cmd) @@ -318,7 +319,6 @@ class ExtractLook(openpype.api.Extractor): do_maketx = instance.data.get("maketx", False) # Collect all unique files used in the resources - files = set() files_metadata = {} for resource in resources: # Preserve color space values (force value after filepath change) @@ -329,7 +329,6 @@ class ExtractLook(openpype.api.Extractor): for f in resource["files"]: files_metadata[os.path.normpath(f)] = { "color_space": color_space} - # files.update(os.path.normpath(f)) # Process the resource files transfers = [] @@ -337,7 +336,6 @@ class ExtractLook(openpype.api.Extractor): hashes = {} force_copy = instance.data.get("forceCopy", False) - self.log.info(files) for filepath in files_metadata: linearize = False @@ -496,7 +494,7 @@ class ExtractLook(openpype.api.Extractor): # Include `source-hash` as string metadata "-sattrib", "sourceHash", - texture_hash, + escape_space(texture_hash), colorconvert, ) From 3b101627978709a1bf5d648a5616286642a7f8d7 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 12 Jan 2022 15:36:16 +0100 Subject: [PATCH 040/160] Revert adding the .exe to filepath (was for testing on my end only) --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 6f7b438408..bf79ddbf44 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -62,7 +62,7 @@ def maketx(source, destination, *args): """ from openpype.lib import get_oiio_tools_path - maketx_path = get_oiio_tools_path("maketx") + ".exe" + maketx_path = get_oiio_tools_path("maketx") if not os.path.exists(maketx_path): print( "OIIO tool not found in {}".format(maketx_path)) From f0a11fa0bfdf4ac33218c4154cd150cbe5a99490 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Jan 2022 15:37:11 +0100 Subject: [PATCH 041/160] flame: fix otio path padding --- openpype/hosts/flame/otio/flame_export.py | 2 +- openpype/hosts/flame/otio/utils.py | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index aea1f387e8..615904ec09 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -295,7 +295,7 @@ def create_otio_reference(clip_data): if is_sequence: metadata.update({ 
"isSequence": True, - "padding": padding + "padding": len(padding) }) otio_ex_ref_item = None diff --git a/openpype/hosts/flame/otio/utils.py b/openpype/hosts/flame/otio/utils.py index 229946343b..57a15d65a1 100644 --- a/openpype/hosts/flame/otio/utils.py +++ b/openpype/hosts/flame/otio/utils.py @@ -1,4 +1,5 @@ import re +import os import opentimelineio as otio import logging log = logging.getLogger(__name__) @@ -33,19 +34,21 @@ def get_reformated_path(path, padded=True): get_reformated_path("plate.1001.exr") > plate.%04d.exr """ - padding = get_padding_from_path(path) - found = get_frame_from_path(path) + basename = os.path.basename(path) + dirpath = os.path.dirname(path) + padding = get_padding_from_path(basename) + found = get_frame_from_path(basename) if not found: log.info("Path is not sequence: {}".format(path)) return path if padded: - path = path.replace(found, "%0{}d".format(padding)) + basename = basename.replace(found, "%0{}d".format(padding)) else: - path = path.replace(found, "%d") + basename = basename.replace(found, "%d") - return path + return os.path.join(dirpath, basename) def get_padding_from_path(path): From 10de030e133d7beca656515638da48312b33751f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Jan 2022 15:40:33 +0100 Subject: [PATCH 042/160] flame: adding host to global plugins --- openpype/plugins/publish/collect_hierarchy.py | 2 +- openpype/plugins/publish/collect_otio_frame_ranges.py | 2 +- openpype/plugins/publish/collect_otio_review.py | 2 +- openpype/plugins/publish/collect_otio_subset_resources.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index f7d1c6b4be..7f7306f73b 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -15,7 +15,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): label = "Collect Hierarchy" order = pyblish.api.CollectorOrder - 0.47 families = ["shot"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "flame"] def process(self, context): temp_context = {} diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index a35ef47e79..511ed757b3 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -20,7 +20,7 @@ class CollectOcioFrameRanges(pyblish.api.InstancePlugin): label = "Collect OTIO Frame Ranges" order = pyblish.api.CollectorOrder - 0.48 families = ["shot", "clip"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "flame"] def process(self, instance): # get basic variables diff --git a/openpype/plugins/publish/collect_otio_review.py b/openpype/plugins/publish/collect_otio_review.py index 10ceafdcca..6634be0671 100644 --- a/openpype/plugins/publish/collect_otio_review.py +++ b/openpype/plugins/publish/collect_otio_review.py @@ -22,7 +22,7 @@ class CollectOcioReview(pyblish.api.InstancePlugin): label = "Collect OTIO Review" order = pyblish.api.CollectorOrder - 0.47 families = ["clip"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "flame"] def process(self, instance): # get basic variables diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index 571d0d56a4..d740ceb508 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ 
b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -20,7 +20,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): label = "Collect OTIO Subset Resources" order = pyblish.api.CollectorOrder - 0.47 families = ["clip"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "flame"] def process(self, instance): From 35f721fc8ac41de47473f93eeb8a650dfa76c8a0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Jan 2022 16:08:09 +0100 Subject: [PATCH 043/160] flame: adding file path to instance data --- .../hosts/flame/plugins/publish/precollect_instances.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py index fa007b3efd..a093bb82fa 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -91,7 +91,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "publish": marker_data["publish"], "fps": self.fps, "flameSourceClip": source_clip, - "sourceFirstFrame": first_frame + "sourceFirstFrame": first_frame, + "path": file_path }) # otio clip data @@ -228,17 +229,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin): """ segment = clip_data["PySegment"] - - self.log.debug( - ">> flame Track.dir: {}".format(dir(segment.parent))) s_track_name = segment.parent.name.get_value() - timeline_range = self._create_otio_time_range_from_timeline_item_data( clip_data) for otio_clip in self.otio_timeline.each_clip(): - self.log.debug( - ">> OTIO Track.dir: {}".format(dir(otio_clip.parent()))) track_name = otio_clip.parent().name parent_range = otio_clip.range_in_parent() if s_track_name not in track_name: From 39578a4a5104737a5e5bbcaa44bd8eebc64cebe2 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Jan 2022 16:11:19 +0100 Subject: [PATCH 044/160] flame: adding host to ftrack plugin --- .../ftrack/plugins/publish/integrate_hierarchy_ftrack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index fbd64d9f70..61892240d7 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -63,7 +63,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder - 0.04 label = 'Integrate Hierarchy To Ftrack' families = ["shot"] - hosts = ["hiero", "resolve", "standalonepublisher"] + hosts = ["hiero", "resolve", "standalonepublisher", "flame"] optional = False def process(self, context): From 6c2204c92d577a9cddc9533b13d16fd2829f1974 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 12 Jan 2022 18:44:33 +0100 Subject: [PATCH 045/160] added ability to hide publish if plugin need it --- openpype/tools/pyblish_pype/window.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/tools/pyblish_pype/window.py b/openpype/tools/pyblish_pype/window.py index fdd2d80e23..edcf6f53b6 100644 --- a/openpype/tools/pyblish_pype/window.py +++ b/openpype/tools/pyblish_pype/window.py @@ -909,6 +909,13 @@ class Window(QtWidgets.QDialog): self.tr("Processing"), plugin_item.data(QtCore.Qt.DisplayRole) )) + visibility = True + if hasattr(plugin, "hide_ui_on_process") and 
plugin.hide_ui_on_process: + visibility = False + + if self.isVisible() != visibility: + self.setVisible(visibility) + def on_plugin_action_menu_requested(self, pos): """The user right-clicked on a plug-in __________ From 226903ea0b45d7ff09e5dd284a80566d2b88402d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 12 Jan 2022 20:24:06 +0100 Subject: [PATCH 046/160] hound: flake8 fix --- openpype/hosts/flame/plugins/publish/precollect_instances.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py index a093bb82fa..b4b2ebf63f 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -32,7 +32,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # get openpype tag data marker_data = opfapi.get_segment_data_marker(segment) - self.log.debug("__ marker_data: {}".format(pformat(marker_data))) + self.log.debug("__ marker_data: {}".format( + pformat(marker_data))) if not marker_data: continue From 41f22ff86558b700ea08515340c3134a89c74de5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 04:10:59 +0000 Subject: [PATCH 047/160] build(deps): bump pillow from 8.4.0 to 9.0.0 Bumps [pillow](https://github.com/python-pillow/Pillow) from 8.4.0 to 9.0.0. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/8.4.0...9.0.0) --- updated-dependencies: - dependency-name: pillow dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- poetry.lock | 123 +++++++++++++++++++++++++++++++------------------ pyproject.toml | 2 +- 2 files changed, 80 insertions(+), 45 deletions(-) diff --git a/poetry.lock b/poetry.lock index f513b76611..3b6931f21f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -789,11 +789,11 @@ six = "*" [[package]] name = "pillow" -version = "8.4.0" +version = "9.0.0" description = "Python Imaging Library (Fork)" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "platformdirs" @@ -1580,7 +1580,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "877c1c6292735f495d915fc6aa85450eb20fc63f266a9c6bf7ba1125af3579a5" +content-hash = "79f05a4232aae63e024dd16678ccb173318cbdca5b54e04d320f36fd5d916b5c" [metadata.files] acre = [] @@ -1668,10 +1668,13 @@ babel = [ {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, ] bcrypt = [ + {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b589229207630484aefe5899122fb938a5b017b0f4349f769b8c13e78d99a8fd"}, {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"}, {file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"}, 
+ {file = "bcrypt-3.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a0584a92329210fcd75eb8a3250c5a941633f8bfaf2a18f81009b097732839b7"}, + {file = "bcrypt-3.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:56e5da069a76470679f312a7d3d23deb3ac4519991a0361abc11da837087b61d"}, {file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"}, {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"}, {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"}, @@ -1972,12 +1975,28 @@ log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -1986,14 +2005,27 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2003,6 +2035,12 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = 
"sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2103,47 +2141,38 @@ pathlib2 = [ {file = "pathlib2-2.3.6.tar.gz", hash = "sha256:7d8bcb5555003cdf4a8d2872c538faa3a0f5d20630cb360e518ca3b981795e5f"}, ] pillow = [ - {file = "Pillow-8.4.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:81f8d5c81e483a9442d72d182e1fb6dcb9723f289a57e8030811bac9ea3fef8d"}, - {file = "Pillow-8.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f97cfb1e5a392d75dd8b9fd274d205404729923840ca94ca45a0af57e13dbe6"}, - {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb9fc393f3c61f9054e1ed26e6fe912c7321af2f41ff49d3f83d05bacf22cc78"}, - {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d82cdb63100ef5eedb8391732375e6d05993b765f72cb34311fab92103314649"}, - {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc1afda735a8d109007164714e73771b499768b9bb5afcbbee9d0ff374b43f"}, - {file = "Pillow-8.4.0-cp310-cp310-win32.whl", hash = "sha256:e3dacecfbeec9a33e932f00c6cd7996e62f53ad46fbe677577394aaa90ee419a"}, - {file = "Pillow-8.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:620582db2a85b2df5f8a82ddeb52116560d7e5e6b055095f04ad828d1b0baa39"}, - {file = "Pillow-8.4.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:1bc723b434fbc4ab50bb68e11e93ce5fb69866ad621e3c2c9bdb0cd70e345f55"}, - {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cbcfd54df6caf85cc35264c77ede902452d6df41166010262374155947460c"}, - {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70ad9e5c6cb9b8487280a02c0ad8a51581dcbbe8484ce058477692a27c151c0a"}, - {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a49dc2e2f74e65efaa32b153527fc5ac98508d502fa46e74fa4fd678ed6645"}, - {file = "Pillow-8.4.0-cp36-cp36m-win32.whl", hash = "sha256:93ce9e955cc95959df98505e4608ad98281fff037350d8c2671c9aa86bcf10a9"}, - {file = "Pillow-8.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2e4440b8f00f504ee4b53fe30f4e381aae30b0568193be305256b1462216feff"}, - {file = "Pillow-8.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8c803ac3c28bbc53763e6825746f05cc407b20e4a69d0122e526a582e3b5e153"}, - {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a17b5d948f4ceeceb66384727dde11b240736fddeda54ca740b9b8b1556b29"}, - {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1394a6ad5abc838c5cd8a92c5a07535648cdf6d09e8e2d6df916dfa9ea86ead8"}, - {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:792e5c12376594bfcb986ebf3855aa4b7c225754e9a9521298e460e92fb4a488"}, - {file = "Pillow-8.4.0-cp37-cp37m-win32.whl", hash = "sha256:d99ec152570e4196772e7a8e4ba5320d2d27bf22fdf11743dd882936ed64305b"}, - {file = "Pillow-8.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7b7017b61bbcdd7f6363aeceb881e23c46583739cb69a3ab39cb384f6ec82e5b"}, - {file = "Pillow-8.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:d89363f02658e253dbd171f7c3716a5d340a24ee82d38aab9183f7fdf0cdca49"}, - {file = 
"Pillow-8.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a0956fdc5defc34462bb1c765ee88d933239f9a94bc37d132004775241a7585"}, - {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b7bb9de00197fb4261825c15551adf7605cf14a80badf1761d61e59da347779"}, - {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72b9e656e340447f827885b8d7a15fc8c4e68d410dc2297ef6787eec0f0ea409"}, - {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a4532a12314149d8b4e4ad8ff09dde7427731fcfa5917ff16d0291f13609df"}, - {file = "Pillow-8.4.0-cp38-cp38-win32.whl", hash = "sha256:82aafa8d5eb68c8463b6e9baeb4f19043bb31fefc03eb7b216b51e6a9981ae09"}, - {file = "Pillow-8.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:066f3999cb3b070a95c3652712cffa1a748cd02d60ad7b4e485c3748a04d9d76"}, - {file = "Pillow-8.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:5503c86916d27c2e101b7f71c2ae2cddba01a2cf55b8395b0255fd33fa4d1f1a"}, - {file = "Pillow-8.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4acc0985ddf39d1bc969a9220b51d94ed51695d455c228d8ac29fcdb25810e6e"}, - {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b052a619a8bfcf26bd8b3f48f45283f9e977890263e4571f2393ed8898d331b"}, - {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:493cb4e415f44cd601fcec11c99836f707bb714ab03f5ed46ac25713baf0ff20"}, - {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8831cb7332eda5dc89b21a7bce7ef6ad305548820595033a4b03cf3091235ed"}, - {file = "Pillow-8.4.0-cp39-cp39-win32.whl", hash = "sha256:5e9ac5f66616b87d4da618a20ab0a38324dbe88d8a39b55be8964eb520021e02"}, - {file = "Pillow-8.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:3eb1ce5f65908556c2d8685a8f0a6e989d887ec4057326f6c22b24e8a172c66b"}, - {file = "Pillow-8.4.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ddc4d832a0f0b4c52fff973a0d44b6c99839a9d016fe4e6a1cb8f3eea96479c2"}, - {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3e5ddc44c14042f0844b8cf7d2cd455f6cc80fd7f5eefbe657292cf601d9ad"}, - {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70e94281588ef053ae8998039610dbd71bc509e4acbc77ab59d7d2937b10698"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:3862b7256046fcd950618ed22d1d60b842e3a40a48236a5498746f21189afbbc"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4901622493f88b1a29bd30ec1a2f683782e57c3c16a2dbc7f2595ba01f639df"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c471a734240653a0ec91dec0996696eea227eafe72a33bd06c92697728046b"}, - {file = "Pillow-8.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:244cf3b97802c34c41905d22810846802a3329ddcb93ccc432870243211c79fc"}, - {file = "Pillow-8.4.0.tar.gz", hash = "sha256:b8e2f83c56e141920c39464b852de3719dfbfb6e3c99a2d8da0edf4fb33176ed"}, + {file = "Pillow-9.0.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:113723312215b25c22df1fdf0e2da7a3b9c357a7d24a93ebbe80bfda4f37a8d4"}, + {file = "Pillow-9.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb47a548cea95b86494a26c89d153fd31122ed65255db5dcbc421a2d28eb3379"}, + {file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:31b265496e603985fad54d52d11970383e317d11e18e856971bdbb86af7242a4"}, + {file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d154ed971a4cc04b93a6d5b47f37948d1f621f25de3e8fa0c26b2d44f24e3e8f"}, + {file = "Pillow-9.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fe92813d208ce8aa7d76da878bdc84b90809f79ccbad2a288e9bcbeac1d9bd"}, + {file = "Pillow-9.0.0-cp310-cp310-win32.whl", hash = "sha256:d5dcea1387331c905405b09cdbfb34611050cc52c865d71f2362f354faee1e9f"}, + {file = "Pillow-9.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:52abae4c96b5da630a8b4247de5428f593465291e5b239f3f843a911a3cf0105"}, + {file = "Pillow-9.0.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:72c3110228944019e5f27232296c5923398496b28be42535e3b2dc7297b6e8b6"}, + {file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97b6d21771da41497b81652d44191489296555b761684f82b7b544c49989110f"}, + {file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72f649d93d4cc4d8cf79c91ebc25137c358718ad75f99e99e043325ea7d56100"}, + {file = "Pillow-9.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aaf07085c756f6cb1c692ee0d5a86c531703b6e8c9cae581b31b562c16b98ce"}, + {file = "Pillow-9.0.0-cp37-cp37m-win32.whl", hash = "sha256:03b27b197deb4ee400ed57d8d4e572d2d8d80f825b6634daf6e2c18c3c6ccfa6"}, + {file = "Pillow-9.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a09a9d4ec2b7887f7a088bbaacfd5c07160e746e3d47ec5e8050ae3b2a229e9f"}, + {file = "Pillow-9.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:490e52e99224858f154975db61c060686df8a6b3f0212a678e5d2e2ce24675c9"}, + {file = "Pillow-9.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:500d397ddf4bbf2ca42e198399ac13e7841956c72645513e8ddf243b31ad2128"}, + {file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ebd8b9137630a7bbbff8c4b31e774ff05bbb90f7911d93ea2c9371e41039b52"}, + {file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd0e5062f11cb3e730450a7d9f323f4051b532781026395c4323b8ad055523c4"}, + {file = "Pillow-9.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f3b4522148586d35e78313db4db0df4b759ddd7649ef70002b6c3767d0fdeb7"}, + {file = "Pillow-9.0.0-cp38-cp38-win32.whl", hash = "sha256:0b281fcadbb688607ea6ece7649c5d59d4bbd574e90db6cd030e9e85bde9fecc"}, + {file = "Pillow-9.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5050d681bcf5c9f2570b93bee5d3ec8ae4cf23158812f91ed57f7126df91762"}, + {file = "Pillow-9.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c2067b3bb0781f14059b112c9da5a91c80a600a97915b4f48b37f197895dd925"}, + {file = "Pillow-9.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d16b6196fb7a54aff6b5e3ecd00f7c0bab1b56eee39214b2b223a9d938c50af"}, + {file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98cb63ca63cb61f594511c06218ab4394bf80388b3d66cd61d0b1f63ee0ea69f"}, + {file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc462d24500ba707e9cbdef436c16e5c8cbf29908278af053008d9f689f56dee"}, + {file = "Pillow-9.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3586e12d874ce2f1bc875a3ffba98732ebb12e18fb6d97be482bd62b56803281"}, + {file = "Pillow-9.0.0-cp39-cp39-win32.whl", hash = "sha256:68e06f8b2248f6dc8b899c3e7ecf02c9f413aab622f4d6190df53a78b93d97a5"}, + {file = 
"Pillow-9.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6579f9ba84a3d4f1807c4aab4be06f373017fc65fff43498885ac50a9b47a553"}, + {file = "Pillow-9.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:47f5cf60bcb9fbc46011f75c9b45a8b5ad077ca352a78185bd3e7f1d294b98bb"}, + {file = "Pillow-9.0.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fd8053e1f8ff1844419842fd474fc359676b2e2a2b66b11cc59f4fa0a301315"}, + {file = "Pillow-9.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c5439bfb35a89cac50e81c751317faea647b9a3ec11c039900cd6915831064d"}, + {file = "Pillow-9.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95545137fc56ce8c10de646074d242001a112a92de169986abd8c88c27566a05"}, + {file = "Pillow-9.0.0.tar.gz", hash = "sha256:ee6e2963e92762923956fe5d3479b1fdc3b76c83f290aad131a2f98c3df0593e"}, ] platformdirs = [ {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, @@ -2392,6 +2421,8 @@ pynput = [ ] pyobjc-core = [ {file = "pyobjc-core-7.3.tar.gz", hash = "sha256:5081aedf8bb40aac1a8ad95adac9e44e148a882686ded614adf46bb67fd67574"}, + {file = "pyobjc_core-7.3-1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a1f1e6b457127cbf2b5bd2b94520a7c89fb590b739911eadb2b0499a3a5b0e6f"}, + {file = "pyobjc_core-7.3-1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:ed708cc47bae8b711f81f252af09898a5f986c7a38cec5ad5623d571d328bff8"}, {file = "pyobjc_core-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e93ad769a20b908778fe950f62a843a6d8f0fa71996e5f3cc9fab5ae7d17771"}, {file = "pyobjc_core-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f63fd37bbf3785af4ddb2f86cad5ca81c62cfc7d1c0099637ca18343c3656c1"}, {file = "pyobjc_core-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9b1311f72f2e170742a7ee3a8149f52c35158dc024a21e88d6f1e52ba5d718b"}, @@ -2410,6 +2441,8 @@ pyobjc-framework-applicationservices = [ ] pyobjc-framework-cocoa = [ {file = "pyobjc-framework-Cocoa-7.3.tar.gz", hash = "sha256:b18d05e7a795a3455ad191c3e43d6bfa673c2a4fd480bb1ccf57191051b80b7e"}, + {file = "pyobjc_framework_Cocoa-7.3-1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1e31376806e5de883a1d7c7c87d9ff2a8b09fc05d267e0dfce6e42409fb70c67"}, + {file = "pyobjc_framework_Cocoa-7.3-1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d999387927284346035cb63ebb51f86331abc41f9376f9a6970e7f18207db392"}, {file = "pyobjc_framework_Cocoa-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9edffdfa6dd1f71f21b531c3e61fdd3e4d5d3bf6c5a528c98e88828cd60bac11"}, {file = "pyobjc_framework_Cocoa-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35a6340437a4e0109a302150b7d1f6baf57004ccf74834f9e6062fcafe2fd8d7"}, {file = "pyobjc_framework_Cocoa-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c3886f2608ab3ed02482f8b2ebf9f782b324c559e84b52cfd92dba8a1109872"}, @@ -2418,6 +2451,8 @@ pyobjc-framework-cocoa = [ ] pyobjc-framework-quartz = [ {file = "pyobjc-framework-Quartz-7.3.tar.gz", hash = "sha256:98812844c34262def980bdf60923a875cd43428a8375b6fd53bd2cd800eccf0b"}, + {file = "pyobjc_framework_Quartz-7.3-1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1139bc6874c0f8b58f0b8602015e0994198bc506a6bcec1071208de32b55ed26"}, + {file = "pyobjc_framework_Quartz-7.3-1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d94a3ed7051266c52392ec07d3b5adbf28d4be83341a24df0d88639344dcd84f"}, {file = 
"pyobjc_framework_Quartz-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ef18f5a16511ded65980bf4f5983ea5d35c88224dbad1b3112abd29c60413ea"}, {file = "pyobjc_framework_Quartz-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b41eec8d4b10c7c7e011e2f9051367f5499ef315ba52dfbae573c3a2e05469c"}, {file = "pyobjc_framework_Quartz-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c65456ed045dfe1711d0298734e5a3ad670f8c770f7eb3b19979256c388bdd2"}, diff --git a/pyproject.toml b/pyproject.toml index f9155f05a3..481c93b4f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ jsonschema = "^3.2.0" keyring = "^22.0.1" log4mongo = "^1.7" pathlib2= "^2.3.5" # deadline submit publish job only (single place, maybe not needed?) -Pillow = "^8.3" # only used for slates prototype +Pillow = "^9.0" # only used for slates prototype pyblish-base = "^1.8.8" pynput = "^1.7.2" # idle manager in tray pymongo = "^3.11.2" From 365368554f4ebf6d34ff6139e4193de4edd022d4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Jan 2022 10:38:39 +0100 Subject: [PATCH 048/160] flame: starting render utlis modul --- openpype/hosts/flame/api/render_utils.py | 46 ++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 openpype/hosts/flame/api/render_utils.py diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py new file mode 100644 index 0000000000..01efc9e5b9 --- /dev/null +++ b/openpype/hosts/flame/api/render_utils.py @@ -0,0 +1,46 @@ +import os + +SHARED_PRESET_PATH = '/opt/Autodesk/shared/export/presets/file_sequence' +SHARED_PRESETS = ['Default Jpeg'] + [ + preset[:-4] for preset in os.listdir(SHARED_PRESET_PATH)] + + +def export_clip(export_path, clip, export_preset, **kwargs): + import flame + + # Set exporter + exporter = flame.PyExporter() + exporter.foreground = True + exporter.export_between_marks = True + + if "in_mark" not in kwargs.keys(): + exporter.export_between_marks = False + + # Duplicate the clip to avoid modifying the original clip + duplicate_clip = flame.duplicate(clip) + + # Set export preset path + if export_preset == 'Default Jpeg': + # Get default export preset path + preset_dir = flame.PyExporter.get_presets_dir( + flame.PyExporter.PresetVisibility.Autodesk, + flame.PyExporter.PresetType.Image_Sequence) + export_preset_path = os.path.join( + preset_dir, "Jpeg", "Jpeg (8-bit).xml") + else: + export_preset_path = os.path.join( + SHARED_PRESET_PATH, export_preset + '.xml') + + try: + if kwargs.get("in_mark") and kwargs.get("out_mark"): + duplicate_clip.in_mark = int(kwargs["in_mark"]) + duplicate_clip.in_mark = int(kwargs["out_mark"]) + + exporter.export(duplicate_clip, export_preset_path, export_path) + finally: + print('Exported: {} at {}-{}'.format( + clip.name.get_value(), + duplicate_clip.in_mark, + duplicate_clip.out_mark + )) + flame.delete(duplicate_clip) \ No newline at end of file From aa19d699d3283c7db94d1de5d13a53700bf011a8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Jan 2022 12:03:19 +0100 Subject: [PATCH 049/160] flame: updating render_utils modul --- openpype/hosts/flame/api/render_utils.py | 88 ++++++++++++++++++++---- 1 file changed, 74 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py index 01efc9e5b9..d2e312785f 100644 --- a/openpype/hosts/flame/api/render_utils.py +++ b/openpype/hosts/flame/api/render_utils.py @@ -1,41 +1,99 @@ import os -SHARED_PRESET_PATH = 
'/opt/Autodesk/shared/export/presets/file_sequence' -SHARED_PRESETS = ['Default Jpeg'] + [ - preset[:-4] for preset in os.listdir(SHARED_PRESET_PATH)] +SHARED_PRESET_PATH = '/opt/Autodesk/shared/export/presets' def export_clip(export_path, clip, export_preset, **kwargs): + """Flame exported wrapper + + Args: + export_path (str): exporting directory path + clip (PyClip): flame api object + export_preset (str): name of exporting preset xml file + + Kwargs: + export_type (str)[optional]: name of export type folder + thumb_frame_number (int)[optional]: source frame number + in_mark (int)[optional]: cut in mark + out_mark (int)[optional]: cut out mark + + Raises: + KeyError: Missing input kwarg `thumb_frame_number` + in case `thumbnail` in `export_preset` + KeyError: Missing input kwarg `export_type` + in case of other `export_preset` then `thumbnail` + FileExistsError: Missing export preset in shared folder + """ import flame + in_mark = out_mark = None + # Set exporter exporter = flame.PyExporter() exporter.foreground = True exporter.export_between_marks = True - if "in_mark" not in kwargs.keys(): - exporter.export_between_marks = False - # Duplicate the clip to avoid modifying the original clip duplicate_clip = flame.duplicate(clip) - # Set export preset path - if export_preset == 'Default Jpeg': - # Get default export preset path + if export_preset == 'thumbnail': + thumb_frame_number = kwargs.get("thumb_frame_number") + # make sure it exists in kwargs + if not thumb_frame_number: + raise KeyError( + "Missing key `thumb_frame_number` in input kwargs") + + in_mark = int(thumb_frame_number) + out_mark = int(thumb_frame_number) + 1 + + # In case Thumbnail is needed preset_dir = flame.PyExporter.get_presets_dir( flame.PyExporter.PresetVisibility.Autodesk, flame.PyExporter.PresetType.Image_Sequence) export_preset_path = os.path.join( preset_dir, "Jpeg", "Jpeg (8-bit).xml") + else: + # In case other output is needed + # get compulsory kwargs + export_type = kwargs.get("export_type") + # make sure it exists in kwargs + if not export_type: + raise KeyError( + "Missing key `export_type` in input kwargs") + + # create full shared preset path + shared_preset_dir = os.path.join( + SHARED_PRESET_PATH, export_type + ) + + # check if export preset is available in shared presets + shared_presets = [ + preset[:-4] for preset in os.listdir(shared_preset_dir)] + if export_preset not in shared_presets: + raise FileExistsError( + "Missing preset file `{}` in `{}`".format( + export_preset, + shared_preset_dir + )) + export_preset_path = os.path.join( - SHARED_PRESET_PATH, export_preset + '.xml') + shared_preset_dir, export_preset + '.xml') + + # check if mark in/out is set in kwargs + if kwargs.get("in_mark") and kwargs.get("out_mark"): + in_mark = int(kwargs["in_mark"]) + out_mark = int(kwargs["out_mark"]) + else: + exporter.export_between_marks = False try: - if kwargs.get("in_mark") and kwargs.get("out_mark"): - duplicate_clip.in_mark = int(kwargs["in_mark"]) - duplicate_clip.in_mark = int(kwargs["out_mark"]) + # set in and out marks if they are available + if in_mark and out_mark: + duplicate_clip.in_mark = in_mark + duplicate_clip.out_mark = out_mark + # export with exporter exporter.export(duplicate_clip, export_preset_path, export_path) finally: print('Exported: {} at {}-{}'.format( @@ -43,4 +101,6 @@ def export_clip(export_path, clip, export_preset, **kwargs): duplicate_clip.in_mark, duplicate_clip.out_mark )) - flame.delete(duplicate_clip) \ No newline at end of file + + # delete duplicated clip it is 
not needed anymore + flame.delete(duplicate_clip) From 574466f6dcea84d6a09bff0ac13493d4a5179c36 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Jan 2022 12:52:22 +0100 Subject: [PATCH 050/160] flame: adding export clip to api --- openpype/hosts/flame/api/__init__.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 308682b884..fce59af506 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -59,6 +59,9 @@ from .workio import ( file_extensions, work_root ) +from .render_utils import ( + export_clip +) __all__ = [ # constants @@ -119,5 +122,8 @@ __all__ = [ "current_file", "has_unsaved_changes", "file_extensions", - "work_root" + "work_root", + + # render utils + "export_clip" ] From a2d414c64657d11ddbbacfd546db3613ee91ab85 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Jan 2022 12:53:02 +0100 Subject: [PATCH 051/160] flame: adding exporter plugin --- .../publish/extract_subset_resources.py | 91 +++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 openpype/hosts/flame/plugins/publish/extract_subset_resources.py diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py new file mode 100644 index 0000000000..ffa01eb1b3 --- /dev/null +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -0,0 +1,91 @@ +import os +import pyblish.api +import openpype.api +from openpype.hosts.flame import api as opfapi + + +class ExtractSubsetResources(openpype.api.Extractor): + """ + Extractor for transcoding files from Flame clip + """ + + label = "Extract subset resources" + order = pyblish.api.CollectorOrder + 0.49 + families = ["clip"] + hosts = ["flame"] + + # hide publisher during exporting + hide_ui_on_process = True + + export_presets_mapping = { + "thumbnail": { + "ext": "jpg", + "uniqueName": "thumbnail" + }, + "OpenEXR (16-bit fp DWAA)_custom": { + "ext": "exr", + "preset_type": "file_sequence", + "uniqueName": "exr16fpdwaa" + }, + "QuickTime (H.264 1080p 8Mbits)_custom": { + "ext": "mov", + "preset_type": "movie_file", + "uniqueName": "ftrackpreview" + } + } + + def process(self, instance): + # create representation data + if "representations" not in instance.data: + instance.data["representations"] = [] + + name = instance.data["name"] + clip = instance.data["flameSourceClip"] + staging_dir = self.staging_dir(instance) + + # prepare full export path + export_dir_path = os.path.join( + staging_dir, name + ) + # loop all preset names and + for preset_name, preset_config in self.export_presets_mapping: + kwargs = {} + unique_name = preset_config["uniqueName"] + preset_type = None + + # define kwargs based on preset type + if "thumbnail" in preset_name: + kwargs["thumb_frame_number"] = 2 + else: + preset_type = preset_config["preset_type"] + kwargs.update({ + "in_mark": 2, + "out_mark": 5, + "preset_type": preset_type + }) + + _export_dir_path = os.path.join( + export_dir_path, unique_name + ) + # export + opfapi.export_clip( + _export_dir_path, clip, preset_name, **kwargs) + + # create representation data + representation_data = { + 'name': unique_name, + 'ext': preset_config["ext"], + "stagingDir": _export_dir_path, + } + + files = os.listdir(_export_dir_path) + + if preset_type and preset_type == "movie_file": + representation_data["files"] = files + else: + representation_data["files"] = files.pop() + + 
instance.data["representations"].append(representation_data) + + self.log.info("Added representation: {}".format( + representation_data)) From 9c20580d699c77ad5f5f3462050069172145d3dd Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Jan 2022 15:32:40 +0100 Subject: [PATCH 052/160] flame: export clip to correct frame range --- .../publish/extract_subset_resources.py | 55 ++++++++++++------- .../plugins/publish/precollect_instances.py | 2 +- 2 files changed, 36 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index ffa01eb1b3..ea782845ef 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -15,7 +15,7 @@ class ExtractSubsetResources(openpype.api.Extractor): hosts = ["flame"] # hide publisher during exporting - hide_ui_on_process = True + # hide_ui_on_process = True export_presets_mapping = { "thumbnail": { @@ -39,51 +39,66 @@ class ExtractSubsetResources(openpype.api.Extractor): if "representations" not in instance.data: instance.data["representations"] = [] - name = instance.data["name"] - clip = instance.data["flameSourceClip"] + source_first_frame = instance.data["sourceFirstFrame"] + source_start_handles = instance.data["sourceStartH"] + source_end_handles = instance.data["sourceEndH"] + source_duration_handles = ( + source_end_handles - source_start_handles) + 1 + + clip_data = instance.data["flameSourceClip"] + clip = clip_data["PyClip"] + + in_mark = (source_start_handles - source_first_frame) + 1 + out_mark = in_mark + source_duration_handles + staging_dir = self.staging_dir(instance) - # prepare full export path - export_dir_path = os.path.join( - staging_dir, name - ) # loop all preset names and - for preset_name, preset_config in self.export_presets_mapping: + for preset_name, preset_config in self.export_presets_mapping.items(): kwargs = {} unique_name = preset_config["uniqueName"] preset_type = None # define kwargs based on preset type if "thumbnail" in preset_name: - kwargs["thumb_frame_number"] = 2 + kwargs["thumb_frame_number"] = in_mark + ( + source_duration_handles / 2) else: preset_type = preset_config["preset_type"] kwargs.update({ - "in_mark": 2, - "out_mark": 5, - "preset_type": preset_type + "in_mark": in_mark, + "out_mark": out_mark, + "export_type": preset_type }) - _export_dir_path = os.path.join( - export_dir_path, unique_name + export_dir_path = os.path.join( + staging_dir, unique_name ) + os.makedirs(export_dir_path) + # export opfapi.export_clip( - _export_dir_path, clip, preset_name, **kwargs) + export_dir_path, clip, preset_name, **kwargs) # create representation data representation_data = { 'name': unique_name, 'ext': preset_config["ext"], - "stagingDir": _export_dir_path, + "stagingDir": export_dir_path, } - files = os.listdir(_export_dir_path) + files = os.listdir(export_dir_path) - if preset_type and preset_type == "movie_file": - representation_data["files"] = files - else: + # add files to represetation but add + # imagesequence as list + if ( + preset_type + and preset_type == "movie_file" + or preset_name == "thumbnail" + ): representation_data["files"] = files.pop() + else: + representation_data["files"] = files instance.data["representations"].append(representation_data) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py index 
b4b2ebf63f..bda583fe8e 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -92,7 +92,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "publish": marker_data["publish"], "fps": self.fps, "flameSourceClip": source_clip, - "sourceFirstFrame": first_frame, + "sourceFirstFrame": int(first_frame), "path": file_path }) From 9fa024daae060fbbfc88aecb7b9639bf2cc7c087 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 13 Jan 2022 17:45:58 +0100 Subject: [PATCH 053/160] flame: hide gui when processing plugin --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index ea782845ef..6061c80762 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -10,12 +10,12 @@ class ExtractSubsetResources(openpype.api.Extractor): """ label = "Extract subset resources" - order = pyblish.api.CollectorOrder + 0.49 + order = pyblish.api.ExtractorOrder families = ["clip"] hosts = ["flame"] # hide publisher during exporting - # hide_ui_on_process = True + hide_ui_on_process = True export_presets_mapping = { "thumbnail": { From f323ae61f02d79703a5809384b6e4e090f48a0e5 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 14 Jan 2022 04:13:44 +0100 Subject: [PATCH 054/160] Fix namespace not going back to original namespace when started from inside a namespace --- openpype/hosts/maya/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 52ebcaff64..f100aee7c0 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -733,7 +733,7 @@ def namespaced(namespace, new=True): str: The namespace that is used during the context """ - original = cmds.namespaceInfo(cur=True) + original = cmds.namespaceInfo(cur=True, absoluteName=True) if new: namespace = avalon.maya.lib.unique_namespace(namespace) cmds.namespace(add=namespace) From 4a230b710ea605fce9b9edadb455d0277301032a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 11:53:04 +0100 Subject: [PATCH 055/160] flame: add function to get flame version and root install path --- openpype/hosts/flame/api/__init__.py | 6 +++++- openpype/hosts/flame/api/utils.py | 15 +++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index fce59af506..e7590bb36e 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -30,7 +30,9 @@ from .lib import ( get_padding_from_path ) from .utils import ( - setup + setup, + get_flame_version, + get_flame_install_root ) from .pipeline import ( install, @@ -107,6 +109,8 @@ __all__ = [ # utils "setup", + "get_flame_version", + "get_flame_install_root", # menu "FlameMenuProjectConnect", diff --git a/openpype/hosts/flame/api/utils.py b/openpype/hosts/flame/api/utils.py index b9899900f5..0e40e40aa7 100644 --- a/openpype/hosts/flame/api/utils.py +++ b/openpype/hosts/flame/api/utils.py @@ -125,3 +125,18 @@ def setup(env=None): _sync_utility_scripts(env) log.info("Flame OpenPype wrapper has been installed") + + +def get_flame_version(): + import flame + + return { + "full": flame.get_version(), + 
"major": flame.get_version_major(), + "minor": flame.get_version_minor(), + "patch": flame.get_version_patch() + } + + +def get_flame_install_root(): + return "/opt/Autodesk" \ No newline at end of file From cc20a22e3ad70639d58c55e65e68b67e24264fca Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 11:53:35 +0100 Subject: [PATCH 056/160] flame: add function to maintain object duplication --- openpype/hosts/flame/api/__init__.py | 4 +++- openpype/hosts/flame/api/lib.py | 23 +++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index e7590bb36e..7f516fb11f 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -27,7 +27,8 @@ from .lib import ( get_clips_in_reels, get_reformated_path, get_frame_from_path, - get_padding_from_path + get_padding_from_path, + maintained_object_duplication ) from .utils import ( setup, @@ -93,6 +94,7 @@ __all__ = [ "get_reformated_path", "get_frame_from_path", "get_padding_from_path", + "maintained_object_duplication", # pipeline "install", diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index b963a1cb39..800afebf41 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -675,3 +675,26 @@ def get_frame_from_path(path): return found.pop() else: return None + + + +@contextlib.contextmanager +def maintained_object_duplication(item): + """Maintain input item duplication + + Attributes: + item (any flame.PyObject): python api object + + Yield: + duplicate input PyObject type + """ + import flame + # Duplicate the clip to avoid modifying the original clip + duplicate = flame.duplicate(item) + + try: + # do the operation on selected segments + yield duplicate + finally: + # delete the item at the end + flame.delete(duplicate) From f6ab7f2cbaef91afa7ca5a35f3a540c22b7529e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 12:06:35 +0100 Subject: [PATCH 057/160] flame: adding settings for `ExtractSubsetResources` plugin --- .../defaults/project_settings/flame.json | 12 ++++ .../projects_schema/schema_project_flame.json | 55 +++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index b6fbdecc95..ed54d631be 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -16,5 +16,17 @@ "handleStart": 10, "handleEnd": 10 } + }, + "publish": { + "ExtractSubsetResources": { + "export_presets_mapping": { + "exr16fpdwaa": { + "ext": "exr", + "xmlPresetDir": "", + "xmlPresetFile": "OpenEXR (16-bit fp DWAA).xml", + "representationTags": [] + } + } + } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index d713c37620..6ca5fc049d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -119,6 +119,61 @@ ] } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "ExtractSubsetResources", + "label": "Extract Subset Resources", + "is_group": true, + "children": [ + { + "key": 
"export_presets_mapping", + "label": "Export presets mapping", + "type": "dict-modifiable", + "highlight_content": true, + "object_type": { + "type": "dict", + "children": [ + { + "key": "ext", + "label": "Output extension", + "type": "text" + }, + { + "key": "xmlPresetFile", + "label": "XML preset file (with ext)", + "type": "text" + }, + { + "key": "xmlPresetDir", + "label": "XML preset folder (optional)", + "type": "text" + }, + { + "type": "separator" + }, + { + "type": "list", + "key": "representationTags", + "label": "Add representation tags", + "object_type": { + "type": "text", + "multiline": false + } + } + ] + } + } + ] + } + ] } ] } From 0e96a2e3b1d4f0481ec425ecfa0275e885185099 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 12:50:55 +0100 Subject: [PATCH 058/160] flame: update utils modules --- openpype/hosts/flame/api/__init__.py | 6 +- openpype/hosts/flame/api/render_utils.py | 135 +++++++++++++---------- openpype/hosts/flame/api/utils.py | 2 +- 3 files changed, 82 insertions(+), 61 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 7f516fb11f..656ba11617 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -63,7 +63,8 @@ from .workio import ( work_root ) from .render_utils import ( - export_clip + export_clip, + get_preset_path_by_xml_name ) __all__ = [ @@ -131,5 +132,6 @@ __all__ = [ "work_root", # render utils - "export_clip" + "export_clip", + "get_preset_path_by_xml_name" ] diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py index d2e312785f..1cc94f6548 100644 --- a/openpype/hosts/flame/api/render_utils.py +++ b/openpype/hosts/flame/api/render_utils.py @@ -1,18 +1,15 @@ import os -SHARED_PRESET_PATH = '/opt/Autodesk/shared/export/presets' - -def export_clip(export_path, clip, export_preset, **kwargs): +def export_clip(export_path, clip, preset_path, **kwargs): """Flame exported wrapper Args: export_path (str): exporting directory path clip (PyClip): flame api object - export_preset (str): name of exporting preset xml file + preset_path (str): full export path to xml file Kwargs: - export_type (str)[optional]: name of export type folder thumb_frame_number (int)[optional]: source frame number in_mark (int)[optional]: cut in mark out_mark (int)[optional]: cut out mark @@ -20,8 +17,6 @@ def export_clip(export_path, clip, export_preset, **kwargs): Raises: KeyError: Missing input kwarg `thumb_frame_number` in case `thumbnail` in `export_preset` - KeyError: Missing input kwarg `export_type` - in case of other `export_preset` then `thumbnail` FileExistsError: Missing export preset in shared folder """ import flame @@ -33,11 +28,8 @@ def export_clip(export_path, clip, export_preset, **kwargs): exporter.foreground = True exporter.export_between_marks = True - # Duplicate the clip to avoid modifying the original clip - duplicate_clip = flame.duplicate(clip) - - if export_preset == 'thumbnail': - thumb_frame_number = kwargs.get("thumb_frame_number") + if kwargs.get("thumb_frame_number"): + thumb_frame_number = kwargs["thumb_frame_number"] # make sure it exists in kwargs if not thumb_frame_number: raise KeyError( @@ -46,61 +38,88 @@ def export_clip(export_path, clip, export_preset, **kwargs): in_mark = int(thumb_frame_number) out_mark = int(thumb_frame_number) + 1 - # In case Thumbnail is needed - preset_dir = flame.PyExporter.get_presets_dir( - flame.PyExporter.PresetVisibility.Autodesk, - 
flame.PyExporter.PresetType.Image_Sequence) - export_preset_path = os.path.join( - preset_dir, "Jpeg", "Jpeg (8-bit).xml") - + elif kwargs.get("in_mark") and kwargs.get("out_mark"): + in_mark = int(kwargs["in_mark"]) + out_mark = int(kwargs["out_mark"]) else: - # In case other output is needed - # get compulsory kwargs - export_type = kwargs.get("export_type") - # make sure it exists in kwargs - if not export_type: - raise KeyError( - "Missing key `export_type` in input kwargs") - - # create full shared preset path - shared_preset_dir = os.path.join( - SHARED_PRESET_PATH, export_type - ) - - # check if export preset is available in shared presets - shared_presets = [ - preset[:-4] for preset in os.listdir(shared_preset_dir)] - if export_preset not in shared_presets: - raise FileExistsError( - "Missing preset file `{}` in `{}`".format( - export_preset, - shared_preset_dir - )) - - export_preset_path = os.path.join( - shared_preset_dir, export_preset + '.xml') - - # check if mark in/out is set in kwargs - if kwargs.get("in_mark") and kwargs.get("out_mark"): - in_mark = int(kwargs["in_mark"]) - out_mark = int(kwargs["out_mark"]) - else: - exporter.export_between_marks = False + exporter.export_between_marks = False try: # set in and out marks if they are available if in_mark and out_mark: - duplicate_clip.in_mark = in_mark - duplicate_clip.out_mark = out_mark + clip.in_mark = in_mark + clip.out_mark = out_mark # export with exporter - exporter.export(duplicate_clip, export_preset_path, export_path) + exporter.export(clip, preset_path, export_path) finally: print('Exported: {} at {}-{}'.format( clip.name.get_value(), - duplicate_clip.in_mark, - duplicate_clip.out_mark + clip.in_mark, + clip.out_mark )) - # delete duplicated clip it is not needed anymore - flame.delete(duplicate_clip) + +def get_preset_path_by_xml_name(xml_preset_name): + def _search_path(root): + output = [] + for root, dirs, files in os.walk(root): + for f in files: + if f != xml_preset_name: + continue + file_path = os.path.join(root, f) + output.append(file_path) + return output + + def _validate_results(results): + if results and len(results) == 1: + return results.pop() + elif results and len(results) > 1: + print(( + "More matching presets for `{}`: /n" + "{}").format(xml_preset_name, results)) + return results.pop() + else: + return None + + from .utils import ( + get_flame_install_root, + get_flame_version + ) + + # get actual flame version and install path + _version = get_flame_version()["full"] + _install_root = get_flame_install_root() + + # search path templates + shared_search_root = "{install_root}/shared/export/presets" + install_search_root = ( + "{install_root}/presets/{version}/export/presets/flame") + + # fill templates + shared_search_root = shared_search_root.format( + install_root=_install_root + ) + install_search_root = install_search_root.format( + install_root=_install_root, + version=_version + ) + + # get search results + shared_results = _search_path(shared_search_root) + installed_results = _search_path(install_search_root) + + # first try to return shared results + shared_preset_path = _validate_results(shared_results) + + if shared_preset_path: + return os.path.dirname(shared_preset_path) + + # then try installed results + installed_preset_path = _validate_results(installed_results) + + if installed_preset_path: + return os.path.dirname(installed_preset_path) + + # if nothing found then return None + return False diff --git a/openpype/hosts/flame/api/utils.py b/openpype/hosts/flame/api/utils.py 
index 0e40e40aa7..9939371358 100644 --- a/openpype/hosts/flame/api/utils.py +++ b/openpype/hosts/flame/api/utils.py @@ -139,4 +139,4 @@ def get_flame_version(): def get_flame_install_root(): - return "/opt/Autodesk" \ No newline at end of file + return "/opt/Autodesk" From 183acf4bd3b34dd046401434c67932ad9e8b6050 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 12:51:48 +0100 Subject: [PATCH 059/160] flame: update export plugin with more dynamic preset path abstraction --- .../publish/extract_subset_resources.py | 155 +++++++++++------- 1 file changed, 96 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 6061c80762..3a8fd631d8 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,4 +1,5 @@ import os +from copy import deepcopy import pyblish.api import openpype.api from openpype.hosts.flame import api as opfapi @@ -14,25 +15,29 @@ class ExtractSubsetResources(openpype.api.Extractor): families = ["clip"] hosts = ["flame"] + # plugin defaults + default_presets = { + "thumbnail": { + "ext": "jpg", + "xmlPresetFile": "Jpeg (8-bit).xml", + "xmlPresetDir": "", + "representationTags": ["thumbnail"] + }, + "ftrackpreview": { + "ext": "mov", + "xmlPresetFile": "Apple iPad (1920x1080).xml", + "xmlPresetDir": "", + "representationTags": [ + "review", + "delete" + ] + } + } # hide publisher during exporting hide_ui_on_process = True - export_presets_mapping = { - "thumbnail": { - "ext": "jpg", - "uniqueName": "thumbnail" - }, - "OpenEXR (16-bit fp DWAA)_custom": { - "ext": "exr", - "preset_type": "file_sequence", - "uniqueName": "exr16fpdwaa" - }, - "QuickTime (H.264 1080p 8Mbits)_custom": { - "ext": "mov", - "preset_type": "movie_file", - "uniqueName": "ftrackpreview" - } - } + # settings + export_presets_mapping = {} def process(self, instance): # create representation data @@ -53,54 +58,86 @@ class ExtractSubsetResources(openpype.api.Extractor): staging_dir = self.staging_dir(instance) - # loop all preset names and - for preset_name, preset_config in self.export_presets_mapping.items(): - kwargs = {} - unique_name = preset_config["uniqueName"] - preset_type = None + # add default preset type for thumbnail and reviewable video + # update them with settings and overide in case the same + # are found in there + export_presets = deepcopy(self.default_presets) + export_presets.update(self.export_presets_mapping) - # define kwargs based on preset type - if "thumbnail" in preset_name: - kwargs["thumb_frame_number"] = in_mark + ( - source_duration_handles / 2) - else: - preset_type = preset_config["preset_type"] - kwargs.update({ - "in_mark": in_mark, - "out_mark": out_mark, - "export_type": preset_type - }) + # with maintained duplication loop all presets + with opfapi.maintained_object_duplication(clip) as duplclip: + # loop all preset names and + for unique_name, preset_config in export_presets.items(): + kwargs = {} + preset_file = preset_config["xmlPresetFile"] + preset_dir = preset_config["xmlPresetDir"] - export_dir_path = os.path.join( - staging_dir, unique_name - ) - os.makedirs(export_dir_path) + # validate xml preset file is filled + if preset_file == "": + raise ValueError( + ("Check Settings for {} preset: " + "`XML preset file` is not filled").format( + unique_name) + ) - # export - opfapi.export_clip( - export_dir_path, clip, preset_name, 
**kwargs) + # resolve xml preset dir if not filled + if preset_dir == "": + preset_dir = opfapi.get_preset_path_by_xml_name( + preset_file) - # create representation data - representation_data = { - 'name': unique_name, - 'ext': preset_config["ext"], - "stagingDir": export_dir_path, - } + if not preset_dir: + raise ValueError( + ("Check Settings for {} preset: " + "`XML preset file` {} is not found").format( + unique_name, preset_file) + ) - files = os.listdir(export_dir_path) + # create preset path + preset_path = os.path.join( + preset_dir, preset_file + ) - # add files to represetation but add - # imagesequence as list - if ( - preset_type - and preset_type == "movie_file" - or preset_name == "thumbnail" - ): - representation_data["files"] = files.pop() - else: - representation_data["files"] = files + # define kwargs based on preset type + if "thumbnail" in unique_name: + kwargs["thumb_frame_number"] = in_mark + ( + source_duration_handles / 2) + else: + kwargs.update({ + "in_mark": in_mark, + "out_mark": out_mark + }) - instance.data["representations"].append(representation_data) + export_dir_path = os.path.join( + staging_dir, unique_name + ) + os.makedirs(export_dir_path) - self.log.info("Added representation: {}".format( - representation_data)) + # export + opfapi.export_clip( + export_dir_path, duplclip, preset_path, **kwargs) + + # create representation data + representation_data = { + "name": unique_name, + "outputName": unique_name, + "ext": preset_config["ext"], + "stagingDir": export_dir_path, + "tags": preset_config["representationTags"] + } + + files = os.listdir(export_dir_path) + + # add files to represetation but add + # imagesequence as list + if ( + "movie_file" in preset_path + or unique_name == "thumbnail" + ): + representation_data["files"] = files.pop() + else: + representation_data["files"] = files + + instance.data["representations"].append(representation_data) + + self.log.info("Added representation: {}".format( + representation_data)) From aa39f98ae626b702d02b5b57b786ee22bd0c8252 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 12:57:26 +0100 Subject: [PATCH 060/160] flame: add bool to settings to control if range will be added to repres --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 2 ++ openpype/settings/defaults/project_settings/flame.json | 1 + .../schemas/projects_schema/schema_project_flame.json | 5 +++++ 3 files changed, 8 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 3a8fd631d8..b2a737cbcb 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -21,12 +21,14 @@ class ExtractSubsetResources(openpype.api.Extractor): "ext": "jpg", "xmlPresetFile": "Jpeg (8-bit).xml", "xmlPresetDir": "", + "representationAddRange": False, "representationTags": ["thumbnail"] }, "ftrackpreview": { "ext": "mov", "xmlPresetFile": "Apple iPad (1920x1080).xml", "xmlPresetDir": "", + "representationAddRange": False, "representationTags": [ "review", "delete" diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index ed54d631be..dfecd8a12e 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -24,6 +24,7 @@ "ext": "exr", "xmlPresetDir": "", "xmlPresetFile": "OpenEXR (16-bit fp DWAA).xml", 
+ "representationAddRange": false, "representationTags": [] } } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 6ca5fc049d..8ad2b11616 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -159,6 +159,11 @@ { "type": "separator" }, + { + "type": "boolean", + "key": "representationAddRange", + "label": "Add frame range to representation" + }, { "type": "list", "key": "representationTags", From 26c3ba7e1be4e7a94c3cf8d6869e813dd716bc25 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 17:37:19 +0100 Subject: [PATCH 061/160] flame: add frame ranges to representation --- .../plugins/publish/extract_subset_resources.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index b2a737cbcb..3495309409 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -28,7 +28,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "ext": "mov", "xmlPresetFile": "Apple iPad (1920x1080).xml", "xmlPresetDir": "", - "representationAddRange": False, + "representationAddRange": True, "representationTags": [ "review", "delete" @@ -46,6 +46,9 @@ class ExtractSubsetResources(openpype.api.Extractor): if "representations" not in instance.data: instance.data["representations"] = [] + frame_start = instance.data["frameStart"] + handle_start = instance.data["handleStart"] + frame_start_handle = frame_start - handle_start source_first_frame = instance.data["sourceFirstFrame"] source_start_handles = instance.data["sourceStartH"] source_end_handles = instance.data["sourceEndH"] @@ -139,6 +142,15 @@ class ExtractSubsetResources(openpype.api.Extractor): else: representation_data["files"] = files + # add frame range + if preset_config["representationAddRange"]: + representation_data.update({ + "frameStart": frame_start_handle, + "frameEnd": ( + frame_start_handle + source_duration_handles), + "fps": instance.data["fps"] + }) + instance.data["representations"].append(representation_data) self.log.info("Added representation: {}".format( From cbfb3e734eba83852846b6d047f18bf53c752012 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 14 Jan 2022 04:10:55 +0100 Subject: [PATCH 062/160] Fix not unique group name error (cherry picked from commit f1b7aed767b76bced648b785dc3d40a68b36db7b) --- openpype/hosts/maya/plugins/load/load_reference.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py index dd64fd0a16..2cc24f1360 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -63,6 +63,8 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): if current_namespace != ":": group_name = current_namespace + ":" + group_name + group_name = "|" + group_name + self[:] = new_nodes if attach_to_root: From 6e8e1173d8dcfeccbc944b976a1e0504642ea0cf Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 14 Jan 2022 17:42:59 +0100 Subject: [PATCH 063/160] Fix Load VDB to V-Ray for Maya (cherry picked from commit 
d53db6cd2b02a9b4ac251c70d600b47cc5e2493c) --- .../maya/plugins/load/load_vdb_to_vray.py | 239 ++++++++++++++++-- 1 file changed, 223 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py index 80b453bd13..151731c13c 100644 --- a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -2,6 +2,72 @@ from avalon import api from openpype.api import get_project_settings import os +from maya import cmds + +# List of 3rd Party Channels Mapping names for VRayVolumeGrid +# See: https://docs.chaosgroup.com/display/VRAY4MAYA/Input +# #Input-3rdPartyChannelsMapping +THIRD_PARTY_CHANNELS = { + 2: "Smoke", + 1: "Temperature", + 10: "Fuel", + 4: "Velocity.x", + 5: "Velocity.y", + 6: "Velocity.z", + 7: "Red", + 8: "Green", + 9: "Blue", + 14: "Wavelet Energy", + 19: "Wavelet.u", + 20: "Wavelet.v", + 21: "Wavelet.w", + # These are not in UI or documentation but V-Ray does seem to set these. + 15: "AdvectionOrigin.x", + 16: "AdvectionOrigin.y", + 17: "AdvectionOrigin.z", + +} + + +def _fix_duplicate_vvg_callbacks(): + """Workaround to kill duplicate VRayVolumeGrids attribute callbacks. + + This fixes a huge lag in Maya on switching 3rd Party Channels Mappings + or to different .vdb file paths because it spams an attribute changed + callback: `vvgUserChannelMappingsUpdateUI`. + + ChaosGroup bug ticket: 154-008-9890 + + Found with: + - Maya 2019.2 on Windows 10 + - V-Ray: V-Ray Next for Maya, update 1 version 4.12.01.00001 + + Bug still present in: + - Maya 2022.1 on Windows 10 + - V-Ray 5 for Maya, Update 2.1 (v5.20.01 from Dec 16 2021) + + """ + # todo(roy): Remove when new V-Ray release fixes duplicate calls + + jobs = cmds.scriptJob(listJobs=True) + + matched = set() + for entry in jobs: + # Remove the number + index, callback = entry.split(":", 1) + callback = callback.strip() + + # Detect whether it is a `vvgUserChannelMappingsUpdateUI` + # attribute change callback + if callback.startswith('"-runOnce" 1 "-attributeChange" "'): + if '"vvgUserChannelMappingsUpdateUI(' in callback: + if callback in matched: + # If we've seen this callback before then + # delete the duplicate callback + cmds.scriptJob(kill=int(index)) + else: + matched.add(callback) + class LoadVDBtoVRay(api.Loader): @@ -14,15 +80,24 @@ class LoadVDBtoVRay(api.Loader): def load(self, context, name, namespace, data): - from maya import cmds import avalon.maya.lib as lib from avalon.maya.pipeline import containerise + assert os.path.exists(self.fname), ( + "Path does not exist: %s" % self.fname + ) + try: family = context["representation"]["context"]["family"] except ValueError: family = "vdbcache" + # Ensure V-ray is loaded with the vrayvolumegrid + if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): + cmds.loadPlugin("vrayformaya") + if not cmds.pluginInfo("vrayvolumegrid", query=True, loaded=True): + cmds.loadPlugin("vrayvolumegrid") + # Check if viewport drawing engine is Open GL Core (compat) render_engine = None compatible = "OpenGLCoreProfileCompat" @@ -30,13 +105,11 @@ class LoadVDBtoVRay(api.Loader): render_engine = cmds.optionVar(query="vp2RenderingEngine") if not render_engine or render_engine != compatible: - raise RuntimeError("Current scene's settings are incompatible." - "See Preferences > Display > Viewport 2.0 to " - "set the render engine to '%s'" % compatible) + self.log.warning("Current scene's settings are incompatible." 
+ "See Preferences > Display > Viewport 2.0 to " + "set the render engine to '%s'" % compatible) asset = context['asset'] - version = context["version"] - asset_name = asset["name"] namespace = namespace or lib.unique_namespace( asset_name + "_", @@ -45,7 +118,7 @@ class LoadVDBtoVRay(api.Loader): ) # Root group - label = "{}:{}".format(namespace, name) + label = "{}:{}_VDB".format(namespace, name) root = cmds.group(name=label, empty=True) settings = get_project_settings(os.environ['AVALON_PROJECT']) @@ -55,20 +128,25 @@ class LoadVDBtoVRay(api.Loader): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + float(c[0])/255, + float(c[1])/255, + float(c[2])/255 ) - # Create VR + # Create VRayVolumeGrid grid_node = cmds.createNode("VRayVolumeGrid", - name="{}VVGShape".format(label), + name="{}Shape".format(label), parent=root) - # Set attributes - cmds.setAttr("{}.inFile".format(grid_node), self.fname, type="string") - cmds.setAttr("{}.inReadOffset".format(grid_node), - version["startFrames"]) + # Ensure .currentTime is connected to time1.outTime + cmds.connectAttr("time1.outTime", grid_node + ".currentTime") + + # Set path + self._set_path(grid_node, self.fname, show_preset_popup=True) + + # Lock the shape node so the user can't delete the transform/shape + # as if it was referenced + cmds.lockNode(grid_node, lock=True) nodes = [root, grid_node] self[:] = nodes @@ -79,3 +157,132 @@ class LoadVDBtoVRay(api.Loader): nodes=nodes, context=context, loader=self.__class__.__name__) + + def _set_path(self, grid_node, path, show_preset_popup=True): + + from openpype.hosts.maya.api.lib import attribute_values + from maya import cmds + + def _get_filename_from_folder(path): + # Using the sequence of .vdb files we check the frame range, etc. + # to set the filename with #### padding. + files = sorted(x for x in os.listdir(path) if x.endswith(".vdb")) + if not files: + raise RuntimeError("Couldn't find .vdb files in: %s" % path) + + if len(files) == 1: + # Ensure check for single file is also done in folder + fname = files[0] + else: + # Sequence + from avalon.vendor import clique + # todo: check support for negative frames as input + collections, remainder = clique.assemble(files) + assert len(collections) == 1, ( + "Must find a single image sequence, " + "found: %s" % (collections,) + ) + collection = collections[0] + + fname = collection.format('{head}{{padding}}{tail}') + padding = collection.padding + if padding == 0: + # Clique doesn't provide padding if the frame number never + # starts with a zero and thus has never any visual padding. + # So we fall back to the smallest frame number as padding. + padding = min(len(str(i)) for i in collection.indexes) + + # Supply frame/padding with # signs + padding_str = "#" * padding + fname = fname.format(padding=padding_str) + + return os.path.join(path, fname) + + # The path is either a single file or sequence in a folder so + # we do a quick lookup for our files + if os.path.isfile(path): + path = os.path.dirname(path) + path = _get_filename_from_folder(path) + + # Even when not applying a preset V-Ray will reset the 3rd Party + # Channels Mapping of the VRayVolumeGrid when setting the .inPath + # value. As such we try and preserve the values ourselves. 
+ # Reported as ChaosGroup bug ticket: 154-011-2909  + # todo(roy): Remove when new V-Ray release preserves values + original_user_mapping = cmds.getAttr(grid_node + ".usrchmap") or "" + + # Workaround for V-Ray bug: fix lag on path change, see function + _fix_duplicate_vvg_callbacks() + + # Suppress preset pop-up if we want. + popup_attr = "{0}.inDontOfferPresets".format(grid_node) + popup = {popup_attr: not show_preset_popup} + with attribute_values(popup): + cmds.setAttr(grid_node + ".inPath", path, type="string") + + # Reapply the 3rd Party channels user mapping when no preset popup + # was shown to the user + if not show_preset_popup: + channels = cmds.getAttr(grid_node + ".usrchmapallch").split(";") + channels = set(channels) # optimize lookup + restored_mapping = "" + for entry in original_user_mapping.split(";"): + if not entry: + # Ignore empty entries + continue + + # If 3rd Party Channels selection channel still exists then + # add it again. + index, channel = entry.split(",") + attr = THIRD_PARTY_CHANNELS.get(int(index), + # Fallback for when a mapping + # was set that is not in the + # documentation + "???") + if channel in channels: + restored_mapping += entry + ";" + else: + self.log.warning("Can't preserve '%s' mapping due to " + "missing channel '%s' on node: " + "%s" % (attr, channel, grid_node)) + + if restored_mapping: + cmds.setAttr(grid_node + ".usrchmap", + restored_mapping, + type="string") + + def update(self, container, representation): + + path = api.get_representation_path(representation) + + # Find VRayVolumeGrid + members = cmds.sets(container['objectName'], query=True) + grid_nodes = cmds.ls(members, type="VRayVolumeGrid", long=True) + assert len(grid_nodes) > 0, "This is a bug" + + # Update the VRayVolumeGrid + for grid_node in grid_nodes: + self._set_path(grid_node, path=path, show_preset_popup=False) + + # Update container representation + cmds.setAttr(container["objectName"] + ".representation", + str(representation["_id"]), + type="string") + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + + # Get all members of the avalon container, ensure they are unlocked + # and delete everything + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass From ea469e213031bedc412a9368e4668e6b0d18bc98 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 17:55:43 +0100 Subject: [PATCH 064/160] flame: fixing extract exporter --- .../plugins/publish/extract_subset_resources.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 3495309409..8bdcf989b6 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -76,6 +76,7 @@ class ExtractSubsetResources(openpype.api.Extractor): kwargs = {} preset_file = preset_config["xmlPresetFile"] preset_dir = preset_config["xmlPresetDir"] + repre_tags = preset_config["representationTags"] # validate xml preset file is filled if preset_file == "": @@ -98,9 +99,9 @@ class ExtractSubsetResources(openpype.api.Extractor): ) # create preset path - preset_path = 
os.path.join( + preset_path = str(os.path.join( preset_dir, preset_file - ) + )) # define kwargs based on preset type if "thumbnail" in unique_name: @@ -112,9 +113,9 @@ class ExtractSubsetResources(openpype.api.Extractor): "out_mark": out_mark }) - export_dir_path = os.path.join( + export_dir_path = str(os.path.join( staging_dir, unique_name - ) + )) os.makedirs(export_dir_path) # export @@ -127,7 +128,7 @@ class ExtractSubsetResources(openpype.api.Extractor): "outputName": unique_name, "ext": preset_config["ext"], "stagingDir": export_dir_path, - "tags": preset_config["representationTags"] + "tags": repre_tags } files = os.listdir(export_dir_path) @@ -153,5 +154,9 @@ class ExtractSubsetResources(openpype.api.Extractor): instance.data["representations"].append(representation_data) + # add review family if found in tags + if "review" in repre_tags: + instance.data["families"].append("review") + self.log.info("Added representation: {}".format( representation_data)) From 25b54be8727d35a608e64236f1189468ca9fe4ae Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 14 Jan 2022 17:56:16 +0100 Subject: [PATCH 065/160] flame: adding host to some extract plugins --- openpype/plugins/publish/extract_burnin.py | 3 ++- openpype/plugins/publish/extract_otio_audio_tracks.py | 2 +- openpype/plugins/publish/extract_otio_review.py | 2 +- openpype/plugins/publish/extract_otio_trimming_video.py | 2 +- openpype/plugins/publish/extract_review.py | 3 ++- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 459c66ee43..7ff1b24689 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -48,7 +48,8 @@ class ExtractBurnin(openpype.api.Extractor): "tvpaint", "webpublisher", "aftereffects", - "photoshop" + "photoshop", + "flame" # "resolve" ] optional = True diff --git a/openpype/plugins/publish/extract_otio_audio_tracks.py b/openpype/plugins/publish/extract_otio_audio_tracks.py index be0bae5cdc..00c1748cdc 100644 --- a/openpype/plugins/publish/extract_otio_audio_tracks.py +++ b/openpype/plugins/publish/extract_otio_audio_tracks.py @@ -19,7 +19,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): order = pyblish.api.ExtractorOrder - 0.44 label = "Extract OTIO Audio Tracks" - hosts = ["hiero", "resolve"] + hosts = ["hiero", "resolve", "flame"] # FFmpeg tools paths ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") diff --git a/openpype/plugins/publish/extract_otio_review.py b/openpype/plugins/publish/extract_otio_review.py index ed2ba017d5..79d5b2fc8f 100644 --- a/openpype/plugins/publish/extract_otio_review.py +++ b/openpype/plugins/publish/extract_otio_review.py @@ -41,7 +41,7 @@ class ExtractOTIOReview(openpype.api.Extractor): order = api.ExtractorOrder - 0.45 label = "Extract OTIO review" families = ["review"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "flame"] # plugin default attributes temp_file_head = "tempFile." 
diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py index 3e2d39c99c..30b57e2c69 100644 --- a/openpype/plugins/publish/extract_otio_trimming_video.py +++ b/openpype/plugins/publish/extract_otio_trimming_video.py @@ -19,7 +19,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor): order = api.ExtractorOrder label = "Extract OTIO trim longer video" families = ["trim"] - hosts = ["resolve", "hiero"] + hosts = ["resolve", "hiero", "flame"] def process(self, instance): self.staging_dir = self.staging_dir(instance) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index b6c2e49385..b27cca0085 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -51,7 +51,8 @@ class ExtractReview(pyblish.api.InstancePlugin): "tvpaint", "resolve", "webpublisher", - "aftereffects" + "aftereffects", + "flame" ] # Supported extensions From 8cb71742921357aae79e8f66c950d4aa3c1bd8bc Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 14 Jan 2022 17:56:37 +0100 Subject: [PATCH 066/160] Cosmetics --- .../hosts/maya/plugins/load/load_vdb_to_vray.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py index 151731c13c..ed561e1131 100644 --- a/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/openpype/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -84,7 +84,7 @@ class LoadVDBtoVRay(api.Loader): from avalon.maya.pipeline import containerise assert os.path.exists(self.fname), ( - "Path does not exist: %s" % self.fname + "Path does not exist: %s" % self.fname ) try: @@ -128,10 +128,9 @@ class LoadVDBtoVRay(api.Loader): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - float(c[0])/255, - float(c[1])/255, - float(c[2])/255 - ) + float(c[0]) / 255, + float(c[1]) / 255, + float(c[2]) / 255) # Create VRayVolumeGrid grid_node = cmds.createNode("VRayVolumeGrid", @@ -179,8 +178,8 @@ class LoadVDBtoVRay(api.Loader): # todo: check support for negative frames as input collections, remainder = clique.assemble(files) assert len(collections) == 1, ( - "Must find a single image sequence, " - "found: %s" % (collections,) + "Must find a single image sequence, " + "found: %s" % (collections,) ) collection = collections[0] From abf0d2b6a0c1722a0fdf1a8e14c28da99c553ec9 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 14 Jan 2022 17:44:38 +0100 Subject: [PATCH 067/160] Add houdini for validate version (cherry picked from commit bc55371de067c75511fb7f906989cd4f3c2f5aaf) --- openpype/plugins/publish/validate_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/validate_version.py b/openpype/plugins/publish/validate_version.py index e48ce6e3c3..b94152ef2d 100644 --- a/openpype/plugins/publish/validate_version.py +++ b/openpype/plugins/publish/validate_version.py @@ -10,7 +10,7 @@ class ValidateVersion(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder label = "Validate Version" - hosts = ["nuke", "maya", "blender", "standalonepublisher"] + hosts = ["nuke", "maya", "houdini", "blender", "standalonepublisher"] optional = False active = True From 9a3a709149108efa45770a094ac1dacee9730d70 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Fri, 14 Jan 2022 17:45:08 +0100 Subject: [PATCH 068/160] Preserve VDB frame 
numbers on Integrating (cherry picked from commit 09fe8b3540f8a3cc2936d8c07e7bdf72a4690c26) --- openpype/hosts/houdini/plugins/publish/collect_instances.py | 3 +++ openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index ac081ac297..12d118f0cc 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -74,6 +74,9 @@ class CollectInstances(pyblish.api.ContextPlugin): instance = context.create_instance(label) + # Include `families` using `family` data + instance.data["families"] = [instance.data["family"]] + instance[:] = [node] instance.data.update(data) diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 78794acc97..113e1b0bcb 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -37,5 +37,7 @@ class ExtractVDBCache(openpype.api.Extractor): "ext": "vdb", "files": output, "stagingDir": staging_dir, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], } instance.data["representations"].append(representation) From dfb42dc78c3d8f7d2fa83e52471917315bfa57df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 17:09:17 +0000 Subject: [PATCH 069/160] build(deps): bump follow-redirects from 1.14.4 to 1.14.7 in /website Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.14.4 to 1.14.7. - [Release notes](https://github.com/follow-redirects/follow-redirects/releases) - [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.14.4...v1.14.7) --- updated-dependencies: - dependency-name: follow-redirects dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 89da2289de..16d2316fc2 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -3983,9 +3983,9 @@ flux@^4.0.1: fbjs "^3.0.0" follow-redirects@^1.0.0, follow-redirects@^1.14.0: - version "1.14.4" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.4.tgz#838fdf48a8bbdd79e52ee51fb1c94e3ed98b9379" - integrity sha512-zwGkiSXC1MUJG/qmeIFH2HBJx9u0V46QGUe3YR1fXG8bXQxq7fLj0RjLZQ5nubr9qNJUZrH+xUcwXEoXNpfS+g== + version "1.14.7" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.7.tgz#2004c02eb9436eee9a21446a6477debf17e81685" + integrity sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ== for-in@^1.0.2: version "1.0.2" From 969dfdc69e2f820e67acf26ca783fef6af56e71c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 14 Jan 2022 19:03:46 +0100 Subject: [PATCH 070/160] add basic support for extended static mesh workflow wip --- .../create/create_unreal_staticmesh.py | 33 +++++- .../hosts/maya/plugins/publish/clean_nodes.py | 27 +++++ .../publish/collect_unreal_staticmesh.py | 20 ++-- .../publish/extract_unreal_staticmesh.py | 28 +++++ .../validate_unreal_staticmesh_naming.py | 109 +++++++++--------- .../defaults/project_settings/global.json | 2 +- .../defaults/project_settings/maya.json | 22 +++- .../schemas/schema_maya_create.json | 36 +++++- .../schemas/schema_maya_publish.json | 25 ++++ 9 files changed, 230 insertions(+), 72 deletions(-) create mode 100644 openpype/hosts/maya/plugins/publish/clean_nodes.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index db1684bbc8..30f024a160 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -1,11 +1,42 @@ -from openpype.hosts.maya.api import plugin +# -*- coding: utf-8 -*- +"""Creator for Unreal Static Meshes.""" +from openpype.hosts.maya.api import plugin, lib +from avalon.api import CreatorError, Session +from openpype.api import get_project_settings +from maya import cmds # noqa class CreateUnrealStaticMesh(plugin.Creator): + """Unreal Static Meshes with collisions.""" name = "staticMeshMain" label = "Unreal - Static Mesh" family = "unrealStaticMesh" icon = "cube" + dynamic_subset_keys = ["asset"] def __init__(self, *args, **kwargs): + """Constructor.""" super(CreateUnrealStaticMesh, self).__init__(*args, **kwargs) + self._project_settings = get_project_settings( + Session["AVALON_PROJECT"]) + + @classmethod + def get_dynamic_data( + cls, variant, task_name, asset_id, project_name, host_name + ): + dynamic_data = super(CreateUnrealStaticMesh, cls).get_dynamic_data( + variant, task_name, asset_id, project_name, host_name + ) + dynamic_data["asset"] = Session.get("AVALON_ASSET") + + return dynamic_data + + def process(self): + with lib.undo_chunk(): + instance = super(CreateUnrealStaticMesh, self).process() + content = cmds.sets(instance, query=True) + geometry = cmds.sets(name="geometry_SET", empty=True) + collisions = cmds.sets(name="collisions_SET", empty=True) + cmds.sets([geometry, collisions], forceElement=instance) + # todo: Iterate over collision prefixes and add them to correct + # sets. 
Put rest to the geometry set.
diff --git a/openpype/hosts/maya/plugins/publish/clean_nodes.py b/openpype/hosts/maya/plugins/publish/clean_nodes.py
new file mode 100644
index 0000000000..e6667b7036
--- /dev/null
+++ b/openpype/hosts/maya/plugins/publish/clean_nodes.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+"""Cleanup leftover nodes."""
+from maya import cmds  # noqa
+import pyblish.api
+
+
+class CleanNodesUp(pyblish.api.InstancePlugin):
+    """Clean up temporary nodes created during publishing.
+
+    Deletes every node stored in ``instance.data["cleanNodes"]``.
+
+    """
+
+    order = pyblish.api.IntegratorOrder + 10
+    label = "Clean Nodes"
+    optional = True
+    active = True
+
+    def process(self, instance):
+        if not instance.data.get("cleanNodes"):
+            self.log.info("Nothing to clean.")
+            return
+
+        nodes_to_clean = instance.data.pop("cleanNodes")
+        self.log.info("Removing {} nodes".format(len(nodes_to_clean)))
+        for node in nodes_to_clean:
+            cmds.delete(node)
diff --git a/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py
index 5ab9643f4b..ad6398041b 100644
--- a/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py
+++ b/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py
@@ -4,25 +4,31 @@ import pyblish.api
 
 
 class CollectUnrealStaticMesh(pyblish.api.InstancePlugin):
-    """Collect unreal static mesh
+    """Collect Unreal Static Mesh
 
     Ensures always only a single frame is extracted (current frame). This
     also sets correct FBX options for later extraction.
 
-    Note:
-        This is a workaround so that the `pype.model` family can use the
-        same pointcache extractor implementation as animation and pointcaches.
-        This always enforces the "current" frame to be published.
-
     """
 
     order = pyblish.api.CollectorOrder + 0.2
-    label = "Collect Model Data"
+    label = "Collect Unreal Static Meshes"
     families = ["unrealStaticMesh"]
 
     def process(self, instance):
         # add fbx family to trigger fbx extractor
         instance.data["families"].append("fbx")
+        # take the name from instance (without the `S_` prefix)
+        instance.data["staticMeshCombinedName"] = instance.name[1:]
+
+        geometry_set = [i for i in instance if i == "geometry_SET"]
+        instance.data["membersToCombine"] = cmds.sets(
+            geometry_set, query=True)
+
+        collision_set = [i for i in instance if i == "collisions_SET"]
+        instance.data["collisionMembers"] = cmds.sets(
+            collision_set, query=True)
+
         # set fbx overrides on instance
         instance.data["smoothingGroups"] = True
         instance.data["smoothMesh"] = True
diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py
new file mode 100644
index 0000000000..fd9cf69612
--- /dev/null
+++ b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+"""Create Unreal Static Mesh data to be extracted as FBX."""
+import openpype.api
+import pyblish.api
+from maya import cmds  # noqa
+
+
+class ExtractUnrealStaticMesh(openpype.api.Extractor):
+    """Extract FBX from Maya.
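+
+    Merges every member of the instance's ``geometry_SET``
+    (``membersToCombine``) into a single mesh named by
+    ``staticMeshCombinedName`` using ``polyUnite`` so the geometry can be
+    exported as one static mesh; the temporary combined node is added to
+    ``cleanNodes`` for removal after publishing.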
""" + + order = pyblish.api.ExtractorOrder - 0.1 + label = "Extract Unreal Static Mesh" + families = ["unrealStaticMesh"] + + def process(self, instance): + to_combine = instance.data.get("membersToCombine") + static_mesh_name = instance.data.get("staticMeshCombinedName") + self.log.info( + "merging {] into {}".format( + "+ ".join(to_combine), static_mesh_name)) + cmds.polyUnite( + *to_combine, + n=static_mesh_name) + + if not instance.data.get("cleanNodes"): + instance.data["cleanNodes"] = [] + + instance.data["cleanNodes"].append(static_mesh_name) diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index 99d6cfd4c5..e7df7c8cbb 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -1,18 +1,19 @@ # -*- coding: utf-8 -*- -from maya import cmds +from maya import cmds # noqa import pyblish.api import openpype.api import openpype.hosts.maya.api.action import re -class ValidateUnrealStaticmeshName(pyblish.api.InstancePlugin): +class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): """Validate name of Unreal Static Mesh - Unreals naming convention states that staticMesh sould start with `SM` - prefix - SM_[Name]_## (Eg. SM_sube_01). This plugin also validates other - types of meshes - collision meshes: + Unreals naming convention states that staticMesh should start with `SM` + prefix - SM_[Name]_## (Eg. SM_sube_01).These prefixes can be configured + in Settings UI. This plugin also validates other types of + meshes - collision meshes: UBX_[RenderMeshName]_##: Boxes are created with the Box objects type in @@ -52,69 +53,69 @@ class ValidateUnrealStaticmeshName(pyblish.api.InstancePlugin): families = ["unrealStaticMesh"] label = "Unreal StaticMesh Name" actions = [openpype.hosts.maya.api.action.SelectInvalidAction] - regex_mesh = r"SM_(?P.*)_(\d{2})" - regex_collision = r"((UBX)|(UCP)|(USP)|(UCX))_(?P.*)_(\d{2})" + regex_mesh = r"(?P.*)_(\d{2})" + regex_collision = r"_(?P.*)_(\d{2})" @classmethod def get_invalid(cls, instance): - # find out if supplied transform is group or not - def is_group(groupName): - try: - children = cmds.listRelatives(groupName, children=True) - for child in children: - if not cmds.ls(child, transforms=True): - return False + invalid = [] + + combined_geometry_name = instance.data.get( + "staticMeshCombinedName", None) + if cls.validate_mesh: + # compile regex for testing names + regex_mesh = "{}{}".format( + ("_" + cls.static_mesh_prefix) or "", cls.regex_mesh + ) + sm_r = re.compile(regex_mesh) + if not sm_r.match(combined_geometry_name): + cls.log.error("Mesh doesn't comply with name validation.") return True - except Exception: + + if cls.validate_collision: + collision_set = instance.data.get("collisionMembers", None) + # soft-fail is there are no collision objects + if not collision_set: + cls.log.warning("No collision objects to validate.") return False - invalid = [] - content_instance = instance.data.get("setMembers", None) - if not content_instance: - cls.log.error("Instance has no nodes!") - return True - pass - descendants = cmds.listRelatives(content_instance, - allDescendents=True, - fullPath=True) or [] + regex_collision = "{}{}".format( + "({})_".format( + "|".join("(0}".format(p) for p in cls.collision_prefixes) + ) or "", cls.regex_collision + ) + cl_r = re.compile(regex_collision) - descendants = cmds.ls(descendants, 
-                              noIntermediate=True, long=True)
-        trns = cmds.ls(descendants, long=False, type=('transform'))
-
-        # filter out groups
-        filter = [node for node in trns if not is_group(node)]
-
-        # compile regex for testing names
-        sm_r = re.compile(cls.regex_mesh)
-        cl_r = re.compile(cls.regex_collision)
-
-        sm_names = []
-        col_names = []
-        for obj in filter:
-            sm_m = sm_r.match(obj)
-            if sm_m is None:
-                # test if it matches collision mesh
-                cl_r = sm_r.match(obj)
-                if cl_r is None:
-                    cls.log.error("invalid mesh name on: {}".format(obj))
+            for obj in collision_set:
+                cl_m = cl_r.match(obj)
+                if not cl_m:
+                    cls.log.error("{} is invalid".format(obj))
+                    invalid.append(obj)
+                elif cl_m.group("renderName") != combined_geometry_name:
+                    cls.log.error(
+                        "Collision object name doesn't match "
+                        "static mesh name: {} != {}".format(
+                            cl_m.group("renderName"),
+                            combined_geometry_name)
+                    )
                     invalid.append(obj)
-                else:
-                    col_names.append((cl_r.group("renderName"), obj))
-            else:
-                sm_names.append(sm_m.group("renderName"))
-
-        for c_mesh in col_names:
-            if c_mesh[0] not in sm_names:
-                cls.log.error(("collision name {} doesn't match any "
-                               "static mesh names.").format(obj))
-                invalid.append(c_mesh[1])

         return invalid

     def process(self, instance):
+        # todo: load prefixes from creator settings.
+
+        if not self.validate_mesh and not self.validate_collision:
+            self.log.info("Validation of both mesh and collision names "
+                          "is disabled.")
+            return
+
+        if not instance.data.get("collisionMembers", None):
+            self.log.info("There are no collision objects to validate")
+            return
         invalid = self.get_invalid(instance)
         if invalid:
-            raise RuntimeError("Model naming is invalid. See log.")
+            raise RuntimeError("Model naming is invalid. See log.")
\ No newline at end of file
diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json
index cff1259c98..2169a62746 100644
--- a/openpype/settings/defaults/project_settings/global.json
+++ b/openpype/settings/defaults/project_settings/global.json
@@ -219,7 +219,7 @@
             "hosts": [],
             "task_types": [],
             "tasks": [],
-            "template": "{family}{Variant}"
+            "template": "{family}{variant}"
         },
         {
             "families": [
diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json
index a756071106..67a7b84cdc 100644
--- a/openpype/settings/defaults/project_settings/maya.json
+++ b/openpype/settings/defaults/project_settings/maya.json
@@ -127,6 +127,13 @@
         "enabled": true,
         "defaults": [
             "Main"
+        ],
+        "static_mesh_prefix": "S_",
+        "collision_prefixes": [
+            "UBX",
+            "UCP",
+            "USP",
+            "UCX"
         ]
     },
     "CreateVrayProxy": {
@@ -180,6 +187,11 @@
         "whitelist_native_plugins": false,
         "authorized_plugins": []
     },
+    "ValidateUnrealStaticMeshName": {
+        "enabled": true,
+        "validate_mesh": false,
+        "validate_collision": true
+    },
     "ValidateRenderSettings": {
         "arnold_render_attributes": [],
         "vray_render_attributes": [],
@@ -197,6 +209,11 @@
         "regex": "(.*)_(\\d)*_(?P.*)_(GEO)",
         "top_level_regex": ".*_GRP"
     },
+    "ValidateModelContent": {
+        "enabled": true,
+        "optional": false,
+        "validate_top_group": true
+    },
     "ValidateTransformNamingSuffix": {
         "enabled": true,
         "SUFFIX_NAMING_TABLE": {
@@ -281,11 +298,6 @@
         "optional": true,
         "active": true
     },
-    "ValidateModelContent": {
-        "enabled": true,
-        "optional": false,
-        "validate_top_group": true
-    },
     "ValidateNoAnimation": {
         "enabled": false,
         "optional": true,
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json
b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 088d5d1f96..0544b4bab7 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -66,6 +66,38 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateUnrealStaticMesh", + "label": "Create Unreal - Static Mesh", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "text", + "key": "static_mesh_prefix", + "label": "Static Mesh Prefix" + }, + { + "type": "list", + "key": "collision_prefixes", + "label": "Collision Mesh Prefixes", + "object_type": "text" + } + ] + + }, { "type": "schema_template", "name": "template_create_plugin", @@ -118,10 +150,6 @@ "key": "CreateSetDress", "label": "Create Set Dress" }, - { - "key": "CreateUnrealStaticMesh", - "label": "Create Unreal - Static Mesh" - }, { "key": "CreateVrayProxy", "label": "Create VRay Proxy" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 7c9a5a6b46..f4a371c6de 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -129,6 +129,31 @@ ] }, + { + "type": "dict", + "collapsible": true, + "key": "ValidateUnrealStaticMeshName", + "label": "Validate Unreal Static Mesh Name", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "validate_mesh", + "label": "Validate mesh Names " + }, + { + "type": "boolean", + "key": "validate_collision", + "label": "Validate collision names" + } + ] + }, + { "type": "dict", "collapsible": true, From ab97a3266a9bfdb563ae74692656f8c8b86e4f4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 15 Jan 2022 07:05:47 +0000 Subject: [PATCH 071/160] build(deps): bump shelljs from 0.8.4 to 0.8.5 in /website Bumps [shelljs](https://github.com/shelljs/shelljs) from 0.8.4 to 0.8.5. - [Release notes](https://github.com/shelljs/shelljs/releases) - [Changelog](https://github.com/shelljs/shelljs/blob/master/CHANGELOG.md) - [Commits](https://github.com/shelljs/shelljs/compare/v0.8.4...v0.8.5) --- updated-dependencies: - dependency-name: shelljs dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 43 ++++++++++++++++++++++++++++++++----------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 89da2289de..e34f951572 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2250,9 +2250,9 @@ bail@^1.0.0: integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ== balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== base16@^1.0.0: version "1.0.0" @@ -4136,9 +4136,9 @@ glob-to-regexp@^0.4.1: integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== glob@^7.0.0, glob@^7.0.3, glob@^7.1.3: - version "7.1.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -4825,6 +4825,13 @@ is-core-module@^2.2.0: dependencies: has "^1.0.3" +is-core-module@^2.8.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.1.tgz#f59fdfca701d5879d0a6b100a40aa1560ce27211" + integrity sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA== + dependencies: + has "^1.0.3" + is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" @@ -6167,7 +6174,7 @@ path-key@^3.0.0, path-key@^3.1.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-parse@^1.0.6: +path-parse@^1.0.6, path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== @@ -7208,7 +7215,16 @@ resolve-url@^0.2.1: resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= -resolve@^1.1.6, resolve@^1.14.2, resolve@^1.3.2: +resolve@^1.1.6: + version "1.21.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.21.0.tgz#b51adc97f3472e6a5cf4444d34bc9d6b9037591f" + integrity sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA== + dependencies: + is-core-module "^2.8.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^1.14.2, resolve@^1.3.2: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity 
sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== @@ -7533,9 +7549,9 @@ shell-quote@1.7.2: integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== shelljs@^0.8.4: - version "0.8.4" - resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.4.tgz#de7684feeb767f8716b326078a8a00875890e3c2" - integrity sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ== + version "0.8.5" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" + integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== dependencies: glob "^7.0.0" interpret "^1.0.0" @@ -7896,6 +7912,11 @@ supports-color@^7.0.0, supports-color@^7.1.0: dependencies: has-flag "^4.0.0" +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + svg-parser@^2.0.2: version "2.0.4" resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" From a20499eb3ebef7bf5ef2826414ec4416054e388f Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 10:55:19 +0100 Subject: [PATCH 072/160] Remove mayalookassigner check to avoid tool initialize on startup --- openpype/hosts/maya/api/customize.py | 33 ++++++++++------------------ 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/maya/api/customize.py b/openpype/hosts/maya/api/customize.py index 8474262626..eef100ddfc 100644 --- a/openpype/hosts/maya/api/customize.py +++ b/openpype/hosts/maya/api/customize.py @@ -95,29 +95,20 @@ def override_toolbox_ui(): # Create our controls background_color = (0.267, 0.267, 0.267) controls = [] - look_assigner = None - try: - look_assigner = host_tools.get_tool_by_name( - "lookassigner", - parent=pipeline._parent - ) - except Exception: - log.warning("Couldn't create Look assigner window.", exc_info=True) - if look_assigner is not None: - controls.append( - mc.iconTextButton( - "pype_toolbox_lookmanager", - annotation="Look Manager", - label="Look Manager", - image=os.path.join(icons, "lookmanager.png"), - command=host_tools.show_look_assigner, - bgc=background_color, - width=icon_size, - height=icon_size, - parent=parent - ) + controls.append( + mc.iconTextButton( + "pype_toolbox_lookmanager", + annotation="Look Manager", + label="Look Manager", + image=os.path.join(icons, "lookmanager.png"), + command=host_tools.show_look_assigner, + bgc=background_color, + width=icon_size, + height=icon_size, + parent=parent ) + ) controls.append( mc.iconTextButton( From 1bfe3c1fd013e5ebf010cdfef8114cedae18cdf2 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 10:56:35 +0100 Subject: [PATCH 073/160] Remove explicit background color - icons are transparent and it works fine without --- openpype/hosts/maya/api/customize.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/hosts/maya/api/customize.py b/openpype/hosts/maya/api/customize.py index eef100ddfc..c7fb042ead 100644 --- a/openpype/hosts/maya/api/customize.py +++ b/openpype/hosts/maya/api/customize.py @@ -93,7 +93,6 @@ def override_toolbox_ui(): return # Create our controls - background_color = (0.267, 0.267, 
0.267) controls = [] controls.append( @@ -103,7 +102,6 @@ def override_toolbox_ui(): label="Look Manager", image=os.path.join(icons, "lookmanager.png"), command=host_tools.show_look_assigner, - bgc=background_color, width=icon_size, height=icon_size, parent=parent @@ -119,7 +117,6 @@ def override_toolbox_ui(): command=lambda: host_tools.show_workfiles( parent=pipeline._parent ), - bgc=background_color, width=icon_size, height=icon_size, parent=parent @@ -135,7 +132,6 @@ def override_toolbox_ui(): command=lambda: host_tools.show_loader( parent=pipeline._parent, use_context=True ), - bgc=background_color, width=icon_size, height=icon_size, parent=parent @@ -151,7 +147,6 @@ def override_toolbox_ui(): command=lambda: host_tools.show_scene_inventory( parent=pipeline._parent ), - bgc=background_color, width=icon_size, height=icon_size, parent=parent From 7e41dc49b675e1789fff31dbc9dfc3c5175f4d10 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 11:54:08 +0100 Subject: [PATCH 074/160] Allow to toggle family filters between "include" or "exclude" filtering --- openpype/settings/defaults/project_settings/global.json | 1 + .../projects_schema/schemas/schema_global_tools.json | 8 +++++++- openpype/tools/utils/lib.py | 8 ++++++-- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index cff1259c98..c418377682 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -297,6 +297,7 @@ "family_filter_profiles": [ { "hosts": [], + "is_include": true, "task_types": [], "filter_families": [] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index bb71c9bde6..863ec7f979 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -267,7 +267,13 @@ "label": "Task types" }, { - "type": "splitter" + "type": "boolean", + "key": "is_include", + "label": "Exclude (OFF) / Include (ON)" + }, + { + "type": "label", + "label": "Include: show selected families by default. Hides others by default.
Exclude: hide selected families by default. Shows others by default." }, { "type": "template", diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py index 6742df8557..886cdb5186 100644 --- a/openpype/tools/utils/lib.py +++ b/openpype/tools/utils/lib.py @@ -228,6 +228,7 @@ class FamilyConfigCache: self.dbcon = dbcon self.family_configs = {} self._family_filters_set = False + self._family_filters_is_include = True self._require_refresh = True @classmethod @@ -249,7 +250,7 @@ class FamilyConfigCache: "icon": self.default_icon() } if self._family_filters_set: - item["state"] = False + item["state"] = not self._family_filters_is_include return item def refresh(self, force=False): @@ -313,20 +314,23 @@ class FamilyConfigCache: matching_item = filter_profiles(profiles, profiles_filter) families = [] + is_include = True if matching_item: families = matching_item["filter_families"] + is_include = matching_item["is_include"] if not families: return self._family_filters_set = True + self._family_filters_is_include = is_include # Replace icons with a Qt icon we can use in the user interfaces for family in families: family_info = { "name": family, "icon": self.default_icon(), - "state": True + "state": is_include } self.family_configs[family] = family_info From b2f82c35bb3551bb6a301dc473e34f822e496a35 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 12:18:38 +0100 Subject: [PATCH 075/160] Remove dot (.) from end of Loader label to match others --- openpype/hosts/maya/plugins/load/load_audio.py | 2 +- openpype/hosts/maya/plugins/load/load_image_plane.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_audio.py b/openpype/hosts/maya/plugins/load/load_audio.py index 73a2a4f448..d5bb495480 100644 --- a/openpype/hosts/maya/plugins/load/load_audio.py +++ b/openpype/hosts/maya/plugins/load/load_audio.py @@ -7,7 +7,7 @@ class AudioLoader(api.Loader): """Specific loader of audio.""" families = ["audio"] - label = "Import audio." + label = "Import audio" representations = ["wav"] icon = "volume-up" color = "orange" diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index eea5844e8b..36786115f9 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -77,7 +77,7 @@ class ImagePlaneLoader(api.Loader): """Specific loader of plate for image planes on selected camera.""" families = ["image", "plate", "render"] - label = "Load imagePlane." 
+ label = "Load imagePlane" representations = ["mov", "exr", "preview", "png"] icon = "image" color = "orange" From 41a2ee812ac6c0a6ff742bfe1490558e7d520363 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 12:21:48 +0100 Subject: [PATCH 076/160] Code cosmetics + fix over-indentation --- openpype/hosts/maya/plugins/load/load_audio.py | 2 +- openpype/hosts/maya/plugins/load/load_gpucache.py | 1 + openpype/hosts/maya/plugins/load/load_image_plane.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_audio.py b/openpype/hosts/maya/plugins/load/load_audio.py index d5bb495480..0611dcc189 100644 --- a/openpype/hosts/maya/plugins/load/load_audio.py +++ b/openpype/hosts/maya/plugins/load/load_audio.py @@ -3,6 +3,7 @@ from avalon.maya.pipeline import containerise from avalon.maya import lib from maya import cmds, mel + class AudioLoader(api.Loader): """Specific loader of audio.""" @@ -12,7 +13,6 @@ class AudioLoader(api.Loader): icon = "volume-up" color = "orange" - def load(self, context, name, namespace, data): start_frame = cmds.playbackOptions(query=True, min=True) diff --git a/openpype/hosts/maya/plugins/load/load_gpucache.py b/openpype/hosts/maya/plugins/load/load_gpucache.py index d0a83b8177..444f98f22e 100644 --- a/openpype/hosts/maya/plugins/load/load_gpucache.py +++ b/openpype/hosts/maya/plugins/load/load_gpucache.py @@ -2,6 +2,7 @@ import os from avalon import api from openpype.api import get_project_settings + class GpuCacheLoader(api.Loader): """Load model Alembic as gpuCache""" diff --git a/openpype/hosts/maya/plugins/load/load_image_plane.py b/openpype/hosts/maya/plugins/load/load_image_plane.py index 36786115f9..0652147324 100644 --- a/openpype/hosts/maya/plugins/load/load_image_plane.py +++ b/openpype/hosts/maya/plugins/load/load_image_plane.py @@ -118,7 +118,7 @@ class ImagePlaneLoader(api.Loader): camera = pm.createNode("camera") if camera is None: - return + return try: camera.displayResolution.set(1) From 107e2e637e955bf1971199c7e490b67e01a97b8d Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 13:05:26 +0100 Subject: [PATCH 077/160] Fix typos / cosmetics --- .../plugins/publish/collect_render.py | 2 +- openpype/hosts/blender/api/action.py | 2 +- openpype/hosts/blender/api/lib.py | 2 +- .../hosts/blender/hooks/pre_pyside_install.py | 4 ++-- .../plugins/create/create_animation.py | 2 +- .../blender/plugins/create/create_camera.py | 2 +- .../blender/plugins/create/create_layout.py | 2 +- .../blender/plugins/create/create_model.py | 2 +- .../blender/plugins/create/create_rig.py | 2 +- .../publish/increment_workfile_version.py | 2 +- .../plugins/publish/collect_audio.py | 4 ++-- openpype/hosts/flame/api/lib.py | 2 +- openpype/hosts/flame/api/menu.py | 2 +- .../hosts/flame/api/scripts/wiretap_com.py | 10 ++++---- .../modules/ftrack_lib.py | 2 +- .../modules/panel_app.py | 4 ++-- .../api/utility_scripts/openpype_in_flame.py | 4 ++-- openpype/hosts/flame/api/utils.py | 2 +- openpype/hosts/flame/hooks/pre_flame_setup.py | 2 +- openpype/hosts/flame/otio/flame_export.py | 6 ++--- openpype/hosts/fusion/api/pipeline.py | 2 +- .../hosts/fusion/hooks/pre_fusion_setup.py | 2 +- .../fusion/plugins/load/load_sequence.py | 10 ++++---- .../fusion/plugins/publish/submit_deadline.py | 2 +- .../fusion/scripts/fusion_switch_shot.py | 2 +- .../utility_scripts/__OpenPype_Menu__.py | 2 +- openpype/hosts/harmony/js/README.md | 4 ++-- .../harmony/js/loaders/ImageSequenceLoader.js | 20 ++++++++-------- 
.../harmony/plugins/load/load_background.py | 16 ++++++------- openpype/hosts/hiero/api/events.py | 4 ++-- openpype/hosts/hiero/api/lib.py | 14 +++++------ openpype/hosts/hiero/api/otio/hiero_export.py | 10 ++++---- openpype/hosts/hiero/api/plugin.py | 24 +++++++++---------- .../Python/StartupUI/PimpMySpreadsheet.py | 2 +- openpype/hosts/hiero/api/tags.py | 2 +- .../hiero/plugins/create/create_shot_clip.py | 2 +- .../publish/precollect_clip_effects.py | 4 ++-- .../plugins/publish/precollect_instances.py | 2 +- .../collect_clip_resolution.py | 2 +- .../publish_old_workflow/precollect_retime.py | 4 ++-- openpype/hosts/maya/api/lib.py | 6 ++--- openpype/hosts/maya/api/menu.json | 10 ++++---- openpype/hosts/maya/api/menu_backup.json | 10 ++++---- openpype/hosts/maya/api/setdress.py | 2 +- .../maya/api/shader_definition_editor.py | 2 +- .../maya/plugins/create/create_render.py | 4 ++-- .../maya/plugins/publish/collect_assembly.py | 2 +- .../maya/plugins/publish/collect_render.py | 2 +- .../maya/plugins/publish/collect_vrayscene.py | 2 +- .../maya/plugins/publish/extract_vrayscene.py | 4 ++-- .../plugins/publish/submit_maya_muster.py | 4 ++-- .../publish/validate_mesh_overlapping_uvs.py | 2 +- .../plugins/publish/validate_rig_contents.py | 2 +- .../validate_unreal_staticmesh_naming.py | 2 +- openpype/hosts/nuke/api/__init__.py | 2 +- openpype/hosts/nuke/api/lib.py | 20 ++++++++-------- openpype/hosts/nuke/api/plugin.py | 4 ++-- openpype/hosts/nuke/api/utils.py | 6 ++--- .../hosts/nuke/plugins/create/create_gizmo.py | 4 ++-- .../hosts/nuke/plugins/load/load_backdrop.py | 2 +- .../nuke/plugins/load/load_camera_abc.py | 2 +- openpype/hosts/nuke/plugins/load/load_clip.py | 4 ++-- .../hosts/nuke/plugins/load/load_effects.py | 4 ++-- .../nuke/plugins/load/load_effects_ip.py | 4 ++-- .../hosts/nuke/plugins/load/load_gizmo.py | 2 +- .../hosts/nuke/plugins/load/load_gizmo_ip.py | 4 ++-- .../hosts/nuke/plugins/load/load_image.py | 2 +- .../hosts/nuke/plugins/load/load_model.py | 2 +- .../nuke/plugins/load/load_script_precomp.py | 4 ++-- .../nuke/plugins/publish/extract_camera.py | 2 +- .../plugins/publish/extract_ouput_node.py | 2 +- .../publish/extract_review_data_mov.py | 2 +- .../publish/increment_script_version.py | 2 +- .../nuke/plugins/publish/remove_ouput_node.py | 2 +- .../nuke/plugins/publish/validate_backdrop.py | 2 +- .../publish/validate_write_deadline_tab.py | 2 +- .../startup/KnobScripter/knob_scripter.py | 8 +++---- .../plugins/load/load_image_from_sequence.py | 2 +- openpype/hosts/resolve/README.markdown | 4 ++-- .../resolve/RESOLVE_API_README_v16.2.0_up.txt | 2 +- openpype/hosts/resolve/api/lib.py | 10 ++++---- openpype/hosts/resolve/api/pipeline.py | 2 +- openpype/hosts/resolve/api/plugin.py | 18 +++++++------- openpype/hosts/resolve/api/testing_utils.py | 4 ++-- .../hosts/resolve/hooks/pre_resolve_setup.py | 2 +- openpype/hosts/resolve/otio/davinci_export.py | 2 +- .../plugins/create/create_shot_clip.py | 2 +- .../utility_scripts/__OpenPype__Menu__.py | 2 +- .../publish/collect_editorial_instances.py | 12 +++++----- .../publish/collect_editorial_resources.py | 16 ++++++------- .../plugins/publish/collect_hierarchy.py | 6 ++--- .../publish/collect_representation_names.py | 2 +- .../plugins/publish/validate_texture_name.py | 2 +- .../hosts/tvpaint/api/communication_server.py | 4 ++-- openpype/hosts/tvpaint/api/lib.py | 2 +- openpype/hosts/tvpaint/api/pipeline.py | 4 ++-- openpype/hosts/tvpaint/api/plugin.py | 2 +- openpype/hosts/tvpaint/lib.py | 6 ++--- 
.../plugins/publish/collect_instances.py | 2 +- .../plugins/publish/collect_workfile.py | 2 +- .../plugins/publish/extract_sequence.py | 6 ++--- .../publish/increment_workfile_version.py | 2 +- .../tvpaint/plugins/publish/validate_marks.py | 2 +- .../tvpaint_plugin/plugin_code/README.md | 2 +- .../tvpaint_plugin/plugin_code/library.cpp | 4 ++-- openpype/hosts/tvpaint/worker/worker_job.py | 4 ++-- openpype/hosts/unreal/api/lib.py | 2 +- .../load/load_alembic_geometrycache.py | 2 +- .../publish/collect_tvpaint_instances.py | 2 +- .../publish/extract_tvpaint_workfile.py | 4 ++-- .../webserver_service/webpublish_routes.py | 2 +- 111 files changed, 237 insertions(+), 237 deletions(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index be024b7e24..6eeb10303c 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -157,7 +157,7 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender): in url Returns: - (list) of absolut urls to rendered file + (list) of absolute urls to rendered file """ start = render_instance.frameStart end = render_instance.frameEnd diff --git a/openpype/hosts/blender/api/action.py b/openpype/hosts/blender/api/action.py index f3426ac3cf..09ef76326e 100644 --- a/openpype/hosts/blender/api/action.py +++ b/openpype/hosts/blender/api/action.py @@ -25,7 +25,7 @@ class SelectInvalidAction(pyblish.api.Action): invalid.extend(invalid_nodes) else: self.log.warning( - "Failed plug-in doens't have any selectable objects." + "Failed plug-in doesn't have any selectable objects." ) bpy.ops.object.select_all(action='DESELECT') diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py index fe5d3f93e9..e7210f7e31 100644 --- a/openpype/hosts/blender/api/lib.py +++ b/openpype/hosts/blender/api/lib.py @@ -9,7 +9,7 @@ import addon_utils def load_scripts(paths): """Copy of `load_scripts` from Blender's implementation. - It is possible that whis function will be changed in future and usage will + It is possible that this function will be changed in future and usage will be based on Blender version. """ import bpy_types diff --git a/openpype/hosts/blender/hooks/pre_pyside_install.py b/openpype/hosts/blender/hooks/pre_pyside_install.py index e2a419c8ef..a37f8f0379 100644 --- a/openpype/hosts/blender/hooks/pre_pyside_install.py +++ b/openpype/hosts/blender/hooks/pre_pyside_install.py @@ -21,7 +21,7 @@ class InstallPySideToBlender(PreLaunchHook): platforms = ["windows"] def execute(self): - # Prelaunch hook is not crutial + # Prelaunch hook is not crucial try: self.inner_execute() except Exception: @@ -156,7 +156,7 @@ class InstallPySideToBlender(PreLaunchHook): except pywintypes.error: pass - self.log.warning("Failed to instal PySide2 module to blender.") + self.log.warning("Failed to install PySide2 module to blender.") def is_pyside_installed(self, python_executable): """Check if PySide2 module is in blender's pip list. 
diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py index f7887b7e80..3b4cabe8ec 100644 --- a/openpype/hosts/blender/plugins/create/create_animation.py +++ b/openpype/hosts/blender/plugins/create/create_animation.py @@ -22,7 +22,7 @@ class CreateAnimation(plugin.Creator): ops.execute_in_main_thread(mti) def _process(self): - # Get Instance Containter or create it if it does not exist + # Get Instance Container or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: instances = bpy.data.collections.new(name=AVALON_INSTANCES) diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py index 98ccca313c..6fa80b5a5d 100644 --- a/openpype/hosts/blender/plugins/create/create_camera.py +++ b/openpype/hosts/blender/plugins/create/create_camera.py @@ -22,7 +22,7 @@ class CreateCamera(plugin.Creator): ops.execute_in_main_thread(mti) def _process(self): - # Get Instance Containter or create it if it does not exist + # Get Instance Container or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: instances = bpy.data.collections.new(name=AVALON_INSTANCES) diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py index 831261f027..dac12e19b1 100644 --- a/openpype/hosts/blender/plugins/create/create_layout.py +++ b/openpype/hosts/blender/plugins/create/create_layout.py @@ -22,7 +22,7 @@ class CreateLayout(plugin.Creator): ops.execute_in_main_thread(mti) def _process(self): - # Get Instance Containter or create it if it does not exist + # Get Instance Container or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: instances = bpy.data.collections.new(name=AVALON_INSTANCES) diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index e778f5b74f..903b70033b 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -22,7 +22,7 @@ class CreateModel(plugin.Creator): ops.execute_in_main_thread(mti) def _process(self): - # Get Instance Containter or create it if it does not exist + # Get Instance Container or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: instances = bpy.data.collections.new(name=AVALON_INSTANCES) diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index 2e1c71f570..ec74e279c6 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -22,7 +22,7 @@ class CreateRig(plugin.Creator): ops.execute_in_main_thread(mti) def _process(self): - # Get Instance Containter or create it if it does not exist + # Get Instance Container or create it if it does not exist instances = bpy.data.collections.get(AVALON_INSTANCES) if not instances: instances = bpy.data.collections.new(name=AVALON_INSTANCES) diff --git a/openpype/hosts/blender/plugins/publish/increment_workfile_version.py b/openpype/hosts/blender/plugins/publish/increment_workfile_version.py index db73842323..b81e1111ea 100644 --- a/openpype/hosts/blender/plugins/publish/increment_workfile_version.py +++ 
b/openpype/hosts/blender/plugins/publish/increment_workfile_version.py @@ -14,7 +14,7 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin): def process(self, context): assert all(result["success"] for result in context.data["results"]), ( - "Publishing not succesfull so version is not increased.") + "Publishing not successful so version is not increased.") from openpype.lib import version_up path = context.data["currentFile"] diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py index 8d3c1568e6..80c1c37d7e 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_audio.py +++ b/openpype/hosts/celaction/plugins/publish/collect_audio.py @@ -32,7 +32,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): repr = next((r for r in reprs), None) if not repr: raise "Missing `audioMain` representation" - self.log.info(f"represetation is: {repr}") + self.log.info(f"representation is: {repr}") audio_file = repr.get('data', {}).get('path', "") @@ -56,7 +56,7 @@ class AppendCelactionAudio(pyblish.api.ContextPlugin): representations (list): list for all representations Returns: - dict: subsets with version and representaions in keys + dict: subsets with version and representations in keys """ # Query all subsets for asset diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 96bffab774..2a72d3d88d 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -230,7 +230,7 @@ def maintain_current_timeline(to_timeline, from_timeline=None): project = get_current_project() working_timeline = from_timeline or project.GetCurrentTimeline() - # swith to the input timeline + # switch to the input timeline project.SetCurrentTimeline(to_timeline) try: diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py index fef6dbfa35..64277a46eb 100644 --- a/openpype/hosts/flame/api/menu.py +++ b/openpype/hosts/flame/api/menu.py @@ -40,7 +40,7 @@ class _FlameMenuApp(object): self.menu_group_name = menu_group_name self.dynamic_menu_data = {} - # flame module is only avaliable when a + # flame module is only available when a # flame project is loaded and initialized self.flame = None try: diff --git a/openpype/hosts/flame/api/scripts/wiretap_com.py b/openpype/hosts/flame/api/scripts/wiretap_com.py index d8dc1884cf..bad96373d8 100644 --- a/openpype/hosts/flame/api/scripts/wiretap_com.py +++ b/openpype/hosts/flame/api/scripts/wiretap_com.py @@ -37,7 +37,7 @@ class WireTapCom(object): This way we are able to set new project with settings and correct colorspace policy. Also we are able to create new user - or get actuall user with similar name (users are usually cloning + or get actual user with similar name (users are usually cloning their profiles and adding date stamp into suffix). 
""" @@ -223,7 +223,7 @@ class WireTapCom(object): volumes = [] - # go trough all children and get volume names + # go through all children and get volume names child_obj = WireTapNodeHandle() for child_idx in range(children_num): @@ -263,7 +263,7 @@ class WireTapCom(object): filtered_users = [user for user in used_names if user_name in user] if filtered_users: - # todo: need to find lastly created following regex patern for + # todo: need to find lastly created following regex pattern for # date used in name return filtered_users.pop() @@ -308,7 +308,7 @@ class WireTapCom(object): usernames = [] - # go trough all children and get volume names + # go through all children and get volume names child_obj = WireTapNodeHandle() for child_idx in range(children_num): @@ -355,7 +355,7 @@ class WireTapCom(object): if not requested: raise AttributeError(( "Error: Cannot request number of " - "childrens from the node {}. Make sure your " + "children from the node {}. Make sure your " "wiretap service is running: {}").format( parent_path, parent.lastError()) ) diff --git a/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/ftrack_lib.py b/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/ftrack_lib.py index 26b197ee1d..c2168016c6 100644 --- a/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/ftrack_lib.py +++ b/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/ftrack_lib.py @@ -234,7 +234,7 @@ class FtrackComponentCreator: ).first() if component_entity: - # overwrite existing members in component enity + # overwrite existing members in component entity # - get data for member from `ftrack.origin` location self._overwrite_members(component_entity, comp_data) diff --git a/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/panel_app.py b/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/panel_app.py index 9e39147776..648f902872 100644 --- a/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/panel_app.py +++ b/openpype/hosts/flame/api/utility_scripts/openpype_flame_to_ftrack/modules/panel_app.py @@ -304,7 +304,7 @@ class FlameToFtrackPanel(object): self._resolve_project_entity() self._save_ui_state_to_cfg() - # get hanldes from gui input + # get handles from gui input handles = self.handles_input.text() # get frame start from gui input @@ -517,7 +517,7 @@ class FlameToFtrackPanel(object): if self.temp_data_dir: shutil.rmtree(self.temp_data_dir) self.temp_data_dir = None - print("All Temp data were destroied ...") + print("All Temp data were destroyed ...") def close(self): self._save_ui_state_to_cfg() diff --git a/openpype/hosts/flame/api/utility_scripts/openpype_in_flame.py b/openpype/hosts/flame/api/utility_scripts/openpype_in_flame.py index c5fa881f3c..87e27d4851 100644 --- a/openpype/hosts/flame/api/utility_scripts/openpype_in_flame.py +++ b/openpype/hosts/flame/api/utility_scripts/openpype_in_flame.py @@ -16,7 +16,7 @@ def openpype_install(): """ openpype.install() avalon.api.install(opflame) - print("Avalon registred hosts: {}".format( + print("Avalon registered hosts: {}".format( avalon.api.registered_host())) @@ -100,7 +100,7 @@ def app_initialized(parent=None): """ Initialisation of the hook is starting from here -First it needs to test if it can import the flame modul. +First it needs to test if it can import the flame module. This will happen only in case a project has been loaded. 
Then `app_initialized` will load main Framework which will load all menu objects as apps. diff --git a/openpype/hosts/flame/api/utils.py b/openpype/hosts/flame/api/utils.py index 201c7d2fac..64b9569f90 100644 --- a/openpype/hosts/flame/api/utils.py +++ b/openpype/hosts/flame/api/utils.py @@ -65,7 +65,7 @@ def _sync_utility_scripts(env=None): if _itm not in remove_black_list: skip = True - # do not skyp if pyc in extension + # do not skip if pyc in extension if not os.path.isdir(_itm) and "pyc" in os.path.splitext(_itm)[-1]: skip = False diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py index 159fb37410..9deeeda810 100644 --- a/openpype/hosts/flame/hooks/pre_flame_setup.py +++ b/openpype/hosts/flame/hooks/pre_flame_setup.py @@ -13,7 +13,7 @@ from pprint import pformat class FlamePrelaunch(PreLaunchHook): """ Flame prelaunch hook - Will make sure flame_script_dirs are coppied to user's folder defined + Will make sure flame_script_dirs are copied to user's folder defined in environment var FLAME_SCRIPT_DIR. """ app_groups = ["flame"] diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index aea1f387e8..1fa9b727d5 100644 --- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -127,7 +127,7 @@ def create_time_effects(otio_clip, item): # # add otio effect to clip effects # otio_clip.effects.append(otio_effect) - # # loop trought and get all Timewarps + # # loop through and get all Timewarps # for effect in subTrackItems: # if ((track_item not in effect.linkedItems()) # and (len(effect.linkedItems()) > 0)): @@ -615,11 +615,11 @@ def create_otio_timeline(sequence): # Add Gap if needed if itemindex == 0: # if it is first track item at track then add - # it to previouse item + # it to previous item prev_item = segment_data else: - # get previouse item + # get previous item prev_item = segments_ordered[itemindex - 1] log.debug("_ segment_data: {}".format(segment_data)) diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index c721146830..6b16339e53 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -52,7 +52,7 @@ def install(): def uninstall(): - """Uninstall all tha was installed + """Uninstall all that was installed This is where you undo everything that was done in `install()`. That means, removing menus, deregistering families and data diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py index a0c16a6700..3a0ef6e370 100644 --- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py +++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py @@ -12,7 +12,7 @@ class FusionPrelaunch(PreLaunchHook): app_groups = ["fusion"] def execute(self): - # making sure pyton 3.6 is installed at provided path + # making sure python 3.6 is installed at provided path py36_dir = os.path.normpath(self.launch_context.env.get("PYTHON36", "")) assert os.path.isdir(py36_dir), ( "Python 3.6 is not installed at the provided folder path. Either " diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index 24d48fb9da..8f5be75484 100644 --- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -185,22 +185,22 @@ class FusionLoadSequence(api.Loader): - We do the same like Fusion - allow fusion to take control. 
- HoldFirstFrame: Fusion resets this to 0 - - We preverse the value. + - We preserve the value. - HoldLastFrame: Fusion resets this to 0 - - We preverse the value. + - We preserve the value. - Reverse: Fusion resets to disabled if "Loop" is not enabled. - We preserve the value. - Depth: Fusion resets to "Format" - - We preverse the value. + - We preserve the value. - KeyCode: Fusion resets to "" - - We preverse the value. + - We preserve the value. - TimeCodeOffset: Fusion resets to 0 - - We preverse the value. + - We preserve the value. """ diff --git a/openpype/hosts/fusion/plugins/publish/submit_deadline.py b/openpype/hosts/fusion/plugins/publish/submit_deadline.py index 050e558d2e..28671295ab 100644 --- a/openpype/hosts/fusion/plugins/publish/submit_deadline.py +++ b/openpype/hosts/fusion/plugins/publish/submit_deadline.py @@ -124,7 +124,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin): # Include critical variables with submission keys = [ - # TODO: This won't work if the slaves don't have accesss to + # TODO: This won't work if the slaves don't have access to # these paths, such as if slaves are running Linux and the # submitter is on Windows. "PYTHONPATH", diff --git a/openpype/hosts/fusion/scripts/fusion_switch_shot.py b/openpype/hosts/fusion/scripts/fusion_switch_shot.py index 05b577c8ba..efb3cad800 100644 --- a/openpype/hosts/fusion/scripts/fusion_switch_shot.py +++ b/openpype/hosts/fusion/scripts/fusion_switch_shot.py @@ -85,7 +85,7 @@ def _format_filepath(session): new_filename = "{}_{}_slapcomp_v001.comp".format(project, asset) new_filepath = os.path.join(slapcomp_dir, new_filename) - # Create new unqiue filepath + # Create new unique filepath if os.path.exists(new_filepath): new_filepath = pype.version_up(new_filepath) diff --git a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py index 81df2bc31d..4f804f9bce 100644 --- a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py +++ b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py @@ -16,7 +16,7 @@ def main(env): # activate resolve from pype avalon.api.install(avalon.fusion) - log.info(f"Avalon registred hosts: {avalon.api.registered_host()}") + log.info(f"Avalon registered hosts: {avalon.api.registered_host()}") menu.launch_openpype_menu() diff --git a/openpype/hosts/harmony/js/README.md b/openpype/hosts/harmony/js/README.md index ca610e49f5..1265a38305 100644 --- a/openpype/hosts/harmony/js/README.md +++ b/openpype/hosts/harmony/js/README.md @@ -2,13 +2,13 @@ ### Development -#### Setting up ESLint as linter for javasript code +#### Setting up ESLint as linter for javascript code You nee [node.js](https://nodejs.org/en/) installed. All you need to do then is to run: ```sh -npm intall +npm install ``` in **js** directory. This will install eslint and all requirements locally. diff --git a/openpype/hosts/harmony/js/loaders/ImageSequenceLoader.js b/openpype/hosts/harmony/js/loaders/ImageSequenceLoader.js index d809c350ab..cf8a9a29ca 100644 --- a/openpype/hosts/harmony/js/loaders/ImageSequenceLoader.js +++ b/openpype/hosts/harmony/js/loaders/ImageSequenceLoader.js @@ -18,11 +18,11 @@ if (typeof $ === 'undefined'){ * @classdesc Image Sequence loader JS code. 
*/ var ImageSequenceLoader = function() { - this.PNGTransparencyMode = 0; // Premultiplied wih Black - this.TGATransparencyMode = 0; // Premultiplied wih Black - this.SGITransparencyMode = 0; // Premultiplied wih Black + this.PNGTransparencyMode = 0; // Premultiplied with Black + this.TGATransparencyMode = 0; // Premultiplied with Black + this.SGITransparencyMode = 0; // Premultiplied with Black this.LayeredPSDTransparencyMode = 1; // Straight - this.FlatPSDTransparencyMode = 2; // Premultiplied wih White + this.FlatPSDTransparencyMode = 2; // Premultiplied with White }; @@ -84,7 +84,7 @@ ImageSequenceLoader.getUniqueColumnName = function(columnPrefix) { * @return {string} Read node name * * @example - * // Agrguments are in following order: + * // Arguments are in following order: * var args = [ * files, // Files in file sequences. * asset, // Asset name. @@ -97,11 +97,11 @@ ImageSequenceLoader.prototype.importFiles = function(args) { MessageLog.trace("ImageSequence:: " + typeof PypeHarmony); MessageLog.trace("ImageSequence $:: " + typeof $); MessageLog.trace("ImageSequence OH:: " + typeof PypeHarmony.OpenHarmony); - var PNGTransparencyMode = 0; // Premultiplied wih Black - var TGATransparencyMode = 0; // Premultiplied wih Black - var SGITransparencyMode = 0; // Premultiplied wih Black + var PNGTransparencyMode = 0; // Premultiplied with Black + var TGATransparencyMode = 0; // Premultiplied with Black + var SGITransparencyMode = 0; // Premultiplied with Black var LayeredPSDTransparencyMode = 1; // Straight - var FlatPSDTransparencyMode = 2; // Premultiplied wih White + var FlatPSDTransparencyMode = 2; // Premultiplied with White var doc = $.scn; var files = args[0]; @@ -224,7 +224,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) { * @return {string} Read node name * * @example - * // Agrguments are in following order: + * // Arguments are in following order: * var args = [ * files, // Files in file sequences * name, // Node name diff --git a/openpype/hosts/harmony/plugins/load/load_background.py b/openpype/hosts/harmony/plugins/load/load_background.py index 946090f6e6..993a09e042 100644 --- a/openpype/hosts/harmony/plugins/load/load_background.py +++ b/openpype/hosts/harmony/plugins/load/load_background.py @@ -13,11 +13,11 @@ copy_files = """function copyFile(srcFilename, dstFilename) } """ -import_files = """var PNGTransparencyMode = 1; //Premultiplied wih Black -var TGATransparencyMode = 0; //Premultiplied wih Black -var SGITransparencyMode = 0; //Premultiplied wih Black +import_files = """var PNGTransparencyMode = 1; //Premultiplied with Black +var TGATransparencyMode = 0; //Premultiplied with Black +var SGITransparencyMode = 0; //Premultiplied with Black var LayeredPSDTransparencyMode = 1; //Straight -var FlatPSDTransparencyMode = 2; //Premultiplied wih White +var FlatPSDTransparencyMode = 2; //Premultiplied with White function getUniqueColumnName( column_prefix ) { @@ -140,11 +140,11 @@ function import_files(args) import_files """ -replace_files = """var PNGTransparencyMode = 1; //Premultiplied wih Black -var TGATransparencyMode = 0; //Premultiplied wih Black -var SGITransparencyMode = 0; //Premultiplied wih Black +replace_files = """var PNGTransparencyMode = 1; //Premultiplied with Black +var TGATransparencyMode = 0; //Premultiplied with Black +var SGITransparencyMode = 0; //Premultiplied with Black var LayeredPSDTransparencyMode = 1; //Straight -var FlatPSDTransparencyMode = 2; //Premultiplied wih White +var FlatPSDTransparencyMode = 2; //Premultiplied with White 
function replace_files(args) { diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index 3df095f9e4..7563503593 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -31,7 +31,7 @@ def beforeNewProjectCreated(event): def afterNewProjectCreated(event): log.info("after new project created event...") - # sync avalon data to project properities + # sync avalon data to project properties sync_avalon_data_to_workfile() # add tags from preset @@ -51,7 +51,7 @@ def beforeProjectLoad(event): def afterProjectLoad(event): log.info("after project load event...") - # sync avalon data to project properities + # sync avalon data to project properties sync_avalon_data_to_workfile() # add tags from preset diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 9a22d8cf27..a9467ae5a4 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -299,7 +299,7 @@ def get_track_item_pype_data(track_item): if not tag: return None - # get tag metadata attribut + # get tag metadata attribute tag_data = tag.metadata() # convert tag metadata to normal keys names and values to correct types for k, v in dict(tag_data).items(): @@ -402,7 +402,7 @@ def sync_avalon_data_to_workfile(): try: project.setProjectDirectory(active_project_root) except Exception: - # old way of seting it + # old way of setting it project.setProjectRoot(active_project_root) # get project data from avalon db @@ -614,7 +614,7 @@ def create_nuke_workfile_clips(nuke_workfiles, seq=None): if not seq: seq = hiero.core.Sequence('NewSequences') root.addItem(hiero.core.BinItem(seq)) - # todo will ned to define this better + # todo will need to define this better # track = seq[1] # lazy example to get a destination# track clips_lst = [] for nk in nuke_workfiles: @@ -838,7 +838,7 @@ def apply_colorspace_project(): # remove the TEMP file as we dont need it os.remove(copy_current_file_tmp) - # use the code from bellow for changing xml hrox Attributes + # use the code from below for changing xml hrox Attributes presets.update({"name": os.path.basename(copy_current_file)}) # read HROX in as QDomSocument @@ -874,7 +874,7 @@ def apply_colorspace_clips(): if "default" in clip_colorspace: continue - # check if any colorspace presets for read is mathing + # check if any colorspace presets for read is matching preset_clrsp = None for k in presets: if not bool(re.search(k["regex"], clip_media_source_path)): @@ -931,7 +931,7 @@ def get_sequence_pattern_and_padding(file): Can find file.0001.ext, file.%02d.ext, file.####.ext Return: - string: any matching sequence patern + string: any matching sequence pattern int: padding of sequnce numbering """ foundall = re.findall( @@ -950,7 +950,7 @@ def get_sequence_pattern_and_padding(file): def sync_clip_name_to_data_asset(track_items_list): - # loop trough all selected clips + # loop through all selected clips for track_item in track_items_list: # ignore if parent track is locked or disabled if track_item.parent().isLocked(): diff --git a/openpype/hosts/hiero/api/otio/hiero_export.py b/openpype/hosts/hiero/api/otio/hiero_export.py index abf510403e..1e4088d9c0 100644 --- a/openpype/hosts/hiero/api/otio/hiero_export.py +++ b/openpype/hosts/hiero/api/otio/hiero_export.py @@ -92,7 +92,7 @@ def create_time_effects(otio_clip, track_item): # add otio effect to clip effects otio_clip.effects.append(otio_effect) - # loop trought and get all Timewarps + # loop through and get all Timewarps for effect in 
subTrackItems: if ((track_item not in effect.linkedItems()) and (len(effect.linkedItems()) > 0)): @@ -388,11 +388,11 @@ def create_otio_timeline(): # Add Gap if needed if itemindex == 0: # if it is first track item at track then add - # it to previouse item + # it to previous item return track_item else: - # get previouse item + # get previous item return track_item.parent().items()[itemindex - 1] # get current timeline @@ -416,11 +416,11 @@ def create_otio_timeline(): # Add Gap if needed if itemindex == 0: # if it is first track item at track then add - # it to previouse item + # it to previous item prev_item = track_item else: - # get previouse item + # get previous item prev_item = track_item.parent().items()[itemindex - 1] # calculate clip frame range difference from each other diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index 2bbb1df8c1..3506af2d6a 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -146,7 +146,7 @@ class CreatorWidget(QtWidgets.QDialog): # convert label text to normal capitalized text with spaces label_text = self.camel_case_split(text) - # assign the new text to lable widget + # assign the new text to label widget label = QtWidgets.QLabel(label_text) label.setObjectName("LineLabel") @@ -337,7 +337,7 @@ class SequenceLoader(avalon.Loader): "Sequentially in order" ], default="Original timing", - help="Would you like to place it at orignal timing?" + help="Would you like to place it at original timing?" ) ] @@ -475,7 +475,7 @@ class ClipLoader: def _get_asset_data(self): """ Get all available asset data - joint `data` key with asset.data dict into the representaion + joint `data` key with asset.data dict into the representation """ asset_name = self.context["representation"]["context"]["asset"] @@ -550,7 +550,7 @@ class ClipLoader: (self.timeline_out - self.timeline_in + 1) + self.handle_start + self.handle_end) < self.media_duration) - # if slate is on then remove the slate frame from begining + # if slate is on then remove the slate frame from beginning if slate_on: self.media_duration -= 1 self.handle_start += 1 @@ -634,8 +634,8 @@ class PublishClip: "track": "sequence", } - # parents search patern - parents_search_patern = r"\{([a-z]*?)\}" + # parents search pattern + parents_search_pattern = r"\{([a-z]*?)\}" # default templates for non-ui use rename_default = False @@ -719,7 +719,7 @@ class PublishClip: return self.track_item def _populate_track_item_default_data(self): - """ Populate default formating data from track item. """ + """ Populate default formatting data from track item. 
""" self.track_item_default_data = { "_folder_": "shots", @@ -814,7 +814,7 @@ class PublishClip: # mark review layer if self.review_track and ( self.review_track not in self.review_track_default): - # if review layer is defined and not the same as defalut + # if review layer is defined and not the same as default self.review_layer = self.review_track # shot num calculate if self.rename_index == 0: @@ -863,7 +863,7 @@ class PublishClip: # in case track name and subset name is the same then add if self.subset_name == self.track_name: hero_data["subset"] = self.subset - # assing data to return hierarchy data to tag + # assign data to return hierarchy data to tag tag_hierarchy_data = hero_data # add data to return data dict @@ -897,7 +897,7 @@ class PublishClip: type ) - # first collect formating data to use for formating template + # first collect formatting data to use for formatting template formating_data = {} for _k, _v in self.hierarchy_data.items(): value = _v["value"].format( @@ -915,9 +915,9 @@ class PublishClip: """ Create parents and return it in list. """ self.parents = [] - patern = re.compile(self.parents_search_patern) + pattern = re.compile(self.parents_search_pattern) - par_split = [(patern.findall(t).pop(), t) + par_split = [(pattern.findall(t).pop(), t) for t in self.hierarchy.split("/")] for type, template in par_split: diff --git a/openpype/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py b/openpype/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py index 39a65045a7..b8dfb07b47 100644 --- a/openpype/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py +++ b/openpype/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py @@ -1,5 +1,5 @@ # PimpMySpreadsheet 1.0, Antony Nasce, 23/05/13. -# Adds custom spreadsheet columns and right-click menu for setting the Shot Status, and Artist Shot Assignement. +# Adds custom spreadsheet columns and right-click menu for setting the Shot Status, and Artist Shot Assignment. # gStatusTags is a global dictionary of key(status)-value(icon) pairs, which can be overridden with custom icons if required # Requires Hiero 1.7v2 or later. 
# Install Instructions: Copy to ~/.hiero/Python/StartupUI diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index 68f8d35106..fe5c0d5257 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -172,7 +172,7 @@ def add_tags_to_workfile(): } } - # loop trough tag data dict and create deep tag structure + # loop through tag data dict and create deep tag structure for _k, _val in nks_pres_tags.items(): # check if key is not decorated with [] so it is defined as bin bin_find = None diff --git a/openpype/hosts/hiero/plugins/create/create_shot_clip.py b/openpype/hosts/hiero/plugins/create/create_shot_clip.py index 0c5bf93a3f..d0c81cffa2 100644 --- a/openpype/hosts/hiero/plugins/create/create_shot_clip.py +++ b/openpype/hosts/hiero/plugins/create/create_shot_clip.py @@ -139,7 +139,7 @@ class CreateShotClip(phiero.Creator): "type": "QComboBox", "label": "Subset Name", "target": "ui", - "toolTip": "chose subset name patern, if is selected, name of track layer will be used", # noqa + "toolTip": "chose subset name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, "subsetFamily": { "value": ["plate", "take"], diff --git a/openpype/hosts/hiero/plugins/publish/precollect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/precollect_clip_effects.py index 80c6abbaef..9ade7603e0 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_clip_effects.py @@ -34,7 +34,7 @@ class PreCollectClipEffects(pyblish.api.InstancePlugin): if clip_effect_items: tracks_effect_items[track_index] = clip_effect_items - # process all effects and devide them to instance + # process all effects and divide them to instance for _track_index, sub_track_items in tracks_effect_items.items(): # skip if track index is the same as review track index if review and review_track_index == _track_index: @@ -156,7 +156,7 @@ class PreCollectClipEffects(pyblish.api.InstancePlugin): 'postage_stamp_frame', 'maskChannel', 'export_cc', 'select_cccid', 'mix', 'version', 'matrix'] - # loop trough all knobs and collect not ignored + # loop through all knobs and collect not ignored # and any with any value for knob in node.knobs().keys(): # skip nodes in ignore keys diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index bf3a779ab1..4eac6a008a 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -264,7 +264,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): timeline_range = self.create_otio_time_range_from_timeline_item_data( track_item) - # loop trough audio track items and search for overlaping clip + # loop through audio track items and search for overlapping clip for otio_audio in self.audio_track_items: parent_range = otio_audio.range_in_parent() diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py index fea36d00fb..1d0727d0af 100644 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py @@ -5,7 +5,7 @@ class CollectClipResolution(pyblish.api.InstancePlugin): """Collect clip geometry resolution""" order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Clip Resoluton" + 
label = "Collect Clip Resolution" hosts = ["hiero"] families = ["clip"] diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py index f0e0f1a1a3..2f65a8bd4f 100644 --- a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py @@ -52,7 +52,7 @@ class PrecollectRetime(api.InstancePlugin): handle_end )) - # loop withing subtrack items + # loop within subtrack items time_warp_nodes = [] source_in_change = 0 source_out_change = 0 @@ -76,7 +76,7 @@ class PrecollectRetime(api.InstancePlugin): (timeline_in - handle_start), (timeline_out + handle_end) + 1) ] - # calculate differnce + # calculate difference diff_in = (node["lookup"].getValueAt( timeline_in)) - timeline_in diff_out = (node["lookup"].getValueAt( diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 52ebcaff64..98e1b20132 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -184,7 +184,7 @@ def uv_from_element(element): parent = element.split(".", 1)[0] # Maya is funny in that when the transform of the shape - # of the component elemen has children, the name returned + # of the component element has children, the name returned # by that elementection is the shape. Otherwise, it is # the transform. So lets see what type we're dealing with here. if cmds.nodeType(parent) in supported: @@ -1595,7 +1595,7 @@ def get_container_transforms(container, members=None, root=False): Args: container (dict): the container members (list): optional and convenience argument - root (bool): return highest node in hierachy if True + root (bool): return highest node in hierarchy if True Returns: root (list / str): @@ -2482,7 +2482,7 @@ class shelf(): def _get_render_instances(): """Return all 'render-like' instances. - This returns list of instance sets that needs to receive informations + This returns list of instance sets that needs to receive information about render layer changes. 
Returns: diff --git a/openpype/hosts/maya/api/menu.json b/openpype/hosts/maya/api/menu.json index bf4d812d33..a2efd5233c 100644 --- a/openpype/hosts/maya/api/menu.json +++ b/openpype/hosts/maya/api/menu.json @@ -506,8 +506,8 @@ "transforms", "local" ], - "title": "# Copy Local Transfroms", - "tooltip": "Copy local transfroms" + "title": "# Copy Local Transforms", + "tooltip": "Copy local transforms" }, { "type": "action", @@ -520,8 +520,8 @@ "transforms", "matrix" ], - "title": "# Copy Matrix Transfroms", - "tooltip": "Copy Matrix transfroms" + "title": "# Copy Matrix Transforms", + "tooltip": "Copy Matrix transforms" }, { "type": "action", @@ -842,7 +842,7 @@ "sourcetype": "file", "tags": ["cleanup", "remove_user_defined_attributes"], "title": "# Remove User Defined Attributes", - "tooltip": "Remove all user-defined attributs from all nodes" + "tooltip": "Remove all user-defined attributes from all nodes" }, { "type": "action", diff --git a/openpype/hosts/maya/api/menu_backup.json b/openpype/hosts/maya/api/menu_backup.json index 731a33a630..e2a558aedc 100644 --- a/openpype/hosts/maya/api/menu_backup.json +++ b/openpype/hosts/maya/api/menu_backup.json @@ -794,8 +794,8 @@ "transforms", "local" ], - "title": "Copy Local Transfroms", - "tooltip": "Copy local transfroms" + "title": "Copy Local Transforms", + "tooltip": "Copy local transforms" }, { "type": "action", @@ -808,8 +808,8 @@ "transforms", "matrix" ], - "title": "Copy Matrix Transfroms", - "tooltip": "Copy Matrix transfroms" + "title": "Copy Matrix Transforms", + "tooltip": "Copy Matrix transforms" }, { "type": "action", @@ -1274,7 +1274,7 @@ "sourcetype": "file", "tags": ["cleanup", "remove_user_defined_attributes"], "title": "Remove User Defined Attributes", - "tooltip": "Remove all user-defined attributs from all nodes" + "tooltip": "Remove all user-defined attributes from all nodes" }, { "type": "action", diff --git a/openpype/hosts/maya/api/setdress.py b/openpype/hosts/maya/api/setdress.py index 3537fa3837..4f826b8fde 100644 --- a/openpype/hosts/maya/api/setdress.py +++ b/openpype/hosts/maya/api/setdress.py @@ -341,7 +341,7 @@ def update_package(set_container, representation): def update_scene(set_container, containers, current_data, new_data, new_file): """Updates the hierarchy, assets and their matrix - Updates the following withing the scene: + Updates the following within the scene: * Setdress hierarchy alembic * Matrix * Parenting diff --git a/openpype/hosts/maya/api/shader_definition_editor.py b/openpype/hosts/maya/api/shader_definition_editor.py index ed425f4718..911db48ac2 100644 --- a/openpype/hosts/maya/api/shader_definition_editor.py +++ b/openpype/hosts/maya/api/shader_definition_editor.py @@ -92,7 +92,7 @@ class ShaderDefinitionsEditor(QtWidgets.QWidget): def _write_definition_file(self, content, force=False): """Write content as definition to file in database. - Before file is writen, check is made if its content has not + Before file is written, check is made if its content has not changed. If is changed, warning is issued to user if he wants it to overwrite. Note: GridFs doesn't allow changing file content. You need to delete existing file and create new one. diff --git a/openpype/hosts/maya/plugins/create/create_render.py b/openpype/hosts/maya/plugins/create/create_render.py index 85919d1166..fa5e73f3ed 100644 --- a/openpype/hosts/maya/plugins/create/create_render.py +++ b/openpype/hosts/maya/plugins/create/create_render.py @@ -53,8 +53,8 @@ class CreateRender(plugin.Creator): renderer. 
ass (bool): Submit as ``ass`` file for standalone Arnold renderer. tileRendering (bool): Instance is set to tile rendering mode. We - won't submit actuall render, but we'll make publish job to wait - for Tile Assemly job done and then publish. + won't submit actual render, but we'll make publish job to wait + for Tile Assembly job done and then publish. See Also: https://pype.club/docs/artist_hosts_maya#creating-basic-render-setup diff --git a/openpype/hosts/maya/plugins/publish/collect_assembly.py b/openpype/hosts/maya/plugins/publish/collect_assembly.py index 22af1239b1..313636793b 100644 --- a/openpype/hosts/maya/plugins/publish/collect_assembly.py +++ b/openpype/hosts/maya/plugins/publish/collect_assembly.py @@ -24,7 +24,7 @@ class CollectAssembly(pyblish.api.InstancePlugin): """ order = pyblish.api.CollectorOrder + 0.49 - label = "Assemby" + label = "Assembly" families = ["assembly"] def process(self, instance): diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index ac1e495f08..cbddb86e53 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -126,7 +126,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): r"^.+:(.*)", layer).group(1) except IndexError: msg = "Invalid layer name in set [ {} ]".format(layer) - self.log.warnig(msg) + self.log.warning(msg) continue self.log.info("processing %s" % layer) diff --git a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py index 7097d7ce9c..e5c182c908 100644 --- a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py @@ -48,7 +48,7 @@ class CollectVrayScene(pyblish.api.InstancePlugin): expected_layer_name = re.search(r"^.+:(.*)", layer).group(1) except IndexError: msg = "Invalid layer name in set [ {} ]".format(layer) - self.log.warnig(msg) + self.log.warning(msg) continue self.log.info("processing %s" % layer) diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py index c9edfc8343..1d7c0fa717 100644 --- a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py @@ -36,7 +36,7 @@ class ExtractVrayscene(openpype.api.Extractor): else: node = vray_settings[0] - # setMembers on vrayscene_layer shoudl contain layer name. + # setMembers on vrayscene_layer should contain layer name. layer_name = instance.data.get("layer") staging_dir = self.staging_dir(instance) @@ -111,7 +111,7 @@ class ExtractVrayscene(openpype.api.Extractor): layer (str): layer name. template (str): token template. start_frame (int, optional): start frame - if set we use - mutliple files export mode. + multiple files export mode. Returns: str: formatted path. diff --git a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py index ac3de4114c..f852904580 100644 --- a/openpype/hosts/maya/plugins/publish/submit_maya_muster.py +++ b/openpype/hosts/maya/plugins/publish/submit_maya_muster.py @@ -331,7 +331,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): # but dispatcher (Server) and not render clients. Render clients # inherit environment from publisher including PATH, so there's # no problem finding PYPE, but there is now way (as far as I know) - # to set environment dynamically for dispatcher. 
Therefor this hack. + # to set environment dynamically for dispatcher. Therefore this hack. args = [muster_python, _get_script().replace('\\', '\\\\'), "--paths", @@ -478,7 +478,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin): # such that proper initialisation happens the same # way as it does on a local machine. # TODO(marcus): This won't work if the slaves don't - # have accesss to these paths, such as if slaves are + # have access to these paths, such as if slaves are # running Linux and the submitter is on Windows. "PYTHONPATH", "PATH", diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py index 57cf0803a4..5ce422239d 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_overlapping_uvs.py @@ -78,7 +78,7 @@ class GetOverlappingUVs(object): if len(uarray) == 0 or len(varray) == 0: return (False, None, None) - # loop throught all vertices to construct edges/rays + # loop through all vertices to construct edges/rays u = uarray[-1] v = varray[-1] for i in xrange(len(uarray)): # noqa: F821 diff --git a/openpype/hosts/maya/plugins/publish/validate_rig_contents.py b/openpype/hosts/maya/plugins/publish/validate_rig_contents.py index 4a6914ef90..6fe51d7b51 100644 --- a/openpype/hosts/maya/plugins/publish/validate_rig_contents.py +++ b/openpype/hosts/maya/plugins/publish/validate_rig_contents.py @@ -9,7 +9,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin): Every rig must contain at least two object sets: "controls_SET" - Set of all animatable controls - "out_SET" - Set of all cachable meshes + "out_SET" - Set of all cacheable meshes """ diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index 99d6cfd4c5..00f1fda2d3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -10,7 +10,7 @@ import re class ValidateUnrealStaticmeshName(pyblish.api.InstancePlugin): """Validate name of Unreal Static Mesh - Unreals naming convention states that staticMesh sould start with `SM` + Unreals naming convention states that staticMesh should start with `SM` prefix - SM_[Name]_## (Eg. SM_sube_01). 
This plugin also validates other types of meshes - collision meshes: diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 1567189ed1..fe30caf3ab 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -54,7 +54,7 @@ def install(): ''' Installing all requarements for Nuke host ''' - # remove all registred callbacks form avalon.nuke + # remove all registered callbacks from avalon.nuke from avalon import pipeline pipeline._registered_event_handlers.clear() diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index e36a5aa5ba..fb66ac1b0b 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -141,7 +141,7 @@ def check_inventory_versions(): max_version = max(versions) # check the available version and do match - # change color of node if not max verion + # change color of node if not max version if version.get("name") not in [max_version]: node["tile_color"].setValue(int("0xd84f20ff", 16)) else: @@ -236,10 +236,10 @@ def get_render_path(node): def format_anatomy(data): - ''' Helping function for formating of anatomy paths + ''' Helping function for formatting of anatomy paths Arguments: - data (dict): dictionary with attributes used for formating + data (dict): dictionary with attributes used for formatting Return: path (str) @@ -462,7 +462,7 @@ def create_write_node(name, data, input=None, prenodes=None, else: now_node.setInput(0, prev_node) - # swith actual node to previous + # switch actual node to previous prev_node = now_node # creating write node @@ -474,7 +474,7 @@ def create_write_node(name, data, input=None, prenodes=None, # connect to previous node now_node.setInput(0, prev_node) - # swith actual node to previous + # switch actual node to previous prev_node = now_node now_node = nuke.createNode("Output", "name Output1") @@ -516,7 +516,7 @@ def create_write_node(name, data, input=None, prenodes=None, GN.addKnob(knob) else: if "___" in _k_name: - # add devider + # add divider GN.addKnob(nuke.Text_Knob("")) else: # add linked knob by _k_name @@ -725,7 +725,7 @@ class WorkfileSettings(object): for i, n in enumerate(copy_inputs): nv.setInput(i, n) - # set coppied knobs + # set copied knobs for k, v in copy_knobs.items(): print(k, v) nv[k].setValue(v) @@ -862,7 +862,7 @@ class WorkfileSettings(object): def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes - Looping trought all read nodes and tries to set colorspace based + Looping through all read nodes and tries to set colorspace based on regex rules in presets """ changes = {} @@ -871,7 +871,7 @@ class WorkfileSettings(object): if n.Class() != "Read": continue - # check if any colorspace presets for read is mathing + # check if any colorspace presets for read is matching preset_clrsp = None for input in read_clrs_inputs: @@ -1013,7 +1013,7 @@ class WorkfileSettings(object): def reset_resolution(self): """Set resolution to project resolution.""" - log.info("Reseting resolution") + log.info("Resetting resolution") project = io.find_one({"type": "project"}) asset = api.Session["AVALON_ASSET"] asset = io.find_one({"name": asset, "type": "asset"}) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 82299dd354..ec1d04bc63 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -209,7 +209,7 @@ class ExporterReview(object): nuke_imageio = opnlib.get_nuke_imageio_settings() # TODO: this is only securing backward
compatibility lets remove - # this once all projects's anotomy are upated to newer config + # this once all projects's anotomy are updated to newer config if "baking" in nuke_imageio.keys(): return nuke_imageio["baking"]["viewerProcess"] else: @@ -477,7 +477,7 @@ class ExporterReviewMov(ExporterReview): write_node["file_type"].setValue(str(self.ext)) # Knobs `meta_codec` and `mov64_codec` are not available on centos. - # TODO should't this come from settings on outputs? + # TODO shouldn't this come from settings on outputs? try: write_node["meta_codec"].setValue("ap4h") except Exception: diff --git a/openpype/hosts/nuke/api/utils.py b/openpype/hosts/nuke/api/utils.py index e43c11a380..caacdfe3bb 100644 --- a/openpype/hosts/nuke/api/utils.py +++ b/openpype/hosts/nuke/api/utils.py @@ -5,9 +5,9 @@ from openpype.api import resources def set_context_favorites(favorites=None): - """ Addig favorite folders to nuke's browser + """ Adding favorite folders to nuke's browser - Argumets: + Arguments: favorites (dict): couples of {name:path} """ favorites = favorites or {} @@ -51,7 +51,7 @@ def gizmo_is_nuke_default(gizmo): def bake_gizmos_recursively(in_group=nuke.Root()): """Converting a gizmo to group - Argumets: + Arguments: is_group (nuke.Node)[optonal]: group node or all nodes """ # preserve selection after all is done diff --git a/openpype/hosts/nuke/plugins/create/create_gizmo.py b/openpype/hosts/nuke/plugins/create/create_gizmo.py index c59713cff1..a66311cb4b 100644 --- a/openpype/hosts/nuke/plugins/create/create_gizmo.py +++ b/openpype/hosts/nuke/plugins/create/create_gizmo.py @@ -48,7 +48,7 @@ class CreateGizmo(plugin.PypeCreator): gizmo_node["name"].setValue("{}_GZM".format(self.name)) gizmo_node["tile_color"].setValue(int(self.node_color, 16)) - # add sticky node wit guide + # add sticky node with guide with gizmo_node: sticky = nuke.createNode("StickyNote") sticky["label"].setValue( @@ -71,7 +71,7 @@ class CreateGizmo(plugin.PypeCreator): gizmo_node["name"].setValue("{}_GZM".format(self.name)) gizmo_node["tile_color"].setValue(int(self.node_color, 16)) - # add sticky node wit guide + # add sticky node with guide with gizmo_node: sticky = nuke.createNode("StickyNote") sticky["label"].setValue( diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 9148260e9e..44f7e60782 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -235,7 +235,7 @@ class LoadBackdropNodes(api.Loader): else: GN["tile_color"].setValue(int(self.node_color, 16)) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) return update_container(GN, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index 377d60e84b..1645e513b4 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -156,7 +156,7 @@ class AlembicCameraLoader(api.Loader): # color node by correct color by actual version self.node_version_color(version, camera_node) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) return update_container(camera_node, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 9ce72c0519..aae7dcdc77 100644 --- 
a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -270,7 +270,7 @@ class LoadClip(plugin.NukeLoader): read_node, updated_dict ) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) if version_data.get("retime", None): self._make_retimes(read_node, version_data) @@ -302,7 +302,7 @@ class LoadClip(plugin.NukeLoader): self._loader_shift(read_node, start_at_workfile) def _make_retimes(self, parent_node, version_data): - ''' Create all retime and timewarping nodes with coppied animation ''' + ''' Create all retime and timewarping nodes with copied animation ''' speed = version_data.get('speed', 1) time_warp_nodes = version_data.get('timewarps', []) last_node = None diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 8ba1b6b7c1..cecb61696b 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -253,7 +253,7 @@ class LoadEffects(api.Loader): else: GN["tile_color"].setValue(int("0x3469ffff", 16)) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) def connect_read_node(self, group_node, asset, subset): """ @@ -314,7 +314,7 @@ class LoadEffects(api.Loader): def byteify(self, input): """ Converts unicode strings to strings - It goes trought all dictionary + It goes through all dictionary Arguments: input (dict/str): input diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index d0cab26842..665b3b07d1 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -258,7 +258,7 @@ class LoadEffectsInputProcess(api.Loader): else: GN["tile_color"].setValue(int("0x3469ffff", 16)) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) def connect_active_viewer(self, group_node): """ @@ -331,7 +331,7 @@ class LoadEffectsInputProcess(api.Loader): def byteify(self, input): """ Converts unicode strings to strings - It goes trought all dictionary + It goes through all dictionary Arguments: input (dict/str): input diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo.py b/openpype/hosts/nuke/plugins/load/load_gizmo.py index c6228b95f6..28c31c2261 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo.py @@ -149,7 +149,7 @@ class LoadGizmo(api.Loader): else: GN["tile_color"].setValue(int(self.node_color, 16)) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) return update_container(GN, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py index 5ca101d6cb..1796b1f992 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -155,7 +155,7 @@ class LoadGizmoInputProcess(api.Loader): else: GN["tile_color"].setValue(int(self.node_color, 16)) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) return update_container(GN, data_imprint) @@ -210,7 +210,7 @@ class 
LoadGizmoInputProcess(api.Loader): def byteify(self, input): """ Converts unicode strings to strings - It goes trought all dictionary + It goes through all dictionary Arguments: input (dict/str): input diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 02a5b55c18..06c7ecf6ab 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -231,7 +231,7 @@ class LoadImage(api.Loader): node, updated_dict ) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) def remove(self, container): diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 15fa4fa35c..c1241e0383 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -156,7 +156,7 @@ class AlembicModelLoader(api.Loader): # color node by correct color by actual version self.node_version_color(version, model_node) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) return update_container(model_node, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 7444dd6e96..94dc70e341 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -67,7 +67,7 @@ class LinkAsGroup(api.Loader): P["useOutput"].setValue(True) with P: - # iterate trough all nodes in group node and find pype writes + # iterate through all nodes in group node and find pype writes writes = [n.name() for n in nuke.allNodes() if n.Class() == "Group" if get_avalon_knob_data(n)] @@ -152,7 +152,7 @@ class LinkAsGroup(api.Loader): else: node["tile_color"].setValue(int("0xff0ff0ff", 16)) - self.log.info("udated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version.get("name"))) def remove(self, container): from avalon.nuke import viewer_update_and_undo_stop diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py index bc50dac108..3333da1909 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_camera.py +++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py @@ -113,7 +113,7 @@ class ExtractCamera(openpype.api.Extractor): def bakeCameraWithAxeses(camera_node, output_range): - """ Baking all perent hiearchy of axeses into camera + """ Baking all perent hierarchy of axeses into camera with transposition onto word XYZ coordinance """ bakeFocal = False diff --git a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py index c3a6a3b167..a78424be78 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py +++ b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py @@ -4,7 +4,7 @@ from avalon.nuke import maintained_selection class CreateOutputNode(pyblish.api.ContextPlugin): - """Adding output node for each ouput write node + """Adding output node for each output write node So when latly user will want to Load .nk as LifeGroup or Precomp Nuke will not complain about missing Output node """ diff --git a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py 
index 32962b57a6..5d53e99e9d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py +++ b/openpype/hosts/nuke/plugins/publish/extract_review_data_mov.py @@ -49,7 +49,7 @@ class ExtractReviewDataMov(openpype.api.Extractor): # test if family found in context test_families = any([ - # first if exact family set is mathing + # first if exact family set is matching # make sure only interesetion of list is correct bool(set(families).intersection(f_families)), # and if famiies are set at all diff --git a/openpype/hosts/nuke/plugins/publish/increment_script_version.py b/openpype/hosts/nuke/plugins/publish/increment_script_version.py index f55ed21ee2..b854dc0aa1 100644 --- a/openpype/hosts/nuke/plugins/publish/increment_script_version.py +++ b/openpype/hosts/nuke/plugins/publish/increment_script_version.py @@ -15,7 +15,7 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin): def process(self, context): assert all(result["success"] for result in context.data["results"]), ( - "Publishing not succesfull so version is not increased.") + "Publishing not successful so version is not increased.") from openpype.lib import version_up path = context.data["currentFile"] diff --git a/openpype/hosts/nuke/plugins/publish/remove_ouput_node.py b/openpype/hosts/nuke/plugins/publish/remove_ouput_node.py index 12361595fe..fb77e8638c 100644 --- a/openpype/hosts/nuke/plugins/publish/remove_ouput_node.py +++ b/openpype/hosts/nuke/plugins/publish/remove_ouput_node.py @@ -3,7 +3,7 @@ import pyblish.api class RemoveOutputNode(pyblish.api.ContextPlugin): - """Removing output node for each ouput write node + """Removing output node for each output write node """ label = 'Output Node Remove' diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index f280ad4af1..ceb70a8c86 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -48,7 +48,7 @@ class SelectCenterInNodeGraph(pyblish.api.Action): @pyblish.api.log class ValidateBackdrop(pyblish.api.InstancePlugin): """Validate amount of nodes on backdrop node in case user - forgoten to add nodes above the publishing backdrop node""" + forgotten to add nodes above the publishing backdrop node""" order = pyblish.api.ValidatorOrder optional = True diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py b/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py index 72fd51a900..5ee93403d0 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py @@ -23,7 +23,7 @@ class RepairNukeWriteDeadlineTab(pyblish.api.Action): for instance in instances: group_node = [x for x in instance if x.Class() == "Group"][0] - # Remove exising knobs. + # Remove existing knobs. 
knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names() for name, knob in group_node.knobs().iteritems(): if name in knob_names: diff --git a/openpype/hosts/nuke/startup/KnobScripter/knob_scripter.py b/openpype/hosts/nuke/startup/KnobScripter/knob_scripter.py index f03067aa4b..368ee64e32 100644 --- a/openpype/hosts/nuke/startup/KnobScripter/knob_scripter.py +++ b/openpype/hosts/nuke/startup/KnobScripter/knob_scripter.py @@ -1,6 +1,6 @@ # ------------------------------------------------- # KnobScripter by Adrian Pueyo -# Complete python sript editor for Nuke +# Complete python script editor for Nuke # adrianpueyo.com, 2016-2019 import string import traceback @@ -2539,7 +2539,7 @@ class KnobScripterTextEdit(QtWidgets.QPlainTextEdit): if self.noSelection: self.cursor.setPosition(self.lastChar) - # check whether the the orignal selection was from top to bottom or vice versa + # check whether the the original selection was from top to bottom or vice versa else: if self.originalPosition == self.firstChar: first = self.lastChar @@ -3012,7 +3012,7 @@ class KnobScripterTextEditMain(KnobScripterTextEdit): return match_key, match_snippet def placeholderToEnd(self, text, placeholder): - '''Returns distance (int) from the first ocurrence of the placeholder, to the end of the string with placeholders removed''' + '''Returns distance (int) from the first occurrence of the placeholder, to the end of the string with placeholders removed''' search = re.search(placeholder, text) if not search: return -1 @@ -3671,7 +3671,7 @@ class KnobScripterPrefs(QtWidgets.QDialog): def updateContext(): ''' - Get the current selection of nodes with their appropiate context + Get the current selection of nodes with their appropriate context Doing this outside the KnobScripter -> forces context update inside groups when needed ''' global knobScripterSelectedNodes diff --git a/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py b/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py index 8704627b12..74b612fc72 100644 --- a/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py +++ b/openpype/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -12,7 +12,7 @@ stub = photoshop.stub() class ImageFromSequenceLoader(api.Loader): - """ Load specifing image from sequence + """ Load specific image from sequence Used only as quick load of reference file from a sequence. diff --git a/openpype/hosts/resolve/README.markdown b/openpype/hosts/resolve/README.markdown index 50664fbd21..8c9f72fb0c 100644 --- a/openpype/hosts/resolve/README.markdown +++ b/openpype/hosts/resolve/README.markdown @@ -4,10 +4,10 @@ - add absolute path to ffmpeg into openpype settings ![image](https://user-images.githubusercontent.com/40640033/102630786-43294f00-414d-11eb-98de-f0ae51f62077.png) - install Python 3.6 into `%LOCALAPPDATA%/Programs/Python/Python36` (only respected path by Resolve) -- install OpenTimelineIO for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move builded files from `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/`. 
I was building it on Win10 machine with Visual Studio Community 2019 and +- install OpenTimelineIO for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install git+https://github.com/PixarAnimationStudios/OpenTimelineIO.git@5aa24fbe89d615448876948fe4b4900455c9a3e8` and move built files from `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/cxx-libs/bin and lib` to `%LOCALAPPDATA%/Programs/Python/Python36/Lib/site-packages/opentimelineio/`. I was building it on Win10 machine with Visual Studio Community 2019 and ![image](https://user-images.githubusercontent.com/40640033/102792588-ffcb1c80-43a8-11eb-9c6b-bf2114ed578e.png) with installed CMake in PATH. - install PySide2 for 3.6 `%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install PySide2` -- make sure Resovle Fusion (Fusion Tab/menu/Fusion/Fusion Setings) is set to Python 3.6 +- make sure Resolve Fusion (Fusion Tab/menu/Fusion/Fusion Settings) is set to Python 3.6 ![image](https://user-images.githubusercontent.com/40640033/102631545-280b0f00-414e-11eb-89fc-98ac268d209d.png) #### Editorial setup diff --git a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt b/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt index a24a053cd7..f1b8b81a71 100644 --- a/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt +++ b/openpype/hosts/resolve/RESOLVE_API_README_v16.2.0_up.txt @@ -366,7 +366,7 @@ TimelineItem DeleteTakeByIndex(idx) --> Bool # Deletes a take by index, 1 <= idx <= number of takes. SelectTakeByIndex(idx) --> Bool # Selects a take by index, 1 <= idx <= number of takes. FinalizeTake() --> Bool # Finalizes take selection. - CopyGrades([tgtTimelineItems]) --> Bool # Copies the current grade to all the items in tgtTimelineItems list. Returns True on success and False if any error occured. + CopyGrades([tgtTimelineItems]) --> Bool # Copies the current grade to all the items in tgtTimelineItems list. Returns True on success and False if any error occurred. 
List and Dict Data Structures diff --git a/openpype/hosts/resolve/api/lib.py b/openpype/hosts/resolve/api/lib.py index aa4b2e7219..22f83c6eed 100644 --- a/openpype/hosts/resolve/api/lib.py +++ b/openpype/hosts/resolve/api/lib.py @@ -16,7 +16,7 @@ self = sys.modules[__name__] self.project_manager = None self.media_storage = None -# OpenPype sequencial rename variables +# OpenPype sequential rename variables self.rename_index = 0 self.rename_add = 0 @@ -59,7 +59,7 @@ def maintain_current_timeline(to_timeline: object, project = get_current_project() working_timeline = from_timeline or project.GetCurrentTimeline() - # swith to the input timeline + # switch to the input timeline project.SetCurrentTimeline(to_timeline) try: @@ -566,7 +566,7 @@ def create_compound_clip(clip_data, name, folder): mp_in_rc = opentime.RationalTime((ci_l_offset), rate) mp_out_rc = opentime.RationalTime((ci_l_offset + ci_duration - 1), rate) - # get frame in and out for clip swaping + # get frame in and out for clip swapping in_frame = opentime.to_frames(mp_in_rc) out_frame = opentime.to_frames(mp_out_rc) @@ -628,7 +628,7 @@ def create_compound_clip(clip_data, name, folder): def swap_clips(from_clip, to_clip, to_in_frame, to_out_frame): """ - Swaping clips on timeline in timelineItem + Swapping clips on timeline in timelineItem It will add take and activate it to the frame range which is inputted @@ -699,7 +699,7 @@ def get_pype_clip_metadata(clip): def get_clip_attributes(clip): """ - Collect basic atrributes from resolve timeline item + Collect basic attributes from resolve timeline item Args: clip (resolve.TimelineItem): timeline item object diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index ce95cfe02a..8b7e2a6c6a 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -64,7 +64,7 @@ def install(): def uninstall(): - """Uninstall all tha was installed + """Uninstall all that was installed This is where you undo everything that was done in `install()`. That means, removing menus, deregistering families and data diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index f1c55a6180..8612cf82ec 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -133,7 +133,7 @@ class CreatorWidget(QtWidgets.QDialog): # convert label text to normal capitalized text with spaces label_text = self.camel_case_split(text) - # assign the new text to lable widget + # assign the new text to label widget label = QtWidgets.QLabel(label_text) label.setObjectName("LineLabel") @@ -367,7 +367,7 @@ class ClipLoader: def _get_asset_data(self): """ Get all available asset data - joint `data` key with asset.data dict into the representaion + joint `data` key with asset.data dict into the representation """ asset_name = self.context["representation"]["context"]["asset"] @@ -540,8 +540,8 @@ class PublishClip: "track": "sequence", } - # parents search patern - parents_search_patern = r"\{([a-z]*?)\}" + # parents search pattern + parents_search_pattern = r"\{([a-z]*?)\}" # default templates for non-ui use rename_default = False @@ -630,7 +630,7 @@ class PublishClip: return self.timeline_item def _populate_timeline_item_default_data(self): - """ Populate default formating data from track item. """ + """ Populate default formatting data from track item. 
""" self.timeline_item_default_data = { "_folder_": "shots", @@ -722,7 +722,7 @@ class PublishClip: # mark review layer if self.review_track and ( self.review_track not in self.review_track_default): - # if review layer is defined and not the same as defalut + # if review layer is defined and not the same as default self.review_layer = self.review_track # shot num calculate if self.rename_index == 0: @@ -771,7 +771,7 @@ class PublishClip: # in case track name and subset name is the same then add if self.subset_name == self.track_name: hero_data["subset"] = self.subset - # assing data to return hierarchy data to tag + # assign data to return hierarchy data to tag tag_hierarchy_data = hero_data # add data to return data dict @@ -823,8 +823,8 @@ class PublishClip: """ Create parents and return it in list. """ self.parents = [] - patern = re.compile(self.parents_search_patern) - par_split = [patern.findall(t).pop() + pattern = re.compile(self.parents_search_pattern) + par_split = [pattern.findall(t).pop() for t in self.hierarchy.split("/")] for key in par_split: diff --git a/openpype/hosts/resolve/api/testing_utils.py b/openpype/hosts/resolve/api/testing_utils.py index 98ad6abcf1..4aac66f4b7 100644 --- a/openpype/hosts/resolve/api/testing_utils.py +++ b/openpype/hosts/resolve/api/testing_utils.py @@ -25,12 +25,12 @@ class TestGUI: ui.Button( { "ID": "inputTestSourcesFolder", - "Text": "Select folder with testing medias", + "Text": "Select folder with testing media", "Weight": 1.25, "ToolTip": ( "Chose folder with videos, sequences, " "single images, nested folders with " - "medias" + "media" ), "Flat": False } diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index bcb27e24fc..978e3760fd 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -15,7 +15,7 @@ class ResolvePrelaunch(PreLaunchHook): def execute(self): # TODO: add OTIO installation from `openpype/requirements.py` - # making sure pyton 3.6 is installed at provided path + # making sure python 3.6 is installed at provided path py36_dir = os.path.normpath( self.launch_context.env.get("PYTHON36_RESOLVE", "")) assert os.path.isdir(py36_dir), ( diff --git a/openpype/hosts/resolve/otio/davinci_export.py b/openpype/hosts/resolve/otio/davinci_export.py index 2c276d9888..5f11c81fc5 100644 --- a/openpype/hosts/resolve/otio/davinci_export.py +++ b/openpype/hosts/resolve/otio/davinci_export.py @@ -306,7 +306,7 @@ def create_otio_timeline(resolve_project): if index == 0: otio_track.append(clip) else: - # add previouse otio track to timeline + # add previous otio track to timeline otio_timeline.tracks.append(otio_track) # convert track to otio otio_track = create_otio_track( diff --git a/openpype/hosts/resolve/plugins/create/create_shot_clip.py b/openpype/hosts/resolve/plugins/create/create_shot_clip.py index 41fdbf5c61..62d5557a50 100644 --- a/openpype/hosts/resolve/plugins/create/create_shot_clip.py +++ b/openpype/hosts/resolve/plugins/create/create_shot_clip.py @@ -135,7 +135,7 @@ class CreateShotClip(resolve.Creator): "type": "QComboBox", "label": "Subset Name", "target": "ui", - "toolTip": "chose subset name patern, if is selected, name of track layer will be used", # noqa + "toolTip": "chose subset name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, "subsetFamily": { "value": ["plate", "take"], diff --git a/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py 
b/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py index b1037a9c93..b0cef1838a 100644 --- a/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py +++ b/openpype/hosts/resolve/utility_scripts/__OpenPype__Menu__.py @@ -16,7 +16,7 @@ def main(env): # activate resolve from openpype avalon.install(bmdvr) - log.info(f"Avalon registred hosts: {avalon.registered_host()}") + log.info(f"Avalon registered hosts: {avalon.registered_host()}") bmdvr.launch_pype_menu() diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py index 45c6a264dd..d0d36bb717 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_instances.py @@ -83,7 +83,7 @@ class CollectInstances(pyblish.api.InstancePlugin): if isinstance(clip, otio.schema.Gap): continue - # skip all generators like black ampty + # skip all generators like black empty if isinstance( clip.media_reference, otio.schema.GeneratorReference): @@ -142,7 +142,7 @@ class CollectInstances(pyblish.api.InstancePlugin): "item": clip, "clipName": clip_name, - # parent time properities + # parent time properties "trackStartFrame": track_start_frame, "handleStart": handle_start, "handleEnd": handle_end, @@ -180,14 +180,14 @@ class CollectInstances(pyblish.api.InstancePlugin): "families": [] } }) - for subset, properities in self.subsets.items(): - version = properities.get("version") + for subset, properties in self.subsets.items(): + version = properties.get("version") if version == 0: - properities.pop("version") + properties.pop("version") # adding Review-able instance subset_instance_data = deepcopy(instance_data) - subset_instance_data.update(deepcopy(properities)) + subset_instance_data.update(deepcopy(properties)) subset_instance_data.update({ # unique attributes "name": f"{name}_{subset}", diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py index 36bacceb1c..4d7a13fcf2 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_editorial_resources.py @@ -31,7 +31,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): editorial_source_root = instance.data["editorialSourceRoot"] editorial_source_path = instance.data["editorialSourcePath"] - # if `editorial_source_path` then loop trough + # if `editorial_source_path` then loop through if editorial_source_path: # add family if mov or mp4 found which is longer for # cutting `trimming` to enable `ExtractTrimmingVideoAudio` plugin @@ -42,7 +42,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): instance.data["families"] += ["trimming"] return - # if template patern in path then fill it with `anatomy_data` + # if template pattern in path then fill it with `anatomy_data` if "{" in editorial_source_root: editorial_source_root = editorial_source_root.format( **anatomy_data) @@ -86,7 +86,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): subset_files.update({clip_dir_path: subset_files_items}) # break the loop if correct_clip_dir was captured - # no need to cary on if corect folder was found + # no need to carry on if correct folder was found if correct_clip_dir: break @@ -113,10 +113,10 @@ class
CollectInstanceResources(pyblish.api.InstancePlugin): unique_subset_names = list() root_dir = list(subset_files.keys()).pop() files_list = subset_files[root_dir] - search_patern = f"({subset}[A-Za-z0-9]+)(?=[\\._\\s])" + search_pattern = f"({subset}[A-Za-z0-9]+)(?=[\\._\\s])" for _file in files_list: - patern = re.compile(search_patern) - match = patern.findall(_file) + pattern = re.compile(search_pattern) + match = pattern.findall(_file) if not match: continue match_subset = match.pop() @@ -175,7 +175,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): instance_data["representations"] = list() collection_head_name = None - # loop trough collections and create representations + # loop through collections and create representations for _collection in collections: ext = _collection.tail[1:] collection_head_name = _collection.head @@ -210,7 +210,7 @@ class CollectInstanceResources(pyblish.api.InstancePlugin): frames.append(frame_start) frames.append(frame_end) - # loop trough reminders and create representations + # loop through reminders and create representations for _reminding_file in remainder: ext = os.path.splitext(_reminding_file)[-1][1:] if ext not in instance_data["extensions"]: diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py index acad98d784..b2735f3428 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py @@ -99,7 +99,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): # in case SP context is set to the same folder if (_index == 0) and ("folder" in parent_key) \ and (parents[-1]["entity_name"] == parent_filled): - self.log.debug(f" skiping : {parent_filled}") + self.log.debug(f" skipping : {parent_filled}") continue # in case first parent is project then start parents from start @@ -119,7 +119,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): # convert hierarchy to string hierarchy = "/".join(hierarchy) - # assing to instance data + # assign to instance data instance.data["hierarchy"] = hierarchy instance.data["parents"] = parents @@ -202,7 +202,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): class CollectHierarchyContext(pyblish.api.ContextPlugin): - '''Collecting Hierarchy from instaces and building + '''Collecting Hierarchy from instances and building context hierarchy tree ''' diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_representation_names.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_representation_names.py index c9063c22ed..82dbba3345 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_representation_names.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_representation_names.py @@ -8,7 +8,7 @@ class CollectRepresentationNames(pyblish.api.InstancePlugin): Sets the representation names for given families based on RegEx filter """ - label = "Collect Representaion Names" + label = "Collect Representation Names" order = pyblish.api.CollectorOrder families = [] hosts = ["standalonepublisher"] diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py index f210be3631..4bafe81020 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py +++ 
b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_name.py @@ -16,7 +16,7 @@ class ValidateTextureBatchNaming(pyblish.api.InstancePlugin): if isinstance(file_name, list): file_name = file_name[0] - msg = "Couldnt find asset name in '{}'\n".format(file_name) + \ + msg = "Couldn't find asset name in '{}'\n".format(file_name) + \ "File name doesn't follow configured pattern.\n" + \ "Please rename the file." assert "NOT_AVAIL" not in instance.data["asset_build"], msg diff --git a/openpype/hosts/tvpaint/api/communication_server.py b/openpype/hosts/tvpaint/api/communication_server.py index 6c8aca5445..c8d6d3b458 100644 --- a/openpype/hosts/tvpaint/api/communication_server.py +++ b/openpype/hosts/tvpaint/api/communication_server.py @@ -351,7 +351,7 @@ class QtTVPaintRpc(BaseTVPaintRpc): async def scene_inventory_tool(self): """Open Scene Inventory tool. - Funciton can't confirm if tool was opened becauise one part of + Function can't confirm if tool was opened because one part of SceneInventory initialization is calling websocket request to host but host can't response because is waiting for response from this call. """ @@ -578,7 +578,7 @@ class BaseCommunicator: # Folder for right windows plugin files source_plugins_dir = os.path.join(plugin_files_path, subfolder) - # Path to libraies (.dll) required for plugin library + # Path to libraries (.dll) required for plugin library # - additional libraries can be copied to TVPaint installation folder # (next to executable) or added to PATH environment variable additional_libs_folder = os.path.join( diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py index 654aff19d8..9e6404e72f 100644 --- a/openpype/hosts/tvpaint/api/lib.py +++ b/openpype/hosts/tvpaint/api/lib.py @@ -159,7 +159,7 @@ def get_layers_data(layer_ids=None, communicator=None): def parse_group_data(data): - """Paser group data collected in 'get_groups_data'.""" + """Parse group data collected in 'get_groups_data'.""" output = [] groups_raw = data.split("\n") for group_raw in groups_raw: diff --git a/openpype/hosts/tvpaint/api/pipeline.py b/openpype/hosts/tvpaint/api/pipeline.py index e7c5159bbc..6b4632e2f2 100644 --- a/openpype/hosts/tvpaint/api/pipeline.py +++ b/openpype/hosts/tvpaint/api/pipeline.py @@ -112,7 +112,7 @@ def containerise( members (list): List of members that were loaded and belongs to the container (layer names). current_containers (list): Preloaded containers. Should be used only - on update/switch when containers were modified durring the process. + on update/switch when containers were modified during the process. Returns: dict: Container data stored to workfile metadata. @@ -166,7 +166,7 @@ def split_metadata_string(text, chunk_length=None): set to global variable `TVPAINT_CHUNK_LENGTH`. Returns: - list: List of strings wil at least one item. + list: List of strings with at least one item. """ if chunk_length is None: chunk_length = TVPAINT_CHUNK_LENGTH diff --git a/openpype/hosts/tvpaint/api/plugin.py b/openpype/hosts/tvpaint/api/plugin.py index e65c25b8d1..af80c9eae2 100644 --- a/openpype/hosts/tvpaint/api/plugin.py +++ b/openpype/hosts/tvpaint/api/plugin.py @@ -35,7 +35,7 @@ class Creator(PypeCreatorMixin, avalon.api.Creator): def are_instances_same(instance_1, instance_2): """Compare instances but skip keys with unique values. - During compare are skiped keys that will be 100% sure + During compare are skipped keys that will be 100% sure different on new instance, like "id".
Returns: diff --git a/openpype/hosts/tvpaint/lib.py b/openpype/hosts/tvpaint/lib.py index 513bb2d952..715ebb4a9d 100644 --- a/openpype/hosts/tvpaint/lib.py +++ b/openpype/hosts/tvpaint/lib.py @@ -278,7 +278,7 @@ def _cleanup_out_range_frames(output_idx_by_frame_idx, range_start, range_end): } // Result { - 2: 2, // Redirect to self as is first that refence out range + 2: 2, // Redirect to self as is first that reference out range 3: 2 // Redirect to first redirected frame } ``` @@ -593,7 +593,7 @@ def composite_rendered_layers( transparent_filepaths.add(dst_filepath) continue - # Store first destionation filepath to be used for transparent images + # Store first destination filepath to be used for transparent images if first_dst_filepath is None: first_dst_filepath = dst_filepath @@ -657,7 +657,7 @@ def rename_filepaths_by_frame_start( max(range_end, new_frame_end) ) - # Use differnet ranges based on Mark In and output Frame Start values + # Use different ranges based on Mark In and output Frame Start values # - this is to make sure that filename renaming won't affect files that # are not renamed yet if range_start < new_frame_start: diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py index 31d2fd1fd5..9cbfb61550 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py @@ -77,7 +77,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Project name from workfile context project_name = context.data["workfile_context"]["project"] - # Host name from environemnt variable + # Host name from environment variable host_name = os.environ["AVALON_APP"] # Use empty variant value variant = "" diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py index 68ba350a85..89348037d3 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile.py @@ -35,7 +35,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin): # Project name from workfile context project_name = context.data["workfile_context"]["project"] - # Host name from environemnt variable + # Host name from environment variable host_name = os.environ["AVALON_APP"] # Use empty variant value variant = "" diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index b6b8bd0d9e..729c545545 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -168,7 +168,7 @@ class ExtractSequence(pyblish.api.Extractor): if single_file: repre_files = repre_files[0] - # Extension is harcoded + # Extension is hardcoded # - changing extension would require change code new_repre = { "name": "png", @@ -235,7 +235,7 @@ class ExtractSequence(pyblish.api.Extractor): scene_bg_color (list): Bg color set in scene. Result of george script command `tv_background`. - Retruns: + Returns: tuple: With 2 items first is list of filenames second is path to thumbnail. """ @@ -311,7 +311,7 @@ class ExtractSequence(pyblish.api.Extractor): mark_out (int): On which frame index export will end. layers (list): List of layers to be exported. - Retruns: + Returns: tuple: With 2 items first is list of filenames second is path to thumbnail. 
""" diff --git a/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py b/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py index c9f2434cef..24d6558168 100644 --- a/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py +++ b/openpype/hosts/tvpaint/plugins/publish/increment_workfile_version.py @@ -15,7 +15,7 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin): def process(self, context): assert all(result["success"] for result in context.data["results"]), ( - "Publishing not succesfull so version is not increased.") + "Publishing not successful so version is not increased.") path = context.data["currentFile"] workio.save_file(version_up(path)) diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py index 9d55bb21a9..f45247ceac 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -44,7 +44,7 @@ class ValidateMarks(pyblish.api.ContextPlugin): handle_start = context.data["handleStart"] handle_end = context.data["handleEnd"] - # Calculate expeted Mark out (Mark In + duration - 1) + # Calculate expected Mark out (Mark In + duration - 1) expected_mark_out = ( scene_mark_in + (frame_end - frame_start) diff --git a/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/README.md b/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/README.md index 03b0a31f51..70a96b2919 100644 --- a/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/README.md +++ b/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/README.md @@ -3,7 +3,7 @@ README for TVPaint Avalon plugin Introduction ------------ This project is dedicated to integrate Avalon functionality to TVPaint. -This implementaiton is using TVPaint plugin (C/C++) which can communicate with python process. The communication should allow to trigger tools or pipeline functions from TVPaint and accept requests from python process at the same time. +This implementation is using TVPaint plugin (C/C++) which can communicate with python process. The communication should allow to trigger tools or pipeline functions from TVPaint and accept requests from python process at the same time. Current implementation is based on websocket protocol, using json-rpc communication (specification 2.0). Project is in beta stage, tested only on Windows. diff --git a/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/library.cpp b/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/library.cpp index a57124084b..bb67715cbd 100644 --- a/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/library.cpp +++ b/openpype/hosts/tvpaint/tvpaint_plugin/plugin_code/library.cpp @@ -41,7 +41,7 @@ static struct { nlohmann::json menuItemsById; std::list menuItemsIds; // Messages from server before processing. - // - messages can't be process at the moment of recieve as client is running in thread + // - messages can't be process at the moment of receive as client is running in thread std::queue messages; // Responses to requests mapped by request id std::map responses; @@ -694,7 +694,7 @@ int newMenuItemsProcess(PIFilter* iFilter) { return 1; } /**************************************************************************************/ -// something happenned that needs our attention. +// something happened that needs our attention. 
// Global variable where current button up data are stored std::string button_up_item_id_str; int FAR PASCAL PI_Msg( PIFilter* iFilter, INTPTR iEvent, INTPTR iReq, INTPTR* iArgs ) diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py index 519d42ce73..1c785ab2ee 100644 --- a/openpype/hosts/tvpaint/worker/worker_job.py +++ b/openpype/hosts/tvpaint/worker/worker_job.py @@ -41,7 +41,7 @@ class BaseCommand: Command also have id which is created on command creation. The idea is that command is just a data container on sender side send - througth server to a worker where is replicated one by one, executed and + through server to a worker where is replicated one by one, executed and result sent back to sender through server. """ @abstractproperty @@ -248,7 +248,7 @@ class ExecuteGeorgeScript(BaseCommand): class CollectSceneData(BaseCommand): - """Helper command which will collect all usefull info about workfile. + """Helper command which will collect all useful info about workfile. Result is dictionary with all layers data, exposure frames by layer ids pre/post behavior of layers by their ids, group information and scene data. diff --git a/openpype/hosts/unreal/api/lib.py b/openpype/hosts/unreal/api/lib.py index c0fafbb667..61dac46fac 100644 --- a/openpype/hosts/unreal/api/lib.py +++ b/openpype/hosts/unreal/api/lib.py @@ -115,7 +115,7 @@ def _darwin_get_engine_version() -> dict: Returns: dict: version as a key and path as a value. - See Aslo: + See Also: :func:`_win_get_engine_versions`. """ diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py index ad37a7a068..e2023e8b47 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py +++ b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py @@ -98,7 +98,7 @@ class PointCacheAlembicLoader(api.Loader): frame_start = context.get('asset').get('data').get('frameStart') frame_end = context.get('asset').get('data').get('frameEnd') - # If frame start and end are the same, we increse the end frame by + # If frame start and end are the same, we increase the end frame by # one, otherwise Unreal will not import it if frame_start == frame_end: frame_end += 1 diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py index 976a14e808..92f581be5f 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_instances.py @@ -28,7 +28,7 @@ class CollectTVPaintInstances(pyblish.api.ContextPlugin): render_layer_pass_name = "beauty" # Set by settings - # Regex must constain 'layer' and 'variant' groups which are extracted from + # Regex must contain 'layer' and 'variant' groups which are extracted from # name when instances are created layer_name_regex = r"(?PL[0-9]{3}_\w+)_(?P.+)" diff --git a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py index 85c8526c83..2142d740a5 100644 --- a/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py +++ b/openpype/hosts/webpublisher/plugins/publish/extract_tvpaint_workfile.py @@ -286,7 +286,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): if single_file: repre_files = repre_files[0] - # Extension is harcoded + # Extension is hardcoded # - changing 
extension would require change code new_repre = { "name": "png", @@ -407,7 +407,7 @@ class ExtractTVPaintSequences(pyblish.api.Extractor): mark_out (int): On which frame index export will end. layers (list): List of layers to be exported. - Retruns: + Returns: tuple: With 2 items first is list of filenames second is path to thumbnail. """ diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 30399a6ba7..135a9cd026 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -335,7 +335,7 @@ class ConfiguredExtensionsEndpoint(_RestApiEndpoint): configured = { "file_exts": set(), "sequence_exts": set(), - # workfiles that could have "Studio Procesing" hardcoded for now + # workfiles that could have "Studio Processing" hardcoded for now "studio_exts": set(["psd", "psb", "tvpp", "tvp"]) } collect_conf = sett["webpublisher"]["publish"]["CollectPublishedFiles"] From 136a163122544a3f011b96758962d7f2c0bc6d1e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 16 Jan 2022 13:16:22 +0100 Subject: [PATCH 078/160] Fix typos in openpype/lib --- openpype/lib/abstract_collect_render.py | 4 ++-- openpype/lib/abstract_submit_deadline.py | 2 +- openpype/lib/anatomy.py | 8 ++++---- openpype/lib/applications.py | 22 +++++++++++----------- openpype/lib/avalon_context.py | 16 ++++++++-------- openpype/lib/editorial.py | 6 +++--- openpype/lib/env_tools.py | 4 ++-- openpype/lib/execute.py | 4 ++-- openpype/lib/git_progress.py | 2 +- openpype/lib/import_utils.py | 2 +- openpype/lib/mongo.py | 2 +- openpype/lib/path_tools.py | 4 ++-- openpype/lib/plugin_tools.py | 4 ++-- openpype/lib/pype_info.py | 2 +- openpype/lib/terminal.py | 2 +- 15 files changed, 42 insertions(+), 42 deletions(-) diff --git a/openpype/lib/abstract_collect_render.py b/openpype/lib/abstract_collect_render.py index 2ac0fe434d..d9c8a0993d 100644 --- a/openpype/lib/abstract_collect_render.py +++ b/openpype/lib/abstract_collect_render.py @@ -49,7 +49,7 @@ class RenderInstance(object): handleStart = attr.ib(default=None) # start frame handleEnd = attr.ib(default=None) # start frame - # for softwares (like Harmony) where frame range cannot be set by DB + # for software (like Harmony) where frame range cannot be set by DB # handles need to be propagated if exist ignoreFrameHandleCheck = attr.ib(default=False) @@ -57,7 +57,7 @@ class RenderInstance(object): # With default values # metadata renderer = attr.ib(default="") # renderer - can be used in Deadline - review = attr.ib(default=False) # genereate review from instance (bool) + review = attr.ib(default=False) # generate review from instance (bool) priority = attr.ib(default=50) # job priority on farm family = attr.ib(default="renderlayer") diff --git a/openpype/lib/abstract_submit_deadline.py b/openpype/lib/abstract_submit_deadline.py index 5b6e1743e0..a0925283ac 100644 --- a/openpype/lib/abstract_submit_deadline.py +++ b/openpype/lib/abstract_submit_deadline.py @@ -485,7 +485,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin): def get_aux_files(self): """Return list of auxiliary files for Deadline job. - If needed this should be overriden, otherwise return empty list as + If needed this should be overridden, otherwise return empty list as that field even empty must be present on Deadline submission. 
Returns: diff --git a/openpype/lib/anatomy.py b/openpype/lib/anatomy.py index 5f7285fe6c..fa81a18ff7 100644 --- a/openpype/lib/anatomy.py +++ b/openpype/lib/anatomy.py @@ -125,7 +125,7 @@ class Anatomy: @staticmethod def _prepare_anatomy_data(anatomy_data): - """Prepare anatomy data for futher processing. + """Prepare anatomy data for further processing. Method added to replace `{task}` with `{task[name]}` in templates. """ @@ -722,7 +722,7 @@ class Templates: First is collecting all global keys (keys in top hierarchy where value is not dictionary). All global keys are set for all group keys (keys in top hierarchy where value is dictionary). Value of a key is not - overriden in group if already contain value for the key. + overridden in group if already contain value for the key. In second part all keys with "at" symbol in value are replaced with value of the key afterward "at" symbol from the group. @@ -802,7 +802,7 @@ class Templates: Result: tuple: Contain origin template without missing optional keys and - withoud optional keys identificator ("<" and ">"), information + without optional keys identificator ("<" and ">"), information about missing optional keys and invalid types of optional keys. """ @@ -1628,7 +1628,7 @@ class Roots: This property returns roots for current project or default root values. Warning: Default roots value may cause issues when project use different - roots settings. That may happend when project use multiroot + roots settings. That may happen when project use multiroot templates but default roots miss their keys. """ if self.project_name != self.loaded_project: diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index d0438e12a6..0e1f44391e 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -47,7 +47,7 @@ DEFAULT_ENV_SUBGROUP = "standard" def parse_environments(env_data, env_group=None, platform_name=None): - """Parse environment values from settings byt group and platfrom. + """Parse environment values from settings byt group and platform. Data may contain up to 2 hierarchical levels of dictionaries. At the end of the last level must be string or list. List is joined using platform @@ -261,7 +261,7 @@ class Application: data (dict): Data for the version containing information about executables, variant label or if is enabled. Only required key is `executables`. - group (ApplicationGroup): App group object that created the applicaiton + group (ApplicationGroup): App group object that created the application and under which application belongs. """ @@ -775,7 +775,7 @@ class PostLaunchHook(LaunchHook): class ApplicationLaunchContext: """Context of launching application. - Main purpose of context is to prepare launch arguments and keword arguments + Main purpose of context is to prepare launch arguments and keyword arguments for new process. Most important part of keyword arguments preparations are environment variables. @@ -969,7 +969,7 @@ class ApplicationLaunchContext: hook = klass(self) if not hook.is_valid: self.log.debug( - "Hook is not valid for curent launch context." + "Hook is not valid for current launch context." ) continue @@ -1113,7 +1113,7 @@ class ApplicationLaunchContext: )) # TODO how to handle errors? - # - store to variable to let them accesible? + # - store to variable to let them accessible? try: postlaunch_hook.execute() @@ -1357,11 +1357,11 @@ def apply_project_environments_value( ): """Apply project specific environments on passed environments. 
- The enviornments are applied on passed `env` argument value so it is not + The environments are applied on passed `env` argument value so it is not required to apply changes back. Args: - project_name (str): Name of project for which environemnts should be + project_name (str): Name of project for which environments should be received. env (dict): Environment values on which project specific environments will be applied. @@ -1391,7 +1391,7 @@ def apply_project_environments_value( def prepare_context_environments(data, env_group=None): - """Modify launch environemnts with context data for launched host. + """Modify launch environments with context data for launched host. Args: data (EnvironmentPrepData): Dictionary where result and intermediate @@ -1463,7 +1463,7 @@ def prepare_context_environments(data, env_group=None): "AVALON_WORKDIR": workdir } log.debug( - "Context environemnts set:\n{}".format( + "Context environments set:\n{}".format( json.dumps(context_env, indent=4) ) ) @@ -1567,7 +1567,7 @@ def should_start_last_workfile( ): """Define if host should start last version workfile if possible. - Default output is `False`. Can be overriden with environment variable + Default output is `False`. Can be overridden with environment variable `AVALON_OPEN_LAST_WORKFILE`, valid values without case sensitivity are `"0", "1", "true", "false", "yes", "no"`. @@ -1617,7 +1617,7 @@ def should_workfile_tool_start( ): """Define if host should start workfile tool at host launch. - Default output is `False`. Can be overriden with environment variable + Default output is `False`. Can be overridden with environment variable `OPENPYPE_WORKFILE_TOOL_ON_START`, valid values without case sensitivity are `"0", "1", "true", "false", "yes", "no"`. diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 8180e416a9..1254580657 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -443,7 +443,7 @@ def get_workfile_template_key( Function is using profiles from project settings to return right template for passet task type and host name. - One of 'project_name' or 'project_settings' must be passed it is preffered + One of 'project_name' or 'project_settings' must be passed it is preferred to pass settings if are already available. Args: @@ -545,7 +545,7 @@ def get_workdir_with_workdir_data( """Fill workdir path from entered data and project's anatomy. It is possible to pass only project's name instead of project's anatomy but - one of them **must** be entered. It is preffered to enter anatomy if is + one of them **must** be entered. It is preferred to enter anatomy if is available as initialization of a new Anatomy object may be time consuming. Args: @@ -582,7 +582,7 @@ def get_workdir_with_workdir_data( ) anatomy_filled = anatomy.format(workdir_data) - # Output is TemplateResult object which contain usefull data + # Output is TemplateResult object which contain useful data return anatomy_filled[template_key]["folder"] @@ -604,7 +604,7 @@ def get_workdir( because workdir template may contain `{app}` key. In `Session` is stored under `AVALON_APP` key. anatomy (Anatomy): Optional argument. Anatomy object is created using - project name from `project_doc`. It is preffered to pass this + project name from `project_doc`. It is preferred to pass this argument as initialization of a new Anatomy object may be time consuming. template_key (str): Key of work templates in anatomy templates. 
Default @@ -619,7 +619,7 @@ def get_workdir( workdir_data = get_workdir_data( project_doc, asset_doc, task_name, host_name ) - # Output is TemplateResult object which contain usefull data + # Output is TemplateResult object which contain useful data return get_workdir_with_workdir_data( workdir_data, anatomy, template_key=template_key ) @@ -1036,7 +1036,7 @@ class BuildWorkfile: return valid_profiles def _prepare_profile_for_subsets(self, subsets, profiles): - """Select profile for each subset byt it's data. + """Select profile for each subset by it's data. Profiles are filtered for each subset individually. Profile is filtered by subset's family, optionally by name regex and @@ -1197,7 +1197,7 @@ class BuildWorkfile: Representations are tried to load by names defined in configuration. If subset has representation matching representation name each loader is tried to load it until any is successful. If none of them was - successful then next reprensentation name is tried. + successful then next representation name is tried. Subset process loop ends when any representation is loaded or all matching representations were already tried. @@ -1240,7 +1240,7 @@ class BuildWorkfile: print("representations", representations) - # Load ordered reprensentations. + # Load ordered representations. for subset_id, repres in representations_ordered: subset_name = subsets_by_id[subset_id]["name"] diff --git a/openpype/lib/editorial.py b/openpype/lib/editorial.py index 8e8e365bdb..bf868953ea 100644 --- a/openpype/lib/editorial.py +++ b/openpype/lib/editorial.py @@ -116,7 +116,7 @@ def range_from_frames(start, duration, fps): fps (float): frame range Returns: - otio._ot._ot.TimeRange: crated range + otio._ot._ot.TimeRange: created range """ return _ot.TimeRange( @@ -131,7 +131,7 @@ def frames_to_secons(frames, framerate): Args: frames (int): frame - framerate (flaot): frame rate + framerate (float): frame rate Returns: float: second value @@ -257,7 +257,7 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end): ((source_range.duration.value - 1) * abs( time_scalar)) + offset_out)) - # calculate available hanles + # calculate available handles if (media_in_trimmed - media_in) < handle_start: handle_start = (media_in_trimmed - media_in) if (media_out - media_out_trimmed) < handle_end: diff --git a/openpype/lib/env_tools.py b/openpype/lib/env_tools.py index ede14e00b2..6521d20f1e 100644 --- a/openpype/lib/env_tools.py +++ b/openpype/lib/env_tools.py @@ -28,11 +28,11 @@ def env_value_to_bool(env_key=None, value=None, default=False): def get_paths_from_environ(env_key=None, env_value=None, return_first=False): - """Return existing paths from specific envirnment variable. + """Return existing paths from specific environment variable. Args: env_key (str): Environment key where should look for paths. - env_value (str): Value of environemnt variable. Argument `env_key` is + env_value (str): Value of environment variable. Argument `env_key` is skipped if this argument is entered. return_first (bool): Return first found value or return list of found paths. `None` or empty list returned if nothing found. diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py index f97617d906..6f18a399b4 100644 --- a/openpype/lib/execute.py +++ b/openpype/lib/execute.py @@ -79,7 +79,7 @@ def run_subprocess(*args, **kwargs): Args: *args: Variable length arument list passed to Popen. - **kwargs : Arbitary keyword arguments passed to Popen. Is possible to + **kwargs : Arbitrary keyword arguments passed to Popen. 
Is possible to pass `logging.Logger` object under "logger" if want to use different than lib's logger. @@ -119,7 +119,7 @@ def run_subprocess(*args, **kwargs): if _stderr: _stderr = _stderr.decode("utf-8") - # Add additional line break if output already containt stdout + # Add additional line break if output already contains stdout if full_output: full_output += "\n" full_output += _stderr diff --git a/openpype/lib/git_progress.py b/openpype/lib/git_progress.py index e9cf9a12e1..331b7b6745 100644 --- a/openpype/lib/git_progress.py +++ b/openpype/lib/git_progress.py @@ -33,7 +33,7 @@ class _GitProgress(git.remote.RemoteProgress): self._t.close() def _detroy_tqdm(self): - """ Used to close tqdm when opration ended. + """ Used to close tqdm when operation ended. """ if self._t is not None: diff --git a/openpype/lib/import_utils.py b/openpype/lib/import_utils.py index 4e72618803..e88c07fca6 100644 --- a/openpype/lib/import_utils.py +++ b/openpype/lib/import_utils.py @@ -14,7 +14,7 @@ def discover_host_vendor_module(module_name): pype_root, "hosts", host, "vendor", main_module) log.debug( - "Importing moduel from host vendor path: `{}`".format(module_path)) + "Importing module from host vendor path: `{}`".format(module_path)) if not os.path.exists(module_path): log.warning( diff --git a/openpype/lib/mongo.py b/openpype/lib/mongo.py index 7e0bd4f796..c08e76c75c 100644 --- a/openpype/lib/mongo.py +++ b/openpype/lib/mongo.py @@ -24,7 +24,7 @@ def _decompose_url(url): validation pass. """ # Use first url from passed url - # - this is beacuse it is possible to pass multiple urls for multiple + # - this is because it is possible to pass multiple urls for multiple # replica sets which would crash on urlparse otherwise # - please don't use comma in username of password url = url.split(",")[0] diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 12e9e2db9c..c0b78c5724 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -116,10 +116,10 @@ def get_last_version_from_path(path_dir, filter): filtred_files = list() # form regex for filtering - patern = r".*".join(filter) + pattern = r".*".join(filter) for file in os.listdir(path_dir): - if not re.findall(patern, file): + if not re.findall(pattern, file): continue filtred_files.append(file) diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py index 7c66f9760d..183aad939a 100644 --- a/openpype/lib/plugin_tools.py +++ b/openpype/lib/plugin_tools.py @@ -164,7 +164,7 @@ def prepare_template_data(fill_pairs): """ Prepares formatted data for filling template. - It produces mutliple variants of keys (key, Key, KEY) to control + It produces multiple variants of keys (key, Key, KEY) to control format of filled template. Args: @@ -288,7 +288,7 @@ def set_plugin_attributes_from_settings( if project_name is None: project_name = os.environ.get("AVALON_PROJECT") - # map plugin superclass to preset json. Currenly suppoted is load and + # map plugin superclass to preset json. Currently supported is load and # create (avalon.api.Loader and avalon.api.Creator) plugin_type = None if superclass.__name__.split(".")[-1] in ("Loader", "SubsetLoader"): diff --git a/openpype/lib/pype_info.py b/openpype/lib/pype_info.py index 33715e369d..378f186f23 100644 --- a/openpype/lib/pype_info.py +++ b/openpype/lib/pype_info.py @@ -62,7 +62,7 @@ def is_running_staging(): """Currently used OpenPype is staging version. Returns: - bool: True if openpype version containt 'staging'. + bool: True if openpype version contains 'staging'. 
""" if "staging" in get_openpype_version(): return True diff --git a/openpype/lib/terminal.py b/openpype/lib/terminal.py index ddc917ac4e..bc0744931a 100644 --- a/openpype/lib/terminal.py +++ b/openpype/lib/terminal.py @@ -130,7 +130,7 @@ class Terminal: def _multiple_replace(text, adict): """Replace multiple tokens defined in dict. - Find and replace all occurances of strings defined in dict is + Find and replace all occurrences of strings defined in dict is supplied string. Args: From cb489c055e987423104301493b71d05747a5729e Mon Sep 17 00:00:00 2001 From: karimmozlia Date: Mon, 17 Jan 2022 11:03:00 +0200 Subject: [PATCH 079/160] add family and representation --- openpype/hosts/maya/plugins/load/load_vrayproxy.py | 4 ++-- openpype/hosts/maya/vendor/studiolibrary | 1 + repos/avalon-core | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) create mode 160000 openpype/hosts/maya/vendor/studiolibrary diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index e70f40bf5a..2e86012d8d 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -17,8 +17,8 @@ from openpype.api import get_project_settings class VRayProxyLoader(api.Loader): """Load VRay Proxy with Alembic or VrayMesh.""" - families = ["vrayproxy"] - representations = ["vrmesh"] + families = ["vrayproxy", "model"] + representations = ["vrmesh", "abc"] label = "Import VRay Proxy" order = -10 diff --git a/openpype/hosts/maya/vendor/studiolibrary b/openpype/hosts/maya/vendor/studiolibrary new file mode 160000 index 0000000000..f29e350da9 --- /dev/null +++ b/openpype/hosts/maya/vendor/studiolibrary @@ -0,0 +1 @@ +Subproject commit f29e350da9e9508522a740a4f30efb93b99c89d3 diff --git a/repos/avalon-core b/repos/avalon-core index ffe9e910f1..7e5efd6885 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit ffe9e910f1f382e222d457d8e4a8426c41ed43ae +Subproject commit 7e5efd6885330d84bb8495975bcab84df49bfa3d From 582b4a7aafc9a02baac9985c1dcbf103d950ba76 Mon Sep 17 00:00:00 2001 From: karimmozlia Date: Mon, 17 Jan 2022 12:17:42 +0200 Subject: [PATCH 080/160] test without adding family --- openpype/hosts/maya/plugins/load/load_vrayproxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index 2e86012d8d..1e3222873f 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -17,7 +17,7 @@ from openpype.api import get_project_settings class VRayProxyLoader(api.Loader): """Load VRay Proxy with Alembic or VrayMesh.""" - families = ["vrayproxy", "model"] + families = ["vrayproxy"] representations = ["vrmesh", "abc"] label = "Import VRay Proxy" From 918d93b3391a621fadb683b2b6e82217a6e5fe98 Mon Sep 17 00:00:00 2001 From: karimmozlia Date: Mon, 17 Jan 2022 12:35:48 +0200 Subject: [PATCH 081/160] add model family --- openpype/hosts/maya/plugins/load/load_vrayproxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index 1e3222873f..2e86012d8d 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -17,7 +17,7 @@ from openpype.api import get_project_settings class VRayProxyLoader(api.Loader): """Load VRay Proxy with Alembic or 
VrayMesh.""" - families = ["vrayproxy"] + families = ["vrayproxy", "model"] representations = ["vrmesh", "abc"] label = "Import VRay Proxy" From 987c1bc52560646bd818ed07c6e2d8d32850d838 Mon Sep 17 00:00:00 2001 From: karimmozlia Date: Mon, 17 Jan 2022 12:56:08 +0200 Subject: [PATCH 082/160] add animation and pointcache --- openpype/hosts/maya/plugins/load/load_vrayproxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_vrayproxy.py b/openpype/hosts/maya/plugins/load/load_vrayproxy.py index 2e86012d8d..806cf1fd18 100644 --- a/openpype/hosts/maya/plugins/load/load_vrayproxy.py +++ b/openpype/hosts/maya/plugins/load/load_vrayproxy.py @@ -17,7 +17,7 @@ from openpype.api import get_project_settings class VRayProxyLoader(api.Loader): """Load VRay Proxy with Alembic or VrayMesh.""" - families = ["vrayproxy", "model"] + families = ["vrayproxy", "model", "pointcache", "animation"] representations = ["vrmesh", "abc"] label = "Import VRay Proxy" From 7dbb46fcbfb1e24824f3927b86b127ff5329cc39 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Jan 2022 12:29:11 +0100 Subject: [PATCH 083/160] flame: update settings, remove camel case --- .../publish/extract_subset_resources.py | 22 ++++++++++++++----- .../defaults/project_settings/flame.json | 20 +++++++++-------- .../projects_schema/schema_project_flame.json | 13 +++++++---- 3 files changed, 36 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 8bdcf989b6..adb3b1ae9b 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -1,4 +1,5 @@ import os +from pprint import pformat from copy import deepcopy import pyblish.api import openpype.api @@ -35,6 +36,8 @@ class ExtractSubsetResources(openpype.api.Extractor): ] } } + keep_original_representation = False + # hide publisher during exporting hide_ui_on_process = True @@ -42,8 +45,12 @@ class ExtractSubsetResources(openpype.api.Extractor): export_presets_mapping = {} def process(self, instance): - # create representation data - if "representations" not in instance.data: + + if ( + self.keep_original_representation + and "representations" not in instance.data + or not self.keep_original_representation + ): instance.data["representations"] = [] frame_start = instance.data["frameStart"] @@ -74,9 +81,9 @@ class ExtractSubsetResources(openpype.api.Extractor): # loop all preset names and for unique_name, preset_config in export_presets.items(): kwargs = {} - preset_file = preset_config["xmlPresetFile"] - preset_dir = preset_config["xmlPresetDir"] - repre_tags = preset_config["representationTags"] + preset_file = preset_config["xml_preset_file"] + preset_dir = preset_config["xml_preset_dir"] + repre_tags = preset_config["representation_tags"] # validate xml preset file is filled if preset_file == "": @@ -144,7 +151,7 @@ class ExtractSubsetResources(openpype.api.Extractor): representation_data["files"] = files # add frame range - if preset_config["representationAddRange"]: + if preset_config["representation_add_range"]: representation_data.update({ "frameStart": frame_start_handle, "frameEnd": ( @@ -160,3 +167,6 @@ class ExtractSubsetResources(openpype.api.Extractor): self.log.info("Added representation: {}".format( representation_data)) + + self.log.debug("All representations: {}".format( + 
pformat(instance.data["representations"]))) diff --git a/openpype/settings/defaults/project_settings/flame.json b/openpype/settings/defaults/project_settings/flame.json index dfecd8a12e..c81069ef5c 100644 --- a/openpype/settings/defaults/project_settings/flame.json +++ b/openpype/settings/defaults/project_settings/flame.json @@ -3,29 +3,31 @@ "CreateShotClip": { "hierarchy": "{folder}/{sequence}", "clipRename": true, - "clipName": "{track}{sequence}{shot}", + "clipName": "{sequence}{shot}", + "segmentIndex": true, "countFrom": 10, "countSteps": 10, "folder": "shots", "episode": "ep01", - "sequence": "sq01", + "sequence": "a", "track": "{_track_}", - "shot": "sh###", + "shot": "####", "vSyncOn": false, "workfileFrameStart": 1001, - "handleStart": 10, - "handleEnd": 10 + "handleStart": 5, + "handleEnd": 5 } }, "publish": { "ExtractSubsetResources": { + "keep_original_representation": false, "export_presets_mapping": { "exr16fpdwaa": { "ext": "exr", - "xmlPresetDir": "", - "xmlPresetFile": "OpenEXR (16-bit fp DWAA).xml", - "representationAddRange": false, - "representationTags": [] + "xml_preset_dir": "", + "xml_preset_file": "OpenEXR (16-bit fp DWAA).xml", + "representation_add_range": true, + "representation_tags": [] } } } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index 8ad2b11616..b1b1f3539b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -133,6 +133,11 @@ "label": "Extract Subset Resources", "is_group": true, "children": [ + { + "type": "boolean", + "key": "keep_original_representation", + "label": "Publish clip's original media" + }, { "key": "export_presets_mapping", "label": "Export presets mapping", @@ -147,12 +152,12 @@ "type": "text" }, { - "key": "xmlPresetFile", + "key": "xml_preset_file", "label": "XML preset file (with ext)", "type": "text" }, { - "key": "xmlPresetDir", + "key": "xml_preset_dir", "label": "XML preset folder (optional)", "type": "text" }, @@ -161,12 +166,12 @@ }, { "type": "boolean", - "key": "representationAddRange", + "key": "representation_add_range", "label": "Add frame range to representation" }, { "type": "list", - "key": "representationTags", + "key": "representation_tags", "label": "Add representation tags", "object_type": { "type": "text", From d10d6c65be1a800e4146df11eb481e40a2c033e1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 17 Jan 2022 13:55:02 +0100 Subject: [PATCH 084/160] hound: classics ;) --- openpype/hosts/flame/api/lib.py | 1 - openpype/hosts/flame/api/render_utils.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 800afebf41..c76d944e90 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -677,7 +677,6 @@ def get_frame_from_path(path): return None - @contextlib.contextmanager def maintained_object_duplication(item): """Maintain input item duplication diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py index 1cc94f6548..f8dbfe2025 100644 --- a/openpype/hosts/flame/api/render_utils.py +++ b/openpype/hosts/flame/api/render_utils.py @@ -63,7 +63,7 @@ def export_clip(export_path, clip, preset_path, **kwargs): def get_preset_path_by_xml_name(xml_preset_name): def _search_path(root): output = [] - for root, dirs, files in 
os.walk(root): + for root, _dirs, files in os.walk(root): for f in files: if f != xml_preset_name: continue From 98fb1186a4a4d0ee6a8d83f4785acbd5441e71f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20Marinov?= Date: Mon, 17 Jan 2022 20:10:12 +0100 Subject: [PATCH 085/160] Update load_ass.py --- openpype/hosts/maya/plugins/load/load_ass.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_ass.py b/openpype/hosts/maya/plugins/load/load_ass.py index b7d44dd431..891f21916c 100644 --- a/openpype/hosts/maya/plugins/load/load_ass.py +++ b/openpype/hosts/maya/plugins/load/load_ass.py @@ -1,5 +1,6 @@ from avalon import api import openpype.hosts.maya.api.plugin +from openpype.hosts.maya.api.plugin import get_reference_node import os from openpype.api import get_project_settings import clique @@ -111,7 +112,7 @@ class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # Get reference node from container members members = cmds.sets(node, query=True, nodesOnly=True) - reference_node = self._get_reference_node(members) + reference_node = get_reference_node(members) assert os.path.exists(proxyPath), "%s does not exist." % proxyPath From 61bf321f9ba114b13d9c8a139ae2b5d8ad4e674a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Jan 2022 14:04:35 +0100 Subject: [PATCH 086/160] OP-2402 - fixed progress reporting, changed to integer 0-100 --- openpype/lib/remote_publish.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 8074b2d112..95238f5469 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -53,7 +53,7 @@ def start_webpublish_log(dbcon, batch_id, user): "start_date": datetime.now(), "user": user, "status": "in_progress", - "progress": 0.0 + "progress": 0 # integer 0-100, percentage }).inserted_id @@ -103,10 +103,13 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None): _id = ObjectId(_id) log_lines = [] + processed = 0 + log_every = 5 for result in pyblish.util.publish_iter(): for record in result["records"]: log_lines.append("{}: {}".format( result["plugin"].label, record.msg)) + processed += 1 if result["error"]: log.error(error_format.format(**result)) @@ -126,12 +129,14 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None): context = pyblish.api.Context() close_plugin().process(context) sys.exit(1) - else: + elif processed % log_every == 0: + # pyblish returns progress in 0.0 - 2.0 + progress = min(round(result["progress"] / 2 * 100), 99) dbcon.update_one( {"_id": _id}, {"$set": { - "progress": max(result["progress"], 0.95), + "progress": progress, "log": os.linesep.join(log_lines) }} ) @@ -143,7 +148,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None): { "finish_date": datetime.now(), "status": "finished_ok", - "progress": 1, + "progress": 100, "log": os.linesep.join(log_lines) }} ) From 573cb3d1b30426b998be8b55d7176f906b88f776 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Jan 2022 15:16:39 +0100 Subject: [PATCH 087/160] OP-2427 - added BatchReprocessEndpoint Endpoint marks latest batch with status 'error' for reprocessing. 
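
A minimal usage sketch of the route this patch adds (the address, port, batch id and use of the
`requests` library are assumptions for illustration only, not part of the change):

    import requests

    # assumed example values - replace with the real webserver address and batch id
    webserver_url = "http://localhost:8079"
    batch_id = "20220118-example-batch"

    # Mark the latest batch with this id and status "error" for reprocessing.
    # The endpoint responds 200 when the batch was flagged, 404 when it is not found.
    response = requests.post(
        "{}/api/webpublish/reprocess/{}".format(webserver_url, batch_id)
    )
    print(response.status_code, response.json())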
--- .../webserver_service/webpublish_routes.py | 31 +++++++++++++++++++ .../webserver_service/webserver_cli.py | 9 ++++++ 2 files changed, 40 insertions(+) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 30399a6ba7..ff4c583098 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -351,3 +351,34 @@ class ConfiguredExtensionsEndpoint(_RestApiEndpoint): body=self.resource.encode(dict(configured)), content_type="application/json" ) + + +class BatchReprocessEndpoint(_RestApiEndpoint): + """Marks latest 'batch_id' for reprocessing, returns 404 if not found.""" + async def post(self, batch_id) -> Response: + batches = self.dbcon.find({"batch_id": batch_id, + "status": "error"}).sort("_id", -1) + batch = None + if batches: + batch = batches[0] + + if batch: + self.dbcon.update_one( + {"_id": batch["_id"]}, + {"$set": + { + "status": "reprocess" + }} + ) + output = [{"msg": "Batch id {} set to reprocess".format(batch_id)}] + status = 200 + else: + output = [{"msg": "Batch id {} not found".format(batch_id)}] + status = 404 + body = self.resource.encode(output) + + return Response( + status=status, + body=body, + content_type="application/json" + ) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index c96ad8e110..374e7e80a5 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -12,6 +12,7 @@ from .webpublish_routes import ( RestApiResource, OpenPypeRestApiResource, WebpublisherBatchPublishEndpoint, + BatchReprocessEndpoint, WebpublisherTaskPublishEndpoint, WebpublisherHiearchyEndpoint, WebpublisherProjectsEndpoint, @@ -95,6 +96,14 @@ def run_webserver(*args, **kwargs): user_status_endpoint.dispatch ) + webpublisher_batch_reprocess_endpoint = \ + BatchReprocessEndpoint(openpype_resource) + server_manager.add_route( + "POST", + "/api/webpublish/reprocess/{batch_id}", + webpublisher_batch_reprocess_endpoint.dispatch + ) + server_manager.start_server() last_reprocessed = time.time() while True: From 26c5eac8b094a322c26097869fa116e491021c74 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Jan 2022 15:25:45 +0100 Subject: [PATCH 088/160] OP-2427 - refactored names --- .../webserver_service/webpublish_routes.py | 10 +++++----- .../webserver_service/webserver_cli.py | 20 +++++++++---------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index ff4c583098..6606f5cb58 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -61,7 +61,7 @@ class OpenPypeRestApiResource(RestApiResource): self.dbcon = mongo_client[database_name]["webpublishes"] -class WebpublisherProjectsEndpoint(_RestApiEndpoint): +class ProjectsEndpoint(_RestApiEndpoint): """Returns list of dict with project info (id, name).""" async def get(self) -> Response: output = [] @@ -82,7 +82,7 @@ class WebpublisherProjectsEndpoint(_RestApiEndpoint): ) -class WebpublisherHiearchyEndpoint(_RestApiEndpoint): +class HiearchyEndpoint(_RestApiEndpoint): """Returns dictionary with context tree from assets.""" async 
def get(self, project_name) -> Response: query_projection = { @@ -181,7 +181,7 @@ class TaskNode(Node): self["attributes"] = {} -class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): +class BatchPublishEndpoint(_RestApiEndpoint): """Triggers headless publishing of batch.""" async def post(self, request) -> Response: # Validate existence of openpype executable @@ -190,7 +190,7 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): msg = "Non existent OpenPype executable {}".format(openpype_app) raise RuntimeError(msg) - log.info("WebpublisherBatchPublishEndpoint called") + log.info("BatchPublishEndpoint called") content = await request.json() # Each filter have extensions which are checked on first task item @@ -286,7 +286,7 @@ class WebpublisherBatchPublishEndpoint(_RestApiEndpoint): ) -class WebpublisherTaskPublishEndpoint(_RestApiEndpoint): +class TaskPublishEndpoint(_RestApiEndpoint): """Prepared endpoint triggered after each task - for future development.""" async def post(self, request) -> Response: return Response( diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 374e7e80a5..a88b4bbc3e 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -11,14 +11,14 @@ from openpype.lib import PypeLogger from .webpublish_routes import ( RestApiResource, OpenPypeRestApiResource, - WebpublisherBatchPublishEndpoint, + HiearchyEndpoint, + ProjectsEndpoint, + ConfiguredExtensionsEndpoint, + BatchPublishEndpoint, BatchReprocessEndpoint, - WebpublisherTaskPublishEndpoint, - WebpublisherHiearchyEndpoint, - WebpublisherProjectsEndpoint, BatchStatusEndpoint, - PublishesStatusEndpoint, - ConfiguredExtensionsEndpoint + TaskPublishEndpoint, + PublishesStatusEndpoint ) @@ -42,14 +42,14 @@ def run_webserver(*args, **kwargs): upload_dir=kwargs["upload_dir"], executable=kwargs["executable"], studio_task_queue=studio_task_queue) - projects_endpoint = WebpublisherProjectsEndpoint(resource) + projects_endpoint = ProjectsEndpoint(resource) server_manager.add_route( "GET", "/api/projects", projects_endpoint.dispatch ) - hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource) + hiearchy_endpoint = HiearchyEndpoint(resource) server_manager.add_route( "GET", "/api/hierarchy/{project_name}", @@ -65,7 +65,7 @@ def run_webserver(*args, **kwargs): # triggers publish webpublisher_task_publish_endpoint = \ - WebpublisherBatchPublishEndpoint(resource) + BatchPublishEndpoint(resource) server_manager.add_route( "POST", "/api/webpublish/batch", @@ -73,7 +73,7 @@ def run_webserver(*args, **kwargs): ) webpublisher_batch_publish_endpoint = \ - WebpublisherTaskPublishEndpoint(resource) + TaskPublishEndpoint(resource) server_manager.add_route( "POST", "/api/webpublish/task", From c2642974d984583c41df9fe9f5c04541cd208e5c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 10:12:28 +0100 Subject: [PATCH 089/160] do not validate version if build does not support it --- openpype/lib/__init__.py | 2 ++ openpype/tools/tray/pype_tray.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 62d204186d..1c8f7a57af 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -169,6 +169,7 @@ from .editorial import ( ) from .openpype_version import ( + op_version_control_available, get_openpype_version, get_build_version, get_expected_version, @@ 
-306,6 +307,7 @@ __all__ = [ "create_workdir_extra_folders", "get_project_basic_paths", + "op_version_control_available", "get_openpype_version", "get_build_version", "get_expected_version", diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 0d3e7ae04c..c9b8aaa842 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -16,6 +16,7 @@ from openpype.api import ( ) from openpype.lib import ( get_openpype_execute_args, + op_version_control_available, is_current_version_studio_latest, is_running_from_build, is_running_staging, @@ -218,7 +219,7 @@ class TrayManager: def _on_version_check_timer(self): # Check if is running from build and stop future validations if yes - if not is_running_from_build(): + if not is_running_from_build() or not op_version_control_available(): self._version_check_timer.stop() return From 40ba77207ff6d510355630299648f645c8558872 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 10:47:17 +0100 Subject: [PATCH 090/160] use applications manager to get djv path --- .../event_handlers_user/action_djvview.py | 106 +++++++++++++----- 1 file changed, 81 insertions(+), 25 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py index c603a2d200..334519b4bb 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_djvview.py @@ -1,6 +1,8 @@ import os +import time import subprocess from operator import itemgetter +from openpype.lib import ApplicationManager from openpype_modules.ftrack.lib import BaseAction, statics_icon @@ -23,15 +25,25 @@ class DJVViewAction(BaseAction): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.djv_path = self.find_djv_path() + self.application_manager = ApplicationManager() + self._last_check = time.time() + self._check_interval = 10 - def preregister(self): - if self.djv_path is None: - return ( - 'DJV View is not installed' - ' or paths in presets are not set correctly' - ) - return True + def _get_djv_apps(self): + app_group = self.application_manager.app_groups["djvview"] + + output = [] + for app in app_group: + executable = app.find_executable() + if executable is not None: + output.append(app) + return output + + def get_djv_apps(self): + cur_time = time.time() + if (cur_time - self._last_check) > self._check_interval: + self.application_manager.refresh() + return self._get_djv_apps() def discover(self, session, entities, event): """Return available actions based on *event*. """ @@ -40,15 +52,13 @@ class DJVViewAction(BaseAction): return False entityType = selection[0].get("entityType", None) - if entityType in ["assetversion", "task"]: + if entityType not in ["assetversion", "task"]: + return False + + if self.get_djv_apps(): return True return False - def find_djv_path(self): - for path in (os.environ.get("DJV_PATH") or "").split(os.pathsep): - if os.path.exists(path): - return path - def interface(self, session, entities, event): if event['data'].get('values', {}): return @@ -88,7 +98,37 @@ class DJVViewAction(BaseAction): 'message': 'There are no Asset Versions to open.' } - items = [] + # TODO sort them (somehow?) 
+ enum_items = [] + first_value = None + for app in self.get_djv_apps(): + if first_value is None: + first_value = app.full_name + enum_items.append({ + "value": app.full_name, + "label": app.full_label + }) + + if not enum_items: + return { + "success": False, + "message": "Couldn't find DJV executable." + } + + items = [ + { + "type": "enumerator", + "label": "DJV version:", + "name": "djv_app_name", + "data": enum_items, + "value": first_value + }, + { + "type": "label", + "value": "---" + } + ] + version_items = [] base_label = "v{0} - {1} - {2}" default_component = None last_available = None @@ -115,11 +155,11 @@ class DJVViewAction(BaseAction): last_available = file_path if component['name'] == default_component: select_value = file_path - items.append( + version_items.append( {'label': label, 'value': file_path} ) - if len(items) == 0: + if len(version_items) == 0: return { 'success': False, 'message': ( @@ -132,7 +172,7 @@ class DJVViewAction(BaseAction): 'type': 'enumerator', 'name': 'path', 'data': sorted( - items, + version_items, key=itemgetter('label'), reverse=True ) @@ -142,21 +182,37 @@ class DJVViewAction(BaseAction): else: item['value'] = last_available - return {'items': [item]} + items.append(item) + + return {'items': items} def launch(self, session, entities, event): """Callback method for DJVView action.""" # Launching application - if "values" not in event["data"]: + event_data = event["data"] + if "values" not in event_data: return - filpath = event['data']['values']['path'] + + djv_app_name = event_data["djv_app_name"] + app = self.applicaion_manager.applications.get(djv_app_name) + executable = None + if app is not None: + executable = app.find_executable() + + if not executable: + return { + "success": False, + "message": "Couldn't find DJV executable." 
+ } + + filpath = os.path.normpath(event_data["values"]["path"]) cmd = [ # DJV path - os.path.normpath(self.djv_path), + executable, # PATH TO COMPONENT - os.path.normpath(filpath) + filpath ] try: @@ -164,8 +220,8 @@ class DJVViewAction(BaseAction): subprocess.Popen(cmd) except FileNotFoundError: return { - 'success': False, - 'message': 'File "{}" was not found.'.format( + "success": False, + "message": "File \"{}\" was not found.".format( os.path.basename(filpath) ) } From a400fbd1e01457228f8b0498a795cd07632a43d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 11:28:13 +0100 Subject: [PATCH 091/160] OP-2402 - progress changed to 100 (per cent) --- .../hosts/webpublisher/webserver_service/webserver_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index c96ad8e110..a8b1cd11b8 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -131,7 +131,7 @@ def reprocess_failed(upload_dir, webserver_url): { "finish_date": datetime.now(), "status": "error", - "progress": 1, + "progress": 100, "log": batch.get("log") + msg }} ) @@ -153,6 +153,6 @@ def reprocess_failed(upload_dir, webserver_url): { "finish_date": datetime.now(), "status": "sent_for_reprocessing", - "progress": 1 + "progress": 100 }} ) From 3b6e03046565a739a3beb1c802cfa82bd58d6f4d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 11:57:27 +0100 Subject: [PATCH 092/160] OP-2399 - moved error at the beginning of the log --- openpype/lib/remote_publish.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 8074b2d112..f0d73e2afe 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -95,7 +95,8 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None): close host app """ # Error exit as soon as any error occurs. 
- error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}" + error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n" + error_format += "-" * 80 + "\n" close_plugin = _get_close_plugin(close_plugin_name, log) @@ -111,7 +112,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None): if result["error"]: log.error(error_format.format(**result)) uninstall() - log_lines.append(error_format.format(**result)) + log_lines = [error_format.format(**result)] + log_lines dbcon.update_one( {"_id": _id}, {"$set": From 5a3a6cc6e8154cfe9fe473b79aaa375806c801ac Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 13:23:54 +0100 Subject: [PATCH 093/160] OP-2427 - better error handling when batch or user not found --- .../webserver_service/webpublish_routes.py | 22 +++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 6606f5cb58..e5ae727cd7 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -301,9 +301,16 @@ class BatchStatusEndpoint(_RestApiEndpoint): async def get(self, batch_id) -> Response: output = self.dbcon.find_one({"batch_id": batch_id}) + if output: + status = 200 + body = self.resource.encode(output) + else: + output = [{"msg": "Batch id {} not found".format(batch_id)}] + status = 404 + return Response( - status=200, - body=self.resource.encode(output), + status=status, + body=body, content_type="application/json" ) @@ -313,9 +320,16 @@ class PublishesStatusEndpoint(_RestApiEndpoint): async def get(self, user) -> Response: output = list(self.dbcon.find({"user": user})) + if output: + status = 200 + body = self.resource.encode(output) + else: + body = [{"msg": "User {} not found".format(user)}] + status = 404 + return Response( - status=200, - body=self.resource.encode(output), + status=status, + body=body, content_type="application/json" ) From 3328a80a21c2f5833c895a3f22b8eca44a9e9139 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 13:29:43 +0100 Subject: [PATCH 094/160] OP-2427 - better error handling when batch or user not found --- .../webpublisher/webserver_service/webpublish_routes.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index e5ae727cd7..3270fe8f27 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -303,11 +303,10 @@ class BatchStatusEndpoint(_RestApiEndpoint): if output: status = 200 - body = self.resource.encode(output) else: - output = [{"msg": "Batch id {} not found".format(batch_id)}] + output = {"msg": "Batch id {} not found".format(batch_id)} status = 404 - + body = self.resource.encode(output) return Response( status=status, body=body, @@ -322,10 +321,10 @@ class PublishesStatusEndpoint(_RestApiEndpoint): if output: status = 200 - body = self.resource.encode(output) else: - body = [{"msg": "User {} not found".format(user)}] + output = {"msg": "User {} not found".format(user)} status = 404 + body = self.resource.encode(output) return Response( status=status, From 19667cdfdd9f237d0e7bb16922d96f3ea0b97556 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 
14:23:22 +0100 Subject: [PATCH 095/160] OP-2403 - added storing user email into env var collect_published_files is not triggered for studio processing in a host, must be passed through differently. --- .../plugins/publish/collect_username.py | 33 ++++++++----- openpype/pype_commands.py | 48 ++++++++++++------- 2 files changed, 53 insertions(+), 28 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py index 7ea1c1f323..d7d874f0a7 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py +++ b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py @@ -37,16 +37,27 @@ class CollectUsername(pyblish.api.ContextPlugin): os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"] os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"] - for instance in context: - email = instance.data["user_email"] - self.log.info("email:: {}".format(email)) - session = ftrack_api.Session(auto_connect_event_hub=False) - user = session.query("User where email like '{}'".format( - email)) + # for publishes with studio processing + user_email = os.environ.get("USER_EMAIL") + self.log.debug("Email from env:: {}".format(user_email)) + if not user_email: + # for basic webpublishes + for instance in context: + email = instance.data["user_email"] + self.log.debug("Email from instance:: {}".format(email)) + break - if not user: - raise ValueError( - "Couldnt find user with {} email".format(email)) + if not user_email: + self.log.info("No email found") + return - os.environ["FTRACK_API_USER"] = user[0].get("username") - break + session = ftrack_api.Session(auto_connect_event_hub=False) + user = session.query("User where email like '{}'".format(user_email)) + + if not user: + raise ValueError( + "Couldn't find user with {} email".format(user_email)) + + username = user[0].get("username") + self.log.debug("Resolved ftrack username:: {}".format(username)) + os.environ["FTRACK_API_USER"] = username diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index e25b56744e..c9612d8915 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -161,21 +161,32 @@ class PypeCommands: log.info("Publish finished.") @staticmethod - def remotepublishfromapp(project, batch_dir, host_name, - user, targets=None): + def remotepublishfromapp(project, batch_path, host_name, + user_email, targets=None): """Opens installed variant of 'host' and run remote publish there. - Currently implemented and tested for Photoshop where customer - wants to process uploaded .psd file and publish collected layers - from there. + Currently implemented and tested for Photoshop where customer + wants to process uploaded .psd file and publish collected layers + from there. - Checks if no other batches are running (status =='in_progress). If - so, it sleeps for SLEEP (this is separate process), - waits for WAIT_FOR seconds altogether. + Checks if no other batches are running (status =='in_progress). If + so, it sleeps for SLEEP (this is separate process), + waits for WAIT_FOR seconds altogether. - Requires installed host application on the machine. + Requires installed host application on the machine. - Runs publish process as user would, in automatic fashion. + Runs publish process as user would, in automatic fashion. 
+ + Args: + project (str): project to publish (only single context is expected + per call of remotepublish + batch_path (str): Path batch folder. Contains subfolders with + resources (workfile, another subfolder 'renders' etc.) + host_name (str): 'photoshop' + user_email (string): email address for webpublisher - used to + find Ftrack user with same email + targets (list): Pyblish targets + (to choose validator for example) """ import pyblish.api from openpype.api import Logger @@ -185,9 +196,9 @@ class PypeCommands: log.info("remotepublishphotoshop command") - task_data = get_task_data(batch_dir) + task_data = get_task_data(batch_path) - workfile_path = os.path.join(batch_dir, + workfile_path = os.path.join(batch_path, task_data["task"], task_data["files"][0]) @@ -196,7 +207,7 @@ class PypeCommands: batch_id = task_data["batch"] dbcon = get_webpublish_conn() # safer to start logging here, launch might be broken altogether - _id = start_webpublish_log(dbcon, batch_id, user) + _id = start_webpublish_log(dbcon, batch_id, user_email) batches_in_progress = list(dbcon.find({"status": "in_progress"})) if len(batches_in_progress) > 1: @@ -219,10 +230,11 @@ class PypeCommands: print("env:: {}".format(env)) os.environ.update(env) - os.environ["OPENPYPE_PUBLISH_DATA"] = batch_dir + os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path # must pass identifier to update log lines for a batch os.environ["BATCH_LOG_ID"] = str(_id) os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib + os.environ["USER_EMAIL"] = user_email pyblish.api.register_host(host_name) if targets: @@ -247,7 +259,7 @@ class PypeCommands: time.sleep(0.5) @staticmethod - def remotepublish(project, batch_path, user, targets=None): + def remotepublish(project, batch_path, user_email, targets=None): """Start headless publishing. Used to publish rendered assets, workfiles etc. @@ -259,7 +271,8 @@ class PypeCommands: per call of remotepublish batch_path (str): Path batch folder. Contains subfolders with resources (workfile, another subfolder 'renders' etc.) - user (string): email address for webpublisher + user_email (string): email address for webpublisher - used to + find Ftrack user with same email targets (list): Pyblish targets (to choose validator for example) @@ -283,6 +296,7 @@ class PypeCommands: os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path os.environ["AVALON_PROJECT"] = project os.environ["AVALON_APP"] = host_name + os.environ["USER_EMAIL"] = user_email pyblish.api.register_host(host_name) @@ -298,7 +312,7 @@ class PypeCommands: _, batch_id = os.path.split(batch_path) dbcon = get_webpublish_conn() - _id = start_webpublish_log(dbcon, batch_id, user) + _id = start_webpublish_log(dbcon, batch_id, user_email) publish_and_log(dbcon, _id, log) From 9931a355422dd61ecc35a1e751e7b6f24c0b2489 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 14:23:52 +0100 Subject: [PATCH 096/160] OP-2403 - added a bit of documentation --- .../webpublisher/plugins/publish/collect_published_files.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index d2754b3df3..c1b1d66cb8 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -21,6 +21,11 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): This collector will try to find json files in provided `OPENPYPE_PUBLISH_DATA`. 
Those files _MUST_ share same context. + This covers 'basic' webpublishes, eg artists uses Standalone Publisher to + publish rendered frames or assets. + + This is not applicable for 'studio' processing where host application is + called to process uploaded workfile and render frames itself. """ # must be really early, context values are only in json file order = pyblish.api.CollectorOrder - 0.490 From ff76b5c3d98375a62cd4a24cd38da82d92e9d73e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 14:23:54 +0100 Subject: [PATCH 097/160] flame: rename path to fname https://github.com/pypeclub/OpenPype/pull/2519#discussion_r785974817 --- openpype/hosts/flame/api/lib.py | 40 ++++++++++++++------------------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index b963a1cb39..1e444f3b40 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -601,12 +601,12 @@ def get_clips_in_reels(project): return output_clips -def get_reformated_path(path, padded=True): +def get_reformated_path(fname, padded=True): """ Return fixed python expression path Args: - path (str): path url or simple file name + fname (str): file name Returns: type: string with reformated path @@ -615,27 +615,27 @@ def get_reformated_path(path, padded=True): get_reformated_path("plate.1001.exr") > plate.%04d.exr """ - padding = get_padding_from_path(path) - found = get_frame_from_path(path) + padding = get_padding_from_path(fname) + found = get_frame_from_path(fname) if not found: - log.info("Path is not sequence: {}".format(path)) - return path + log.info("File name is not sequence: {}".format(fname)) + return fname if padded: - path = path.replace(found, "%0{}d".format(padding)) + fname = fname.replace(found, "%0{}d".format(padding)) else: - path = path.replace(found, "%d") + fname = fname.replace(found, "%d") - return path + return fname -def get_padding_from_path(path): +def get_padding_from_path(fname): """ Return padding number from Flame path style Args: - path (str): path url or simple file name + fname (str): file name Returns: int: padding number @@ -644,20 +644,17 @@ def get_padding_from_path(path): get_padding_from_path("plate.0001.exr") > 4 """ - found = get_frame_from_path(path) + found = get_frame_from_path(fname) - if found: - return len(found) - else: - return None + return len(found) if found else None -def get_frame_from_path(path): +def get_frame_from_path(fname): """ Return sequence number from Flame path style Args: - path (str): path url or simple file name + fname (str): file name Returns: int: sequence frame number @@ -669,9 +666,6 @@ def get_frame_from_path(path): """ frame_pattern = re.compile(r"[._](\d+)[.]") - found = re.findall(frame_pattern, path) + found = re.findall(frame_pattern, fname) - if found: - return found.pop() - else: - return None + return found.pop() if found else None From 3dbe58a10ef90f0882f19c2ed092f506bf771943 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 14:28:08 +0100 Subject: [PATCH 098/160] OP-2403 - use default username if not found --- .../ftrack/plugins/publish/collect_username.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py index d7d874f0a7..303490189b 100644 --- a/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py +++ 
b/openpype/modules/default_modules/ftrack/plugins/publish/collect_username.py @@ -43,8 +43,8 @@ class CollectUsername(pyblish.api.ContextPlugin): if not user_email: # for basic webpublishes for instance in context: - email = instance.data["user_email"] - self.log.debug("Email from instance:: {}".format(email)) + user_email = instance.data.get("user_email") + self.log.debug("Email from instance:: {}".format(user_email)) break if not user_email: From ffc92fd1f447588833bd230c8d17f766a52dae93 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 14:40:19 +0100 Subject: [PATCH 099/160] flame: subset name from settings https://github.com/pypeclub/OpenPype/pull/2519#discussion_r785985961 --- .../plugins/publish/precollect_workfile.py | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index aff85e22e6..8d49993576 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -1,5 +1,6 @@ import pyblish.api import avalon.api as avalon +import openpype.lib as oplib import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export @@ -11,19 +12,32 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 def process(self, context): - asset = avalon.Session["AVALON_ASSET"] - subset = "otioTimeline" + # plugin defined + family = "workfile" + variant = "otioTimeline" + + # main + asset_doc = context.data["assetEntity"] + task_name = avalon.Session["AVALON_TASK"] project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) + # create subset name + subset_name = oplib.get_subset_name_with_asset_doc( + family, + variant, + task_name, + asset_doc, + ) + # adding otio timeline to context with opfapi.maintained_segment_selection(sequence): otio_timeline = flame_export.create_otio_timeline(sequence) instance_data = { - "name": "{}_{}".format(asset, subset), - "asset": asset, - "subset": "{}{}".format(asset, subset.capitalize()), + "name": subset_name, + "asset": asset_doc["name"], + "subset": subset_name, "family": "workfile" } From 07b9a769e1bbbbf5fc5271d2faa2eb2492eea861 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 14:46:49 +0100 Subject: [PATCH 100/160] flame: removing inherited mess https://github.com/pypeclub/OpenPype/pull/2519#discussion_r785987192 --- .../plugins/publish/precollect_instances.py | 30 ++++++++----------- .../plugins/publish/precollect_workfile.py | 2 +- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/precollect_instances.py index b4b2ebf63f..2e5a0c406b 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/precollect_instances.py @@ -10,7 +10,7 @@ from pprint import pformat class PrecollectInstances(pyblish.api.ContextPlugin): """Collect all Track items selection.""" - order = pyblish.api.CollectorOrder - 0.49 + order = pyblish.api.CollectorOrder - 0.47 label = "Precollect Instances" hosts = ["flame"] @@ -57,16 +57,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): marker_data["handleEnd"] = min( marker_data["handleEnd"], tail) - # add audio to families - with_audio = False - if marker_data.pop("audio"): - with_audio = True + 
with_audio = bool(marker_data.pop("audio")) - # add tag data to instance data - data = { - k: v for k, v in marker_data.items() - if k not in ("id", "applieswhole", "label") - } + # add marker data to instance data + inst_data = dict(marker_data.items()) asset = marker_data["asset"] subset = marker_data["subset"] @@ -83,7 +77,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): label += " {}".format(subset) label += " {}".format("[" + ", ".join(families) + "]") - data.update({ + inst_data.update({ "name": "{}_{}".format(asset, subset), "label": label, "asset": asset, @@ -96,17 +90,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): "path": file_path }) - # otio clip data + # get otio clip data otio_data = self._get_otio_clip_instance_data(clip_data) or {} self.log.debug("__ otio_data: {}".format(pformat(otio_data))) - data.update(otio_data) - self.log.debug("__ data: {}".format(pformat(data))) + + # add to instance data + inst_data.update(otio_data) + self.log.debug("__ inst_data: {}".format(pformat(inst_data))) # add resolution - self._get_resolution_to_data(data, context) + self._get_resolution_to_data(inst_data, context) # create instance - instance = context.create_instance(**data) + instance = context.create_instance(**inst_data) # add colorspace data instance.data.update({ @@ -116,7 +112,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): }) # create shot instance for shot attributes create/update - self._create_shot_instance(context, clip_name, **data) + self._create_shot_instance(context, clip_name, **inst_data) self.log.info("Creating instance: {}".format(instance)) self.log.info( diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/precollect_workfile.py index 8d49993576..34bcab83a7 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/precollect_workfile.py @@ -9,7 +9,7 @@ class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): """Inject the current working context into publish context""" label = "Precollect Timeline OTIO" - order = pyblish.api.CollectorOrder - 0.5 + order = pyblish.api.CollectorOrder - 0.48 def process(self, context): # plugin defined From b0a71a86361b9cd6fc3a7532c56174f0de016f8d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 15:40:45 +0100 Subject: [PATCH 101/160] typo --- openpype/hosts/flame/api/render_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/render_utils.py b/openpype/hosts/flame/api/render_utils.py index f8dbfe2025..1b086646cc 100644 --- a/openpype/hosts/flame/api/render_utils.py +++ b/openpype/hosts/flame/api/render_utils.py @@ -121,5 +121,5 @@ def get_preset_path_by_xml_name(xml_preset_name): if installed_preset_path: return os.path.dirname(installed_preset_path) - # if nothing found then return None + # if nothing found then return False return False From 303d6e2815d3f5dc384383176d31477979d8e69f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 16:28:34 +0100 Subject: [PATCH 102/160] change message when avalon entities are not available --- .../ftrack/event_handlers_user/action_delete_asset.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index d3cc0ad971..be53e2f234 100644 --- 
a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -163,8 +163,11 @@ class DeleteAssetSubset(BaseAction): if not selected_av_entities: return { - "success": False, - "message": "Didn't found entities in avalon" + "success": True, + "message": ( + "Didn't found entities in avalon." + " You can use Ftrack's Delete button fot this selection." + ) } # Remove cached action older than 2 minutes From d68b08bb6e31b97fb090fe763dd678d97a9774fb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 16:29:04 +0100 Subject: [PATCH 103/160] few formatting changes --- .../action_delete_asset.py | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index be53e2f234..900516e790 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -172,18 +172,18 @@ class DeleteAssetSubset(BaseAction): # Remove cached action older than 2 minutes old_action_ids = [] - for id, data in self.action_data_by_id.items(): + for action_id, data in self.action_data_by_id.items(): created_at = data.get("created_at") if not created_at: - old_action_ids.append(id) + old_action_ids.append(action_id) continue cur_time = datetime.now() existing_in_sec = (created_at - cur_time).total_seconds() if existing_in_sec > 60 * 2: - old_action_ids.append(id) + old_action_ids.append(action_id) - for id in old_action_ids: - self.action_data_by_id.pop(id, None) + for action_id in old_action_ids: + self.action_data_by_id.pop(action_id, None) # Store data for action id action_id = str(uuid.uuid1()) @@ -442,7 +442,11 @@ class DeleteAssetSubset(BaseAction): subsets_to_delete = to_delete.get("subsets") or [] # Convert asset ids to ObjectId obj - assets_to_delete = [ObjectId(id) for id in assets_to_delete if id] + assets_to_delete = [ + ObjectId(asset_id) + for asset_id in assets_to_delete + if asset_id + ] subset_ids_by_parent = spec_data["subset_ids_by_parent"] subset_ids_by_name = spec_data["subset_ids_by_name"] @@ -471,9 +475,8 @@ class DeleteAssetSubset(BaseAction): if not ftrack_id: ftrack_id = asset["data"].get("ftrackId") - if not ftrack_id: - continue - ftrack_ids_to_delete.append(ftrack_id) + if ftrack_id: + ftrack_ids_to_delete.append(ftrack_id) children_queue = collections.deque() for mongo_id in assets_to_delete: @@ -572,12 +575,12 @@ class DeleteAssetSubset(BaseAction): exc_info=True ) - if not_deleted_entities_id: - joined_not_deleted = ", ".join([ + if not_deleted_entities_id and asset_names_to_delete: + joined_not_deleted = ",".join([ "\"{}\"".format(ftrack_id) for ftrack_id in not_deleted_entities_id ]) - joined_asset_names = ", ".join([ + joined_asset_names = ",".join([ "\"{}\"".format(name) for name in asset_names_to_delete ]) From e02f03c3432a9a5640ec0be7f0a021d57f40e7c5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 16:29:19 +0100 Subject: [PATCH 104/160] find all children under selection and add them to delete queue --- .../action_delete_asset.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py 
b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index 900516e790..586f04004d 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -619,6 +619,25 @@ class DeleteAssetSubset(BaseAction): joined_ids_to_delete ) ).all() + # Find all children entities and add them to list + # - Delete tasks first then their parents and continue + parent_ids_to_delete = [ + entity["id"] + for entity in to_delete_entities + ] + while parent_ids_to_delete: + joined_parent_ids_to_delete = ",".join([ + "\"{}\"".format(ftrack_id) + for ftrack_id in parent_ids_to_delete + ]) + _to_delete = session.query(( + "select id, link from TypedContext where parent_id in ({})" + ).format(joined_parent_ids_to_delete)).all() + parent_ids_to_delete = [] + for entity in _to_delete: + parent_ids_to_delete.append(entity["id"]) + to_delete_entities.append(entity) + entities_by_link_len = collections.defaultdict(list) for entity in to_delete_entities: entities_by_link_len[len(entity["link"])].append(entity) From 4c1eda4558f9ae76b7833ab49da6b536872d210f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 16:42:22 +0100 Subject: [PATCH 105/160] fix typo --- .../ftrack/event_handlers_user/action_delete_asset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py index 586f04004d..676dd80e93 100644 --- a/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py +++ b/openpype/modules/default_modules/ftrack/event_handlers_user/action_delete_asset.py @@ -166,7 +166,7 @@ class DeleteAssetSubset(BaseAction): "success": True, "message": ( "Didn't found entities in avalon." - " You can use Ftrack's Delete button fot this selection." + " You can use Ftrack's Delete button for the selection." 
) } From 8eded893aafa1f1cb62bc5e371ae01c39d58d6a6 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 16:45:52 +0100 Subject: [PATCH 106/160] fixed mising 'maintained_selection' --- openpype/hosts/nuke/api/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index d3b7f74d6d..f7ebcb41da 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -25,6 +25,9 @@ from .pipeline import ( parse_container, update_container, ) +from .lib import ( + maintained_selection +) __all__ = ( @@ -49,4 +52,6 @@ __all__ = ( "containerise", "parse_container", "update_container", + + "maintained_selection", ) From d1d2ecb35294be89564ed27b421df07643070795 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 16:54:28 +0100 Subject: [PATCH 107/160] OP-2427 - limit sent data for user report --- .../hosts/webpublisher/webserver_service/webpublish_routes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 3270fe8f27..f55c8dec4c 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -317,7 +317,8 @@ class BatchStatusEndpoint(_RestApiEndpoint): class PublishesStatusEndpoint(_RestApiEndpoint): """Returns list of dict with batch info for user (email address).""" async def get(self, user) -> Response: - output = list(self.dbcon.find({"user": user})) + output = list(self.dbcon.find({"user": user}, + projection={"log": False})) if output: status = 200 From 99413c26bbd44bc8e387e27a4ce69d659b1d92f2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 16:55:32 +0100 Subject: [PATCH 108/160] OP-2427 - refactor name --- .../hosts/webpublisher/webserver_service/webpublish_routes.py | 2 +- .../hosts/webpublisher/webserver_service/webserver_cli.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index f55c8dec4c..808d77a4e3 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -314,7 +314,7 @@ class BatchStatusEndpoint(_RestApiEndpoint): ) -class PublishesStatusEndpoint(_RestApiEndpoint): +class UserReportEndpoint(_RestApiEndpoint): """Returns list of dict with batch info for user (email address).""" async def get(self, user) -> Response: output = list(self.dbcon.find({"user": user}, diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index a88b4bbc3e..1208634544 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -18,7 +18,7 @@ from .webpublish_routes import ( BatchReprocessEndpoint, BatchStatusEndpoint, TaskPublishEndpoint, - PublishesStatusEndpoint + UserReportEndpoint ) @@ -89,7 +89,7 @@ def run_webserver(*args, **kwargs): batch_status_endpoint.dispatch ) - user_status_endpoint = PublishesStatusEndpoint(openpype_resource) + user_status_endpoint = UserReportEndpoint(openpype_resource) server_manager.add_route( "GET", "/api/publishes/{user}", From 
dd1c0b18dfed42926cf343a8c4586b8564f14d08 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 17:02:29 +0100 Subject: [PATCH 109/160] OP-2427 - refactor code --- .../webserver_service/webpublish_routes.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 808d77a4e3..099819f6ca 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -372,17 +372,11 @@ class BatchReprocessEndpoint(_RestApiEndpoint): async def post(self, batch_id) -> Response: batches = self.dbcon.find({"batch_id": batch_id, "status": "error"}).sort("_id", -1) - batch = None - if batches: - batch = batches[0] - if batch: + if batches: self.dbcon.update_one( - {"_id": batch["_id"]}, - {"$set": - { - "status": "reprocess" - }} + {"_id": batches[0]["_id"]}, + {"$set": {"status": "reprocess"}} ) output = [{"msg": "Batch id {} set to reprocess".format(batch_id)}] status = 200 From f6441176e16dcba607ad0c3af2162a8ccfe45b8f Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 17:11:57 +0100 Subject: [PATCH 110/160] set env variables to skip validation of 3rd party libs --- .github/workflows/test_build.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index 6e1e38d0b2..dd52e83b61 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -37,6 +37,7 @@ jobs: - name: 🔨 Build shell: pwsh run: | + $env:SKIP_THIRD_PARTY_VALIDATION = "1" ./tools/build.ps1 Ubuntu-latest: @@ -61,6 +62,7 @@ jobs: - name: 🔨 Build run: | + echo "1" >> $SKIP_THIRD_PARTY_VALIDATION ./tools/build.sh # MacOS-latest: From c4de9bc9ace6ed22e75913be55faecc7f34045e0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 17:26:48 +0100 Subject: [PATCH 111/160] try use 'env' in step configuration --- .github/workflows/test_build.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index dd52e83b61..6dea05559b 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -36,8 +36,9 @@ jobs: - name: 🔨 Build shell: pwsh + env: + SKIP_THIRD_PARTY_VALIDATION: "1" run: | - $env:SKIP_THIRD_PARTY_VALIDATION = "1" ./tools/build.ps1 Ubuntu-latest: @@ -61,6 +62,8 @@ jobs: ./tools/create_env.sh - name: 🔨 Build + env: + SKIP_THIRD_PARTY_VALIDATION: "1" run: | echo "1" >> $SKIP_THIRD_PARTY_VALIDATION ./tools/build.sh From 769dc41ebb5b5630c22ee2fa34fcf02b7bc4d854 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 17:38:58 +0100 Subject: [PATCH 112/160] OP-2427 - reprocess all failed records for batch only once --- .../webserver_service/webserver_cli.py | 28 +++++++++++++------ 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 45338a5925..0d0a0223d9 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -126,7 +126,11 @@ def reprocess_failed(upload_dir, webserver_url): dbcon = mongo_client[database_name]["webpublishes"] results = dbcon.find({"status": "reprocess"}) + reprocessed_batches = set() for batch in results: + if 
batch["batch_id"] in reprocessed_batches: + continue + batch_url = os.path.join(upload_dir, batch["batch_id"], "manifest.json") @@ -150,18 +154,24 @@ def reprocess_failed(upload_dir, webserver_url): with open(batch_url) as f: data = json.loads(f.read()) + dbcon.update_many( + { + "batch_id": batch["batch_id"], + "status": {"$in": ["error", "reprocess"]} + }, + { + "$set": { + "finish_date": datetime.now(), + "status": "sent_for_reprocessing", + "progress": 100 + } + } + ) + try: r = requests.post(server_url, json=data) log.info("response{}".format(r)) except Exception: log.info("exception", exc_info=True) - dbcon.update_one( - {"_id": batch["_id"]}, - {"$set": - { - "finish_date": datetime.now(), - "status": "sent_for_reprocessing", - "progress": 100 - }} - ) + reprocessed_batches.add(batch["batch_id"]) From 750944ca21fd04711eced4b6e28b54ffd6f717b0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 17:44:22 +0100 Subject: [PATCH 113/160] try to get rid of spaces --- .github/workflows/test_build.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index 6dea05559b..9e098db69b 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -37,7 +37,7 @@ jobs: - name: 🔨 Build shell: pwsh env: - SKIP_THIRD_PARTY_VALIDATION: "1" + SKIP_THIRD_PARTY_VALIDATION: 1 run: | ./tools/build.ps1 @@ -63,9 +63,8 @@ jobs: - name: 🔨 Build env: - SKIP_THIRD_PARTY_VALIDATION: "1" + SKIP_THIRD_PARTY_VALIDATION: 1 run: | - echo "1" >> $SKIP_THIRD_PARTY_VALIDATION ./tools/build.sh # MacOS-latest: From e63b14d781419f5f944f7c9c806401ec4c4e9563 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Wed, 19 Jan 2022 18:09:17 +0100 Subject: [PATCH 114/160] use export command --- .github/workflows/test_build.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index 9e098db69b..3b5e6dc970 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -36,9 +36,8 @@ jobs: - name: 🔨 Build shell: pwsh - env: - SKIP_THIRD_PARTY_VALIDATION: 1 run: | + $env:SKIP_THIRD_PARTY_VALIDATION="1" ./tools/build.ps1 Ubuntu-latest: @@ -62,9 +61,8 @@ jobs: ./tools/create_env.sh - name: 🔨 Build - env: - SKIP_THIRD_PARTY_VALIDATION: 1 run: | + export SKIP_THIRD_PARTY_VALIDATION=1 ./tools/build.sh # MacOS-latest: From 770a6407c499b751e08b3156ff4b92ed1159a6ff Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 18:14:43 +0100 Subject: [PATCH 115/160] OP-2427 - mark republished records --- openpype/lib/remote_publish.py | 35 ++++++++++++++++++++++++---------- openpype/pype_commands.py | 2 +- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index dd5a3e2864..181802792a 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -26,7 +26,7 @@ def headless_publish(log, close_plugin_name=None, is_test=False): "batch will be unfinished!") return - publish_and_log(dbcon, _id, log, close_plugin_name) + publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name) else: publish(log, close_plugin_name) @@ -84,13 +84,14 @@ def publish(log, close_plugin_name=None): sys.exit(1) -def publish_and_log(dbcon, _id, log, close_plugin_name=None): +def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): """Loops through all plugins, logs ok and fails into OP DB. 
Args: dbcon (OpenPypeMongoConnection) - _id (str) + _id (str) - id of current job in DB log (OpenPypeLogger) + batch_id (str) - id sent from frontend close_plugin_name (str): name of plugin with responsibility to close host app """ @@ -143,15 +144,29 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None): ) # final update + if batch_id: + dbcon.update_many( + {"batch_id": batch_id, "status": "sent_for_reprocessing"}, + { + "$set": + { + "finish_date": datetime.now(), + "status": "republish_finished", + } + } + ) + dbcon.update_one( {"_id": _id}, - {"$set": - { - "finish_date": datetime.now(), - "status": "finished_ok", - "progress": 100, - "log": os.linesep.join(log_lines) - }} + { + "$set": + { + "finish_date": datetime.now(), + "status": "finished_ok", + "progress": 100, + "log": os.linesep.join(log_lines) + } + } ) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index c9612d8915..8d0eb773a2 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -314,7 +314,7 @@ class PypeCommands: dbcon = get_webpublish_conn() _id = start_webpublish_log(dbcon, batch_id, user_email) - publish_and_log(dbcon, _id, log) + publish_and_log(dbcon, _id, log, batch_id=batch_id) log.info("Publish finished.") From 69a90001d347643a1dd207a77cd3b5b7ec78d27f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Jan 2022 19:07:25 +0100 Subject: [PATCH 116/160] OP-2427 - refactor - status as code --- .../plugins/publish/collect_batch_data.py | 4 ++-- .../webserver_service/webpublish_routes.py | 10 +++++++--- .../webserver_service/webserver_cli.py | 13 +++++++++---- openpype/lib/remote_publish.py | 19 +++++++++++++------ openpype/pype_commands.py | 5 +++-- 5 files changed, 34 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py index a710fcb3e8..062c5ce0da 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py @@ -12,7 +12,7 @@ from openpype.lib.plugin_tools import ( parse_json, get_batch_asset_task_info ) -from openpype.lib.remote_publish import get_webpublish_conn +from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS class CollectBatchData(pyblish.api.ContextPlugin): @@ -74,7 +74,7 @@ class CollectBatchData(pyblish.api.ContextPlugin): dbcon.update_one( { "batch_id": batch_id, - "status": "in_progress" + "status": IN_PROGRESS_STATUS }, { "$set": { diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index 099819f6ca..cafd651167 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -11,10 +11,14 @@ from avalon.api import AvalonMongoDB from openpype.lib import OpenPypeMongoConnection from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint -from openpype.lib.remote_publish import get_task_data from openpype.settings import get_project_settings from openpype.lib import PypeLogger +from openpype.lib.remote_publish import ( + get_task_data, + ERROR_STATUS, + REPROCESS_STATUS +) log = PypeLogger.get_logger("WebServer") @@ -371,12 +375,12 @@ class BatchReprocessEndpoint(_RestApiEndpoint): """Marks latest 'batch_id' for reprocessing, returns 404 if not found.""" async def post(self, batch_id) -> Response: batches = 
self.dbcon.find({"batch_id": batch_id, - "status": "error"}).sort("_id", -1) + "status": ERROR_STATUS}).sort("_id", -1) if batches: self.dbcon.update_one( {"_id": batches[0]["_id"]}, - {"$set": {"status": "reprocess"}} + {"$set": {"status": REPROCESS_STATUS}} ) output = [{"msg": "Batch id {} set to reprocess".format(batch_id)}] status = 200 diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py index 0d0a0223d9..909ea38bc6 100644 --- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py +++ b/openpype/hosts/webpublisher/webserver_service/webserver_cli.py @@ -20,6 +20,11 @@ from .webpublish_routes import ( TaskPublishEndpoint, UserReportEndpoint ) +from openpype.lib.remote_publish import ( + ERROR_STATUS, + REPROCESS_STATUS, + SENT_REPROCESSING_STATUS +) log = PypeLogger().get_logger("webserver_gui") @@ -125,7 +130,7 @@ def reprocess_failed(upload_dir, webserver_url): database_name = os.environ["OPENPYPE_DATABASE_NAME"] dbcon = mongo_client[database_name]["webpublishes"] - results = dbcon.find({"status": "reprocess"}) + results = dbcon.find({"status": REPROCESS_STATUS}) reprocessed_batches = set() for batch in results: if batch["batch_id"] in reprocessed_batches: @@ -143,7 +148,7 @@ def reprocess_failed(upload_dir, webserver_url): {"$set": { "finish_date": datetime.now(), - "status": "error", + "status": ERROR_STATUS, "progress": 100, "log": batch.get("log") + msg }} @@ -157,12 +162,12 @@ def reprocess_failed(upload_dir, webserver_url): dbcon.update_many( { "batch_id": batch["batch_id"], - "status": {"$in": ["error", "reprocess"]} + "status": {"$in": [ERROR_STATUS, REPROCESS_STATUS]} }, { "$set": { "finish_date": datetime.now(), - "status": "sent_for_reprocessing", + "status": SENT_REPROCESSING_STATUS, "progress": 100 } } diff --git a/openpype/lib/remote_publish.py b/openpype/lib/remote_publish.py index 181802792a..9632e63ea0 100644 --- a/openpype/lib/remote_publish.py +++ b/openpype/lib/remote_publish.py @@ -11,6 +11,13 @@ from openpype import uninstall from openpype.lib.mongo import OpenPypeMongoConnection from openpype.lib.plugin_tools import parse_json +ERROR_STATUS = "error" +IN_PROGRESS_STATUS = "in_progress" +REPROCESS_STATUS = "reprocess" +SENT_REPROCESSING_STATUS = "sent_for_reprocessing" +FINISHED_REPROCESS_STATUS = "republishing_finished" +FINISHED_OK_STATUS = "finished_ok" + def headless_publish(log, close_plugin_name=None, is_test=False): """Runs publish in a opened host with a context and closes Python process. 
@@ -52,7 +59,7 @@ def start_webpublish_log(dbcon, batch_id, user): "batch_id": batch_id, "start_date": datetime.now(), "user": user, - "status": "in_progress", + "status": IN_PROGRESS_STATUS, "progress": 0 # integer 0-100, percentage }).inserted_id @@ -122,7 +129,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): {"$set": { "finish_date": datetime.now(), - "status": "error", + "status": ERROR_STATUS, "log": os.linesep.join(log_lines) }} @@ -146,12 +153,12 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): # final update if batch_id: dbcon.update_many( - {"batch_id": batch_id, "status": "sent_for_reprocessing"}, + {"batch_id": batch_id, "status": SENT_REPROCESSING_STATUS}, { "$set": { "finish_date": datetime.now(), - "status": "republish_finished", + "status": FINISHED_REPROCESS_STATUS, } } ) @@ -162,7 +169,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None): "$set": { "finish_date": datetime.now(), - "status": "finished_ok", + "status": FINISHED_OK_STATUS, "progress": 100, "log": os.linesep.join(log_lines) } @@ -183,7 +190,7 @@ def fail_batch(_id, batches_in_progress, dbcon): {"$set": { "finish_date": datetime.now(), - "status": "error", + "status": ERROR_STATUS, "log": msg }} diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index 8d0eb773a2..de0336be2b 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -14,7 +14,8 @@ from openpype.lib.remote_publish import ( publish_and_log, fail_batch, find_variant_key, - get_task_data + get_task_data, + IN_PROGRESS_STATUS ) @@ -209,7 +210,7 @@ class PypeCommands: # safer to start logging here, launch might be broken altogether _id = start_webpublish_log(dbcon, batch_id, user_email) - batches_in_progress = list(dbcon.find({"status": "in_progress"})) + batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS})) if len(batches_in_progress) > 1: fail_batch(_id, batches_in_progress, dbcon) print("Another batch running, probably stuck, ask admin for help") From d18ef2c51256413e1ca32bb655463d07dd808782 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 19 Jan 2022 19:25:11 +0100 Subject: [PATCH 117/160] fix hound --- .../maya/plugins/create/create_unreal_staticmesh.py | 9 ++++++--- openpype/hosts/maya/plugins/publish/clean_nodes.py | 1 - .../plugins/publish/validate_unreal_staticmesh_naming.py | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index 30f024a160..296116caae 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator for Unreal Static Meshes.""" from openpype.hosts.maya.api import plugin, lib -from avalon.api import CreatorError, Session +from avalon.api import Session from openpype.api import get_project_settings from maya import cmds # noqa @@ -38,5 +38,8 @@ class CreateUnrealStaticMesh(plugin.Creator): geometry = cmds.sets(name="geometry_SET", empty=True) collisions = cmds.sets(name="collisions_SET", empty=True) cmds.sets([geometry, collisions], forceElement=instance) - # todo: Iterate over collision prefixes and add them to correct - # sets. Put rest to the geometry set. 
+ for node in content: + if [n for n in self.collision_prefixes if node.startswith(n)]: + cmds.sets(node, forceElement=collisions) + else: + cmds.sets(node, forceElement=geometry) diff --git a/openpype/hosts/maya/plugins/publish/clean_nodes.py b/openpype/hosts/maya/plugins/publish/clean_nodes.py index e6667b7036..cd3613cc4f 100644 --- a/openpype/hosts/maya/plugins/publish/clean_nodes.py +++ b/openpype/hosts/maya/plugins/publish/clean_nodes.py @@ -24,4 +24,3 @@ class CleanNodesUp(pyblish.api.InstancePlugin): self.log.info("Removing {} nodes".format(len(nodes_to_clean))) for node in nodes_to_clean: cmds.remove(node) - \ No newline at end of file diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index e7df7c8cbb..c5aa14ec0c 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -118,4 +118,4 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError("Model naming is invalid. See log.") \ No newline at end of file + raise RuntimeError("Model naming is invalid. See log.") From 7affac0d1adfe96a9c06b918e7e3c39bf43fd6f3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 19 Jan 2022 19:43:46 +0100 Subject: [PATCH 118/160] fix setting of env MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .github/workflows/test_build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index 3b5e6dc970..ac7279117a 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -62,7 +62,7 @@ jobs: - name: 🔨 Build run: | - export SKIP_THIRD_PARTY_VALIDATION=1 + export SKIP_THIRD_PARTY_VALIDATION="1" ./tools/build.sh # MacOS-latest: From 33a7ddcf0b39897fa0b963af21267edffb5f6a57 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 20:13:32 +0100 Subject: [PATCH 119/160] flame: improving filename frame number operations --- openpype/hosts/flame/api/lib.py | 37 +++++++++++++++++---------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index 1e444f3b40..f102eba060 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -16,6 +16,7 @@ from openpype.api import Logger log = Logger.get_logger(__name__) +FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]") class CTX: # singleton used for passing data between api modules @@ -601,12 +602,12 @@ def get_clips_in_reels(project): return output_clips -def get_reformated_path(fname, padded=True): +def get_reformated_path(filename, padded=True): """ Return fixed python expression path Args: - fname (str): file name + filename (str): file name Returns: type: string with reformated path @@ -615,27 +616,28 @@ def get_reformated_path(fname, padded=True): get_reformated_path("plate.1001.exr") > plate.%04d.exr """ - padding = get_padding_from_path(fname) - found = get_frame_from_path(fname) + found = FRAME_PATTERN.search(filename) if not found: - log.info("File name is not sequence: {}".format(fname)) - return fname + log.info("File name is not sequence: {}".format(filename)) + return filename - if 
padded: - fname = fname.replace(found, "%0{}d".format(padding)) - else: - fname = fname.replace(found, "%d") + padding = get_padding_from_path(filename) - return fname + replacement = "%0{}d".format(padding) if padded else "%d" + start_idx, end_idx = found.span(1) + + return replacement.join( + [filename[:start_idx], filename[end_idx:]] + ) -def get_padding_from_path(fname): +def get_padding_from_path(filename): """ Return padding number from Flame path style Args: - fname (str): file name + filename (str): file name Returns: int: padding number @@ -644,17 +646,17 @@ def get_padding_from_path(fname): get_padding_from_path("plate.0001.exr") > 4 """ - found = get_frame_from_path(fname) + found = get_frame_from_path(filename) return len(found) if found else None -def get_frame_from_path(fname): +def get_frame_from_path(filename): """ Return sequence number from Flame path style Args: - fname (str): file name + filename (str): file name Returns: int: sequence frame number @@ -664,8 +666,7 @@ def get_frame_from_path(fname): ("plate.0001.exr") > 0001 """ - frame_pattern = re.compile(r"[._](\d+)[.]") - found = re.findall(frame_pattern, fname) + found = re.findall(FRAME_PATTERN, filename) return found.pop() if found else None From bef26229e5cd3a9b85955ac4fc108bba3fc45ed7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 20:32:54 +0100 Subject: [PATCH 120/160] flame: renaming collectors --- ...collect_instances.py => collect_timeline_instances.py} | 8 ++++---- .../{precollect_workfile.py => collect_timeline_otio.py} | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) rename openpype/hosts/flame/plugins/publish/{precollect_instances.py => collect_timeline_instances.py} (97%) rename openpype/hosts/flame/plugins/publish/{precollect_workfile.py => collect_timeline_otio.py} (92%) diff --git a/openpype/hosts/flame/plugins/publish/precollect_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py similarity index 97% rename from openpype/hosts/flame/plugins/publish/precollect_instances.py rename to openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 2e5a0c406b..a223a17977 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -7,11 +7,11 @@ from openpype.hosts.flame.otio import flame_export from pprint import pformat -class PrecollectInstances(pyblish.api.ContextPlugin): - """Collect all Track items selection.""" +class CollectTimelineInstances(pyblish.api.ContextPlugin): + """Collect all Timeline segment selection.""" - order = pyblish.api.CollectorOrder - 0.47 - label = "Precollect Instances" + order = pyblish.api.CollectorOrder - 0.09 + label = "Collect timeline Instances" hosts = ["flame"] audio_track_items = [] diff --git a/openpype/hosts/flame/plugins/publish/precollect_workfile.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py similarity index 92% rename from openpype/hosts/flame/plugins/publish/precollect_workfile.py rename to openpype/hosts/flame/plugins/publish/collect_timeline_otio.py index 34bcab83a7..faa5be9d68 100644 --- a/openpype/hosts/flame/plugins/publish/precollect_workfile.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -5,11 +5,11 @@ import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export -class PrecollecTimelineOCIO(pyblish.api.ContextPlugin): +class CollecTimelineOTIO(pyblish.api.ContextPlugin): """Inject the current working context into 
publish context""" - label = "Precollect Timeline OTIO" - order = pyblish.api.CollectorOrder - 0.48 + label = "Collect Timeline OTIO" + order = pyblish.api.CollectorOrder - 0.099 def process(self, context): # plugin defined From 943dfc2568c7a49cf8c98d0f3caccb5911b07d93 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 21:01:53 +0100 Subject: [PATCH 121/160] flame: updating filename operations also otio export and utils --- openpype/hosts/flame/api/__init__.py | 12 ++-- openpype/hosts/flame/api/lib.py | 16 +++--- openpype/hosts/flame/otio/flame_export.py | 31 +++++----- openpype/hosts/flame/otio/utils.py | 56 +++++++++---------- .../publish/collect_timeline_instances.py | 2 +- 5 files changed, 55 insertions(+), 62 deletions(-) diff --git a/openpype/hosts/flame/api/__init__.py b/openpype/hosts/flame/api/__init__.py index 656ba11617..8e5418c78b 100644 --- a/openpype/hosts/flame/api/__init__.py +++ b/openpype/hosts/flame/api/__init__.py @@ -25,9 +25,9 @@ from .lib import ( reset_segment_selection, get_segment_attributes, get_clips_in_reels, - get_reformated_path, - get_frame_from_path, - get_padding_from_path, + get_reformated_filename, + get_frame_from_filename, + get_padding_from_filename, maintained_object_duplication ) from .utils import ( @@ -92,9 +92,9 @@ __all__ = [ "reset_segment_selection", "get_segment_attributes", "get_clips_in_reels", - "get_reformated_path", - "get_frame_from_path", - "get_padding_from_path", + "get_reformated_filename", + "get_frame_from_filename", + "get_padding_from_filename", "maintained_object_duplication", # pipeline diff --git a/openpype/hosts/flame/api/lib.py b/openpype/hosts/flame/api/lib.py index f524ea0ede..f3c918caab 100644 --- a/openpype/hosts/flame/api/lib.py +++ b/openpype/hosts/flame/api/lib.py @@ -602,7 +602,7 @@ def get_clips_in_reels(project): return output_clips -def get_reformated_path(filename, padded=True): +def get_reformated_filename(filename, padded=True): """ Return fixed python expression path @@ -613,7 +613,7 @@ def get_reformated_path(filename, padded=True): type: string with reformated path Example: - get_reformated_path("plate.1001.exr") > plate.%04d.exr + get_reformated_filename("plate.1001.exr") > plate.%04d.exr """ found = FRAME_PATTERN.search(filename) @@ -622,7 +622,7 @@ def get_reformated_path(filename, padded=True): log.info("File name is not sequence: {}".format(filename)) return filename - padding = get_padding_from_path(filename) + padding = get_padding_from_filename(filename) replacement = "%0{}d".format(padding) if padded else "%d" start_idx, end_idx = found.span(1) @@ -632,7 +632,7 @@ def get_reformated_path(filename, padded=True): ) -def get_padding_from_path(filename): +def get_padding_from_filename(filename): """ Return padding number from Flame path style @@ -643,15 +643,15 @@ def get_padding_from_path(filename): int: padding number Example: - get_padding_from_path("plate.0001.exr") > 4 + get_padding_from_filename("plate.0001.exr") > 4 """ - found = get_frame_from_path(filename) + found = get_frame_from_filename(filename) return len(found) if found else None -def get_frame_from_path(filename): +def get_frame_from_filename(filename): """ Return sequence number from Flame path style @@ -662,7 +662,7 @@ def get_frame_from_path(filename): int: sequence frame number Example: - def get_frame_from_path(path): + def get_frame_from_filename(path): ("plate.0001.exr") > 0001 """ diff --git a/openpype/hosts/flame/otio/flame_export.py b/openpype/hosts/flame/otio/flame_export.py index 615904ec09..562a368215 100644 
--- a/openpype/hosts/flame/otio/flame_export.py +++ b/openpype/hosts/flame/otio/flame_export.py @@ -284,23 +284,20 @@ def create_otio_reference(clip_data): # get padding and other file infos log.debug("_ path: {}".format(path)) - is_sequence = padding = utils.get_frame_from_path(path) - if is_sequence: - number = utils.get_frame_from_path(path) - file_head = file_name.split(number)[:-1] - frame_start = int(number) - frame_duration = clip_data["source_duration"] - - if is_sequence: - metadata.update({ - "isSequence": True, - "padding": len(padding) - }) - otio_ex_ref_item = None + is_sequence = frame_number = utils.get_frame_from_filename(file_name) if is_sequence: + file_head = file_name.split(frame_number)[:-1] + frame_start = int(frame_number) + padding = len(frame_number) + + metadata.update({ + "isSequence": True, + "padding": padding + }) + # if it is file sequence try to create `ImageSequenceReference` # the OTIO might not be compatible so return nothing and do it old way try: @@ -322,10 +319,12 @@ def create_otio_reference(clip_data): pass if not otio_ex_ref_item: - reformat_path = utils.get_reformated_path(path, padded=False) + dirname, file_name = os.path.split(path) + file_name = utils.get_reformated_filename(file_name, padded=False) + reformated_path = os.path.join(dirname, file_name) # in case old OTIO or video file create `ExternalReference` otio_ex_ref_item = otio.schema.ExternalReference( - target_url=reformat_path, + target_url=reformated_path, available_range=create_otio_time_range( frame_start, frame_duration, @@ -346,7 +345,7 @@ def create_otio_clip(clip_data): media_reference = create_otio_reference(clip_data) # calculate source in - first_frame = utils.get_frame_from_path(clip_data["fpath"]) or 0 + first_frame = utils.get_frame_from_filename(clip_data["fpath"]) or 0 source_in = int(clip_data["source_in"]) - int(first_frame) # creatae source range diff --git a/openpype/hosts/flame/otio/utils.py b/openpype/hosts/flame/otio/utils.py index 57a15d65a1..e3ffdfce95 100644 --- a/openpype/hosts/flame/otio/utils.py +++ b/openpype/hosts/flame/otio/utils.py @@ -4,6 +4,8 @@ import opentimelineio as otio import logging log = logging.getLogger(__name__) +FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]") + def timecode_to_frames(timecode, framerate): rt = otio.opentime.from_timecode(timecode, framerate) @@ -20,79 +22,71 @@ def frames_to_seconds(frames, framerate): return otio.opentime.to_seconds(rt) -def get_reformated_path(path, padded=True): +def get_reformated_filename(filename, padded=True): """ Return fixed python expression path Args: - path (str): path url or simple file name + filename (str): file name Returns: type: string with reformated path Example: - get_reformated_path("plate.1001.exr") > plate.%04d.exr + get_reformated_filename("plate.1001.exr") > plate.%04d.exr """ - basename = os.path.basename(path) - dirpath = os.path.dirname(path) - padding = get_padding_from_path(basename) - found = get_frame_from_path(basename) + found = FRAME_PATTERN.search(filename) if not found: - log.info("Path is not sequence: {}".format(path)) - return path + log.info("File name is not sequence: {}".format(filename)) + return filename - if padded: - basename = basename.replace(found, "%0{}d".format(padding)) - else: - basename = basename.replace(found, "%d") + padding = get_padding_from_filename(filename) - return os.path.join(dirpath, basename) + replacement = "%0{}d".format(padding) if padded else "%d" + start_idx, end_idx = found.span(1) + + return replacement.join( + [filename[:start_idx], 
filename[end_idx:]] + ) -def get_padding_from_path(path): +def get_padding_from_filename(filename): """ Return padding number from Flame path style Args: - path (str): path url or simple file name + filename (str): file name Returns: int: padding number Example: - get_padding_from_path("plate.0001.exr") > 4 + get_padding_from_filename("plate.0001.exr") > 4 """ - found = get_frame_from_path(path) + found = get_frame_from_filename(filename) - if found: - return len(found) - else: - return None + return len(found) if found else None -def get_frame_from_path(path): +def get_frame_from_filename(filename): """ Return sequence number from Flame path style Args: - path (str): path url or simple file name + filename (str): file name Returns: int: sequence frame number Example: - def get_frame_from_path(path): + def get_frame_from_filename(path): ("plate.0001.exr") > 0001 """ - frame_pattern = re.compile(r"[._](\d+)[.]") - found = re.findall(frame_pattern, path) + found = re.findall(FRAME_PATTERN, filename) - if found: - return found.pop() - else: - return None + return found.pop() if found else None diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index df7b0026fb..6424bce3bc 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -47,7 +47,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): # get source clip source_clip = self._get_reel_clip(file_path) - first_frame = opfapi.get_frame_from_path(file_path) or 0 + first_frame = opfapi.get_frame_from_filename(file_path) or 0 head, tail = self._get_head_tail(clip_data, first_frame) From fdc1d5acb865a0b6ab48fee2bd31671854333a29 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 21:25:31 +0100 Subject: [PATCH 122/160] global: rename otio plugins and change order --- openpype/plugins/publish/collect_otio_frame_ranges.py | 4 ++-- openpype/plugins/publish/collect_otio_review.py | 4 ++-- openpype/plugins/publish/collect_otio_subset_resources.py | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 511ed757b3..ee7b7957ad 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -12,13 +12,13 @@ import openpype.lib from pprint import pformat -class CollectOcioFrameRanges(pyblish.api.InstancePlugin): +class CollectOtioFrameRanges(pyblish.api.InstancePlugin): """Getting otio ranges from otio_clip Adding timeline and source ranges to instance data""" label = "Collect OTIO Frame Ranges" - order = pyblish.api.CollectorOrder - 0.48 + order = pyblish.api.CollectorOrder - 0.08 families = ["shot", "clip"] hosts = ["resolve", "hiero", "flame"] diff --git a/openpype/plugins/publish/collect_otio_review.py b/openpype/plugins/publish/collect_otio_review.py index 6634be0671..35c77a24cb 100644 --- a/openpype/plugins/publish/collect_otio_review.py +++ b/openpype/plugins/publish/collect_otio_review.py @@ -16,11 +16,11 @@ import pyblish.api from pprint import pformat -class CollectOcioReview(pyblish.api.InstancePlugin): +class CollectOtioReview(pyblish.api.InstancePlugin): """Get matching otio track from defined review layer""" label = "Collect OTIO Review" - order = pyblish.api.CollectorOrder - 0.47 + order = pyblish.api.CollectorOrder - 0.078 
families = ["clip"] hosts = ["resolve", "hiero", "flame"] diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index d740ceb508..7c11462ef0 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -14,11 +14,11 @@ import openpype from openpype.lib import editorial -class CollectOcioSubsetResources(pyblish.api.InstancePlugin): +class CollectOtioSubsetResources(pyblish.api.InstancePlugin): """Get Resources for a subset version""" label = "Collect OTIO Subset Resources" - order = pyblish.api.CollectorOrder - 0.47 + order = pyblish.api.CollectorOrder - 0.077 families = ["clip"] hosts = ["resolve", "hiero", "flame"] @@ -64,7 +64,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): a_frame_start_h = media_in - handle_start a_frame_end_h = media_out + handle_end - # create trimmed ocio time range + # create trimmed otio time range trimmed_media_range_h = editorial.range_from_frames( a_frame_start_h, (a_frame_end_h - a_frame_start_h + 1), media_fps From 3afdaf655817c036d2f424bd46417edca21b0ada Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 21:26:17 +0100 Subject: [PATCH 123/160] flame: remove camel case in extract resources plugin --- .../plugins/publish/extract_subset_resources.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index adb3b1ae9b..291e440cbe 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -20,17 +20,17 @@ class ExtractSubsetResources(openpype.api.Extractor): default_presets = { "thumbnail": { "ext": "jpg", - "xmlPresetFile": "Jpeg (8-bit).xml", - "xmlPresetDir": "", - "representationAddRange": False, - "representationTags": ["thumbnail"] + "xml_preset_file": "Jpeg (8-bit).xml", + "xml_preset_dir": "", + "representation_add_range": False, + "representation_tags": ["thumbnail"] }, "ftrackpreview": { "ext": "mov", - "xmlPresetFile": "Apple iPad (1920x1080).xml", - "xmlPresetDir": "", - "representationAddRange": True, - "representationTags": [ + "xml_preset_file": "Apple iPad (1920x1080).xml", + "xml_preset_dir": "", + "representation_add_range": True, + "representation_tags": [ "review", "delete" ] From 9df277fd840e070455b15c25db5f0237904dd3a4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 19 Jan 2022 21:27:23 +0100 Subject: [PATCH 124/160] flame: removing unused module --- openpype/hosts/flame/otio/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/flame/otio/utils.py b/openpype/hosts/flame/otio/utils.py index e3ffdfce95..7ded8e55d8 100644 --- a/openpype/hosts/flame/otio/utils.py +++ b/openpype/hosts/flame/otio/utils.py @@ -1,5 +1,4 @@ import re -import os import opentimelineio as otio import logging log = logging.getLogger(__name__) From eca0e02a91ba4c2b02c417191b22d179e6681870 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 20 Jan 2022 01:26:02 +0100 Subject: [PATCH 125/160] create duplicates --- .../create/create_unreal_staticmesh.py | 26 ++++++--- .../hosts/maya/plugins/publish/clean_nodes.py | 11 +++- .../publish/collect_unreal_staticmesh.py | 2 +- .../publish/extract_unreal_staticmesh.py | 23 ++++++-- .../plugins/publish/validate_assembly_name.py | 2 +- 
.../validate_unreal_staticmesh_naming.py | 55 +++++++++++++------ openpype/plugins/publish/integrate_new.py | 1 + 7 files changed, 86 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index 296116caae..d69cc6f0a1 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -35,11 +35,21 @@ class CreateUnrealStaticMesh(plugin.Creator): with lib.undo_chunk(): instance = super(CreateUnrealStaticMesh, self).process() content = cmds.sets(instance, query=True) - geometry = cmds.sets(name="geometry_SET", empty=True) - collisions = cmds.sets(name="collisions_SET", empty=True) - cmds.sets([geometry, collisions], forceElement=instance) - for node in content: - if [n for n in self.collision_prefixes if node.startswith(n)]: - cmds.sets(node, forceElement=collisions) - else: - cmds.sets(node, forceElement=geometry) + + # empty set and process its former content + cmds.sets(content, rm=instance) + geometry_set = cmds.sets(name="geometry_SET", empty=True) + collisions_set = cmds.sets(name="collisions_SET", empty=True) + + cmds.sets([geometry_set, collisions_set], forceElement=instance) + + members = cmds.ls(content, long=True) or [] + children = cmds.listRelatives(members, allDescendents=True, + fullPath=True) or [] + children = cmds.ls(children, type="transform") + for node in children: + if cmds.listRelatives(node, type="shape"): + if [n for n in self.collision_prefixes if node.startswith(n)]: + cmds.sets(node, forceElement=collisions_set) + else: + cmds.sets(node, forceElement=geometry_set) diff --git a/openpype/hosts/maya/plugins/publish/clean_nodes.py b/openpype/hosts/maya/plugins/publish/clean_nodes.py index cd3613cc4f..03995cdabe 100644 --- a/openpype/hosts/maya/plugins/publish/clean_nodes.py +++ b/openpype/hosts/maya/plugins/publish/clean_nodes.py @@ -18,9 +18,14 @@ class CleanNodesUp(pyblish.api.InstancePlugin): def process(self, instance): if not instance.data.get("cleanNodes"): - self.log.info("nothing to clean") + self.log.info("Nothing to clean.") + return - nodes_to_clean = instance.data.pop("cleanNodes") + nodes_to_clean = instance.data.pop("cleanNodes", []) self.log.info("Removing {} nodes".format(len(nodes_to_clean))) for node in nodes_to_clean: - cmds.remove(node) + try: + cmds.delete(node) + except ValueError: + # object might be already deleted, don't complain about it + pass diff --git a/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py index ad6398041b..b1fb0542f2 100644 --- a/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/publish/collect_unreal_staticmesh.py @@ -19,7 +19,7 @@ class CollectUnrealStaticMesh(pyblish.api.InstancePlugin): # add fbx family to trigger fbx extractor instance.data["families"].append("fbx") # take the name from instance (without the `S_` prefix) - instance.data["staticMeshCombinedName"] = instance.name[1:] + instance.data["staticMeshCombinedName"] = instance.name[2:] geometry_set = [i for i in instance if i == "geometry_SET"] instance.data["membersToCombine"] = cmds.sets( diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py index fd9cf69612..7867952de6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py 
+++ b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py @@ -3,6 +3,7 @@ import openpype.api import pyblish.api from maya import cmds # noqa +from uuid import uuid4 class ExtractUnrealStaticMesh(openpype.api.Extractor): @@ -16,13 +17,27 @@ class ExtractUnrealStaticMesh(openpype.api.Extractor): to_combine = instance.data.get("membersToCombine") static_mesh_name = instance.data.get("staticMeshCombinedName") self.log.info( - "merging {] into {}".format( - "+ ".join(to_combine), static_mesh_name)) + "merging {} into {}".format( + " + ".join(to_combine), static_mesh_name)) + duplicates = cmds.duplicate(to_combine, ic=True) cmds.polyUnite( - *to_combine, - n=static_mesh_name) + *duplicates, + n=static_mesh_name, ch=False) + + collision_duplicates = cmds.duplicate( + instance.data.get("collisionMembers"), ic=True) + cmds.parent(collision_duplicates, a=True, w=True) + instance.data["collisionMembers"] = collision_duplicates + + self.log.info( + "collision members: {}".format(instance.data["collisionMembers"])) if not instance.data.get("cleanNodes"): instance.data["cleanNodes"] = [] instance.data["cleanNodes"].append(static_mesh_name) + instance.data["cleanNodes"] += duplicates + instance.data["cleanNodes"] += collision_duplicates + + instance.data["setMembers"] = [static_mesh_name] + instance.data["setMembers"] += instance.data["collisionMembers"] diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_name.py b/openpype/hosts/maya/plugins/publish/validate_assembly_name.py index 8f7a3dfaf9..41349553fc 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_name.py @@ -30,7 +30,7 @@ class ValidateAssemblyName(pyblish.api.InstancePlugin): descendants = cmds.listRelatives(content_instance, allDescendents=True, fullPath=True) or [] - descendants = cmds.ls(descendants, noIntermediate=True, long=True) + descendants = cmds.ls(descendants, noIntermediate=True, type="transform") content_instance = list(set(content_instance + descendants)) assemblies = cmds.ls(content_instance, assemblies=True, long=True) diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index c5aa14ec0c..901a2ec75e 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -4,6 +4,8 @@ from maya import cmds # noqa import pyblish.api import openpype.api import openpype.hosts.maya.api.action +from avalon.api import Session +from openpype.api import get_project_settings import re @@ -15,14 +17,14 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): in Settings UI. This plugin also validates other types of meshes - collision meshes: - UBX_[RenderMeshName]_##: + UBX_[RenderMeshName]*: Boxes are created with the Box objects type in Max or with the Cube polygonal primitive in Maya. You cannot move the vertices around or deform it in any way to make it something other than a rectangular prism, or else it will not work. - UCP_[RenderMeshName]_##: + UCP_[RenderMeshName]*: Capsules are created with the Capsule object type. The capsule does not need to have many segments (8 is a good number) at all because it is @@ -30,7 +32,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin): boxes, you should not move the individual vertices around. 
-    USP_[RenderMeshName]_##:
+    USP_[RenderMeshName]*:
 
         Spheres are created with the Sphere object type. The sphere
         does not need to have many segments (8 is a good number) at
         all because it is
@@ -38,7 +40,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin):
         boxes, you should not move the individual vertices around.
 
-    UCX_[RenderMeshName]_##:
+    UCX_[RenderMeshName]*:
 
         Convex objects can be any completely closed convex 3D shape.
         For example, a box can also be a convex object
@@ -53,14 +55,23 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin):
     families = ["unrealStaticMesh"]
     label = "Unreal StaticMesh Name"
     actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
-    regex_mesh = r"(?P<renderName>.*)_(\d{2})"
-    regex_collision = r"_(?P<renderName>.*)_(\d{2})"
+    regex_mesh = r"(?P<renderName>.*))"
+    regex_collision = r"(?P<renderName>.*)"
 
     @classmethod
     def get_invalid(cls, instance):
         invalid = []
 
+        project_settings = get_project_settings(Session["AVALON_PROJECT"])
+        collision_prefixes = (
+            project_settings
+            ["maya"]
+            ["create"]
+            ["CreateUnrealStaticMesh"]
+            ["collision_prefixes"]
+        )
+
         combined_geometry_name = instance.data.get(
             "staticMeshCombinedName", None)
         if cls.validate_mesh:
@@ -81,10 +92,11 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin):
                 return False
 
             regex_collision = "{}{}".format(
-                "({})_".format(
-                    "|".join("(0}".format(p) for p in cls.collision_prefixes)
+                "(?P<prefix>({}))_".format(
+                    "|".join("{0}".format(p) for p in collision_prefixes)
                 ) or "", cls.regex_collision
             )
+
             cl_r = re.compile(regex_collision)
 
             for obj in collision_set:
@@ -92,20 +104,29 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin):
                 if not cl_m:
                     cls.log.error("{} is invalid".format(obj))
                     invalid.append(obj)
-                elif cl_m.group("renderName") != combined_geometry_name:
-                    cls.log.error(
-                        "Collision object name doesn't match"
-                        "static mesh name: {} != {}".format(
-                            cl_m.group("renderName"),
-                            combined_geometry_name)
+                else:
+                    expected_collision = "{}_{}".format(
+                        cl_m.group("prefix"),
+                        combined_geometry_name
                     )
-                    invalid.append(obj)
+
+                    if not obj.startswith(expected_collision):
+
+                        cls.log.error(
+                            "Collision object name doesn't match "
+                            "static mesh name"
+                        )
+                        cls.log.error("{}_{} != {}_{}".format(
+                            cl_m.group("prefix"),
+                            cl_m.group("renderName"),
+                            cl_m.group("prefix"),
+                            combined_geometry_name,
+                        ))
+                        invalid.append(obj)
 
         return invalid
 
     def process(self, instance):
-        # todo: load prefixes from creator settings.
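
To make the renamed capture groups above concrete, here is a small standalone sketch of the collision-name pattern the validator assembles; the prefix list is an assumed example, in production it comes from the CreateUnrealStaticMesh collision_prefixes setting read above:

    import re

    collision_prefixes = ["UBX", "UCP", "USP", "UCX"]  # assumed example values
    regex_collision = "(?P<prefix>({}))_".format(
        "|".join(collision_prefixes)) + r"(?P<renderName>.*)"

    match = re.match(regex_collision, "UBX_CarBody_01")
    print(match.group("prefix"))       # UBX
    print(match.group("renderName"))   # CarBody_01

Note the behaviour change: instead of requiring renderName to equal the combined static mesh name exactly, the patched validator only checks that the object name starts with "<prefix>_<combined name>", so numbered suffixes such as "_01" stay valid.
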
- if not self.validate_mesh and not self.validate_collision: self.log.info("Validation of both mesh and collision names" "is disabled.") diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index cec2e470b3..bf214d9139 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -389,6 +389,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): repre["ext"] = ext template_data["ext"] = ext + self.log.info(template_name) template = os.path.normpath( anatomy.templates[template_name]["path"]) From c5d4374e6078dfd73a32efa45b7d9846353c49cf Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Jan 2022 14:19:49 +0100 Subject: [PATCH 126/160] ftrack: adding profile for flame to have ftrack family --- .../settings/defaults/project_settings/ftrack.json | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index b3ea77a584..513611ebfb 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -318,6 +318,19 @@ "tasks": [], "add_ftrack_family": true, "advanced_filtering": [] + }, + { + "hosts": [ + "flame" + ], + "families": [ + "plate", + "take" + ], + "task_types": [], + "tasks": [], + "add_ftrack_family": true, + "advanced_filtering": [] } ] }, From 4f01991693dc8781f22bb3dec8098abb9144f1be Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Jan 2022 14:20:09 +0100 Subject: [PATCH 127/160] flame: adding segment index attribute to settings --- .../schemas/projects_schema/schema_project_flame.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json index b1b1f3539b..76576ebf73 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_flame.json @@ -38,6 +38,11 @@ "key": "clipName", "label": "Clip name template" }, + { + "type": "boolean", + "key": "segmentIndex", + "label": "Accept segment order" + }, { "type": "number", "key": "countFrom", From f4655e7ff388f17eba4e75d5a4a4051c4d01413f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Jan 2022 14:20:52 +0100 Subject: [PATCH 128/160] global: collect hierarchy order moved higher --- openpype/plugins/publish/collect_hierarchy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index 7f7306f73b..efb40407d9 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -13,7 +13,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): """ label = "Collect Hierarchy" - order = pyblish.api.CollectorOrder - 0.47 + order = pyblish.api.CollectorOrder - 0.076 families = ["shot"] hosts = ["resolve", "hiero", "flame"] From a6809a69aa6d44ca5c0ead628bda95f7753bf5bc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 20 Jan 2022 16:16:24 +0100 Subject: [PATCH 129/160] OP-2427 - conform to expected format for front end --- .../hosts/webpublisher/webserver_service/webpublish_routes.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py 
b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py index cafd651167..de09899104 100644 --- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py +++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py @@ -308,7 +308,9 @@ class BatchStatusEndpoint(_RestApiEndpoint): if output: status = 200 else: - output = {"msg": "Batch id {} not found".format(batch_id)} + output = {"msg": "Batch id {} not found".format(batch_id), + "status": "queued", + "progress": 0} status = 404 body = self.resource.encode(output) return Response( From 47244a39176b1ea6371aaec05de4663cd1727977 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 20 Jan 2022 17:13:21 +0100 Subject: [PATCH 130/160] create duplicates --- openpype/hosts/maya/api/lib.py | 25 +++++++++++++++++++ .../hosts/maya/plugins/publish/extract_fbx.py | 24 +++++++++++------- .../publish/extract_unreal_staticmesh.py | 10 -------- 3 files changed, 40 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 3f93bc2ab5..0858c205ea 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2853,3 +2853,28 @@ def set_colorspace(): cmds.colorManagementPrefs(e=True, renderingSpaceName=renderSpace) viewTransform = root_dict["viewTransform"] cmds.colorManagementPrefs(e=True, viewTransformName=viewTransform) + + +@contextlib.contextmanager +def root_parent(nodes): + # type: (list) -> list + """Context manager to un-parent provided nodes and return then back.""" + import pymel.core as pm # noqa + + node_parents = [] + for node in nodes: + n = pm.PyNode(node) + try: + root = pm.listRelatives(n, parent=1)[0] + except IndexError: + root = None + node_parents.append((n, root)) + try: + for node in node_parents: + node[0].setParent(world=True) + yield + finally: + for node in node_parents: + if node[1]: + node[0].setParent(node[1]) + diff --git a/openpype/hosts/maya/plugins/publish/extract_fbx.py b/openpype/hosts/maya/plugins/publish/extract_fbx.py index 720a61b0a7..e4894f28cd 100644 --- a/openpype/hosts/maya/plugins/publish/extract_fbx.py +++ b/openpype/hosts/maya/plugins/publish/extract_fbx.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import os -from maya import cmds -import maya.mel as mel +from maya import cmds # noqa +import maya.mel as mel # noqa +from openpype.hosts.maya.api.lib import root_parent import pyblish.api import avalon.maya @@ -192,10 +194,7 @@ class ExtractFBX(openpype.api.Extractor): if isinstance(value, bool): value = str(value).lower() - template = "FBXExport{0} -v {1}" - if key == "UpAxis": - template = "FBXExport{0} {1}" - + template = "FBXExport{0} {1}" if key == "UpAxis" else "FBXExport{0} -v {1}" # noqa cmd = template.format(key, value) self.log.info(cmd) mel.eval(cmd) @@ -205,9 +204,16 @@ class ExtractFBX(openpype.api.Extractor): mel.eval("FBXExportGenerateLog -v false") # Export - with avalon.maya.maintained_selection(): - cmds.select(members, r=1, noExpand=True) - mel.eval('FBXExport -f "{}" -s'.format(path)) + if "unrealStaticMesh" in instance.data["families"]: + with avalon.maya.maintained_selection(): + with root_parent(members): + self.log.info("Un-parenting: {}".format(members)) + cmds.select(members, r=1, noExpand=True) + mel.eval('FBXExport -f "{}" -s'.format(path)) + else: + with avalon.maya.maintained_selection(): + cmds.select(members, r=1, noExpand=True) + mel.eval('FBXExport -f "{}" -s'.format(path)) if "representations" not in instance.data: instance.data["representations"] = [] diff --git 
a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py index 7867952de6..32dc9d1d1c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_staticmesh.py @@ -3,7 +3,6 @@ import openpype.api import pyblish.api from maya import cmds # noqa -from uuid import uuid4 class ExtractUnrealStaticMesh(openpype.api.Extractor): @@ -24,20 +23,11 @@ class ExtractUnrealStaticMesh(openpype.api.Extractor): *duplicates, n=static_mesh_name, ch=False) - collision_duplicates = cmds.duplicate( - instance.data.get("collisionMembers"), ic=True) - cmds.parent(collision_duplicates, a=True, w=True) - instance.data["collisionMembers"] = collision_duplicates - - self.log.info( - "collision members: {}".format(instance.data["collisionMembers"])) - if not instance.data.get("cleanNodes"): instance.data["cleanNodes"] = [] instance.data["cleanNodes"].append(static_mesh_name) instance.data["cleanNodes"] += duplicates - instance.data["cleanNodes"] += collision_duplicates instance.data["setMembers"] = [static_mesh_name] instance.data["setMembers"] += instance.data["collisionMembers"] From c88f95fa6f289b0fef1daaccf79da019eb20417e Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 20 Jan 2022 17:18:06 +0100 Subject: [PATCH 131/160] =?UTF-8?q?fix=20=F0=9F=90=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/maya/api/lib.py | 1 - .../hosts/maya/plugins/create/create_unreal_staticmesh.py | 5 ++++- .../hosts/maya/plugins/publish/validate_assembly_name.py | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 0858c205ea..8e50c3c00a 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -2877,4 +2877,3 @@ def root_parent(nodes): for node in node_parents: if node[1]: node[0].setParent(node[1]) - diff --git a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py index d69cc6f0a1..9ad560ab7c 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -49,7 +49,10 @@ class CreateUnrealStaticMesh(plugin.Creator): children = cmds.ls(children, type="transform") for node in children: if cmds.listRelatives(node, type="shape"): - if [n for n in self.collision_prefixes if node.startswith(n)]: + if [ + n for n in self.collision_prefixes + if node.startswith(n) + ]: cmds.sets(node, forceElement=collisions_set) else: cmds.sets(node, forceElement=geometry_set) diff --git a/openpype/hosts/maya/plugins/publish/validate_assembly_name.py b/openpype/hosts/maya/plugins/publish/validate_assembly_name.py index 41349553fc..02464b2302 100644 --- a/openpype/hosts/maya/plugins/publish/validate_assembly_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_assembly_name.py @@ -30,7 +30,8 @@ class ValidateAssemblyName(pyblish.api.InstancePlugin): descendants = cmds.listRelatives(content_instance, allDescendents=True, fullPath=True) or [] - descendants = cmds.ls(descendants, noIntermediate=True, type="transform") + descendants = cmds.ls( + descendants, noIntermediate=True, type="transform") content_instance = list(set(content_instance + descendants)) assemblies = cmds.ls(content_instance, assemblies=True, long=True) From 49babb5f0ebe1248e647ddd9c276e3d72752cb58 Mon 
Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 20 Jan 2022 17:50:54 +0100 Subject: [PATCH 132/160] global: track name was failing if duplicated root word in name --- openpype/plugins/publish/collect_otio_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_otio_review.py b/openpype/plugins/publish/collect_otio_review.py index 35c77a24cb..4d8147e70d 100644 --- a/openpype/plugins/publish/collect_otio_review.py +++ b/openpype/plugins/publish/collect_otio_review.py @@ -46,7 +46,7 @@ class CollectOtioReview(pyblish.api.InstancePlugin): # loop all tracks and match with name in `reviewTrack` for track in otio_timeline.tracks: - if review_track_name not in track.name: + if review_track_name != track.name: continue # process correct track From 72170d550aa3b2d25313de36c5759e3dbab37f32 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 01:28:07 +0100 Subject: [PATCH 133/160] moved blender implementation to openpype --- openpype/hosts/blender/__init__.py | 7 +- openpype/hosts/blender/api/__init__.py | 136 +++--- .../hosts/blender/api/icons/pyblish-32x32.png | Bin 0 -> 632 bytes openpype/hosts/blender/api/lib.py | 159 +++++++ openpype/hosts/blender/api/ops.py | 410 +++++++++++++++++ openpype/hosts/blender/api/pipeline.py | 427 ++++++++++++++++++ openpype/hosts/blender/api/plugin.py | 47 +- openpype/hosts/blender/api/workio.py | 90 ++++ .../blender/blender_addon/startup/init.py | 4 + openpype/hosts/blender/plugins/__init__.py | 0 .../hosts/blender/plugins/create/__init__.py | 0 .../blender/plugins/create/create_action.py | 2 +- .../plugins/create/create_animation.py | 5 +- .../blender/plugins/create/create_camera.py | 5 +- .../blender/plugins/create/create_layout.py | 5 +- .../blender/plugins/create/create_model.py | 5 +- .../plugins/create/create_pointcache.py | 2 +- .../blender/plugins/create/create_rig.py | 5 +- .../hosts/blender/plugins/load/__init__.py | 0 .../hosts/blender/plugins/load/load_abc.py | 11 +- .../blender/plugins/load/load_animation.py | 7 +- .../hosts/blender/plugins/load/load_audio.py | 8 +- .../blender/plugins/load/load_camera_blend.py | 8 +- .../blender/plugins/load/load_camera_fbx.py | 11 +- .../hosts/blender/plugins/load/load_fbx.py | 11 +- .../blender/plugins/load/load_layout_blend.py | 8 +- .../blender/plugins/load/load_layout_json.py | 12 +- .../hosts/blender/plugins/load/load_look.py | 16 +- .../hosts/blender/plugins/load/load_model.py | 8 +- .../hosts/blender/plugins/load/load_rig.py | 8 +- .../hosts/blender/plugins/publish/__init__.py | 0 .../plugins/publish/collect_instances.py | 8 +- .../blender/plugins/publish/extract_abc.py | 6 +- .../blender/plugins/publish/extract_blend.py | 1 - .../blender/plugins/publish/extract_camera.py | 4 +- .../blender/plugins/publish/extract_fbx.py | 6 +- .../plugins/publish/extract_fbx_animation.py | 2 +- .../blender/plugins/publish/extract_layout.py | 2 +- .../publish/increment_workfile_version.py | 4 +- openpype/hosts/blender/startup/init.py | 3 - 40 files changed, 1270 insertions(+), 183 deletions(-) create mode 100644 openpype/hosts/blender/api/icons/pyblish-32x32.png create mode 100644 openpype/hosts/blender/api/ops.py create mode 100644 openpype/hosts/blender/api/pipeline.py create mode 100644 openpype/hosts/blender/api/workio.py create mode 100644 openpype/hosts/blender/blender_addon/startup/init.py delete mode 100644 openpype/hosts/blender/plugins/__init__.py delete mode 100644 openpype/hosts/blender/plugins/create/__init__.py delete mode 100644 
openpype/hosts/blender/plugins/load/__init__.py delete mode 100644 openpype/hosts/blender/plugins/publish/__init__.py delete mode 100644 openpype/hosts/blender/startup/init.py diff --git a/openpype/hosts/blender/__init__.py b/openpype/hosts/blender/__init__.py index 747394aad0..3081d3c9ba 100644 --- a/openpype/hosts/blender/__init__.py +++ b/openpype/hosts/blender/__init__.py @@ -5,11 +5,8 @@ def add_implementation_envs(env, _app): """Modify environments to contain all required for implementation.""" # Prepare path to implementation script implementation_user_script_path = os.path.join( - os.environ["OPENPYPE_REPOS_ROOT"], - "repos", - "avalon-core", - "setup", - "blender" + os.path.dirname(os.path.abspath(__file__)), + "blender_addon" ) # Add blender implementation script path to PYTHONPATH diff --git a/openpype/hosts/blender/api/__init__.py b/openpype/hosts/blender/api/__init__.py index ecf4fdf4da..e017d74d91 100644 --- a/openpype/hosts/blender/api/__init__.py +++ b/openpype/hosts/blender/api/__init__.py @@ -1,94 +1,64 @@ -import os -import sys -import traceback +"""Public API -import bpy +Anything that isn't defined here is INTERNAL and unreliable for external use. -from .lib import append_user_scripts +""" -from avalon import api as avalon -from pyblish import api as pyblish +from .pipeline import ( + install, + uninstall, + ls, + publish, + containerise, +) -import openpype.hosts.blender +from .plugin import ( + Creator, + Loader, +) -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +from .workio import ( + open_file, + save_file, + current_file, + has_unsaved_changes, + file_extensions, + work_root, +) -ORIGINAL_EXCEPTHOOK = sys.excepthook +from .lib import ( + lsattr, + lsattrs, + read, + maintained_selection, + get_selection, + # unique_name, +) -def pype_excepthook_handler(*args): - traceback.print_exception(*args) +__all__ = [ + "install", + "uninstall", + "ls", + "publish", + "containerise", + "Creator", + "Loader", -def install(): - """Install Blender configuration for Avalon.""" - sys.excepthook = pype_excepthook_handler - pyblish.register_plugin_path(str(PUBLISH_PATH)) - avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH)) - avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH)) - append_user_scripts() - avalon.on("new", on_new) - avalon.on("open", on_open) + # Workfiles API + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "file_extensions", + "work_root", - -def uninstall(): - """Uninstall Blender configuration for Avalon.""" - sys.excepthook = ORIGINAL_EXCEPTHOOK - pyblish.deregister_plugin_path(str(PUBLISH_PATH)) - avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH)) - avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH)) - - -def set_start_end_frames(): - from avalon import io - - asset_name = io.Session["AVALON_ASSET"] - asset_doc = io.find_one({ - "type": "asset", - "name": asset_name - }) - - scene = bpy.context.scene - - # Default scene settings - frameStart = scene.frame_start - frameEnd = scene.frame_end - fps = scene.render.fps - resolution_x = scene.render.resolution_x - resolution_y = scene.render.resolution_y - - # Check if settings are set - data = asset_doc.get("data") - - if not data: - return - - if data.get("frameStart"): 
- frameStart = data.get("frameStart") - if data.get("frameEnd"): - frameEnd = data.get("frameEnd") - if data.get("fps"): - fps = data.get("fps") - if data.get("resolutionWidth"): - resolution_x = data.get("resolutionWidth") - if data.get("resolutionHeight"): - resolution_y = data.get("resolutionHeight") - - scene.frame_start = frameStart - scene.frame_end = frameEnd - scene.render.fps = fps - scene.render.resolution_x = resolution_x - scene.render.resolution_y = resolution_y - - -def on_new(arg1, arg2): - set_start_end_frames() - - -def on_open(arg1, arg2): - set_start_end_frames() + # Utility functions + "maintained_selection", + "lsattr", + "lsattrs", + "read", + "get_selection", + # "unique_name", +] diff --git a/openpype/hosts/blender/api/icons/pyblish-32x32.png b/openpype/hosts/blender/api/icons/pyblish-32x32.png new file mode 100644 index 0000000000000000000000000000000000000000..b34e397e0bd502eb336f994f014a518198d93599 GIT binary patch literal 632 zcmV-;0*C#HP)?m3OVFzoDHzW14L zXLc9IkqG*a3_I>wqC(4bTV70aHK&_zb)S-or zdR!e?Gz01oa2+_3G`|NP#ua@8z5!o>X<#;9R|2bmL0|;f63ZT7V-l+d919^l%3Ar^ zg#3!SwovNY$J`#X`Bzud{=Sy+^`w3nIH_(b$uI|`d*DIZ+4=~EfmcB5%AW!Efyc4_ z377pl-+RGsR;{ENE2 zlz9SiqM-S_dY}^X1mao&fb*4_M}R@~`4Y_Us>><|h!DGM9;+<*qLEA%q6-GNXH<>i|*pjP~hX0nBH#FZ2qakcE>` z0W5F17Z?vA{OT}XF{!tbqt{SR^~5*hEygi&)Nu5GKn2{MR3FJp2!~ Sv8(t10000>> import bpy + >>> def compute(): + ... return 6 + ... + >>> bpy.ops.mesh.primitive_cube_add() + >>> cube = bpy.context.view_layer.objects.active + >>> imprint(cube, { + ... "regularString": "myFamily", + ... "computedValue": lambda: compute() + ... }) + ... + >>> cube['avalon']['computedValue'] + 6 + """ + + imprint_data = dict() + + for key, value in data.items(): + if value is None: + continue + + if callable(value): + # Support values evaluated at imprint + value = value() + + if not isinstance(value, (int, float, bool, str, list)): + raise TypeError(f"Unsupported type: {type(value)}") + + imprint_data[key] = value + + pipeline.metadata_update(node, imprint_data) + + +def lsattr(attr: str, + value: Union[str, int, bool, List, Dict, None] = None) -> List: + r"""Return nodes matching `attr` and `value` + + Arguments: + attr: Name of Blender property + value: Value of attribute. If none + is provided, return all nodes with this attribute. + + Example: + >>> lsattr("id", "myId") + ... [bpy.data.objects["myNode"] + >>> lsattr("id") + ... [bpy.data.objects["myNode"], bpy.data.objects["myOtherNode"]] + + Returns: + list + """ + + return lsattrs({attr: value}) + + +def lsattrs(attrs: Dict) -> List: + r"""Return nodes with the given attribute(s). + + Arguments: + attrs: Name and value pairs of expected matches + + Example: + >>> lsattrs({"age": 5}) # Return nodes with an `age` of 5 + # Return nodes with both `age` and `color` of 5 and blue + >>> lsattrs({"age": 5, "color": "blue"}) + + Returns a list. + + """ + + # For now return all objects, not filtered by scene/collection/view_layer. 
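
A short usage sketch for the metadata helpers in this new lib module (illustrative names, and it assumes a running Blender session): data written with imprint() is what lsattr()/lsattrs() and read() later query.

    import bpy

    from openpype.hosts.blender.api import lib

    # Tag a collection with Avalon metadata (illustrative name)...
    asset_group = bpy.data.collections.new("char_hero_modelMain")
    lib.imprint(asset_group, {"id": "pyblish.avalon.instance", "family": "model"})

    # ...and find or inspect it again later by attribute value.
    print(lib.lsattr("family", "model"))  # [bpy.data.collections['char_hero_modelMain']]
    print(lib.read(asset_group))          # {'id': 'pyblish.avalon.instance', 'family': 'model'}
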
+ matches = set() + for coll in dir(bpy.data): + if not isinstance( + getattr(bpy.data, coll), + bpy.types.bpy_prop_collection, + ): + continue + for node in getattr(bpy.data, coll): + for attr, value in attrs.items(): + avalon_prop = node.get(pipeline.AVALON_PROPERTY) + if not avalon_prop: + continue + if (avalon_prop.get(attr) + and (value is None or avalon_prop.get(attr) == value)): + matches.add(node) + return list(matches) + + +def read(node: bpy.types.bpy_struct_meta_idprop): + """Return user-defined attributes from `node`""" + + data = dict(node.get(pipeline.AVALON_PROPERTY)) + + # Ignore hidden/internal data + data = { + key: value + for key, value in data.items() if not key.startswith("_") + } + + return data + + +def get_selection() -> List[bpy.types.Object]: + """Return the selected objects from the current scene.""" + return [obj for obj in bpy.context.scene.objects if obj.select_get()] + + +@contextlib.contextmanager +def maintained_selection(): + r"""Maintain selection during context + + Example: + >>> with maintained_selection(): + ... # Modify selection + ... bpy.ops.object.select_all(action='DESELECT') + >>> # Selection restored + """ + + previous_selection = get_selection() + previous_active = bpy.context.view_layer.objects.active + try: + yield + finally: + # Clear the selection + for node in get_selection(): + node.select_set(state=False) + if previous_selection: + for node in previous_selection: + try: + node.select_set(state=True) + except ReferenceError: + # This could happen if a selected node was deleted during + # the context. + log.exception("Failed to reselect") + continue + try: + bpy.context.view_layer.objects.active = previous_active + except ReferenceError: + # This could happen if the active node was deleted during the + # context. + log.exception("Failed to set active object.") diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py new file mode 100644 index 0000000000..a73ef0133a --- /dev/null +++ b/openpype/hosts/blender/api/ops.py @@ -0,0 +1,410 @@ +"""Blender operators and menus for use with Avalon.""" + +import os +import sys +import platform +import time +import traceback +import collections +from pathlib import Path +from types import ModuleType +from typing import Dict, List, Optional, Union + +from Qt import QtWidgets, QtCore + +import bpy +import bpy.utils.previews + +import avalon.api +from openpype.tools.utils import host_tools +from openpype import style + +from .workio import OpenFileCacher + +PREVIEW_COLLECTIONS: Dict = dict() + +# This seems like a good value to keep the Qt app responsive and doesn't slow +# down Blender. At least on macOS I the interace of Blender gets very laggy if +# you make it smaller. 
+TIMER_INTERVAL: float = 0.01 + + +class BlenderApplication(QtWidgets.QApplication): + _instance = None + blender_windows = {} + + def __init__(self, *args, **kwargs): + super(BlenderApplication, self).__init__(*args, **kwargs) + self.setQuitOnLastWindowClosed(False) + + self.setStyleSheet(style.load_stylesheet()) + self.lastWindowClosed.connect(self.__class__.reset) + + @classmethod + def get_app(cls): + if cls._instance is None: + cls._instance = cls(sys.argv) + return cls._instance + + @classmethod + def reset(cls): + cls._instance = None + + @classmethod + def store_window(cls, identifier, window): + current_window = cls.get_window(identifier) + cls.blender_windows[identifier] = window + if current_window: + current_window.close() + # current_window.deleteLater() + + @classmethod + def get_window(cls, identifier): + return cls.blender_windows.get(identifier) + + +class MainThreadItem: + """Structure to store information about callback in main thread. + + Item should be used to execute callback in main thread which may be needed + for execution of Qt objects. + + Item store callback (callable variable), arguments and keyword arguments + for the callback. Item hold information about it's process. + """ + not_set = object() + sleep_time = 0.1 + + def __init__(self, callback, *args, **kwargs): + self.done = False + self.exception = self.not_set + self.result = self.not_set + self.callback = callback + self.args = args + self.kwargs = kwargs + + def execute(self): + """Execute callback and store it's result. + + Method must be called from main thread. Item is marked as `done` + when callback execution finished. Store output of callback of exception + information when callback raise one. + """ + print("Executing process in main thread") + if self.done: + print("- item is already processed") + return + + callback = self.callback + args = self.args + kwargs = self.kwargs + print("Running callback: {}".format(str(callback))) + try: + result = callback(*args, **kwargs) + self.result = result + + except Exception: + self.exception = sys.exc_info() + + finally: + print("Done") + self.done = True + + def wait(self): + """Wait for result from main thread. + + This method stops current thread until callback is executed. + + Returns: + object: Output of callback. May be any type or object. + + Raises: + Exception: Reraise any exception that happened during callback + execution. + """ + while not self.done: + print(self.done) + time.sleep(self.sleep_time) + + if self.exception is self.not_set: + return self.result + raise self.exception + + +class GlobalClass: + app = None + main_thread_callbacks = collections.deque() + is_windows = platform.system().lower() == "windows" + + +def execute_in_main_thread(main_thead_item): + print("execute_in_main_thread") + GlobalClass.main_thread_callbacks.append(main_thead_item) + + +def _process_app_events() -> Optional[float]: + """Process the events of the Qt app if the window is still visible. + + If the app has any top level windows and at least one of them is visible + return the time after which this function should be run again. Else return + None, so the function is not run again and will be unregistered. 
+ """ + while GlobalClass.main_thread_callbacks: + main_thread_item = GlobalClass.main_thread_callbacks.popleft() + main_thread_item.execute() + if main_thread_item.exception is not MainThreadItem.not_set: + _clc, val, tb = main_thread_item.exception + msg = str(val) + detail = "\n".join(traceback.format_exception(_clc, val, tb)) + dialog = QtWidgets.QMessageBox( + QtWidgets.QMessageBox.Warning, + "Error", + msg) + dialog.setMinimumWidth(500) + dialog.setDetailedText(detail) + dialog.exec_() + + if not GlobalClass.is_windows: + if OpenFileCacher.opening_file: + return TIMER_INTERVAL + + app = GlobalClass.app + if app._instance: + app.processEvents() + return TIMER_INTERVAL + return TIMER_INTERVAL + + +class LaunchQtApp(bpy.types.Operator): + """A Base class for opertors to launch a Qt app.""" + + _app: QtWidgets.QApplication + _window = Union[QtWidgets.QDialog, ModuleType] + _tool_name: str = None + _init_args: Optional[List] = list() + _init_kwargs: Optional[Dict] = dict() + bl_idname: str = None + + def __init__(self): + if self.bl_idname is None: + raise NotImplementedError("Attribute `bl_idname` must be set!") + print(f"Initialising {self.bl_idname}...") + self._app = BlenderApplication.get_app() + GlobalClass.app = self._app + + bpy.app.timers.register( + _process_app_events, + persistent=True + ) + + def execute(self, context): + """Execute the operator. + + The child class must implement `execute()` where it only has to set + `self._window` to the desired Qt window and then simply run + `return super().execute(context)`. + `self._window` is expected to have a `show` method. + If the `show` method requires arguments, you can set `self._show_args` + and `self._show_kwargs`. `args` should be a list, `kwargs` a + dictionary. + """ + + if self._tool_name is None: + if self._window is None: + raise AttributeError("`self._window` is not set.") + + else: + window = self._app.get_window(self.bl_idname) + if window is None: + window = host_tools.get_tool_by_name(self._tool_name) + self._app.store_window(self.bl_idname, window) + self._window = window + + if not isinstance( + self._window, + (QtWidgets.QMainWindow, QtWidgets.QDialog, ModuleType) + ): + raise AttributeError( + "`window` should be a `QDialog or module`. Got: {}".format( + str(type(window)) + ) + ) + + self.before_window_show() + + if isinstance(self._window, ModuleType): + self._window.show() + window = None + if hasattr(self._window, "window"): + window = self._window.window + elif hasattr(self._window, "_window"): + window = self._window.window + + if window: + self._app.store_window(self.bl_idname, window) + + else: + origin_flags = self._window.windowFlags() + on_top_flags = origin_flags | QtCore.Qt.WindowStaysOnTopHint + self._window.setWindowFlags(on_top_flags) + self._window.show() + + if on_top_flags != origin_flags: + self._window.setWindowFlags(origin_flags) + self._window.show() + + return {'FINISHED'} + + def before_window_show(self): + return + + +class LaunchCreator(LaunchQtApp): + """Launch Avalon Creator.""" + + bl_idname = "wm.avalon_creator" + bl_label = "Create..." + _tool_name = "creator" + + def before_window_show(self): + self._window.refresh() + + +class LaunchLoader(LaunchQtApp): + """Launch Avalon Loader.""" + + bl_idname = "wm.avalon_loader" + bl_label = "Load..." 
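
As a usage note for the main-thread machinery above, a minimal sketch (assuming it is called from outside Blender's main thread, for example from a Qt callback; the function and scene name are illustrative): work is queued as a MainThreadItem and picked up by the registered bpy.app.timers callback.

    from openpype.hosts.blender.api.ops import (
        MainThreadItem,
        execute_in_main_thread,
    )

    def rename_scene(new_name):
        import bpy
        bpy.context.scene.name = new_name
        return new_name

    # _process_app_events pops the item from the queue on a later timer tick
    # and runs it in Blender's main thread.
    item = MainThreadItem(rename_scene, "SH010")
    execute_in_main_thread(item)
    result = item.wait()  # blocks this thread until the callback has run
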
+ _tool_name = "loader" + + def before_window_show(self): + self._window.set_context( + {"asset": avalon.api.Session["AVALON_ASSET"]}, + refresh=True + ) + + +class LaunchPublisher(LaunchQtApp): + """Launch Avalon Publisher.""" + + bl_idname = "wm.avalon_publisher" + bl_label = "Publish..." + + def execute(self, context): + host_tools.show_publish() + return {"FINISHED"} + + +class LaunchManager(LaunchQtApp): + """Launch Avalon Manager.""" + + bl_idname = "wm.avalon_manager" + bl_label = "Manage..." + _tool_name = "sceneinventory" + + def before_window_show(self): + self._window.refresh() + + +class LaunchWorkFiles(LaunchQtApp): + """Launch Avalon Work Files.""" + + bl_idname = "wm.avalon_workfiles" + bl_label = "Work Files..." + _tool_name = "workfiles" + + def execute(self, context): + result = super().execute(context) + self._window.set_context({ + "asset": avalon.api.Session["AVALON_ASSET"], + "silo": avalon.api.Session["AVALON_SILO"], + "task": avalon.api.Session["AVALON_TASK"] + }) + return result + + def before_window_show(self): + self._window.root = str(Path( + os.environ.get("AVALON_WORKDIR", ""), + os.environ.get("AVALON_SCENEDIR", ""), + )) + self._window.refresh() + + +class TOPBAR_MT_avalon(bpy.types.Menu): + """Avalon menu.""" + + bl_idname = "TOPBAR_MT_avalon" + bl_label = os.environ.get("AVALON_LABEL") + + def draw(self, context): + """Draw the menu in the UI.""" + + layout = self.layout + + pcoll = PREVIEW_COLLECTIONS.get("avalon") + if pcoll: + pyblish_menu_icon = pcoll["pyblish_menu_icon"] + pyblish_menu_icon_id = pyblish_menu_icon.icon_id + else: + pyblish_menu_icon_id = 0 + + asset = avalon.api.Session['AVALON_ASSET'] + task = avalon.api.Session['AVALON_TASK'] + context_label = f"{asset}, {task}" + context_label_item = layout.row() + context_label_item.operator( + LaunchWorkFiles.bl_idname, text=context_label + ) + context_label_item.enabled = False + layout.separator() + layout.operator(LaunchCreator.bl_idname, text="Create...") + layout.operator(LaunchLoader.bl_idname, text="Load...") + layout.operator( + LaunchPublisher.bl_idname, + text="Publish...", + icon_value=pyblish_menu_icon_id, + ) + layout.operator(LaunchManager.bl_idname, text="Manage...") + layout.separator() + layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...") + # TODO (jasper): maybe add 'Reload Pipeline', 'Reset Frame Range' and + # 'Reset Resolution'? + + +def draw_avalon_menu(self, context): + """Draw the Avalon menu in the top bar.""" + + self.layout.menu(TOPBAR_MT_avalon.bl_idname) + + +classes = [ + LaunchCreator, + LaunchLoader, + LaunchPublisher, + LaunchManager, + LaunchWorkFiles, + TOPBAR_MT_avalon, +] + + +def register(): + "Register the operators and menu." 
+ + pcoll = bpy.utils.previews.new() + pyblish_icon_file = Path(__file__).parent / "icons" / "pyblish-32x32.png" + pcoll.load("pyblish_menu_icon", str(pyblish_icon_file.absolute()), 'IMAGE') + PREVIEW_COLLECTIONS["avalon"] = pcoll + + for cls in classes: + bpy.utils.register_class(cls) + bpy.types.TOPBAR_MT_editor_menus.append(draw_avalon_menu) + + +def unregister(): + """Unregister the operators and menu.""" + + pcoll = PREVIEW_COLLECTIONS.pop("avalon") + bpy.utils.previews.remove(pcoll) + bpy.types.TOPBAR_MT_editor_menus.remove(draw_avalon_menu) + for cls in reversed(classes): + bpy.utils.unregister_class(cls) diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py new file mode 100644 index 0000000000..0e5104fea9 --- /dev/null +++ b/openpype/hosts/blender/api/pipeline.py @@ -0,0 +1,427 @@ +import os +import sys +import importlib +import traceback +from typing import Callable, Dict, Iterator, List, Optional + +import bpy + +from . import lib +from . import ops + +import pyblish.api +import avalon.api +from avalon import io, schema +from avalon.pipeline import AVALON_CONTAINER_ID + +from openpype.api import Logger +import openpype.hosts.blender + +HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__)) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + +ORIGINAL_EXCEPTHOOK = sys.excepthook + +AVALON_INSTANCES = "AVALON_INSTANCES" +AVALON_CONTAINERS = "AVALON_CONTAINERS" +AVALON_PROPERTY = 'avalon' +IS_HEADLESS = bpy.app.background + +log = Logger.get_logger(__name__) + + +def pype_excepthook_handler(*args): + traceback.print_exception(*args) + + +def install(): + """Install Blender configuration for Avalon.""" + sys.excepthook = pype_excepthook_handler + + pyblish.api.register_host("blender") + pyblish.api.register_plugin_path(str(PUBLISH_PATH)) + + avalon.api.register_plugin_path(avalon.api.Loader, str(LOAD_PATH)) + avalon.api.register_plugin_path(avalon.api.Creator, str(CREATE_PATH)) + + lib.append_user_scripts() + + avalon.api.on("new", on_new) + avalon.api.on("open", on_open) + _register_callbacks() + _register_events() + + if not IS_HEADLESS: + ops.register() + + +def uninstall(): + """Uninstall Blender configuration for Avalon.""" + sys.excepthook = ORIGINAL_EXCEPTHOOK + + pyblish.api.deregister_host("blender") + pyblish.api.deregister_plugin_path(str(PUBLISH_PATH)) + + avalon.api.deregister_plugin_path(avalon.api.Loader, str(LOAD_PATH)) + avalon.api.deregister_plugin_path(avalon.api.Creator, str(CREATE_PATH)) + + if not IS_HEADLESS: + ops.unregister() + + +def set_start_end_frames(): + asset_name = io.Session["AVALON_ASSET"] + asset_doc = io.find_one({ + "type": "asset", + "name": asset_name + }) + + scene = bpy.context.scene + + # Default scene settings + frameStart = scene.frame_start + frameEnd = scene.frame_end + fps = scene.render.fps + resolution_x = scene.render.resolution_x + resolution_y = scene.render.resolution_y + + # Check if settings are set + data = asset_doc.get("data") + + if not data: + return + + if data.get("frameStart"): + frameStart = data.get("frameStart") + if data.get("frameEnd"): + frameEnd = data.get("frameEnd") + if data.get("fps"): + fps = data.get("fps") + if data.get("resolutionWidth"): + resolution_x = data.get("resolutionWidth") + if data.get("resolutionHeight"): + resolution_y = 
data.get("resolutionHeight") + + scene.frame_start = frameStart + scene.frame_end = frameEnd + scene.render.fps = fps + scene.render.resolution_x = resolution_x + scene.render.resolution_y = resolution_y + + +def on_new(arg1, arg2): + set_start_end_frames() + + +def on_open(arg1, arg2): + set_start_end_frames() + + +@bpy.app.handlers.persistent +def _on_save_pre(*args): + avalon.api.emit("before_save", args) + + +@bpy.app.handlers.persistent +def _on_save_post(*args): + avalon.api.emit("save", args) + + +@bpy.app.handlers.persistent +def _on_load_post(*args): + # Detect new file or opening an existing file + if bpy.data.filepath: + # Likely this was an open operation since it has a filepath + avalon.api.emit("open", args) + else: + avalon.api.emit("new", args) + + ops.OpenFileCacher.post_load() + + +def _register_callbacks(): + """Register callbacks for certain events.""" + def _remove_handler(handlers: List, callback: Callable): + """Remove the callback from the given handler list.""" + + try: + handlers.remove(callback) + except ValueError: + pass + + # TODO (jasper): implement on_init callback? + + # Be sure to remove existig ones first. + _remove_handler(bpy.app.handlers.save_pre, _on_save_pre) + _remove_handler(bpy.app.handlers.save_post, _on_save_post) + _remove_handler(bpy.app.handlers.load_post, _on_load_post) + + bpy.app.handlers.save_pre.append(_on_save_pre) + bpy.app.handlers.save_post.append(_on_save_post) + bpy.app.handlers.load_post.append(_on_load_post) + + log.info("Installed event handler _on_save_pre...") + log.info("Installed event handler _on_save_post...") + log.info("Installed event handler _on_load_post...") + + +def _on_task_changed(*args): + """Callback for when the task in the context is changed.""" + + # TODO (jasper): Blender has no concept of projects or workspace. + # It would be nice to override 'bpy.ops.wm.open_mainfile' so it takes the + # workdir as starting directory. But I don't know if that is possible. + # Another option would be to create a custom 'File Selector' and add the + # `directory` attribute, so it opens in that directory (does it?). + # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector + # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add + workdir = avalon.api.Session["AVALON_WORKDIR"] + log.debug("New working directory: %s", workdir) + + +def _register_events(): + """Install callbacks for specific events.""" + + avalon.api.on("taskChanged", _on_task_changed) + log.info("Installed event callback for 'taskChanged'...") + + +def reload_pipeline(*args): + """Attempt to reload pipeline at run-time. + + Warning: + This is primarily for development and debugging purposes and not well + tested. 
+ + """ + + avalon.api.uninstall() + + for module in ( + "avalon.io", + "avalon.lib", + "avalon.pipeline", + "avalon.tools.creator.app", + "avalon.tools.manager.app", + "avalon.api", + "avalon.tools", + ): + module = importlib.import_module(module) + importlib.reload(module) + + +def _discover_gui() -> Optional[Callable]: + """Return the most desirable of the currently registered GUIs""" + + # Prefer last registered + guis = reversed(pyblish.api.registered_guis()) + + for gui in guis: + try: + gui = __import__(gui).show + except (ImportError, AttributeError): + continue + else: + return gui + + return None + + +def add_to_avalon_container(container: bpy.types.Collection): + """Add the container to the Avalon container.""" + + avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) + if not avalon_container: + avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS) + + # Link the container to the scene so it's easily visible to the artist + # and can be managed easily. Otherwise it's only found in "Blender + # File" view and it will be removed by Blenders garbage collection, + # unless you set a 'fake user'. + bpy.context.scene.collection.children.link(avalon_container) + + avalon_container.children.link(container) + + # Disable Avalon containers for the view layers. + for view_layer in bpy.context.scene.view_layers: + for child in view_layer.layer_collection.children: + if child.collection == avalon_container: + child.exclude = True + + +def metadata_update(node: bpy.types.bpy_struct_meta_idprop, data: Dict): + """Imprint the node with metadata. + + Existing metadata will be updated. + """ + + if not node.get(AVALON_PROPERTY): + node[AVALON_PROPERTY] = dict() + for key, value in data.items(): + if value is None: + continue + node[AVALON_PROPERTY][key] = value + + +def containerise(name: str, + namespace: str, + nodes: List, + context: Dict, + loader: Optional[str] = None, + suffix: Optional[str] = "CON") -> bpy.types.Collection: + """Bundle `nodes` into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + name: Name of resulting assembly + namespace: Namespace under which to host container + nodes: Long names of nodes to containerise + context: Asset information + loader: Name of loader used to produce this container. + suffix: Suffix of container, defaults to `_CON`. + + Returns: + The container assembly + + """ + + node_name = f"{context['asset']['name']}_{name}" + if namespace: + node_name = f"{namespace}:{node_name}" + if suffix: + node_name = f"{node_name}_{suffix}" + container = bpy.data.collections.new(name=node_name) + # Link the children nodes + for obj in nodes: + container.objects.link(obj) + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(loader), + "representation": str(context["representation"]["_id"]), + } + + metadata_update(container, data) + add_to_avalon_container(container) + + return container + + +def containerise_existing( + container: bpy.types.Collection, + name: str, + namespace: str, + context: Dict, + loader: Optional[str] = None, + suffix: Optional[str] = "CON") -> bpy.types.Collection: + """Imprint or update container with metadata. + + Arguments: + name: Name of resulting assembly + namespace: Namespace under which to host container + context: Asset information + loader: Name of loader used to produce this container. 
+ suffix: Suffix of container, defaults to `_CON`. + + Returns: + The container assembly + """ + + node_name = container.name + if suffix: + node_name = f"{node_name}_{suffix}" + container.name = node_name + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": name, + "namespace": namespace or '', + "loader": str(loader), + "representation": str(context["representation"]["_id"]), + } + + metadata_update(container, data) + add_to_avalon_container(container) + + return container + + +def parse_container(container: bpy.types.Collection, + validate: bool = True) -> Dict: + """Return the container node's full container data. + + Args: + container: A container node name. + validate: turn the validation for the container on or off + + Returns: + The container schema data for this container node. + + """ + + data = lib.read(container) + + # Append transient data + data["objectName"] = container.name + + if validate: + schema.validate(data) + + return data + + +def ls() -> Iterator: + """List containers from active Blender scene. + + This is the host-equivalent of api.ls(), but instead of listing assets on + disk, it lists assets already loaded in Blender; once loaded they are + called containers. + """ + + for container in lib.lsattr("id", AVALON_CONTAINER_ID): + yield parse_container(container) + + +def update_hierarchy(containers): + """Hierarchical container support + + This is the function to support Scene Inventory to draw hierarchical + view for containers. + + We need both parent and children to visualize the graph. + + """ + + all_containers = set(ls()) # lookup set + + for container in containers: + # Find parent + # FIXME (jasperge): re-evaluate this. How would it be possible + # to 'nest' assets? Collections can have several parents, for + # now assume it has only 1 parent + parent = [ + coll for coll in bpy.data.collections if container in coll.children + ] + for node in parent: + if node in all_containers: + container["parent"] = node + break + + log.debug("Container: %s", container) + + yield container + + +def publish(): + """Shorthand to publish from within host.""" + + return pyblish.util.publish() diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py index 6d437059b8..602b3b0ff9 100644 --- a/openpype/hosts/blender/api/plugin.py +++ b/openpype/hosts/blender/api/plugin.py @@ -5,10 +5,17 @@ from typing import Dict, List, Optional import bpy -from avalon import api, blender -from avalon.blender import ops -from avalon.blender.pipeline import AVALON_CONTAINERS +import avalon.api from openpype.api import PypeCreatorMixin +from .pipeline import AVALON_CONTAINERS +from .ops import ( + MainThreadItem, + execute_in_main_thread +) +from .lib import ( + imprint, + get_selection +) VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"] @@ -119,11 +126,27 @@ def deselect_all(): bpy.context.view_layer.objects.active = active -class Creator(PypeCreatorMixin, blender.Creator): - pass +class Creator(PypeCreatorMixin, avalon.api.Creator): + """Base class for Creator plug-ins.""" + def process(self): + collection = bpy.data.collections.new(name=self.data["subset"]) + bpy.context.scene.collection.children.link(collection) + imprint(collection, self.data) + + if (self.options or {}).get("useSelection"): + for obj in get_selection(): + collection.objects.link(obj) + + return collection -class AssetLoader(api.Loader): +class Loader(avalon.api.Loader): + """Base class for Loader plug-ins.""" + + hosts = ["blender"] + + +class 
AssetLoader(avalon.api.Loader): """A basic AssetLoader for Blender This will implement the basic logic for linking/appending assets @@ -191,8 +214,8 @@ class AssetLoader(api.Loader): namespace: Optional[str] = None, options: Optional[Dict] = None) -> Optional[bpy.types.Collection]: """ Run the loader on Blender main thread""" - mti = ops.MainThreadItem(self._load, context, name, namespace, options) - ops.execute_in_main_thread(mti) + mti = MainThreadItem(self._load, context, name, namespace, options) + execute_in_main_thread(mti) def _load(self, context: dict, @@ -257,8 +280,8 @@ class AssetLoader(api.Loader): def update(self, container: Dict, representation: Dict): """ Run the update on Blender main thread""" - mti = ops.MainThreadItem(self.exec_update, container, representation) - ops.execute_in_main_thread(mti) + mti = MainThreadItem(self.exec_update, container, representation) + execute_in_main_thread(mti) def exec_remove(self, container: Dict) -> bool: """Must be implemented by a sub-class""" @@ -266,5 +289,5 @@ class AssetLoader(api.Loader): def remove(self, container: Dict) -> bool: """ Run the remove on Blender main thread""" - mti = ops.MainThreadItem(self.exec_remove, container) - ops.execute_in_main_thread(mti) + mti = MainThreadItem(self.exec_remove, container) + execute_in_main_thread(mti) diff --git a/openpype/hosts/blender/api/workio.py b/openpype/hosts/blender/api/workio.py new file mode 100644 index 0000000000..fd68761982 --- /dev/null +++ b/openpype/hosts/blender/api/workio.py @@ -0,0 +1,90 @@ +"""Host API required for Work Files.""" + +from pathlib import Path +from typing import List, Optional + +import bpy +from avalon import api + + +class OpenFileCacher: + """Store information about opening file. + + When file is opening QApplcation events should not be processed. 
+ """ + opening_file = False + + @classmethod + def post_load(cls): + cls.opening_file = False + + @classmethod + def set_opening(cls): + cls.opening_file = True + + +def open_file(filepath: str) -> Optional[str]: + """Open the scene file in Blender.""" + OpenFileCacher.set_opening() + + preferences = bpy.context.preferences + load_ui = preferences.filepaths.use_load_ui + use_scripts = preferences.filepaths.use_scripts_auto_execute + result = bpy.ops.wm.open_mainfile( + filepath=filepath, + load_ui=load_ui, + use_scripts=use_scripts, + ) + + if result == {'FINISHED'}: + return filepath + return None + + +def save_file(filepath: str, copy: bool = False) -> Optional[str]: + """Save the open scene file.""" + + preferences = bpy.context.preferences + compress = preferences.filepaths.use_file_compression + relative_remap = preferences.filepaths.use_relative_paths + result = bpy.ops.wm.save_as_mainfile( + filepath=filepath, + compress=compress, + relative_remap=relative_remap, + copy=copy, + ) + + if result == {'FINISHED'}: + return filepath + return None + + +def current_file() -> Optional[str]: + """Return the path of the open scene file.""" + + current_filepath = bpy.data.filepath + if Path(current_filepath).is_file(): + return current_filepath + return None + + +def has_unsaved_changes() -> bool: + """Does the open scene file have unsaved changes?""" + + return bpy.data.is_dirty + + +def file_extensions() -> List[str]: + """Return the supported file extensions for Blender scene files.""" + + return api.HOST_WORKFILE_EXTENSIONS["blender"] + + +def work_root(session: dict) -> str: + """Return the default root to browse for work files.""" + + work_dir = session["AVALON_WORKDIR"] + scene_dir = session.get("AVALON_SCENEDIR") + if scene_dir: + return str(Path(work_dir, scene_dir)) + return work_dir diff --git a/openpype/hosts/blender/blender_addon/startup/init.py b/openpype/hosts/blender/blender_addon/startup/init.py new file mode 100644 index 0000000000..e43373bc6c --- /dev/null +++ b/openpype/hosts/blender/blender_addon/startup/init.py @@ -0,0 +1,4 @@ +from avalon import pipeline +from openpype.hosts.blender import api + +pipeline.install(api) diff --git a/openpype/hosts/blender/plugins/__init__.py b/openpype/hosts/blender/plugins/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/blender/plugins/create/__init__.py b/openpype/hosts/blender/plugins/create/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/blender/plugins/create/create_action.py b/openpype/hosts/blender/plugins/create/create_action.py index f7bb2bfc26..5f66f5da6e 100644 --- a/openpype/hosts/blender/plugins/create/create_action.py +++ b/openpype/hosts/blender/plugins/create/create_action.py @@ -4,7 +4,7 @@ import bpy from avalon import api import openpype.hosts.blender.api.plugin -from avalon.blender import lib +from openpype.hosts.blender.api import lib class CreateAction(openpype.hosts.blender.api.plugin.Creator): diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py index 3b4cabe8ec..b88010ae90 100644 --- a/openpype/hosts/blender/plugins/create/create_animation.py +++ b/openpype/hosts/blender/plugins/create/create_animation.py @@ -3,9 +3,8 @@ import bpy from avalon import api -from avalon.blender import lib, ops -from avalon.blender.pipeline import AVALON_INSTANCES -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib, ops 
+from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES class CreateAnimation(plugin.Creator): diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py index 6fa80b5a5d..cc796d464d 100644 --- a/openpype/hosts/blender/plugins/create/create_camera.py +++ b/openpype/hosts/blender/plugins/create/create_camera.py @@ -3,9 +3,8 @@ import bpy from avalon import api -from avalon.blender import lib, ops -from avalon.blender.pipeline import AVALON_INSTANCES -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib, ops +from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES class CreateCamera(plugin.Creator): diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py index dac12e19b1..f62cbc52ba 100644 --- a/openpype/hosts/blender/plugins/create/create_layout.py +++ b/openpype/hosts/blender/plugins/create/create_layout.py @@ -3,9 +3,8 @@ import bpy from avalon import api -from avalon.blender import lib, ops -from avalon.blender.pipeline import AVALON_INSTANCES -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib, ops +from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES class CreateLayout(plugin.Creator): diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py index 903b70033b..75c90f9bb1 100644 --- a/openpype/hosts/blender/plugins/create/create_model.py +++ b/openpype/hosts/blender/plugins/create/create_model.py @@ -3,9 +3,8 @@ import bpy from avalon import api -from avalon.blender import lib, ops -from avalon.blender.pipeline import AVALON_INSTANCES -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib, ops +from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES class CreateModel(plugin.Creator): diff --git a/openpype/hosts/blender/plugins/create/create_pointcache.py b/openpype/hosts/blender/plugins/create/create_pointcache.py index 03a468f82e..bf5a84048f 100644 --- a/openpype/hosts/blender/plugins/create/create_pointcache.py +++ b/openpype/hosts/blender/plugins/create/create_pointcache.py @@ -3,8 +3,8 @@ import bpy from avalon import api -from avalon.blender import lib import openpype.hosts.blender.api.plugin +from openpype.hosts.blender.api import lib class CreatePointcache(openpype.hosts.blender.api.plugin.Creator): diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py index ec74e279c6..65f5061924 100644 --- a/openpype/hosts/blender/plugins/create/create_rig.py +++ b/openpype/hosts/blender/plugins/create/create_rig.py @@ -3,9 +3,8 @@ import bpy from avalon import api -from avalon.blender import lib, ops -from avalon.blender.pipeline import AVALON_INSTANCES -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib, ops +from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES class CreateRig(plugin.Creator): diff --git a/openpype/hosts/blender/plugins/load/__init__.py b/openpype/hosts/blender/plugins/load/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py index 5969432c36..07800521c9 100644 --- a/openpype/hosts/blender/plugins/load/load_abc.py +++ 
b/openpype/hosts/blender/plugins/load/load_abc.py @@ -7,11 +7,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender import lib -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) +from openpype.hosts.blender.api import plugin, lib class CacheModelLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_animation.py b/openpype/hosts/blender/plugins/load/load_animation.py index 47c48248b2..6b8d4abd04 100644 --- a/openpype/hosts/blender/plugins/load/load_animation.py +++ b/openpype/hosts/blender/plugins/load/load_animation.py @@ -1,16 +1,11 @@ """Load an animation in Blender.""" -import logging from typing import Dict, List, Optional import bpy -from avalon.blender.pipeline import AVALON_PROPERTY from openpype.hosts.blender.api import plugin - - -logger = logging.getLogger("openpype").getChild( - "blender").getChild("load_animation") +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class BlendAnimationLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_audio.py b/openpype/hosts/blender/plugins/load/load_audio.py index 660e4d7890..e065150c15 100644 --- a/openpype/hosts/blender/plugins/load/load_audio.py +++ b/openpype/hosts/blender/plugins/load/load_audio.py @@ -7,10 +7,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) class AudioLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_camera_blend.py b/openpype/hosts/blender/plugins/load/load_camera_blend.py index 834eb467d8..61955f124d 100644 --- a/openpype/hosts/blender/plugins/load/load_camera_blend.py +++ b/openpype/hosts/blender/plugins/load/load_camera_blend.py @@ -8,10 +8,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) logger = logging.getLogger("openpype").getChild( "blender").getChild("load_camera") diff --git a/openpype/hosts/blender/plugins/load/load_camera_fbx.py b/openpype/hosts/blender/plugins/load/load_camera_fbx.py index 5edba7ec0c..175ddacf9f 100644 --- a/openpype/hosts/blender/plugins/load/load_camera_fbx.py +++ b/openpype/hosts/blender/plugins/load/load_camera_fbx.py @@ -7,11 +7,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender import lib -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib +from openpype.hosts.blender.api.pipeline import ( + 
AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) class FbxCameraLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_fbx.py b/openpype/hosts/blender/plugins/load/load_fbx.py index 5f69aecb1a..c6e6af5592 100644 --- a/openpype/hosts/blender/plugins/load/load_fbx.py +++ b/openpype/hosts/blender/plugins/load/load_fbx.py @@ -7,11 +7,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender import lib -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY -from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api import plugin, lib +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) class FbxModelLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index 4c1f751a77..dff7ffb9c6 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -7,10 +7,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) class BlendLayoutLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py index 442cf05d85..2378ae4807 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_json.py +++ b/openpype/hosts/blender/plugins/load/load_layout_json.py @@ -1,18 +1,20 @@ """Load a layout in Blender.""" +import json from pathlib import Path from pprint import pformat from typing import Dict, Optional import bpy -import json from avalon import api -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY -from avalon.blender.pipeline import AVALON_INSTANCES from openpype import lib +from openpype.hosts.blender.api.pipeline import ( + AVALON_INSTANCES, + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) from openpype.hosts.blender.api import plugin diff --git a/openpype/hosts/blender/plugins/load/load_look.py b/openpype/hosts/blender/plugins/load/load_look.py index 279af2b626..066ec0101b 100644 --- a/openpype/hosts/blender/plugins/load/load_look.py +++ b/openpype/hosts/blender/plugins/load/load_look.py @@ -8,8 +8,12 @@ import os import json import bpy -from avalon import api, blender -import openpype.hosts.blender.api.plugin as plugin +from avalon import api +from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + containerise_existing, + AVALON_PROPERTY +) class BlendLookLoader(plugin.AssetLoader): @@ -105,7 +109,7 @@ class BlendLookLoader(plugin.AssetLoader): container = bpy.data.collections.new(lib_container) container.name = container_name - blender.pipeline.containerise_existing( + containerise_existing( container, name, namespace, @@ -113,7 +117,7 @@ class BlendLookLoader(plugin.AssetLoader): self.__class__.__name__, ) - metadata = 
container.get(blender.pipeline.AVALON_PROPERTY) + metadata = container.get(AVALON_PROPERTY) metadata["libpath"] = libpath metadata["lib_container"] = lib_container @@ -161,7 +165,7 @@ class BlendLookLoader(plugin.AssetLoader): f"Unsupported file: {libpath}" ) - collection_metadata = collection.get(blender.pipeline.AVALON_PROPERTY) + collection_metadata = collection.get(AVALON_PROPERTY) collection_libpath = collection_metadata["libpath"] normalized_collection_libpath = ( @@ -204,7 +208,7 @@ class BlendLookLoader(plugin.AssetLoader): if not collection: return False - collection_metadata = collection.get(blender.pipeline.AVALON_PROPERTY) + collection_metadata = collection.get(AVALON_PROPERTY) for obj in collection_metadata['objects']: for child in self.get_all_children(obj): diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index c33c656dec..861da9b852 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -7,10 +7,12 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) class BlendModelLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py index e80da8af45..b753488144 100644 --- a/openpype/hosts/blender/plugins/load/load_rig.py +++ b/openpype/hosts/blender/plugins/load/load_rig.py @@ -7,11 +7,13 @@ from typing import Dict, List, Optional import bpy from avalon import api -from avalon.blender.pipeline import AVALON_CONTAINERS -from avalon.blender.pipeline import AVALON_CONTAINER_ID -from avalon.blender.pipeline import AVALON_PROPERTY from openpype import lib from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import ( + AVALON_CONTAINERS, + AVALON_PROPERTY, + AVALON_CONTAINER_ID +) class BlendRigLoader(plugin.AssetLoader): diff --git a/openpype/hosts/blender/plugins/publish/__init__.py b/openpype/hosts/blender/plugins/publish/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/blender/plugins/publish/collect_instances.py b/openpype/hosts/blender/plugins/publish/collect_instances.py index 0d683dace4..bc4b5ab092 100644 --- a/openpype/hosts/blender/plugins/publish/collect_instances.py +++ b/openpype/hosts/blender/plugins/publish/collect_instances.py @@ -1,11 +1,13 @@ +import json from typing import Generator import bpy -import json import pyblish.api -from avalon.blender.pipeline import AVALON_PROPERTY -from avalon.blender.pipeline import AVALON_INSTANCES +from openpype.hosts.blender.api.pipeline import ( + AVALON_INSTANCES, + AVALON_PROPERTY, +) class CollectInstances(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index b75bec4e28..a26a92f7e4 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -1,10 +1,10 @@ import os +import bpy + from openpype import api from openpype.hosts.blender.api import plugin -from avalon.blender.pipeline import AVALON_PROPERTY - -import bpy +from 
openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractABC(api.Extractor): diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py index 565e2fe425..9add633f05 100644 --- a/openpype/hosts/blender/plugins/publish/extract_blend.py +++ b/openpype/hosts/blender/plugins/publish/extract_blend.py @@ -2,7 +2,6 @@ import os import bpy -# import avalon.blender.workio import openpype.api diff --git a/openpype/hosts/blender/plugins/publish/extract_camera.py b/openpype/hosts/blender/plugins/publish/extract_camera.py index a0e78178c8..597dcecd21 100644 --- a/openpype/hosts/blender/plugins/publish/extract_camera.py +++ b/openpype/hosts/blender/plugins/publish/extract_camera.py @@ -1,10 +1,10 @@ import os +import bpy + from openpype import api from openpype.hosts.blender.api import plugin -import bpy - class ExtractCamera(api.Extractor): """Extract as the camera as FBX.""" diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx.py b/openpype/hosts/blender/plugins/publish/extract_fbx.py index f9ffdea1d1..26344777a8 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx.py @@ -1,10 +1,10 @@ import os +import bpy + from openpype import api from openpype.hosts.blender.api import plugin -from avalon.blender.pipeline import AVALON_PROPERTY - -import bpy +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractFBX(api.Extractor): diff --git a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py index 16443b760c..50a414c0d6 100644 --- a/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -7,7 +7,7 @@ import bpy_extras.anim_utils from openpype import api from openpype.hosts.blender.api import plugin -from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractAnimationFBX(api.Extractor): diff --git a/openpype/hosts/blender/plugins/publish/extract_layout.py b/openpype/hosts/blender/plugins/publish/extract_layout.py index cd081b4479..1ecf66099c 100644 --- a/openpype/hosts/blender/plugins/publish/extract_layout.py +++ b/openpype/hosts/blender/plugins/publish/extract_layout.py @@ -4,7 +4,7 @@ import json import bpy from avalon import io -from avalon.blender.pipeline import AVALON_PROPERTY +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY import openpype.api diff --git a/openpype/hosts/blender/plugins/publish/increment_workfile_version.py b/openpype/hosts/blender/plugins/publish/increment_workfile_version.py index b81e1111ea..986842d0d6 100644 --- a/openpype/hosts/blender/plugins/publish/increment_workfile_version.py +++ b/openpype/hosts/blender/plugins/publish/increment_workfile_version.py @@ -1,5 +1,5 @@ import pyblish.api -import avalon.blender.workio +from openpype.hosts.blender.api.workio import save_file class IncrementWorkfileVersion(pyblish.api.ContextPlugin): @@ -20,6 +20,6 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin): path = context.data["currentFile"] filepath = version_up(path) - avalon.blender.workio.save_file(filepath, copy=False) + save_file(filepath, copy=False) self.log.info('Incrementing script version') diff --git a/openpype/hosts/blender/startup/init.py b/openpype/hosts/blender/startup/init.py deleted file mode 100644 index 4b4e48fedc..0000000000 --- 
a/openpype/hosts/blender/startup/init.py +++ /dev/null @@ -1,3 +0,0 @@ -from openpype.hosts.blender import api - -api.install() From 239ab2cbaaa70b165cd1a316fd359f44af7907eb Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 11:37:37 +0100 Subject: [PATCH 134/160] pype info will also show build version --- openpype/lib/pype_info.py | 8 +++++--- openpype/tools/tray/pype_info_widget.py | 15 +++++++++++---- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/openpype/lib/pype_info.py b/openpype/lib/pype_info.py index 848a505187..8370ecc88f 100644 --- a/openpype/lib/pype_info.py +++ b/openpype/lib/pype_info.py @@ -10,11 +10,12 @@ from .execute import get_openpype_execute_args from .local_settings import get_local_site_id from .openpype_version import ( is_running_from_build, - get_openpype_version + get_openpype_version, + get_build_version ) -def get_pype_info(): +def get_openpype_info(): """Information about currently used Pype process.""" executable_args = get_openpype_execute_args() if is_running_from_build(): @@ -23,6 +24,7 @@ def get_pype_info(): version_type = "code" return { + "build_verison": get_build_version(), "version": get_openpype_version(), "version_type": version_type, "executable": executable_args[-1], @@ -51,7 +53,7 @@ def get_workstation_info(): def get_all_current_info(): """All information about current process in one dictionary.""" return { - "pype": get_pype_info(), + "pype": get_openpype_info(), "workstation": get_workstation_info(), "env": os.environ.copy(), "local_settings": get_local_settings() diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py index 2ca625f307..e68793b18c 100644 --- a/openpype/tools/tray/pype_info_widget.py +++ b/openpype/tools/tray/pype_info_widget.py @@ -9,7 +9,7 @@ from openpype.api import resources from openpype.settings.lib import get_local_settings from openpype.lib.pype_info import ( get_all_current_info, - get_pype_info, + get_openpype_info, get_workstation_info, extract_pype_info_to_file ) @@ -426,7 +426,7 @@ class PypeInfoSubWidget(QtWidgets.QWidget): """Create widget with information about OpenPype application.""" # Get pype info data - pype_info = get_pype_info() + pype_info = get_openpype_info() # Modify version key/values version_value = "{} ({})".format( pype_info.pop("version", self.not_applicable), @@ -435,13 +435,20 @@ class PypeInfoSubWidget(QtWidgets.QWidget): pype_info["version_value"] = version_value # Prepare lable mapping key_label_mapping = { - "version_value": "OpenPype version:", + "version_value": "Running version:", + "build_verison": "Build version:", "executable": "OpenPype executable:", "pype_root": "OpenPype location:", "mongo_url": "OpenPype Mongo URL:" } # Prepare keys order - keys_order = ["version_value", "executable", "pype_root", "mongo_url"] + keys_order = [ + "version_value", + "build_verison", + "executable", + "pype_root", + "mongo_url" + ] for key in pype_info.keys(): if key not in keys_order: keys_order.append(key) From e9d92ef96930334fd0ebfb49c227046d42de0c97 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 11:37:51 +0100 Subject: [PATCH 135/160] trigger version check on start --- openpype/tools/tray/pype_tray.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index c9b8aaa842..4abf0f5a83 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -328,8 +328,8 @@ class TrayManager: self.main_thread_timer 
= main_thread_timer version_check_timer = QtCore.QTimer() - version_check_timer.timeout.connect(self._on_version_check_timer) if self._version_check_interval > 0: + version_check_timer.timeout.connect(self._on_version_check_timer) version_check_timer.setInterval(self._version_check_interval) version_check_timer.start() self._version_check_timer = version_check_timer @@ -341,6 +341,9 @@ class TrayManager: def _startup_validations(self): """Run possible startup validations.""" + # Trigger version validation on start + self._version_check_timer.timeout.emit() + self._validate_settings_defaults() def _validate_settings_defaults(self): From ac79f24e403a8f1e606323b4208a9bb6e8281cb0 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 12:11:33 +0100 Subject: [PATCH 136/160] handle situations when current version is higher --- openpype/lib/__init__.py | 3 +- openpype/lib/openpype_version.py | 29 +++++++++++++ openpype/tools/tray/pype_tray.py | 74 +++++++++++++++++++++++++------- 3 files changed, 90 insertions(+), 16 deletions(-) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 1c8f7a57af..7dd9a8793b 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -175,7 +175,8 @@ from .openpype_version import ( get_expected_version, is_running_from_build, is_running_staging, - is_current_version_studio_latest + is_current_version_studio_latest, + is_current_version_higher_than_expected ) terminal = Terminal diff --git a/openpype/lib/openpype_version.py b/openpype/lib/openpype_version.py index 201bf646e9..d547d34755 100644 --- a/openpype/lib/openpype_version.py +++ b/openpype/lib/openpype_version.py @@ -195,3 +195,32 @@ def is_current_version_studio_latest(): expected_version = get_expected_version() # Check if current version is expected version return current_version == expected_version + + +def is_current_version_higher_than_expected(): + """Is current OpenPype version higher than version defined by studio. + + Returns: + None: Can't determine. e.g. when running from code or the build is + too old. + bool: True when is higher than studio version. 
+ """ + output = None + # Skip if is not running from build or build does not support version + # control or path to folder with zip files is not accessible + if ( + not is_running_from_build() + or not op_version_control_available() + or not openpype_path_is_accessible() + ): + return output + + # Get OpenPypeVersion class + OpenPypeVersion = get_OpenPypeVersion() + # Convert current version to OpenPypeVersion object + current_version = OpenPypeVersion(version=get_openpype_version()) + + # Get expected version (from settings) + expected_version = get_expected_version() + # Check if current version is expected version + return current_version > expected_version diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 4abf0f5a83..a6ce5d3a89 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -18,6 +18,7 @@ from openpype.lib import ( get_openpype_execute_args, op_version_control_available, is_current_version_studio_latest, + is_current_version_higher_than_expected, is_running_from_build, is_running_staging, get_expected_version, @@ -104,13 +105,12 @@ class VersionDialog(QtWidgets.QDialog): label_widget.setWordWrap(True) top_layout = QtWidgets.QHBoxLayout(top_widget) - # top_layout.setContentsMargins(0, 0, 0, 0) top_layout.setSpacing(10) top_layout.addWidget(gift_icon_label, 0, QtCore.Qt.AlignCenter) top_layout.addWidget(label_widget, 1) - ignore_btn = QtWidgets.QPushButton("Later", self) - restart_btn = QtWidgets.QPushButton("Restart && Update", self) + ignore_btn = QtWidgets.QPushButton(self) + restart_btn = QtWidgets.QPushButton(self) restart_btn.setObjectName("TrayRestartButton") btns_layout = QtWidgets.QHBoxLayout() @@ -127,7 +127,12 @@ class VersionDialog(QtWidgets.QDialog): restart_btn.clicked.connect(self._on_reset) self._label_widget = label_widget + self._gift_icon_label = gift_icon_label + self._ignore_btn = ignore_btn + self._restart_btn = restart_btn + self._restart_accepted = False + self._current_is_higher = False self.setStyleSheet(style.load_stylesheet()) @@ -152,15 +157,37 @@ class VersionDialog(QtWidgets.QDialog): def closeEvent(self, event): super().closeEvent(event) - if not self._restart_accepted: - self.ignore_requested.emit() + if self._restart_accepted or self._current_is_higher: + return + # Trigger ignore requested only if restart was not clicked and current + # version is lower + self.ignore_requested.emit() - def update_versions(self, current_version, expected_version): - message = ( - "Running OpenPype version is {}." - " Your production has been updated to version {}." - ).format(str(current_version), str(expected_version)) - self._label_widget.setText(message) + def update_versions( + self, current_version, expected_version, current_is_higher + ): + if not current_is_higher: + label_message = ( + "Running OpenPype version is {}." + " Your production has been updated to version {}." + ).format(str(current_version), str(expected_version)) + ignore_label = "Later" + restart_label = "Restart && Update" + else: + label_message = ( + "Running OpenPype version is {}." + " Your production should use version {}." 
+ ).format(str(current_version), str(expected_version)) + ignore_label = "I know" + restart_label = "Restart && Change" + + self._current_is_higher = current_is_higher + + self._gift_icon_label.setVisible(not current_is_higher) + + self._label_widget.setText(label_message) + self._ignore_btn.setText(ignore_label) + self._restart_btn.setText(restart_label) def _on_ignore(self): self.reject() @@ -247,15 +274,17 @@ class TrayManager: expected_version = get_expected_version() current_version = get_openpype_version() + current_is_higher = is_current_version_higher_than_expected() + self._version_dialog.update_versions( - current_version, expected_version + current_version, expected_version, current_is_higher ) self._version_dialog.show() self._version_dialog.raise_() self._version_dialog.activateWindow() def _restart_and_install(self): - self.restart() + self.restart(use_expected_version=True) def _outdated_version_ignored(self): self.show_tray_message( @@ -432,12 +461,18 @@ class TrayManager: self._restart_action = restart_action def _on_restart_action(self): - self.restart() + self.restart(use_expected_version=True) - def restart(self, reset_version=True): + def restart(self, use_expected_version=False, reset_version=False): """Restart Tray tool. First creates new process with same argument and close current tray. + + Args: + use_expected_version(bool): OpenPype version is set to expected + version. + reset_version(bool): OpenPype version is cleaned up so igniters + logic will decide which version will be used. """ args = get_openpype_execute_args() kwargs = { @@ -451,6 +486,15 @@ class TrayManager: if args[-1] == additional_args[0]: additional_args.pop(0) + if use_expected_version: + expected_version = get_expected_version() + if expected_version is not None: + reset_version = False + kwargs["env"]["OPENPYPE_VERSION"] = str(expected_version) + else: + # Trigger reset of version if expected version was not found + reset_version = True + # Pop OPENPYPE_VERSION if reset_version: # Add staging flag if was running from staging From 2a848bbb97c061205eead8dde7e4a95261494cc5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 12:25:09 +0100 Subject: [PATCH 137/160] change title --- openpype/tools/tray/pype_tray.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index a6ce5d3a89..1fd4b3ae97 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -85,7 +85,7 @@ class VersionDialog(QtWidgets.QDialog): def __init__(self, parent=None): super(VersionDialog, self).__init__(parent) - self.setWindowTitle("OpenPype update is needed") + icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) self.setWindowIcon(icon) self.setWindowFlags( @@ -167,6 +167,7 @@ class VersionDialog(QtWidgets.QDialog): self, current_version, expected_version, current_is_higher ): if not current_is_higher: + title = "OpenPype update is needed" label_message = ( "Running OpenPype version is {}." " Your production has been updated to version {}." @@ -174,6 +175,7 @@ class VersionDialog(QtWidgets.QDialog): ignore_label = "Later" restart_label = "Restart && Update" else: + title = "OpenPype version is higher" label_message = ( "Running OpenPype version is {}." " Your production should use version {}." 
@@ -181,6 +183,8 @@ class VersionDialog(QtWidgets.QDialog): ignore_label = "I know" restart_label = "Restart && Change" + self.setWindowTitle(title) + self._current_is_higher = current_is_higher self._gift_icon_label.setVisible(not current_is_higher) From 69dea46dccaa79860aa4eb1cf4068ef93cf9f5a5 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 12:41:11 +0100 Subject: [PATCH 138/160] removed 'should' from label --- openpype/tools/tray/pype_tray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 1fd4b3ae97..284a6a31df 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -178,7 +178,7 @@ class VersionDialog(QtWidgets.QDialog): title = "OpenPype version is higher" label_message = ( "Running OpenPype version is {}." - " Your production should use version {}." + " Your production use version {}." ).format(str(current_version), str(expected_version)) ignore_label = "I know" restart_label = "Restart && Change" From aedfb53c4e1e4851726671d3b519101c5984c639 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 13:22:14 +0100 Subject: [PATCH 139/160] fix grammar --- openpype/tools/tray/pype_tray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 284a6a31df..4ad5bc19ba 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -178,7 +178,7 @@ class VersionDialog(QtWidgets.QDialog): title = "OpenPype version is higher" label_message = ( "Running OpenPype version is {}." - " Your production use version {}." + " Your production uses version {}." ).format(str(current_version), str(expected_version)) ignore_label = "I know" restart_label = "Restart && Change" From 469d58a15b1e554886dbe0f096e8cb685d1b120e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Jan 2022 16:49:25 +0100 Subject: [PATCH 140/160] Fix - removed obsolete import --- openpype/hosts/aftereffects/plugins/publish/collect_render.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py index cd67f8ca9e..5b713f43a2 100644 --- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py +++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py @@ -3,7 +3,6 @@ import re import tempfile import attr -from avalon import aftereffects import pyblish.api from openpype.settings import get_project_settings From 70927e7d33b01a60410d24666ef640f8434e0d10 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Jan 2022 17:48:47 +0100 Subject: [PATCH 141/160] fixed deadline handling --- .../maya/plugins/create/create_vrayscene.py | 150 ++++++++++++++---- .../maya/plugins/publish/collect_vrayscene.py | 2 +- 2 files changed, 122 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py index f9d3c7b8f0..5ce829b452 100644 --- a/openpype/hosts/maya/plugins/create/create_vrayscene.py +++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py @@ -4,6 +4,8 @@ import os import json import appdirs import requests +import six +import sys from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup @@ -12,7 +14,15 @@ from openpype.hosts.maya.api import ( lib, plugin ) -from openpype.api import get_system_settings +from openpype.api import ( 
+ get_system_settings, + get_project_settings, + get_asset) + +from openpype.modules import ModulesManager + +from avalon.api import Session +from avalon.api import CreatorError class CreateVRayScene(plugin.Creator): @@ -22,11 +32,40 @@ class CreateVRayScene(plugin.Creator): family = "vrayscene" icon = "cubes" + _project_settings = None + def __init__(self, *args, **kwargs): """Entry.""" super(CreateVRayScene, self).__init__(*args, **kwargs) self._rs = renderSetup.instance() self.data["exportOnFarm"] = False + deadline_settings = get_system_settings()["modules"]["deadline"] + if not deadline_settings["enabled"]: + self.deadline_servers = {} + return + self._project_settings = get_project_settings( + Session["AVALON_PROJECT"]) + + try: + default_servers = deadline_settings["deadline_urls"] + project_servers = ( + self._project_settings["deadline"]["deadline_servers"] + ) + self.deadline_servers = { + k: default_servers[k] + for k in project_servers + if k in default_servers + } + + if not self.deadline_servers: + self.deadline_servers = default_servers + + except AttributeError: + # Handle situation were we had only one url for deadline. + manager = ModulesManager() + deadline_module = manager.modules_by_name["deadline"] + # get default deadline webservice url from deadline module + self.deadline_servers = deadline_module.deadline_urls def process(self): """Entry point.""" @@ -37,10 +76,10 @@ class CreateVRayScene(plugin.Creator): use_selection = self.options.get("useSelection") with lib.undo_chunk(): self._create_vray_instance_settings() - instance = super(CreateVRayScene, self).process() + self.instance = super(CreateVRayScene, self).process() index = 1 - namespace_name = "_{}".format(str(instance)) + namespace_name = "_{}".format(str(self.instance)) try: cmds.namespace(rm=namespace_name) except RuntimeError: @@ -48,10 +87,19 @@ class CreateVRayScene(plugin.Creator): pass while(cmds.namespace(exists=namespace_name)): - namespace_name = "_{}{}".format(str(instance), index) + namespace_name = "_{}{}".format(str(self.instance), index) index += 1 namespace = cmds.namespace(add=namespace_name) + + # add Deadline server selection list + if self.deadline_servers: + cmds.scriptJob( + attributeChange=[ + "{}.deadlineServers".format(self.instance), + self._deadline_webservice_changed + ]) + # create namespace with instance layers = self._rs.getRenderLayers() if use_selection: @@ -62,7 +110,7 @@ class CreateVRayScene(plugin.Creator): render_set = cmds.sets( n="{}:{}".format(namespace, layer.name())) sets.append(render_set) - cmds.sets(sets, forceElement=instance) + cmds.sets(sets, forceElement=self.instance) # if no render layers are present, create default one with # asterix selector @@ -71,6 +119,52 @@ class CreateVRayScene(plugin.Creator): collection = render_layer.createCollection("defaultCollection") collection.getSelector().setPattern('*') + def _deadline_webservice_changed(self): + """Refresh Deadline server dependent options.""" + # get selected server + from maya import cmds + webservice = self.deadline_servers[ + self.server_aliases[ + cmds.getAttr("{}.deadlineServers".format(self.instance)) + ] + ] + pools = self._get_deadline_pools(webservice) + cmds.deleteAttr("{}.primaryPool".format(self.instance)) + cmds.deleteAttr("{}.secondaryPool".format(self.instance)) + cmds.addAttr(self.instance, longName="primaryPool", + attributeType="enum", + enumName=":".join(pools)) + cmds.addAttr(self.instance, longName="secondaryPool", + attributeType="enum", + enumName=":".join(["-"] + pools)) + + def 
_get_deadline_pools(self, webservice): + # type: (str) -> list + """Get pools from Deadline. + Args: + webservice (str): Server url. + Returns: + list: Pools. + Throws: + RuntimeError: If deadline webservice is unreachable. + + """ + argument = "{}/api/pools?NamesOnly=true".format(webservice) + try: + response = self._requests_get(argument) + except requests.exceptions.ConnectionError as exc: + msg = 'Cannot connect to deadline web service' + self.log.error(msg) + six.reraise( + CreatorError, + CreatorError('{} - {}'.format(msg, exc)), + sys.exc_info()[2]) + if not response.ok: + self.log.warning("No pools retrieved") + return [] + + return response.json() + def _create_vray_instance_settings(self): # get pools pools = [] @@ -79,31 +173,29 @@ class CreateVRayScene(plugin.Creator): deadline_enabled = system_settings["deadline"]["enabled"] muster_enabled = system_settings["muster"]["enabled"] - deadline_url = system_settings["deadline"]["DEADLINE_REST_URL"] muster_url = system_settings["muster"]["MUSTER_REST_URL"] if deadline_enabled and muster_enabled: self.log.error( "Both Deadline and Muster are enabled. " "Cannot support both." ) - raise RuntimeError("Both Deadline and Muster are enabled") + raise CreatorError("Both Deadline and Muster are enabled") + + self.server_aliases = self.deadline_servers.keys() + self.data["deadlineServers"] = self.server_aliases if deadline_enabled: - argument = "{}/api/pools?NamesOnly=true".format(deadline_url) + # if default server is not between selected, use first one for + # initial list of pools. try: - response = self._requests_get(argument) - except requests.exceptions.ConnectionError as e: - msg = 'Cannot connect to deadline web service' - self.log.error(msg) - raise RuntimeError('{} - {}'.format(msg, e)) - if not response.ok: - self.log.warning("No pools retrieved") - else: - pools = response.json() - self.data["primaryPool"] = pools - # We add a string "-" to allow the user to not - # set any secondary pools - self.data["secondaryPool"] = ["-"] + pools + deadline_url = self.deadline_servers["default"] + except KeyError: + deadline_url = [ + self.deadline_servers[k] + for k in self.deadline_servers.keys() + ][0] + + pool_names = self._get_deadline_pools(deadline_url) if muster_enabled: self.log.info(">>> Loading Muster credentials ...") @@ -115,10 +207,10 @@ class CreateVRayScene(plugin.Creator): if e.startswith("401"): self.log.warning("access token expired") self._show_login() - raise RuntimeError("Access token expired") + raise CreatorError("Access token expired") except requests.exceptions.ConnectionError: self.log.error("Cannot connect to Muster API endpoint.") - raise RuntimeError("Cannot connect to {}".format(muster_url)) + raise CreatorError("Cannot connect to {}".format(muster_url)) pool_names = [] for pool in pools: self.log.info(" - pool: {}".format(pool["name"])) @@ -140,7 +232,7 @@ class CreateVRayScene(plugin.Creator): ``MUSTER_PASSWORD``, ``MUSTER_REST_URL`` is loaded from presets. Raises: - RuntimeError: If loaded credentials are invalid. + CreatorError: If loaded credentials are invalid. AttributeError: If ``MUSTER_REST_URL`` is not set. 
""" @@ -152,7 +244,7 @@ class CreateVRayScene(plugin.Creator): self._token = muster_json.get("token", None) if not self._token: self._show_login() - raise RuntimeError("Invalid access token for Muster") + raise CreatorError("Invalid access token for Muster") file.close() self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL") if not self.MUSTER_REST_URL: @@ -162,7 +254,7 @@ class CreateVRayScene(plugin.Creator): """Get render pools from Muster. Raises: - Exception: If pool list cannot be obtained from Muster. + CreatorError: If pool list cannot be obtained from Muster. """ params = {"authToken": self._token} @@ -178,12 +270,12 @@ class CreateVRayScene(plugin.Creator): ("Cannot get pools from " "Muster: {}").format(response.status_code) ) - raise Exception("Cannot get pools from Muster") + raise CreatorError("Cannot get pools from Muster") try: pools = response.json()["ResponseData"]["pools"] except ValueError as e: self.log.error("Invalid response from Muster server {}".format(e)) - raise Exception("Invalid response from Muster server") + raise CreatorError("Invalid response from Muster server") return pools @@ -196,7 +288,7 @@ class CreateVRayScene(plugin.Creator): login_response = self._requests_get(api_url, timeout=1) if login_response.status_code != 200: self.log.error("Cannot show login form to Muster") - raise Exception("Cannot show login form to Muster") + raise CreatorError("Cannot show login form to Muster") def _requests_post(self, *args, **kwargs): """Wrap request post method. diff --git a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py index e5c182c908..c1e5d388af 100644 --- a/openpype/hosts/maya/plugins/publish/collect_vrayscene.py +++ b/openpype/hosts/maya/plugins/publish/collect_vrayscene.py @@ -7,7 +7,7 @@ from maya import cmds import pyblish.api from avalon import api -from openpype.hosts.maya import lib +from openpype.hosts.maya.api import lib class CollectVrayScene(pyblish.api.InstancePlugin): From ba6452c37e6bfc693297634ed8fde3da5ec5952a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Jan 2022 18:15:10 +0100 Subject: [PATCH 142/160] remove unused imports --- openpype/hosts/maya/plugins/create/create_vrayscene.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_vrayscene.py b/openpype/hosts/maya/plugins/create/create_vrayscene.py index 5ce829b452..f2096d902e 100644 --- a/openpype/hosts/maya/plugins/create/create_vrayscene.py +++ b/openpype/hosts/maya/plugins/create/create_vrayscene.py @@ -16,8 +16,8 @@ from openpype.hosts.maya.api import ( ) from openpype.api import ( get_system_settings, - get_project_settings, - get_asset) + get_project_settings +) from openpype.modules import ModulesManager From a06b13604f3f2bd9fa51efc98c6d809eec610dae Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 18:39:35 +0100 Subject: [PATCH 143/160] handle situations when version can't be detected --- openpype/tools/tray/pype_tray.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index 4ad5bc19ba..a21a9de705 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -258,6 +258,10 @@ class TrayManager: def validate_openpype_version(self): using_requested = is_current_version_studio_latest() + # TODO Handle situations when version can't be detected + if using_requested is None: + using_requested = True + 
self._restart_action.setVisible(not using_requested) if using_requested: if ( From c78eafa8eb1217a89af473bbe7b7fd93ab6291a2 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Fri, 21 Jan 2022 18:48:59 +0100 Subject: [PATCH 144/160] changed order of version settings in UI --- .../defaults/system_settings/general.json | 8 +- .../schemas/system_schema/schema_general.json | 79 ++++++++++--------- .../settings/settings/wrapper_widgets.py | 3 +- 3 files changed, 48 insertions(+), 42 deletions(-) diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index 7c78de9a5c..5a3e39e5b6 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -2,9 +2,6 @@ "studio_name": "Studio name", "studio_code": "stu", "admin_password": "", - "production_version": "", - "staging_version": "", - "version_check_interval": 5, "environment": { "__environment_keys__": { "global": [] @@ -19,5 +16,8 @@ "windows": [], "darwin": [], "linux": [] - } + }, + "production_version": "", + "staging_version": "", + "version_check_interval": 5 } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json index 3af3f5ce35..6306317df8 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_general.json +++ b/openpype/settings/entities/schemas/system_schema/schema_general.json @@ -30,36 +30,6 @@ { "type": "splitter" }, - { - "type": "label", - "label": "Define explicit OpenPype version that should be used. Keep empty to use latest available version." - }, - { - "type": "production-versions-text", - "key": "production_version", - "label": "Production version" - }, - { - "type": "staging-versions-text", - "key": "staging_version", - "label": "Staging version" - }, - { - "type": "splitter" - }, - { - "type": "label", - "label": "Trigger validation if running OpenPype is using studio defined version each 'n' minutes. Validation happens in OpenPype tray application." - }, - { - "type": "number", - "key": "version_check_interval", - "label": "Version check interval", - "minimum": 0 - }, - { - "type": "splitter" - }, { "key": "environment", "label": "Environment", @@ -141,12 +111,49 @@ "type": "splitter" }, { - "type": "path", - "key": "openpype_path", - "label": "Versions Repository", - "multiplatform": true, - "multipath": true, - "require_restart": true + "type": "collapsible-wrap", + "label": "OpenPype deployment control", + "collapsible": false, + "children": [ + { + "type": "path", + "key": "openpype_path", + "label": "Versions Repository", + "multiplatform": true, + "multipath": true, + "require_restart": true + }, + { + "type": "splitter" + }, + { + "type": "label", + "label": "Define explicit OpenPype version that should be used. Keep empty to use latest available version." + }, + { + "type": "production-versions-text", + "key": "production_version", + "label": "Production version" + }, + { + "type": "staging-versions-text", + "key": "staging_version", + "label": "Staging version" + }, + { + "type": "splitter" + }, + { + "type": "label", + "label": "Trigger validation if running OpenPype is using studio defined version each 'n' minutes. Validation happens in OpenPype tray application." 
+                },
+                {
+                    "type": "number",
+                    "key": "version_check_interval",
+                    "label": "Version check interval",
+                    "minimum": 0
+                }
+            ]
         }
     ]
 }
diff --git a/openpype/tools/settings/settings/wrapper_widgets.py b/openpype/tools/settings/settings/wrapper_widgets.py
index b14a226912..7370fcf945 100644
--- a/openpype/tools/settings/settings/wrapper_widgets.py
+++ b/openpype/tools/settings/settings/wrapper_widgets.py
@@ -92,8 +92,7 @@ class CollapsibleWrapper(WrapperWidget):
         self.content_layout = content_layout
 
         if self.collapsible:
-            if not self.collapsed:
-                body_widget.toggle_content()
+            body_widget.toggle_content(self.collapsed)
         else:
             body_widget.hide_toolbox(hide_content=False)
 

From 83204dde615ae723e1df7e15bc4b45ddf4a1a9ca Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Fri, 21 Jan 2022 19:08:54 +0100
Subject: [PATCH 145/160] added OpenPype deployment control to website docs

---
 website/docs/admin_settings_system.md         | 21 +++++++++++++++++-
 .../settings/settings_system_general.png      | Bin 33586 -> 45859 bytes
 .../settings_system_version_downgrade.png     | Bin 0 -> 7777 bytes
 .../settings_system_version_update.png        | Bin 0 -> 8435 bytes
 4 files changed, 20 insertions(+), 1 deletion(-)
 create mode 100644 website/docs/assets/settings/settings_system_version_downgrade.png
 create mode 100644 website/docs/assets/settings/settings_system_version_update.png

diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md
index 6057ed0830..78be9fb01e 100644
--- a/website/docs/admin_settings_system.md
+++ b/website/docs/admin_settings_system.md
@@ -11,6 +11,8 @@ import TabItem from '@theme/TabItem';
 
 Settings applicable to the full studio.
 
+![general_settings](assets/settings/settings_system_general.png)
+
 **`Studio Name`** - Full name of the studio (can be used as variable on some places)
 
 **`Studio Code`** - Studio acronym or a short code (can be used as variable on some places)
@@ -24,10 +26,27 @@ as a naive barier to prevent artists from accidental setting changes.
 
 **`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up. Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume).
 
+### OpenPype deployment control
 **`Versions Repository`** - Location where automatic update mechanism searches for zip files with OpenPype update packages. To read more about preparing OpenPype for automatic updates go to [Admin Distribute docs](admin_distribute#2-openpype-codebase)
 
-![general_settings](assets/settings/settings_system_general.png)
+**`Production version`** - Defines the current production version. When the value is not set, the latest version available in the versions repository is resolved as the production version.
+
+**`Staging version`** - Defines the current staging version. When the value is not set, the latest staging version available in the versions repository is resolved as the staging version.
+
+For more information about Production and Staging go to [Distribute](admin_distribute#staging-vs-production).
+
+The **Production version** and **Staging version** fields define which version is used in the studio. Filling in an explicit version forces new OpenPype processes to use it. That gives more control over studio deployment, especially when some workstations don't have access to the version repository (e.g. remote users). It can also be used to downgrade the studio version when a newer version has a production-breaking bug.
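+
+For illustration only, this is roughly how the related keys look in the system settings defaults (`general.json`). Empty values mean "use the latest available version" and the interval is in minutes; the keys are real, the values below are just an example:
+
+```json
+{
+    "production_version": "",
+    "staging_version": "",
+    "version_check_interval": 5
+}
+```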
+
+When the fields are not filled, the latest version in the versions repository is used as the studio version. That makes updating easier, as there is no need to modify settings, but workstations without access to the versions repository can't find out which OpenPype version should be used.
+
+If the version repository is not set or is not accessible from a workstation, the latest version available on that workstation is used, or the version bundled inside the build.
+
+**`Version check interval`** - The OpenPype tray application checks if the currently used OpenPype version is up to date with the production/staging version. This interval, in minutes, defines how often the validation is triggered. It is possible to set the interval to `0`, which turns off version validation, but that is not recommended.
+
+A dialog asking for a restart is shown when the OpenPype tray application detects that a different version should be used.
+![general_settings](assets/settings/settings_system_version_update.png)
+![general_settings](assets/settings/settings_system_version_downgrade.png)
 
 ## Modules
 
diff --git a/website/docs/assets/settings/settings_system_general.png b/website/docs/assets/settings/settings_system_general.png
index d04586205d24ee30445a80ce54330b2c3c03e77c..a2a684caeab6ce6538e8b7a5d8aaf2e76dfe902b 100644
GIT binary patch
literal 45859
zcmeFZ2UJsQ*Di_`JG$A5iV)F%iyKfZ8<3VLYy}I&LX#2|0R=)tnv^7>0xCk-A|f@i
zML>FygqkP{Q4!D}C4?j@LI@!QNQ2a~0*d;5=iKq%d%k=_PBIASI<$FBE_Ci132*N=lVAhyOfq>WasxR9>YaM9{-UsU=j%
z89$saF3&L0=GMZ4t9LEkzis=PElaO$IdEdW`ODM4UAJ7mZso0CoPRx=y!znHyCJ`A
z+%oU*y3b2~E!qC!5^S*bWniu~_MmHJR@Texm&eI@JFHRyL-&x|Mo-;6yeC6HxkFG-
z$V8%O(h)GDpCY7j2gwfT`R&sU;!cb#7^7cCVPuODw_<+!cxkP(Fj@ZNr}diNjo@`Z
ze(o=FS;Ex&>9f3YHOa;K$IrU+TDeu_KYsLH?Kx>J?~#9k7q{F6S}Ah_-8twnT!&4km3nKyH2Q>CfAmtUBRo%R}#moFfy;l{b8d>ocr>`^N$llaIP(`c(K~dRXh=xgtu5Pcu6NBkS_DOK99veW8vcsQSig}_j~d5HQ5T>Tc8a*KSWS@f1BIvW5B*K~BmU{z
diff --git a/website/docs/assets/settings/settings_system_version_downgrade.png b/website/docs/assets/settings/settings_system_version_downgrade.png
new file mode 100644
index 0000000000000000000000000000000000000000..277f2c6bc89a2f565c265e311a233b55d12fba01
zTQj%nkO{Nhs_M@?nzUl#MzE5|+c#-1^9b9Yt+K0o`n+&*Uv3)ABbdbxjCBpUbB#@& zGh?l=F>#}+3^)q~1&H|*fw>$Vg$r?^*?xX>+7pYkVMZT>Cf?^q6REl6max+9z;=?K z3d`C{zR*A_;{mKmhTSTeQcX>gjAK((uf4`2;LK?V>0|Xw#Fp!ZZ%dW?Gyf*kJ9N~JBc#{ zi=}lYv0h;|6Xs}~(hd>^_%pW{`i-=Fku^!_3{@{gJ(q}0S*_vv(T-z1VZN^q?48k9 z^p*WK_1enxXMb-1ru#7jHvW~ypu7liY)AG35}kB5HXtI-Dh-xNHkSTC*df$T{5 zu6fhI*}!LNgcD1yX7bRDE$92AFMnPc5fZPW`s&fhd+s4@;`5KXbk#W?&*@Lv`8v#@ zWbGV$iARw&#r+LSu2%yd3W4$w{6^fTxjMr`8-S%a9Y0GHR}#A>bIap5p5&A*dYLN- zRSCtQR=lV-eyuFy!*DPVcplT z(*B@DHfc&k3I_=NB>aI;IU*q;vd7;()-x8qC(JyA-==v}PF7qUd@)8m%+nwC1|@VJ zfBBK58;N1Y006EM>(s#MDzr%YStu29H!}Hf?4mQrj8Y&mffAH6n7b6Hz4VP?}Y z%<|<|!bS?h>Woju-0 z3NGzlq2PbY_ zx%B(2YY?1svCDojp@k9&scRz~ix^@V6JXuCV-{K;M&Gcfm2JtR&m%{d3Xd~`D<_Y< zom;!LjB~#3rCt6`HxMv65iI|NQ%Wx=rp5Tm5KdSrT~R%?zwLACN`~+YZ$`H|5({vs zm&Tdtj&r{+^A_T3m?gFvX)Sf}Fo6v9x&1v4y3h0AjUN9!oaew$6WgueTjltehs^dA z*q5{G&9z^;_Dh0>A>r$Fq_TVDCgYG?ILo75r3wC~+Lq(MbrrmKy-=nvgVTKNY=7~E z{$7dnH?L9n;x1CIsZAX54uxzWQ8Xs3toM zUMN2z3Vec6{^lK+s=}qVJf7L8Jo%YT+6l$lDsA}23l3kbT#k!4u4H&cQePau-e>Cc z&`1&GXc`h0`=zf@ZsVbsRmOD*Ph{xI<;aITy*kul4YX`1X@QUL4D_D&^QL$E*g?X| zn&;G!J~>mM8meahwG0Go`&h)%ORfodKoUcx9TaGpMco}z$B?gSo0&Chvb=_cWuJob ziwhv1KD*tUHQwmY_!97;b;X2qY4!pJW0-D4#@Pm#JeT- z@tuqI4B4H^D}FmQ?ye1nrY0-4??(RI%CvwKR#y)T(YiFYi|d2^%0w=qAN<(031 zbE7gB%|)7UpjN&kvqRMNNrvR)VQ5&;i_>rkLq*dvwdrA_EtD+JfNj|&4GBn}XujLM zuv18&4>De*!zuW*Jf@;S)t$-z;$=Le^e~Sg#IP%!9@n`wfTH`>wQ^B&TNo<${r=l2 zL`9R#wbhK+=-otxOhpQPt2ybpx~!l#N0TvXNk+xx`gBMAbw^?*sz^Cmp2N-`OgO_F z)N>A_7L9gkHj0L(`uqoSisf2(p%ciNK%C}dA#;AUtxQCM2*TzgIq_fTNQZX{3W9@l zH727+1uI-hs5G4R)MofBqi5YMk9Lc(^51{SguOh=ho{_f)$H{g`?5wb--#~=eHFWMOR%W zmxp!q0$uS@eP)aMdw6~^OXX&+>k0LUe8G!nkx7!OXWiY#ryX&Dmn30L3O~;JW<`VS zR*G>&une8!)gHF0N(S7MA`~?66m&f|a%JUIv`_{IRa>MlS3efDm;0=+_4?wHx0fNV zCo_xoo7|Uu%;YPvQ25i3<4P88Ko@iSCvVS)Xd@TOvO-}}>8PRAB#OtJ22uuS zz}J3cwRU8!mT;6)E!@h`YnQSMEc!X=szAXsmvqRn%^s}b_)4&1BBBcY4T)Qm@VrNnOxLa?(0e|dSh@4<|OByTw zGvr?!1WZc(&-I$XzowU>f>1b`_{*;#u|qA|Ox=Va@a(nw-SuH7+|>y(+Rq5LbpS}8 zYQDQN6t7~kM^;Nbz*KD-oWTD*DD;X836V%`$X^#zGD89}mJQfeh7zil2GgXvS0cw7 z@$hw*eH@;(2)-N?eG}kn^v`mn+>t7;R*A=-Qi$&52TQb*8`I~mWbIPe@ZbA<78rjY zWP_=G0^ZL2(8FL1Xq*0Lhw)7TWbOka(YH;>536_gS|{y7&FkqaiOdp6oeogI4v9ot zw2P|U#we>QAGbN!oXtz}$({S}?hye}iNLwUO^B7AU6m-dRuj>R9^$}K z9(5t)Q|)qsLC)-XcsIy|L_xRQ3--+tt6sR&ru9svTMRk~XsN4NIh2&2|FXGBEq ztnz9#f?srg@f<4n5>6zpSo)ozfdj-x#}q|LFGAmB2TV8@U$X)Bh6~YP7}UN~sV_34 zBL*{plT2p-fRvdB7%Zegd>r-OVPmf{(XYyQ<+u9JMkKdhx+bE;|Lf7*poSV4tBh5{ z_Trx2;!H6+&&pVNPs%gE{GO)h9>{PP{7?W?vfHl25@@sUIPC&xEBxbza@#QT+lP+U zUZIn;UnS~Om0U<(rb-43EN$8#Jk9qQ3;94pb@6%bknscwycM<~> z_-D;Xe2ypp;Bf=4K~WLDR-Ck4KivEi{t@{I9KFp9m=OH8|iIbN~{Haks;$#}f007)gBTAGa`2MN%szx_lHQ z=N(}!JT2zC^OplTlN{!q_*mgl3;)FXk{c;X-r1{9Qc-e*TAz40xBRz0TN`&t&}ms4 zS4m}i?wixs0z^QfEKUvO#z>78{@f5!Bt10fAOsE=q00@K1K&Y0=!+1~MT)oj5=w&* zO8qux&^t6W@6FI%@PB3{<+YXs_Ikg$5u7*1tBg{9lC@BMC|_OOLiuj@v_ zFZ^SPYAJGjLM*U-1RM_m?_K13Xt)S6X3HbsivkmmIf=lPez-hdXL zgTK*}S?Wd)JVZ{fWZLe^jLF%}Cj1ro+lq+)#L$&x&nHBT$eh~i9h|hV4chPLed7Zm z?}s%WHFB)Kd`g#)r65%M1=V6*v-hJ|m@TcaMX5*i-v)}*7Z1xf0W3H`Dwcx~IK27M zwKDh|s|w|nijS20Sv-+&s%jhg8u0hr*@Z2tR1V@MZB}6KP_JjBoM?vvQ9ofxzsT2CW&5;*MID zF!tLkE|u`4R(j~D@pJ7ZTvWoUB*N`Veg+pe0Pv8BFdwzri<66Ov4lh24tih)TOjwM zm?>y6TKOIy%Vk3=I}0p}g(^&1hzoSu{-gSL-j?5lVSovfDw)TVfE38O+%GHT7e1`e zMsu+H+3i@JumuC)*!ilcvorfHAvlQj+lcf zS<|d`$D9Zgpt(S7Gaj*fe8v=$Ajz$H+SyxqP+3?`H97^hyRMRXlb}Xu_cMU_zk=}^ zUltSTe$~Hr{eOop3n3YVcyWF8Yh>*n~k|zaEv#1s_^@&FE7EnuJ+>cLD zON%=KfYkPeHJvQY-uEpxn$kirI;Zze%cQ)|NmB#*9YsY#ca;V+Q`HIaep3?55}-#K zlAje;jhrz?#O5jmN&LxfQKa8 Date: Sat, 22 Jan 2022 03:38:16 +0000 Subject: [PATCH 146/160] [Automated] Bump 
version --- CHANGELOG.md | 72 ++++++++++++++++++--------------------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 31 insertions(+), 45 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1b390da5e..bffbc69931 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.8.0-nightly.5](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.8.0-nightly.6](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...HEAD) @@ -11,52 +11,57 @@ **🆕 New features** -- Flame: OpenTimelineIO Export Modul [\#2398](https://github.com/pypeclub/OpenPype/pull/2398) +- Flame: collecting publishable instances [\#2519](https://github.com/pypeclub/OpenPype/pull/2519) +- Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) **🚀 Enhancements** +- Webpublisher: Moved error at the beginning of the log [\#2559](https://github.com/pypeclub/OpenPype/pull/2559) +- Ftrack: Use ApplicationManager to get DJV path [\#2558](https://github.com/pypeclub/OpenPype/pull/2558) +- Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555) - Settings: PathInput strip passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550) +- Global: Exctract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548) +- Flame: extracting segments with trans-coding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547) +- Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542) - General: Validate if current process OpenPype version is requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529) - General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525) - Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521) - Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510) - TimersManager: Move module one hierarchy higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501) - Slack: notifications are sent with Openpype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499) -- Ftrack: Event handlers settings [\#2496](https://github.com/pypeclub/OpenPype/pull/2496) -- Flame - create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) -- Tools: Fix style and modality of errors in loader and creator [\#2489](https://github.com/pypeclub/OpenPype/pull/2489) - Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482) -- Tools: Be able to change models of tasks and assets widgets [\#2475](https://github.com/pypeclub/OpenPype/pull/2475) -- Publish pype: Reduce publish process defering [\#2464](https://github.com/pypeclub/OpenPype/pull/2464) -- Maya: Improve speed of Collect History logic [\#2460](https://github.com/pypeclub/OpenPype/pull/2460) -- Maya: Validate Rig Controllers - fix Error: in script editor [\#2459](https://github.com/pypeclub/OpenPype/pull/2459) -- Maya: Optimize Validate Locked Normals speed for dense polymeshes [\#2457](https://github.com/pypeclub/OpenPype/pull/2457) -- Fix \#2453 Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455) +- Maya: Refactor missing \_get\_reference\_node method 
[\#2455](https://github.com/pypeclub/OpenPype/pull/2455) - Houdini: Remove broken unique name counter [\#2450](https://github.com/pypeclub/OpenPype/pull/2450) - Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447) - General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425) -- Maya : add option to not group reference in ReferenceLoader [\#2383](https://github.com/pypeclub/OpenPype/pull/2383) **🐛 Bug fixes** +- AfterEffects: Fix - removed obsolete import [\#2577](https://github.com/pypeclub/OpenPype/pull/2577) +- Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563) +- Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560) +- General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557) +- Webpublisher: Fixed progress reporting [\#2553](https://github.com/pypeclub/OpenPype/pull/2553) +- Fix Maya AssProxyLoader version switch [\#2551](https://github.com/pypeclub/OpenPype/pull/2551) +- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) +- Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535) +- Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533) +- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) +- Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518) - Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513) -- Fix \#2497: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506) -- General: Settings work if OpenPypeVersion is available [\#2494](https://github.com/pypeclub/OpenPype/pull/2494) -- General: PYTHONPATH may break OpenPype dependencies [\#2493](https://github.com/pypeclub/OpenPype/pull/2493) -- Workfiles tool: Files widget show files on first show [\#2488](https://github.com/pypeclub/OpenPype/pull/2488) -- General: Custom template paths filter fix [\#2483](https://github.com/pypeclub/OpenPype/pull/2483) -- Loader: Remove always on top flag in tray [\#2480](https://github.com/pypeclub/OpenPype/pull/2480) -- General: Anatomy does not return root envs as unicode [\#2465](https://github.com/pypeclub/OpenPype/pull/2465) +- Maya: reset empty string attributes correctly to "" instead of "None" [\#2506](https://github.com/pypeclub/OpenPype/pull/2506) +- Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) - Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) **Merged pull requests:** -- General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) - AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) -- Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) -- General: Modules import function output fix [\#2492](https://github.com/pypeclub/OpenPype/pull/2492) -- AE: fix hiding of alert window below Publish [\#2491](https://github.com/pypeclub/OpenPype/pull/2491) -- 
Maya: Validate NGONs re-use polyConstraint code from openpype.host.maya.api.lib [\#2458](https://github.com/pypeclub/OpenPype/pull/2458) +- Maya: Remove Maya Look Assigner check on startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540) +- build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538) +- build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534) +- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532) +- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) +- Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514) ## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04) @@ -72,14 +77,6 @@ - Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) - TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) - General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408) -- Ftrack: Check existence of object type on recreation [\#2404](https://github.com/pypeclub/OpenPype/pull/2404) -- Enhancement: Global cleanup plugin that explicitly remove paths from context [\#2402](https://github.com/pypeclub/OpenPype/pull/2402) -- General: MongoDB ability to specify replica set groups [\#2401](https://github.com/pypeclub/OpenPype/pull/2401) -- Flame: moving `utility\_scripts` to api folder also with `scripts` [\#2385](https://github.com/pypeclub/OpenPype/pull/2385) -- Centos 7 dependency compatibility [\#2384](https://github.com/pypeclub/OpenPype/pull/2384) -- Enhancement: Settings: Use project settings values from another project [\#2382](https://github.com/pypeclub/OpenPype/pull/2382) -- Blender 3: Support auto install for new blender version [\#2377](https://github.com/pypeclub/OpenPype/pull/2377) -- Maya add render image path to settings [\#2375](https://github.com/pypeclub/OpenPype/pull/2375) **🐛 Bug fixes** @@ -89,21 +86,10 @@ - Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416) - AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412) - Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406) -- General: Fix access to environments from default settings [\#2403](https://github.com/pypeclub/OpenPype/pull/2403) -- Fix: Placeholder Input color set fix [\#2399](https://github.com/pypeclub/OpenPype/pull/2399) -- Settings: Fix state change of wrapper label [\#2396](https://github.com/pypeclub/OpenPype/pull/2396) -- Flame: fix ftrack publisher [\#2381](https://github.com/pypeclub/OpenPype/pull/2381) -- hiero: solve custom ocio path [\#2379](https://github.com/pypeclub/OpenPype/pull/2379) -- hiero: fix workio and flatten [\#2378](https://github.com/pypeclub/OpenPype/pull/2378) -- Nuke: fixing menu re-drawing during context change [\#2374](https://github.com/pypeclub/OpenPype/pull/2374) -- Webpublisher: Fix assignment of families of TVpaint instances [\#2373](https://github.com/pypeclub/OpenPype/pull/2373) **Merged pull requests:** - Forced cx\_freeze to include sqlite3 into build 
[\#2432](https://github.com/pypeclub/OpenPype/pull/2432) -- Maya: Replaced PATH usage with vendored oiio path for maketx utility [\#2405](https://github.com/pypeclub/OpenPype/pull/2405) -- \[Fix\]\[MAYA\] Handle message type attribute within CollectLook [\#2394](https://github.com/pypeclub/OpenPype/pull/2394) -- Add validator to check correct version of extension for PS and AE [\#2387](https://github.com/pypeclub/OpenPype/pull/2387) ## [3.6.4](https://github.com/pypeclub/OpenPype/tree/3.6.4) (2021-11-23) diff --git a/openpype/version.py b/openpype/version.py index 121bb01e8f..60e619d7c0 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.8.0-nightly.5" +__version__ = "3.8.0-nightly.6" diff --git a/pyproject.toml b/pyproject.toml index 04d48401ab..72152bd433 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.8.0-nightly.5" # OpenPype +version = "3.8.0-nightly.6" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From 75819418e2b464284c9552845e1c2d3486e6afb6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 23 Jan 2022 14:00:10 +0100 Subject: [PATCH 147/160] Add more missing families in template --- .../schemas/template_publish_families.json | 50 ++++++++++++------- 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json index b5e33e2cf9..f39ad31fbb 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json @@ -10,23 +10,39 @@ "multiselection": "{multiselection}", "type": "enum", "enum_items": [ - {"action": "action"}, - {"animation": "animation"}, - {"audio": "audio"}, - {"camera": "camera"}, - {"editorial": "editorial"}, - {"layout": "layout"}, - {"look": "look"}, - {"mayaScene": "mayaScene"}, - {"model": "model"}, - {"pointcache": "pointcache"}, - {"reference": "reference"}, - {"render": "render"}, - {"review": "review"}, - {"rig": "rig"}, - {"setdress": "setdress"}, - {"workfile": "workfile"}, - {"xgen": "xgen"} + {"action": "action"}, + {"animation": "animation"}, + {"assembly": "assembly"}, + {"audio": "audio"}, + {"backgroundComp": "backgroundComp"}, + {"backgroundLayout": "backgroundLayout"}, + {"camera": "camera"}, + {"editorial": "editorial"}, + {"gizmo": "gizmo"}, + {"image": "image"}, + {"layout": "layout"}, + {"look": "look"}, + {"matchmove": "matchmove"}, + {"mayaScene": "mayaScene"}, + {"model": "model"}, + {"nukenodes": "nukenodes"}, + {"plate": "plate"}, + {"pointcache": "pointcache"}, + {"prerender": "prerender"}, + {"redshiftproxy": "redshiftproxy"}, + {"reference": "reference"}, + {"render": "render"}, + {"review": "review"}, + {"rig": "rig"}, + {"setdress": "setdress"}, + {"take": "take"}, + {"usdShade": "usdShade"}, + {"vdbcache": "vdbcache"}, + {"vrayproxy": "vrayproxy"}, + {"workfile": "workfile"}, + {"xgen": "xgen"}, + {"yetiRig": "yetiRig"}, + {"yeticache": "yeticache"} ] } ] From d5bae9c288221e0e1269cfa3c9eb0e77844ef19e Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Sun, 23 Jan 2022 14:06:54 +0100 Subject: [PATCH 148/160] Remove descriptive label, remove (on)/(off) from `is_include` label --- 
.../projects_schema/schemas/schema_global_tools.json | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index 863ec7f979..f8c9482e5f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -269,11 +269,7 @@ { "type": "boolean", "key": "is_include", - "label": "Exclude (OFF) / Include (ON)" - }, - { - "type": "label", - "label": "Include: show selected families by default. Hides others by default.
Exclude: hide selected families by default. Shows others by default." + "label": "Exclude / Include" }, { "type": "template", From 708e6f11845b4f86bcbd9c6226cfc662e8c3a93e Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 24 Jan 2022 10:22:31 +0700 Subject: [PATCH 149/160] Variables exports --- tools/create_zip.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/create_zip.sh b/tools/create_zip.sh index 85ee18a839..46393f78b1 100755 --- a/tools/create_zip.sh +++ b/tools/create_zip.sh @@ -130,8 +130,8 @@ main () { fi echo -e "${BIGreen}>>>${RST} Generating zip from current sources ..." - PYTHONPATH="$openpype_root:$PYTHONPATH" - OPENPYPE_ROOT="$openpype_root" + export PYTHONPATH="$openpype_root:$PYTHONPATH" + export OPENPYPE_ROOT="$openpype_root" "$POETRY_HOME/bin/poetry" run python3 "$openpype_root/tools/create_zip.py" "$@" } From f0fcb1bfefd197cd00b694cc391565ff28d9be50 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 24 Jan 2022 09:40:08 +0100 Subject: [PATCH 150/160] add defaults --- .../defaults/project_anatomy/templates.json | 5 ++++ .../defaults/project_settings/global.json | 11 ++++++++ .../defaults/project_settings/maya.json | 27 ++++++++++--------- .../schemas/schema_anatomy_templates.json | 22 +++++++++++++++ 4 files changed, 52 insertions(+), 13 deletions(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 9a03b893bf..d46d449c77 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -27,5 +27,10 @@ "path": "{@folder}/{@file}" }, "delivery": {}, + "unreal": { + "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", + "file": "{subset}_{@version}<_{output}><.{@frame}>.{ext}", + "path": "{@folder}/{@file}" + }, "others": {} } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 2169a62746..da0bd454f3 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -264,6 +264,17 @@ "task_types": [], "tasks": [], "template": "render{Task}{Variant}" + }, + { + "families": [ + "unrealStaticMesh" + ], + "hosts": [ + "maya" + ], + "task_types": [], + "tasks": [], + "template": "S_{asset}{variant}" } ] }, diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 67a7b84cdc..9490724ee8 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -46,6 +46,20 @@ "aov_separator": "underscore", "default_render_image_folder": "renders" }, + "CreateUnrealStaticMesh": { + "enabled": true, + "defaults": [ + "", + "_Main" + ], + "static_mesh_prefix": "S_", + "collision_prefixes": [ + "UBX", + "UCP", + "USP", + "UCX" + ] + }, "CreateAnimation": { "enabled": true, "defaults": [ @@ -123,19 +137,6 @@ "Anim" ] }, - "CreateUnrealStaticMesh": { - "enabled": true, - "defaults": [ - "Main" - ], - "static_mesh_prefix": "S_", - "collision_prefixes": [ - "UBX", - "UCP", - "USP", - "UCX" - ] - }, "CreateVrayProxy": { "enabled": true, "defaults": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_templates.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_templates.json index 
e208069e6f..0548824ee1 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_templates.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_templates.json @@ -143,6 +143,28 @@ "label": "Delivery", "object_type": "text" }, + { + "type": "dict", + "key": "unreal", + "label": "Unreal", + "children": [ + { + "type": "text", + "key": "folder", + "label": "Folder" + }, + { + "type": "text", + "key": "file", + "label": "File" + }, + { + "type": "text", + "key": "path", + "label": "Path" + } + ] + }, { "type": "dict-modifiable", "key": "others", From d51c9da59ca3b491af8845962d9712d9a20c14bc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Jan 2022 16:59:04 +0100 Subject: [PATCH 151/160] Fix - webpublisher from .psd used wrong value for task Subset name for color coded layer that should be published needs to use task name, not task type. --- .../hosts/photoshop/plugins/publish/collect_remote_instances.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/plugins/publish/collect_remote_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_remote_instances.py index e264d04d9f..3994048f7d 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_remote_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_remote_instances.py @@ -69,7 +69,7 @@ class CollectRemoteInstances(pyblish.api.ContextPlugin): instance.data["family"] = resolved_family instance.data["publish"] = layer.visible instance.data["asset"] = context.data["assetEntity"]["name"] - instance.data["task"] = context.data["taskType"] + instance.data["task"] = context.data["task"] fill_pairs = { "variant": variant, From 105a9a4097753ed0a037869073d53c72374ccedf Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 24 Jan 2022 17:41:02 +0100 Subject: [PATCH 152/160] update avalon-core --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index ffe9e910f1..159d2f23e4 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit ffe9e910f1f382e222d457d8e4a8426c41ed43ae +Subproject commit 159d2f23e4c79c04dfac57b68d2ee6ac67adec1b From 506e61f05154c345025ae65d0f62a6cd5cf9adbb Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 24 Jan 2022 17:48:41 +0100 Subject: [PATCH 153/160] Updated submodule repos/avalon-core --- repos/avalon-core | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/avalon-core b/repos/avalon-core index 7e5efd6885..159d2f23e4 160000 --- a/repos/avalon-core +++ b/repos/avalon-core @@ -1 +1 @@ -Subproject commit 7e5efd6885330d84bb8495975bcab84df49bfa3d +Subproject commit 159d2f23e4c79c04dfac57b68d2ee6ac67adec1b From a68f4c46452a9f8018783a0c51688c692dfb3701 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Mon, 24 Jan 2022 17:49:38 +0100 Subject: [PATCH 154/160] Removed submodule openpype/hosts/maya/vendor/studiolibrary --- openpype/hosts/maya/vendor/studiolibrary | 1 - 1 file changed, 1 deletion(-) delete mode 160000 openpype/hosts/maya/vendor/studiolibrary diff --git a/openpype/hosts/maya/vendor/studiolibrary b/openpype/hosts/maya/vendor/studiolibrary deleted file mode 160000 index f29e350da9..0000000000 --- a/openpype/hosts/maya/vendor/studiolibrary +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f29e350da9e9508522a740a4f30efb93b99c89d3 From f6fd68940b999894d122c91ecc116aaef38963cc Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 24 Jan 2022 17:50:14 +0100 Subject: [PATCH 
155/160] change label from "I know" to "Later"
--- openpype/tools/tray/pype_tray.py | 2 +- .../settings_system_version_downgrade.png | Bin 7777 -> 7012 bytes 2 files changed, 1 insertion(+), 1 deletion(-)
diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index a21a9de705..cfd0aea2a1 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -180,7 +180,7 @@ class VersionDialog(QtWidgets.QDialog): "Running OpenPype version is {}." " Your production uses version {}." ).format(str(current_version), str(expected_version)) - ignore_label = "I know" + ignore_label = "Later" restart_label = "Restart && Change" self.setWindowTitle(title)
diff --git a/website/docs/assets/settings/settings_system_version_downgrade.png b/website/docs/assets/settings/settings_system_version_downgrade.png index 277f2c6bc89a2f565c265e311a233b55d12fba01..b5e35fd1db56cd59e6ee827a63e2be41a6179d41 100644
GIT binary patch
literal 7012
[binary PNG data omitted]
literal 7777
[binary PNG data omitted]

From cfd33342aefdeb4f8bb8a628cadaffee7e66bea4 Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Mon, 24 Jan 2022 18:01:06 +0100
Subject: [PATCH 156/160] Change label "Later" to "Ignore"
--- openpype/tools/tray/pype_tray.py | 2 +- .../settings_system_version_downgrade.png | Bin 7012 -> 6937 bytes 2 files changed, 1 insertion(+), 1 deletion(-)
diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index cfd0aea2a1..99d431172a 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -180,7 +180,7 @@ class VersionDialog(QtWidgets.QDialog): "Running OpenPype version is {}." " Your production uses version {}." ).format(str(current_version), str(expected_version)) - ignore_label = "Later" + ignore_label = "Ignore" restart_label = "Restart && Change" self.setWindowTitle(title)
diff --git a/website/docs/assets/settings/settings_system_version_downgrade.png b/website/docs/assets/settings/settings_system_version_downgrade.png index b5e35fd1db56cd59e6ee827a63e2be41a6179d41..e3a5d7f499320eccda56909050b3666f6691f385 100644
GIT binary patch
literal 6937
[binary PNG data omitted]
literal 7012
[binary PNG data omitted]

From a7b74602594f22957c7cc3da2dd50e163e60b8bb Mon Sep 17 00:00:00 2001
From: OpenPype
Date: Mon, 24 Jan 2022 20:03:08 +0000
Subject: [PATCH 157/160] [Automated] Bump version
--- CHANGELOG.md | 25 ++++++++++--------------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 17 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md index bffbc69931..4a479364fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,16 +1,18 @@ # Changelog -## [3.8.0-nightly.6](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.8.0-nightly.7](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...HEAD) ### 📖 Documentation -- Renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) -- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498) +- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) **🆕 New features** +- Flame: extracting segments with trans-coding
[\#2547](https://github.com/pypeclub/OpenPype/pull/2547) +- Maya : V-Ray Proxy - load all ABC files via proxy [\#2544](https://github.com/pypeclub/OpenPype/pull/2544) +- Maya to Unreal: Extended static mesh workflow [\#2537](https://github.com/pypeclub/OpenPype/pull/2537) - Flame: collecting publishable instances [\#2519](https://github.com/pypeclub/OpenPype/pull/2519) - Flame: create publishable clips [\#2495](https://github.com/pypeclub/OpenPype/pull/2495) @@ -21,23 +23,20 @@ - Webpublisher: Added endpoint to reprocess batch through UI [\#2555](https://github.com/pypeclub/OpenPype/pull/2555) - Settings: PathInput strip passed string [\#2550](https://github.com/pypeclub/OpenPype/pull/2550) - Global: Exctract Review anatomy fill data with output name [\#2548](https://github.com/pypeclub/OpenPype/pull/2548) -- Flame: extracting segments with trans-coding [\#2547](https://github.com/pypeclub/OpenPype/pull/2547) - Cosmetics: Clean up some cosmetics / typos [\#2542](https://github.com/pypeclub/OpenPype/pull/2542) - General: Validate if current process OpenPype version is requested version [\#2529](https://github.com/pypeclub/OpenPype/pull/2529) - General: Be able to use anatomy data in ffmpeg output arguments [\#2525](https://github.com/pypeclub/OpenPype/pull/2525) - Expose toggle publish plug-in settings for Maya Look Shading Engine Naming [\#2521](https://github.com/pypeclub/OpenPype/pull/2521) - Photoshop: Move implementation to OpenPype [\#2510](https://github.com/pypeclub/OpenPype/pull/2510) -- TimersManager: Move module one hierarchy higher [\#2501](https://github.com/pypeclub/OpenPype/pull/2501) - Slack: notifications are sent with Openpype logo and bot name [\#2499](https://github.com/pypeclub/OpenPype/pull/2499) -- Project Manager: Remove project button cleanup [\#2482](https://github.com/pypeclub/OpenPype/pull/2482) -- Maya: Refactor missing \_get\_reference\_node method [\#2455](https://github.com/pypeclub/OpenPype/pull/2455) -- Houdini: Remove broken unique name counter [\#2450](https://github.com/pypeclub/OpenPype/pull/2450) -- Maya: Improve lib.polyConstraint performance when Select tool is not the active tool context [\#2447](https://github.com/pypeclub/OpenPype/pull/2447) +- Slack: Add review to notification message [\#2498](https://github.com/pypeclub/OpenPype/pull/2498) +- Maya: Collect 'fps' animation data only for "review" instances [\#2486](https://github.com/pypeclub/OpenPype/pull/2486) - General: Validate third party before build [\#2425](https://github.com/pypeclub/OpenPype/pull/2425) **🐛 Bug fixes** - AfterEffects: Fix - removed obsolete import [\#2577](https://github.com/pypeclub/OpenPype/pull/2577) +- General: OpenPype version updates [\#2575](https://github.com/pypeclub/OpenPype/pull/2575) - Ftrack: Delete action revision [\#2563](https://github.com/pypeclub/OpenPype/pull/2563) - Webpublisher: ftrack shows incorrect user names [\#2560](https://github.com/pypeclub/OpenPype/pull/2560) - General: Do not validate version if build does not support it [\#2557](https://github.com/pypeclub/OpenPype/pull/2557) @@ -46,6 +45,8 @@ - General: Fix install thread in igniter [\#2549](https://github.com/pypeclub/OpenPype/pull/2549) - Houdini: vdbcache family preserve frame numbers on publish integration + enable validate version for Houdini [\#2535](https://github.com/pypeclub/OpenPype/pull/2535) - Maya: Fix Load VDB to V-Ray [\#2533](https://github.com/pypeclub/OpenPype/pull/2533) +- Maya: ReferenceLoader fix not unique group name error for attach to root 
[\#2532](https://github.com/pypeclub/OpenPype/pull/2532) +- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) - Fix create zip tool - path argument [\#2522](https://github.com/pypeclub/OpenPype/pull/2522) - Maya: Fix Extract Look with space in names [\#2518](https://github.com/pypeclub/OpenPype/pull/2518) - Fix published frame content for sequence starting with 0 [\#2513](https://github.com/pypeclub/OpenPype/pull/2513) @@ -59,8 +60,6 @@ - Maya: Remove Maya Look Assigner check on startup [\#2540](https://github.com/pypeclub/OpenPype/pull/2540) - build\(deps\): bump shelljs from 0.8.4 to 0.8.5 in /website [\#2538](https://github.com/pypeclub/OpenPype/pull/2538) - build\(deps\): bump follow-redirects from 1.14.4 to 1.14.7 in /website [\#2534](https://github.com/pypeclub/OpenPype/pull/2534) -- Maya: ReferenceLoader fix not unique group name error for attach to root [\#2532](https://github.com/pypeclub/OpenPype/pull/2532) -- Maya: namespaced context go back to original namespace when started from inside a namespace [\#2531](https://github.com/pypeclub/OpenPype/pull/2531) - Nuke: Merge avalon's implementation into OpenPype [\#2514](https://github.com/pypeclub/OpenPype/pull/2514) ## [3.7.0](https://github.com/pypeclub/OpenPype/tree/3.7.0) (2022-01-04) @@ -76,16 +75,12 @@ - Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420) - Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) - TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) -- General: Run applications as separate processes under linux [\#2408](https://github.com/pypeclub/OpenPype/pull/2408) **🐛 Bug fixes** - TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471) - Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) - PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) -- Settings UI: Breadcrumbs path does not create new entities [\#2416](https://github.com/pypeclub/OpenPype/pull/2416) -- AfterEffects: Variant 2022 is in defaults but missing in schemas [\#2412](https://github.com/pypeclub/OpenPype/pull/2412) -- Nuke: baking representations was not additive [\#2406](https://github.com/pypeclub/OpenPype/pull/2406) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index 60e619d7c0..7bf7cf1108 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.8.0-nightly.6" +__version__ = "3.8.0-nightly.7" diff --git a/pyproject.toml b/pyproject.toml index 72152bd433..0f2f2a05b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.8.0-nightly.6" # OpenPype +version = "3.8.0-nightly.7" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 0c437017d44888afb1ff8c90f5b88d0fb2b10e08 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 24 Jan 2022 20:19:26 +0000 Subject: [PATCH 158/160] [Automated] Release --- CHANGELOG.md | 13 ++++++------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a479364fc..7516ac4c7b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,8 @@ # Changelog -## [3.8.0-nightly.7](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...HEAD) - -### 📖 Documentation - -- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...3.8.0) **🆕 New features** @@ -54,6 +50,10 @@ - Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) - Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) +### 📖 Documentation + +- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) + **Merged pull requests:** - AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) @@ -78,7 +78,6 @@ **🐛 Bug fixes** -- TVPaint: Create render layer dialog is in front [\#2471](https://github.com/pypeclub/OpenPype/pull/2471) - Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) - PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) diff --git a/openpype/version.py b/openpype/version.py index 7bf7cf1108..d6569ec1c0 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.8.0-nightly.7" +__version__ = "3.8.0" diff --git a/pyproject.toml b/pyproject.toml index 0f2f2a05b8..feef6071a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.8.0-nightly.7" # OpenPype +version = "3.8.0" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From cd331519a82e89c312f686f1c9a1aec2d6b1c127 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 25 Jan 2022 15:54:12 +0100 Subject: [PATCH 159/160] Updated comment in Pillow --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7e38bf5c50..9e1ffb0d8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ jsonschema = "^3.2.0" keyring = "^22.0.1" log4mongo = "^1.7" pathlib2= "^2.3.5" # deadline submit publish job only (single place, maybe not needed?) 
-Pillow = "^9.0" # only used for slates prototype +Pillow = "^9.0" # used in TVPaint and for slates pyblish-base = "^1.8.8" pynput = "^1.7.2" # idle manager in tray pymongo = "^3.11.2" From 782a430cae00a70c097164e5eff2e5cb70c6fa1b Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 26 Jan 2022 03:37:33 +0000 Subject: [PATCH 160/160] [Automated] Bump version --- CHANGELOG.md | 34 +++++++++++++++++++++++----------- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7516ac4c7b..b7741122f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,30 @@ # Changelog +## [3.8.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.8.0...HEAD) + +**🚀 Enhancements** + +- Loader: Allow to toggle default family filters between "include" or "exclude" filtering [\#2541](https://github.com/pypeclub/OpenPype/pull/2541) + +**🐛 Bug fixes** + +- Webpublisher: Fix - subset names from processed .psd used wrong value for task [\#2586](https://github.com/pypeclub/OpenPype/pull/2586) +- `vrscene` creator Deadline webservice URL handling [\#2580](https://github.com/pypeclub/OpenPype/pull/2580) +- global: track name was failing if duplicated root word in name [\#2568](https://github.com/pypeclub/OpenPype/pull/2568) + +**Merged pull requests:** + +- build\(deps\): bump pillow from 8.4.0 to 9.0.0 [\#2523](https://github.com/pypeclub/OpenPype/pull/2523) + ## [3.8.0](https://github.com/pypeclub/OpenPype/tree/3.8.0) (2022-01-24) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.7.0...3.8.0) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.8.0-nightly.7...3.8.0) + +### 📖 Documentation + +- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) **🆕 New features** @@ -50,10 +72,6 @@ - Improve FusionPreLaunch hook errors [\#2505](https://github.com/pypeclub/OpenPype/pull/2505) - Maya: Validate Shape Zero do not keep fixed geometry vertices selected/active after repair [\#2456](https://github.com/pypeclub/OpenPype/pull/2456) -### 📖 Documentation - -- Variable in docs renamed to proper name [\#2546](https://github.com/pypeclub/OpenPype/pull/2546) - **Merged pull requests:** - AfterEffects: Move implementation to OpenPype [\#2543](https://github.com/pypeclub/OpenPype/pull/2543) @@ -70,16 +88,10 @@ - General: Workdir extra folders [\#2462](https://github.com/pypeclub/OpenPype/pull/2462) - Photoshop: New style validations for New publisher [\#2429](https://github.com/pypeclub/OpenPype/pull/2429) -- General: Environment variables groups [\#2424](https://github.com/pypeclub/OpenPype/pull/2424) -- Unreal: Dynamic menu created in Python [\#2422](https://github.com/pypeclub/OpenPype/pull/2422) -- Settings UI: Hyperlinks to settings [\#2420](https://github.com/pypeclub/OpenPype/pull/2420) -- Modules: JobQueue module moved one hierarchy level higher [\#2419](https://github.com/pypeclub/OpenPype/pull/2419) -- TimersManager: Start timer post launch hook [\#2418](https://github.com/pypeclub/OpenPype/pull/2418) **🐛 Bug fixes** - Short Pyblish plugin path [\#2428](https://github.com/pypeclub/OpenPype/pull/2428) -- PS: Introduced settings for invalid characters to use in ValidateNaming plugin [\#2417](https://github.com/pypeclub/OpenPype/pull/2417) **Merged pull requests:** diff --git a/openpype/version.py b/openpype/version.py index d6569ec1c0..60daa4f54f 100644 --- a/openpype/version.py +++ 
b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.8.0" +__version__ = "3.8.1-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 9e1ffb0d8a..7403cbaf8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.8.0" # OpenPype +version = "3.8.1-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License"
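
Note on the automated version bumps above: each "[Automated] Bump version" and "[Automated] Release" patch updates `__version__` in `openpype/version.py` and the `version` field in `pyproject.toml` to the same string (for example `3.8.1-nightly.1` after PATCH 160). The sketch below is not part of the patch series and is not an existing OpenPype tool; it is a minimal, hypothetical stand-alone check (standard library only, run from an assumed repository root) that the two files agree, illustrating the invariant these commits maintain.

# check_version_sync.py -- illustrative sketch only, not part of the patches above.
import re
from pathlib import Path


def read_package_version(repo_root="."):
    """Return __version__ as declared in openpype/version.py."""
    text = (Path(repo_root) / "openpype" / "version.py").read_text(encoding="utf-8")
    match = re.search(r'__version__\s*=\s*"([^"]+)"', text)
    if not match:
        raise ValueError("__version__ not found in openpype/version.py")
    return match.group(1)


def read_pyproject_version(repo_root="."):
    """Return the version field from pyproject.toml (first top-of-line match)."""
    text = (Path(repo_root) / "pyproject.toml").read_text(encoding="utf-8")
    match = re.search(r'^version\s*=\s*"([^"]+)"', text, flags=re.MULTILINE)
    if not match:
        raise ValueError("version not found in pyproject.toml")
    return match.group(1)


if __name__ == "__main__":
    package_version = read_package_version()
    pyproject_version = read_pyproject_version()
    # The automated bump commits keep these two strings identical.
    if package_version != pyproject_version:
        raise SystemExit(
            "Version mismatch: {} (version.py) != {} (pyproject.toml)".format(
                package_version, pyproject_version))
    print("OpenPype version in sync: {}".format(package_version))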