From fad36c75a9e485b765a83a24a459540a9d9e7aed Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Feb 2023 16:11:33 +0100 Subject: [PATCH 01/88] nuke: rendersettins lib with farm rendering class --- openpype/hosts/nuke/api/lib_rendersettings.py | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 openpype/hosts/nuke/api/lib_rendersettings.py diff --git a/openpype/hosts/nuke/api/lib_rendersettings.py b/openpype/hosts/nuke/api/lib_rendersettings.py new file mode 100644 index 0000000000..959b461c29 --- /dev/null +++ b/openpype/hosts/nuke/api/lib_rendersettings.py @@ -0,0 +1,86 @@ + +from openpype.lib import Logger +from openpype.settings import ( + get_current_project_settings, + get_system_settings +) + + +class RenderFarmSettings: + """ Class for getting farm settings from project settings + """ + log = Logger.get_logger("RenderFarmSettings") + + _active_farm_module: str = None + _farm_modules: list = [ + "deadline", "muster", "royalrender"] + _farm_plugins: dict = { + "deadline": "NukeSubmitDeadline" + } + _creator_farm_keys: list = [ + "chunk_size", "priority", "concurrent_tasks"] + + def __init__(self, project_settings=None): + """ Get project settings and active farm module + """ + self._project_settings = ( + project_settings or get_current_project_settings() + ) + # Get active farm module from system settings + self._get_active_farm_module_from_system_settings() + + def _get_active_farm_module_from_system_settings(self): + """ Get active farm module from system settings + """ + active_modules = [ + module_ + for module_ in self._farm_modules + if get_system_settings()["modules"][module_]["enabled"] + ] + if not active_modules: + raise ValueError(( + "No active farm module found in system settings." + )) + if len(active_modules) > 1: + raise ValueError(( + "Multiple active farm modules " + "found in system settings. {}".format(active_modules) + )) + + self._active_farm_module = active_modules.pop() + + @property + def active_farm_module(self): + return self._active_farm_module + + def get_rendering_attributes(self): + ''' Get rendering attributes from project settings + + Returns: + dict: rendering attributes + ''' + return_dict = {} + farm_plugin = self._farm_plugins.get(self.active_farm_module) + + if farm_plugin: + raise ValueError(( + "Farm plugin \"{}\" not found in farm plugins." + ).format(farm_plugin)) + + # Get farm module settings + module_settings = self._project_settings[self.active_farm_module] + + # Get farm plugin settings + farm_plugin_settings = ( + module_settings["publish"][farm_plugin]) + + # Get all keys from farm_plugin_settings + for key in self._creator_farm_keys: + if key not in farm_plugin_settings: + self.log.warning(( + "Key \"{}\" not found in farm plugin \"{}\" settings." 
+ ).format(key, farm_plugin)) + continue + return_dict[key] = farm_plugin_settings[key] + + return return_dict From 073f0be7f706fd6ae222c3900cb1c02c523d1c04 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Feb 2023 16:12:49 +0100 Subject: [PATCH 02/88] nuke: creators with new RenderFarmSettings class --- openpype/hosts/nuke/api/plugin.py | 2 +- .../nuke/plugins/create/create_write_prerender.py | 13 ++++++++----- .../nuke/plugins/create/create_write_render.py | 13 ++++++++----- 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index d3f8357f7d..ef77e029ad 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1236,7 +1236,7 @@ def convert_to_valid_instaces(): creator_attr["farm_chunk"] = ( node["deadlineChunkSize"].value()) if "deadlineConcurrentTasks" in node.knobs(): - creator_attr["farm_concurency"] = ( + creator_attr["farm_concurrency"] = ( node["deadlineConcurrentTasks"].value()) _remove_old_knobs(node) diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index a15f362dd1..a99bd0c4ab 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -12,6 +12,7 @@ from openpype.lib import ( UILabelDef ) from openpype.hosts.nuke import api as napi +from openpype.hosts.nuke.api.lib_rendersettings import RenderFarmSettings class CreateWritePrerender(napi.NukeWriteCreator): @@ -50,6 +51,8 @@ class CreateWritePrerender(napi.NukeWriteCreator): self._get_reviewable_bool() ] if "farm_rendering" in self.instance_attributes: + render_farm_settings = RenderFarmSettings().get_rendering_attributes() + attr_defs.extend([ UISeparatorDef(), UILabelDef("Farm rendering attributes"), @@ -59,21 +62,21 @@ class CreateWritePrerender(napi.NukeWriteCreator): label="Priority", minimum=1, maximum=99, - default=50 + default=render_farm_settings.get("priority", 50) ), NumberDef( "farm_chunk", label="Chunk size", minimum=1, maximum=99, - default=10 + default=render_farm_settings.get("chunk_size", 10) ), NumberDef( - "farm_concurency", - label="Concurent tasks", + "farm_concurrency", + label="Concurrent tasks", minimum=1, maximum=10, - default=1 + default=render_farm_settings.get("concurrent_tasks", 1) ) ]) return attr_defs diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 481d1d2201..bbaba212c2 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -12,6 +12,7 @@ from openpype.lib import ( UILabelDef ) from openpype.hosts.nuke import api as napi +from openpype.hosts.nuke.api.lib_rendersettings import RenderFarmSettings class CreateWriteRender(napi.NukeWriteCreator): @@ -47,6 +48,8 @@ class CreateWriteRender(napi.NukeWriteCreator): self._get_reviewable_bool() ] if "farm_rendering" in self.instance_attributes: + render_farm_settings = RenderFarmSettings().get_rendering_attributes() + attr_defs.extend([ UISeparatorDef(), UILabelDef("Farm rendering attributes"), @@ -56,21 +59,21 @@ class CreateWriteRender(napi.NukeWriteCreator): label="Priority", minimum=1, maximum=99, - default=50 + default=render_farm_settings.get("priority", 50) ), NumberDef( "farm_chunk", label="Chunk size", minimum=1, maximum=99, - default=10 + default=render_farm_settings.get("chunk_size", 10) 
), NumberDef( - "farm_concurency", - label="Concurent tasks", + "farm_concurrency", + label="Concurrent tasks", minimum=1, maximum=10, - default=1 + default=render_farm_settings.get("concurrent_tasks", 1) ) ]) return attr_defs From 5aaf8eb18a021b76249c561d499e3e664c983886 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Feb 2023 16:13:10 +0100 Subject: [PATCH 03/88] nuke: publishing with new settings --- .../hosts/nuke/plugins/publish/collect_writes.py | 16 ++++++++-------- .../plugins/publish/submit_nuke_deadline.py | 10 +++++----- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 3054e5a30c..0aa044f06d 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -132,14 +132,14 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): self.log.info("Publishing rendered frames ...") elif render_target == "farm": - farm_priority = creator_attributes.get("farm_priority") - farm_chunk = creator_attributes.get("farm_chunk") - farm_concurency = creator_attributes.get("farm_concurency") - instance.data.update({ - "deadlineChunkSize": farm_chunk or 1, - "deadlinePriority": farm_priority or 50, - "deadlineConcurrentTasks": farm_concurency or 0 - }) + farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"] + for key in farm_keys: + # Skip if key is not in creator attributes + if key not in creator_attributes: + continue + # Add farm attributes to instance + instance.data[key] = creator_attributes[key] + # Farm rendering instance.data["transfer"] = False instance.data["farm"] = True diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index faa66effbd..0f8c69629e 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -162,16 +162,16 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): pass # define chunk and priority - chunk_size = instance.data["deadlineChunkSize"] - if chunk_size == 0 and self.chunk_size: + chunk_size = instance.data.get("farm_chunk") + if not chunk_size: chunk_size = self.chunk_size # define chunk and priority - concurrent_tasks = instance.data["deadlineConcurrentTasks"] - if concurrent_tasks == 0 and self.concurrent_tasks: + concurrent_tasks = instance.data.get("farm_concurrency") + if not concurrent_tasks: concurrent_tasks = self.concurrent_tasks - priority = instance.data["deadlinePriority"] + priority = instance.data.get("farm_priority") if not priority: priority = self.priority From 656318f122fe4cdc167e780b7737c8780890582b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Feb 2023 16:59:40 +0100 Subject: [PATCH 04/88] adding log to render farm settings class init --- openpype/hosts/nuke/api/lib_rendersettings.py | 10 ++++++++-- .../nuke/plugins/create/create_write_prerender.py | 4 +++- .../hosts/nuke/plugins/create/create_write_render.py | 3 ++- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/nuke/api/lib_rendersettings.py b/openpype/hosts/nuke/api/lib_rendersettings.py index 959b461c29..4d5440fe48 100644 --- a/openpype/hosts/nuke/api/lib_rendersettings.py +++ b/openpype/hosts/nuke/api/lib_rendersettings.py @@ -20,9 +20,12 @@ class RenderFarmSettings: _creator_farm_keys: list = [ "chunk_size", "priority", "concurrent_tasks"] - def 
__init__(self, project_settings=None): + def __init__(self, project_settings=None, log=None): """ Get project settings and active farm module """ + if log: + self.log = log + self._project_settings = ( project_settings or get_current_project_settings() ) @@ -61,8 +64,9 @@ class RenderFarmSettings: ''' return_dict = {} farm_plugin = self._farm_plugins.get(self.active_farm_module) + self.log.debug("Farm plugin: \"{}\"".format(farm_plugin)) - if farm_plugin: + if not farm_plugin: raise ValueError(( "Farm plugin \"{}\" not found in farm plugins." ).format(farm_plugin)) @@ -73,6 +77,8 @@ class RenderFarmSettings: # Get farm plugin settings farm_plugin_settings = ( module_settings["publish"][farm_plugin]) + self.log.debug( + "Farm plugin settings: \"{}\"".format(farm_plugin_settings)) # Get all keys from farm_plugin_settings for key in self._creator_farm_keys: diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index a99bd0c4ab..411a79dbf4 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -51,7 +51,9 @@ class CreateWritePrerender(napi.NukeWriteCreator): self._get_reviewable_bool() ] if "farm_rendering" in self.instance_attributes: - render_farm_settings = RenderFarmSettings().get_rendering_attributes() + render_farm_settings = RenderFarmSettings( + log=self.log).get_rendering_attributes() + attr_defs.extend([ UISeparatorDef(), diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index bbaba212c2..a51661425f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -48,7 +48,8 @@ class CreateWriteRender(napi.NukeWriteCreator): self._get_reviewable_bool() ] if "farm_rendering" in self.instance_attributes: - render_farm_settings = RenderFarmSettings().get_rendering_attributes() + render_farm_settings = RenderFarmSettings( + log=self.log).get_rendering_attributes() attr_defs.extend([ UISeparatorDef(), From b09bc8d4a3e85eb3187b08b1672ca8bd0528dd99 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Feb 2023 17:00:19 +0100 Subject: [PATCH 05/88] removing unused items from settings --- openpype/settings/defaults/project_settings/deadline.json | 1 - .../schemas/projects_schema/schema_project_deadline.json | 5 ----- 2 files changed, 6 deletions(-) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 7183603c4b..6b6f2d465b 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -52,7 +52,6 @@ "enabled": true, "optional": false, "active": true, - "use_published": true, "priority": 50, "chunk_size": 10, "concurrent_tasks": 1, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index a320dfca4f..bb5a65e1b7 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -285,11 +285,6 @@ "key": "active", "label": "Active" }, - { - "type": "boolean", - "key": "use_published", - "label": "Use Published scene" - }, { "type": "splitter" }, From 
f2274ec2e963e6eb5e10233ff593804e02ab1f5c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 23 Feb 2023 17:00:51 +0100 Subject: [PATCH 06/88] removing none existent host also fix typo --- .../plugins/publish/submit_nuke_deadline.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 0f8c69629e..b4b59c4c77 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -23,7 +23,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): label = "Submit to Deadline" order = pyblish.api.IntegratorOrder + 0.1 - hosts = ["nuke", "nukestudio"] + hosts = ["nuke"] families = ["render.farm", "prerender.farm"] optional = True targets = ["local"] @@ -141,7 +141,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): exe_node_name, start_frame, end_frame, - responce_data=None + response_data=None ): render_dir = os.path.normpath(os.path.dirname(render_path)) batch_name = os.path.basename(script_path) @@ -152,8 +152,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): output_filename_0 = self.preview_fname(render_path) - if not responce_data: - responce_data = {} + if not response_data: + response_data = {} try: # Ensure render folder exists @@ -244,11 +244,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "AuxFiles": [] } - if responce_data.get("_id"): + if response_data.get("_id"): payload["JobInfo"].update({ "JobType": "Normal", - "BatchName": responce_data["Props"]["Batch"], - "JobDependency0": responce_data["_id"], + "BatchName": response_data["Props"]["Batch"], + "JobDependency0": response_data["_id"], "ChunkSize": 99999999 }) From c2685a6c57394a4fb839a1618846f1363f470657 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 24 Feb 2023 17:02:18 +0100 Subject: [PATCH 07/88] Nuke: caching settings and retrieved active farm module --- openpype/hosts/nuke/api/lib_rendersettings.py | 35 ++++++++++++++----- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/nuke/api/lib_rendersettings.py b/openpype/hosts/nuke/api/lib_rendersettings.py index 4d5440fe48..6784890160 100644 --- a/openpype/hosts/nuke/api/lib_rendersettings.py +++ b/openpype/hosts/nuke/api/lib_rendersettings.py @@ -12,25 +12,40 @@ class RenderFarmSettings: log = Logger.get_logger("RenderFarmSettings") _active_farm_module: str = None - _farm_modules: list = [ - "deadline", "muster", "royalrender"] + _farm_modules: list = ["deadline"] _farm_plugins: dict = { "deadline": "NukeSubmitDeadline" } _creator_farm_keys: list = [ "chunk_size", "priority", "concurrent_tasks"] + _cached_project_settings = None + _cached_system_settings = None + def __init__(self, project_settings=None, log=None): """ Get project settings and active farm module """ if log: self.log = log - self._project_settings = ( - project_settings or get_current_project_settings() - ) - # Get active farm module from system settings - self._get_active_farm_module_from_system_settings() + if project_settings: + self._cached_project_settings = project_settings + + @property + def project_settings(self): + """ returning cached project settings or getting new one + """ + if not self._cached_project_settings: + self._cached_project_settings = get_current_project_settings() + return self._cached_project_settings + + @property + def system_settings(self): + """ returning cached project 
settings or getting new one + """ + if not self._cached_system_settings: + self._cached_system_settings = get_system_settings() + return self._cached_system_settings def _get_active_farm_module_from_system_settings(self): """ Get active farm module from system settings @@ -38,7 +53,7 @@ class RenderFarmSettings: active_modules = [ module_ for module_ in self._farm_modules - if get_system_settings()["modules"][module_]["enabled"] + if self.system_settings["modules"][module_]["enabled"] ] if not active_modules: raise ValueError(( @@ -54,6 +69,10 @@ class RenderFarmSettings: @property def active_farm_module(self): + # cache active farm module + if self._active_farm_module is None: + self._get_active_farm_module_from_system_settings() + return self._active_farm_module def get_rendering_attributes(self): From 61cf3a068d5aea4daab92201540e6b33c9a0fe0a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 24 Feb 2023 17:07:43 +0100 Subject: [PATCH 08/88] Nuke: missing bits --- openpype/hosts/nuke/api/lib_rendersettings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/api/lib_rendersettings.py b/openpype/hosts/nuke/api/lib_rendersettings.py index 6784890160..5c23bcb1bc 100644 --- a/openpype/hosts/nuke/api/lib_rendersettings.py +++ b/openpype/hosts/nuke/api/lib_rendersettings.py @@ -41,7 +41,7 @@ class RenderFarmSettings: @property def system_settings(self): - """ returning cached project settings or getting new one + """ returning cached system settings or getting new one """ if not self._cached_system_settings: self._cached_system_settings = get_system_settings() @@ -91,7 +91,7 @@ class RenderFarmSettings: ).format(farm_plugin)) # Get farm module settings - module_settings = self._project_settings[self.active_farm_module] + module_settings = self.project_settings[self.active_farm_module] # Get farm plugin settings farm_plugin_settings = ( From 557ce7c016515f15878d9c37d6a8cf6fb777be43 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 27 Feb 2023 15:08:03 +0100 Subject: [PATCH 09/88] Nuke, Deadline: moving to module plugin centric approach --- openpype/hosts/nuke/api/lib_rendersettings.py | 111 ------------------ .../plugins/create/create_write_prerender.py | 37 +----- .../plugins/create/create_write_render.py | 36 +----- .../plugins/publish/submit_nuke_deadline.py | 81 +++++++++---- 4 files changed, 60 insertions(+), 205 deletions(-) delete mode 100644 openpype/hosts/nuke/api/lib_rendersettings.py diff --git a/openpype/hosts/nuke/api/lib_rendersettings.py b/openpype/hosts/nuke/api/lib_rendersettings.py deleted file mode 100644 index 5c23bcb1bc..0000000000 --- a/openpype/hosts/nuke/api/lib_rendersettings.py +++ /dev/null @@ -1,111 +0,0 @@ - -from openpype.lib import Logger -from openpype.settings import ( - get_current_project_settings, - get_system_settings -) - - -class RenderFarmSettings: - """ Class for getting farm settings from project settings - """ - log = Logger.get_logger("RenderFarmSettings") - - _active_farm_module: str = None - _farm_modules: list = ["deadline"] - _farm_plugins: dict = { - "deadline": "NukeSubmitDeadline" - } - _creator_farm_keys: list = [ - "chunk_size", "priority", "concurrent_tasks"] - - _cached_project_settings = None - _cached_system_settings = None - - def __init__(self, project_settings=None, log=None): - """ Get project settings and active farm module - """ - if log: - self.log = log - - if project_settings: - self._cached_project_settings = project_settings - - @property - def project_settings(self): - """ 
returning cached project settings or getting new one - """ - if not self._cached_project_settings: - self._cached_project_settings = get_current_project_settings() - return self._cached_project_settings - - @property - def system_settings(self): - """ returning cached system settings or getting new one - """ - if not self._cached_system_settings: - self._cached_system_settings = get_system_settings() - return self._cached_system_settings - - def _get_active_farm_module_from_system_settings(self): - """ Get active farm module from system settings - """ - active_modules = [ - module_ - for module_ in self._farm_modules - if self.system_settings["modules"][module_]["enabled"] - ] - if not active_modules: - raise ValueError(( - "No active farm module found in system settings." - )) - if len(active_modules) > 1: - raise ValueError(( - "Multiple active farm modules " - "found in system settings. {}".format(active_modules) - )) - - self._active_farm_module = active_modules.pop() - - @property - def active_farm_module(self): - # cache active farm module - if self._active_farm_module is None: - self._get_active_farm_module_from_system_settings() - - return self._active_farm_module - - def get_rendering_attributes(self): - ''' Get rendering attributes from project settings - - Returns: - dict: rendering attributes - ''' - return_dict = {} - farm_plugin = self._farm_plugins.get(self.active_farm_module) - self.log.debug("Farm plugin: \"{}\"".format(farm_plugin)) - - if not farm_plugin: - raise ValueError(( - "Farm plugin \"{}\" not found in farm plugins." - ).format(farm_plugin)) - - # Get farm module settings - module_settings = self.project_settings[self.active_farm_module] - - # Get farm plugin settings - farm_plugin_settings = ( - module_settings["publish"][farm_plugin]) - self.log.debug( - "Farm plugin settings: \"{}\"".format(farm_plugin_settings)) - - # Get all keys from farm_plugin_settings - for key in self._creator_farm_keys: - if key not in farm_plugin_settings: - self.log.warning(( - "Key \"{}\" not found in farm plugin \"{}\" settings." 
- ).format(key, farm_plugin)) - continue - return_dict[key] = farm_plugin_settings[key] - - return return_dict diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 411a79dbf4..1603bf17e3 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -6,13 +6,9 @@ from openpype.pipeline import ( CreatedInstance ) from openpype.lib import ( - BoolDef, - NumberDef, - UISeparatorDef, - UILabelDef + BoolDef ) from openpype.hosts.nuke import api as napi -from openpype.hosts.nuke.api.lib_rendersettings import RenderFarmSettings class CreateWritePrerender(napi.NukeWriteCreator): @@ -50,37 +46,6 @@ class CreateWritePrerender(napi.NukeWriteCreator): self._get_render_target_enum(), self._get_reviewable_bool() ] - if "farm_rendering" in self.instance_attributes: - render_farm_settings = RenderFarmSettings( - log=self.log).get_rendering_attributes() - - - attr_defs.extend([ - UISeparatorDef(), - UILabelDef("Farm rendering attributes"), - BoolDef("suspended_publish", label="Suspended publishing"), - NumberDef( - "farm_priority", - label="Priority", - minimum=1, - maximum=99, - default=render_farm_settings.get("priority", 50) - ), - NumberDef( - "farm_chunk", - label="Chunk size", - minimum=1, - maximum=99, - default=render_farm_settings.get("chunk_size", 10) - ), - NumberDef( - "farm_concurrency", - label="Concurrent tasks", - minimum=1, - maximum=10, - default=render_farm_settings.get("concurrent_tasks", 1) - ) - ]) return attr_defs def create_instance_node(self, subset_name, instance_data): diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index a51661425f..72fcb4f232 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -6,13 +6,9 @@ from openpype.pipeline import ( CreatedInstance ) from openpype.lib import ( - BoolDef, - NumberDef, - UISeparatorDef, - UILabelDef + BoolDef ) from openpype.hosts.nuke import api as napi -from openpype.hosts.nuke.api.lib_rendersettings import RenderFarmSettings class CreateWriteRender(napi.NukeWriteCreator): @@ -47,36 +43,6 @@ class CreateWriteRender(napi.NukeWriteCreator): self._get_render_target_enum(), self._get_reviewable_bool() ] - if "farm_rendering" in self.instance_attributes: - render_farm_settings = RenderFarmSettings( - log=self.log).get_rendering_attributes() - - attr_defs.extend([ - UISeparatorDef(), - UILabelDef("Farm rendering attributes"), - BoolDef("suspended_publish", label="Suspended publishing"), - NumberDef( - "farm_priority", - label="Priority", - minimum=1, - maximum=99, - default=render_farm_settings.get("priority", 50) - ), - NumberDef( - "farm_chunk", - label="Chunk size", - minimum=1, - maximum=99, - default=render_farm_settings.get("chunk_size", 10) - ), - NumberDef( - "farm_concurrency", - label="Concurrent tasks", - minimum=1, - maximum=10, - default=render_farm_settings.get("concurrent_tasks", 1) - ) - ]) return attr_defs def create_instance_node(self, subset_name, instance_data): diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index b4b59c4c77..51e380dc03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -9,11 
+9,19 @@ import pyblish.api import nuke from openpype.pipeline import legacy_io +from openpype.pipeline.publish import ( + OpenPypePyblishPluginMixin +) from openpype.tests.lib import is_in_tests -from openpype.lib import is_running_from_build +from openpype.lib import ( + is_running_from_build, + BoolDef, + NumberDef, + UISeparatorDef +) - -class NukeSubmitDeadline(pyblish.api.InstancePlugin): +class NukeSubmitDeadline(pyblish.api.InstancePlugin, + OpenPypePyblishPluginMixin): """Submit write to Deadline Renders are submitted to a Deadline Web Service as @@ -21,10 +29,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): """ - label = "Submit to Deadline" + label = "Submit Nuke to Deadline" order = pyblish.api.IntegratorOrder + 0.1 hosts = ["nuke"] - families = ["render.farm", "prerender.farm"] + families = ["render", "prerender.farm"] optional = True targets = ["local"] @@ -39,7 +47,42 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): env_allowed_keys = [] env_search_replace_values = {} + @classmethod + def get_attribute_defs(cls): + return [ + NumberDef( + "priority", + label="Priority", + default=cls.priority, + decimals=0 + ), + NumberDef( + "chunk", + label="Frames Per Task", + default=cls.chunk_size, + decimals=0, + minimum=1, + maximum=1000 + ), + NumberDef( + "concurrency", + label="Concurency", + default=cls.concurrent_tasks, + decimals=0, + minimum=1, + maximum=10 + ), + BoolDef( + "use_gpu", + default=cls.use_gpu, + label="Use GPU" + ) + ] + def process(self, instance): + instance.data["attributeValues"] = self.get_attr_values_from_data( + instance.data) + instance.data["toBeRenderedOn"] = "deadline" families = instance.data["families"] @@ -161,20 +204,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): except OSError: pass - # define chunk and priority - chunk_size = instance.data.get("farm_chunk") - if not chunk_size: - chunk_size = self.chunk_size - - # define chunk and priority - concurrent_tasks = instance.data.get("farm_concurrency") - if not concurrent_tasks: - concurrent_tasks = self.concurrent_tasks - - priority = instance.data.get("farm_priority") - if not priority: - priority = self.priority - # resolve any limit groups limit_groups = self.get_limit_groups() self.log.info("Limit groups: `{}`".format(limit_groups)) @@ -193,9 +222,14 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): # Arbitrary username, for visualisation in Monitor "UserName": self._deadline_user, - "Priority": priority, - "ChunkSize": chunk_size, - "ConcurrentTasks": concurrent_tasks, + "Priority": instance.data["attributeValues"].get( + "priority", self.priority), + "ChunkSize": instance.data["attributeValues"].get( + "chunk", self.chunk_size), + "ConcurrentTasks": instance.data["attributeValues"].get( + "concurrency", + self.concurrent_tasks + ), "Department": self.department, @@ -234,7 +268,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): "AWSAssetFile0": render_path, # using GPU by default - "UseGpu": self.use_gpu, + "UseGpu": instance.data["attributeValues"].get( + "use_gpu", self.use_gpu), # Only the specific write node is rendered. 
"WriteNode": exe_node_name From e444ab3f5e3037cc0528a748df4f0554a597fb78 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 27 Feb 2023 15:16:27 +0100 Subject: [PATCH 10/88] hound comments --- .../modules/deadline/plugins/publish/submit_nuke_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 51e380dc03..aff34c7e4a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -16,10 +16,10 @@ from openpype.tests.lib import is_in_tests from openpype.lib import ( is_running_from_build, BoolDef, - NumberDef, - UISeparatorDef + NumberDef ) + class NukeSubmitDeadline(pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin): """Submit write to Deadline From 9229ff9c0b229db2d9d25c8d91b6cb6f7b58de9b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 28 Feb 2023 13:58:01 +0100 Subject: [PATCH 11/88] hiero: fix effect item node class --- .../hosts/hiero/plugins/publish/collect_clip_effects.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py index 9489b1c4fb..95e4b09504 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -120,13 +120,10 @@ class CollectClipEffects(pyblish.api.InstancePlugin): track = sitem.parentTrack().name() # node serialization node = sitem.node() - node_serialized = self.node_serialisation(node) + node_serialized = self.node_serialization(node) node_name = sitem.name() + node_class = node.Class() - if "_" in node_name: - node_class = re.sub(r"(?:_)[_0-9]+", "", node_name) # more numbers - else: - node_class = re.sub(r"\d+", "", node_name) # one number # collect timelineIn/Out effect_t_in = int(sitem.timelineIn()) @@ -148,7 +145,7 @@ class CollectClipEffects(pyblish.api.InstancePlugin): "node": node_serialized }} - def node_serialisation(self, node): + def node_serialization(self, node): node_serialized = {} # adding ignoring knob keys From 2bd4e5c3c91b453bca3b21aed3c1cfd6cf19be37 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 28 Feb 2023 14:11:01 +0100 Subject: [PATCH 12/88] hound comments --- openpype/hosts/hiero/plugins/publish/collect_clip_effects.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py index 95e4b09504..d455ad4a4e 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -124,7 +124,6 @@ class CollectClipEffects(pyblish.api.InstancePlugin): node_name = sitem.name() node_class = node.Class() - # collect timelineIn/Out effect_t_in = int(sitem.timelineIn()) effect_t_out = int(sitem.timelineOut()) From ee3e346c8df68515d72221bb2e3fe84ab92e9b0e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 28 Feb 2023 14:53:49 +0100 Subject: [PATCH 13/88] Global: refactory colormanaged exctractor into plugin mixin --- .../plugins/publish/extract_render_local.py | 3 +- openpype/pipeline/publish/__init__.py | 4 +- openpype/pipeline/publish/publish_plugins.py | 40 ++++++++++--------- .../publish/extract_colorspace_data.py | 3 +- 4 files changed, 28 insertions(+), 22 
deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index b99a7a9548..4d7ade9c7a 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -9,7 +9,8 @@ from openpype.pipeline import publish from openpype.lib import collect_frames -class NukeRenderLocal(publish.ExtractorColormanaged): +class NukeRenderLocal(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): """Render the current Nuke composition locally. Extract the result of savers by starting a comp render diff --git a/openpype/pipeline/publish/__init__.py b/openpype/pipeline/publish/__init__.py index 05ba1c9c33..36252c9f3d 100644 --- a/openpype/pipeline/publish/__init__.py +++ b/openpype/pipeline/publish/__init__.py @@ -19,7 +19,7 @@ from .publish_plugins import ( RepairContextAction, Extractor, - ExtractorColormanaged, + ColormanagedPyblishPluginMixin ) from .lib import ( @@ -64,7 +64,7 @@ __all__ = ( "RepairContextAction", "Extractor", - "ExtractorColormanaged", + "ColormanagedPyblishPluginMixin", "get_publish_template_name", diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index e2ae893aa9..0142919e76 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -3,7 +3,7 @@ from abc import ABCMeta from pprint import pformat import pyblish.api from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin - +from openpype.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS from openpype.lib import BoolDef from .lib import ( @@ -288,24 +288,25 @@ class Extractor(pyblish.api.InstancePlugin): return get_instance_staging_dir(instance) -class ExtractorColormanaged(Extractor): - """Extractor base for color managed image data. - - Each Extractor intended to export pixel data representation - should inherit from this class to allow color managed data. - Class implements "get_colorspace_settings" and - "set_representation_colorspace" functions used - for injecting colorspace data to representation data for farther - integration into db document. +class ColormanagedPyblishPluginMixin(object): + """Mixin for colormanaged plugins. + This class is used to set colorspace data to a publishing + representation. It contains a static method, + get_colorspace_settings, which returns config and + file rules data for the host context. + It also contains a method, set_representation_colorspace, + which sets colorspace data to the representation. + The allowed file extensions are listed in the allowed_ext variable. + he method first checks if the file extension is in + the list of allowed extensions. If it is, it then gets the + colorspace settings from the host context and gets a + matching colorspace from rules. Finally, it infuses this + data into the representation. 
""" - - allowed_ext = [ - "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg", - "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut", - "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf", - "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img" - ] + allowed_ext = set( + ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) + ) @staticmethod def get_colorspace_settings(context): @@ -375,7 +376,10 @@ class ExtractorColormanaged(Extractor): ext = representation["ext"] # check extension self.log.debug("__ ext: `{}`".format(ext)) - if ext.lower() not in self.allowed_ext: + + # check if ext in lower case is in self.allowed_ext + if ext.lstrip(".").lower() not in self.allowed_ext: + self.log.debug("Extension is not in allowed extensions.") return if colorspace_settings is None: diff --git a/openpype/plugins/publish/extract_colorspace_data.py b/openpype/plugins/publish/extract_colorspace_data.py index 611fb91cbb..363df28fb5 100644 --- a/openpype/plugins/publish/extract_colorspace_data.py +++ b/openpype/plugins/publish/extract_colorspace_data.py @@ -2,7 +2,8 @@ import pyblish.api from openpype.pipeline import publish -class ExtractColorspaceData(publish.ExtractorColormanaged): +class ExtractColorspaceData(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): """ Inject Colorspace data to available representations. Input data: From 76f312a3ff026d04feda901b98bc0ad523e3b00b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 28 Feb 2023 15:04:54 +0100 Subject: [PATCH 14/88] Nuke: adding colorspace to representation when rendered mode --- .../hosts/nuke/plugins/publish/collect_writes.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 3054e5a30c..2b741426e6 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -3,9 +3,10 @@ from pprint import pformat import nuke import pyblish.api from openpype.hosts.nuke import api as napi +from openpype.pipeline import publish - -class CollectNukeWrites(pyblish.api.InstancePlugin): +class CollectNukeWrites(pyblish.api.InstancePlugin, + publish.ColormanagedPyblishPluginMixin): """Collect all write nodes.""" order = pyblish.api.CollectorOrder - 0.48 @@ -128,6 +129,12 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): else: representation['files'] = collected_frames + # inject colorspace data + self.set_representation_colorspace( + representation, instance.context, + colorspace=colorspace + ) + instance.data["representations"].append(representation) self.log.info("Publishing rendered frames ...") @@ -147,6 +154,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): # get colorspace and add to version data colorspace = napi.get_colorspace_from_node(write_node) + + # TODO: remove this when we have proper colorspace support version_data = { "colorspace": colorspace } From 9bb36864be3911b25c37c873d68ae4871fdcf57a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 28 Feb 2023 15:05:16 +0100 Subject: [PATCH 15/88] Nuke: colorspace from node unified --- openpype/hosts/nuke/plugins/publish/extract_render_local.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 4d7ade9c7a..e5feda4cd8 100644 --- 
a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -4,7 +4,7 @@ import shutil import pyblish.api import clique import nuke - +from openpype.hosts.nuke import api as napi from openpype.pipeline import publish from openpype.lib import collect_frames @@ -86,7 +86,7 @@ class NukeRenderLocal(publish.Extractor, ) ext = node["file_type"].value() - colorspace = node["colorspace"].value() + colorspace = napi.get_colorspace_from_node(node) if "representations" not in instance.data: instance.data["representations"] = [] From 4f94a4454ab254018c189c8aa53c21fb12b1392d Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Wed, 1 Mar 2023 16:02:12 +0000 Subject: [PATCH 16/88] Only run Maya specific code in Maya. --- .../plugins/publish/submit_publish_job.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 5325715e38..29f6f406df 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -933,15 +933,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.log.info(data.get("expectedFiles")) - additional_data = { - "renderProducts": instance.data["renderProducts"], - "colorspaceConfig": instance.data["colorspaceConfig"], - "display": instance.data["colorspaceDisplay"], - "view": instance.data["colorspaceView"], - "colorspaceTemplate": instance.data["colorspaceConfig"].replace( - str(context.data["anatomy"].roots["work"]), "{root[work]}" - ) - } + additional_data = {} + if pyblish.api.current_host() == "maya": + config = instance.data["colorspaceConfig"] + additional_data = { + "renderProducts": instance.data["renderProducts"], + "colorspaceConfig": instance.data["colorspaceConfig"], + "display": instance.data["colorspaceDisplay"], + "view": instance.data["colorspaceView"], + "colorspaceTemplate": config.replace( + str(context.data["anatomy"].roots["work"]), "{root[work]}" + ) + } if isinstance(data.get("expectedFiles")[0], dict): # we cannot attach AOVs to other subsets as we consider every From a3508b14122d6ab884a4303d636bdf37b35ca973 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 2 Mar 2023 08:18:47 +0000 Subject: [PATCH 17/88] Fix _get_representations --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 29f6f406df..adfbcbded8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -588,7 +588,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.log.debug("instances:{}".format(instances)) return instances - def _get_representations(self, instance, exp_files, additional_data): + def _get_representations(self, instance, exp_files): """Create representations for file sequences. 
This will return representations of expected files if they are not From 5bb204cacbfd0f9769f2f4112e50f6e65b4a7f6e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 2 Mar 2023 11:30:42 +0100 Subject: [PATCH 18/88] nuke flip order --- openpype/hosts/nuke/plugins/publish/collect_writes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 2b741426e6..f6acd24f99 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -67,6 +67,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, write_file_path = nuke.filename(write_node) output_dir = os.path.dirname(write_file_path) + # get colorspace and add to version data + colorspace = napi.get_colorspace_from_node(write_node) + self.log.debug('output dir: {}'.format(output_dir)) if render_target == "frames": @@ -152,9 +155,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, instance.data["farm"] = True self.log.info("Farm rendering ON ...") - # get colorspace and add to version data - colorspace = napi.get_colorspace_from_node(write_node) - # TODO: remove this when we have proper colorspace support version_data = { "colorspace": colorspace From f0997710818d3ca2f5ece87aed242ddf4c139a6c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 2 Mar 2023 11:36:17 +0100 Subject: [PATCH 19/88] hound --- openpype/hosts/nuke/plugins/publish/collect_writes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index f6acd24f99..858fa79a4b 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -5,6 +5,7 @@ import pyblish.api from openpype.hosts.nuke import api as napi from openpype.pipeline import publish + class CollectNukeWrites(pyblish.api.InstancePlugin, publish.ColormanagedPyblishPluginMixin): """Collect all write nodes.""" From 91685e3d1fd3b43677fc33a537c3d93a5e8920cb Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 2 Mar 2023 11:06:47 +0000 Subject: [PATCH 20/88] Move AOV code to host agnostic. --- .../deadline/plugins/publish/submit_publish_job.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index adfbcbded8..31df4746ba 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -933,8 +933,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.log.info(data.get("expectedFiles")) - additional_data = {} - if pyblish.api.current_host() == "maya": + if isinstance(data.get("expectedFiles")[0], dict): + # we cannot attach AOVs to other subsets as we consider every + # AOV subset of its own. + config = instance.data["colorspaceConfig"] additional_data = { "renderProducts": instance.data["renderProducts"], @@ -946,10 +948,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): ) } - if isinstance(data.get("expectedFiles")[0], dict): - # we cannot attach AOVs to other subsets as we consider every - # AOV subset of its own. 
- if len(data.get("attachTo")) > 0: assert len(data.get("expectedFiles")[0].keys()) == 1, ( "attaching multiple AOVs or renderable cameras to " From 185623ff702a3ddc58038a4368e69e5b3ce4cc94 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 24 Feb 2023 16:21:00 +0000 Subject: [PATCH 21/88] Set frame range with handles on review instance. --- openpype/hosts/maya/api/lib.py | 32 +++++++++++++------ .../maya/plugins/create/create_review.py | 6 ++-- 2 files changed, 25 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 4324d321dc..0d9733fcf7 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -4,7 +4,6 @@ import os import sys import platform import uuid -import math import re import json @@ -2064,13 +2063,8 @@ def set_scene_resolution(width, height, pixelAspect): cmds.setAttr("%s.pixelAspect" % control_node, pixelAspect) -def reset_frame_range(): - """Set frame range to current asset""" - - fps = convert_to_maya_fps( - float(legacy_io.Session.get("AVALON_FPS", 25)) - ) - set_scene_fps(fps) +def get_frame_range(): + """Get the current assets frame range and handles.""" # Set frame start/end project_name = legacy_io.active_project() @@ -2097,8 +2091,26 @@ def reset_frame_range(): if handle_end is None: handle_end = handles - frame_start -= int(handle_start) - frame_end += int(handle_end) + return { + "frameStart": frame_start, + "frameEnd": frame_end, + "handleStart": handle_start, + "handleEnd": handle_end + } + + +def reset_frame_range(): + """Set frame range to current asset""" + + fps = convert_to_maya_fps( + float(legacy_io.Session.get("AVALON_FPS", 25)) + ) + set_scene_fps(fps) + + frame_range = get_frame_range() + + frame_start = frame_range["frameStart"] - int(frame_range["handleStart"]) + frame_end = frame_range["frameEnd"] + int(frame_range["handleEnd"]) cmds.playbackOptions(minTime=frame_start) cmds.playbackOptions(maxTime=frame_end) diff --git a/openpype/hosts/maya/plugins/create/create_review.py b/openpype/hosts/maya/plugins/create/create_review.py index ba51ffa009..6e0bd2e4c3 100644 --- a/openpype/hosts/maya/plugins/create/create_review.py +++ b/openpype/hosts/maya/plugins/create/create_review.py @@ -28,13 +28,13 @@ class CreateReview(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateReview, self).__init__(*args, **kwargs) + data = OrderedDict(**self.data) # get basic animation data : start / end / handles / steps - data = OrderedDict(**self.data) - animation_data = lib.collect_animation_data(fps=True) - for key, value in animation_data.items(): + for key, value in lib.get_frame_range().items(): data[key] = value + data["fps"] = lib.collect_animation_data(fps=True)["fps"] data["review_width"] = self.Width data["review_height"] = self.Height data["isolate"] = self.isolate From db0a3554b62afcfc03a8a33563e20a9a935bef22 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 24 Feb 2023 16:42:01 +0000 Subject: [PATCH 22/88] Validate frame range on instance to asset. 
- frame start - frame end - handle start - handle end --- .../plugins/publish/validate_frame_range.py | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index d86925184e..dbf856a30a 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -57,6 +57,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): inst_start = int(instance.data.get("frameStartHandle")) inst_end = int(instance.data.get("frameEndHandle")) + inst_frame_start = int(instance.data.get("frameStart")) + inst_frame_end = int(instance.data.get("frameEnd")) + inst_handle_start = int(instance.data.get("handleStart")) + inst_handle_end = int(instance.data.get("handleEnd")) # basic sanity checks assert frame_start_handle <= frame_end_handle, ( @@ -69,7 +73,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): if [ef for ef in self.exclude_families if instance.data["family"] in ef]: return - if(inst_start != frame_start_handle): + if (inst_start != frame_start_handle): errors.append("Instance start frame [ {} ] doesn't " "match the one set on instance [ {} ]: " "{}/{}/{}/{} (handle/start/end/handle)".format( @@ -78,7 +82,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): handle_start, frame_start, frame_end, handle_end )) - if(inst_end != frame_end_handle): + if (inst_end != frame_end_handle): errors.append("Instance end frame [ {} ] doesn't " "match the one set on instance [ {} ]: " "{}/{}/{}/{} (handle/start/end/handle)".format( @@ -87,6 +91,19 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): handle_start, frame_start, frame_end, handle_end )) + checks = { + "frame start": (frame_start, inst_frame_start), + "frame end": (frame_end, inst_frame_end), + "handle start": (handle_start, inst_handle_start), + "handle end": (handle_end, inst_handle_end) + } + for label, values in checks.items(): + if values[0] != values[1]: + errors.append( + "{} on instance ({}) does not match with the asset " + "({}).".format(label.title(), values[1], values[0]) + ) + for e in errors: self.log.error(e) From d33aa1cc7df5eb7ae73320693b3a9b00183b3d70 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 28 Feb 2023 07:26:50 +0000 Subject: [PATCH 23/88] Better error reports. 
--- openpype/hosts/maya/plugins/publish/validate_frame_range.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index dbf856a30a..59b06874b3 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -75,7 +75,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): return if (inst_start != frame_start_handle): errors.append("Instance start frame [ {} ] doesn't " - "match the one set on instance [ {} ]: " + "match the one set on asset [ {} ]: " "{}/{}/{}/{} (handle/start/end/handle)".format( inst_start, frame_start_handle, @@ -84,7 +84,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): if (inst_end != frame_end_handle): errors.append("Instance end frame [ {} ] doesn't " - "match the one set on instance [ {} ]: " + "match the one set on asset [ {} ]: " "{}/{}/{}/{} (handle/start/end/handle)".format( inst_end, frame_end_handle, From 7bbf5608711c865f3f50c82f1fbd34ddc679982a Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 28 Feb 2023 07:42:58 +0000 Subject: [PATCH 24/88] Backwards compatibility --- .../maya/plugins/create/create_review.py | 8 +++-- .../defaults/project_settings/maya.json | 13 +++++---- .../schemas/schema_maya_create.json | 29 ++++++++++++++++--- 3 files changed, 38 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_review.py b/openpype/hosts/maya/plugins/create/create_review.py index 6e0bd2e4c3..f1b626c06b 100644 --- a/openpype/hosts/maya/plugins/create/create_review.py +++ b/openpype/hosts/maya/plugins/create/create_review.py @@ -25,13 +25,17 @@ class CreateReview(plugin.Creator): "depth peeling", "alpha cut" ] + useMayaTimeline = True def __init__(self, *args, **kwargs): super(CreateReview, self).__init__(*args, **kwargs) data = OrderedDict(**self.data) - # get basic animation data : start / end / handles / steps - for key, value in lib.get_frame_range().items(): + # Option for using Maya or asset frame range in settings. 
+ frame_range = lib.get_frame_range() + if self.useMayaTimeline: + frame_range = lib.collect_animation_data(fps=True) + for key, value in frame_range.items(): data[key] = value data["fps"] = lib.collect_animation_data(fps=True)["fps"] diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 90334a6644..dca0b95293 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -179,6 +179,13 @@ "Main" ] }, + "CreateReview": { + "enabled": true, + "defaults": [ + "Main" + ], + "useMayaTimeline": true + }, "CreateAss": { "enabled": true, "defaults": [ @@ -255,12 +262,6 @@ "Main" ] }, - "CreateReview": { - "enabled": true, - "defaults": [ - "Main" - ] - }, "CreateRig": { "enabled": true, "defaults": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 49503cce83..1598f90643 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -240,6 +240,31 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateReview", + "label": "Create Review", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "boolean", + "key": "useMayaTimeline", + "label": "Use Maya Timeline for Frame Range." + } + ] + }, { "type": "dict", "collapsible": true, @@ -398,10 +423,6 @@ "key": "CreateRenderSetup", "label": "Create Render Setup" }, - { - "key": "CreateReview", - "label": "Create Review" - }, { "key": "CreateRig", "label": "Create Rig" From db98f65b43517c1930ab3be11f56f0fa672e5c8d Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 23 Feb 2023 16:23:21 +0000 Subject: [PATCH 25/88] Fix publish pool and secondary. --- .../modules/deadline/plugins/publish/submit_publish_job.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 31df4746ba..53c09ad22f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -284,6 +284,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): args.append("--automatic-tests") # Generate the payload for Deadline submission + secondary_pool = ( + self.deadline_pool_secondary or instance.data.get("secondaryPool") + ) payload = { "JobInfo": { "Plugin": self.deadline_plugin, @@ -297,8 +300,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "Priority": priority, "Group": self.deadline_group, - "Pool": instance.data.get("primaryPool"), - "SecondaryPool": instance.data.get("secondaryPool"), + "Pool": self.deadline_pool or instance.data.get("primaryPool"), + "SecondaryPool": secondary_pool, # ensure the outputdirectory with correct slashes "OutputDirectory0": output_dir.replace("\\", "/") }, From d827ffa8fbabd6fb02dc03e123e22d69c5b87c24 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 23 Feb 2023 16:23:38 +0000 Subject: [PATCH 26/88] Use publish pool for tile jobs. 
--- .../deadline/plugins/publish/submit_maya_deadline.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 22b5c02296..15025e47f2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -419,8 +419,13 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): assembly_job_info.Name += " - Tile Assembly Job" assembly_job_info.Frames = 1 assembly_job_info.MachineLimit = 1 - assembly_job_info.Priority = instance.data.get("tile_priority", - self.tile_priority) + assembly_job_info.Priority = instance.data.get( + "tile_priority", self.tile_priority + ) + + pool = instance.context.data["project_settings"]["deadline"] + pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"] + assembly_job_info.Pool = pool or instance.data.get("primaryPool", "") assembly_plugin_info = { "CleanupTiles": 1, From 9117a0d6329c5cf47aee68bf4fd2e57dac5fff6a Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 23 Feb 2023 16:39:10 +0000 Subject: [PATCH 27/88] Documentation --- website/docs/module_deadline.md | 34 +++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/website/docs/module_deadline.md b/website/docs/module_deadline.md index c96da91909..c4b179b98d 100644 --- a/website/docs/module_deadline.md +++ b/website/docs/module_deadline.md @@ -28,16 +28,16 @@ For [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline) support you ne OpenPype integration for Deadline consists of two parts: - The `OpenPype` Deadline Plug-in -- A `GlobalJobPreLoad` Deadline Script (this gets triggered for each deadline job) +- A `GlobalJobPreLoad` Deadline Script (this gets triggered for each deadline job) The `GlobalJobPreLoad` handles populating render and publish jobs with proper environment variables using settings from the `OpenPype` Deadline Plug-in. -The `OpenPype` Deadline Plug-in must be configured to point to a valid OpenPype executable location. The executable need to be installed to +The `OpenPype` Deadline Plug-in must be configured to point to a valid OpenPype executable location. The executable need to be installed to destinations accessible by DL process. Check permissions (must be executable and accessible by Deadline process) - Enable `Tools > Super User Mode` in Deadline Monitor -- Go to `Tools > Configure Plugins...`, find `OpenPype` in the list on the left side, find location of OpenPype +- Go to `Tools > Configure Plugins...`, find `OpenPype` in the list on the left side, find location of OpenPype executable. It is recommended to use the `openpype_console` executable as it provides a bit more logging. - In case of multi OS farms, provide multiple locations, each Deadline Worker goes through the list and tries to find the first accessible @@ -45,12 +45,22 @@ executable. It is recommended to use the `openpype_console` executable as it pro ![Configure plugin](assets/deadline_configure_plugin.png) +### Pools + +The main pools can be configured at `project_settings/deadline/publish/CollectDeadlinePools/primary_pool`, which is applied to the rendering jobs. + +The dependent publishing job's pool uses `project_settings/deadline/publish/ProcessSubmittedJobOnFarm/deadline_pool`. If nothing is specified the pool will fallback to the main pool above. 
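In code terms the fallback described above is just an `or` chain over the configured publish pool and the pool collected for the render job. A minimal sketch (hypothetical helper; in the actual plugin `deadline_pool`/`deadline_pool_secondary` are attributes filled from the `ProcessSubmittedJobOnFarm` settings and `primaryPool`/`secondaryPool` live on the instance data):

```python
# Sketch only, not the plugin code: resolve the pools for the dependent
# publish job, falling back to the pools used by the render job.
def resolve_publish_pools(publish_settings, instance_data):
    pool = (
        publish_settings.get("deadline_pool")
        or instance_data.get("primaryPool")
    )
    secondary = (
        publish_settings.get("deadline_pool_secondary")
        or instance_data.get("secondaryPool")
    )
    return pool, secondary
```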
+ +:::note maya tile rendering +The logic for publishing job pool assignment applies to tiling jobs. +::: + ## Troubleshooting #### Publishing jobs fail directly in DCCs - Double check that all previously described steps were finished -- Check that `deadlinewebservice` is running on DL server +- Check that `deadlinewebservice` is running on DL server - Check that user's machine has access to deadline server on configured port #### Jobs are failing on DL side @@ -61,40 +71,40 @@ Each publishing from OpenPype consists of 2 jobs, first one is rendering, second - Jobs are failing with `OpenPype executable was not found` error - Check if OpenPype is installed on the Worker handling this job and ensure `OpenPype` Deadline Plug-in is properly [configured](#configuration) + Check if OpenPype is installed on the Worker handling this job and ensure `OpenPype` Deadline Plug-in is properly [configured](#configuration) - Publishing job is failing with `ffmpeg not installed` error - + OpenPype executable has to have access to `ffmpeg` executable, check OpenPype `Setting > General` ![FFmpeg setting](assets/ffmpeg_path.png) - Both jobs finished successfully, but there is no review on Ftrack - Make sure that you correctly set published family to be send to Ftrack. + Make sure that you correctly set published family to be send to Ftrack. ![Ftrack Family](assets/ftrack/ftrack-collect-main.png) Example: I want send to Ftrack review of rendered images from Harmony : - `Host names`: "harmony" - - `Families`: "render" + - `Families`: "render" - `Add Ftrack Family` to "Enabled" - + Make sure that you actually configured to create review for published subset in `project_settings/ftrack/publish/CollectFtrackFamily` ![Ftrack Family](assets/deadline_review.png) - Example: I want to create review for all reviewable subsets in Harmony : + Example: I want to create review for all reviewable subsets in Harmony : - Add "harmony" as a new key an ".*" as a value. - Rendering jobs are stuck in 'Queued' state or failing Make sure that your Deadline is not limiting specific jobs to be run only on specific machines. (Eg. only some machines have installed particular application.) - + Check `project_settings/deadline` - + ![Deadline group](assets/deadline_group.png) Example: I have separated machines with "Harmony" installed into "harmony" group on Deadline. I want rendering jobs published from Harmony to run only on those machines. From 6f5ba9ce2a98aed1777df57731ae02409cc474a2 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 23 Feb 2023 16:42:02 +0000 Subject: [PATCH 28/88] Docs --- website/docs/module_deadline.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/module_deadline.md b/website/docs/module_deadline.md index c4b179b98d..ab1016788d 100644 --- a/website/docs/module_deadline.md +++ b/website/docs/module_deadline.md @@ -49,7 +49,7 @@ executable. It is recommended to use the `openpype_console` executable as it pro The main pools can be configured at `project_settings/deadline/publish/CollectDeadlinePools/primary_pool`, which is applied to the rendering jobs. -The dependent publishing job's pool uses `project_settings/deadline/publish/ProcessSubmittedJobOnFarm/deadline_pool`. If nothing is specified the pool will fallback to the main pool above. +The dependent publishing job's pool uses `project_settings/deadline/publish/ProcessSubmittedJobOnFarm/deadline_pool`. If nothing is specified the pool will fallback to the primary pool above. 
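For Maya tile rendering (see the note that follows), the tile assembly job resolves its pool from the same setting. A hedged sketch of that lookup, using the nested settings path added in `submit_maya_deadline.py` earlier in this series; `instance_data` here stands in for `instance.data`:

```python
# Sketch only: pool resolution for the tile assembly job.
def resolve_tile_assembly_pool(project_settings, instance_data):
    publish_pool = (
        project_settings["deadline"]["publish"]
        ["ProcessSubmittedJobOnFarm"]["deadline_pool"]
    )
    # Fall back to the render job's primary pool when nothing is configured.
    return publish_pool or instance_data.get("primaryPool", "")
```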
:::note maya tile rendering The logic for publishing job pool assignment applies to tiling jobs. From b61f1ed1df5a5a6dcab162ffe320a4093b5efd12 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 28 Feb 2023 08:17:29 +0000 Subject: [PATCH 29/88] Validate against zero polygon mesh. --- .../plugins/publish/validate_mesh_empty.py | 54 +++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/validate_mesh_empty.py diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_empty.py b/openpype/hosts/maya/plugins/publish/validate_mesh_empty.py new file mode 100644 index 0000000000..848d66c4ae --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_empty.py @@ -0,0 +1,54 @@ +from maya import cmds + +import pyblish.api +import openpype.hosts.maya.api.action +from openpype.pipeline.publish import ( + RepairAction, + ValidateMeshOrder +) + + +class ValidateMeshEmpty(pyblish.api.InstancePlugin): + """Validate meshes have some vertices. + + Its possible to have meshes without any vertices. To replicate + this issue, delete all faces/polygons then all edges. + """ + + order = ValidateMeshOrder + hosts = ["maya"] + families = ["model"] + label = "Mesh Empty" + actions = [ + openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction + ] + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + for node in invalid: + cmds.delete(node) + + @classmethod + def get_invalid(cls, instance): + invalid = [] + + meshes = cmds.ls(instance, type="mesh", long=True) + for mesh in meshes: + num_vertices = cmds.polyEvaluate(mesh, vertex=True) + + if num_vertices == 0: + cls.log.warning( + "\"{}\" does not have any vertices.".format(mesh) + ) + invalid.append(mesh) + + return invalid + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "Meshes found in instance without any vertices: %s" % invalid + ) From 206bf55a4909e603e845d0ca5b3fc46896f1ca90 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 28 Feb 2023 08:18:16 +0000 Subject: [PATCH 30/88] Refactor `len_flattened` --- openpype/hosts/maya/api/lib.py | 31 +++++++++++++++++ .../plugins/publish/validate_mesh_has_uv.py | 32 +---------------- .../validate_mesh_vertices_have_edges.py | 34 +------------------ 3 files changed, 33 insertions(+), 64 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 0d9733fcf7..954576f02e 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3574,3 +3574,34 @@ def get_color_management_output_transform(): if preferences["output_transform_enabled"]: colorspace = preferences["output_transform"] return colorspace + + +def len_flattened(components): + """Return the length of the list as if it was flattened. + + Maya will return consecutive components as a single entry + when requesting with `maya.cmds.ls` without the `flatten` + flag. Though enabling `flatten` on a large list (e.g. millions) + will result in a slow result. This command will return the amount + of entries in a non-flattened list by parsing the result with + regex. + + Args: + components (list): The non-flattened components. + + Returns: + int: The amount of entries. 
+ + """ + assert isinstance(components, (list, tuple)) + n = 0 + + pattern = re.compile(r"\[(\d+):(\d+)\]") + for c in components: + match = pattern.search(c) + if match: + start, end = match.groups() + n += int(end) - int(start) + 1 + else: + n += 1 + return n diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py index 0eece1014e..1775bd84c6 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py @@ -1,39 +1,9 @@ -import re - from maya import cmds import pyblish.api import openpype.hosts.maya.api.action from openpype.pipeline.publish import ValidateMeshOrder - - -def len_flattened(components): - """Return the length of the list as if it was flattened. - - Maya will return consecutive components as a single entry - when requesting with `maya.cmds.ls` without the `flatten` - flag. Though enabling `flatten` on a large list (e.g. millions) - will result in a slow result. This command will return the amount - of entries in a non-flattened list by parsing the result with - regex. - - Args: - components (list): The non-flattened components. - - Returns: - int: The amount of entries. - - """ - assert isinstance(components, (list, tuple)) - n = 0 - for c in components: - match = re.search("\[([0-9]+):([0-9]+)\]", c) - if match: - start, end = match.groups() - n += int(end) - int(start) + 1 - else: - n += 1 - return n +from openpype.hosts.maya.api.lib import len_flattened class ValidateMeshHasUVs(pyblish.api.InstancePlugin): diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py index 9ac7735501..51e1ddfc7f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -1,5 +1,3 @@ -import re - from maya import cmds import pyblish.api @@ -8,37 +6,7 @@ from openpype.pipeline.publish import ( RepairAction, ValidateMeshOrder, ) - - -def len_flattened(components): - """Return the length of the list as if it was flattened. - - Maya will return consecutive components as a single entry - when requesting with `maya.cmds.ls` without the `flatten` - flag. Though enabling `flatten` on a large list (e.g. millions) - will result in a slow result. This command will return the amount - of entries in a non-flattened list by parsing the result with - regex. - - Args: - components (list): The non-flattened components. - - Returns: - int: The amount of entries. - - """ - assert isinstance(components, (list, tuple)) - n = 0 - - pattern = re.compile(r"\[(\d+):(\d+)\]") - for c in components: - match = pattern.search(c) - if match: - start, end = match.groups() - n += int(end) - int(start) + 1 - else: - n += 1 - return n +from openpype.hosts.maya.api.lib import len_flattened class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): From 63177ca7e935cd87e716229831eaf1f456825e66 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 28 Feb 2023 08:18:50 +0000 Subject: [PATCH 31/88] Only mesh empty validator should fail. 
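As a quick sanity check of the `len_flattened` helper that the previous patch relocates into `openpype/hosts/maya/api/lib.py`, a hypothetical call with made-up component strings (a range of 8 vertices plus a single vertex):

```python
# Hypothetical usage of the relocated helper; the component strings are
# illustrative only and do not require a Maya scene.
from openpype.hosts.maya.api.lib import len_flattened

components = ["pCube1.vtx[0:7]", "pCube1.vtx[9]"]
assert len_flattened(components) == 9  # 8 vertices in the range + 1 single
```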
--- .../plugins/publish/validate_mesh_has_uv.py | 9 +++++++++ .../publish/validate_mesh_non_zero_edge.py | 20 +++++++++++++++++-- .../validate_mesh_vertices_have_edges.py | 7 +++++++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py index 1775bd84c6..b7836b3e92 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_has_uv.py @@ -27,6 +27,15 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin): invalid = [] for node in cmds.ls(instance, type='mesh'): + num_vertices = cmds.polyEvaluate(node, vertex=True) + + if num_vertices == 0: + cls.log.warning( + "Skipping \"{}\", cause it does not have any " + "vertices.".format(node) + ) + continue + uv = cmds.polyEvaluate(node, uv=True) if uv == 0: diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py index 78e844d201..b49ba85648 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_non_zero_edge.py @@ -28,7 +28,10 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): """Return the invalid edges. - Also see: http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup + + Also see: + + http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup """ @@ -36,8 +39,21 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin): if not meshes: return list() + valid_meshes = [] + for mesh in meshes: + num_vertices = cmds.polyEvaluate(mesh, vertex=True) + + if num_vertices == 0: + cls.log.warning( + "Skipping \"{}\", cause it does not have any " + "vertices.".format(mesh) + ) + continue + + valid_meshes.append(mesh) + # Get all edges - edges = ['{0}.e[*]'.format(node) for node in meshes] + edges = ['{0}.e[*]'.format(node) for node in valid_meshes] # Filter by constraint on edge length invalid = lib.polyConstraint(edges, diff --git a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py index 51e1ddfc7f..d885158004 100644 --- a/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py +++ b/openpype/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -55,6 +55,13 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): for mesh in meshes: num_vertices = cmds.polyEvaluate(mesh, vertex=True) + if num_vertices == 0: + cls.log.warning( + "Skipping \"{}\", cause it does not have any " + "vertices.".format(mesh) + ) + continue + # Vertices from all edges edges = "%s.e[*]" % mesh vertices = cmds.polyListComponentConversion(edges, toVertex=True) From ec7392433cc0386feaccca6c0c3adea47281f67c Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Thu, 2 Mar 2023 17:24:41 +0000 Subject: [PATCH 32/88] Maya: Validate missing instance attributes (#4559) * Validate missing instance attributes. * Plugins docs. 
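Before the diff below, a rough sketch of the check this validator performs: compare the creator plugin's default data keys against the attributes that actually exist on the instance object set. This assumes a running Maya session; `plugin_defaults` stands in for the creator's `data` dictionary:

```python
# Sketch of the missing-attribute lookup implemented by the validator below.
from maya import cmds


def find_missing_attributes(objset, plugin_defaults):
    """Return creator data entries that are absent from the instance set."""
    return {
        key: value
        for key, value in plugin_defaults.items()
        if not cmds.objExists("{}.{}".format(objset, key))
    }
```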
--- .../maya/plugins/publish/collect_instances.py | 1 + .../publish/validate_instance_attributes.py | 60 +++++++++++++++++++ 2 files changed, 61 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/validate_instance_attributes.py diff --git a/openpype/hosts/maya/plugins/publish/collect_instances.py b/openpype/hosts/maya/plugins/publish/collect_instances.py index 6c6819f0a2..c594626569 100644 --- a/openpype/hosts/maya/plugins/publish/collect_instances.py +++ b/openpype/hosts/maya/plugins/publish/collect_instances.py @@ -137,6 +137,7 @@ class CollectInstances(pyblish.api.ContextPlugin): # Create the instance instance = context.create_instance(objset) instance[:] = members_hierarchy + instance.data["objset"] = objset # Store the exact members of the object set instance.data["setMembers"] = members diff --git a/openpype/hosts/maya/plugins/publish/validate_instance_attributes.py b/openpype/hosts/maya/plugins/publish/validate_instance_attributes.py new file mode 100644 index 0000000000..f870c9f8c4 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/validate_instance_attributes.py @@ -0,0 +1,60 @@ +from maya import cmds + +import pyblish.api +from openpype.pipeline.publish import ( + ValidateContentsOrder, PublishValidationError, RepairAction +) +from openpype.pipeline import discover_legacy_creator_plugins +from openpype.hosts.maya.api.lib import imprint + + +class ValidateInstanceAttributes(pyblish.api.InstancePlugin): + """Validate Instance Attributes. + + New attributes can be introduced as new features come in. Old instances + will need to be updated with these attributes for the documentation to make + sense, and users do not have to recreate the instances. + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["*"] + label = "Instance Attributes" + plugins_by_family = { + p.family: p for p in discover_legacy_creator_plugins() + } + actions = [RepairAction] + + @classmethod + def get_missing_attributes(self, instance): + plugin = self.plugins_by_family[instance.data["family"]] + subset = instance.data["subset"] + asset = instance.data["asset"] + objset = instance.data["objset"] + + missing_attributes = {} + for key, value in plugin(subset, asset).data.items(): + if not cmds.objExists("{}.{}".format(objset, key)): + missing_attributes[key] = value + + return missing_attributes + + def process(self, instance): + objset = instance.data.get("objset") + if objset is None: + self.log.debug( + "Skipping {} because no objectset found.".format(instance) + ) + return + + missing_attributes = self.get_missing_attributes(instance) + if missing_attributes: + raise PublishValidationError( + "Missing attributes on {}:\n{}".format( + objset, missing_attributes + ) + ) + + @classmethod + def repair(cls, instance): + imprint(instance.data["objset"], cls.get_missing_attributes(instance)) From f3baace6682bebfbbbae273b54c4aaf38477f4a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Fri, 3 Mar 2023 13:17:46 +0100 Subject: [PATCH 33/88] Update openpype/pipeline/publish/publish_plugins.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Fabià Serra Arrizabalaga --- openpype/pipeline/publish/publish_plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 0142919e76..7da61fec5e 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py 
@@ -298,7 +298,7 @@ class ColormanagedPyblishPluginMixin(object): It also contains a method, set_representation_colorspace, which sets colorspace data to the representation. The allowed file extensions are listed in the allowed_ext variable. - he method first checks if the file extension is in + The method first checks if the file extension is in the list of allowed extensions. If it is, it then gets the colorspace settings from the host context and gets a matching colorspace from rules. Finally, it infuses this From 0c517a12a618076fdd3fc043fb1ce80d7e1f3327 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 3 Mar 2023 14:07:20 +0100 Subject: [PATCH 34/88] Nuke: fix the order of plugin to be after anatomy data collector also convert anatomy data with deepcopy --- openpype/hosts/nuke/plugins/publish/collect_writes.py | 2 +- openpype/pipeline/publish/publish_plugins.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py index 858fa79a4b..304b3d8f32 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -10,7 +10,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, publish.ColormanagedPyblishPluginMixin): """Collect all write nodes.""" - order = pyblish.api.CollectorOrder - 0.48 + order = pyblish.api.CollectorOrder + 0.0021 label = "Collect Writes" hosts = ["nuke", "nukeassist"] families = ["render", "prerender", "image"] diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 7da61fec5e..2df98221ba 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +from copy import deepcopy import inspect from abc import ABCMeta from pprint import pformat @@ -323,7 +324,7 @@ class ColormanagedPyblishPluginMixin(object): project_name = context.data["projectName"] host_name = context.data["hostName"] - anatomy_data = context.data["anatomyData"] + anatomy_data = deepcopy(context.data["anatomyData"]) project_settings_ = context.data["project_settings"] config_data = get_imageio_config( From f2311c686638dd1e9d646333b5dd482129024869 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 28 Feb 2023 09:12:51 +0000 Subject: [PATCH 35/88] Fixes - missing platform extraction from settings - map function should be list comprehension - code cosmetics --- .../maya/plugins/publish/validate_model_name.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_model_name.py b/openpype/hosts/maya/plugins/publish/validate_model_name.py index 2dec9ba267..0e7adc640f 100644 --- a/openpype/hosts/maya/plugins/publish/validate_model_name.py +++ b/openpype/hosts/maya/plugins/publish/validate_model_name.py @@ -2,9 +2,11 @@ """Validate model nodes names.""" import os import re -from maya import cmds -import pyblish.api +import platform +from maya import cmds + +import pyblish.api from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder import openpype.hosts.maya.api.action @@ -44,7 +46,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): if not cmds.ls(child, transforms=True): return False return True - except: + except Exception: return False invalid = [] @@ -94,9 +96,10 @@ class ValidateModelName(pyblish.api.InstancePlugin): # load shader list file as utf-8 shaders = [] if 
not use_db: - if cls.material_file: - if os.path.isfile(cls.material_file): - shader_file = open(cls.material_file, "r") + material_file = cls.material_file[platform.system().lower()] + if material_file: + if os.path.isfile(material_file): + shader_file = open(material_file, "r") shaders = shader_file.readlines() shader_file.close() else: @@ -113,7 +116,7 @@ class ValidateModelName(pyblish.api.InstancePlugin): shader_file.close() # strip line endings from list - shaders = map(lambda s: s.rstrip(), shaders) + shaders = [s.rstrip() for s in shaders if s.rstrip()] # compile regex for testing names regex = cls.regex From 6f0cd7fc2f04f8ff13e7a756f932ab7068c29629 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 31 Jan 2023 17:30:27 +0100 Subject: [PATCH 36/88] add visualParent None to Shots and Assets Without this, tray-publisher throws an error and stops working. --- openpype/modules/kitsu/utils/update_op_with_zou.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 2d14b38bc4..5af3a61e81 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -389,6 +389,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): "data": { "root_of": r, "tasks": {}, + "visualParent": None, }, } for r in ["Assets", "Shots"] From 76273b7376c5b65423beb3a195c4a6376f0453be Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Feb 2023 13:38:22 +0100 Subject: [PATCH 37/88] lowercase URL path to match in Kitsu Without this fix the menu-dropdown wouldn't show the correct current page in Kitsu --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index c95079e042..4793d60fc3 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -100,7 +100,7 @@ class ShowInKitsu(LauncherAction): kitsu_url = kitsu_url[:-len("/api")] sub_url = f"/productions/{project_id}" - asset_type_url = "Shots" if asset_type in shots_url else "Assets" + asset_type_url = "shots" if asset_type in shots_url else "assets" if task_id: # Go to task page From 9795e7c9105777f0ae0c4d20376d5204b23b1b00 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Feb 2023 15:52:56 +0100 Subject: [PATCH 38/88] Populate items with correct data from Kitsu or project defaults If data doesn't exist in Kitsu, use Projects default data. 
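The fallback order the diffs below implement is: value stored on the Kitsu entity, then the Kitsu (gazu) project, then the OpenPype project document. A standalone sketch of that precedence with a hypothetical helper name; the real code inlines this per attribute (fps, resolution, handles, clip in/out):

```python
# Hypothetical helper illustrating the entity -> Kitsu project -> OpenPype
# project fallback used when populating asset data.
def value_with_fallback(item_data, gazu_project, project_doc,
                        kitsu_key, op_key, default=None):
    value = item_data.get(kitsu_key)
    if value is None:
        value = gazu_project.get(kitsu_key)
    if value is None:
        value = project_doc["data"].get(op_key, default)
    return value

# e.g. fps:
# fps = value_with_fallback(item_data, gazu_project, project_doc,
#                           "fps", "fps", 25.0)
```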
--- .../modules/kitsu/utils/update_op_with_zou.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 5af3a61e81..554d90f7a9 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -69,6 +69,7 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str): def update_op_assets( dbcon: AvalonMongoDB, + gazu_project: dict, project_doc: dict, entities_list: List[dict], asset_doc_ids: Dict[str, dict], @@ -119,21 +120,20 @@ def update_op_assets( # because of zou's legacy design frames_duration = int(item.get("nb_frames", 0)) except (TypeError, ValueError): - frames_duration = 0 + frames_duration = None # Frame out, fallback on frame_in + duration or project's value or 1001 frame_out = item_data.pop("frame_out", None) if not frame_out: - frame_out = frame_in + frames_duration - try: - frame_out = int(frame_out) - except (TypeError, ValueError): - frame_out = 1001 + if frames_duration: + frame_out = frame_in + frames_duration + else: + frame_out = project_doc["data"].get("frameEnd", 1001) item_data["frameEnd"] = frame_out # Fps, fallback to project's value or default value (25.0) try: - fps = float(item_data.get("fps", project_doc["data"].get("fps"))) + fps = float(item_data.get("fps")) except (TypeError, ValueError): - fps = 25.0 + fps = float(gazu_project.get("fps", project_doc["data"].get("fps", 25))) item_data["fps"] = fps # Tasks @@ -424,7 +424,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): [ UpdateOne({"_id": id}, update) for id, update in update_op_assets( - dbcon, project_doc, all_entities, zou_ids_and_asset_docs + dbcon, project, project_doc, all_entities, zou_ids_and_asset_docs ) ] ) From a005d108292821e9a062da8f3cef32f113ad8eca Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Feb 2023 16:07:45 +0100 Subject: [PATCH 39/88] Add Resolution and Pixel Aspect to each item Without this info eg. Fusion would throw an error and stop working. --- openpype/modules/kitsu/utils/update_op_with_zou.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 554d90f7a9..cbf7afb413 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -135,6 +135,17 @@ def update_op_assets( except (TypeError, ValueError): fps = float(gazu_project.get("fps", project_doc["data"].get("fps", 25))) item_data["fps"] = fps + # Resolution, fall back to project default + match_res = re.match(r"(\d+)x(\d+)", item_data.get("resolution", gazu_project.get("resolution"))) + if match_res: + item_data["resolutionWidth"] = int(match_res.group(1)) + item_data["resolutionHeight"] = int(match_res.group(2)) + else: + item_data["resolutionWidth"] = project_doc["data"].get("resolutionWidth") + item_data["resolutionHeight"] = project_doc["data"].get("resolutionHeight") + # Properties that doesn't fully exist in Kitsu. 
Guessing the property name + # Pixel Aspect Ratio + item_data["pixelAspect"] = item_data.get("pixel_aspect", project_doc["data"].get("pixelAspect")) # Tasks tasks_list = [] From 3b4cdb13db5c3ae89b5c3e3bd6a1b0e73731fe3d Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Feb 2023 20:13:00 +0100 Subject: [PATCH 40/88] Add parents key to assets and shots Without this you can't open the project in Tray Publisher --- openpype/modules/kitsu/utils/update_op_with_zou.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index cbf7afb413..1b191ccb1e 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -401,6 +401,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): "root_of": r, "tasks": {}, "visualParent": None, + "parents": [], }, } for r in ["Assets", "Shots"] From 5dee2b8ff6b8eedbaaf68af164b316874f413059 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Feb 2023 17:15:55 +0100 Subject: [PATCH 41/88] Add missing attributes to each asset Without them, Tray Publisher errors out. --- openpype/modules/kitsu/utils/update_op_with_zou.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 1b191ccb1e..d88198eace 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -146,6 +146,14 @@ def update_op_assets( # Properties that doesn't fully exist in Kitsu. Guessing the property name # Pixel Aspect Ratio item_data["pixelAspect"] = item_data.get("pixel_aspect", project_doc["data"].get("pixelAspect")) + # Handle Start + item_data["handleStart"] = item_data.get("handle_start", project_doc["data"].get("handleStart")) + # Handle End + item_data["handleEnd"] = item_data.get("handle_end", project_doc["data"].get("handleEnd")) + # Clip In + item_data["clipIn"] = item_data.get("clip_in", project_doc["data"].get("clipIn")) + # Clip Out + item_data["clipOut"] = item_data.get("clip_out", project_doc["data"].get("clipOut")) # Tasks tasks_list = [] From 785825751a0e5dacdc6e7bf2825e8360456d906c Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Feb 2023 17:18:16 +0100 Subject: [PATCH 42/88] Add render to families to make sure a task exists to process --- openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py | 2 +- .../modules/kitsu/plugins/publish/integrate_kitsu_review.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py index ea98e0b7cc..b801e0e4d4 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py @@ -8,7 +8,7 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder label = "Kitsu Note and Status" - # families = ["kitsu"] + families = ["render", "kitsu"] set_status_note = False note_status_shortname = "wfa" diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index e5e6439439..9e9eaadc27 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -8,7 +8,7 @@ class 
IntegrateKitsuReview(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.01 label = "Kitsu Review" - # families = ["kitsu"] + families = ["render", "kitsu"] optional = True def process(self, instance): From 27c8a1f36099347f777c538d72c67eb5dcb9dc80 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Feb 2023 19:31:06 +0100 Subject: [PATCH 43/88] Fixed so correct review file gets uploaded to Kitsu --- .../modules/kitsu/plugins/publish/integrate_kitsu_review.py | 2 +- .../projects_schema/schemas/schema_representation_tags.json | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index 9e9eaadc27..94897b2553 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -27,7 +27,7 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): # Add review representations as preview of comment for representation in instance.data.get("representations", []): # Skip if not tagged as review - if "review" not in representation.get("tags", []): + if "kitsureview" not in representation.get("tags", []): continue review_path = representation.get("published_path") diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json index a4b28f47bc..7046952eef 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_representation_tags.json @@ -16,6 +16,9 @@ { "shotgridreview": "Add review to Shotgrid" }, + { + "kitsureview": "Add review to Kitsu" + }, { "delete": "Delete output" }, From 72270005edc887e8e708a8d078f8a3d7ac1ec287 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Feb 2023 23:33:57 +0100 Subject: [PATCH 44/88] Update Kitsu plugin to work without assetEntry --- .../plugins/publish/collect_kitsu_entities.py | 83 +++++++++++-------- 1 file changed, 49 insertions(+), 34 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index c9e78b59eb..38c67898ef 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -4,6 +4,11 @@ import os import gazu import pyblish.api +from openpype.client import ( + get_projects, + get_project, + get_assets, +) class CollectKitsuEntities(pyblish.api.ContextPlugin): """Collect Kitsu entities according to the current context""" @@ -12,20 +17,34 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): label = "Kitsu entities" def process(self, context): + + # Get all needed names + project_name = context.data.get("projectName") + asset_name = context.data.get("asset") + task_name = context.data.get("task") + # If asset and task name doesn't exist in context, look in instance + for instance in context: + if not asset_name: + asset_name = instance.data.get("asset") + if not task_name: + task_name = instance.data.get("task") - asset_data = context.data["assetEntity"]["data"] - zou_asset_data = asset_data.get("zou") + # Get all assets of the local project + asset_docs = { + asset_doc["name"]: asset_doc + for asset_doc in get_assets(project_name) + } + + # Get asset object + 
asset = asset_docs.get(asset_name) + if not asset: + raise AssertionError("{} not found in DB".format(asset_name)) + + zou_asset_data = asset["data"].get("zou") if not zou_asset_data: raise AssertionError("Zou asset data not found in OpenPype!") self.log.debug("Collected zou asset data: {}".format(zou_asset_data)) - zou_task_data = asset_data["tasks"][os.environ["AVALON_TASK"]].get( - "zou" - ) - if not zou_task_data: - self.log.warning("Zou task data not found in OpenPype!") - self.log.debug("Collected zou task data: {}".format(zou_task_data)) - kitsu_project = gazu.project.get_project(zou_asset_data["project_id"]) if not kitsu_project: raise AssertionError("Project not found in kitsu!") @@ -37,37 +56,33 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"]) else: kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"]) - if not kitsu_entity: raise AssertionError("{} not found in kitsu!".format(entity_type)) - context.data["kitsu_entity"] = kitsu_entity self.log.debug( "Collect kitsu {}: {}".format(entity_type, kitsu_entity) ) - if zou_task_data: - kitsu_task = gazu.task.get_task(zou_task_data["id"]) - if not kitsu_task: - raise AssertionError("Task not found in kitsu!") - context.data["kitsu_task"] = kitsu_task - self.log.debug("Collect kitsu task: {}".format(kitsu_task)) - - else: - kitsu_task_type = gazu.task.get_task_type_by_name( - os.environ["AVALON_TASK"] - ) - if not kitsu_task_type: - raise AssertionError( - "Task type {} not found in Kitsu!".format( - os.environ["AVALON_TASK"] + if task_name: + zou_task_data = asset["data"]["tasks"][task_name].get("zou") + self.log.debug("Collected zou task data: {}".format(zou_task_data)) + if zou_task_data: + kitsu_task = gazu.task.get_task(zou_task_data["id"]) + if not kitsu_task: + raise AssertionError("Task not found in kitsu!") + context.data["kitsu_task"] = kitsu_task + self.log.debug("Collect kitsu task: {}".format(kitsu_task)) + else: + kitsu_task_type = gazu.task.get_task_type_by_name(task_name) + if not kitsu_task_type: + raise AssertionError( + "Task type {} not found in Kitsu!".format(task_name) ) - ) - kitsu_task = gazu.task.get_task_by_name( - kitsu_entity, kitsu_task_type - ) - if not kitsu_task: - raise AssertionError("Task not found in kitsu!") - context.data["kitsu_task"] = kitsu_task - self.log.debug("Collect kitsu task: {}".format(kitsu_task)) + kitsu_task = gazu.task.get_task_by_name( + kitsu_entity, kitsu_task_type + ) + if not kitsu_task: + raise AssertionError("Task not found in kitsu!") + context.data["kitsu_task"] = kitsu_task + self.log.debug("Collect kitsu task: {}".format(kitsu_task)) \ No newline at end of file From 62c111e9486e532255d213d0508f00281f40e86c Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 3 Feb 2023 00:55:01 +0100 Subject: [PATCH 45/88] Add kitsureview to default burnin tags --- openpype/settings/defaults/project_settings/global.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index a5e2d25a88..aad17d54da 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -139,7 +139,8 @@ "ext": "mp4", "tags": [ "burnin", - "ftrackreview" + "ftrackreview", + "kitsureview" ], "burnins": [], "ffmpeg_args": { From dc2b519fd5689389a482c4772289af8ba9154176 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 3 Feb 2023 01:07:48 +0100 
Subject: [PATCH 46/88] Fixed hound-bots comments --- .../plugins/publish/collect_kitsu_entities.py | 25 ++++++++--------- .../modules/kitsu/utils/update_op_with_zou.py | 27 ++++++++++++------- 2 files changed, 29 insertions(+), 23 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index 38c67898ef..92c8c1823d 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -5,11 +5,10 @@ import gazu import pyblish.api from openpype.client import ( - get_projects, - get_project, get_assets, ) + class CollectKitsuEntities(pyblish.api.ContextPlugin): """Collect Kitsu entities according to the current context""" @@ -17,7 +16,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): label = "Kitsu entities" def process(self, context): - + # Get all needed names project_name = context.data.get("projectName") asset_name = context.data.get("asset") @@ -38,18 +37,18 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): # Get asset object asset = asset_docs.get(asset_name) if not asset: - raise AssertionError("{} not found in DB".format(asset_name)) + raise AssertionError(f"{asset_name} not found in DB") zou_asset_data = asset["data"].get("zou") if not zou_asset_data: raise AssertionError("Zou asset data not found in OpenPype!") - self.log.debug("Collected zou asset data: {}".format(zou_asset_data)) + self.log.debug(f"Collected zou asset data: {zou_asset_data}") kitsu_project = gazu.project.get_project(zou_asset_data["project_id"]) if not kitsu_project: raise AssertionError("Project not found in kitsu!") context.data["kitsu_project"] = kitsu_project - self.log.debug("Collect kitsu project: {}".format(kitsu_project)) + self.log.debug(f"Collect kitsu project: {kitsu_project}") entity_type = zou_asset_data["type"] if entity_type == "Shot": @@ -57,26 +56,24 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): else: kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"]) if not kitsu_entity: - raise AssertionError("{} not found in kitsu!".format(entity_type)) + raise AssertionError(f"{entity_type} not found in kitsu!") context.data["kitsu_entity"] = kitsu_entity - self.log.debug( - "Collect kitsu {}: {}".format(entity_type, kitsu_entity) - ) + self.log.debug(f"Collect kitsu {entity_type}: {kitsu_entity}") if task_name: zou_task_data = asset["data"]["tasks"][task_name].get("zou") - self.log.debug("Collected zou task data: {}".format(zou_task_data)) + self.log.debug(f"Collected zou task data: {zou_task_data}") if zou_task_data: kitsu_task = gazu.task.get_task(zou_task_data["id"]) if not kitsu_task: raise AssertionError("Task not found in kitsu!") context.data["kitsu_task"] = kitsu_task - self.log.debug("Collect kitsu task: {}".format(kitsu_task)) + self.log.debug(f"Collect kitsu task: {kitsu_task}") else: kitsu_task_type = gazu.task.get_task_type_by_name(task_name) if not kitsu_task_type: raise AssertionError( - "Task type {} not found in Kitsu!".format(task_name) + f"Task type {task_name} not found in Kitsu!" 
) kitsu_task = gazu.task.get_task_by_name( @@ -85,4 +82,4 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): if not kitsu_task: raise AssertionError("Task not found in kitsu!") context.data["kitsu_task"] = kitsu_task - self.log.debug("Collect kitsu task: {}".format(kitsu_task)) \ No newline at end of file + self.log.debug(f"Collect kitsu task: {kitsu_task}") diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index d88198eace..a079fd5529 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -133,27 +133,36 @@ def update_op_assets( try: fps = float(item_data.get("fps")) except (TypeError, ValueError): - fps = float(gazu_project.get("fps", project_doc["data"].get("fps", 25))) + fps = float(gazu_project.get( + "fps", project_doc["data"].get("fps", 25))) item_data["fps"] = fps # Resolution, fall back to project default - match_res = re.match(r"(\d+)x(\d+)", item_data.get("resolution", gazu_project.get("resolution"))) + match_res = re.match( + r"(\d+)x(\d+)", item_data.get("resolution", gazu_project.get("resolution"))) if match_res: item_data["resolutionWidth"] = int(match_res.group(1)) item_data["resolutionHeight"] = int(match_res.group(2)) else: - item_data["resolutionWidth"] = project_doc["data"].get("resolutionWidth") - item_data["resolutionHeight"] = project_doc["data"].get("resolutionHeight") + item_data["resolutionWidth"] = project_doc["data"].get( + "resolutionWidth") + item_data["resolutionHeight"] = project_doc["data"].get( + "resolutionHeight") # Properties that doesn't fully exist in Kitsu. Guessing the property name # Pixel Aspect Ratio - item_data["pixelAspect"] = item_data.get("pixel_aspect", project_doc["data"].get("pixelAspect")) + item_data["pixelAspect"] = item_data.get( + "pixel_aspect", project_doc["data"].get("pixelAspect")) # Handle Start - item_data["handleStart"] = item_data.get("handle_start", project_doc["data"].get("handleStart")) + item_data["handleStart"] = item_data.get( + "handle_start", project_doc["data"].get("handleStart")) # Handle End - item_data["handleEnd"] = item_data.get("handle_end", project_doc["data"].get("handleEnd")) + item_data["handleEnd"] = item_data.get( + "handle_end", project_doc["data"].get("handleEnd")) # Clip In - item_data["clipIn"] = item_data.get("clip_in", project_doc["data"].get("clipIn")) + item_data["clipIn"] = item_data.get( + "clip_in", project_doc["data"].get("clipIn")) # Clip Out - item_data["clipOut"] = item_data.get("clip_out", project_doc["data"].get("clipOut")) + item_data["clipOut"] = item_data.get( + "clip_out", project_doc["data"].get("clipOut")) # Tasks tasks_list = [] From bb61d43c27286bda8b319d5e49da8166c1472ae8 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 3 Feb 2023 11:52:37 +0100 Subject: [PATCH 47/88] Shortened length of line 141 --- openpype/modules/kitsu/utils/update_op_with_zou.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index a079fd5529..15e88947a1 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -138,7 +138,9 @@ def update_op_assets( item_data["fps"] = fps # Resolution, fall back to project default match_res = re.match( - r"(\d+)x(\d+)", item_data.get("resolution", gazu_project.get("resolution"))) + r"(\d+)x(\d+)", + item_data.get("resolution", 
gazu_project.get("resolution")) + ) if match_res: item_data["resolutionWidth"] = int(match_res.group(1)) item_data["resolutionHeight"] = int(match_res.group(2)) From 47caa760305dcb46639d94ae7c345d8682e724fc Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 10 Feb 2023 13:32:03 +0100 Subject: [PATCH 48/88] Updated Kitsu Sync module to fully work with all events Manually tried all events, added logging for events and cleaned up the code some. --- openpype/modules/kitsu/utils/sync_service.py | 318 +++++++++++++----- .../modules/kitsu/utils/update_op_with_zou.py | 17 +- 2 files changed, 248 insertions(+), 87 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 237746bea0..e371c1a9bb 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -1,3 +1,15 @@ +""" +Bugs: + * Error when adding task type to anything that isn't Shot or Assets + * Assets don't get added under an episode if TV show + * Assets added under Main Pack throws error. Can't get the name of Main Pack? + +Features ToDo: + * Select in settings what types you wish to sync + * Print what's updated on entity-update + * Add listener for Edits +""" + import os import threading @@ -5,6 +17,7 @@ import gazu from openpype.client import get_project, get_assets, get_asset_by_name from openpype.pipeline import AvalonMongoDB +from openpype.lib import Logger from .credentials import validate_credentials from .update_op_with_zou import ( create_op_asset, @@ -14,6 +27,8 @@ from .update_op_with_zou import ( update_op_assets, ) +log = Logger.get_logger(__name__) + class Listener: """Host Kitsu listener.""" @@ -33,7 +48,7 @@ class Listener: self.dbcon = AvalonMongoDB() self.dbcon.install() - gazu.client.set_host(os.environ["KITSU_SERVER"]) + gazu.client.set_host(os.environ['KITSU_SERVER']) # Authenticate if not validate_credentials(login, password): @@ -42,7 +57,7 @@ class Listener: ) gazu.set_event_host( - os.environ["KITSU_SERVER"].replace("api", "socket.io") + os.environ['KITSU_SERVER'].replace("api", "socket.io") ) self.event_client = gazu.events.init() @@ -103,6 +118,8 @@ class Listener: ) def start(self): + """Start listening for events.""" + log.info("Listening to Kitsu events...") gazu.events.run_client(self.event_client) # == Project == @@ -112,36 +129,49 @@ class Listener: # Use update process to avoid duplicating code self._update_project(data) + # Print message + ## Happens in write_project_to_op() + def _update_project(self, data): """Update project into OP DB.""" # Get project entity - project = gazu.project.get_project(data["project_id"]) - project_name = project["name"] + project = gazu.project.get_project(data['project_id']) update_project = write_project_to_op(project, self.dbcon) # Write into DB if update_project: - self.dbcon.Session["AVALON_PROJECT"] = project_name + self.dbcon.Session['AVALON_PROJECT'] = get_kitsu_project_name( + data['project_id']) self.dbcon.bulk_write([update_project]) def _delete_project(self, data): """Delete project.""" - project_name = get_kitsu_project_name(data["project_id"]) + collections = self.dbcon.database.list_collection_names() + project_name = None + for collection in collections: + post = self.dbcon.database[collection].find_one( + {"data.zou_id": data['project_id']}) + if post: + project_name = post['name'] + break - # Delete project collection - self.dbcon.database[project_name].drop() + if project_name: + # Delete project collection + 
self.dbcon.database[project_name].drop() + + # Print message + log.info(f"Project deleted: {project_name}") # == Asset == - def _new_asset(self, data): """Create new asset into OP DB.""" # Get project entity - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) - # Get gazu entity - asset = gazu.asset.get_asset(data["asset_id"]) + # Get asset entity + asset = gazu.asset.get_asset(data['asset_id']) # Insert doc in DB self.dbcon.insert_one(create_op_asset(asset)) @@ -149,27 +179,43 @@ class Listener: # Update self._update_asset(data) + # Print message + episode = None + ep_id = asset['episode_id'] + if ep_id and ep_id != "": + episode = gazu.asset.get_episode(ep_id) + + msg = "Asset created: " + msg = msg + f"{asset['project_name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{asset['asset_type_name']}_" + msg = msg + f"{asset['name']}" + log.info(msg) + def _update_asset(self, data): """Update asset into OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - asset = gazu.asset.get_asset(data["asset_id"]) + asset = gazu.asset.get_asset(data['asset_id']) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc["data"]["zou"]["id"]: asset_doc + asset_doc['data']['zou']['id']: asset_doc for asset_doc in get_assets(project_name) - if asset_doc["data"].get("zou", {}).get("id") + if asset_doc['data'].get("zou", {}).get("id") } - zou_ids_and_asset_docs[asset["project_id"]] = project_doc + zou_ids_and_asset_docs[asset['project_id']] = project_doc + gazu_project = gazu.project.get_project(asset['project_id']) # Update update_op_result = update_op_assets( - self.dbcon, project_doc, [asset], zou_ids_and_asset_docs + self.dbcon, gazu_project, project_doc, + [asset], zou_ids_and_asset_docs ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -177,21 +223,37 @@ class Listener: def _delete_asset(self, data): """Delete asset of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) - # Delete - self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data["asset_id"]} - ) + asset = self.dbcon.find_one({"data.zou.id": data['asset_id']}) + if asset: + # Delete + self.dbcon.delete_one( + {"type": "asset", "data.zou.id": data['asset_id']} + ) + + # Print message + episode = None + ep_id = asset['data']['zou']['episode_id'] + if ep_id and ep_id != "": + episode = gazu.asset.get_episode(ep_id) + + msg = "Asset deleted: " + msg = msg + f"{asset['data']['zou']['project_name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{asset['data']['zou']['asset_type_name']}_" + msg = msg + f"'{asset['name']}" + log.info(msg) # == Episode == def _new_episode(self, data): """Create new episode into OP DB.""" # Get project entity - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) # Get gazu entity - episode = gazu.shot.get_episode(data["episode_id"]) + episode = gazu.shot.get_episode(data['episode_id']) # Insert doc in DB self.dbcon.insert_one(create_op_asset(episode)) @@ -199,27 +261,34 @@ class Listener: # Update self._update_episode(data) + # Print message + msg = "Episode created: " + msg = msg + f"{episode['project_name']} - " + msg = msg + f"{episode['name']}" + def _update_episode(self, data): 
"""Update episode into OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - episode = gazu.shot.get_episode(data["episode_id"]) + episode = gazu.shot.get_episode(data['episode_id']) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc["data"]["zou"]["id"]: asset_doc + asset_doc['data']['zou']['id']: asset_doc for asset_doc in get_assets(project_name) - if asset_doc["data"].get("zou", {}).get("id") + if asset_doc['data'].get("zou", {}).get("id") } - zou_ids_and_asset_docs[episode["project_id"]] = project_doc + zou_ids_and_asset_docs[episode['project_id']] = project_doc + gazu_project = gazu.project.get_project(episode['project_id']) # Update update_op_result = update_op_assets( - self.dbcon, project_doc, [episode], zou_ids_and_asset_docs + self.dbcon, gazu_project, project_doc, [ + episode], zou_ids_and_asset_docs ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -227,22 +296,31 @@ class Listener: def _delete_episode(self, data): """Delete shot of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) - print("delete episode") # TODO check bugfix + set_op_project(self.dbcon, data['project_id']) - # Delete - self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data["episode_id"]} - ) + episode = self.dbcon.find_one({"data.zou.id": data['episode_id']}) + if episode: + # Delete + self.dbcon.delete_one( + {"type": "asset", "data.zou.id": data['episode_id']} + ) + + # Print message + project = gazu.project.get_project( + episode['data']['zou']['project_id']) + + msg = "Episode deleted: " + msg = msg + f"{project['name']} - " + msg = msg + f"{episode['name']}" # == Sequence == def _new_sequence(self, data): """Create new sequnce into OP DB.""" # Get project entity - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) # Get gazu entity - sequence = gazu.shot.get_sequence(data["sequence_id"]) + sequence = gazu.shot.get_sequence(data['sequence_id']) # Insert doc in DB self.dbcon.insert_one(create_op_asset(sequence)) @@ -250,27 +328,43 @@ class Listener: # Update self._update_sequence(data) + # Print message + + episode = None + ep_id = sequence['episode_id'] + if ep_id and ep_id != "": + episode = gazu.asset.get_episode(ep_id) + + msg = "Sequence created: " + msg = msg + f"{sequence['project_name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{sequence['name']}" + log.info(msg) + def _update_sequence(self, data): """Update sequence into OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - sequence = gazu.shot.get_sequence(data["sequence_id"]) + sequence = gazu.shot.get_sequence(data['sequence_id']) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc["data"]["zou"]["id"]: asset_doc + asset_doc['data']['zou']['id']: asset_doc for asset_doc in get_assets(project_name) - if asset_doc["data"].get("zou", {}).get("id") + if asset_doc['data'].get("zou", {}).get("id") } - zou_ids_and_asset_docs[sequence["project_id"]] = project_doc + zou_ids_and_asset_docs[sequence['project_id']] = project_doc + gazu_project = gazu.project.get_project(sequence['project_id']) # Update update_op_result = 
update_op_assets( - self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs + self.dbcon, gazu_project, project_doc, + [sequence], zou_ids_and_asset_docs ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -278,22 +372,30 @@ class Listener: def _delete_sequence(self, data): """Delete sequence of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) - print("delete sequence") # TODO check bugfix + set_op_project(self.dbcon, data['project_id']) + sequence = self.dbcon.find_one({"data.zou.id": data['sequence_id']}) + if sequence: + # Delete + self.dbcon.delete_one( + {"type": "asset", "data.zou.id": data['sequence_id']} + ) - # Delete - self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data["sequence_id"]} - ) + # Print message + gazu_project = gazu.project.get_project( + sequence['data']['zou']['project_id']) + msg = f"Sequence deleted: " + msg = msg + f"{gazu_project['name']} - " + msg = msg + f"{sequence['name']}" + log.info(msg) # == Shot == def _new_shot(self, data): """Create new shot into OP DB.""" # Get project entity - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) # Get gazu entity - shot = gazu.shot.get_shot(data["shot_id"]) + shot = gazu.shot.get_shot(data['shot_id']) # Insert doc in DB self.dbcon.insert_one(create_op_asset(shot)) @@ -301,89 +403,151 @@ class Listener: # Update self._update_shot(data) + # Print message + episode = None + if shot['episode_id'] and shot['episode_id'] != "": + episode = gazu.asset.get_episode(shot['episode_id']) + + msg = "Shot created: " + msg = msg + f"{shot['project_name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{shot['sequence_name']}_" + msg = msg + f"{shot['name']}" + log.info(msg) + def _update_shot(self, data): """Update shot into OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - shot = gazu.shot.get_shot(data["shot_id"]) + shot = gazu.shot.get_shot(data['shot_id']) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc["data"]["zou"]["id"]: asset_doc + asset_doc['data']['zou']['id']: asset_doc for asset_doc in get_assets(project_name) - if asset_doc["data"].get("zou", {}).get("id") + if asset_doc['data'].get("zou", {}).get("id") } - zou_ids_and_asset_docs[shot["project_id"]] = project_doc + zou_ids_and_asset_docs[shot['project_id']] = project_doc + gazu_project = gazu.project.get_project(shot['project_id']) # Update update_op_result = update_op_assets( - self.dbcon, project_doc, [shot], zou_ids_and_asset_docs + self.dbcon, gazu_project, project_doc, + [shot], zou_ids_and_asset_docs ) + if update_op_result: asset_doc_id, asset_update = update_op_result[0] self.dbcon.update_one({"_id": asset_doc_id}, asset_update) def _delete_shot(self, data): """Delete shot of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) + shot = self.dbcon.find_one({"data.zou.id": data['shot_id']}) - # Delete - self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data["shot_id"]} - ) + if shot: + # Delete + self.dbcon.delete_one( + {"type": "asset", "data.zou.id": data['shot_id']} + ) + + # Print message + gazu_project = gazu.project.get_project( + shot['data']['zou']['project_id']) + + msg = "Shot deleted: " + msg = msg + f"{gazu_project['name']} - " + msg = msg + 
f"{shot['name']}" + log.info(msg) # == Task == def _new_task(self, data): """Create new task into OP DB.""" # Get project entity - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) project_name = self.dbcon.active_project() # Get gazu entity - task = gazu.task.get_task(data["task_id"]) + task = gazu.task.get_task(data['task_id']) # Find asset doc - parent_name = task["entity"]["name"] + episode = None + ep_id = task['episode_id'] + if ep_id and ep_id != "": + episode = gazu.asset.get_episode(ep_id) + + parent_name = "" + if episode is not None: + parent_name = episode['name'] + "_" + parent_name = parent_name + \ + task['sequence']['name'] + "_" + task['entity']['name'] asset_doc = get_asset_by_name(project_name, parent_name) # Update asset tasks with new one - asset_tasks = asset_doc["data"].get("tasks") - task_type_name = task["task_type"]["name"] + asset_tasks = asset_doc['data'].get("tasks") + task_type_name = task['task_type']['name'] asset_tasks[task_type_name] = {"type": task_type_name, "zou": task} self.dbcon.update_one( - {"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}} + {"_id": asset_doc['_id']}, {"$set": {"data.tasks": asset_tasks}} ) + # Print message + msg = "Task created: " + msg = msg + f"{task['project']['name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{task['sequence']['name']}_" + msg = msg + f"{task['entity']['name']} - " + msg = msg + f"{task['task_type']['name']}" + log.info(msg) + def _update_task(self, data): """Update task into OP DB.""" # TODO is it necessary? - pass def _delete_task(self, data): """Delete task of OP DB.""" - set_op_project(self.dbcon, data["project_id"]) + set_op_project(self.dbcon, data['project_id']) project_name = self.dbcon.active_project() # Find asset doc asset_docs = list(get_assets(project_name)) for doc in asset_docs: # Match task - for name, task in doc["data"]["tasks"].items(): - if task.get("zou") and data["task_id"] == task["zou"]["id"]: + for name, task in doc['data']['tasks'].items(): + if task.get("zou") and data['task_id'] == task['zou']['id']: # Pop task - asset_tasks = doc["data"].get("tasks", {}) + asset_tasks = doc['data'].get("tasks", {}) asset_tasks.pop(name) # Delete task in DB self.dbcon.update_one( - {"_id": doc["_id"]}, + {"_id": doc['_id']}, {"$set": {"data.tasks": asset_tasks}}, ) + + # Print message + shot = gazu.shot.get_shot(task['zou']['entity_id']) + + episode = None + ep_id = shot['episode_id'] + if ep_id and ep_id != "": + episode = gazu.asset.get_episode(ep_id) + + msg = "Task deleted: " + msg = msg + f"{shot['project_name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{shot['sequence_name']}_" + msg = msg + f"{shot['name']} - " + msg = msg + f"{task['type']}" + log.info(msg) return @@ -396,7 +560,7 @@ def start_listeners(login: str, password: str): """ # Refresh token every week def refresh_token_every_week(): - print("Refreshing token...") + log.info("Refreshing token...") gazu.refresh_token() threading.Timer(7 * 3600 * 24, refresh_token_every_week).start() diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 15e88947a1..0a59724393 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -5,10 +5,6 @@ from typing import Dict, List from pymongo import DeleteOne, UpdateOne import gazu -from gazu.task import ( - all_tasks_for_asset, - 
all_tasks_for_shot, -) from openpype.client import ( get_project, @@ -18,7 +14,6 @@ from openpype.client import ( create_project, ) from openpype.pipeline import AvalonMongoDB -from openpype.settings import get_project_settings from openpype.modules.kitsu.utils.credentials import validate_credentials from openpype.lib import Logger @@ -85,8 +80,10 @@ def update_op_assets( Returns: List[Dict[str, dict]]: List of (doc_id, update_dict) tuples """ + if not project_doc: + return + project_name = project_doc["name"] - project_module_settings = get_project_settings(project_name)["kitsu"] assets_with_update = [] for item in entities_list: @@ -170,9 +167,9 @@ def update_op_assets( tasks_list = [] item_type = item["type"] if item_type == "Asset": - tasks_list = all_tasks_for_asset(item) + tasks_list = gazu.task.all_tasks_for_asset(item) elif item_type == "Shot": - tasks_list = all_tasks_for_shot(item) + tasks_list = gazu.task.all_tasks_for_shot(item) item_data["tasks"] = { t["task_type_name"]: {"type": t["task_type_name"], "zou": t} for t in tasks_list @@ -207,7 +204,7 @@ def update_op_assets( # Root parent folder if exist visual_parent_doc_id = ( - asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None + asset_doc_ids[parent_zou_id].get("_id") if parent_zou_id else None ) if visual_parent_doc_id is None: # Find root folder doc ("Assets" or "Shots") @@ -282,7 +279,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_name = project["name"] project_doc = get_project(project_name) if not project_doc: - log.info(f"Creating project '{project_name}'") + log.info(f"Project created: {project_name}") project_doc = create_project(project_name, project_name) # Project data and tasks From c44c5c9ddbada1264691a32c78cf1765b0b9bce1 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 10 Feb 2023 14:08:33 +0100 Subject: [PATCH 49/88] Get asset and task names from instance only --- .../kitsu/plugins/publish/collect_kitsu_entities.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index 92c8c1823d..5499b1782a 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -19,14 +19,12 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): # Get all needed names project_name = context.data.get("projectName") - asset_name = context.data.get("asset") - task_name = context.data.get("task") + asset_name = None + task_name = None # If asset and task name doesn't exist in context, look in instance for instance in context: - if not asset_name: - asset_name = instance.data.get("asset") - if not task_name: - task_name = instance.data.get("task") + asset_name = instance.data.get("asset") + task_name = instance.data.get("task") # Get all assets of the local project asset_docs = { From 4bc67437b61326540a2a62f141af5169296ee051 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 10 Feb 2023 14:36:17 +0100 Subject: [PATCH 50/88] Fixed line too long and too many '#' for comments --- openpype/modules/kitsu/utils/sync_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index e371c1a9bb..00c8c4eafa 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -2,7 +2,7 @@ 
Bugs: * Error when adding task type to anything that isn't Shot or Assets * Assets don't get added under an episode if TV show - * Assets added under Main Pack throws error. Can't get the name of Main Pack? + * Assets added under Main Pack throws error. No Main Pack name in dict Features ToDo: * Select in settings what types you wish to sync @@ -130,7 +130,7 @@ class Listener: self._update_project(data) # Print message - ## Happens in write_project_to_op() + # - Happens in write_project_to_op() def _update_project(self, data): """Update project into OP DB.""" From 8652dab47898e30cbdeb9c3ed9ced2f841c4ec4c Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 13:14:14 +0100 Subject: [PATCH 51/88] Fixed 'instance' code --- .../plugins/publish/collect_kitsu_entities.py | 104 ++++++++---------- .../plugins/publish/integrate_kitsu_note.py | 48 ++++---- .../plugins/publish/integrate_kitsu_review.py | 6 +- 3 files changed, 73 insertions(+), 85 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index 5499b1782a..fe6854218d 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -1,13 +1,7 @@ # -*- coding: utf-8 -*- -import os - import gazu import pyblish.api -from openpype.client import ( - get_assets, -) - class CollectKitsuEntities(pyblish.api.ContextPlugin): """Collect Kitsu entities according to the current context""" @@ -17,67 +11,61 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): def process(self, context): - # Get all needed names - project_name = context.data.get("projectName") - asset_name = None - task_name = None - # If asset and task name doesn't exist in context, look in instance + kitsu_project = None + + kitsu_entities_by_id = {} for instance in context: - asset_name = instance.data.get("asset") + asset_doc = instance.data.get("assetEntity") task_name = instance.data.get("task") + if not asset_doc: + continue - # Get all assets of the local project - asset_docs = { - asset_doc["name"]: asset_doc - for asset_doc in get_assets(project_name) - } + zou_asset_data = asset_doc["data"].get("zou") + if not zou_asset_data: + raise AssertionError("Zou asset data not found in OpenPype!") - # Get asset object - asset = asset_docs.get(asset_name) - if not asset: - raise AssertionError(f"{asset_name} not found in DB") + if kitsu_project is None: + kitsu_project = gazu.project.get_project( + zou_asset_data["project_id"]) + if not kitsu_project: + raise AssertionError("Project not found in kitsu!") - zou_asset_data = asset["data"].get("zou") - if not zou_asset_data: - raise AssertionError("Zou asset data not found in OpenPype!") - self.log.debug(f"Collected zou asset data: {zou_asset_data}") + entity_type = zou_asset_data["type"] + kitsu_id = zou_asset_data["id"] + kitsu_entity = kitsu_entities_by_id.get(kitsu_id) + if not kitsu_entity: + if entity_type == "Shot": + kitsu_entity = gazu.shot.get_shot(kitsu_id) + else: + kitsu_entity = gazu.asset.get_asset(kitsu_id) + kitsu_entities_by_id[kitsu_id] = kitsu_entity - kitsu_project = gazu.project.get_project(zou_asset_data["project_id"]) - if not kitsu_project: - raise AssertionError("Project not found in kitsu!") - context.data["kitsu_project"] = kitsu_project - self.log.debug(f"Collect kitsu project: {kitsu_project}") + if not kitsu_entity: + raise AssertionError( + "{} not found in kitsu!".format(entity_type)) + 
instance.data["kitsu_entity"] = kitsu_entity - entity_type = zou_asset_data["type"] - if entity_type == "Shot": - kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"]) - else: - kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"]) - if not kitsu_entity: - raise AssertionError(f"{entity_type} not found in kitsu!") - context.data["kitsu_entity"] = kitsu_entity - self.log.debug(f"Collect kitsu {entity_type}: {kitsu_entity}") - - if task_name: - zou_task_data = asset["data"]["tasks"][task_name].get("zou") - self.log.debug(f"Collected zou task data: {zou_task_data}") - if zou_task_data: - kitsu_task = gazu.task.get_task(zou_task_data["id"]) - if not kitsu_task: - raise AssertionError("Task not found in kitsu!") - context.data["kitsu_task"] = kitsu_task - self.log.debug(f"Collect kitsu task: {kitsu_task}") - else: + if not task_name: + continue + zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou") + self.log.debug( + "Collected zou task data: {}".format(zou_task_data)) + if not zou_task_data: kitsu_task_type = gazu.task.get_task_type_by_name(task_name) if not kitsu_task_type: raise AssertionError( - f"Task type {task_name} not found in Kitsu!" + "Task type {} not found in Kitsu!".format(task_name) ) + continue + kitsu_task_id = zou_task_data["id"] + kitsu_task = kitsu_entities_by_id.get(kitsu_task_id) + if not kitsu_task: + kitsu_task = gazu.task.get_task(zou_task_data["id"]) + kitsu_entities_by_id[kitsu_task_id] = kitsu_task - kitsu_task = gazu.task.get_task_by_name( - kitsu_entity, kitsu_task_type - ) - if not kitsu_task: - raise AssertionError("Task not found in kitsu!") - context.data["kitsu_task"] = kitsu_task - self.log.debug(f"Collect kitsu task: {kitsu_task}") + if not kitsu_task: + raise AssertionError("Task not found in kitsu!") + instance.data["kitsu_task"] = kitsu_task + self.log.debug("Collect kitsu task: {}".format(kitsu_task)) + + context.data["kitsu_project"] = kitsu_project diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py index b801e0e4d4..aeec2481e0 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py @@ -21,30 +21,32 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): self.log.debug("Comment is `{}`".format(publish_comment)) - # Get note status, by default uses the task status for the note - # if it is not specified in the configuration - note_status = context.data["kitsu_task"]["task_status_id"] - if self.set_status_note: - kitsu_status = gazu.task.get_task_status_by_short_name( - self.note_status_shortname - ) - if kitsu_status: - note_status = kitsu_status - self.log.info("Note Kitsu status: {}".format(note_status)) - else: - self.log.info( - "Cannot find {} status. The status will not be " - "changed!".format(self.note_status_shortname) + for instance in context: + + # Get note status, by default uses the task status for the note + # if it is not specified in the configuration + note_status = instance.data["kitsu_task"]["task_status"]["id"] + + if self.set_status_note: + kitsu_status = gazu.task.get_task_status_by_short_name( + self.note_status_shortname ) + if kitsu_status: + note_status = kitsu_status + self.log.info("Note Kitsu status: {}".format(note_status)) + else: + self.log.info( + "Cannot find {} status. 
The status will not be " + "changed!".format(self.note_status_shortname) + ) - # Add comment to kitsu task - self.log.debug( - "Add new note in taks id {}".format( - context.data["kitsu_task"]["id"] + # Add comment to kitsu task + task = instance.data["kitsu_task"]["id"] + self.log.debug( + "Add new note in taks id {}".format(task) + ) + kitsu_comment = gazu.task.add_comment( + task, note_status, comment=publish_comment ) - ) - kitsu_comment = gazu.task.add_comment( - context.data["kitsu_task"], note_status, comment=publish_comment - ) - context.data["kitsu_comment"] = kitsu_comment + instance.data["kitsu_comment"] = kitsu_comment diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index 94897b2553..d8f6cb7ac8 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -13,9 +13,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): def process(self, instance): - context = instance.context - task = context.data["kitsu_task"] - comment = context.data.get("kitsu_comment") + task = instance.data["kitsu_task"]["id"] + comment = instance.data["kitsu_comment"]["id"] # Check comment has been created if not comment: @@ -29,7 +28,6 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): # Skip if not tagged as review if "kitsureview" not in representation.get("tags", []): continue - review_path = representation.get("published_path") self.log.debug("Found review at: {}".format(review_path)) From 290709705e2811c1a87a5f18b859b5880bd09772 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:02:31 +0100 Subject: [PATCH 52/88] Changed logging from f-string to .format() --- .../modules/kitsu/actions/launcher_show_in_kitsu.py | 12 ++++++------ openpype/modules/kitsu/utils/credentials.py | 2 +- openpype/modules/kitsu/utils/sync_service.py | 2 +- openpype/modules/kitsu/utils/update_op_with_zou.py | 4 ++-- openpype/modules/kitsu/utils/update_zou_with_op.py | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index 4793d60fc3..e3676afc4c 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -32,12 +32,12 @@ class ShowInKitsu(LauncherAction): project = get_project(project_name=project_name, fields=["data.zou_id"]) if not project: - raise RuntimeError(f"Project {project_name} not found.") + raise RuntimeError("Project {} not found.".format(project_name)) project_zou_id = project["data"].get("zou_id") if not project_zou_id: - raise RuntimeError(f"Project {project_name} has no " - f"connected kitsu id.") + raise RuntimeError( + "Project {} has no connected kitsu id.".format(project_name)) asset_zou_name = None asset_zou_id = None @@ -48,7 +48,7 @@ class ShowInKitsu(LauncherAction): asset_zou_name = asset_name asset_fields = ["data.zou.id", "data.zou.type"] if task_name: - asset_fields.append(f"data.tasks.{task_name}.zou.id") + asset_fields.append("data.tasks.{}.zou.id".format(task_name)) asset = get_asset_by_name(project_name, asset_name=asset_name, @@ -67,7 +67,7 @@ class ShowInKitsu(LauncherAction): task_data = asset["data"]["tasks"][task_name] task_zou_data = task_data.get("zou", {}) if not task_zou_data: - self.log.debug(f"No zou task data for task: {task_name}") + self.log.debug("No zou 
task data for task: {}".format(task_name)) task_zou_id = task_zou_data["id"] # Define URL @@ -78,7 +78,7 @@ class ShowInKitsu(LauncherAction): task_id=task_zou_id) # Open URL in webbrowser - self.log.info(f"Opening URL: {url}") + self.log.info("Opening URL: {}".format(url)) webbrowser.open(url, # Try in new tab new=2) diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index adcfb07cd5..1731e1ca4f 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -54,7 +54,7 @@ def validate_host(kitsu_url: str) -> bool: if gazu.client.host_is_valid(): return True else: - raise gazu.exception.HostException(f"Host '{kitsu_url}' is invalid.") + raise gazu.exception.HostException("Host '{}' is invalid.".format(kitsu_url)) def clear_credentials(): diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 00c8c4eafa..d4ef5ce63a 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -162,7 +162,7 @@ class Listener: self.dbcon.database[project_name].drop() # Print message - log.info(f"Project deleted: {project_name}") + log.info("Project deleted: {}".format(project_name)) # == Asset == def _new_asset(self, data): diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 0a59724393..15e6dd70d9 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -279,7 +279,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_name = project["name"] project_doc = get_project(project_name) if not project_doc: - log.info(f"Project created: {project_name}") + log.info("Project created: {}".format(project_name)) project_doc = create_project(project_name, project_name) # Project data and tasks @@ -373,7 +373,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): if not project: project = gazu.project.get_project_by_name(project["name"]) - log.info(f"Synchronizing {project['name']}...") + log.info("Synchronizing {}...".format(project['name'])) # Get all assets from zou all_assets = gazu.asset.all_assets_for_project(project) diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index 39baf31b93..b13c2dd4c6 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -61,7 +61,7 @@ def sync_zou_from_op_project( project_doc = get_project(project_name) # Get all entities from zou - print(f"Synchronizing {project_name}...") + print("Synchronizing {}...".format(project_name)) zou_project = gazu.project.get_project_by_name(project_name) # Create project @@ -258,7 +258,7 @@ def sync_zou_from_op_project( for asset_doc in asset_docs.values() } for entity_id in deleted_entities: - gazu.raw.delete(f"data/entities/{entity_id}") + gazu.raw.delete("data/entities/{}".format(entity_id)) # Write into DB if bulk_writes: From 793af30caae9782ab9cb328f7b546231f0c727df Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:02:53 +0100 Subject: [PATCH 53/88] Changed AssertionError to ValueError --- .../kitsu/plugins/publish/collect_kitsu_entities.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py 
b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index fe6854218d..dc7048cf2a 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -22,13 +22,13 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): zou_asset_data = asset_doc["data"].get("zou") if not zou_asset_data: - raise AssertionError("Zou asset data not found in OpenPype!") + raise ValueError("Zou asset data not found in OpenPype!") if kitsu_project is None: kitsu_project = gazu.project.get_project( zou_asset_data["project_id"]) if not kitsu_project: - raise AssertionError("Project not found in kitsu!") + raise ValueError("Project not found in kitsu!") entity_type = zou_asset_data["type"] kitsu_id = zou_asset_data["id"] @@ -41,7 +41,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entities_by_id[kitsu_id] = kitsu_entity if not kitsu_entity: - raise AssertionError( + raise ValueError( "{} not found in kitsu!".format(entity_type)) instance.data["kitsu_entity"] = kitsu_entity @@ -53,7 +53,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): if not zou_task_data: kitsu_task_type = gazu.task.get_task_type_by_name(task_name) if not kitsu_task_type: - raise AssertionError( + raise ValueError( "Task type {} not found in Kitsu!".format(task_name) ) continue @@ -64,7 +64,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entities_by_id[kitsu_task_id] = kitsu_task if not kitsu_task: - raise AssertionError("Task not found in kitsu!") + raise ValueError("Task not found in kitsu!") instance.data["kitsu_task"] = kitsu_task self.log.debug("Collect kitsu task: {}".format(kitsu_task)) From 7e6c47967f42f26fd6d0f56c33a14a0e08e77906 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:04:05 +0100 Subject: [PATCH 54/88] Get episode_id using get() as ep_id might not always exist --- openpype/modules/kitsu/utils/sync_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index d4ef5ce63a..a449bf8c06 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -477,7 +477,7 @@ class Listener: # Find asset doc episode = None - ep_id = task['episode_id'] + ep_id = task.get('episode_id') if ep_id and ep_id != "": episode = gazu.asset.get_episode(ep_id) From 1045ee0c1ed5bb10c3c3d8f4e17f399a5f33c754 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:04:55 +0100 Subject: [PATCH 55/88] Check if asset_doc exist before processing it --- openpype/modules/kitsu/utils/sync_service.py | 34 ++++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index a449bf8c06..eed259cda6 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -487,25 +487,25 @@ class Listener: parent_name = parent_name + \ task['sequence']['name'] + "_" + task['entity']['name'] - asset_doc = get_asset_by_name(project_name, parent_name) - # Update asset tasks with new one - asset_tasks = asset_doc['data'].get("tasks") - task_type_name = task['task_type']['name'] - asset_tasks[task_type_name] = {"type": task_type_name, "zou": task} - self.dbcon.update_one( - {"_id": asset_doc['_id']}, {"$set": {"data.tasks": asset_tasks}} - ) + asset_doc = get_asset_by_name(project_name, 
parent_name) + if asset_doc: + asset_tasks = asset_doc['data'].get("tasks") + task_type_name = task['task_type']['name'] + asset_tasks[task_type_name] = {"type": task_type_name, "zou": task} + self.dbcon.update_one( + {"_id": asset_doc['_id']}, {"$set": {"data.tasks": asset_tasks}} + ) - # Print message - msg = "Task created: " - msg = msg + f"{task['project']['name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{task['sequence']['name']}_" - msg = msg + f"{task['entity']['name']} - " - msg = msg + f"{task['task_type']['name']}" - log.info(msg) + # Print message + msg = "Task created: " + msg = msg + f"{task['project']['name']} - " + if episode is not None: + msg = msg + f"{episode['name']}_" + msg = msg + f"{task['sequence']['name']}_" + msg = msg + f"{task['entity']['name']} - " + msg = msg + f"{task['task_type']['name']}" + log.info(msg) def _update_task(self, data): """Update task into OP DB.""" From ef698b4aface5ceff0ba1016a242a318853eaa2a Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:11:07 +0100 Subject: [PATCH 56/88] Fixed 1 extra frame at frameEnd Same as #4466 --- openpype/modules/kitsu/utils/update_op_with_zou.py | 6 +++--- openpype/modules/kitsu/utils/update_zou_with_op.py | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 15e6dd70d9..93d0d5e3fb 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -111,18 +111,18 @@ def update_op_assets( except (TypeError, ValueError): frame_in = 1001 item_data["frameStart"] = frame_in - # Frames duration, fallback on 0 + # Frames duration, fallback on 1 try: # NOTE nb_frames is stored directly in item # because of zou's legacy design - frames_duration = int(item.get("nb_frames", 0)) + frames_duration = int(item.get("nb_frames", 1)) except (TypeError, ValueError): frames_duration = None # Frame out, fallback on frame_in + duration or project's value or 1001 frame_out = item_data.pop("frame_out", None) if not frame_out: if frames_duration: - frame_out = frame_in + frames_duration + frame_out = frame_in + frames_duration - 1 else: frame_out = project_doc["data"].get("frameEnd", 1001) item_data["frameEnd"] = frame_out diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index b13c2dd4c6..b1a9b8b82c 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -174,7 +174,8 @@ def sync_zou_from_op_project( doc["name"], frame_in=doc["data"]["frameStart"], frame_out=doc["data"]["frameEnd"], - nb_frames=doc["data"]["frameEnd"] - doc["data"]["frameStart"], + nb_frames=( + doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1), ) elif match.group(2): # Sequence @@ -229,7 +230,7 @@ def sync_zou_from_op_project( "frame_in": frame_in, "frame_out": frame_out, }, - "nb_frames": frame_out - frame_in, + "nb_frames": frame_out - frame_in + 1, } ) entity = gazu.raw.update("entities", zou_id, entity_data) From c4f1a1f452d8e0f70d6a629db7925642d3acbc8d Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:13:51 +0100 Subject: [PATCH 57/88] Fixed hound's "line too long" comments --- openpype/modules/kitsu/actions/launcher_show_in_kitsu.py | 3 ++- openpype/modules/kitsu/utils/credentials.py | 3 ++- openpype/modules/kitsu/utils/sync_service.py | 3 ++- 3 files changed, 
6 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index e3676afc4c..11224f6e52 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -67,7 +67,8 @@ class ShowInKitsu(LauncherAction): task_data = asset["data"]["tasks"][task_name] task_zou_data = task_data.get("zou", {}) if not task_zou_data: - self.log.debug("No zou task data for task: {}".format(task_name)) + self.log.debug( + "No zou task data for task: {}".format(task_name)) task_zou_id = task_zou_data["id"] # Define URL diff --git a/openpype/modules/kitsu/utils/credentials.py b/openpype/modules/kitsu/utils/credentials.py index 1731e1ca4f..941343cc8d 100644 --- a/openpype/modules/kitsu/utils/credentials.py +++ b/openpype/modules/kitsu/utils/credentials.py @@ -54,7 +54,8 @@ def validate_host(kitsu_url: str) -> bool: if gazu.client.host_is_valid(): return True else: - raise gazu.exception.HostException("Host '{}' is invalid.".format(kitsu_url)) + raise gazu.exception.HostException( + "Host '{}' is invalid.".format(kitsu_url)) def clear_credentials(): diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index eed259cda6..498c8de71e 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -494,7 +494,8 @@ class Listener: task_type_name = task['task_type']['name'] asset_tasks[task_type_name] = {"type": task_type_name, "zou": task} self.dbcon.update_one( - {"_id": asset_doc['_id']}, {"$set": {"data.tasks": asset_tasks}} + {"_id": asset_doc['_id']}, + {"$set": {"data.tasks": asset_tasks}} ) # Print message From 07ac3d8d4db54b111fd0117bed4a5cbc5c9b19d7 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 15:58:49 +0100 Subject: [PATCH 58/88] If no task in instance, continue fix fore https://github.com/ynput/OpenPype/pull/4425#discussion_r1108582918 --- .../modules/kitsu/plugins/publish/integrate_kitsu_note.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py index aeec2481e0..54fb6a4678 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py @@ -22,10 +22,13 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): self.log.debug("Comment is `{}`".format(publish_comment)) for instance in context: + kitsu_task = instance.data.get("kitsu_task") + if kitsu_task is None: + continue # Get note status, by default uses the task status for the note # if it is not specified in the configuration - note_status = instance.data["kitsu_task"]["task_status"]["id"] + note_status = kitsu_task["task_status"]["id"] if self.set_status_note: kitsu_status = gazu.task.get_task_status_by_short_name( @@ -41,7 +44,7 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): ) # Add comment to kitsu task - task = instance.data["kitsu_task"]["id"] + task = kitsu_task["id"] self.log.debug( "Add new note in taks id {}".format(task) ) From 915d11040493e1e59d2389e9d5f86f678ef4b9ca Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 16 Feb 2023 16:02:53 +0100 Subject: [PATCH 59/88] Set frame_out to frame_in if no duration exists fix for https://github.com/ynput/OpenPype/pull/4425#discussion_r1108593566 --- 
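The frameEnd fixes in commits 56/88 and 59/88 above both come down to the same inclusive-range arithmetic: a shot of nb_frames frames starting at frame_in ends on frame_in + nb_frames - 1, and converting back to a Kitsu duration adds the 1 again; when no duration is known, the end falls back to the project value or to frame_in itself. A minimal sketch of that rule, reusing the diff's own field names (the helper name and the sample values are illustrative only, not part of the patches):

    def frame_range(frame_in, nb_frames=None, fallback_end=None):
        # Inclusive end frame: N frames starting at frame_in end at
        # frame_in + N - 1. Without a duration, fall back to the
        # project's frameEnd or, failing that, to frame_in itself.
        if nb_frames:
            return frame_in + nb_frames - 1
        return fallback_end if fallback_end is not None else frame_in

    assert frame_range(1001, 24) == 1024   # frames 1001..1024 = 24 frames
    assert frame_range(1001) == 1001       # no duration and no project end
    # Inverse used when writing back to Kitsu: nb_frames = frame_out - frame_in + 1
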
openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 93d0d5e3fb..265c3638cd 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -124,7 +124,7 @@ def update_op_assets( if frames_duration: frame_out = frame_in + frames_duration - 1 else: - frame_out = project_doc["data"].get("frameEnd", 1001) + frame_out = project_doc["data"].get("frameEnd", frame_in) item_data["frameEnd"] = frame_out # Fps, fallback to project's value or default value (25.0) try: From 4bcef4406803f0210ed3a741ad99de8aa56c80a7 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Sat, 18 Feb 2023 20:52:20 +0100 Subject: [PATCH 60/88] Fixed hound's max-length note --- openpype/modules/kitsu/utils/update_op_with_zou.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 265c3638cd..898cf076c8 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -146,7 +146,8 @@ def update_op_assets( "resolutionWidth") item_data["resolutionHeight"] = project_doc["data"].get( "resolutionHeight") - # Properties that doesn't fully exist in Kitsu. Guessing the property name + # Properties that doesn't fully exist in Kitsu. + # Guessing those property names below: # Pixel Aspect Ratio item_data["pixelAspect"] = item_data.get( "pixel_aspect", project_doc["data"].get("pixelAspect")) @@ -452,7 +453,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): [ UpdateOne({"_id": id}, update) for id, update in update_op_assets( - dbcon, project, project_doc, all_entities, zou_ids_and_asset_docs + dbcon, project, project_doc, + all_entities, zou_ids_and_asset_docs ) ] ) From 3fc2180e51357cc74a82a76f2fcd21fa11752ecc Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:06:52 +0100 Subject: [PATCH 61/88] Fixed all quotes types so they now match --- openpype/modules/kitsu/utils/sync_service.py | 133 ++++++++++--------- 1 file changed, 67 insertions(+), 66 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 498c8de71e..1af0b6edc4 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -48,16 +48,16 @@ class Listener: self.dbcon = AvalonMongoDB() self.dbcon.install() - gazu.client.set_host(os.environ['KITSU_SERVER']) + gazu.client.set_host(os.environ["KITSU_SERVER"]) # Authenticate if not validate_credentials(login, password): raise gazu.exception.AuthFailedException( - f"Kitsu authentication failed for login: '{login}'..." 
+ 'Kitsu authentication failed for login: "{}"...'.format(login) ) gazu.set_event_host( - os.environ['KITSU_SERVER'].replace("api", "socket.io") + os.environ["KITSU_SERVER"].replace("api", "socket.io") ) self.event_client = gazu.events.init() @@ -135,14 +135,14 @@ class Listener: def _update_project(self, data): """Update project into OP DB.""" # Get project entity - project = gazu.project.get_project(data['project_id']) + project = gazu.project.get_project(data["project_id"]) update_project = write_project_to_op(project, self.dbcon) # Write into DB if update_project: - self.dbcon.Session['AVALON_PROJECT'] = get_kitsu_project_name( - data['project_id']) + self.dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name( + data["project_id"]) self.dbcon.bulk_write([update_project]) def _delete_project(self, data): @@ -168,10 +168,10 @@ class Listener: def _new_asset(self, data): """Create new asset into OP DB.""" # Get project entity - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) # Get asset entity - asset = gazu.asset.get_asset(data['asset_id']) + asset = gazu.asset.get_asset(data["asset_id"]) # Insert doc in DB self.dbcon.insert_one(create_op_asset(asset)) @@ -181,7 +181,7 @@ class Listener: # Print message episode = None - ep_id = asset['episode_id'] + ep_id = asset.get("episode_id") if ep_id and ep_id != "": episode = gazu.asset.get_episode(ep_id) @@ -195,22 +195,22 @@ class Listener: def _update_asset(self, data): """Update asset into OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - asset = gazu.asset.get_asset(data['asset_id']) + asset = gazu.asset.get_asset(data["asset_id"]) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc['data']['zou']['id']: asset_doc + asset_doc["data"]["zou"]["id"]: asset_doc for asset_doc in get_assets(project_name) - if asset_doc['data'].get("zou", {}).get("id") + if asset_doc["data"].get("zou", {}).get("id") } - zou_ids_and_asset_docs[asset['project_id']] = project_doc - gazu_project = gazu.project.get_project(asset['project_id']) + zou_ids_and_asset_docs[asset["project_id"]] = project_doc + gazu_project = gazu.project.get_project(asset["project_id"]) # Update update_op_result = update_op_assets( @@ -223,18 +223,18 @@ class Listener: def _delete_asset(self, data): """Delete asset of OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) - asset = self.dbcon.find_one({"data.zou.id": data['asset_id']}) + asset = self.dbcon.find_one({"data.zou.id": data["asset_id"]}) if asset: # Delete self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data['asset_id']} + {"type": "asset", "data.zou.id": data["asset_id"]} ) # Print message episode = None - ep_id = asset['data']['zou']['episode_id'] + ep_id = asset["data"]["zou"].get("episode_id") if ep_id and ep_id != "": episode = gazu.asset.get_episode(ep_id) @@ -250,10 +250,10 @@ class Listener: def _new_episode(self, data): """Create new episode into OP DB.""" # Get project entity - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) # Get gazu entity - episode = gazu.shot.get_episode(data['episode_id']) + ep = gazu.shot.get_episode(data["episode_id"]) # Insert doc in DB self.dbcon.insert_one(create_op_asset(episode)) @@ -268,22 +268,22 @@ class Listener: def 
_update_episode(self, data): """Update episode into OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - episode = gazu.shot.get_episode(data['episode_id']) + ep = gazu.shot.get_episode(data["episode_id"]) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc['data']['zou']['id']: asset_doc + asset_doc["data"]["zou"]["id"]: asset_doc for asset_doc in get_assets(project_name) - if asset_doc['data'].get("zou", {}).get("id") + if asset_doc["data"].get("zou", {}).get("id") } - zou_ids_and_asset_docs[episode['project_id']] = project_doc - gazu_project = gazu.project.get_project(episode['project_id']) + zou_ids_and_asset_docs[ep["project_id"]] = project_doc + gazu_project = gazu.project.get_project(ep["project_id"]) # Update update_op_result = update_op_assets( @@ -296,7 +296,7 @@ class Listener: def _delete_episode(self, data): """Delete shot of OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) episode = self.dbcon.find_one({"data.zou.id": data['episode_id']}) if episode: @@ -317,10 +317,10 @@ class Listener: def _new_sequence(self, data): """Create new sequnce into OP DB.""" # Get project entity - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) # Get gazu entity - sequence = gazu.shot.get_sequence(data['sequence_id']) + sequence = gazu.shot.get_sequence(data["sequence_id"]) # Insert doc in DB self.dbcon.insert_one(create_op_asset(sequence)) @@ -331,7 +331,7 @@ class Listener: # Print message episode = None - ep_id = sequence['episode_id'] + ep_id = sequence.get("episode_id") if ep_id and ep_id != "": episode = gazu.asset.get_episode(ep_id) @@ -344,22 +344,22 @@ class Listener: def _update_sequence(self, data): """Update sequence into OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - sequence = gazu.shot.get_sequence(data['sequence_id']) + sequence = gazu.shot.get_sequence(data["sequence_id"]) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc['data']['zou']['id']: asset_doc + asset_doc["data"]["zou"]["id"]: asset_doc for asset_doc in get_assets(project_name) - if asset_doc['data'].get("zou", {}).get("id") + if asset_doc["data"].get("zou", {}).get("id") } - zou_ids_and_asset_docs[sequence['project_id']] = project_doc - gazu_project = gazu.project.get_project(sequence['project_id']) + zou_ids_and_asset_docs[sequence["project_id"]] = project_doc + gazu_project = gazu.project.get_project(sequence["project_id"]) # Update update_op_result = update_op_assets( @@ -372,15 +372,16 @@ class Listener: def _delete_sequence(self, data): """Delete sequence of OP DB.""" - set_op_project(self.dbcon, data['project_id']) - sequence = self.dbcon.find_one({"data.zou.id": data['sequence_id']}) + set_op_project(self.dbcon, data["project_id"]) + sequence = self.dbcon.find_one({"data.zou.id": data["sequence_id"]}) if sequence: # Delete self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data['sequence_id']} + {"type": "asset", "data.zou.id": data["sequence_id"]} ) # Print message + ep_id = sequence["data"]["zou"].get("episode_id") gazu_project = gazu.project.get_project( sequence['data']['zou']['project_id']) 
msg = f"Sequence deleted: " @@ -392,10 +393,10 @@ class Listener: def _new_shot(self, data): """Create new shot into OP DB.""" # Get project entity - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) # Get gazu entity - shot = gazu.shot.get_shot(data['shot_id']) + shot = gazu.shot.get_shot(data["shot_id"]) # Insert doc in DB self.dbcon.insert_one(create_op_asset(shot)) @@ -405,7 +406,7 @@ class Listener: # Print message episode = None - if shot['episode_id'] and shot['episode_id'] != "": + if shot["episode_id"] and shot["episode_id"] != "": episode = gazu.asset.get_episode(shot['episode_id']) msg = "Shot created: " @@ -418,21 +419,21 @@ class Listener: def _update_shot(self, data): """Update shot into OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) project_name = self.dbcon.active_project() project_doc = get_project(project_name) # Get gazu entity - shot = gazu.shot.get_shot(data['shot_id']) + shot = gazu.shot.get_shot(data["shot_id"]) # Find asset doc # Query all assets of the local project zou_ids_and_asset_docs = { - asset_doc['data']['zou']['id']: asset_doc + asset_doc["data"]["zou"]["id"]: asset_doc for asset_doc in get_assets(project_name) - if asset_doc['data'].get("zou", {}).get("id") - } - zou_ids_and_asset_docs[shot['project_id']] = project_doc + if asset_doc["data"].get("zou", {}).get("id")} + zou_ids_and_asset_docs[shot["project_id"]] = project_doc + gazu_project = gazu.project.get_project(shot["project_id"]) gazu_project = gazu.project.get_project(shot['project_id']) # Update @@ -447,18 +448,18 @@ class Listener: def _delete_shot(self, data): """Delete shot of OP DB.""" - set_op_project(self.dbcon, data['project_id']) - shot = self.dbcon.find_one({"data.zou.id": data['shot_id']}) + set_op_project(self.dbcon, data["project_id"]) + shot = self.dbcon.find_one({"data.zou.id": data["shot_id"]}) if shot: # Delete self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data['shot_id']} + {"type": "asset", "data.zou.id": data["shot_id"]} ) # Print message gazu_project = gazu.project.get_project( - shot['data']['zou']['project_id']) + ep_id = shot["data"]["zou"].get("episode_id") msg = "Shot deleted: " msg = msg + f"{gazu_project['name']} - " @@ -469,15 +470,15 @@ class Listener: def _new_task(self, data): """Create new task into OP DB.""" # Get project entity - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) project_name = self.dbcon.active_project() # Get gazu entity - task = gazu.task.get_task(data['task_id']) + task = gazu.task.get_task(data["task_id"]) # Find asset doc episode = None - ep_id = task.get('episode_id') + ep_id = task.get("episode_id") if ep_id and ep_id != "": episode = gazu.asset.get_episode(ep_id) @@ -490,11 +491,11 @@ class Listener: # Update asset tasks with new one asset_doc = get_asset_by_name(project_name, parent_name) if asset_doc: - asset_tasks = asset_doc['data'].get("tasks") - task_type_name = task['task_type']['name'] + asset_tasks = asset_doc["data"].get("tasks") + task_type_name = task["task_type"]["name"] asset_tasks[task_type_name] = {"type": task_type_name, "zou": task} self.dbcon.update_one( - {"_id": asset_doc['_id']}, + {"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}} ) @@ -515,29 +516,29 @@ class Listener: def _delete_task(self, data): """Delete task of OP DB.""" - set_op_project(self.dbcon, data['project_id']) + set_op_project(self.dbcon, data["project_id"]) project_name = 
self.dbcon.active_project() # Find asset doc asset_docs = list(get_assets(project_name)) for doc in asset_docs: # Match task - for name, task in doc['data']['tasks'].items(): - if task.get("zou") and data['task_id'] == task['zou']['id']: + for name, task in doc["data"]["tasks"].items(): + if task.get("zou") and data["task_id"] == task["zou"]["id"]: # Pop task - asset_tasks = doc['data'].get("tasks", {}) + asset_tasks = doc["data"].get("tasks", {}) asset_tasks.pop(name) # Delete task in DB self.dbcon.update_one( - {"_id": doc['_id']}, + {"_id": doc["_id"]}, {"$set": {"data.tasks": asset_tasks}}, ) # Print message - shot = gazu.shot.get_shot(task['zou']['entity_id']) + entity = gazu.entity.get_entity(task["zou"]["entity_id"]) episode = None - ep_id = shot['episode_id'] + ep_id = entity.get("episode_id") if ep_id and ep_id != "": episode = gazu.asset.get_episode(ep_id) From f9137bdb041690f2ef6cc0a3d26878ee70b321bf Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:08:40 +0100 Subject: [PATCH 62/88] Added docstring and changed `doc` to `dict` in var-names --- .../modules/kitsu/utils/update_op_with_zou.py | 78 +++++++++++-------- 1 file changed, 44 insertions(+), 34 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 898cf076c8..9368848532 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -65,30 +65,32 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str): def update_op_assets( dbcon: AvalonMongoDB, gazu_project: dict, - project_doc: dict, + project_dict: dict, entities_list: List[dict], - asset_doc_ids: Dict[str, dict], + asset_dict_ids: Dict[str, dict], ) -> List[Dict[str, dict]]: """Update OpenPype assets. Set 'data' and 'parent' fields. Args: dbcon (AvalonMongoDB): Connection to DB + gazu_project dict): Dict of gazu, + project_dict dict): Dict of project, entities_list (List[dict]): List of zou entities to update - asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...] + asset_dict_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...] 
Returns: List[Dict[str, dict]]: List of (doc_id, update_dict) tuples """ - if not project_doc: + if not project_dict: return - project_name = project_doc["name"] + project_name = project_dict["name"] assets_with_update = [] for item in entities_list: # Check asset exists - item_doc = asset_doc_ids.get(item["id"]) + item_doc = asset_dict_ids.get(item["id"]) if not item_doc: # Create asset op_asset = create_op_asset(item) insert_result = dbcon.insert_one(op_asset) @@ -105,7 +107,7 @@ def update_op_assets( try: frame_in = int( item_data.pop( - "frame_in", project_doc["data"].get("frameStart") + "frame_in", project_dict["data"].get("frameStart") ) ) except (TypeError, ValueError): @@ -124,14 +126,14 @@ def update_op_assets( if frames_duration: frame_out = frame_in + frames_duration - 1 else: - frame_out = project_doc["data"].get("frameEnd", frame_in) + frame_out = project_dict["data"].get("frameEnd", frame_in) item_data["frameEnd"] = frame_out # Fps, fallback to project's value or default value (25.0) try: fps = float(item_data.get("fps")) except (TypeError, ValueError): fps = float(gazu_project.get( - "fps", project_doc["data"].get("fps", 25))) + "fps", project_dict["data"].get("fps", 25))) item_data["fps"] = fps # Resolution, fall back to project default match_res = re.match( @@ -142,27 +144,27 @@ def update_op_assets( item_data["resolutionWidth"] = int(match_res.group(1)) item_data["resolutionHeight"] = int(match_res.group(2)) else: - item_data["resolutionWidth"] = project_doc["data"].get( + item_data["resolutionWidth"] = project_dict["data"].get( "resolutionWidth") - item_data["resolutionHeight"] = project_doc["data"].get( + item_data["resolutionHeight"] = project_dict["data"].get( "resolutionHeight") # Properties that doesn't fully exist in Kitsu. 
# Guessing those property names below: # Pixel Aspect Ratio item_data["pixelAspect"] = item_data.get( - "pixel_aspect", project_doc["data"].get("pixelAspect")) + "pixel_aspect", project_dict["data"].get("pixelAspect")) # Handle Start item_data["handleStart"] = item_data.get( - "handle_start", project_doc["data"].get("handleStart")) + "handle_start", project_dict["data"].get("handleStart")) # Handle End item_data["handleEnd"] = item_data.get( - "handle_end", project_doc["data"].get("handleEnd")) + "handle_end", project_dict["data"].get("handleEnd")) # Clip In item_data["clipIn"] = item_data.get( - "clip_in", project_doc["data"].get("clipIn")) + "clip_in", project_dict["data"].get("clipIn")) # Clip Out item_data["clipOut"] = item_data.get( - "clip_out", project_doc["data"].get("clipOut")) + "clip_out", project_dict["data"].get("clipOut")) # Tasks tasks_list = [] @@ -204,9 +206,14 @@ def update_op_assets( entity_root_asset_name = "Shots" # Root parent folder if exist - visual_parent_doc_id = ( - asset_doc_ids[parent_zou_id].get("_id") if parent_zou_id else None - ) + visual_parent_doc_id = None + if parent_zou_id is not None: + parent_zou_id_dict = asset_dict_ids.get(parent_zou_id) + if parent_zou_id_dict is not None: + visual_parent_doc_id = ( + parent_zou_id_dict.get("_id") + if parent_zou_id_dict else None) + if visual_parent_doc_id is None: # Find root folder doc ("Assets" or "Shots") root_folder_doc = get_asset_by_name( @@ -225,12 +232,15 @@ def update_op_assets( item_data["parents"] = [] ancestor_id = parent_zou_id while ancestor_id is not None: - parent_doc = asset_doc_ids[ancestor_id] - item_data["parents"].insert(0, parent_doc["name"]) + parent_doc = asset_dict_ids.get(ancestor_id) + if parent_doc is not None: + item_data["parents"].insert(0, parent_doc["name"]) - # Get parent entity - parent_entity = parent_doc["data"]["zou"] - ancestor_id = parent_entity.get("parent_id") + # Get parent entity + parent_entity = parent_doc["data"]["zou"] + ancestor_id = parent_entity.get("parent_id") + else: + ancestor_id = None # Build OpenPype compatible name if item_type in ["Shot", "Sequence"] and parent_zou_id is not None: @@ -239,7 +249,7 @@ def update_op_assets( item_name = f"{item_data['parents'][-1]}_{item['name']}" # Update doc name - asset_doc_ids[item["id"]]["name"] = item_name + asset_dict_ids[item["id"]]["name"] = item_name else: item_name = item["name"] @@ -258,7 +268,7 @@ def update_op_assets( "$set": { "name": item_name, "data": item_data, - "parent": project_doc["_id"], + "parent": project_dict["_id"], } }, ) @@ -278,13 +288,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: UpdateOne: Update instance for the project """ project_name = project["name"] - project_doc = get_project(project_name) - if not project_doc: + project_dict = get_project(project_name) + if not project_dict: log.info("Project created: {}".format(project_name)) - project_doc = create_project(project_name, project_name) + project_dict = create_project(project_name, project_name) # Project data and tasks - project_data = project_doc["data"] or {} + project_data = project_dict["data"] or {} # Build project code and update Kitsu project_code = project.get("code") @@ -315,7 +325,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) return UpdateOne( - {"_id": project_doc["_id"]}, + {"_id": project_dict["_id"]}, { "$set": { "config.tasks": { @@ -398,7 +408,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): # Try to find project document 
project_name = project["name"] dbcon.Session["AVALON_PROJECT"] = project_name - project_doc = get_project(project_name) + project_dict = get_project(project_name) # Query all assets of the local project zou_ids_and_asset_docs = { @@ -406,7 +416,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): for asset_doc in get_assets(project_name) if asset_doc["data"].get("zou", {}).get("id") } - zou_ids_and_asset_docs[project["id"]] = project_doc + zou_ids_and_asset_docs[project["id"]] = project_dict # Create entities root folders to_insert = [ @@ -453,7 +463,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): [ UpdateOne({"_id": id}, update) for id, update in update_op_assets( - dbcon, project, project_doc, + dbcon, project, project_dict, all_entities, zou_ids_and_asset_docs ) ] From 0f76d3a44e4974b5c0ec81f166a19289d9cb4fd6 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:13:33 +0100 Subject: [PATCH 63/88] Cleaned up the fetching of the entity_id Also changed the name kitsu_id to entity_id and kitsu_entity to just entity as that's what it is. --- .../plugins/publish/collect_kitsu_entities.py | 34 ++++++++----------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index dc7048cf2a..1531c80e04 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -16,7 +16,6 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entities_by_id = {} for instance in context: asset_doc = instance.data.get("assetEntity") - task_name = instance.data.get("task") if not asset_doc: continue @@ -24,27 +23,24 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): if not zou_asset_data: raise ValueError("Zou asset data not found in OpenPype!") - if kitsu_project is None: - kitsu_project = gazu.project.get_project( - zou_asset_data["project_id"]) - if not kitsu_project: - raise ValueError("Project not found in kitsu!") + kitsu_project = gazu.project.get_project( + zou_asset_data["project_id"]) + if not kitsu_project: + raise ValueError("Project not found in kitsu!") - entity_type = zou_asset_data["type"] - kitsu_id = zou_asset_data["id"] - kitsu_entity = kitsu_entities_by_id.get(kitsu_id) - if not kitsu_entity: - if entity_type == "Shot": - kitsu_entity = gazu.shot.get_shot(kitsu_id) - else: - kitsu_entity = gazu.asset.get_asset(kitsu_id) - kitsu_entities_by_id[kitsu_id] = kitsu_entity + entity_id = zou_asset_data["id"] + entity = kitsu_entities_by_id.get(entity_id) + if not entity: + entity = gazu.entity.get_entity(entity_id) + if not entity: + raise ValueError( + "{} was not found in kitsu!".format( + zou_asset_data["name"])) - if not kitsu_entity: - raise ValueError( - "{} not found in kitsu!".format(entity_type)) - instance.data["kitsu_entity"] = kitsu_entity + kitsu_entities_by_id[entity_id] = entity + instance.data["entity"] = entity + task_name = instance.data.get("task") if not task_name: continue zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou") From 13a4c7201e5fd410e3546b4cd898ab8f0068b4ab Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:18:43 +0100 Subject: [PATCH 64/88] change task to task_id --- .../modules/kitsu/plugins/publish/integrate_kitsu_note.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py index 54fb6a4678..006f0bc6d0 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py @@ -44,12 +44,12 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): ) # Add comment to kitsu task - task = kitsu_task["id"] + task_id = kitsu_task["id"] self.log.debug( - "Add new note in taks id {}".format(task) + "Add new note in taks id {}".format(task_id) ) kitsu_comment = gazu.task.add_comment( - task, note_status, comment=publish_comment + task_id, note_status, comment=publish_comment ) instance.data["kitsu_comment"] = kitsu_comment From bcea2c70a269559a2fa370185393d6c4b675a38b Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:20:32 +0100 Subject: [PATCH 65/88] Cleaned up project deletion code --- openpype/modules/kitsu/utils/sync_service.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 1af0b6edc4..6155b396aa 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -149,20 +149,16 @@ class Listener: """Delete project.""" collections = self.dbcon.database.list_collection_names() - project_name = None for collection in collections: - post = self.dbcon.database[collection].find_one( - {"data.zou_id": data['project_id']}) - if post: - project_name = post['name'] - break + project = self.dbcon.database[collection].find_one( + {"data.zou_id": data["project_id"]}) + if project: + # Delete project collection + self.dbcon.database[project["name"]].drop() - if project_name: - # Delete project collection - self.dbcon.database[project_name].drop() - - # Print message - log.info("Project deleted: {}".format(project_name)) + # Print message + log.info("Project deleted: {}".format(project["name"])) + return # == Asset == def _new_asset(self, data): From d9ac1ee95255c2d663cffaa00e63ad99a74188af Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:24:55 +0100 Subject: [PATCH 66/88] Cleaned up log.info() message creation --- openpype/modules/kitsu/utils/sync_service.py | 171 +++++++++++-------- 1 file changed, 96 insertions(+), 75 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 6155b396aa..b389d25c4f 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -176,17 +176,18 @@ class Listener: self._update_asset(data) # Print message - episode = None + ep = None ep_id = asset.get("episode_id") if ep_id and ep_id != "": - episode = gazu.asset.get_episode(ep_id) + ep = gazu.asset.get_episode(ep_id) - msg = "Asset created: " - msg = msg + f"{asset['project_name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{asset['asset_type_name']}_" - msg = msg + f"{asset['name']}" + msg = "Asset created: {proj_name} - {ep_name}" \ + "{asset_type_name} - {asset_name}".format( + proj_name=asset["project_name"], + ep_name=ep["name"] + " - " if ep is not None else "", + asset_type_name=asset["asset_type_name"], + asset_name=asset["name"] + ) log.info(msg) def _update_asset(self, data): @@ -229,17 +230,18 @@ class Listener: ) # Print message - episode = None + ep = None ep_id = asset["data"]["zou"].get("episode_id") if ep_id and 
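The cleaned-up deletion above searches every collection for a document whose
data.zou_id matches the deleted Kitsu project and drops that project's
collection. A standalone sketch of the same flow with bare pymongo; the
connection string in the usage comment is hypothetical:

    from pymongo import MongoClient

    def delete_project_by_zou_id(database, project_id):
        """Drop the collection of the project whose data.zou_id matches."""
        for collection in database.list_collection_names():
            project = database[collection].find_one(
                {"data.zou_id": project_id})
            if project:
                database[project["name"]].drop()
                print("Project deleted: {}".format(project["name"]))
                return

    # Usage sketch:
    # database = MongoClient("mongodb://localhost:27017")["avalon"]
    # delete_project_by_zou_id(database, "kitsu-project-uuid")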
ep_id != "": - episode = gazu.asset.get_episode(ep_id) + ep = gazu.asset.get_episode(ep_id) - msg = "Asset deleted: " - msg = msg + f"{asset['data']['zou']['project_name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{asset['data']['zou']['asset_type_name']}_" - msg = msg + f"'{asset['name']}" + msg = "Asset deleted: {proj_name} - {ep_name}" \ + "{asset_type_name} - {asset_name}".format( + proj_name=asset["data"]["zou"]["project_name"], + ep_name=ep["name"] + " - " if ep is not None else "", + asset_type_name=asset["data"]["zou"]["asset_type_name"], + asset_name=asset["name"] + ) log.info(msg) # == Episode == @@ -252,15 +254,17 @@ class Listener: ep = gazu.shot.get_episode(data["episode_id"]) # Insert doc in DB - self.dbcon.insert_one(create_op_asset(episode)) + self.dbcon.insert_one(create_op_asset(ep)) # Update self._update_episode(data) # Print message - msg = "Episode created: " - msg = msg + f"{episode['project_name']} - " - msg = msg + f"{episode['name']}" + msg = "Episode created: {proj_name} - {ep_name}".format( + proj_name=ep["project_name"], + ep_name=ep["name"] + ) + log.info(msg) def _update_episode(self, data): """Update episode into OP DB.""" @@ -283,8 +287,8 @@ class Listener: # Update update_op_result = update_op_assets( - self.dbcon, gazu_project, project_doc, [ - episode], zou_ids_and_asset_docs + self.dbcon, gazu_project, project_doc, + [ep], zou_ids_and_asset_docs ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -294,20 +298,22 @@ class Listener: """Delete shot of OP DB.""" set_op_project(self.dbcon, data["project_id"]) - episode = self.dbcon.find_one({"data.zou.id": data['episode_id']}) - if episode: + ep = self.dbcon.find_one({"data.zou.id": data["episode_id"]}) + if ep: # Delete self.dbcon.delete_one( - {"type": "asset", "data.zou.id": data['episode_id']} + {"type": "asset", "data.zou.id": data["episode_id"]} ) # Print message project = gazu.project.get_project( - episode['data']['zou']['project_id']) + ep["data"]["zou"]["project_id"]) - msg = "Episode deleted: " - msg = msg + f"{project['name']} - " - msg = msg + f"{episode['name']}" + msg = "Episode deleted: {proj_name} - {ep_name}".format( + proj_name=project["name"], + ep_name=ep["name"] + ) + log.info(msg) # == Sequence == def _new_sequence(self, data): @@ -325,17 +331,17 @@ class Listener: self._update_sequence(data) # Print message - - episode = None + ep = None ep_id = sequence.get("episode_id") if ep_id and ep_id != "": - episode = gazu.asset.get_episode(ep_id) + ep = gazu.asset.get_episode(ep_id) - msg = "Sequence created: " - msg = msg + f"{sequence['project_name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{sequence['name']}" + msg = "Sequence created: {proj_name} - {ep_name}" \ + "{sequence_name}".format( + proj_name=sequence["project_name"], + ep_name=ep["name"] + " - " if ep is not None else "", + sequence_name=sequence["name"] + ) log.info(msg) def _update_sequence(self, data): @@ -377,12 +383,20 @@ class Listener: ) # Print message + ep = None ep_id = sequence["data"]["zou"].get("episode_id") + if ep_id and ep_id != "": + ep = gazu.asset.get_episode(ep_id) + gazu_project = gazu.project.get_project( - sequence['data']['zou']['project_id']) - msg = f"Sequence deleted: " - msg = msg + f"{gazu_project['name']} - " - msg = msg + f"{sequence['name']}" + sequence["data"]["zou"]["project_id"]) + + msg = "Sequence created: {proj_name} - {ep_name}" \ + "{sequence_name}".format( + 
proj_name=gazu_project["name"], + ep_name=ep["name"] + " - " if ep is not None else "", + sequence_name=sequence["name"] + ) log.info(msg) # == Shot == @@ -401,16 +415,17 @@ class Listener: self._update_shot(data) # Print message - episode = None + ep = None if shot["episode_id"] and shot["episode_id"] != "": - episode = gazu.asset.get_episode(shot['episode_id']) + ep = gazu.asset.get_episode(shot["episode_id"]) - msg = "Shot created: " - msg = msg + f"{shot['project_name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{shot['sequence_name']}_" - msg = msg + f"{shot['name']}" + msg = "Shot created: {proj_name} - {ep_name}" \ + "{sequence_name} - {shot_name}".format( + proj_name=shot["project_name"], + ep_name=ep["name"] + " - " if ep is not None else "", + sequence_name=shot["sequence_name"], + shot_name=shot["name"] + ) log.info(msg) def _update_shot(self, data): @@ -430,7 +445,6 @@ class Listener: if asset_doc["data"].get("zou", {}).get("id")} zou_ids_and_asset_docs[shot["project_id"]] = project_doc gazu_project = gazu.project.get_project(shot["project_id"]) - gazu_project = gazu.project.get_project(shot['project_id']) # Update update_op_result = update_op_assets( @@ -454,12 +468,18 @@ class Listener: ) # Print message - gazu_project = gazu.project.get_project( + ep = None ep_id = shot["data"]["zou"].get("episode_id") + if ep_id and ep_id != "": + ep = gazu.asset.get_episode(ep_id) - msg = "Shot deleted: " - msg = msg + f"{gazu_project['name']} - " - msg = msg + f"{shot['name']}" + msg = "Shot deleted: {proj_name} - {ep_name}" \ + "{sequence_name} - {shot_name}".format( + proj_name=shot["data"]["zou"]["project_name"], + ep_name=ep["name"] + " - " if ep is not None else "", + sequence_name=shot["data"]["zou"]["sequence_name"], + shot_name=shot["name"] + ) log.info(msg) # == Task == @@ -472,14 +492,14 @@ class Listener: # Get gazu entity task = gazu.task.get_task(data["task_id"]) - # Find asset doc - episode = None + # Print message + ep = None ep_id = task.get("episode_id") if ep_id and ep_id != "": - episode = gazu.asset.get_episode(ep_id) + ep = gazu.asset.get_episode(ep_id) - parent_name = "" - if episode is not None: + parent_name = None + entity_type = None parent_name = episode['name'] + "_" parent_name = parent_name + \ task['sequence']['name'] + "_" + task['entity']['name'] @@ -496,13 +516,13 @@ class Listener: ) # Print message - msg = "Task created: " - msg = msg + f"{task['project']['name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{task['sequence']['name']}_" - msg = msg + f"{task['entity']['name']} - " - msg = msg + f"{task['task_type']['name']}" + msg = "Task created: {proj_name} - {entity_type}{parent_name}" \ + " - {task_name}".format( + proj_name=task["project"]["name"], + entity_type=entity_type + " - " if entity_type is not None else "", + parent_name=parent_name, + task_name=task["task_type"]["name"] + ) log.info(msg) def _update_task(self, data): @@ -533,19 +553,20 @@ class Listener: # Print message entity = gazu.entity.get_entity(task["zou"]["entity_id"]) - episode = None + ep = None ep_id = entity.get("episode_id") if ep_id and ep_id != "": - episode = gazu.asset.get_episode(ep_id) + ep = gazu.asset.get_episode(ep_id) - msg = "Task deleted: " - msg = msg + f"{shot['project_name']} - " - if episode is not None: - msg = msg + f"{episode['name']}_" - msg = msg + f"{shot['sequence_name']}_" - msg = msg + f"{shot['name']} - " - msg = msg + f"{task['type']}" + msg = "Task deleted: {proj_name} - 
{entity_type}{parent_name}" \ + " - {task_name}".format( + proj_name=task["zou"]["project"]["name"], + entity_type=entity_type + " - " if entity_type is not None else "", + parent_name=parent_name, + task_name=task["type"] + ) log.info(msg) + return From d153e6c224a6d2ea21b11a1c33e88b2c19049123 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Tue, 28 Feb 2023 23:26:10 +0100 Subject: [PATCH 67/88] Split up the difference in name and type Assets/Shots generate Before it was only working for shots. Now it also works for Assets. I'm ding an elif as Kitsu now also have tasks for sequences, edits and other things. Will try and add those in at a later stage. --- openpype/modules/kitsu/utils/sync_service.py | 24 +++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index b389d25c4f..7e7f3f557c 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -500,9 +500,15 @@ class Listener: parent_name = None entity_type = None - parent_name = episode['name'] + "_" - parent_name = parent_name + \ - task['sequence']['name'] + "_" + task['entity']['name'] + if task["task_type"]["for_entity"] == "Asset": + parent_name = task["entity"]["name"] + entity_type = task["entity_type"]["name"] + elif task["task_type"]["for_entity"] == "Shot": + parent_name = "{ep_name}{sequence_name} - {shot_name}".format( + ep_name=ep["name"] + " - " if ep is not None else "", + sequence_name=task["sequence"]["name"], + shot_name=task["entity"]["name"] + ) # Update asset tasks with new one asset_doc = get_asset_by_name(project_name, parent_name) @@ -558,6 +564,18 @@ class Listener: if ep_id and ep_id != "": ep = gazu.asset.get_episode(ep_id) + parent_name = None + entity_type = None + if task["task_type"]["for_entity"] == "Asset": + parent_name = task["entity"]["name"] + entity_type = task["entity_type"]["name"] + elif task["task_type"]["for_entity"] == "Shot": + parent_name = "{ep_name}{sequence_name} - {shot_name}".format( + ep_name=ep["name"] + " - " if ep is not None else "", + sequence_name=task["sequence"]["name"], + shot_name=task["entity"]["name"] + ) + msg = "Task deleted: {proj_name} - {entity_type}{parent_name}" \ " - {task_name}".format( proj_name=task["zou"]["project"]["name"], From 7965b91dafd59e0a75b498961d94aa8fdaa14467 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Mar 2023 11:02:25 +0100 Subject: [PATCH 68/88] Moved kitsu_project out of context loop --- .../plugins/publish/collect_kitsu_entities.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index 1531c80e04..f68226a4a5 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -11,7 +11,13 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): def process(self, context): - kitsu_project = None + kitsu_project = gazu.project.get_project_by_name( + context.data["projectName"]) + if not kitsu_project: + raise ValueError("Project not found in kitsu!") + + context.data["kitsu_project"] = kitsu_project + self.log.debug("Collect kitsu project: {}".format(kitsu_project)) kitsu_entities_by_id = {} for instance in context: @@ -23,10 +29,10 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): if not zou_asset_data: raise 
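The new branching above keys off task["task_type"]["for_entity"] so both asset
and shot tasks produce a readable parent label, with sequence and edit tasks
deliberately left for later. A minimal sketch of that decision using the same
payload fields; the function name is illustrative:

    def describe_task_parent(task, ep=None):
        """Build the parent label for an Asset or Shot task; None otherwise."""
        for_entity = task["task_type"]["for_entity"]
        if for_entity == "Asset":
            return task["entity"]["name"]
        if for_entity == "Shot":
            return "{ep}{seq} - {shot}".format(
                ep=ep["name"] + " - " if ep is not None else "",
                seq=task["sequence"]["name"],
                shot=task["entity"]["name"],
            )
        # Sequence, Edit and other task types are not handled yet.
        return None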
ValueError("Zou asset data not found in OpenPype!") - kitsu_project = gazu.project.get_project( - zou_asset_data["project_id"]) - if not kitsu_project: - raise ValueError("Project not found in kitsu!") + task_name = instance.data.get("task") + if not task_name: + continue + entity_id = zou_asset_data["id"] entity = kitsu_entities_by_id.get(entity_id) @@ -63,5 +69,3 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): raise ValueError("Task not found in kitsu!") instance.data["kitsu_task"] = kitsu_task self.log.debug("Collect kitsu task: {}".format(kitsu_task)) - - context.data["kitsu_project"] = kitsu_project From 962d0783b06d804cacf1ce628d17e1d0836951b2 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Mar 2023 11:05:38 +0100 Subject: [PATCH 69/88] Fixed fetching of kitsu_task + moved data checks to the top of loop --- .../plugins/publish/collect_kitsu_entities.py | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index f68226a4a5..9b34bd15a9 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -33,6 +33,9 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): if not task_name: continue + zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou") + self.log.debug( + "Collected zou task data: {}".format(zou_task_data)) entity_id = zou_asset_data["id"] entity = kitsu_entities_by_id.get(entity_id) @@ -45,25 +48,26 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): kitsu_entities_by_id[entity_id] = entity instance.data["entity"] = entity - - task_name = instance.data.get("task") - if not task_name: - continue - zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou") self.log.debug( - "Collected zou task data: {}".format(zou_task_data)) - if not zou_task_data: + "Collect kitsu {}: {}".format(zou_asset_data["type"], entity) + ) + + if zou_task_data: + kitsu_task_id = zou_task_data["id"] + kitsu_task = kitsu_entities_by_id.get(kitsu_task_id) + if not kitsu_task: + kitsu_task = gazu.task.get_task(zou_task_data["id"]) + kitsu_entities_by_id[kitsu_task_id] = kitsu_task + else: kitsu_task_type = gazu.task.get_task_type_by_name(task_name) if not kitsu_task_type: raise ValueError( "Task type {} not found in Kitsu!".format(task_name) ) - continue - kitsu_task_id = zou_task_data["id"] - kitsu_task = kitsu_entities_by_id.get(kitsu_task_id) - if not kitsu_task: - kitsu_task = gazu.task.get_task(zou_task_data["id"]) - kitsu_entities_by_id[kitsu_task_id] = kitsu_task + + kitsu_task = gazu.task.get_task_by_name( + entity, kitsu_task_type + ) if not kitsu_task: raise ValueError("Task not found in kitsu!") From 8bf970e8b96b80c2cae601530d076b27d3a6d8f7 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Mar 2023 11:16:05 +0100 Subject: [PATCH 70/88] fixed hound's comments --- .../kitsu/plugins/publish/collect_kitsu_entities.py | 2 +- openpype/modules/kitsu/utils/sync_service.py | 9 ++++++--- openpype/modules/kitsu/utils/update_op_with_zou.py | 6 ++++-- openpype/modules/kitsu/utils/update_zou_with_op.py | 3 ++- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index 9b34bd15a9..71ed563580 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py 
+++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -64,7 +64,7 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): raise ValueError( "Task type {} not found in Kitsu!".format(task_name) ) - + kitsu_task = gazu.task.get_task_by_name( entity, kitsu_task_type ) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 7e7f3f557c..9c5c9e24ec 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -101,7 +101,8 @@ class Listener: self.event_client, "sequence:delete", self._delete_sequence ) - gazu.events.add_listener(self.event_client, "shot:new", self._new_shot) + gazu.events.add_listener( + self.event_client, "shot:new", self._new_shot) gazu.events.add_listener( self.event_client, "shot:update", self._update_shot ) @@ -109,7 +110,8 @@ class Listener: self.event_client, "shot:delete", self._delete_shot ) - gazu.events.add_listener(self.event_client, "task:new", self._new_task) + gazu.events.add_listener( + self.event_client, "task:new", self._new_task) gazu.events.add_listener( self.event_client, "task:update", self._update_task ) @@ -515,7 +517,8 @@ class Listener: if asset_doc: asset_tasks = asset_doc["data"].get("tasks") task_type_name = task["task_type"]["name"] - asset_tasks[task_type_name] = {"type": task_type_name, "zou": task} + asset_tasks[task_type_name] = { + "type": task_type_name, "zou": task} self.dbcon.update_one( {"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}} diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 9368848532..6590d05a82 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -94,7 +94,8 @@ def update_op_assets( if not item_doc: # Create asset op_asset = create_op_asset(item) insert_result = dbcon.insert_one(op_asset) - item_doc = get_asset_by_id(project_name, insert_result.inserted_id) + item_doc = get_asset_by_id( + project_name, insert_result.inserted_id) # Update asset item_data = deepcopy(item_doc["data"]) @@ -339,7 +340,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) -def sync_all_projects(login: str, password: str, ignore_projects: list = None): +def sync_all_projects( + login: str, password: str, ignore_projects: list = None): """Update all OP projects in DB with Zou data. 
Args: diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index b1a9b8b82c..617f037c1e 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -82,7 +82,8 @@ def sync_zou_from_op_project( f"x{project_doc['data']['resolutionHeight']}", } ) - gazu.project.update_project_data(zou_project, data=project_doc["data"]) + gazu.project.update_project_data( + zou_project, data=project_doc["data"]) gazu.project.update_project(zou_project) asset_types = gazu.asset.all_asset_types() From 0d981a61291d78f25c53425de4161d3f109f1505 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Mar 2023 11:42:18 +0100 Subject: [PATCH 71/88] Fixed hound's comments --- openpype/modules/kitsu/utils/sync_service.py | 41 ++++++++++---------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 9c5c9e24ec..da81a23495 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -501,10 +501,10 @@ class Listener: ep = gazu.asset.get_episode(ep_id) parent_name = None - entity_type = None + ent_type = None if task["task_type"]["for_entity"] == "Asset": parent_name = task["entity"]["name"] - entity_type = task["entity_type"]["name"] + ent_type = task["entity_type"]["name"] elif task["task_type"]["for_entity"] == "Shot": parent_name = "{ep_name}{sequence_name} - {shot_name}".format( ep_name=ep["name"] + " - " if ep is not None else "", @@ -525,12 +525,12 @@ class Listener: ) # Print message - msg = "Task created: {proj_name} - {entity_type}{parent_name}" \ - " - {task_name}".format( - proj_name=task["project"]["name"], - entity_type=entity_type + " - " if entity_type is not None else "", - parent_name=parent_name, - task_name=task["task_type"]["name"] + msg = "Task created: {proj} - {ent_type}{parent}" \ + " - {task}".format( + proj=task["project"]["name"], + ent_type=ent_type + " - " if ent_type is not None else "", + parent=parent_name, + task=task["task_type"]["name"] ) log.info(msg) @@ -568,23 +568,24 @@ class Listener: ep = gazu.asset.get_episode(ep_id) parent_name = None - entity_type = None + ent_type = None if task["task_type"]["for_entity"] == "Asset": parent_name = task["entity"]["name"] - entity_type = task["entity_type"]["name"] + ent_type = task["entity_type"]["name"] elif task["task_type"]["for_entity"] == "Shot": - parent_name = "{ep_name}{sequence_name} - {shot_name}".format( - ep_name=ep["name"] + " - " if ep is not None else "", - sequence_name=task["sequence"]["name"], - shot_name=task["entity"]["name"] + parent_name = "{ep}{sequence} - {shot}".format( + ep=ep["name"] + " - " if ep is not None else "", + sequence=task["sequence"]["name"], + shot=task["entity"]["name"] ) - msg = "Task deleted: {proj_name} - {entity_type}{parent_name}" \ - " - {task_name}".format( - proj_name=task["zou"]["project"]["name"], - entity_type=entity_type + " - " if entity_type is not None else "", - parent_name=parent_name, - task_name=task["type"] + ent_type=ent_type + " - " if ent_type is not None else "", + msg = "Task deleted: {proj} - {ent_type}{parent}" \ + " - {task}".format( + proj=task["zou"]["project"]["name"], + ent_type=ent_type, + parent=parent_name, + task=task["type"] ) log.info(msg) From 0beec8c3a710e7c3e950678a4c0b6606f4f079d7 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Wed, 1 Mar 2023 11:44:55 +0100 Subject: [PATCH 
72/88] Fixed hound's comments --- openpype/modules/kitsu/utils/sync_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index da81a23495..91ce84637d 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -579,7 +579,7 @@ class Listener: shot=task["entity"]["name"] ) - ent_type=ent_type + " - " if ent_type is not None else "", + ent_type=ent_type + " - " if ent_type is not None else "" msg = "Task deleted: {proj} - {ent_type}{parent}" \ " - {task}".format( proj=task["zou"]["project"]["name"], From 247778575f2e5c03cf055da288b444034bca4475 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Mar 2023 12:30:19 +0100 Subject: [PATCH 73/88] Project creation logs happens outside of write_project_to_op() function --- openpype/modules/kitsu/utils/sync_service.py | 12 ++++++------ openpype/modules/kitsu/utils/update_op_with_zou.py | 9 ++++++--- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 91ce84637d..172f7555ac 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -129,12 +129,9 @@ class Listener: """Create new project into OP DB.""" # Use update process to avoid duplicating code - self._update_project(data) + self._update_project(data, new_project=True) - # Print message - # - Happens in write_project_to_op() - - def _update_project(self, data): + def _update_project(self, data, new_project=False): """Update project into OP DB.""" # Get project entity project = gazu.project.get_project(data["project_id"]) @@ -147,6 +144,9 @@ class Listener: data["project_id"]) self.dbcon.bulk_write([update_project]) + if new_project: + log.info("Project created: {}".format(project["name"])) + def _delete_project(self, data): """Delete project.""" @@ -579,7 +579,7 @@ class Listener: shot=task["entity"]["name"] ) - ent_type=ent_type + " - " if ent_type is not None else "" + ent_type = ent_type + " - " if ent_type is not None else "" msg = "Task deleted: {proj} - {ent_type}{parent}" \ " - {task}".format( proj=task["zou"]["project"]["name"], diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 6590d05a82..a559d8a19f 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -291,7 +291,6 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: project_name = project["name"] project_dict = get_project(project_name) if not project_dict: - log.info("Project created: {}".format(project_name)) project_dict = create_project(project_name, project_name) # Project data and tasks @@ -405,12 +404,16 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict): ] # Sync project. 
Create if doesn't exist + project_name = project["name"] + project_dict = get_project(project_name) + if not project_dict: + log.info("Project created: {}".format(project_name)) bulk_writes.append(write_project_to_op(project, dbcon)) # Try to find project document - project_name = project["name"] + if not project_dict: + project_dict = get_project(project_name) dbcon.Session["AVALON_PROJECT"] = project_name - project_dict = get_project(project_name) # Query all assets of the local project zou_ids_and_asset_docs = { From 5a9ea0d130ba6009804ac479e3ebe6ef4ff46906 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Mar 2023 12:36:57 +0100 Subject: [PATCH 74/88] Changed back from dict to doc for var names --- .../modules/kitsu/utils/update_op_with_zou.py | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index a559d8a19f..c215126dac 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -65,32 +65,32 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str): def update_op_assets( dbcon: AvalonMongoDB, gazu_project: dict, - project_dict: dict, + project_doc: dict, entities_list: List[dict], - asset_dict_ids: Dict[str, dict], + asset_doc_ids: Dict[str, dict], ) -> List[Dict[str, dict]]: """Update OpenPype assets. Set 'data' and 'parent' fields. Args: dbcon (AvalonMongoDB): Connection to DB - gazu_project dict): Dict of gazu, - project_dict dict): Dict of project, + gazu_project (dict): Dict of gazu, + project_doc (dict): Dict of project, entities_list (List[dict]): List of zou entities to update - asset_dict_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...] + asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...] 
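With the log call moved out of write_project_to_op, the creation message above
is emitted exactly once, at the moment the project document is first found
missing. A small sketch of that create-if-missing flow; the helpers are passed
in as parameters here so the snippet stands alone, whereas the module uses its
already imported get_project and create_project:

    def ensure_project(project_name, get_project, create_project, log):
        """Return the project doc, creating it (and logging) when missing."""
        project_doc = get_project(project_name)
        if not project_doc:
            log.info("Project created: {}".format(project_name))
            project_doc = create_project(project_name, project_name)
        return project_doc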
Returns: List[Dict[str, dict]]: List of (doc_id, update_dict) tuples """ - if not project_dict: + if not project_doc: return - project_name = project_dict["name"] + project_name = project_doc["name"] assets_with_update = [] for item in entities_list: # Check asset exists - item_doc = asset_dict_ids.get(item["id"]) + item_doc = asset_doc_ids.get(item["id"]) if not item_doc: # Create asset op_asset = create_op_asset(item) insert_result = dbcon.insert_one(op_asset) @@ -108,7 +108,7 @@ def update_op_assets( try: frame_in = int( item_data.pop( - "frame_in", project_dict["data"].get("frameStart") + "frame_in", project_doc["data"].get("frameStart") ) ) except (TypeError, ValueError): @@ -127,14 +127,14 @@ def update_op_assets( if frames_duration: frame_out = frame_in + frames_duration - 1 else: - frame_out = project_dict["data"].get("frameEnd", frame_in) + frame_out = project_doc["data"].get("frameEnd", frame_in) item_data["frameEnd"] = frame_out # Fps, fallback to project's value or default value (25.0) try: fps = float(item_data.get("fps")) except (TypeError, ValueError): fps = float(gazu_project.get( - "fps", project_dict["data"].get("fps", 25))) + "fps", project_doc["data"].get("fps", 25))) item_data["fps"] = fps # Resolution, fall back to project default match_res = re.match( @@ -145,27 +145,27 @@ def update_op_assets( item_data["resolutionWidth"] = int(match_res.group(1)) item_data["resolutionHeight"] = int(match_res.group(2)) else: - item_data["resolutionWidth"] = project_dict["data"].get( + item_data["resolutionWidth"] = project_doc["data"].get( "resolutionWidth") - item_data["resolutionHeight"] = project_dict["data"].get( + item_data["resolutionHeight"] = project_doc["data"].get( "resolutionHeight") # Properties that doesn't fully exist in Kitsu. 
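The fallback chain above tries the Kitsu value first, then the project
document, then a hard default. A compact sketch for fps and the
width-by-height resolution string; the regular expression here is
illustrative, since the full pattern is not visible in the hunk:

    import re

    def resolve_fps(item_data, gazu_project, project_doc, default=25.0):
        """Entity fps, else Kitsu project fps, else OpenPype project fps."""
        try:
            return float(item_data.get("fps"))
        except (TypeError, ValueError):
            return float(gazu_project.get(
                "fps", project_doc["data"].get("fps", default)))

    def resolve_resolution(item_data, project_doc):
        """Parse "1920x1080"-style strings, else fall back to project values."""
        match = re.match(r"(\d+)x(\d+)", str(item_data.get("resolution") or ""))
        if match:
            return int(match.group(1)), int(match.group(2))
        return (project_doc["data"].get("resolutionWidth"),
                project_doc["data"].get("resolutionHeight"))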
# Guessing those property names below: # Pixel Aspect Ratio item_data["pixelAspect"] = item_data.get( - "pixel_aspect", project_dict["data"].get("pixelAspect")) + "pixel_aspect", project_doc["data"].get("pixelAspect")) # Handle Start item_data["handleStart"] = item_data.get( - "handle_start", project_dict["data"].get("handleStart")) + "handle_start", project_doc["data"].get("handleStart")) # Handle End item_data["handleEnd"] = item_data.get( - "handle_end", project_dict["data"].get("handleEnd")) + "handle_end", project_doc["data"].get("handleEnd")) # Clip In item_data["clipIn"] = item_data.get( - "clip_in", project_dict["data"].get("clipIn")) + "clip_in", project_doc["data"].get("clipIn")) # Clip Out item_data["clipOut"] = item_data.get( - "clip_out", project_dict["data"].get("clipOut")) + "clip_out", project_doc["data"].get("clipOut")) # Tasks tasks_list = [] @@ -209,7 +209,7 @@ def update_op_assets( # Root parent folder if exist visual_parent_doc_id = None if parent_zou_id is not None: - parent_zou_id_dict = asset_dict_ids.get(parent_zou_id) + parent_zou_id_dict = asset_doc_ids.get(parent_zou_id) if parent_zou_id_dict is not None: visual_parent_doc_id = ( parent_zou_id_dict.get("_id") @@ -233,7 +233,7 @@ def update_op_assets( item_data["parents"] = [] ancestor_id = parent_zou_id while ancestor_id is not None: - parent_doc = asset_dict_ids.get(ancestor_id) + parent_doc = asset_doc_ids.get(ancestor_id) if parent_doc is not None: item_data["parents"].insert(0, parent_doc["name"]) @@ -250,7 +250,7 @@ def update_op_assets( item_name = f"{item_data['parents'][-1]}_{item['name']}" # Update doc name - asset_dict_ids[item["id"]]["name"] = item_name + asset_doc_ids[item["id"]]["name"] = item_name else: item_name = item["name"] @@ -269,7 +269,7 @@ def update_op_assets( "$set": { "name": item_name, "data": item_data, - "parent": project_dict["_id"], + "parent": project_doc["_id"], } }, ) From 93eb9fce8609b0d6543a1f91ddb84907f89b63a3 Mon Sep 17 00:00:00 2001 From: Ember Light <49758407+EmberLightVFX@users.noreply.github.com> Date: Thu, 2 Mar 2023 12:38:37 +0100 Subject: [PATCH 75/88] Update openpype/modules/kitsu/utils/sync_service.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Félix David --- openpype/modules/kitsu/utils/sync_service.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 172f7555ac..1efebb2d47 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -178,10 +178,11 @@ class Listener: self._update_asset(data) # Print message - ep = None ep_id = asset.get("episode_id") if ep_id and ep_id != "": ep = gazu.asset.get_episode(ep_id) + else: + ep = None msg = "Asset created: {proj_name} - {ep_name}" \ "{asset_type_name} - {asset_name}".format( From 95e1f95bc1896547bde026f5c2a2517103e9e8e8 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Mar 2023 14:05:36 +0100 Subject: [PATCH 76/88] Moved all ep_dict code into one function --- openpype/modules/kitsu/utils/sync_service.py | 35 ++++++++------------ 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 1efebb2d47..d6bdb5391e 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -124,6 +124,11 @@ class Listener: log.info("Listening to Kitsu 
events...") gazu.events.run_client(self.event_client) + def get_ep_dict(self, ep_id): + if ep_id and ep_id != "": + return gazu.entity.get_entity(ep_id) + return + # == Project == def _new_project(self, data): """Create new project into OP DB.""" @@ -179,10 +184,7 @@ class Listener: # Print message ep_id = asset.get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) - else: - ep = None + ep = self.get_ep_dict(ep_id) msg = "Asset created: {proj_name} - {ep_name}" \ "{asset_type_name} - {asset_name}".format( @@ -233,10 +235,8 @@ class Listener: ) # Print message - ep = None ep_id = asset["data"]["zou"].get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) + ep = self.get_ep_dict(ep_id) msg = "Asset deleted: {proj_name} - {ep_name}" \ "{asset_type_name} - {asset_name}".format( @@ -334,10 +334,8 @@ class Listener: self._update_sequence(data) # Print message - ep = None ep_id = sequence.get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) + ep = self.get_ep_dict(ep_id) msg = "Sequence created: {proj_name} - {ep_name}" \ "{sequence_name}".format( @@ -386,10 +384,8 @@ class Listener: ) # Print message - ep = None ep_id = sequence["data"]["zou"].get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) + ep = self.get_ep_dict(ep_id) gazu_project = gazu.project.get_project( sequence["data"]["zou"]["project_id"]) @@ -418,9 +414,8 @@ class Listener: self._update_shot(data) # Print message - ep = None - if shot["episode_id"] and shot["episode_id"] != "": - ep = gazu.asset.get_episode(shot["episode_id"]) + ep_id = shot["episode_id"] + ep = self.get_ep_dict(ep_id) msg = "Shot created: {proj_name} - {ep_name}" \ "{sequence_name} - {shot_name}".format( @@ -471,10 +466,8 @@ class Listener: ) # Print message - ep = None ep_id = shot["data"]["zou"].get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) + ep = self.get_ep_dict(ep_id) msg = "Shot deleted: {proj_name} - {ep_name}" \ "{sequence_name} - {shot_name}".format( @@ -496,10 +489,8 @@ class Listener: task = gazu.task.get_task(data["task_id"]) # Print message - ep = None ep_id = task.get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) + ep = self.get_ep_dict(ep_id) parent_name = None ent_type = None From 966ba0166e349b2882cb5db1f686b6235abbd44b Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Mar 2023 14:07:14 +0100 Subject: [PATCH 77/88] Fixed delete_task msg creation to work with assets and episodes --- openpype/modules/kitsu/utils/sync_service.py | 41 ++++++++++---------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index d6bdb5391e..893d6a8b5e 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -554,31 +554,32 @@ class Listener: # Print message entity = gazu.entity.get_entity(task["zou"]["entity_id"]) - ep = None - ep_id = entity.get("episode_id") - if ep_id and ep_id != "": - ep = gazu.asset.get_episode(ep_id) + if entity["type"] == "Asset": + ep = self.get_ep_dict(entity["source_id"]) + + parent_name = "{ep}{entity_type} - {entity}".format( + ep=ep["name"] + " - " if ep is not None else "", + entity_type=task["zou"]["entity_type_name"], + entity=task["zou"]["entity_name"] + ) + elif entity["type"] == "Shot": + shot_dict = gazu.entity.get_entity( + task["zou"]["entity_id"]) + seq_dict = 
gazu.entity.get_entity( + shot_dict["parent_id"]) + ep = self.get_ep_dict(seq_dict["parent_id"]) - parent_name = None - ent_type = None - if task["task_type"]["for_entity"] == "Asset": - parent_name = task["entity"]["name"] - ent_type = task["entity_type"]["name"] - elif task["task_type"]["for_entity"] == "Shot": parent_name = "{ep}{sequence} - {shot}".format( ep=ep["name"] + " - " if ep is not None else "", - sequence=task["sequence"]["name"], - shot=task["entity"]["name"] + sequence=seq_dict["name"], + shot=shot_dict["name"] ) - ent_type = ent_type + " - " if ent_type is not None else "" - msg = "Task deleted: {proj} - {ent_type}{parent}" \ - " - {task}".format( - proj=task["zou"]["project"]["name"], - ent_type=ent_type, - parent=parent_name, - task=task["type"] - ) + msg = "Task deleted: {proj} - {parent} - {task}".format( + proj=task["zou"]["project_name"], + parent=parent_name, + task=task["zou"]["task_type_name"] + ) log.info(msg) return From 804cdcafd6268d84a3c7d2e887c9e05e5798dec4 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Mar 2023 19:33:11 +0100 Subject: [PATCH 78/88] Store the gazu asset data in OPs DB as sync_service does This isn't the most optimal way to do it but it makes sure the data is consistent through out the code until we can revision sync_service to only use the ID from the dict. --- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index c215126dac..6797df6344 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -175,7 +175,7 @@ def update_op_assets( elif item_type == "Shot": tasks_list = gazu.task.all_tasks_for_shot(item) item_data["tasks"] = { - t["task_type_name"]: {"type": t["task_type_name"], "zou": t} + t["task_type_name"]: {"type": t["task_type_name"], "zou": gazu.task.get_task(t["id"])} for t in tasks_list } From 7176be9f92e71ee6942971e25ffc72199c1eecf8 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Thu, 2 Mar 2023 19:35:07 +0100 Subject: [PATCH 79/88] Log msg for new_task now work for both shot and assets --- openpype/modules/kitsu/utils/sync_service.py | 37 ++++++++++---------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 893d6a8b5e..1f12217d44 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -239,10 +239,10 @@ class Listener: ep = self.get_ep_dict(ep_id) msg = "Asset deleted: {proj_name} - {ep_name}" \ - "{asset_type_name} - {asset_name}".format( + "{type_name} - {asset_name}".format( proj_name=asset["data"]["zou"]["project_name"], ep_name=ep["name"] + " - " if ep is not None else "", - asset_type_name=asset["data"]["zou"]["asset_type_name"], + type_name=asset["data"]["zou"]["asset_type_name"], asset_name=asset["name"] ) log.info(msg) @@ -390,7 +390,7 @@ class Listener: gazu_project = gazu.project.get_project( sequence["data"]["zou"]["project_id"]) - msg = "Sequence created: {proj_name} - {ep_name}" \ + msg = "Sequence deleted: {proj_name} - {ep_name}" \ "{sequence_name}".format( proj_name=gazu_project["name"], ep_name=ep["name"] + " - " if ep is not None else "", @@ -493,9 +493,12 @@ class Listener: ep = self.get_ep_dict(ep_id) parent_name = None + asset_name = None ent_type = None + if task["task_type"]["for_entity"] == 
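For deleted shot tasks, the message builder above climbs from the shot to its
sequence and then to the episode through parent_id links. A minimal sketch of
that traversal, assuming a configured gazu session; the function name is
illustrative:

    import gazu

    def shot_context(shot_id):
        """Return (shot, sequence, episode) dicts; episode may be None."""
        shot = gazu.entity.get_entity(shot_id)
        sequence = gazu.entity.get_entity(shot["parent_id"])
        episode = (gazu.entity.get_entity(sequence["parent_id"])
                   if sequence.get("parent_id") else None)
        return shot, sequence, episode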
"Asset": parent_name = task["entity"]["name"] + asset_name = task["entity"]["name"] ent_type = task["entity_type"]["name"] elif task["task_type"]["for_entity"] == "Shot": parent_name = "{ep_name}{sequence_name} - {shot_name}".format( @@ -503,9 +506,14 @@ class Listener: sequence_name=task["sequence"]["name"], shot_name=task["entity"]["name"] ) + asset_name = "{ep_name}{sequence_name}_{shot_name}".format( + ep_name=ep["name"] + "_" if ep is not None else "", + sequence_name=task["sequence"]["name"], + shot_name=task["entity"]["name"] + ) # Update asset tasks with new one - asset_doc = get_asset_by_name(project_name, parent_name) + asset_doc = get_asset_by_name(project_name, asset_name) if asset_doc: asset_tasks = asset_doc["data"].get("tasks") task_type_name = task["task_type"]["name"] @@ -553,32 +561,25 @@ class Listener: # Print message entity = gazu.entity.get_entity(task["zou"]["entity_id"]) + ep = self.get_ep_dict(entity["source_id"]) if entity["type"] == "Asset": - ep = self.get_ep_dict(entity["source_id"]) - parent_name = "{ep}{entity_type} - {entity}".format( ep=ep["name"] + " - " if ep is not None else "", - entity_type=task["zou"]["entity_type_name"], - entity=task["zou"]["entity_name"] + entity_type=task["zou"]["entity_type"]["name"], + entity=task["zou"]["entity"]["name"] ) elif entity["type"] == "Shot": - shot_dict = gazu.entity.get_entity( - task["zou"]["entity_id"]) - seq_dict = gazu.entity.get_entity( - shot_dict["parent_id"]) - ep = self.get_ep_dict(seq_dict["parent_id"]) - parent_name = "{ep}{sequence} - {shot}".format( ep=ep["name"] + " - " if ep is not None else "", - sequence=seq_dict["name"], - shot=shot_dict["name"] + sequence=task["zou"]["sequence"]["name"], + shot=task["zou"]["entity"]["name"] ) msg = "Task deleted: {proj} - {parent} - {task}".format( - proj=task["zou"]["project_name"], + proj=task["zou"]["project"]["name"], parent=parent_name, - task=task["zou"]["task_type_name"] + task=name ) log.info(msg) From c50678bcb8c0a1cb2696fbb526d61cbe4261a361 Mon Sep 17 00:00:00 2001 From: Ember Light <49758407+EmberLightVFX@users.noreply.github.com> Date: Fri, 3 Mar 2023 10:34:17 +0100 Subject: [PATCH 80/88] Update openpype/modules/kitsu/utils/update_op_with_zou.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Félix David --- openpype/modules/kitsu/utils/update_op_with_zou.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 6797df6344..73b7a4249d 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -175,7 +175,12 @@ def update_op_assets( elif item_type == "Shot": tasks_list = gazu.task.all_tasks_for_shot(item) item_data["tasks"] = { - t["task_type_name"]: {"type": t["task_type_name"], "zou": gazu.task.get_task(t["id"])} + item_data["tasks"] = { + t["task_type_name"]: { + "type": t["task_type_name"], + "zou": gazu.task.get_task(t["id"]), + } + } for t in tasks_list } From 8fc6978ea2f56778d794e213c541f89888b24795 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 3 Mar 2023 11:10:12 +0100 Subject: [PATCH 81/88] Formatted with Black --- .../kitsu/actions/launcher_show_in_kitsu.py | 65 +++++----- .../publish/collect_kitsu_credential.py | 1 - .../plugins/publish/collect_kitsu_entities.py | 11 +- .../plugins/publish/integrate_kitsu_note.py | 5 +- .../plugins/publish/integrate_kitsu_review.py | 1 - 
openpype/modules/kitsu/utils/sync_service.py | 114 ++++++++++++------ .../modules/kitsu/utils/update_zou_with_op.py | 9 +- 7 files changed, 123 insertions(+), 83 deletions(-) diff --git a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py index 11224f6e52..81d98cfffb 100644 --- a/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py +++ b/openpype/modules/kitsu/actions/launcher_show_in_kitsu.py @@ -23,36 +23,37 @@ class ShowInKitsu(LauncherAction): return True def process(self, session, **kwargs): - # Context inputs project_name = session["AVALON_PROJECT"] asset_name = session.get("AVALON_ASSET", None) task_name = session.get("AVALON_TASK", None) - project = get_project(project_name=project_name, - fields=["data.zou_id"]) + project = get_project( + project_name=project_name, fields=["data.zou_id"] + ) if not project: raise RuntimeError("Project {} not found.".format(project_name)) project_zou_id = project["data"].get("zou_id") if not project_zou_id: raise RuntimeError( - "Project {} has no connected kitsu id.".format(project_name)) + "Project {} has no connected kitsu id.".format(project_name) + ) asset_zou_name = None asset_zou_id = None - asset_zou_type = 'Assets' + asset_zou_type = "Assets" task_zou_id = None - zou_sub_type = ['AssetType', 'Sequence'] + zou_sub_type = ["AssetType", "Sequence"] if asset_name: asset_zou_name = asset_name asset_fields = ["data.zou.id", "data.zou.type"] if task_name: asset_fields.append("data.tasks.{}.zou.id".format(task_name)) - asset = get_asset_by_name(project_name, - asset_name=asset_name, - fields=asset_fields) + asset = get_asset_by_name( + project_name, asset_name=asset_name, fields=asset_fields + ) asset_zou_data = asset["data"].get("zou") @@ -68,37 +69,43 @@ class ShowInKitsu(LauncherAction): task_zou_data = task_data.get("zou", {}) if not task_zou_data: self.log.debug( - "No zou task data for task: {}".format(task_name)) + "No zou task data for task: {}".format(task_name) + ) task_zou_id = task_zou_data["id"] # Define URL - url = self.get_url(project_id=project_zou_id, - asset_name=asset_zou_name, - asset_id=asset_zou_id, - asset_type=asset_zou_type, - task_id=task_zou_id) + url = self.get_url( + project_id=project_zou_id, + asset_name=asset_zou_name, + asset_id=asset_zou_id, + asset_type=asset_zou_type, + task_id=task_zou_id, + ) # Open URL in webbrowser self.log.info("Opening URL: {}".format(url)) - webbrowser.open(url, - # Try in new tab - new=2) + webbrowser.open( + url, + # Try in new tab + new=2, + ) - def get_url(self, - project_id, - asset_name=None, - asset_id=None, - asset_type=None, - task_id=None): - - shots_url = {'Shots', 'Sequence', 'Shot'} - sub_type = {'AssetType', 'Sequence'} + def get_url( + self, + project_id, + asset_name=None, + asset_id=None, + asset_type=None, + task_id=None, + ): + shots_url = {"Shots", "Sequence", "Shot"} + sub_type = {"AssetType", "Sequence"} kitsu_module = self.get_kitsu_module() # Get kitsu url with /api stripped kitsu_url = kitsu_module.server_url if kitsu_url.endswith("/api"): - kitsu_url = kitsu_url[:-len("/api")] + kitsu_url = kitsu_url[: -len("/api")] sub_url = f"/productions/{project_id}" asset_type_url = "shots" if asset_type in shots_url else "assets" @@ -121,6 +128,6 @@ class ShowInKitsu(LauncherAction): # Add search method if is a sub_type sub_url += f"/{asset_type_url}" if asset_type in sub_type: - sub_url += f'?search={asset_name}' + sub_url += f"?search={asset_name}" return f"{kitsu_url}{sub_url}" diff --git 
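The reformatted get_url above strips the /api suffix from the configured
server, appends the production and the shots or assets segment, and adds a
search query for sub-types. A reduced sketch covering only the pieces visible
in this diff; anything beyond that (for example a task segment) is
intentionally left out:

    def kitsu_browse_url(server_url, project_id, asset_type_url="assets",
                         asset_name=None):
        """Build a Kitsu web UI link from the API server URL."""
        if server_url.endswith("/api"):
            server_url = server_url[:-len("/api")]
        url = "{}/productions/{}/{}".format(
            server_url, project_id, asset_type_url)
        if asset_name:
            # Sub-types (AssetType, Sequence) are reached through a search.
            url += "?search={}".format(asset_name)
        return url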
a/openpype/modules/kitsu/plugins/publish/collect_kitsu_credential.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_credential.py index b7f6f67a40..ac501dd47d 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_credential.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_credential.py @@ -13,6 +13,5 @@ class CollectKitsuSession(pyblish.api.ContextPlugin): # rename log in # families = ["kitsu"] def process(self, context): - gazu.client.set_host(os.environ["KITSU_SERVER"]) gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"]) diff --git a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py index 71ed563580..a0bd2b305b 100644 --- a/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py +++ b/openpype/modules/kitsu/plugins/publish/collect_kitsu_entities.py @@ -10,9 +10,9 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): label = "Kitsu entities" def process(self, context): - kitsu_project = gazu.project.get_project_by_name( - context.data["projectName"]) + context.data["projectName"] + ) if not kitsu_project: raise ValueError("Project not found in kitsu!") @@ -35,7 +35,8 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou") self.log.debug( - "Collected zou task data: {}".format(zou_task_data)) + "Collected zou task data: {}".format(zou_task_data) + ) entity_id = zou_asset_data["id"] entity = kitsu_entities_by_id.get(entity_id) @@ -44,7 +45,9 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin): if not entity: raise ValueError( "{} was not found in kitsu!".format( - zou_asset_data["name"])) + zou_asset_data["name"] + ) + ) kitsu_entities_by_id[entity_id] = entity instance.data["entity"] = entity diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py index 006f0bc6d0..6702cbe7aa 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_note.py @@ -13,7 +13,6 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): note_status_shortname = "wfa" def process(self, context): - # Get comment text body publish_comment = context.data.get("comment") if not publish_comment: @@ -45,9 +44,7 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin): # Add comment to kitsu task task_id = kitsu_task["id"] - self.log.debug( - "Add new note in taks id {}".format(task_id) - ) + self.log.debug("Add new note in taks id {}".format(task_id)) kitsu_comment = gazu.task.add_comment( task_id, note_status, comment=publish_comment ) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index d8f6cb7ac8..12482b5657 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -12,7 +12,6 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): optional = True def process(self, instance): - task = instance.data["kitsu_task"]["id"] comment = instance.data["kitsu_comment"]["id"] diff --git a/openpype/modules/kitsu/utils/sync_service.py b/openpype/modules/kitsu/utils/sync_service.py index 1f12217d44..34714fa4b3 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -102,7 +102,8 @@ class Listener: ) 
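The collector above is the whole Kitsu session bootstrap: point gazu at the
configured server and log in with the stored credentials. The same two calls
as a standalone sketch, using the environment variable names from the plugin:

    import os
    import gazu

    def kitsu_login_from_env():
        """Open a gazu session from the KITSU_* environment variables."""
        gazu.client.set_host(os.environ["KITSU_SERVER"])
        gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"])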
gazu.events.add_listener( - self.event_client, "shot:new", self._new_shot) + self.event_client, "shot:new", self._new_shot + ) gazu.events.add_listener( self.event_client, "shot:update", self._update_shot ) @@ -111,7 +112,8 @@ class Listener: ) gazu.events.add_listener( - self.event_client, "task:new", self._new_task) + self.event_client, "task:new", self._new_task + ) gazu.events.add_listener( self.event_client, "task:update", self._update_task ) @@ -146,7 +148,8 @@ class Listener: # Write into DB if update_project: self.dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name( - data["project_id"]) + data["project_id"] + ) self.dbcon.bulk_write([update_project]) if new_project: @@ -158,7 +161,8 @@ class Listener: collections = self.dbcon.database.list_collection_names() for collection in collections: project = self.dbcon.database[collection].find_one( - {"data.zou_id": data["project_id"]}) + {"data.zou_id": data["project_id"]} + ) if project: # Delete project collection self.dbcon.database[project["name"]].drop() @@ -186,13 +190,15 @@ class Listener: ep_id = asset.get("episode_id") ep = self.get_ep_dict(ep_id) - msg = "Asset created: {proj_name} - {ep_name}" \ + msg = ( + "Asset created: {proj_name} - {ep_name}" "{asset_type_name} - {asset_name}".format( proj_name=asset["project_name"], ep_name=ep["name"] + " - " if ep is not None else "", asset_type_name=asset["asset_type_name"], - asset_name=asset["name"] + asset_name=asset["name"], ) + ) log.info(msg) def _update_asset(self, data): @@ -216,8 +222,11 @@ class Listener: # Update update_op_result = update_op_assets( - self.dbcon, gazu_project, project_doc, - [asset], zou_ids_and_asset_docs + self.dbcon, + gazu_project, + project_doc, + [asset], + zou_ids_and_asset_docs, ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -238,13 +247,15 @@ class Listener: ep_id = asset["data"]["zou"].get("episode_id") ep = self.get_ep_dict(ep_id) - msg = "Asset deleted: {proj_name} - {ep_name}" \ + msg = ( + "Asset deleted: {proj_name} - {ep_name}" "{type_name} - {asset_name}".format( proj_name=asset["data"]["zou"]["project_name"], ep_name=ep["name"] + " - " if ep is not None else "", type_name=asset["data"]["zou"]["asset_type_name"], - asset_name=asset["name"] + asset_name=asset["name"], ) + ) log.info(msg) # == Episode == @@ -264,8 +275,7 @@ class Listener: # Print message msg = "Episode created: {proj_name} - {ep_name}".format( - proj_name=ep["project_name"], - ep_name=ep["name"] + proj_name=ep["project_name"], ep_name=ep["name"] ) log.info(msg) @@ -290,8 +300,11 @@ class Listener: # Update update_op_result = update_op_assets( - self.dbcon, gazu_project, project_doc, - [ep], zou_ids_and_asset_docs + self.dbcon, + gazu_project, + project_doc, + [ep], + zou_ids_and_asset_docs, ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -310,11 +323,11 @@ class Listener: # Print message project = gazu.project.get_project( - ep["data"]["zou"]["project_id"]) + ep["data"]["zou"]["project_id"] + ) msg = "Episode deleted: {proj_name} - {ep_name}".format( - proj_name=project["name"], - ep_name=ep["name"] + proj_name=project["name"], ep_name=ep["name"] ) log.info(msg) @@ -337,12 +350,14 @@ class Listener: ep_id = sequence.get("episode_id") ep = self.get_ep_dict(ep_id) - msg = "Sequence created: {proj_name} - {ep_name}" \ + msg = ( + "Sequence created: {proj_name} - {ep_name}" "{sequence_name}".format( proj_name=sequence["project_name"], ep_name=ep["name"] + " - " if ep is not None else "", - sequence_name=sequence["name"] + 
sequence_name=sequence["name"], ) + ) log.info(msg) def _update_sequence(self, data): @@ -366,8 +381,11 @@ class Listener: # Update update_op_result = update_op_assets( - self.dbcon, gazu_project, project_doc, - [sequence], zou_ids_and_asset_docs + self.dbcon, + gazu_project, + project_doc, + [sequence], + zou_ids_and_asset_docs, ) if update_op_result: asset_doc_id, asset_update = update_op_result[0] @@ -388,14 +406,17 @@ class Listener: ep = self.get_ep_dict(ep_id) gazu_project = gazu.project.get_project( - sequence["data"]["zou"]["project_id"]) + sequence["data"]["zou"]["project_id"] + ) - msg = "Sequence deleted: {proj_name} - {ep_name}" \ + msg = ( + "Sequence deleted: {proj_name} - {ep_name}" "{sequence_name}".format( proj_name=gazu_project["name"], ep_name=ep["name"] + " - " if ep is not None else "", - sequence_name=sequence["name"] + sequence_name=sequence["name"], ) + ) log.info(msg) # == Shot == @@ -417,13 +438,15 @@ class Listener: ep_id = shot["episode_id"] ep = self.get_ep_dict(ep_id) - msg = "Shot created: {proj_name} - {ep_name}" \ + msg = ( + "Shot created: {proj_name} - {ep_name}" "{sequence_name} - {shot_name}".format( proj_name=shot["project_name"], ep_name=ep["name"] + " - " if ep is not None else "", sequence_name=shot["sequence_name"], - shot_name=shot["name"] + shot_name=shot["name"], ) + ) log.info(msg) def _update_shot(self, data): @@ -440,14 +463,18 @@ class Listener: zou_ids_and_asset_docs = { asset_doc["data"]["zou"]["id"]: asset_doc for asset_doc in get_assets(project_name) - if asset_doc["data"].get("zou", {}).get("id")} + if asset_doc["data"].get("zou", {}).get("id") + } zou_ids_and_asset_docs[shot["project_id"]] = project_doc gazu_project = gazu.project.get_project(shot["project_id"]) # Update update_op_result = update_op_assets( - self.dbcon, gazu_project, project_doc, - [shot], zou_ids_and_asset_docs + self.dbcon, + gazu_project, + project_doc, + [shot], + zou_ids_and_asset_docs, ) if update_op_result: @@ -469,13 +496,15 @@ class Listener: ep_id = shot["data"]["zou"].get("episode_id") ep = self.get_ep_dict(ep_id) - msg = "Shot deleted: {proj_name} - {ep_name}" \ + msg = ( + "Shot deleted: {proj_name} - {ep_name}" "{sequence_name} - {shot_name}".format( proj_name=shot["data"]["zou"]["project_name"], ep_name=ep["name"] + " - " if ep is not None else "", sequence_name=shot["data"]["zou"]["sequence_name"], - shot_name=shot["name"] + shot_name=shot["name"], ) + ) log.info(msg) # == Task == @@ -504,12 +533,12 @@ class Listener: parent_name = "{ep_name}{sequence_name} - {shot_name}".format( ep_name=ep["name"] + " - " if ep is not None else "", sequence_name=task["sequence"]["name"], - shot_name=task["entity"]["name"] + shot_name=task["entity"]["name"], ) asset_name = "{ep_name}{sequence_name}_{shot_name}".format( ep_name=ep["name"] + "_" if ep is not None else "", sequence_name=task["sequence"]["name"], - shot_name=task["entity"]["name"] + shot_name=task["entity"]["name"], ) # Update asset tasks with new one @@ -518,20 +547,24 @@ class Listener: asset_tasks = asset_doc["data"].get("tasks") task_type_name = task["task_type"]["name"] asset_tasks[task_type_name] = { - "type": task_type_name, "zou": task} + "type": task_type_name, + "zou": task, + } self.dbcon.update_one( {"_id": asset_doc["_id"]}, - {"$set": {"data.tasks": asset_tasks}} + {"$set": {"data.tasks": asset_tasks}}, ) # Print message - msg = "Task created: {proj} - {ent_type}{parent}" \ + msg = ( + "Task created: {proj} - {ent_type}{parent}" " - {task}".format( proj=task["project"]["name"], 
ent_type=ent_type + " - " if ent_type is not None else "", parent=parent_name, - task=task["task_type"]["name"] + task=task["task_type"]["name"], ) + ) log.info(msg) def _update_task(self, data): @@ -567,19 +600,19 @@ class Listener: parent_name = "{ep}{entity_type} - {entity}".format( ep=ep["name"] + " - " if ep is not None else "", entity_type=task["zou"]["entity_type"]["name"], - entity=task["zou"]["entity"]["name"] + entity=task["zou"]["entity"]["name"], ) elif entity["type"] == "Shot": parent_name = "{ep}{sequence} - {shot}".format( ep=ep["name"] + " - " if ep is not None else "", sequence=task["zou"]["sequence"]["name"], - shot=task["zou"]["entity"]["name"] + shot=task["zou"]["entity"]["name"], ) msg = "Task deleted: {proj} - {parent} - {task}".format( proj=task["zou"]["project"]["name"], parent=parent_name, - task=name + task=name, ) log.info(msg) @@ -593,6 +626,7 @@ def start_listeners(login: str, password: str): login (str): Kitsu user login password (str): Kitsu user password """ + # Refresh token every week def refresh_token_every_week(): log.info("Refreshing token...") diff --git a/openpype/modules/kitsu/utils/update_zou_with_op.py b/openpype/modules/kitsu/utils/update_zou_with_op.py index 617f037c1e..be931af233 100644 --- a/openpype/modules/kitsu/utils/update_zou_with_op.py +++ b/openpype/modules/kitsu/utils/update_zou_with_op.py @@ -83,7 +83,8 @@ def sync_zou_from_op_project( } ) gazu.project.update_project_data( - zou_project, data=project_doc["data"]) + zou_project, data=project_doc["data"] + ) gazu.project.update_project(zou_project) asset_types = gazu.asset.all_asset_types() @@ -99,8 +100,7 @@ def sync_zou_from_op_project( project_module_settings = get_project_settings(project_name)["kitsu"] dbcon.Session["AVALON_PROJECT"] = project_name asset_docs = { - asset_doc["_id"]: asset_doc - for asset_doc in get_assets(project_name) + asset_doc["_id"]: asset_doc for asset_doc in get_assets(project_name) } # Create new assets @@ -176,7 +176,8 @@ def sync_zou_from_op_project( frame_in=doc["data"]["frameStart"], frame_out=doc["data"]["frameEnd"], nb_frames=( - doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1), + doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1 + ), ) elif match.group(2): # Sequence From 67bc287321fd03287aedf222e6d9c7ebf25e3332 Mon Sep 17 00:00:00 2001 From: Jacob Danell Date: Fri, 3 Mar 2023 11:12:17 +0100 Subject: [PATCH 82/88] Fix hound comments --- openpype/modules/kitsu/utils/update_op_with_zou.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 73b7a4249d..053e803ff3 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -175,12 +175,12 @@ def update_op_assets( elif item_type == "Shot": tasks_list = gazu.task.all_tasks_for_shot(item) item_data["tasks"] = { - item_data["tasks"] = { - t["task_type_name"]: { - "type": t["task_type_name"], - "zou": gazu.task.get_task(t["id"]), + item_data["tasks"] = { + t["task_type_name"]: { + "type": t["task_type_name"], + "zou": gazu.task.get_task(t["id"]), + } } - } for t in tasks_list } From 67e8f59935a7a1824aceb71cdc32e354c8a33a98 Mon Sep 17 00:00:00 2001 From: Ynbot Date: Sat, 4 Mar 2023 03:27:48 +0000 Subject: [PATCH 83/88] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 4d6f3d43e4..2939ddbbac 
100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.15.2-nightly.3" +__version__ = "3.15.2-nightly.4" From 5efc9e0ff0cdf0f410b7a0b92b27cc2ed03256e2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 6 Mar 2023 11:00:02 +0100 Subject: [PATCH 84/88] Editorial: Fix tasks removal (#4558) Fix tasks removal in editorial --- .../publish/extract_hierarchy_avalon.py | 52 +++++++++++++------ 1 file changed, 36 insertions(+), 16 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index b2a6adc210..493780645c 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -135,6 +135,38 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): ) return project_doc + def _prepare_new_tasks(self, asset_doc, entity_data): + new_tasks = entity_data.get("tasks") or {} + if not asset_doc: + return new_tasks + + old_tasks = asset_doc.get("data", {}).get("tasks") + # Just use new tasks if old are not available + if not old_tasks: + return new_tasks + + output = deepcopy(old_tasks) + # Create mapping of lowered task names from old tasks + cur_task_low_mapping = { + task_name.lower(): task_name + for task_name in old_tasks + } + # Add/update tasks from new entity data + for task_name, task_info in new_tasks.items(): + task_info = deepcopy(task_info) + task_name_low = task_name.lower() + # Add new task + if task_name_low not in cur_task_low_mapping: + output[task_name] = task_info + continue + + # Update existing task with new info + mapped_task_name = cur_task_low_mapping.pop(task_name_low) + src_task_info = output.pop(mapped_task_name) + src_task_info.update(task_info) + output[task_name] = src_task_info + return output + def sync_asset( self, asset_name, @@ -170,11 +202,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): data["parents"] = parents asset_doc = asset_docs_by_name.get(asset_name) + + # Tasks + data["tasks"] = self._prepare_new_tasks(asset_doc, entity_data) + # --- Create/Unarchive asset and end --- if not asset_doc: - # Just use tasks from entity data as they are - # - this is different from the case when tasks are updated - data["tasks"] = entity_data.get("tasks") or {} archived_asset_doc = None for archived_entity in archived_asset_docs_by_name[asset_name]: archived_parents = ( @@ -201,19 +234,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if "data" not in asset_doc: asset_doc["data"] = {} cur_entity_data = asset_doc["data"] - cur_entity_tasks = cur_entity_data.get("tasks") or {} - - # Tasks - data["tasks"] = {} - new_tasks = entity_data.get("tasks") or {} - for task_name, task_info in new_tasks.items(): - task_info = deepcopy(task_info) - if task_name in cur_entity_tasks: - src_task_info = deepcopy(cur_entity_tasks[task_name]) - src_task_info.update(task_info) - task_info = src_task_info - - data["tasks"][task_name] = task_info changes = {} for key, value in data.items(): From 08c71380709cf672e4b930b351a0671331521610 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 6 Mar 2023 11:13:57 +0100 Subject: [PATCH 85/88] Nuke: moving deepcopy to abstraction --- openpype/pipeline/colorspace.py | 31 ++++++++++---------- openpype/pipeline/publish/publish_plugins.py | 5 ++-- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/openpype/pipeline/colorspace.py 
b/openpype/pipeline/colorspace.py index 6f68bdc5bf..2085e2d37f 100644 --- a/openpype/pipeline/colorspace.py +++ b/openpype/pipeline/colorspace.py @@ -335,9 +335,10 @@ def get_imageio_config( get_template_data_from_session) anatomy_data = get_template_data_from_session() + formatting_data = deepcopy(anatomy_data) # add project roots to anatomy data - anatomy_data["root"] = anatomy.roots - anatomy_data["platform"] = platform.system().lower() + formatting_data["root"] = anatomy.roots + formatting_data["platform"] = platform.system().lower() # get colorspace settings imageio_global, imageio_host = _get_imageio_settings( @@ -347,7 +348,7 @@ def get_imageio_config( if config_host.get("enabled"): config_data = _get_config_data( - config_host["filepath"], anatomy_data + config_host["filepath"], formatting_data ) else: config_data = None @@ -356,7 +357,7 @@ def get_imageio_config( # get config path from either global or host_name config_global = imageio_global["ocio_config"] config_data = _get_config_data( - config_global["filepath"], anatomy_data + config_global["filepath"], formatting_data ) if not config_data: @@ -372,12 +373,12 @@ def _get_config_data(path_list, anatomy_data): """Return first existing path in path list. If template is used in path inputs, - then it is formated by anatomy data + then it is formatted by anatomy data and environment variables Args: path_list (list[str]): list of abs paths - anatomy_data (dict): formating data + anatomy_data (dict): formatting data Returns: dict: config data @@ -389,30 +390,30 @@ def _get_config_data(path_list, anatomy_data): # first try host config paths for path_ in path_list: - formated_path = _format_path(path_, formatting_data) + formatted_path = _format_path(path_, formatting_data) - if not os.path.exists(formated_path): + if not os.path.exists(formatted_path): continue return { - "path": os.path.normpath(formated_path), + "path": os.path.normpath(formatted_path), "template": path_ } -def _format_path(tempate_path, formatting_data): - """Single template path formating. +def _format_path(template_path, formatting_data): + """Single template path formatting. Args: - tempate_path (str): template string + template_path (str): template string formatting_data (dict): data to be used for - template formating + template formatting Returns: - str: absolute formated path + str: absolute formatted path """ # format path for anatomy keys - formatted_path = StringTemplate(tempate_path).format( + formatted_path = StringTemplate(template_path).format( formatting_data) return os.path.abspath(formatted_path) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 2df98221ba..331235fadc 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,4 +1,3 @@ -from copy import deepcopy import inspect from abc import ABCMeta from pprint import pformat @@ -311,7 +310,7 @@ class ColormanagedPyblishPluginMixin(object): @staticmethod def get_colorspace_settings(context): - """Retuns solved settings for the host context. + """Returns solved settings for the host context. 
Args: context (publish.Context): publishing context @@ -324,7 +323,7 @@ class ColormanagedPyblishPluginMixin(object): project_name = context.data["projectName"] host_name = context.data["hostName"] - anatomy_data = deepcopy(context.data["anatomyData"]) + anatomy_data = context.data["anatomyData"] project_settings_ = context.data["project_settings"] config_data = get_imageio_config( From de50783c0435ec75a8ac7d9b29068c96a7bab8de Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 6 Mar 2023 18:34:56 +0100 Subject: [PATCH 86/88] Nuke: Add option to use new creating system in workfile template builder (#4545) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * added option to use new creating system in workfile template builder * fix spaces * use 'create' method on create context to trigger creation * fix attribute access * adding headless to creators and workfile builder abstraction * adding noqa for hound * hound --------- Co-authored-by: Jakub Jezek Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .../maya/api/workfile_template_builder.py | 2 + openpype/hosts/nuke/api/plugin.py | 6 +- .../nuke/plugins/create/create_write_image.py | 2 +- .../plugins/create/create_write_prerender.py | 2 +- .../plugins/create/create_write_render.py | 2 +- .../workfile/workfile_template_builder.py | 97 ++++++++++++++----- 6 files changed, 81 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/maya/api/workfile_template_builder.py b/openpype/hosts/maya/api/workfile_template_builder.py index 2f550e787a..90ab6e21e0 100644 --- a/openpype/hosts/maya/api/workfile_template_builder.py +++ b/openpype/hosts/maya/api/workfile_template_builder.py @@ -22,6 +22,8 @@ PLACEHOLDER_SET = "PLACEHOLDERS_SET" class MayaTemplateBuilder(AbstractTemplateBuilder): """Concrete implementation of AbstractTemplateBuilder for maya""" + use_legacy_creators = True + def import_template(self, path): """Import template into current scene. Block if a template is already loaded. 
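For illustration, a minimal sketch (not part of the patch) of the switch this change introduces: a host builder keeps the old class-name based creator discovery by setting use_legacy_creators to True, as the Maya builder above does, while the default (False) resolves creators from a headless CreateContext, as the pipeline-side diff further below in this patch shows. The class names and bodies here are hypothetical and assume only the AbstractTemplateBuilder API introduced by this patch.

from openpype.pipeline.workfile.workfile_template_builder import (
    AbstractTemplateBuilder,
)


class LegacyStyleTemplateBuilder(AbstractTemplateBuilder):
    # Keep discovering legacy creator plugins keyed by class name.
    use_legacy_creators = True

    def import_template(self, path):
        # Host specific template import would go here.
        return True


class NewStyleTemplateBuilder(AbstractTemplateBuilder):
    # Default: creators come from a headless CreateContext, so placeholder
    # plugins can call self.builder.create_context.create(...).
    use_legacy_creators = False

    def import_template(self, path):
        # Host specific template import would go here.
        return True

Placeholder plugins keep resolving creators through get_creators_by_name() in both cases; only the discovery mechanism behind it changes.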
diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 6c2d4b84be..aec87be5ab 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -239,7 +239,11 @@ class NukeCreator(NewCreator): def get_pre_create_attr_defs(self): return [ - BoolDef("use_selection", label="Use selection") + BoolDef( + "use_selection", + default=not self.create_context.headless, + label="Use selection" + ) ] def get_creator_settings(self, project_settings, settings_key=None): diff --git a/openpype/hosts/nuke/plugins/create/create_write_image.py b/openpype/hosts/nuke/plugins/create/create_write_image.py index 1e23b3ad7f..d38253ab2f 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_image.py +++ b/openpype/hosts/nuke/plugins/create/create_write_image.py @@ -35,7 +35,7 @@ class CreateWriteImage(napi.NukeWriteCreator): attr_defs = [ BoolDef( "use_selection", - default=True, + default=not self.create_context.headless, label="Use selection" ), self._get_render_target_enum(), diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 1603bf17e3..8103cb7c4d 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -34,7 +34,7 @@ class CreateWritePrerender(napi.NukeWriteCreator): attr_defs = [ BoolDef( "use_selection", - default=True, + default=not self.create_context.headless, label="Use selection" ), self._get_render_target_enum() diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 72fcb4f232..23efa62e36 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -31,7 +31,7 @@ class CreateWriteRender(napi.NukeWriteCreator): attr_defs = [ BoolDef( "use_selection", - default=True, + default=not self.create_context.headless, label="Use selection" ), self._get_render_target_enum() diff --git a/openpype/pipeline/workfile/workfile_template_builder.py b/openpype/pipeline/workfile/workfile_template_builder.py index 119e4aaeb7..27214af79f 100644 --- a/openpype/pipeline/workfile/workfile_template_builder.py +++ b/openpype/pipeline/workfile/workfile_template_builder.py @@ -43,7 +43,8 @@ from openpype.pipeline.load import ( load_with_repre_context, ) from openpype.pipeline.create import ( - discover_legacy_creator_plugins + discover_legacy_creator_plugins, + CreateContext, ) @@ -91,6 +92,7 @@ class AbstractTemplateBuilder(object): """ _log = None + use_legacy_creators = False def __init__(self, host): # Get host name @@ -110,6 +112,7 @@ class AbstractTemplateBuilder(object): self._placeholder_plugins = None self._loaders_by_name = None self._creators_by_name = None + self._create_context = None self._system_settings = None self._project_settings = None @@ -171,6 +174,16 @@ class AbstractTemplateBuilder(object): .get("type") ) + @property + def create_context(self): + if self._create_context is None: + self._create_context = CreateContext( + self.host, + discover_publish_plugins=False, + headless=True + ) + return self._create_context + def get_placeholder_plugin_classes(self): """Get placeholder plugin classes that can be used to build template. 
@@ -235,18 +248,29 @@ class AbstractTemplateBuilder(object): self._loaders_by_name = get_loaders_by_name() return self._loaders_by_name + def _collect_legacy_creators(self): + creators_by_name = {} + for creator in discover_legacy_creator_plugins(): + if not creator.enabled: + continue + creator_name = creator.__name__ + if creator_name in creators_by_name: + raise KeyError( + "Duplicated creator name {} !".format(creator_name) + ) + creators_by_name[creator_name] = creator + self._creators_by_name = creators_by_name + + def _collect_creators(self): + self._creators_by_name = dict(self.create_context.creators) + def get_creators_by_name(self): if self._creators_by_name is None: - self._creators_by_name = {} - for creator in discover_legacy_creator_plugins(): - if not creator.enabled: - continue - creator_name = creator.__name__ - if creator_name in self._creators_by_name: - raise KeyError( - "Duplicated creator name {} !".format(creator_name) - ) - self._creators_by_name[creator_name] = creator + if self.use_legacy_creators: + self._collect_legacy_creators() + else: + self._collect_creators() + return self._creators_by_name def get_shared_data(self, key): @@ -1579,6 +1603,8 @@ class PlaceholderCreateMixin(object): placeholder (PlaceholderItem): Placeholder item with information about requested publishable instance. """ + + legacy_create = self.builder.use_legacy_creators creator_name = placeholder.data["creator"] create_variant = placeholder.data["create_variant"] @@ -1589,17 +1615,28 @@ class PlaceholderCreateMixin(object): task_name = legacy_io.Session["AVALON_TASK"] asset_name = legacy_io.Session["AVALON_ASSET"] - # get asset id - asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) - assert asset_doc, "No current asset found in Session" - asset_id = asset_doc['_id'] + if legacy_create: + asset_doc = get_asset_by_name( + project_name, asset_name, fields=["_id"] + ) + assert asset_doc, "No current asset found in Session" + subset_name = creator_plugin.get_subset_name( + create_variant, + task_name, + asset_doc["_id"], + project_name + ) - subset_name = creator_plugin.get_subset_name( - create_variant, - task_name, - asset_id, - project_name - ) + else: + asset_doc = get_asset_by_name(project_name, asset_name) + assert asset_doc, "No current asset found in Session" + subset_name = creator_plugin.get_subset_name( + create_variant, + task_name, + asset_doc, + project_name, + self.builder.host_name + ) creator_data = { "creator_name": creator_name, @@ -1612,12 +1649,20 @@ class PlaceholderCreateMixin(object): # compile subset name from variant try: - creator_instance = creator_plugin( - subset_name, - asset_name - ).process() + if legacy_create: + creator_instance = creator_plugin( + subset_name, + asset_name + ).process() + else: + creator_instance = self.builder.create_context.create( + creator_plugin.identifier, + create_variant, + asset_doc, + task_name=task_name + ) - except Exception: + except: # noqa: E722 failed = True self.create_failed(placeholder, creator_data) From 16cece3e499b0336e490b1ac0bf01d69f715d0f6 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 6 Mar 2023 19:45:17 +0100 Subject: [PATCH 87/88] Fusion: get filepath from representation instead of listing files from publish folder --- .../fusion/plugins/load/load_sequence.py | 25 ++++++------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py index 6f44c61d1b..9daf4b007d 100644 
--- a/openpype/hosts/fusion/plugins/load/load_sequence.py +++ b/openpype/hosts/fusion/plugins/load/load_sequence.py @@ -1,11 +1,9 @@ -import os import contextlib -from openpype.client import get_version_by_id -from openpype.pipeline import ( - load, - legacy_io, - get_representation_path, +import openpype.pipeline.load as load +from openpype.pipeline.load import ( + get_representation_context, + get_representation_path_from_context ) from openpype.hosts.fusion.api import ( imprint_container, @@ -141,7 +139,7 @@ class FusionLoadSequence(load.LoaderPlugin): namespace = context['asset']['name'] # Use the first file for now - path = self._get_first_image(os.path.dirname(self.fname)) + path = get_representation_path_from_context(context) # Create the Loader with the filename path set comp = get_current_comp() @@ -210,13 +208,11 @@ class FusionLoadSequence(load.LoaderPlugin): assert tool.ID == "Loader", "Must be Loader" comp = tool.Comp() - root = os.path.dirname(get_representation_path(representation)) - path = self._get_first_image(root) + context = get_representation_context(representation) + path = get_representation_path_from_context(context) # Get start frame from version data - project_name = legacy_io.active_project() - version = get_version_by_id(project_name, representation["parent"]) - start = self._get_start(version, tool) + start = self._get_start(context["version"], tool) with comp_lock_and_undo_chunk(comp, "Update Loader"): @@ -249,11 +245,6 @@ class FusionLoadSequence(load.LoaderPlugin): with comp_lock_and_undo_chunk(comp, "Remove Loader"): tool.Delete() - def _get_first_image(self, root): - """Get first file in representation root""" - files = sorted(os.listdir(root)) - return os.path.join(root, files[0]) - def _get_start(self, version_doc, tool): """Return real start frame of published files (incl. handles)""" data = version_doc["data"] From bc1ef9229c2250aa0be84917bf6bc23e9ec65354 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 7 Mar 2023 10:39:20 +0100 Subject: [PATCH 88/88] Photoshop: context is not changed in publisher (#4570) * OP-5025 - fix - proper changing of context When PS is already opened, new opening from different context should change it. 
* OP-5025 - open last workfile for new context if present
* OP-5025 - remove unneeded assignment
* OP-5025 - removed whitespace
---
 openpype/hosts/photoshop/api/launch_logic.py | 79 ++++++++++++++----
 1 file changed, 63 insertions(+), 16 deletions(-)

diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py
index a4377a9972..89ba6ad4e6 100644
--- a/openpype/hosts/photoshop/api/launch_logic.py
+++ b/openpype/hosts/photoshop/api/launch_logic.py
@@ -10,10 +10,20 @@ from wsrpc_aiohttp import (
     WebSocketRoute
 )

 from qtpy import QtCore

-from openpype.lib import Logger
-from openpype.pipeline import legacy_io
+from openpype.lib import Logger, StringTemplate
+from openpype.pipeline import (
+    registered_host,
+    Anatomy,
+)
+from openpype.pipeline.workfile import (
+    get_workfile_template_key_from_context,
+    get_last_workfile,
+)
+from openpype.pipeline.template_data import get_template_data_with_names
 from openpype.tools.utils import host_tools
 from openpype.tools.adobe_webserver.app import WebServerTool
+from openpype.pipeline.context_tools import change_current_context
+from openpype.client import get_asset_by_name

 from .ws_stub import PhotoshopServerStub

@@ -310,23 +320,28 @@ class PhotoshopRoute(WebSocketRoute):
     # client functions
     async def set_context(self, project, asset, task):
         """
-        Sets 'project' and 'asset' to envs, eg. setting context
+        Sets 'project' and 'asset' to envs, e.g. setting context.

-        Args:
-            project (str)
-            asset (str)
+        Opens last workfile from that context if it exists.
+
+        Args:
+            project (str)
+            asset (str)
+            task (str)
         """
         log.info("Setting context change")
-        log.info("project {} asset {} ".format(project, asset))
-        if project:
-            legacy_io.Session["AVALON_PROJECT"] = project
-            os.environ["AVALON_PROJECT"] = project
-        if asset:
-            legacy_io.Session["AVALON_ASSET"] = asset
-            os.environ["AVALON_ASSET"] = asset
-        if task:
-            legacy_io.Session["AVALON_TASK"] = task
-            os.environ["AVALON_TASK"] = task
+        log.info(f"project {project} asset {asset} task {task}")
+
+        asset_doc = get_asset_by_name(project, asset)
+        change_current_context(asset_doc, task)
+
+        last_workfile_path = self._get_last_workfile_path(project,
+                                                          asset,
+                                                          task)
+        if last_workfile_path and os.path.exists(last_workfile_path):
+            ProcessLauncher.execute_in_main_thread(
+                lambda: stub().open(last_workfile_path))
+

     async def read(self):
         log.debug("photoshop.read client calls server server calls "
@@ -356,3 +371,35 @@

         # Required return statement.
         return "nothing"
+
+    def _get_last_workfile_path(self, project_name, asset_name, task_name):
+        """Returns last workfile path if it exists"""
+        host = registered_host()
+        host_name = "photoshop"
+        template_key = get_workfile_template_key_from_context(
+            asset_name,
+            task_name,
+            host_name,
+            project_name=project_name
+        )
+        anatomy = Anatomy(project_name)
+
+        data = get_template_data_with_names(
+            project_name, asset_name, task_name, host_name
+        )
+        data["root"] = anatomy.roots
+
+        file_template = anatomy.templates[template_key]["file"]
+
+        # Define saving file extension
+        extensions = host.get_workfile_extensions()
+
+        folder_template = anatomy.templates[template_key]["folder"]
+        work_root = StringTemplate.format_strict_template(
+            folder_template, data
+        )
+        last_workfile_path = get_last_workfile(
+            work_root, file_template, data, extensions, True
+        )
+
+        return last_workfile_path
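One change in this series benefits from a worked example: the case-insensitive task merge added by PATCH 84 in extract_hierarchy_avalon.py. Existing task entries survive, incoming info is layered on top of them, and name matching ignores case. The helper below is a condensed, self-contained restatement of _prepare_new_tasks() from that patch, not the plugin itself, and the sample data is hypothetical.

from copy import deepcopy


def prepare_new_tasks(asset_doc, entity_data):
    # Condensed from ExtractHierarchyToAvalon._prepare_new_tasks (PATCH 84).
    new_tasks = entity_data.get("tasks") or {}
    old_tasks = (asset_doc or {}).get("data", {}).get("tasks")
    if not old_tasks:
        # No existing asset or no existing tasks: take new tasks as they are.
        return new_tasks

    output = deepcopy(old_tasks)
    # Map lowered existing task names to their stored spelling.
    low_mapping = {name.lower(): name for name in old_tasks}
    for task_name, task_info in new_tasks.items():
        task_info = deepcopy(task_info)
        low_name = task_name.lower()
        if low_name not in low_mapping:
            # Completely new task: simply add it.
            output[task_name] = task_info
            continue
        # Same task with different casing: update the stored info and
        # re-key it to the incoming name instead of duplicating it.
        stored_name = low_mapping.pop(low_name)
        src_info = output.pop(stored_name)
        src_info.update(task_info)
        output[task_name] = src_info
    return output


asset_doc = {"data": {"tasks": {
    "Compositing": {"type": "Compositing", "zou": {"id": "abc123"}},
}}}
entity_data = {"tasks": {
    "compositing": {"type": "Compositing"},
    "Lighting": {"type": "Lighting"},
}}

print(prepare_new_tasks(asset_doc, entity_data))
# {'compositing': {'type': 'Compositing', 'zou': {'id': 'abc123'}},
#  'Lighting': {'type': 'Lighting'}}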